Compare commits
No commits in common. "master" and "v0.3.8" have entirely different histories.
.babelrc (7 lines)
@@ -1,7 +0,0 @@
{
  "plugins": ["transform-class-properties"],
  "presets": [
    "@babel/preset-env",
    "@babel/preset-react"
  ]
}
@@ -1,90 +0,0 @@
---
Language: Cpp
# BasedOnStyle: LLVM
AccessModifierOffset: -2
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlinesLeft: false
AlignOperands: true
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: true
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: All
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: false
BinPackArguments: false
BinPackParameters: false
BraceWrapping:
  AfterClass: true
  AfterControlStatement: true
  AfterEnum: true
  AfterFunction: true
  AfterNamespace: true
  AfterObjCDeclaration: true
  AfterStruct: true
  AfterUnion: true
  BeforeCatch: true
  BeforeElse: true
  IndentBraces: true
BreakBeforeBinaryOperators: false
BreakBeforeBraces: Allman
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
ColumnLimit: 100
CommentPragmas: '^ IWYU pragma:'
ConstructorInitializerAllOnOneLineOrOnePerLine: false
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ]
IncludeCategories:
  - Regex: '^<'
    Priority: 3
  - Regex: '^"(osrm|util|engine|extract|contract)/'
    Priority: 2
  - Regex: '.*'
    Priority: 1
IndentCaseLabels: false
IndentWidth: 4
IndentWrappedFunctionNames: false
KeepEmptyLinesAtTheStartOfBlocks: true
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBlockIndentWidth: 2
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Right
ReflowComments: true
SortIncludes: true
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Never
...
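To make the effect of these options concrete, here is a small, entirely hypothetical C++ fragment laid out the way the configuration above asks clang-format to format code: Allman braces (BreakBeforeBraces), four-space indentation (IndentWidth), right-aligned pointers (PointerAlignment: Right), and a space before control-statement parentheses. The function and values are invented for illustration only and do not come from the OSRM sources.

```cpp
#include <iostream>

// Hypothetical example: shaped the way the options above request.
int countPositive(const int *values, int size)
{
    int result = 0;
    for (int i = 0; i < size; ++i)
    {
        if (values[i] > 0)
        {
            ++result;
        }
    }
    return result;
}

int main()
{
    const int samples[] = {-1, 2, 3, -4};
    std::cout << countPositive(samples, 4) << '\n';
    return 0;
}
```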
.clang-tidy (101 lines)
@@ -1,101 +0,0 @@
---
Checks: >
    bugprone-*,
    -bugprone-narrowing-conversions,
    -bugprone-easily-swappable-parameters,
    -bugprone-branch-clone,
    -bugprone-misplaced-widening-cast,
    -bugprone-exception-escape,
    -bugprone-implicit-widening-of-multiplication-result,
    -bugprone-integer-division,
    -bugprone-reserved-identifier,
    -bugprone-unhandled-self-assignment,
    -bugprone-forward-declaration-namespace,
    -bugprone-sizeof-expression,
    -bugprone-throw-keyword-missing,
    -bugprone-chained-comparison,
    -bugprone-incorrect-enable-if,
    -bugprone-switch-missing-default-case,
    -bugprone-empty-catch,
    -bugprone-unchecked-optional-access,
    -clang-analyzer-*,
    -clang-diagnostic-deprecated-declarations,
    -clang-diagnostic-constant-conversion,
    cppcoreguidelines-avoid-goto,
    cppcoreguidelines-no-malloc,
    cppcoreguidelines-virtual-class-destructor,
    google-*,
    -google-build-explicit-make-pair,
    -google-build-using-namespace,
    -google-explicit-constructor,
    -google-default-arguments,
    -google-readability-braces-around-statements,
    -google-readability-casting,
    -google-readability-namespace-comments,
    -google-readability-function,
    -google-readability-todo,
    -google-runtime-int,
    -google-build-namespaces,
    -google-runtime-references,
    -google-readability-function-size,
    llvm-*,
    -llvm-namespace-comment,
    -llvm-qualified-auto,
    -llvm-include-order,
    -llvm-else-after-return,
    -llvm-header-guard,
    -llvm-twine-local,
    misc-*,
    -misc-argument-comment,
    -misc-const-correctness,
    -misc-non-private-member-variables-in-classes,
    -misc-unconventional-assign-operator,
    -misc-no-recursion,
    -misc-misplaced-const,
    -misc-definitions-in-headers,
    -misc-unused-parameters,
    -misc-include-cleaner,
    modernize-concat-nested-namespaces,
    modernize-use-using,
    performance-*,
    -performance-no-int-to-ptr,
    -performance-enum-size,
    -performance-avoid-endl,
    readability-*,
    -readability-avoid-const-params-in-decls,
    -readability-braces-around-statements,
    -readability-container-size-empty,
    -readability-convert-member-functions-to-static,
    -readability-const-return-type,
    -readability-function-cognitive-complexity,
    -readability-function-size,
    -readability-identifier-naming,
    -readability-implicit-bool-conversion,
    -readability-magic-numbers,
    -readability-else-after-return,
    -readability-inconsistent-declaration-parameter-name,
    -readability-isolate-declaration,
    -readability-identifier-length,
    -readability-redundant-declaration,
    -readability-uppercase-literal-suffix,
    -readability-named-parameter,
    -readability-qualified-auto,
    -readability-suspicious-call-argument,
    -readability-redundant-access-specifiers,
    -readability-redundant-member-init,
    -readability-static-definition-in-anonymous-namespace,
    -readability-use-anyofallof,
    -readability-simplify-boolean-expr,
    -readability-make-member-function-const,
    -readability-redundant-string-init,
    -readability-non-const-parameter,
    -readability-redundant-inline-specifier,
    -readability-avoid-nested-conditional-operator,
    -readability-avoid-return-with-void-value,
    -readability-redundant-casting,
    -readability-static-accessed-through-instance

WarningsAsErrors: '*'
HeaderFilterRegex: '.*'
.cncc.style (14 lines)
@@ -1,14 +0,0 @@
# Kind-specific patterns to check AST nodes against. Both python-clang and
# libclang docs explain CursorKind, with differences in detail. See also:
# - https://github.com/llvm-mirror/clang/blob/aca4fe314a55cacae29e1548cb7bfd2119c6df4c/bindings/python/clang/cindex.py#L599
# - http://clang.llvm.org/doxygen/group__CINDEX.html#gaaccc432245b4cd9f2d470913f9ef0013
# - https://docs.python.org/2/library/re.html#regular-expression-syntax

class_decl: '^([A-Z]+[a-z]+)+$'
struct_decl: '^([A-Z]+[a-z]+)+$'
field_decl: '^[a-z_]+$'
var_decl: '^[a-z]+[a-z0-9_]*$'
parm_decl: '^[a-z]*[a-z0-9_]*$'
namespace: '^[a-z_]*$'
cxx_method: '^([A-Z]+[a-z]+)+$'
function_decl: '^[a-z]+([A-Z]+[a-z]+)*$'
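The patterns above encode a naming convention rather than formatting. As a hedged illustration only, the following hypothetical snippet uses identifiers that satisfy each regex: lower_case namespaces, CamelCase classes, structs and methods, camelCase free functions, and lower_snake_case fields, variables and parameters. None of these names come from the OSRM code base.

```cpp
// Hypothetical names chosen only to satisfy the .cncc.style regexes above.
namespace routing // namespace: '^[a-z_]*$'
{

struct EdgeWeight // struct_decl: '^([A-Z]+[a-z]+)+$'
{
    int duration_seconds; // field_decl: '^[a-z_]+$'
};

class PathBuilder // class_decl: '^([A-Z]+[a-z]+)+$'
{
  public:
    // cxx_method: '^([A-Z]+[a-z]+)+$', parm_decl: '^[a-z]*[a-z0-9_]*$'
    int TotalWeight(int edge_count) const { return edge_count * base_weight; }

  private:
    int base_weight = 1;
};

// function_decl: '^[a-z]+([A-Z]+[a-z]+)*$'
int addWeights(int first_weight, int second_weight)
{
    return first_weight + second_weight;
}

} // namespace routing

int main()
{
    routing::PathBuilder path_builder; // var_decl: '^[a-z]+[a-z0-9_]*$'
    return routing::addWeights(path_builder.TotalWeight(2), 0) > 0 ? 0 : 1;
}
```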
@@ -1,2 +0,0 @@
test
build
@@ -1,30 +0,0 @@
# EditorConfig is awesome: http://EditorConfig.org
#
# NOTE: Keep settings in sync with the master .clang-format file
#
# top-most EditorConfig file
root = true

# CMake configuration files
[{CMakeLists.txt,CMakeSettings.json,*.cmake}]
indent_size = 2
indent_style = space
trim_trailing_whitespace = true

# CI configuration files
[{.travis.yml,appveyor.yml}]
indent_size = 2
indent_style = space
trim_trailing_whitespace = true

# Unix shell scripts
[*.sh]
end_of_line = lf
indent_style = space
trim_trailing_whitespace = true

# Windows shell scripts
[*.bat]
end_of_line = crlf
indent_style = space
trim_trailing_whitespace = true
@@ -1,2 +0,0 @@
features/support/flatbuffers.js
features/support/fbresult_generated.js
.eslintrc (28 lines)
@@ -1,28 +0,0 @@
{
    "rules": {
        "indent": [
            2,
            4
        ],
        "quotes": [
            1,
            "single"
        ],
        "linebreak-style": [
            2,
            "unix"
        ],
        "semi": [
            2,
            "always"
        ],
        "no-console": [
            1
        ]
    },
    "env": {
        "es6": true,
        "node": true
    },
    "extends": "eslint:recommended"
}
.gitattributes (vendored, 18 lines)
@@ -1,18 +0,0 @@
# Set the default behavior, in case people don't have core.autocrlf set.
* text=auto

# Explicitly declare text files you want to always be normalized and converted
# to native line endings on checkout.
*.cpp text
*.hpp text

# Declare files that will always have CRLF line endings on checkout.
*.bat text eol=crlf
*.cmd text eol=crlf
*.ps1 text eol=crlf

# Declare files that will always have LF line endings on checkout.
*.sh text eol=lf

# https://eslint.org/docs/latest/rules/linebreak-style#using-this-rule-with-version-control-systems
*.js text eol=lf
@@ -1,65 +0,0 @@
name: Build and Publish Docker Image

on:
  release:
    types: [published, prereleased]

env:
  IMAGE_NAME: openharbor/osrm-backend

jobs:
  publish:
    strategy:
      matrix:
        docker-base-image: ["debian", "alpine"]
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.IMAGE_NAME }}

      - name: Docker meta - debug
        id: metadebug
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.IMAGE_NAME }}
          flavor: |
            latest=true
            suffix=-debug,onlatest=true

      - name: Log in to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}

      - name: Build and push debug image
        uses: docker/build-push-action@v4
        with:
          push: true
          platforms: linux/amd64,linux/arm64,linux/ppc64le,linux/riscv64
          file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
          tags: ${{ steps.metadebug.outputs.tags }}
          build-args: |
            DOCKER_TAG=${{ join(steps.metadebug.outputs.tags) }}-${{ matrix.docker-base-image }}

      - name: Build and push normal image
        uses: docker/build-push-action@v4
        with:
          push: true
          platforms: linux/amd64,linux/arm64,linux/ppc64le,linux/riscv64
          file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
          tags: ${{ steps.meta.outputs.tags }}
          build-args: |
            DOCKER_TAG=${{ join(steps.meta.outputs.tags) }}-${{ matrix.docker-base-image }}
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 30 lines)
@@ -1,30 +0,0 @@
---
name: Bug Report
about: Report issue with osrm-backend
labels: Bug Report
---

# Issue

Please describe the issue you are seeing with OSRM.
Images are a good way to illustrate your problem.

**Note**: If your issue relates to the demo site (https://map.project-osrm.org) or routing provided on openstreetmap.org, be aware that they use separate [profile settings](https://github.com/fossgis-routing-server/cbf-routing-profiles) from those provided by default in `osrm-backend`.
If your issue relates to the demo site or openstreetmap.org behaviour, please check these profiles first to see if they explain the behaviour before creating an issue here.

# Steps to reproduce

Please provide the steps required to reproduce your problem.
- `osrm-backend` version being used
- OSM extract that was processed
- Processing commands (e.g. CH vs MLD processing)
- Server queries

If you're reporting an issue with https://map.project-osrm.org, please provide a link to the problematic request.

# Specifications

Please provide details of your development environment.
- Library/dependency versions
- Operating system
- Hardware
.github/ISSUE_TEMPLATE/feature.md (vendored, 10 lines)
@@ -1,10 +0,0 @@
---
name: Feature Request
about: Request a new feature in osrm-backend
labels: Feature Request
---

# Feature

Please describe the feature you would like to see in OSRM.
Images are often a good way to illustrate your requested feature.
.github/PULL_REQUEST_TEMPLATE.md (vendored, 19 lines)
@@ -1,19 +0,0 @@
# Issue

What issue is this PR targeting? If there is no issue that addresses the problem, please open a corresponding issue and link it here.

Please read our [documentation](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/releasing.md) on release and version management.
If your PR is still work in progress please attach the relevant label.

## Tasklist

- [ ] CHANGELOG.md entry ([How to write a changelog entry](http://keepachangelog.com/en/1.0.0/#how))
- [ ] update relevant [Wiki pages](https://github.com/Project-OSRM/osrm-backend/wiki)
- [ ] add tests (see [testing documentation](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/testing.md))
- [ ] review
- [ ] adjust for comments
- [ ] cherry pick to release branch

## Requirements / Relations

Link any requirements here. Other pull requests this PR is based on?
.github/workflows/osrm-backend-docker.yml (vendored, 84 lines)
@@ -1,84 +0,0 @@
name: build and publish container image
on:
  push:
    tags:
      - 'v*'

jobs:
  publish:
    strategy:
      matrix:
        docker-base-image: ["debian", "alpine"]
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: ghcr.io/${{ github.repository }}

      - name: Docker meta - debug
        id: metadebug
        uses: docker/metadata-action@v3
        with:
          images: ghcr.io/${{ github.repository }}
          flavor: |
            latest=true
            suffix=-debug,onlatest=true

      - name: Docker meta - assertions
        id: metaassertions
        uses: docker/metadata-action@v3
        with:
          images: ghcr.io/${{ github.repository }}
          flavor: |
            latest=true
            suffix=-assertions,onlatest=true

      - name: Log in to GitHub Docker Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build container image - debug
        uses: docker/build-push-action@v2
        with:
          push: true
          platforms: linux/amd64,linux/arm64
          file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
          tags: ${{ steps.metadebug.outputs.tags }}
          build-args: |
            DOCKER_TAG=${{ join(steps.metadebug.outputs.tags ) }}-${{ matrix.docker-base-image }}

      - name: Build container image - assertions
        uses: docker/build-push-action@v2
        with:
          push: true
          platforms: linux/amd64,linux/arm64
          file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
          tags: ${{ steps.metaassertions.outputs.tags }}
          build-args: |
            DOCKER_TAG=${{ join(steps.metaassertions.outputs.tags ) }}-${{ matrix.docker-base-image }}

      # build and publish "normal" image as last to get it listed on top
      - name: Build container image - normal
        uses: docker/build-push-action@v2
        with:
          push: true
          platforms: linux/amd64,linux/arm64
          file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
          tags: ${{ steps.meta.outputs.tags }}
          build-args: |
            DOCKER_TAG=${{ join(steps.meta.outputs.tags ) }}-${{ matrix.docker-base-image }}
.github/workflows/osrm-backend.yml (vendored, 786 lines)
@ -1,786 +0,0 @@
|
||||
name: osrm-backend CI
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- v[1-9]+.[0-9]+.[0-9]+
|
||||
- v[1-9]+.[0-9]+.[0-9]+-[a-zA-Z]+.[0-9]+
|
||||
- v[1-9]+.[0-9]+-[0-9a-zA-Z]+
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
CCACHE_TEMPDIR: /tmp/.ccache-temp
|
||||
CCACHE_COMPRESS: 1
|
||||
CASHER_TIME_OUT: 599 # one second less than 10m to avoid 10m timeout error: https://github.com/Project-OSRM/osrm-backend/issues/2742
|
||||
CMAKE_VERSION: 3.21.2
|
||||
ENABLE_NODE_BINDINGS: "ON"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
windows-release-node:
|
||||
needs: format-taginfo-docs
|
||||
runs-on: windows-2022
|
||||
continue-on-error: false
|
||||
env:
|
||||
BUILD_TYPE: Release
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: pip install "conan<2.0.0"
|
||||
- run: conan --version
|
||||
- run: cmake --version
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18
|
||||
- run: node --version
|
||||
- run: npm --version
|
||||
- name: Prepare environment
|
||||
shell: bash
|
||||
run: |
|
||||
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
|
||||
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
|
||||
- run: npm install --ignore-scripts
|
||||
- run: npm link --ignore-scripts
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
|
||||
cmake --build . --config Release
|
||||
|
||||
# TODO: MSVC goes out of memory when building our tests
|
||||
# - name: Run tests
|
||||
# shell: bash
|
||||
# run: |
|
||||
# cd build
|
||||
# cmake --build . --config Release --target tests
|
||||
# # TODO: run tests
|
||||
# - name: Run node tests
|
||||
# shell: bash
|
||||
# run: |
|
||||
# ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
|
||||
|
||||
# mkdir -p test/data/ch
|
||||
# cp test/data/monaco.osrm* test/data/ch/
|
||||
# ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
|
||||
|
||||
# ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
|
||||
# node test/nodejs/index.js
|
||||
- name: Build Node package
|
||||
shell: bash
|
||||
run: ./scripts/ci/node_package.sh
|
||||
- name: Publish Node package
|
||||
if: ${{ env.PUBLISH == 'On' }}
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
allowUpdates: true
|
||||
artifactErrorsFailBuild: true
|
||||
artifacts: build/stage/**/*.tar.gz
|
||||
omitBody: true
|
||||
omitBodyDuringUpdate: true
|
||||
omitName: true
|
||||
omitNameDuringUpdate: true
|
||||
replacesArtifacts: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
format-taginfo-docs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18
|
||||
- name: Enable Node.js cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-node-
|
||||
- name: Prepare environment
|
||||
run: |
|
||||
npm ci --ignore-scripts
|
||||
clang-format-15 --version
|
||||
- name: Run checks
|
||||
run: |
|
||||
./scripts/check_taginfo.py taginfo.json profiles/car.lua
|
||||
./scripts/format.sh && ./scripts/error_on_dirty.sh
|
||||
node ./scripts/validate_changelog.js
|
||||
npm run docs && ./scripts/error_on_dirty.sh
|
||||
npm audit --production
|
||||
|
||||
docker-image-matrix:
|
||||
strategy:
|
||||
matrix:
|
||||
docker-base-image: ["debian", "alpine"]
|
||||
needs: format-taginfo-docs
|
||||
runs-on: ubuntu-22.04
|
||||
continue-on-error: false
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Enable osm.pbf cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: berlin-latest.osm.pbf
|
||||
key: v1-berlin-osm-pbf
|
||||
restore-keys: |
|
||||
v1-berlin-osm-pbf
|
||||
- name: Docker build
|
||||
run: |
|
||||
docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
|
||||
- name: Test Docker image
|
||||
run: |
|
||||
if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
|
||||
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
|
||||
fi
|
||||
TAG=osrm-backend-local
|
||||
# when `--memory-swap` value equals `--memory` it means container won't use swap
|
||||
# see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
|
||||
MEMORY_ARGS="--memory=1g --memory-swap=1g"
|
||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
|
||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
|
||||
if [ ! -s "${PWD}/berlin-latest.geojson" ]
|
||||
then
|
||||
>&2 echo "No berlin-latest.geojson found"
|
||||
exit 1
|
||||
fi
|
||||
# removing `.osrm.nbg` to check that whole pipeline works without it
|
||||
rm -rf "${PWD}/berlin-latest.osrm.nbg"
|
||||
|
||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
|
||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
|
||||
docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
|
||||
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
|
||||
docker stop osrm-container
|
||||
|
||||
build-test-publish:
|
||||
needs: format-taginfo-docs
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- name: gcc-13-debug-cov
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Debug
|
||||
CCOMPILER: gcc-13
|
||||
CUCUMBER_TIMEOUT: 20000
|
||||
CXXCOMPILER: g++-13
|
||||
ENABLE_COVERAGE: ON
|
||||
|
||||
- name: clang-18-debug-asan-ubsan
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Debug
|
||||
CCOMPILER: clang-18
|
||||
CUCUMBER_TIMEOUT: 20000
|
||||
CXXCOMPILER: clang++-18
|
||||
ENABLE_SANITIZER: ON
|
||||
TARGET_ARCH: x86_64-asan-ubsan
|
||||
OSRM_CONNECTION_RETRIES: 10
|
||||
OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5
|
||||
|
||||
- name: clang-18-release
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang-18
|
||||
CXXCOMPILER: clang++-18
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_LTO: OFF
|
||||
|
||||
- name: clang-18-debug
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Debug
|
||||
CCOMPILER: clang-18
|
||||
CXXCOMPILER: clang++-18
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_LTO: OFF
|
||||
|
||||
- name: clang-18-debug-clang-tidy
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Debug
|
||||
CCOMPILER: clang-18
|
||||
CXXCOMPILER: clang++-18
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_CLANG_TIDY: ON
|
||||
|
||||
|
||||
- name: clang-17-release
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang-17
|
||||
CXXCOMPILER: clang++-17
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_LTO: OFF
|
||||
|
||||
- name: clang-16-release
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang-16
|
||||
CXXCOMPILER: clang++-16
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_LTO: OFF
|
||||
|
||||
- name: conan-linux-debug-asan-ubsan
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang-18
|
||||
CXXCOMPILER: clang++-18
|
||||
ENABLE_CONAN: ON
|
||||
ENABLE_SANITIZER: ON
|
||||
ENABLE_LTO: OFF
|
||||
|
||||
- name: conan-linux-release
|
||||
continue-on-error: false
|
||||
node: 18
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang-18
|
||||
CXXCOMPILER: clang++-18
|
||||
ENABLE_CONAN: ON
|
||||
ENABLE_LTO: OFF
|
||||
|
||||
- name: gcc-14-release
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: gcc-14
|
||||
CXXCOMPILER: g++-14
|
||||
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
|
||||
|
||||
- name: gcc-13-release
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: gcc-13
|
||||
CXXCOMPILER: g++-13
|
||||
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
|
||||
|
||||
- name: gcc-12-release
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-22.04
|
||||
BUILD_TOOLS: ON
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: gcc-12
|
||||
CXXCOMPILER: g++-12
|
||||
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
|
||||
|
||||
- name: conan-linux-release-node
|
||||
build_node_package: true
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang-16
|
||||
CXXCOMPILER: clang++-16
|
||||
ENABLE_CONAN: ON
|
||||
NODE_PACKAGE_TESTS_ONLY: ON
|
||||
|
||||
- name: conan-linux-debug-node
|
||||
build_node_package: true
|
||||
continue-on-error: false
|
||||
node: 20
|
||||
runs-on: ubuntu-24.04
|
||||
BUILD_TYPE: Debug
|
||||
CCOMPILER: clang-16
|
||||
CXXCOMPILER: clang++-16
|
||||
ENABLE_CONAN: ON
|
||||
NODE_PACKAGE_TESTS_ONLY: ON
|
||||
|
||||
- name: conan-macos-x64-release-node
|
||||
build_node_package: true
|
||||
continue-on-error: true
|
||||
node: 20
|
||||
runs-on: macos-13 # x86_64
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang
|
||||
CXXCOMPILER: clang++
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_ASSERTIONS: ON
|
||||
ENABLE_CONAN: ON
|
||||
|
||||
- name: conan-macos-arm64-release-node
|
||||
build_node_package: true
|
||||
continue-on-error: true
|
||||
node: 20
|
||||
runs-on: macos-14 # arm64
|
||||
BUILD_TYPE: Release
|
||||
CCOMPILER: clang
|
||||
CXXCOMPILER: clang++
|
||||
CUCUMBER_TIMEOUT: 60000
|
||||
ENABLE_ASSERTIONS: ON
|
||||
ENABLE_CONAN: ON
|
||||
|
||||
name: ${{ matrix.name}}
|
||||
continue-on-error: ${{ matrix.continue-on-error }}
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
env:
|
||||
BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }}
|
||||
BUILD_TYPE: ${{ matrix.BUILD_TYPE }}
|
||||
BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }}
|
||||
CCOMPILER: ${{ matrix.CCOMPILER }}
|
||||
CFLAGS: ${{ matrix.CFLAGS }}
|
||||
CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }}
|
||||
CXXCOMPILER: ${{ matrix.CXXCOMPILER }}
|
||||
CXXFLAGS: ${{ matrix.CXXFLAGS }}
|
||||
ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }}
|
||||
ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }}
|
||||
ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }}
|
||||
ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }}
|
||||
ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }}
|
||||
NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }}
|
||||
TARGET_ARCH: ${{ matrix.TARGET_ARCH }}
|
||||
OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }}
|
||||
OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }}
|
||||
ENABLE_LTO: ${{ matrix.ENABLE_LTO }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build machine architecture
|
||||
run: uname -m
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
- name: Enable Node.js cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-node-
|
||||
- name: Enable compiler cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.ccache
|
||||
key: ccache-${{ matrix.name }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
ccache-${{ matrix.name }}-
|
||||
- name: Enable Conan cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.conan
|
||||
key: v9-conan-${{ matrix.name }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
v9-conan-${{ matrix.name }}-
|
||||
- name: Enable test cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{github.workspace}}/test/cache
|
||||
key: v4-test-${{ matrix.name }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
v4-test-${{ matrix.name }}-
|
||||
- name: Prepare environment
|
||||
run: |
|
||||
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
|
||||
mkdir -p $HOME/.ccache
|
||||
|
||||
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
|
||||
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
|
||||
echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV
|
||||
echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV
|
||||
if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then
|
||||
# We can only set this after checkout once we know the workspace directory
|
||||
echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV
|
||||
echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV
|
||||
echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
||||
echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV
|
||||
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
|
||||
echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV
|
||||
fi
|
||||
# See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041
|
||||
# We need it to be able to access system folders while restoring cached Boost below
|
||||
- name: Give tar root ownership
|
||||
if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
|
||||
run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar
|
||||
- name: Cache Boost
|
||||
if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
|
||||
id: cache-boost
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
/usr/local/include/boost
|
||||
/usr/local/lib/libboost*
|
||||
key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }}
|
||||
restore-keys: |
|
||||
v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }}
|
||||
|
||||
- name: Install Boost
|
||||
if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
|
||||
run: |
|
||||
BOOST_VERSION="1.85.0"
|
||||
BOOST_VERSION_FLAVOR="${BOOST_VERSION}-b2-nodocs"
|
||||
wget -q https://github.com/boostorg/boost/releases/download/boost-${BOOST_VERSION}/boost-${BOOST_VERSION_FLAVOR}.tar.gz
|
||||
tar xzf boost-${BOOST_VERSION_FLAVOR}.tar.gz
|
||||
cd boost-${BOOST_VERSION}
|
||||
sudo ./bootstrap.sh
|
||||
sudo ./b2 install
|
||||
cd ..
|
||||
sudo rm -rf boost-${BOOST_VERSION}*
|
||||
|
||||
- name: Install dev dependencies
|
||||
run: |
|
||||
python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages
|
||||
|
||||
# workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499
|
||||
# and that's why CI cannot find conan executable installed above
|
||||
if [[ "${RUNNER_OS}" == "macOS" ]]; then
|
||||
echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH
|
||||
fi
|
||||
|
||||
# ccache
|
||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
||||
sudo apt-get update -y && sudo apt-get install ccache
|
||||
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
|
||||
brew install ccache
|
||||
fi
|
||||
|
||||
# Linux dev packages
|
||||
if [ "${ENABLE_CONAN}" != "ON" ]; then
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev
|
||||
if [[ "${CCOMPILER}" != clang-* ]]; then
|
||||
sudo apt-get install -y ${CXXCOMPILER}
|
||||
fi
|
||||
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
|
||||
sudo apt-get install -y lcov
|
||||
fi
|
||||
fi
|
||||
|
||||
# TBB
|
||||
TBB_VERSION=2021.12.0
|
||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
||||
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz"
|
||||
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
|
||||
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz"
|
||||
fi
|
||||
wget --tries 5 ${TBB_URL} -O onetbb.tgz
|
||||
tar zxvf onetbb.tgz
|
||||
sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/
|
||||
sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/
|
||||
- name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18
|
||||
if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }}
|
||||
run: |
|
||||
sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
|
||||
|
||||
conan config init
|
||||
yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml"
|
||||
- name: Add Apple-clang 16 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Apple-clang 16
|
||||
if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.runs-on == 'macos-14' }}
|
||||
run: |
|
||||
sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_darwin_arm64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq
|
||||
|
||||
conan config init
|
||||
yq eval '.compiler.apple-clang.version += ["16.0"]' -i "$HOME/.conan/settings.yml"
|
||||
- name: Prepare build
|
||||
run: |
|
||||
mkdir ${OSRM_BUILD_DIR}
|
||||
ccache --max-size=256M
|
||||
npm ci --ignore-scripts
|
||||
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
|
||||
lcov --directory . --zerocounters # clean cached files
|
||||
fi
|
||||
echo "CC=${CCOMPILER}" >> $GITHUB_ENV
|
||||
echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV
|
||||
if [[ "${RUNNER_OS}" == "macOS" ]]; then
|
||||
# missing from GCC path, needed for conan builds of libiconv, for example.
|
||||
sudo xcode-select --switch /Library/Developer/CommandLineTools
|
||||
echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV
|
||||
echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Build and install OSRM
|
||||
run: |
|
||||
echo "Using ${JOBS} jobs"
|
||||
pushd ${OSRM_BUILD_DIR}
|
||||
|
||||
ccache --zero-stats
|
||||
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
|
||||
-DENABLE_CONAN=${ENABLE_CONAN:-OFF} \
|
||||
-DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \
|
||||
-DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \
|
||||
-DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \
|
||||
-DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \
|
||||
-DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
|
||||
-DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \
|
||||
-DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \
|
||||
-DENABLE_CCACHE=ON \
|
||||
-DENABLE_LTO=${ENABLE_LTO:-ON} \
|
||||
-DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR}
|
||||
make --jobs=${JOBS}
|
||||
|
||||
if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then
|
||||
make tests --jobs=${JOBS}
|
||||
make benchmarks --jobs=${JOBS}
|
||||
|
||||
sudo make install
|
||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
||||
echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV
|
||||
fi
|
||||
echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV
|
||||
fi
|
||||
popd
|
||||
- name: Build example
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }}
|
||||
run: |
|
||||
mkdir example/build && pushd example/build
|
||||
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE}
|
||||
make --jobs=${JOBS}
|
||||
popd
|
||||
- name: Run all tests
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }}
|
||||
run: |
|
||||
make -C test/data benchmark
|
||||
|
||||
# macOS SIP strips the linker path. Reset this inside the running shell
|
||||
export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }}
|
||||
./example/build/osrm-example test/data/mld/monaco.osrm
|
||||
|
||||
# All tests assume to be run from the build directory
|
||||
pushd ${OSRM_BUILD_DIR}
|
||||
for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done
|
||||
if [ -z "${ENABLE_SANITIZER}" ]; then
|
||||
npm run nodejs-tests
|
||||
fi
|
||||
popd
|
||||
npm test
|
||||
|
||||
- name: Use Node 18
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18
|
||||
- name: Run Node package tests on Node 18
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
||||
run: |
|
||||
node --version
|
||||
npm run nodejs-tests
|
||||
- name: Use Node 20
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Run Node package tests on Node 20
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
||||
run: |
|
||||
node --version
|
||||
npm run nodejs-tests
|
||||
- name: Use Node latest
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: latest
|
||||
- name: Run Node package tests on Node-latest
|
||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
||||
run: |
|
||||
node --version
|
||||
npm run nodejs-tests
|
||||
|
||||
- name: Upload test logs
|
||||
uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
with:
|
||||
name: logs
|
||||
path: test/logs/
|
||||
|
||||
# - name: Generate code coverage
|
||||
# if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
|
||||
# run: |
|
||||
# lcov --directory . --capture --output-file coverage.info # capture coverage info
|
||||
# lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system
|
||||
# lcov --list coverage.info #debug info
|
||||
|
||||
# # Uploading report to CodeCov
|
||||
# - name: Upload code coverage
|
||||
# if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
|
||||
# uses: codecov/codecov-action@v4
|
||||
# with:
|
||||
# files: coverage.info
|
||||
# name: codecov-osrm-backend
|
||||
# fail_ci_if_error: true
|
||||
# verbose: true
|
||||
- name: Build Node package
|
||||
if: ${{ matrix.build_node_package }}
|
||||
run: ./scripts/ci/node_package.sh
|
||||
- name: Publish Node package
|
||||
if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }}
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
allowUpdates: true
|
||||
artifactErrorsFailBuild: true
|
||||
artifacts: build/stage/**/*.tar.gz
|
||||
omitBody: true
|
||||
omitBodyDuringUpdate: true
|
||||
omitName: true
|
||||
omitNameDuringUpdate: true
|
||||
replacesArtifacts: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Show CCache statistics
|
||||
run: |
|
||||
ccache -p
|
||||
ccache -s
|
||||
|
||||
benchmarks:
|
||||
if: github.event_name == 'pull_request'
|
||||
needs: [format-taginfo-docs]
|
||||
runs-on: self-hosted
|
||||
env:
|
||||
CCOMPILER: clang-16
|
||||
CXXCOMPILER: clang++-16
|
||||
CC: clang-16
|
||||
CXX: clang++-16
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }}
|
||||
steps:
|
||||
- name: Checkout PR Branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.head_ref }}
|
||||
path: pr
|
||||
- name: Activate virtualenv
|
||||
run: |
|
||||
python3 -m venv .venv
|
||||
source .venv/bin/activate
|
||||
echo PATH=$PATH >> $GITHUB_ENV
|
||||
pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4"
|
||||
- name: Prepare data
|
||||
run: |
|
||||
if [ "$RUN_BIG_BENCHMARK" = "true" ]; then
|
||||
rm -rf ~/data.osm.pbf
|
||||
wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet
|
||||
gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv
|
||||
else
|
||||
if [ ! -f "~/data.osm.pbf" ]; then
|
||||
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf
|
||||
else
|
||||
echo "Using cached data.osm.pbf"
|
||||
fi
|
||||
gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv
|
||||
fi
|
||||
- name: Prepare environment
|
||||
run: |
|
||||
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
|
||||
mkdir -p $HOME/.ccache
|
||||
ccache --zero-stats
|
||||
ccache --max-size=256M
|
||||
- name: Checkout Base Branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.base.ref }}
|
||||
path: base
|
||||
- name: Build Base Branch
|
||||
run: |
|
||||
cd base
|
||||
npm ci --ignore-scripts
|
||||
cd ..
|
||||
mkdir base/build
|
||||
cd base/build
|
||||
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
|
||||
make -j$(nproc)
|
||||
make -j$(nproc) benchmarks
|
||||
cd ..
|
||||
make -C test/data
|
||||
- name: Build PR Branch
|
||||
run: |
|
||||
cd pr
|
||||
npm ci --ignore-scripts
|
||||
cd ..
|
||||
mkdir -p pr/build
|
||||
cd pr/build
|
||||
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
|
||||
make -j$(nproc)
|
||||
make -j$(nproc) benchmarks
|
||||
cd ..
|
||||
make -C test/data
|
||||
# we run benchmarks in tmpfs to avoid impact of disk IO
|
||||
- name: Create folder for tmpfs
|
||||
run: |
|
||||
# if by any chance it was mounted before(e.g. due to previous job failed), unmount it
|
||||
sudo umount ~/benchmarks | true
|
||||
rm -rf ~/benchmarks
|
||||
mkdir -p ~/benchmarks
|
||||
# see https://llvm.org/docs/Benchmarking.html
|
||||
- name: Run PR Benchmarks
|
||||
run: |
|
||||
sudo cset shield -c 2-3 -k on
|
||||
sudo mount -t tmpfs -o size=4g none ~/benchmarks
|
||||
cp -rf pr/build ~/benchmarks/build
|
||||
cp -rf pr/lib ~/benchmarks/lib
|
||||
mkdir -p ~/benchmarks/test
|
||||
cp -rf pr/test/data ~/benchmarks/test/data
|
||||
cp -rf pr/profiles ~/benchmarks/profiles
|
||||
|
||||
sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
|
||||
sudo umount ~/benchmarks
|
||||
sudo cset shield --reset
|
||||
- name: Run Base Benchmarks
|
||||
run: |
|
||||
sudo cset shield -c 2-3 -k on
|
||||
sudo mount -t tmpfs -o size=4g none ~/benchmarks
|
||||
cp -rf base/build ~/benchmarks/build
|
||||
cp -rf base/lib ~/benchmarks/lib
|
||||
mkdir -p ~/benchmarks/test
|
||||
cp -rf base/test/data ~/benchmarks/test/data
|
||||
cp -rf base/profiles ~/benchmarks/profiles
|
||||
|
||||
# TODO: remove it when base branch will have this file at needed location
|
||||
if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then
|
||||
cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json
|
||||
fi
|
||||
# we intentionally use scripts from PR branch to be able to update them and see results in the same PR
|
||||
sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
|
||||
sudo umount ~/benchmarks
|
||||
sudo cset shield --reset
|
||||
- name: Post Benchmark Results
|
||||
run: |
|
||||
python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
|
||||
- name: Show CCache statistics
|
||||
run: |
|
||||
ccache -p
|
||||
ccache -s
|
||||
|
||||
ci-complete:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks]
|
||||
steps:
|
||||
- run: echo "CI complete"
|
||||
|
||||
.github/workflows/stale.yml (vendored, 29 lines)
@@ -1,29 +0,0 @@
name: 'Close stale issues'
on:
  # NOTE: uncomment if you want to test changes to this file in PRs CI
  # pull_request:
  #   branches:
  #     - master
  schedule:
    - cron: '30 1 * * *' # every day at 1:30am
permissions:
  issues: write
  pull-requests: write

jobs:
  stale:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/stale@v9
        with:
          operations-per-run: 3000
          stale-issue-message: 'This issue seems to be stale. It will be closed in 30 days if no further activity occurs.'
          stale-pr-message: 'This PR seems to be stale. Is it still relevant?'
          days-before-issue-stale: 180 # 6 months
          days-before-issue-close: 30 # 1 month
          days-before-pr-stale: 180 # 6 months
          days-before-pr-close: -1 # never close PRs
          exempt-issue-labels: 'Do Not Stale,Feature Request,Performance,Bug Report,CI,Starter Task,Refactor,Guidance'
.gitignore (vendored, 63 lines)
@@ -1,9 +1,3 @@
# pre compiled dependencies #
#############################
osrm-deps

.ycm_extra_conf.py

# Compiled source #
###################
*.com
@@ -41,13 +35,9 @@ Thumbs.db

# build related files #
#######################
/_build*
/build/
/example/build/
/test/data/monaco.osrm*
/test/data/ch
/test/data/mld
/cmake/postinst
/Util/UUID.cpp
/Util/GitDescription.cpp

# Eclipse related files #
#########################
@@ -56,19 +46,31 @@ Thumbs.db
.cproject
.project

# Visual Studio (Code) related files #
######################################
/.vs*
/*.local.bat
/CMakeSettings.json
/.cache
# stxxl related files #
#######################
.stxxl
stxxl.log
stxxl.errlog

# Jetbrains related files #
###########################
.idea/
# compiled protobuffers #
#########################
/DataStructures/pbf-proto/*.pb.h
/DataStructures/pbf-proto/*.pb.cc

# Compiled Binary Files #
# External Libs #
#################
/lib/
/win/lib

# Visual Studio Temp + build Files #
####################################
/win/*.user
/win/*.ncb
/win/*.suo
/win/Debug/
/win/Release/
/win/bin/
/win/bin-debug/
/osrm-extract
/osrm-io-benchmark
/osrm-components
@@ -77,31 +79,14 @@ Thumbs.db
/osrm-prepare
/osrm-unlock-all
/osrm-cli
/osrm-check-hsgr
/osrm-springclean
/nohup.out

# Sandbox folder #
###################
/sandbox/

# Test related files #
######################
/test/profile.lua
/test/cache
/test/speeds.csv
/test/penalties.csv
node_modules

# Deprecated config file #
##########################
/server.ini

*.swp

# local lua debugging file
debug.lua

# node-osrm artifacts
lib/binding
.npmignore (16 lines)
@@ -1,16 +0,0 @@
*
!README.md
!CHANGELOG.md
!CONTRIBUTING.MD
!LICENCE.TXT
!package.json
!package-lock.json
!yarn.lock
!docs
!example
!taginfo.json
!lib/*.js
!profiles/*
!profiles/lib/*
!profiles/examples/*
!scripts/node_install.sh
.travis.yml (new file, 47 lines)
@@ -0,0 +1,47 @@
language: cpp
compiler:
  - gcc
#  - clang
# Make sure CMake is installed
install:
  - sudo apt-get update >/dev/null
  - sudo apt-get -q install libprotoc-dev libprotobuf7 libprotobuf-dev libosmpbf-dev libbz2-dev libstxxl-dev libstxxl1 libxml2-dev libzip-dev libboost1.46-all-dev lua5.1 liblua5.1-0-dev libluabind-dev rubygems
  - curl -s https://gist.githubusercontent.com/DennisOSRM/803a64a9178ec375069f/raw/ | sudo bash
before_script:
  - rvm use 1.9.3
  - gem install bundler
  - bundle install
  - mkdir build
  - cd build
  - cmake .. $CMAKEOPTIONS
script:
  - make -j 2
  - cd ..
  - cucumber -p verify
after_script:
#  - cd ..
#  - cucumber -p verify
branches:
  only:
    - master
    - develop
cache:
  - bundler
  - apt
env:
  - CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Release" OSRM_PORT=5000 OSRM_TIMEOUT=60
  - CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Debug" OSRM_PORT=5010 OSRM_TIMEOUT=60
notifications:
  irc:
    channels:
      - irc.oftc.net#osrm
    on_success: change
    on_failure: always
    use_notice: true
    skip_join: false

  recipients:
    - dennis@mapbox.com
  email:
    on_success: change
    on_failure: always
198
Algorithms/DouglasPeucker.cpp
Normal file
198
Algorithms/DouglasPeucker.cpp
Normal file
@ -0,0 +1,198 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "DouglasPeucker.h"
|
||||
#include "../DataStructures/SegmentInformation.h"
|
||||
#include "../Util/MercatorUtil.h"
|
||||
|
||||
#include <limits>
|
||||
|
||||
//These thresholds are more or less heuristically chosen.
|
||||
static double DouglasPeuckerThresholds[19] = {
|
||||
262144., //z0
|
||||
131072., //z1
|
||||
65536., //z2
|
||||
32768., //z3
|
||||
16384., //z4
|
||||
8192., //z5
|
||||
4096., //z6
|
||||
2048., //z7
|
||||
960., //z8
|
||||
480., //z9
|
||||
240., //z10
|
||||
90., //z11
|
||||
50., //z12
|
||||
25., //z13
|
||||
15., //z14
|
||||
5., //z15
|
||||
.65, //z16
|
||||
.5, //z17
|
||||
.35 //z18
|
||||
};
|
||||
|
||||
/**
|
||||
* Yuck! Code duplication. This function is also in EgdeBasedNode.h
|
||||
*/
|
||||
double DouglasPeucker::ComputeDistance(
|
||||
const FixedPointCoordinate& point,
|
||||
const FixedPointCoordinate& segA,
|
||||
const FixedPointCoordinate& segB
|
||||
) const {
|
||||
const double x = lat2y(point.lat/COORDINATE_PRECISION);
|
||||
const double y = point.lon/COORDINATE_PRECISION;
|
||||
const double a = lat2y(segA.lat/COORDINATE_PRECISION);
|
||||
const double b = segA.lon/COORDINATE_PRECISION;
|
||||
const double c = lat2y(segB.lat/COORDINATE_PRECISION);
|
||||
const double d = segB.lon/COORDINATE_PRECISION;
|
||||
double p,q,nY;
|
||||
if( std::abs(a-c) > std::numeric_limits<double>::epsilon() ){
|
||||
const double m = (d-b)/(c-a); // slope
|
||||
// Projection of (x,y) on line joining (a,b) and (c,d)
|
||||
p = ((x + (m*y)) + (m*m*a - m*b))/(1. + m*m);
|
||||
q = b + m*(p - a);
|
||||
} else {
|
||||
p = c;
|
||||
q = y;
|
||||
}
|
||||
nY = (d*p - c*q)/(a*d - b*c);
|
||||
|
||||
//discretize the result to coordinate precision. it's a hack!
|
||||
if( std::abs(nY) < (1./COORDINATE_PRECISION) ) {
|
||||
nY = 0.;
|
||||
}
|
||||
|
||||
double r = (p - nY*a)/c;
|
||||
if( std::isnan(r) ) {
|
||||
r = ((segB.lat == point.lat) && (segB.lon == point.lon)) ? 1. : 0.;
|
||||
} else if( std::abs(r) <= std::numeric_limits<double>::epsilon() ) {
|
||||
r = 0.;
|
||||
} else if( std::abs(r-1.) <= std::numeric_limits<double>::epsilon() ) {
|
||||
r = 1.;
|
||||
}
|
||||
FixedPointCoordinate nearest_location;
|
||||
BOOST_ASSERT( !std::isnan(r) );
|
||||
if( r <= 0. ){
|
||||
nearest_location.lat = segA.lat;
|
||||
nearest_location.lon = segA.lon;
|
||||
} else if( r >= 1. ){
|
||||
nearest_location.lat = segB.lat;
|
||||
nearest_location.lon = segB.lon;
|
||||
} else { // point lies in between
|
||||
nearest_location.lat = y2lat(p)*COORDINATE_PRECISION;
|
||||
nearest_location.lon = q*COORDINATE_PRECISION;
|
||||
}
|
||||
BOOST_ASSERT( nearest_location.isValid() );
|
||||
const double approximated_distance = FixedPointCoordinate::ApproximateEuclideanDistance(
|
||||
point,
|
||||
nearest_location
|
||||
);
|
||||
BOOST_ASSERT( 0. <= approximated_distance );
|
||||
return approximated_distance;
|
||||
}
|
||||
|
||||
void DouglasPeucker::Run(
|
||||
std::vector<SegmentInformation> & input_geometry,
|
||||
const unsigned zoom_level
|
||||
) {
|
||||
{
|
||||
BOOST_ASSERT_MSG(zoom_level < 19, "unsupported zoom level");
|
||||
BOOST_ASSERT_MSG(1 < input_geometry.size(), "geometry invalid");
|
||||
std::size_t left_border = 0;
|
||||
std::size_t right_border = 1;
|
||||
//Sweep over array and identify those ranges that need to be checked
|
||||
do {
|
||||
BOOST_ASSERT_MSG(
|
||||
input_geometry[left_border].necessary,
|
||||
"left border must be necessary"
|
||||
);
|
||||
BOOST_ASSERT_MSG(
|
||||
input_geometry.back().necessary,
|
||||
"right border must be necessary"
|
||||
);
|
||||
|
||||
if(input_geometry[right_border].necessary) {
|
||||
recursion_stack.push(std::make_pair(left_border, right_border));
|
||||
left_border = right_border;
|
||||
}
|
||||
++right_border;
|
||||
} while( right_border < input_geometry.size());
|
||||
}
|
||||
while( !recursion_stack.empty() ) {
|
||||
//pop next element
|
||||
const PairOfPoints pair = recursion_stack.top();
|
||||
recursion_stack.pop();
|
||||
BOOST_ASSERT_MSG(
|
||||
input_geometry[pair.first].necessary,
|
||||
"left border mus be necessary"
|
||||
);
|
||||
BOOST_ASSERT_MSG(
|
||||
input_geometry[pair.second].necessary,
|
||||
"right border must be necessary"
|
||||
);
|
||||
BOOST_ASSERT_MSG(
|
||||
pair.second < input_geometry.size(),
|
||||
"right border outside of geometry"
|
||||
);
|
||||
BOOST_ASSERT_MSG(
|
||||
pair.first < pair.second,
|
||||
"left border on the wrong side"
|
||||
);
|
||||
double max_distance = std::numeric_limits<double>::min();
|
||||
|
||||
std::size_t farthest_element_index = pair.second;
|
||||
//find index idx of element with max_distance
|
||||
for(std::size_t i = pair.first+1; i < pair.second; ++i){
|
||||
const double temp_dist = ComputeDistance(
|
||||
input_geometry[i].location,
|
||||
input_geometry[pair.first].location,
|
||||
input_geometry[pair.second].location
|
||||
);
|
||||
const double distance = std::abs(temp_dist);
|
||||
if(
|
||||
distance > DouglasPeuckerThresholds[zoom_level] &&
|
||||
distance > max_distance
|
||||
) {
|
||||
farthest_element_index = i;
|
||||
max_distance = distance;
|
||||
}
|
||||
}
|
||||
if (max_distance > DouglasPeuckerThresholds[zoom_level]) {
|
||||
// mark idx as necessary
|
||||
input_geometry[farthest_element_index].necessary = true;
|
||||
if (1 < (farthest_element_index - pair.first) ) {
|
||||
recursion_stack.push(
|
||||
std::make_pair(pair.first, farthest_element_index)
|
||||
);
|
||||
}
|
||||
if (1 < (pair.second - farthest_element_index) ) {
|
||||
recursion_stack.push(
|
||||
std::make_pair(farthest_element_index, pair.second)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
72 Algorithms/DouglasPeucker.h Normal file
@ -0,0 +1,72 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef DOUGLASPEUCKER_H_
|
||||
#define DOUGLASPEUCKER_H_
|
||||
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
#include <cmath>
|
||||
|
||||
#include <limits>
|
||||
#include <stack>
|
||||
#include <vector>
|
||||
|
||||
/* This class computes the bit vector indicating which of the input points
 * remain after generalization with the (Ramer-)Douglas-Peucker algorithm.
 *
 * Input is a vector of pairs. Each pair consists of the point information and
 * a bit indicating whether the point is present in the generalization.
 * Note: points may also be pre-selected. */
|
||||
|
||||
struct SegmentInformation;
|
||||
|
||||
class DouglasPeucker {
|
||||
private:
|
||||
typedef std::pair<std::size_t, std::size_t> PairOfPoints;
|
||||
//Stack to simulate the recursion
|
||||
std::stack<PairOfPoints> recursion_stack;
|
||||
|
||||
double ComputeDistance(
|
||||
const FixedPointCoordinate& point,
|
||||
const FixedPointCoordinate& segA,
|
||||
const FixedPointCoordinate& segB
|
||||
) const;
|
||||
public:
|
||||
void Run(
|
||||
std::vector<SegmentInformation> & input_geometry,
|
||||
const unsigned zoom_level
|
||||
);
|
||||
|
||||
};
|
||||
|
||||
#endif /* DOUGLASPEUCKER_H_ */
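// Usage sketch: one way the generalizer declared above might be driven. The
// function name is illustrative only; it assumes SegmentInformation exposes
// the `location` and `necessary` members that DouglasPeucker::Run operates on
// (the include path below is the one used by DouglasPeucker.cpp).
#include "../DataStructures/SegmentInformation.h"

inline void GeneralizeGeometryExample(std::vector<SegmentInformation> &geometry)
{
    // Run() requires at least two points and expects the first and the last
    // point to be flagged as necessary before the sweep starts.
    geometry.front().necessary = true;
    geometry.back().necessary = true;

    DouglasPeucker generalizer;
    generalizer.Run(geometry, 14); // zoom level must be < 19

    // Afterwards only points with necessary == true belong to the
    // generalized polyline.
}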
|
||||
148 Algorithms/IteratorBasedCRC32.h Normal file
@ -0,0 +1,148 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef ITERATOR_BASED_CRC32_H
|
||||
#define ITERATOR_BASED_CRC32_H
|
||||
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <iostream>
|
||||
|
||||
#if defined(__x86_64__)
|
||||
#include <cpuid.h>
|
||||
#else
|
||||
#include <boost/crc.hpp> // for boost::crc_32_type
|
||||
|
||||
inline void __get_cpuid(
|
||||
int param,
|
||||
unsigned *eax,
|
||||
unsigned *ebx,
|
||||
unsigned *ecx,
|
||||
unsigned *edx
|
||||
) { *ecx = 0; }
|
||||
#endif
|
||||
|
||||
template<class ContainerT>
|
||||
class IteratorbasedCRC32 {
|
||||
private:
|
||||
typedef typename ContainerT::iterator IteratorType;
|
||||
unsigned crc;
|
||||
|
||||
bool use_SSE42_CRC_function;
|
||||
|
||||
#if !defined(__x86_64__)
|
||||
boost::crc_optimal<32, 0x1EDC6F41, 0x0, 0x0, true, true> CRC32_processor;
|
||||
#endif
|
||||
unsigned SoftwareBasedCRC32( char *str, unsigned len )
|
||||
{
|
||||
#if !defined(__x86_64__)
|
||||
CRC32_processor.process_bytes( str, len);
|
||||
return CRC32_processor.checksum();
|
||||
#else
|
||||
return 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
// adapted from http://byteworm.com/2010/10/13/crc32/
|
||||
unsigned SSE42BasedCRC32( char *str, unsigned len )
|
||||
{
|
||||
#if defined(__x86_64__)
|
||||
unsigned q = len/sizeof(unsigned);
|
||||
unsigned r = len%sizeof(unsigned);
|
||||
unsigned *p = (unsigned*)str;
|
||||
|
||||
//crc=0;
|
||||
while (q--) {
|
||||
__asm__ __volatile__(
|
||||
".byte 0xf2, 0xf, 0x38, 0xf1, 0xf1;"
|
||||
:"=S"(crc)
|
||||
:"0"(crc), "c"(*p)
|
||||
);
|
||||
++p;
|
||||
}
|
||||
|
||||
str=(char*)p;
|
||||
while (r--) {
|
||||
__asm__ __volatile__(
|
||||
".byte 0xf2, 0xf, 0x38, 0xf1, 0xf1;"
|
||||
:"=S"(crc)
|
||||
:"0"(crc), "c"(*str)
|
||||
);
|
||||
++str;
|
||||
}
|
||||
#endif
|
||||
return crc;
|
||||
}
|
||||
|
||||
inline unsigned cpuid() const
|
||||
{
|
||||
unsigned eax = 0, ebx = 0, ecx = 0, edx = 0;
|
||||
// on x86-64 this invokes the hardware cpuid instruction; otherwise the dummy implementation above is used
|
||||
__get_cpuid( 1, &eax, &ebx, &ecx, &edx );
|
||||
return ecx;
|
||||
}
|
||||
|
||||
bool DetectNativeCRC32Support()
|
||||
{
|
||||
static const int SSE42_BIT = 0x00100000;
|
||||
const unsigned ecx = cpuid();
|
||||
const bool has_SSE42 = ecx & SSE42_BIT;
|
||||
if (has_SSE42) {
|
||||
SimpleLogger().Write() << "using hardware based CRC32 computation";
|
||||
} else {
|
||||
SimpleLogger().Write() << "using software based CRC32 computation";
|
||||
}
|
||||
return has_SSE42;
|
||||
}
|
||||
|
||||
public:
|
||||
IteratorbasedCRC32() : crc(0)
|
||||
{
|
||||
use_SSE42_CRC_function = DetectNativeCRC32Support();
|
||||
}
|
||||
|
||||
unsigned operator()( IteratorType iter, const IteratorType end )
|
||||
{
|
||||
unsigned crc = 0;
|
||||
while(iter != end) {
|
||||
char * data = reinterpret_cast<char*>(&(*iter) );
|
||||
|
||||
if (use_SSE42_CRC_function)
|
||||
{
|
||||
crc = SSE42BasedCRC32( data, sizeof(typename ContainerT::value_type) );
|
||||
}
|
||||
else
|
||||
{
|
||||
crc = SoftwareBasedCRC32( data, sizeof(typename ContainerT::value_type) );
|
||||
}
|
||||
++iter;
|
||||
}
|
||||
return crc;
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* ITERATOR_BASED_CRC32_H */
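// Usage sketch for the functor above (the helper name is illustrative): it is
// instantiated on the container type and applied to an iterator range. Whether
// the SSE4.2 instruction or the Boost fallback is used is decided once in the
// constructor.
#include <vector>

inline unsigned ChecksumExample(std::vector<unsigned> &data)
{
    IteratorbasedCRC32<std::vector<unsigned> > crc32;
    return crc32(data.begin(), data.end());
}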
|
||||
100 Algorithms/ObjectToBase64.h Normal file
@ -0,0 +1,100 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef OBJECTTOBASE64_H_
|
||||
#define OBJECTTOBASE64_H_
|
||||
|
||||
#include "../Util/StringUtil.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/archive/iterators/base64_from_binary.hpp>
|
||||
#include <boost/archive/iterators/binary_from_base64.hpp>
|
||||
#include <boost/archive/iterators/transform_width.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
typedef
|
||||
boost::archive::iterators::base64_from_binary<
|
||||
boost::archive::iterators::transform_width<const char *, 6, 8>
|
||||
> base64_t;
|
||||
|
||||
typedef
|
||||
boost::archive::iterators::transform_width<
|
||||
boost::archive::iterators::binary_from_base64<
|
||||
std::string::const_iterator>, 8, 6
|
||||
> binary_t;
|
||||
|
||||
template<class ObjectT>
|
||||
static void EncodeObjectToBase64(const ObjectT & object, std::string& encoded) {
|
||||
const char * char_ptr_to_object = (const char *)&object;
|
||||
std::vector<unsigned char> data(sizeof(object));
|
||||
std::copy(
|
||||
char_ptr_to_object,
|
||||
char_ptr_to_object + sizeof(ObjectT),
|
||||
data.begin()
|
||||
);
|
||||
|
||||
unsigned char number_of_padded_chars = 0; // is in {0,1,2};
|
||||
while(data.size() % 3 != 0) {
|
||||
++number_of_padded_chars;
|
||||
data.push_back(0x00);
|
||||
}
|
||||
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == data.size() % 3,
|
||||
"base64 input data size is not a multiple of 3!"
|
||||
);
|
||||
encoded.resize(sizeof(ObjectT));
|
||||
encoded.assign(
|
||||
base64_t( &data[0] ),
|
||||
base64_t( &data[0] + (data.size() - number_of_padded_chars) )
|
||||
);
|
||||
replaceAll(encoded, "+", "-");
|
||||
replaceAll(encoded, "/", "_");
|
||||
}
|
||||
|
||||
template<class ObjectT>
|
||||
static void DecodeObjectFromBase64(const std::string& input, ObjectT & object) {
|
||||
try {
|
||||
std::string encoded(input);
|
||||
//replace "-" with "+" and "_" with "/"
|
||||
replaceAll(encoded, "-", "+");
|
||||
replaceAll(encoded, "_", "/");
|
||||
|
||||
std::copy (
|
||||
binary_t( encoded.begin() ),
|
||||
binary_t( encoded.begin() + encoded.length() - 1),
|
||||
(char *)&object
|
||||
);
|
||||
|
||||
} catch(...) { }
|
||||
}
|
||||
|
||||
#endif /* OBJECTTOBASE64_H_ */
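// Round-trip sketch for the helpers above. They reinterpret the object as a
// raw byte blob, so this is only meaningful for trivially copyable types; the
// struct and function below are purely illustrative.
struct Base64ExampleBlob { int lat; int lon; };

inline void Base64RoundTripExample()
{
    Base64ExampleBlob input = { 52500000, 13400000 };
    std::string encoded;
    EncodeObjectToBase64(input, encoded);    // URL-safe: '+' -> '-', '/' -> '_'

    Base64ExampleBlob output = { 0, 0 };
    DecodeObjectFromBase64(encoded, output); // swallows malformed input silently
}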
|
||||
147 Algorithms/PolylineCompressor.cpp Normal file
@ -0,0 +1,147 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "PolylineCompressor.h"
|
||||
|
||||
void PolylineCompressor::encodeVectorSignedNumber(
|
||||
std::vector<int> & numbers,
|
||||
std::string & output
|
||||
) const {
|
||||
for(unsigned i = 0; i < numbers.size(); ++i) {
|
||||
numbers[i] <<= 1;
|
||||
if (numbers[i] < 0) {
|
||||
numbers[i] = ~(numbers[i]);
|
||||
}
|
||||
}
|
||||
for(unsigned i = 0; i < numbers.size(); ++i) {
|
||||
encodeNumber(numbers[i], output);
|
||||
}
|
||||
}
|
||||
|
||||
void PolylineCompressor::encodeNumber(int number_to_encode, std::string & output) const {
|
||||
while (number_to_encode >= 0x20) {
|
||||
int nextValue = (0x20 | (number_to_encode & 0x1f)) + 63;
|
||||
output += static_cast<char>(nextValue);
|
||||
if(92 == nextValue) {
|
||||
output += static_cast<char>(nextValue);
|
||||
}
|
||||
number_to_encode >>= 5;
|
||||
}
|
||||
|
||||
number_to_encode += 63;
|
||||
output += static_cast<char>(number_to_encode);
|
||||
if(92 == number_to_encode) {
|
||||
output += static_cast<char>(number_to_encode);
|
||||
}
|
||||
}
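// Worked example for the two helpers above: a delta of 255 becomes 510 after
// the zig-zag shift in encodeVectorSignedNumber (255 << 1; no inversion, since
// the value is non-negative). encodeNumber then emits the low five bits first:
// (0x20 | (510 & 0x1f)) + 63 = 125 -> '}' and (510 >> 5) + 63 = 78 -> 'N',
// so this delta is written as the two characters "}N".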
|
||||
|
||||
void PolylineCompressor::printEncodedString(
|
||||
const std::vector<SegmentInformation> & polyline,
|
||||
std::string & output
|
||||
) const {
|
||||
std::vector<int> deltaNumbers;
|
||||
output += "\"";
|
||||
if(!polyline.empty()) {
|
||||
FixedPointCoordinate lastCoordinate = polyline[0].location;
|
||||
deltaNumbers.push_back( lastCoordinate.lat );
|
||||
deltaNumbers.push_back( lastCoordinate.lon );
|
||||
for(unsigned i = 1; i < polyline.size(); ++i) {
|
||||
if(!polyline[i].necessary) {
|
||||
continue;
|
||||
}
|
||||
deltaNumbers.push_back(polyline[i].location.lat - lastCoordinate.lat);
|
||||
deltaNumbers.push_back(polyline[i].location.lon - lastCoordinate.lon);
|
||||
lastCoordinate = polyline[i].location;
|
||||
}
|
||||
encodeVectorSignedNumber(deltaNumbers, output);
|
||||
}
|
||||
output += "\"";
|
||||
|
||||
}
|
||||
|
||||
void PolylineCompressor::printEncodedString(
|
||||
const std::vector<FixedPointCoordinate>& polyline,
|
||||
std::string &output
|
||||
) const {
|
||||
std::vector<int> deltaNumbers(2*polyline.size());
|
||||
output += "\"";
|
||||
if(!polyline.empty()) {
|
||||
deltaNumbers[0] = polyline[0].lat;
|
||||
deltaNumbers[1] = polyline[0].lon;
|
||||
for(unsigned i = 1; i < polyline.size(); ++i) {
|
||||
deltaNumbers[(2*i)] = (polyline[i].lat - polyline[i-1].lat);
|
||||
deltaNumbers[(2*i)+1] = (polyline[i].lon - polyline[i-1].lon);
|
||||
}
|
||||
encodeVectorSignedNumber(deltaNumbers, output);
|
||||
}
|
||||
output += "\"";
|
||||
}
|
||||
|
||||
void PolylineCompressor::printUnencodedString(
|
||||
const std::vector<FixedPointCoordinate> & polyline,
|
||||
std::string & output
|
||||
) const {
|
||||
output += "[";
|
||||
std::string tmp;
|
||||
for(unsigned i = 0; i < polyline.size(); i++) {
|
||||
FixedPointCoordinate::convertInternalLatLonToString(polyline[i].lat, tmp);
|
||||
output += "[";
|
||||
output += tmp;
|
||||
FixedPointCoordinate::convertInternalLatLonToString(polyline[i].lon, tmp);
|
||||
output += ", ";
|
||||
output += tmp;
|
||||
output += "]";
|
||||
if( i < polyline.size()-1 ) {
|
||||
output += ",";
|
||||
}
|
||||
}
|
||||
output += "]";
|
||||
}
|
||||
|
||||
void PolylineCompressor::printUnencodedString(
|
||||
const std::vector<SegmentInformation> & polyline,
|
||||
std::string & output
|
||||
) const {
|
||||
output += "[";
|
||||
std::string tmp;
|
||||
for(unsigned i = 0; i < polyline.size(); i++) {
|
||||
if(!polyline[i].necessary) {
|
||||
continue;
|
||||
}
|
||||
FixedPointCoordinate::convertInternalLatLonToString(polyline[i].location.lat, tmp);
|
||||
output += "[";
|
||||
output += tmp;
|
||||
FixedPointCoordinate::convertInternalLatLonToString(polyline[i].location.lon, tmp);
|
||||
output += ", ";
|
||||
output += tmp;
|
||||
output += "]";
|
||||
if( i < polyline.size()-1 ) {
|
||||
output += ",";
|
||||
}
|
||||
}
|
||||
output += "]";
|
||||
}
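// Usage sketch for the coordinate overload above (illustrative only).
// FixedPointCoordinate stores lat/lon as integers scaled by
// COORDINATE_PRECISION; the values below are placeholders.
inline std::string EncodePolylineExample()
{
    std::vector<FixedPointCoordinate> line(2);
    line[0].lat = 52500000;
    line[0].lon = 13400000;
    line[1].lat = 52500100;
    line[1].lon = 13400200;

    std::string output;
    PolylineCompressor compressor;
    compressor.printEncodedString(line, output); // result is wrapped in quotes
    return output;
}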
|
||||
@ -1,6 +1,6 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2017, Project OSRM contributors
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
@ -25,39 +25,45 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef STORAGE_HPP
|
||||
#define STORAGE_HPP
|
||||
#ifndef POLYLINECOMPRESSOR_H_
|
||||
#define POLYLINECOMPRESSOR_H_
|
||||
|
||||
#include "storage/shared_data_index.hpp"
|
||||
#include "storage/shared_datatype.hpp"
|
||||
#include "storage/storage_config.hpp"
|
||||
#include "../DataStructures/SegmentInformation.h"
|
||||
#include "../Util/StringUtil.h"
|
||||
|
||||
#include <filesystem>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
namespace osrm::storage
|
||||
{
|
||||
class PolylineCompressor {
|
||||
private:
|
||||
void encodeVectorSignedNumber(
|
||||
std::vector<int> & numbers,
|
||||
std::string & output
|
||||
) const;
|
||||
|
||||
void populateLayoutFromFile(const std::filesystem::path &path, storage::BaseDataLayout &layout);
|
||||
void encodeNumber(int number_to_encode, std::string & output) const;
|
||||
|
||||
class Storage
|
||||
{
|
||||
public:
|
||||
Storage(StorageConfig config);
|
||||
public:
|
||||
void printEncodedString(
|
||||
const std::vector<SegmentInformation> & polyline,
|
||||
std::string & output
|
||||
) const;
|
||||
|
||||
int Run(int max_wait, const std::string &name, bool only_metric);
|
||||
void PopulateStaticData(const SharedDataIndex &index);
|
||||
void PopulateUpdatableData(const SharedDataIndex &index);
|
||||
void PopulateLayout(storage::BaseDataLayout &layout,
|
||||
const std::vector<std::pair<bool, std::filesystem::path>> &files);
|
||||
std::string PopulateLayoutWithRTree(storage::BaseDataLayout &layout);
|
||||
std::vector<std::pair<bool, std::filesystem::path>> GetUpdatableFiles();
|
||||
std::vector<std::pair<bool, std::filesystem::path>> GetStaticFiles();
|
||||
void printEncodedString(
|
||||
const std::vector<FixedPointCoordinate>& polyline,
|
||||
std::string &output
|
||||
) const;
|
||||
|
||||
void printUnencodedString(
|
||||
const std::vector<FixedPointCoordinate> & polyline,
|
||||
std::string & output
|
||||
) const;
|
||||
|
||||
void printUnencodedString(
|
||||
const std::vector<SegmentInformation> & polyline,
|
||||
std::string & output
|
||||
) const;
|
||||
|
||||
private:
|
||||
StorageConfig config;
|
||||
};
|
||||
} // namespace osrm::storage
|
||||
|
||||
#endif
|
||||
#endif /* POLYLINECOMPRESSOR_H_ */
|
||||
501 Algorithms/StronglyConnectedComponents.h Normal file
@ -0,0 +1,501 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef STRONGLYCONNECTEDCOMPONENTS_H_
|
||||
#define STRONGLYCONNECTEDCOMPONENTS_H_
|
||||
|
||||
#include "../DataStructures/DeallocatingVector.h"
|
||||
#include "../DataStructures/DynamicGraph.h"
|
||||
#include "../DataStructures/ImportEdge.h"
|
||||
#include "../DataStructures/QueryNode.h"
|
||||
#include "../DataStructures/Percent.h"
|
||||
#include "../DataStructures/Restriction.h"
|
||||
#include "../DataStructures/TurnInstructions.h"
|
||||
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/integer.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/unordered_map.hpp>
|
||||
#include <boost/unordered_set.hpp>
|
||||
|
||||
#ifdef __APPLE__
|
||||
#include <gdal.h>
|
||||
#include <ogrsf_frmts.h>
|
||||
#else
|
||||
#include <gdal/gdal.h>
|
||||
#include <gdal/ogrsf_frmts.h>
|
||||
#endif
|
||||
|
||||
#include <stack>
|
||||
#include <vector>
|
||||
|
||||
class TarjanSCC {
|
||||
private:
|
||||
|
||||
struct TarjanNode {
|
||||
TarjanNode() : index(UINT_MAX), lowlink(UINT_MAX), onStack(false) {}
|
||||
unsigned index;
|
||||
unsigned lowlink;
|
||||
bool onStack;
|
||||
};
|
||||
|
||||
struct TarjanEdgeData {
|
||||
int distance;
|
||||
unsigned nameID:31;
|
||||
bool shortcut:1;
|
||||
short type;
|
||||
bool isAccessRestricted:1;
|
||||
bool forward:1;
|
||||
bool backward:1;
|
||||
bool roundabout:1;
|
||||
bool ignoreInGrid:1;
|
||||
bool reversedEdge:1;
|
||||
};
|
||||
|
||||
struct TarjanStackFrame {
|
||||
explicit TarjanStackFrame(
|
||||
NodeID v,
|
||||
NodeID parent
|
||||
) : v(v), parent(parent) { }
|
||||
NodeID v;
|
||||
NodeID parent;
|
||||
};
|
||||
|
||||
typedef DynamicGraph<TarjanEdgeData> TarjanDynamicGraph;
|
||||
typedef TarjanDynamicGraph::InputEdge TarjanEdge;
|
||||
typedef std::pair<NodeID, NodeID> RestrictionSource;
|
||||
typedef std::pair<NodeID, bool> restriction_target;
|
||||
typedef std::vector<restriction_target> EmanatingRestrictionsVector;
|
||||
typedef boost::unordered_map<RestrictionSource, unsigned > RestrictionMap;
|
||||
|
||||
std::vector<NodeInfo> m_coordinate_list;
|
||||
std::vector<EmanatingRestrictionsVector> m_restriction_bucket_list;
|
||||
boost::shared_ptr<TarjanDynamicGraph> m_node_based_graph;
|
||||
boost::unordered_set<NodeID> m_barrier_node_list;
|
||||
boost::unordered_set<NodeID> m_traffic_light_list;
|
||||
unsigned m_restriction_counter;
|
||||
RestrictionMap m_restriction_map;
|
||||
|
||||
struct EdgeBasedNode {
|
||||
bool operator<(const EdgeBasedNode & other) const {
|
||||
return other.id < id;
|
||||
}
|
||||
bool operator==(const EdgeBasedNode & other) const {
|
||||
return id == other.id;
|
||||
}
|
||||
NodeID id;
|
||||
int lat1;
|
||||
int lat2;
|
||||
int lon1;
|
||||
int lon2:31;
|
||||
bool belongsToTinyComponent:1;
|
||||
NodeID nameID;
|
||||
unsigned weight:31;
|
||||
bool ignoreInGrid:1;
|
||||
};
|
||||
|
||||
public:
|
||||
TarjanSCC(
|
||||
int number_of_nodes,
|
||||
std::vector<NodeBasedEdge> & input_edges,
|
||||
std::vector<NodeID> & bn,
|
||||
std::vector<NodeID> & tl,
|
||||
std::vector<TurnRestriction> & irs,
|
||||
std::vector<NodeInfo> & nI
|
||||
) :
|
||||
m_coordinate_list(nI),
|
||||
m_restriction_counter(irs.size())
|
||||
{
|
||||
BOOST_FOREACH(const TurnRestriction & restriction, irs) {
|
||||
std::pair<NodeID, NodeID> restrictionSource = std::make_pair(
|
||||
restriction.fromNode, restriction.viaNode
|
||||
);
|
||||
unsigned index;
|
||||
RestrictionMap::iterator restriction_iterator = m_restriction_map.find(restrictionSource);
|
||||
if(restriction_iterator == m_restriction_map.end()) {
|
||||
index = m_restriction_bucket_list.size();
|
||||
m_restriction_bucket_list.resize(index+1);
|
||||
m_restriction_map[restrictionSource] = index;
|
||||
} else {
|
||||
index = restriction_iterator->second;
|
||||
//Map already contains an is_only_*-restriction
|
||||
if(m_restriction_bucket_list.at(index).begin()->second) {
|
||||
continue;
|
||||
} else if(restriction.flags.isOnly) {
|
||||
//We are going to insert an is_only_*-restriction. There can be only one.
|
||||
m_restriction_bucket_list.at(index).clear();
|
||||
}
|
||||
}
|
||||
|
||||
m_restriction_bucket_list.at(index).push_back(
|
||||
std::make_pair(restriction.toNode, restriction.flags.isOnly)
|
||||
);
|
||||
}
|
||||
|
||||
m_barrier_node_list.insert(bn.begin(), bn.end());
|
||||
m_traffic_light_list.insert(tl.begin(), tl.end());
|
||||
|
||||
DeallocatingVector< TarjanEdge > edge_list;
|
||||
BOOST_FOREACH(const NodeBasedEdge & input_edge, input_edges) {
|
||||
TarjanEdge edge;
|
||||
if(!input_edge.isForward()) {
|
||||
edge.source = input_edge.target();
|
||||
edge.target = input_edge.source();
|
||||
edge.data.backward = input_edge.isForward();
|
||||
edge.data.forward = input_edge.isBackward();
|
||||
} else {
|
||||
edge.source = input_edge.source();
|
||||
edge.target = input_edge.target();
|
||||
edge.data.forward = input_edge.isForward();
|
||||
edge.data.backward = input_edge.isBackward();
|
||||
}
|
||||
if(edge.source == edge.target) {
|
||||
continue;
|
||||
}
|
||||
|
||||
edge.data.distance = (std::max)((int)input_edge.weight(), 1 );
|
||||
BOOST_ASSERT( edge.data.distance > 0 );
|
||||
edge.data.shortcut = false;
|
||||
edge.data.roundabout = input_edge.isRoundabout();
|
||||
edge.data.ignoreInGrid = input_edge.ignoreInGrid();
|
||||
edge.data.nameID = input_edge.name();
|
||||
edge.data.type = input_edge.type();
|
||||
edge.data.isAccessRestricted = input_edge.isAccessRestricted();
|
||||
edge.data.reversedEdge = false;
|
||||
edge_list.push_back( edge );
|
||||
if( edge.data.backward ) {
|
||||
std::swap( edge.source, edge.target );
|
||||
edge.data.forward = input_edge.isBackward();
|
||||
edge.data.backward = input_edge.isForward();
|
||||
edge.data.reversedEdge = true;
|
||||
edge_list.push_back( edge );
|
||||
}
|
||||
}
|
||||
std::vector<NodeBasedEdge>().swap(input_edges);
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == input_edges.size() && 0 == input_edges.capacity(),
|
||||
"input edge vector not properly deallocated"
|
||||
);
|
||||
|
||||
std::sort( edge_list.begin(), edge_list.end() );
|
||||
|
||||
m_node_based_graph = boost::make_shared<TarjanDynamicGraph>(
|
||||
number_of_nodes,
|
||||
edge_list
|
||||
);
|
||||
}
|
||||
|
||||
~TarjanSCC() {
|
||||
m_node_based_graph.reset();
|
||||
}
|
||||
|
||||
void Run() {
|
||||
//remove files from previous run if exist
|
||||
DeleteFileIfExists("component.dbf");
|
||||
DeleteFileIfExists("component.shx");
|
||||
DeleteFileIfExists("component.shp");
|
||||
|
||||
Percent p(m_node_based_graph->GetNumberOfNodes());
|
||||
|
||||
OGRRegisterAll();
|
||||
|
||||
const char *pszDriverName = "ESRI Shapefile";
|
||||
OGRSFDriver * poDriver = OGRSFDriverRegistrar::GetRegistrar()->
|
||||
GetDriverByName( pszDriverName );
|
||||
if( NULL == poDriver ) {
|
||||
throw OSRMException("ESRI Shapefile driver not available");
|
||||
}
|
||||
OGRDataSource * poDS = poDriver->CreateDataSource(
|
||||
"component.shp",
|
||||
NULL
|
||||
);
|
||||
|
||||
if( NULL == poDS ) {
|
||||
throw OSRMException("Creation of output file failed");
|
||||
}
|
||||
|
||||
OGRLayer * poLayer = poDS->CreateLayer(
|
||||
"component",
|
||||
NULL,
|
||||
wkbLineString,
|
||||
NULL
|
||||
);
|
||||
|
||||
if( NULL == poLayer ) {
|
||||
throw OSRMException("Layer creation failed.");
|
||||
}
|
||||
|
||||
//The following is a hack to distinguish between stuff that happens
|
||||
//before the recursive call and stuff that happens after
|
||||
std::stack<std::pair<bool, TarjanStackFrame> > recursion_stack;
|
||||
//true = stuff before, false = stuff after call
|
||||
std::stack<NodeID> tarjan_stack;
|
||||
std::vector<unsigned> components_index(
|
||||
m_node_based_graph->GetNumberOfNodes(),
|
||||
UINT_MAX
|
||||
);
|
||||
std::vector<NodeID> component_size_vector;
|
||||
std::vector<TarjanNode> tarjan_node_list(
|
||||
m_node_based_graph->GetNumberOfNodes()
|
||||
);
|
||||
unsigned component_index = 0, size_of_current_component = 0;
|
||||
int index = 0;
|
||||
for(
|
||||
NodeID node = 0, last_node = m_node_based_graph->GetNumberOfNodes();
|
||||
node < last_node;
|
||||
++node
|
||||
) {
|
||||
if(UINT_MAX == components_index[node]) {
|
||||
recursion_stack.push(
|
||||
std::make_pair(true, TarjanStackFrame(node,node))
|
||||
);
|
||||
}
|
||||
|
||||
while(!recursion_stack.empty()) {
|
||||
bool before_recursion = recursion_stack.top().first;
|
||||
TarjanStackFrame currentFrame = recursion_stack.top().second;
|
||||
NodeID v = currentFrame.v;
|
||||
recursion_stack.pop();
|
||||
|
||||
if(before_recursion) {
|
||||
//Mark frame to handle tail of recursion
|
||||
recursion_stack.push(std::make_pair(false, currentFrame));
|
||||
|
||||
//Mark essential information for SCC
|
||||
tarjan_node_list[v].index = index;
|
||||
tarjan_node_list[v].lowlink = index;
|
||||
tarjan_stack.push(v);
|
||||
tarjan_node_list[v].onStack = true;
|
||||
++index;
|
||||
|
||||
//Traverse outgoing edges
|
||||
for(
|
||||
TarjanDynamicGraph::EdgeIterator e2 = m_node_based_graph->BeginEdges(v);
|
||||
e2 < m_node_based_graph->EndEdges(v);
|
||||
++e2
|
||||
) {
|
||||
const TarjanDynamicGraph::NodeIterator vprime =
|
||||
m_node_based_graph->GetTarget(e2);
|
||||
if(UINT_MAX == tarjan_node_list[vprime].index) {
|
||||
recursion_stack.push(
|
||||
std::make_pair(
|
||||
true,
|
||||
TarjanStackFrame(vprime, v)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
if(
|
||||
tarjan_node_list[vprime].onStack &&
|
||||
tarjan_node_list[vprime].index < tarjan_node_list[v].lowlink
|
||||
) {
|
||||
tarjan_node_list[v].lowlink = tarjan_node_list[vprime].index;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tarjan_node_list[currentFrame.parent].lowlink =
|
||||
std::min(
|
||||
tarjan_node_list[currentFrame.parent].lowlink,
|
||||
tarjan_node_list[v].lowlink
|
||||
);
|
||||
//after recursion, lets do cycle checking
|
||||
//Check if we found a cycle. This is the bottom part of the recursion
|
||||
if(tarjan_node_list[v].lowlink == tarjan_node_list[v].index) {
|
||||
NodeID vprime;
|
||||
do {
|
||||
vprime = tarjan_stack.top(); tarjan_stack.pop();
|
||||
tarjan_node_list[vprime].onStack = false;
|
||||
components_index[vprime] = component_index;
|
||||
++size_of_current_component;
|
||||
} while( v != vprime);
|
||||
|
||||
component_size_vector.push_back(size_of_current_component);
|
||||
|
||||
if(size_of_current_component > 1000) {
|
||||
SimpleLogger().Write() <<
|
||||
"large component [" << component_index << "]=" <<
|
||||
size_of_current_component;
|
||||
}
|
||||
|
||||
++component_index;
|
||||
size_of_current_component = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SimpleLogger().Write() <<
|
||||
"identified: " << component_size_vector.size() <<
|
||||
" many components, marking small components";
|
||||
|
||||
unsigned size_one_counter = 0;
|
||||
for(unsigned i = 0, end = component_size_vector.size(); i < end; ++i){
|
||||
if(1 == component_size_vector[i]) {
|
||||
++size_one_counter;
|
||||
}
|
||||
}
|
||||
|
||||
SimpleLogger().Write() <<
|
||||
"identified " << size_one_counter << " SCCs of size 1";
|
||||
|
||||
uint64_t total_network_distance = 0;
|
||||
p.reinit(m_node_based_graph->GetNumberOfNodes());
|
||||
for(
|
||||
TarjanDynamicGraph::NodeIterator u = 0, last_u_node = m_node_based_graph->GetNumberOfNodes();
|
||||
u < last_u_node;
|
||||
++u
|
||||
) {
|
||||
p.printIncrement();
|
||||
for(
|
||||
TarjanDynamicGraph::EdgeIterator e1 = m_node_based_graph->BeginEdges(u), last_edge = m_node_based_graph->EndEdges(u);
|
||||
e1 < last_edge;
|
||||
++e1
|
||||
) {
|
||||
if(!m_node_based_graph->GetEdgeData(e1).reversedEdge) {
|
||||
continue;
|
||||
}
|
||||
const TarjanDynamicGraph::NodeIterator v = m_node_based_graph->GetTarget(e1);
|
||||
|
||||
total_network_distance += 100*FixedPointCoordinate::ApproximateDistance(
|
||||
m_coordinate_list[u].lat,
|
||||
m_coordinate_list[u].lon,
|
||||
m_coordinate_list[v].lat,
|
||||
m_coordinate_list[v].lon
|
||||
);
|
||||
|
||||
if( SHRT_MAX != m_node_based_graph->GetEdgeData(e1).type ) {
|
||||
BOOST_ASSERT(e1 != UINT_MAX);
|
||||
BOOST_ASSERT(u != UINT_MAX);
|
||||
BOOST_ASSERT(v != UINT_MAX);
|
||||
|
||||
const unsigned size_of_containing_component =
|
||||
std::min(
|
||||
component_size_vector[components_index[u]],
|
||||
component_size_vector[components_index[v]]
|
||||
);
|
||||
|
||||
//edges that end on bollard nodes may actually be in two distinct components
|
||||
if(size_of_containing_component < 10) {
|
||||
OGRLineString lineString;
|
||||
lineString.addPoint(
|
||||
m_coordinate_list[u].lon/COORDINATE_PRECISION,
|
||||
m_coordinate_list[u].lat/COORDINATE_PRECISION
|
||||
);
|
||||
lineString.addPoint(
|
||||
m_coordinate_list[v].lon/COORDINATE_PRECISION,
|
||||
m_coordinate_list[v].lat/COORDINATE_PRECISION
|
||||
);
|
||||
|
||||
OGRFeature * poFeature = OGRFeature::CreateFeature(
|
||||
poLayer->GetLayerDefn()
|
||||
);
|
||||
|
||||
poFeature->SetGeometry( &lineString );
|
||||
if( OGRERR_NONE != poLayer->CreateFeature(poFeature) ) {
|
||||
throw OSRMException(
|
||||
"Failed to create feature in shapefile."
|
||||
);
|
||||
}
|
||||
OGRFeature::DestroyFeature( poFeature );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
OGRDataSource::DestroyDataSource( poDS );
|
||||
std::vector<NodeID>().swap(component_size_vector);
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == component_size_vector.size() &&
|
||||
0 == component_size_vector.capacity(),
|
||||
"component_size_vector not properly deallocated"
|
||||
);
|
||||
|
||||
std::vector<NodeID>().swap(components_index);
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == components_index.size() && 0 == components_index.capacity(),
|
||||
"icomponents_index not properly deallocated"
|
||||
);
|
||||
|
||||
SimpleLogger().Write()
|
||||
<< "total network distance: " <<
|
||||
(uint64_t)total_network_distance/100/1000. <<
|
||||
" km";
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned CheckForEmanatingIsOnlyTurn(const NodeID u, const NodeID v) const {
|
||||
std::pair < NodeID, NodeID > restriction_source = std::make_pair(u, v);
|
||||
RestrictionMap::const_iterator restriction_iterator = m_restriction_map.find(restriction_source);
|
||||
if (restriction_iterator != m_restriction_map.end()) {
|
||||
const unsigned index = restriction_iterator->second;
|
||||
BOOST_FOREACH(
|
||||
const RestrictionSource & restriction_target,
|
||||
m_restriction_bucket_list.at(index)
|
||||
) {
|
||||
if(restriction_target.second) {
|
||||
return restriction_target.first;
|
||||
}
|
||||
}
|
||||
}
|
||||
return UINT_MAX;
|
||||
}
|
||||
|
||||
bool CheckIfTurnIsRestricted(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w
|
||||
) const {
|
||||
// only add an edge if the turn is not a U-turn, unless it is at the end of a dead-end street
|
||||
std::pair < NodeID, NodeID > restriction_source = std::make_pair(u, v);
|
||||
RestrictionMap::const_iterator restriction_iterator = m_restriction_map.find(restriction_source);
|
||||
if (restriction_iterator != m_restriction_map.end()) {
|
||||
const unsigned index = restriction_iterator->second;
|
||||
BOOST_FOREACH(
|
||||
const restriction_target & restriction_target,
|
||||
m_restriction_bucket_list.at(index)
|
||||
) {
|
||||
if(w == restriction_target.first) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void DeleteFileIfExists(const std::string & file_name) const {
|
||||
if (boost::filesystem::exists(file_name) ) {
|
||||
boost::filesystem::remove(file_name);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* STRONGLYCONNECTEDCOMPONENTS_H_ */
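// Driver sketch for the analysis above (function name illustrative): the
// containers are assumed to be filled by the extractor; only the call
// sequence is shown. Note that the constructor consumes (clears) the input
// edge vector, and that one coordinate per node is assumed here.
inline void RunComponentAnalysisExample(
    std::vector<NodeBasedEdge> & edges,
    std::vector<NodeID> & barrier_nodes,
    std::vector<NodeID> & traffic_lights,
    std::vector<TurnRestriction> & restrictions,
    std::vector<NodeInfo> & coordinates
) {
    TarjanSCC scc_analysis(
        static_cast<int>(coordinates.size()),
        edges,
        barrier_nodes,
        traffic_lights,
        restrictions,
        coordinates
    );
    // Run() writes component.shp/.shx/.dbf into the working directory and
    // logs the sizes of the strongly connected components it finds.
    scc_analysis.Run();
}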
|
||||
1102 CHANGELOG.md
File diff suppressed because it is too large
910 CMakeLists.txt
@ -1,755 +1,269 @@
|
||||
cmake_minimum_required(VERSION 3.18)
|
||||
|
||||
set(CMAKE_CXX_STANDARD 20)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
|
||||
if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR AND NOT MSVC_IDE)
|
||||
message(FATAL_ERROR "In-source builds are not allowed.
|
||||
Please create a directory and run cmake from there, passing the path to this source directory as the last argument.
|
||||
This process created the file `CMakeCache.txt' and the directory `CMakeFiles'. Please delete them.")
|
||||
endif()
|
||||
|
||||
# detect if this is included as subproject and if so expose
|
||||
# some variables to its parent scope
|
||||
get_directory_property(BUILD_AS_SUBPROJECT PARENT_DIRECTORY)
|
||||
if(BUILD_AS_SUBPROJECT)
|
||||
message(STATUS "Building libosrm as subproject.")
|
||||
endif()
|
||||
|
||||
# set OSRM_BUILD_DIR location (might be used in various scripts)
|
||||
if (NOT WIN32 AND NOT DEFINED ENV{OSRM_BUILD_DIR})
|
||||
set(ENV{OSRM_BUILD_DIR} ${CMAKE_CURRENT_BINARY_DIR})
|
||||
endif()
|
||||
|
||||
option(ENABLE_CONAN "Use conan for dependencies" OFF)
|
||||
option(ENABLE_CCACHE "Speed up incremental rebuilds via ccache" ON)
|
||||
option(BUILD_TOOLS "Build OSRM tools" OFF)
|
||||
option(BUILD_PACKAGE "Build OSRM package" OFF)
|
||||
option(BUILD_ROUTED "Build osrm-routed HTTP server" ON)
|
||||
option(ENABLE_ASSERTIONS "Use assertions in release mode" OFF)
|
||||
option(ENABLE_DEBUG_LOGGING "Use debug logging in release mode" OFF)
|
||||
option(ENABLE_COVERAGE "Build with coverage instrumentalisation" OFF)
|
||||
option(ENABLE_SANITIZER "Use memory sanitizer for Debug build" OFF)
|
||||
option(ENABLE_LTO "Use Link Time Optimisation" ON)
|
||||
option(ENABLE_FUZZING "Fuzz testing using LLVM's libFuzzer" OFF)
|
||||
option(ENABLE_NODE_BINDINGS "Build NodeJs bindings" OFF)
|
||||
option(ENABLE_CLANG_TIDY "Enables clang-tidy checks" OFF)
|
||||
|
||||
|
||||
if (ENABLE_CLANG_TIDY)
|
||||
find_program(CLANG_TIDY_COMMAND NAMES clang-tidy)
|
||||
if(NOT CLANG_TIDY_COMMAND)
|
||||
message(FATAL_ERROR "ENABLE_CLANG_TIDY is ON but clang-tidy is not found!")
|
||||
else()
|
||||
message(STATUS "Found clang-tidy at ${CLANG_TIDY_COMMAND}")
|
||||
set(CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND};--warnings-as-errors=*;--header-filter=.*")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
|
||||
|
||||
# be compatible with version handling before cmake 3.x
|
||||
if (POLICY CMP0057)
|
||||
cmake_policy(SET CMP0057 NEW)
|
||||
endif()
|
||||
if (POLICY CMP0074)
|
||||
cmake_policy(SET CMP0074 NEW)
|
||||
endif()
|
||||
if (POLICY CMP0167)
|
||||
cmake_policy(SET CMP0167 NEW)
|
||||
endif()
|
||||
project(OSRM C CXX)
|
||||
|
||||
|
||||
if(ENABLE_LTO AND (CMAKE_BUILD_TYPE MATCHES Release OR CMAKE_BUILD_TYPE MATCHES MinRelSize OR CMAKE_BUILD_TYPE MATCHES RelWithDebInfo))
|
||||
include(CheckIPOSupported)
|
||||
check_ipo_supported(RESULT LTO_SUPPORTED OUTPUT error)
|
||||
if(LTO_SUPPORTED)
|
||||
message(STATUS "IPO / LTO enabled")
|
||||
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)
|
||||
else()
|
||||
message(FATAL_ERROR "IPO / LTO not supported: <${error}>")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# add @loader_path/$ORIGIN to rpath to make binaries relocatable
|
||||
if (APPLE)
|
||||
set(CMAKE_BUILD_RPATH "@loader_path")
|
||||
else()
|
||||
set(CMAKE_BUILD_RPATH "\$ORIGIN")
|
||||
# https://stackoverflow.com/questions/6324131/rpath-origin-not-having-desired-effect
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-z,origin")
|
||||
endif()
|
||||
|
||||
include(JSONParser)
|
||||
file(READ "package.json" packagejsonraw)
|
||||
sbeParseJson(packagejson packagejsonraw)
|
||||
|
||||
# This regex is not strict enough, but the correct one is too complicated for cmake matching.
|
||||
# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
|
||||
if (packagejson.version MATCHES "^([0-9]+)\.([0-9]+)\.([0-9]+)([-+][0-9a-zA-Z.-]+)?$")
|
||||
set(OSRM_VERSION_MAJOR ${CMAKE_MATCH_1})
|
||||
set(OSRM_VERSION_MINOR ${CMAKE_MATCH_2})
|
||||
set(OSRM_VERSION_PATCH ${CMAKE_MATCH_3})
|
||||
set(OSRM_VERSION_PRERELEASE_BUILD ${CMAKE_MATCH_4})
|
||||
|
||||
set(OSRM_VERSION packagejson.version)
|
||||
else()
|
||||
message(FATAL_ERROR "Version from package.json cannot be parsed, expected semver compatible label, but found ${packagejson.version}")
|
||||
endif()
|
||||
|
||||
if (MSVC)
|
||||
add_definitions("-DOSRM_PROJECT_DIR=\"${CMAKE_CURRENT_SOURCE_DIR}\"")
|
||||
else()
|
||||
add_definitions(-DOSRM_PROJECT_DIR="${CMAKE_CURRENT_SOURCE_DIR}")
|
||||
endif()
|
||||
|
||||
# these two functions build up custom variables:
|
||||
# DEPENDENCIES_INCLUDE_DIRS and OSRM_DEFINES
|
||||
# These variables we want to pass to
|
||||
# include_directories and add_definitions for both
|
||||
# this build and for sharing externally via pkg-config
|
||||
|
||||
function(add_dependency_includes)
|
||||
if(${ARGC} GREATER 0)
|
||||
list(APPEND DEPENDENCIES_INCLUDE_DIRS "${ARGV}")
|
||||
set(DEPENDENCIES_INCLUDE_DIRS "${DEPENDENCIES_INCLUDE_DIRS}" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction(add_dependency_includes)
|
||||
|
||||
function(add_dependency_defines defines)
|
||||
list(APPEND OSRM_DEFINES "${defines}")
|
||||
set(OSRM_DEFINES "${OSRM_DEFINES}" PARENT_SCOPE)
|
||||
endfunction(add_dependency_defines)
|
||||
|
||||
cmake_minimum_required(VERSION 2.6)
|
||||
project(OSRM)
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
|
||||
include(CheckCXXCompilerFlag)
|
||||
include(FindPackageHandleStandardArgs)
|
||||
include(GNUInstallDirs)
|
||||
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
|
||||
include(GetGitRevisionDescription)
|
||||
git_describe(GIT_DESCRIPTION)
|
||||
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
|
||||
|
||||
set(bitness 32)
|
||||
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
set(bitness 64)
|
||||
message(STATUS "Building on a 64 bit system")
|
||||
else()
|
||||
message(FATAL_ERROR "Building on a 32 bit system is not supported")
|
||||
message(WARNING "Building on a 32 bit system is unsupported")
|
||||
endif()
|
||||
|
||||
include_directories(BEFORE ${CMAKE_CURRENT_BINARY_DIR}/include/)
|
||||
include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}/include/)
|
||||
include_directories(SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/generated/include/)
|
||||
include_directories(SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/third_party/sol2/include)
|
||||
include_directories(${CMAKE_SOURCE_DIR}/Include/)
|
||||
|
||||
set(BOOST_COMPONENTS date_time iostreams program_options thread unit_test_framework)
|
||||
add_custom_command(OUTPUT ${CMAKE_SOURCE_DIR}/Util/UUID.cpp UUID.cpp.alwaysbuild
|
||||
COMMAND ${CMAKE_COMMAND} -DSOURCE_DIR=${CMAKE_SOURCE_DIR}
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/UUID-Config.cmake
|
||||
DEPENDS
|
||||
${CMAKE_SOURCE_DIR}/Util/UUID.cpp.in
|
||||
${CMAKE_SOURCE_DIR}/cmake/UUID-Config.cmake
|
||||
COMMENT "Configuring UUID.cpp"
|
||||
VERBATIM)
|
||||
|
||||
add_custom_target(UUIDConfigure DEPENDS ${CMAKE_SOURCE_DIR}/Util/UUID.cpp)
|
||||
|
||||
set(BOOST_COMPONENTS date_time filesystem iostreams program_options regex system thread)
|
||||
|
||||
configure_file(
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/include/util/version.hpp.in
|
||||
${CMAKE_CURRENT_BINARY_DIR}/include/util/version.hpp
|
||||
${CMAKE_SOURCE_DIR}/Util/GitDescription.cpp.in
|
||||
${CMAKE_SOURCE_DIR}/Util/GitDescription.cpp
|
||||
)
|
||||
file(GLOB UtilGlob src/util/*.cpp src/util/*/*.cpp)
|
||||
file(GLOB ExtractorGlob src/extractor/*.cpp src/extractor/*/*.cpp)
|
||||
file(GLOB GuidanceGlob src/guidance/*.cpp src/extractor/intersection/*.cpp)
|
||||
file(GLOB PartitionerGlob src/partitioner/*.cpp)
|
||||
file(GLOB CustomizerGlob src/customize/*.cpp)
|
||||
file(GLOB ContractorGlob src/contractor/*.cpp)
|
||||
file(GLOB UpdaterGlob src/updater/*.cpp)
|
||||
file(GLOB StorageGlob src/storage/*.cpp)
|
||||
file(GLOB ServerGlob src/server/*.cpp src/server/**/*.cpp)
|
||||
file(GLOB EngineGlob src/engine/*.cpp src/engine/**/*.cpp)
|
||||
file(GLOB ErrorcodesGlob src/osrm/errorcodes.cpp)
|
||||
file(GLOB ExtractorGlob Extractor/*.cpp)
|
||||
set(ExtractorSources extractor.cpp ${ExtractorGlob})
|
||||
add_executable(osrm-extract ${ExtractorSources})
|
||||
|
||||
add_library(UTIL OBJECT ${UtilGlob})
|
||||
add_library(EXTRACTOR OBJECT ${ExtractorGlob})
|
||||
add_library(GUIDANCE OBJECT ${GuidanceGlob})
|
||||
add_library(PARTITIONER OBJECT ${PartitionerGlob})
|
||||
add_library(CUSTOMIZER OBJECT ${CustomizerGlob})
|
||||
add_library(CONTRACTOR OBJECT ${ContractorGlob})
|
||||
add_library(UPDATER OBJECT ${UpdaterGlob})
|
||||
add_library(STORAGE OBJECT ${StorageGlob})
|
||||
add_library(ENGINE OBJECT ${EngineGlob})
|
||||
file(GLOB PrepareGlob Contractor/*.cpp DataStructures/HilbertValue.cpp)
|
||||
set(PrepareSources prepare.cpp ${PrepareGlob})
|
||||
add_executable(osrm-prepare ${PrepareSources})
|
||||
|
||||
if (BUILD_ROUTED)
|
||||
add_library(SERVER OBJECT ${ServerGlob})
|
||||
add_executable(osrm-routed src/tools/routed.cpp $<TARGET_OBJECTS:SERVER> $<TARGET_OBJECTS:UTIL>)
|
||||
endif()
|
||||
file(GLOB ServerGlob Server/*.cpp)
|
||||
file(GLOB DescriptorGlob Descriptors/*.cpp)
|
||||
file(GLOB DatastructureGlob DataStructures/SearchEngineData.cpp)
|
||||
file(GLOB CoordinateGlob DataStructures/Coordinate.cpp)
|
||||
file(GLOB AlgorithmGlob Algorithms/*.cpp)
|
||||
file(GLOB HttpGlob Server/Http/*.cpp)
|
||||
file(GLOB LibOSRMGlob Library/*.cpp)
|
||||
|
||||
set_target_properties(UTIL PROPERTIES LINKER_LANGUAGE CXX)
|
||||
set(
|
||||
OSRMSources
|
||||
${LibOSRMGlob}
|
||||
${DescriptorGlob}
|
||||
${DatastructureGlob}
|
||||
${CoordinateGlob}
|
||||
${AlgorithmGlob}
|
||||
${HttpGlob}
|
||||
)
|
||||
add_library(COORDLIB STATIC ${CoordinateGlob})
|
||||
add_library(OSRM ${OSRMSources} Util/GitDescription.cpp Util/UUID.cpp)
|
||||
add_library(UUID STATIC Util/UUID.cpp)
|
||||
add_library(GITDESCRIPTION STATIC Util/GitDescription.cpp)
|
||||
add_dependencies(UUID UUIDConfigure)
|
||||
add_dependencies(GITDESCRIPTION GIT_DESCRIPTION)
|
||||
|
||||
add_executable(osrm-extract src/tools/extract.cpp)
|
||||
add_executable(osrm-partition src/tools/partition.cpp)
|
||||
add_executable(osrm-customize src/tools/customize.cpp)
|
||||
add_executable(osrm-contract src/tools/contract.cpp)
|
||||
add_executable(osrm-datastore src/tools/store.cpp $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm src/osrm/osrm.cpp $<TARGET_OBJECTS:ENGINE> $<TARGET_OBJECTS:STORAGE> $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_contract src/osrm/contractor.cpp $<TARGET_OBJECTS:CONTRACTOR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_extract src/osrm/extractor.cpp $<TARGET_OBJECTS:EXTRACTOR> $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_guidance $<TARGET_OBJECTS:GUIDANCE> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_partition src/osrm/partitioner.cpp $<TARGET_OBJECTS:PARTITIONER> $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_customize src/osrm/customizer.cpp $<TARGET_OBJECTS:CUSTOMIZER> $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_update $<TARGET_OBJECTS:UPDATER> $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_library(osrm_store $<TARGET_OBJECTS:STORAGE> $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
add_executable(osrm-routed routed.cpp ${ServerGlob})
|
||||
set_target_properties(osrm-routed PROPERTIES COMPILE_FLAGS -DROUTED)
|
||||
add_executable(osrm-datastore datastore.cpp)
|
||||
|
||||
# Explicitly set the build type to Release if no other type is specified
|
||||
# on the command line. Without this, cmake defaults to an unoptimized,
|
||||
# non-debug build, which almost nobody wants.
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
message(STATUS "No build type specified, defaulting to Release")
|
||||
# Check the release mode
|
||||
if(NOT CMAKE_BUILD_TYPE MATCHES Debug)
|
||||
set(CMAKE_BUILD_TYPE Release)
|
||||
endif()
|
||||
|
||||
if(CMAKE_BUILD_TYPE MATCHES Debug)
|
||||
message(STATUS "Configuring OSRM in debug mode")
|
||||
elseif(CMAKE_BUILD_TYPE MATCHES Release)
|
||||
message(STATUS "Configuring OSRM in release mode")
|
||||
elseif(CMAKE_BUILD_TYPE MATCHES RelWithDebInfo)
|
||||
message(STATUS "Configuring OSRM in release mode with debug flags")
|
||||
elseif(CMAKE_BUILD_TYPE MATCHES MinRelSize)
|
||||
message(STATUS "Configuring OSRM in release mode with minimized size")
|
||||
else()
|
||||
message(STATUS "Unrecognized build type - will use cmake defaults")
|
||||
endif()
|
||||
|
||||
# Additional logic for the different build types
|
||||
if(CMAKE_BUILD_TYPE MATCHES Debug OR CMAKE_BUILD_TYPE MATCHES RelWithDebInfo)
|
||||
message(STATUS "Configuring debug mode flags")
|
||||
set(ENABLE_ASSERTIONS ON)
|
||||
set(ENABLE_DEBUG_LOGGING ON)
|
||||
endif()
|
||||
|
||||
if(NOT CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
|
||||
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-inline -fno-omit-frame-pointer")
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -fno-inline -fno-omit-frame-pointer")
|
||||
endif()
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
|
||||
set(CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -ggdb")
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -Og -ggdb")
|
||||
endif()
|
||||
|
||||
set(MAYBE_COVERAGE_LIBRARIES "")
|
||||
if (ENABLE_COVERAGE)
|
||||
if (NOT CMAKE_BUILD_TYPE MATCHES "Debug")
|
||||
message(ERROR "ENABLE_COVERAGE=ON only makes sense with a Debug build")
|
||||
if(NOT "${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
|
||||
message(STATUS "adding profiling flags")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS} -fprofile-arcs -ftest-coverage -fno-inline")
|
||||
set(CMAKE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage -fno-inline")
|
||||
endif()
|
||||
message(STATUS "Enabling coverage")
|
||||
set(MAYBE_COVERAGE_LIBRARIES "-lgcov")
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 -ftest-coverage -fprofile-arcs")
|
||||
endif()
|
||||
|
||||
|
||||
if (ENABLE_SANITIZER)
|
||||
set(SANITIZER_FLAGS "-g -fsanitize=address -fsanitize-address-use-after-scope -fsanitize=undefined -fno-omit-frame-pointer")
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SANITIZER_FLAGS}")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SANITIZER_FLAGS}")
|
||||
set(OSRM_CXXFLAGS "${OSRM_CXXFLAGS} ${SANITIZER_FLAGS}")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${SANITIZER_FLAGS}")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${SANITIZER_FLAGS}")
|
||||
if(CMAKE_BUILD_TYPE MATCHES Release)
|
||||
message(STATUS "Configuring OSRM in release mode")
|
||||
endif()
|
||||
|
||||
# Configuring compilers
|
||||
include(cmake/warnings.cmake)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fPIC -fcolor-diagnostics -ftemplate-depth=1024")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
|
||||
set(COLOR_FLAG "-fdiagnostics-color=auto")
|
||||
check_cxx_compiler_flag("-fdiagnostics-color=auto" HAS_COLOR_FLAG)
|
||||
if(NOT HAS_COLOR_FLAG)
|
||||
set(COLOR_FLAG "")
|
||||
endif()
|
||||
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
|
||||
# using Clang
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-unknown-pragmas -Wno-unneeded-internal-declaration -pedantic -fPIC")
|
||||
message(STATUS "OpenMP parallelization not available using clang++")
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
# using GCC
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 ${COLOR_FLAG} -fPIC -ftemplate-depth=1024")
|
||||
|
||||
if(WIN32) # using mingw
|
||||
add_dependency_defines(-DWIN32)
|
||||
set(OPTIONAL_SOCKET_LIBS ws2_32 wsock32)
|
||||
endif()
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -fopenmp -pedantic -fPIC")
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
|
||||
# using Intel C++
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -static-intel -wd10237 -Wall -ipo -fPIC")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -static-intel -wd10237 -Wall -openmp -ipo -fPIC")
|
||||
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
|
||||
# using Visual Studio C++
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /bigobj") # avoid compiler error C1128 from scripting_environment_lua.cpp
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /DWIN32_LEAN_AND_MEAN") # avoid compiler error C2011 from dual #include of winsock.h and winsock2.h
|
||||
add_dependency_defines(-DBOOST_LIB_DIAGNOSTIC)
|
||||
add_dependency_defines(-D_CRT_SECURE_NO_WARNINGS)
|
||||
add_dependency_defines(-DNOMINMAX) # avoid min and max macros that can break compilation
|
||||
add_dependency_defines(-D_WIN32_WINNT=0x0501)
|
||||
add_dependency_defines(-DXML_STATIC)
|
||||
find_library(ws2_32_LIBRARY_PATH ws2_32)
|
||||
target_link_libraries(osrm-extract wsock32 ws2_32)
|
||||
endif()
|
||||
|
||||
# Check if LTO is available
|
||||
set(LTO_FLAGS "")
|
||||
CHECK_CXX_COMPILER_FLAG("-flto" HAS_LTO_FLAG)
|
||||
if (HAS_LTO_FLAG)
|
||||
set(LTO_FLAGS "${LTO_FLAGS} -flto")
|
||||
endif (HAS_LTO_FLAG)
|
||||
|
||||
# disable partitioning of LTO process when possible (fixes Debian issues)
|
||||
set(LTO_PARTITION_FLAGS "")
|
||||
CHECK_CXX_COMPILER_FLAG("-flto-partition=none" HAS_LTO_PARTITION_FLAG)
|
||||
if (HAS_LTO_PARTITION_FLAG)
|
||||
set(LTO_PARTITION_FLAGS "${LTO_PARTITION_FLAGS} -flto-partition=none")
|
||||
endif (HAS_LTO_PARTITION_FLAG)
|
||||
|
||||
# Add Link-Time-Optimization flags, if supported (GCC >= 4.5) and enabled
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${LTO_FLAGS}")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${LTO_FLAGS} ${LTO_PARTITION_FLAGS}")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${LTO_FLAGS} ${LTO_PARTITION_FLAGS}")
|
||||
|
||||
# Configuring other platform dependencies
|
||||
if(APPLE)
|
||||
set(CMAKE_OSX_ARCHITECTURES "x86_64")
|
||||
message(STATUS "Set Architecture to x64 on OS X")
|
||||
exec_program(uname ARGS -v OUTPUT_VARIABLE DARWIN_VERSION)
|
||||
string(REGEX MATCH "[0-9]+" DARWIN_VERSION ${DARWIN_VERSION})
|
||||
if(DARWIN_VERSION GREATER 12 AND NOT OSXLIBSTD)
|
||||
message(STATUS "Activating -std=c++11 flag for >= OS X 10.9")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
|
||||
endif()
|
||||
if(OSXLIBSTD)
|
||||
message(STATUS "linking against ${OSXLIBSTD}")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=${OSXLIBSTD}")
|
||||
elseif(DARWIN_VERSION GREATER 12)
|
||||
message(STATUS "linking against libc++")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
find_library(RT_LIB rt)
|
||||
if (RT_LIB)
|
||||
set(MAYBE_RT_LIBRARY -lrt)
|
||||
endif()
|
||||
target_link_libraries(osrm-datastore rt)
|
||||
target_link_libraries(OSRM rt)
|
||||
endif()
|
||||
|
||||
#Check Boost
|
||||
set(BOOST_MIN_VERSION "1.46.0")
|
||||
find_package(Boost ${BOOST_MIN_VERSION} COMPONENTS ${BOOST_COMPONENTS} REQUIRED)
|
||||
if(NOT Boost_FOUND)
|
||||
message(FATAL_ERROR "Fatal error: Boost (version >= 1.46.0) required.\n")
|
||||
endif()
|
||||
include_directories(${Boost_INCLUDE_DIRS})
|
||||
|
||||
target_link_libraries(OSRM ${Boost_LIBRARIES} COORDLIB)
|
||||
target_link_libraries(osrm-extract ${Boost_LIBRARIES} UUID GITDESCRIPTION COORDLIB)
|
||||
target_link_libraries(osrm-prepare ${Boost_LIBRARIES} UUID GITDESCRIPTION COORDLIB)
|
||||
target_link_libraries(osrm-routed ${Boost_LIBRARIES} OSRM UUID GITDESCRIPTION)
|
||||
target_link_libraries(osrm-datastore ${Boost_LIBRARIES} UUID GITDESCRIPTION COORDLIB)
|
||||
|
||||
find_package(Threads REQUIRED)
|
||||
target_link_libraries(osrm-extract ${CMAKE_THREAD_LIBS_INIT})
|
||||
|
||||
# Third-party libraries
|
||||
set(RAPIDJSON_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/rapidjson/include")
|
||||
include_directories(SYSTEM ${RAPIDJSON_INCLUDE_DIR})
|
||||
|
||||
set(MICROTAR_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/microtar/src")
|
||||
include_directories(SYSTEM ${MICROTAR_INCLUDE_DIR})
|
||||
|
||||
add_library(MICROTAR OBJECT "${CMAKE_CURRENT_SOURCE_DIR}/third_party/microtar/src/microtar.c")
|
||||
set_property(TARGET MICROTAR PROPERTY POSITION_INDEPENDENT_CODE ON)
|
||||
|
||||
target_no_warning(MICROTAR unused-variable)
|
||||
target_no_warning(MICROTAR format)
|
||||
|
||||
set(PROTOZERO_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/protozero/include")
|
||||
include_directories(SYSTEM ${PROTOZERO_INCLUDE_DIR})
|
||||
|
||||
set(VTZERO_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/vtzero/include")
|
||||
include_directories(SYSTEM ${VTZERO_INCLUDE_DIR})
|
||||
|
||||
set(FLATBUFFERS_BUILD_TESTS OFF CACHE BOOL "Disable the build of Flatbuffers tests and samples.")
|
||||
set(FLATBUFFERS_SRC_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/flatbuffers")
|
||||
set(FLATBUFFERS_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/flatbuffers/include")
|
||||
include_directories(SYSTEM ${FLATBUFFERS_INCLUDE_DIR})
|
||||
add_subdirectory(${FLATBUFFERS_SRC_DIR}
|
||||
${CMAKE_CURRENT_BINARY_DIR}/flatbuffers-build
|
||||
EXCLUDE_FROM_ALL)
|
||||
|
||||
set(FMT_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/fmt/include")
|
||||
add_compile_definitions(FMT_HEADER_ONLY)
|
||||
include_directories(SYSTEM ${FMT_INCLUDE_DIR})
|
||||
|
||||
|
||||
# see https://stackoverflow.com/questions/70898030/boost-link-error-using-conan-find-package
|
||||
if (MSVC)
|
||||
add_definitions(-DBOOST_ALL_NO_LIB)
|
||||
endif()
|
||||
|
||||
if(ENABLE_CONAN)
|
||||
message(STATUS "Installing dependencies via Conan")
|
||||
|
||||
# Conan will generate Find*.cmake files to build directory, so we use them with the highest priority
|
||||
list(INSERT CMAKE_MODULE_PATH 0 ${CMAKE_BINARY_DIR})
|
||||
list(INSERT CMAKE_PREFIX_PATH 0 ${CMAKE_BINARY_DIR})
|
||||
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/cmake/conan.cmake)
|
||||
|
||||
conan_check(REQUIRED)
|
||||
|
||||
set(CONAN_BOOST_VERSION "1.85.0@#14265ec82b25d91305bbb3b30d3357f8")
|
||||
set(CONAN_BZIP2_VERSION "1.0.8@#d1b2d5816f25865acf978501dff1f897")
|
||||
set(CONAN_EXPAT_VERSION "2.6.2@#2d385d0d50eb5561006a7ff9e356656b")
|
||||
set(CONAN_LUA_VERSION "5.4.6@#658d6089093cf01992c2737ab2e96763")
|
||||
set(CONAN_TBB_VERSION "2021.12.0@#e56e5b44be8d690530585dd3634c0106")
|
||||
|
||||
set(CONAN_SYSTEM_INCLUDES ON)
|
||||
|
||||
|
||||
set(CONAN_ARGS
|
||||
REQUIRES
|
||||
"boost/${CONAN_BOOST_VERSION}"
|
||||
"bzip2/${CONAN_BZIP2_VERSION}"
|
||||
"expat/${CONAN_EXPAT_VERSION}"
|
||||
"lua/${CONAN_LUA_VERSION}"
|
||||
"onetbb/${CONAN_TBB_VERSION}"
|
||||
BASIC_SETUP
|
||||
GENERATORS cmake_find_package json # json generator generates a conanbuildinfo.json in the build folder so (non-CMake) projects can easily parse OSRM's dependencies
|
||||
KEEP_RPATHS
|
||||
NO_OUTPUT_DIRS
|
||||
OPTIONS boost:without_stacktrace=True # Apple Silicon cross-compilation fails without it
|
||||
BUILD missing
|
||||
)
|
||||
|
||||
# Enable revisions in the conan config
|
||||
execute_process(COMMAND ${CONAN_CMD} config set general.revisions_enabled=1 RESULT_VARIABLE RET_CODE)
|
||||
if(NOT "${RET_CODE}" STREQUAL "0")
|
||||
message(FATAL_ERROR "Error setting revisions for Conan: '${RET_CODE}'")
|
||||
find_package(Lua52)
|
||||
if(NOT LUA52_FOUND)
|
||||
find_package(Lua51 REQUIRED)
|
||||
if(NOT APPLE)
|
||||
find_package(LuaJIT 5.1)
|
||||
endif()
|
||||
|
||||
# explicitly say Conan to use x86 dependencies if build for x86 platforms (https://github.com/conan-io/cmake-conan/issues/141)
|
||||
if(NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
conan_cmake_run("${CONAN_ARGS};ARCH;x86")
|
||||
# cross-compilation for Apple Silicon
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64" AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL "x86_64")
|
||||
conan_cmake_run("${CONAN_ARGS};ARCH;armv8")
|
||||
else()
|
||||
conan_cmake_run("${CONAN_ARGS}")
|
||||
endif()
|
||||
|
||||
add_dependency_includes(${CONAN_INCLUDE_DIRS_BOOST})
|
||||
add_dependency_includes(${CONAN_INCLUDE_DIRS_BZIP2})
|
||||
add_dependency_includes(${CONAN_INCLUDE_DIRS_EXPAT})
|
||||
add_dependency_includes(${CONAN_INCLUDE_DIRS_LUA})
|
||||
add_dependency_includes(${CONAN_INCLUDE_DIRS_TBB})
|
||||
|
||||
set(Boost_USE_STATIC_LIBS ON)
|
||||
find_package(Boost REQUIRED COMPONENTS ${BOOST_COMPONENTS})
|
||||
set(Boost_DATE_TIME_LIBRARY "${Boost_date_time_LIB_TARGETS}")
|
||||
set(Boost_PROGRAM_OPTIONS_LIBRARY "${Boost_program_options_LIB_TARGETS}")
|
||||
set(Boost_IOSTREAMS_LIBRARY "${Boost_iostreams_LIB_TARGETS}")
|
||||
set(Boost_THREAD_LIBRARY "${Boost_thread_LIB_TARGETS}")
|
||||
set(Boost_ZLIB_LIBRARY "${Boost_zlib_LIB_TARGETS}")
|
||||
set(Boost_UNIT_TEST_FRAMEWORK_LIBRARY "${Boost_unit_test_framework_LIB_TARGETS}")
|
||||
|
||||
|
||||
find_package(BZip2 REQUIRED)
|
||||
find_package(EXPAT REQUIRED)
|
||||
find_package(lua REQUIRED)
|
||||
set(LUA_LIBRARIES ${lua_LIBRARIES})
|
||||
|
||||
find_package(TBB REQUIRED)
|
||||
|
||||
|
||||
# note: we avoid calling find_package(Osmium ...) here to ensure that the
|
||||
# expat and bzip2 are used from conan rather than the system
|
||||
include_directories(SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include)
|
||||
else()
|
||||
find_package(Boost 1.70 REQUIRED COMPONENTS ${BOOST_COMPONENTS})
|
||||
add_dependency_includes(${Boost_INCLUDE_DIRS})
|
||||
|
||||
find_package(TBB REQUIRED)
|
||||
add_dependency_includes(${TBB_INCLUDE_DIR})
|
||||
set(TBB_LIBRARIES TBB::tbb)
|
||||
|
||||
find_package(EXPAT REQUIRED)
|
||||
add_dependency_includes(${EXPAT_INCLUDE_DIRS})
|
||||
|
||||
find_package(BZip2 REQUIRED)
|
||||
add_dependency_includes(${BZIP2_INCLUDE_DIR})
|
||||
|
||||
find_package(Lua 5.2 REQUIRED)
|
||||
if (LUA_FOUND)
|
||||
message(STATUS "Using Lua ${LUA_VERSION_STRING}")
|
||||
endif()
|
||||
|
||||
add_dependency_includes(${LUA_INCLUDE_DIR})
|
||||
|
||||
# add a target to generate API documentation with Doxygen
|
||||
find_package(Doxygen)
|
||||
if(DOXYGEN_FOUND)
|
||||
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
|
||||
add_custom_target(doc
|
||||
${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
|
||||
COMMENT "Generating API documentation with Doxygen" VERBATIM
|
||||
)
|
||||
endif()
|
||||
|
||||
# note libosmium depends on expat and bzip2
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/cmake")
|
||||
if(NOT OSMIUM_INCLUDE_DIR)
|
||||
set(OSMIUM_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include")
|
||||
endif()
|
||||
find_package(Osmium REQUIRED COMPONENTS io)
|
||||
include_directories(SYSTEM ${OSMIUM_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
# prefix compilation with ccache by default if available and on clang or gcc
|
||||
if(ENABLE_CCACHE AND (CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU"))
|
||||
find_program(CCACHE_FOUND ccache)
|
||||
if(CCACHE_FOUND)
|
||||
message(STATUS "Using ccache to speed up incremental builds")
|
||||
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
|
||||
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
|
||||
if(NOT APPLE)
|
||||
find_package(LuaJIT 5.2)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# even with conan builds we want to link to system zlib
|
||||
# to ensure that osrm binaries play well with other binaries like nodejs
|
||||
if( LUAJIT_FOUND )
|
||||
target_link_libraries(osrm-extract ${LUAJIT_LIBRARIES})
|
||||
target_link_libraries(osrm-prepare ${LUAJIT_LIBRARIES})
|
||||
else()
|
||||
target_link_libraries(osrm-extract ${LUA_LIBRARY})
|
||||
target_link_libraries(osrm-prepare ${LUA_LIBRARY})
|
||||
endif()
|
||||
include_directories(${LUA_INCLUDE_DIR})
|
||||
|
||||
find_package(LibXml2 REQUIRED)
|
||||
include_directories(${LIBXML2_INCLUDE_DIR})
|
||||
target_link_libraries(osrm-extract ${LIBXML2_LIBRARIES})
|
||||
|
||||
find_package( Luabind REQUIRED )
|
||||
include_directories(${LUABIND_INCLUDE_DIR})
|
||||
target_link_libraries(osrm-extract ${LUABIND_LIBRARY})
|
||||
target_link_libraries(osrm-prepare ${LUABIND_LIBRARY})
|
||||
|
||||
find_package( STXXL REQUIRED )
|
||||
include_directories(${STXXL_INCLUDE_DIR})
|
||||
target_link_libraries(OSRM ${STXXL_LIBRARY})
|
||||
target_link_libraries(osrm-extract ${STXXL_LIBRARY})
|
||||
target_link_libraries(osrm-prepare ${STXXL_LIBRARY})
|
||||
|
||||
find_package( OSMPBF REQUIRED )
|
||||
include_directories(${OSMPBF_INCLUDE_DIR})
|
||||
target_link_libraries(osrm-extract ${OSMPBF_LIBRARY})
|
||||
target_link_libraries(osrm-prepare ${OSMPBF_LIBRARY})
|
||||
|
||||
find_package(Protobuf REQUIRED)
|
||||
include_directories(${PROTOBUF_INCLUDE_DIRS})
|
||||
target_link_libraries(osrm-extract ${PROTOBUF_LIBRARY})
|
||||
target_link_libraries(osrm-prepare ${PROTOBUF_LIBRARY})
|
||||
|
||||
find_package(BZip2 REQUIRED)
|
||||
include_directories(${BZIP_INCLUDE_DIRS})
|
||||
target_link_libraries(osrm-extract ${BZIP2_LIBRARIES})
|
||||
|
||||
find_package(ZLIB REQUIRED)
|
||||
add_dependency_includes(${ZLIB_INCLUDE_DIRS})
|
||||
include_directories(${ZLIB_INCLUDE_DIRS})
|
||||
target_link_libraries(osrm-extract ${ZLIB_LIBRARY})
|
||||
target_link_libraries(osrm-routed ${ZLIB_LIBRARY})
|
||||
|
||||
add_dependency_defines(-DBOOST_SPIRIT_USE_PHOENIX_V3)
|
||||
add_dependency_defines(-DBOOST_RESULT_OF_USE_DECLTYPE)
|
||||
|
||||
# Workaround for https://github.com/boostorg/phoenix/issues/111
|
||||
add_dependency_defines(-DBOOST_PHOENIX_STL_TUPLE_H_)
|
||||
|
||||
add_definitions(${OSRM_DEFINES})
|
||||
include_directories(SYSTEM ${DEPENDENCIES_INCLUDE_DIRS})
|
||||
|
||||
set(BOOST_BASE_LIBRARIES
|
||||
${Boost_DATE_TIME_LIBRARY}
|
||||
${Boost_IOSTREAMS_LIBRARY}
|
||||
${Boost_THREAD_LIBRARY})
|
||||
|
||||
set(BOOST_ENGINE_LIBRARIES
|
||||
${Boost_ZLIB_LIBRARY}
|
||||
${Boost_REGEX_LIBRARY}
|
||||
${BOOST_BASE_LIBRARIES})
|
||||
|
||||
# Binaries
|
||||
target_link_libraries(osrm-datastore osrm_store ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
target_link_libraries(osrm-extract osrm_extract ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
target_link_libraries(osrm-partition osrm_partition ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
target_link_libraries(osrm-customize osrm_customize ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
target_link_libraries(osrm-contract osrm_contract ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
if (BUILD_ROUTED)
|
||||
target_link_libraries(osrm-routed osrm ${Boost_PROGRAM_OPTIONS_LIBRARY} ${OPTIONAL_SOCKET_LIBS} ${ZLIB_LIBRARY})
|
||||
endif()
|
||||
|
||||
set(EXTRACTOR_LIBRARIES
|
||||
${BZIP2_LIBRARIES}
|
||||
${BOOST_BASE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${EXPAT_LIBRARIES}
|
||||
${LUA_LIBRARIES}
|
||||
${OSMIUM_LIBRARIES}
|
||||
${TBB_LIBRARIES}
|
||||
${ZLIB_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES})
|
||||
set(GUIDANCE_LIBRARIES
|
||||
${BOOST_BASE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${LUA_LIBRARIES}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_COVERAGE_LIBRARIES})
|
||||
set(PARTITIONER_LIBRARIES
|
||||
${BOOST_ENGINE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_RT_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES}
|
||||
${ZLIB_LIBRARY})
|
||||
set(CUSTOMIZER_LIBRARIES
|
||||
${BOOST_ENGINE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_RT_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES})
|
||||
set(UPDATER_LIBRARIES
|
||||
${BOOST_BASE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_RT_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES}
|
||||
${ZLIB_LIBRARY})
|
||||
set(CONTRACTOR_LIBRARIES
|
||||
${BOOST_BASE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${LUA_LIBRARIES}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_RT_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES})
|
||||
set(ENGINE_LIBRARIES
|
||||
${BOOST_ENGINE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_RT_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES}
|
||||
${ZLIB_LIBRARY})
|
||||
set(STORAGE_LIBRARIES
|
||||
${BOOST_BASE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_RT_LIBRARY}
|
||||
${MAYBE_COVERAGE_LIBRARIES})
|
||||
set(UTIL_LIBRARIES
|
||||
${BOOST_BASE_LIBRARIES}
|
||||
${CMAKE_THREAD_LIBS_INIT}
|
||||
${TBB_LIBRARIES}
|
||||
${MAYBE_COVERAGE_LIBRARIES}
|
||||
${ZLIB_LIBRARY})
|
||||
|
||||
# Libraries
|
||||
target_link_libraries(osrm ${ENGINE_LIBRARIES})
|
||||
target_link_libraries(osrm_update ${UPDATER_LIBRARIES})
|
||||
target_link_libraries(osrm_contract ${CONTRACTOR_LIBRARIES} osrm_update osrm_store)
|
||||
target_link_libraries(osrm_extract osrm_guidance ${EXTRACTOR_LIBRARIES})
|
||||
target_link_libraries(osrm_partition ${PARTITIONER_LIBRARIES})
|
||||
target_link_libraries(osrm_customize ${CUSTOMIZER_LIBRARIES} osrm_update osrm_store)
|
||||
target_link_libraries(osrm_store ${STORAGE_LIBRARIES})
|
||||
|
||||
# BUILD_COMPONENTS
|
||||
add_executable(osrm-components src/tools/components.cpp $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
|
||||
target_link_libraries(osrm-components ${TBB_LIBRARIES} ${BOOST_BASE_LIBRARIES} ${UTIL_LIBRARIES})
|
||||
install(TARGETS osrm-components DESTINATION bin)
|
||||
|
||||
if(BUILD_TOOLS)
|
||||
if(WITH_TOOLS)
|
||||
message(STATUS "Activating OSRM internal tools")
|
||||
add_executable(osrm-io-benchmark src/tools/io-benchmark.cpp $<TARGET_OBJECTS:UTIL>)
|
||||
target_link_libraries(osrm-io-benchmark ${BOOST_BASE_LIBRARIES} ${TBB_LIBRARIES})
|
||||
|
||||
install(TARGETS osrm-io-benchmark DESTINATION bin)
|
||||
find_package(GDAL)
|
||||
if(GDAL_FOUND)
|
||||
add_executable(osrm-components Tools/componentAnalysis.cpp)
|
||||
include_directories(${GDAL_INCLUDE_DIR})
|
||||
target_link_libraries(
|
||||
osrm-components
|
||||
${GDAL_LIBRARIES} ${Boost_LIBRARIES} UUID GITDESCRIPTION COORDLIB)
|
||||
endif()
|
||||
add_executable(osrm-cli Tools/simpleclient.cpp)
|
||||
target_link_libraries(osrm-cli ${Boost_LIBRARIES} OSRM UUID GITDESCRIPTION)
|
||||
add_executable(osrm-io-benchmark Tools/io-benchmark.cpp)
|
||||
target_link_libraries(osrm-io-benchmark ${Boost_LIBRARIES} GITDESCRIPTION)
|
||||
add_executable(osrm-unlock-all Tools/unlock_all_mutexes.cpp)
|
||||
target_link_libraries(osrm-unlock-all ${Boost_LIBRARIES} GITDESCRIPTION)
|
||||
if(UNIX AND NOT APPLE)
|
||||
target_link_libraries(osrm-unlock-all rt)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (ENABLE_ASSERTIONS)
|
||||
message(STATUS "Enabling assertions")
|
||||
add_definitions(-DBOOST_ENABLE_ASSERT_HANDLER)
|
||||
endif()
|
||||
|
||||
if (ENABLE_DEBUG_LOGGING)
|
||||
message(STATUS "Enabling debug logging")
|
||||
add_definitions(-DENABLE_DEBUG_LOGGING)
|
||||
endif()
|
||||
file(GLOB InstallGlob Include/osrm/*.h Library/OSRM.h)
|
||||
|
||||
# Add RPATH info to executables so that when they are run after being installed
|
||||
# (i.e., from /usr/local/bin/) the linker can find library dependencies. For
|
||||
# more info see http://www.cmake.org/Wiki/CMake_RPATH_handling
|
||||
set_property(TARGET osrm-extract PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
set_property(TARGET osrm-partition PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
set_property(TARGET osrm-contract PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
set_property(TARGET osrm-prepare PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
set_property(TARGET osrm-datastore PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
if (BUILD_ROUTED)
|
||||
set_property(TARGET osrm-routed PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
endif()
|
||||
set_property(TARGET osrm-routed PROPERTY INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
|
||||
file(GLOB FlatbuffersGlob third_party/flatbuffers/include/flatbuffers/*.h)
|
||||
file(GLOB LibraryGlob include/osrm/*.hpp)
|
||||
file(GLOB ParametersGlob include/engine/api/*_parameters.hpp)
|
||||
set(ApiHeader include/engine/api/base_result.hpp)
|
||||
set(EngineHeader include/engine/status.hpp include/engine/engine_config.hpp include/engine/hint.hpp include/engine/bearing.hpp include/engine/approach.hpp include/engine/phantom_node.hpp)
|
||||
set(UtilHeader include/util/coordinate.hpp include/util/json_container.hpp include/util/typedefs.hpp include/util/alias.hpp include/util/exception.hpp include/util/bearing.hpp)
|
||||
set(ExtractorHeader include/extractor/extractor.hpp include/storage/io_config.hpp include/extractor/extractor_config.hpp include/extractor/travel_mode.hpp)
|
||||
set(PartitionerHeader include/partitioner/partitioner.hpp include/partitioner/partitioner_config.hpp)
|
||||
set(ContractorHeader include/contractor/contractor.hpp include/contractor/contractor_config.hpp)
|
||||
set(StorageHeader include/storage/storage.hpp include/storage/io_config.hpp include/storage/storage_config.hpp)
|
||||
install(FILES ${EngineHeader} DESTINATION include/osrm/engine)
|
||||
install(FILES ${UtilHeader} DESTINATION include/osrm/util)
|
||||
install(FILES ${StorageHeader} DESTINATION include/osrm/storage)
|
||||
install(FILES ${ExtractorHeader} DESTINATION include/osrm/extractor)
|
||||
install(FILES ${PartitionerHeader} DESTINATION include/osrm/partitioner)
|
||||
install(FILES ${ContractorHeader} DESTINATION include/osrm/contractor)
|
||||
install(FILES ${LibraryGlob} DESTINATION include/osrm)
|
||||
install(FILES ${ParametersGlob} DESTINATION include/osrm/engine/api)
|
||||
install(FILES ${ApiHeader} DESTINATION include/osrm/engine/api)
|
||||
install(FILES ${FlatbuffersGlob} DESTINATION include/flatbuffers)
|
||||
install(FILES ${InstallGlob} DESTINATION include/osrm)
|
||||
install(TARGETS osrm-extract DESTINATION bin)
|
||||
install(TARGETS osrm-partition DESTINATION bin)
|
||||
install(TARGETS osrm-customize DESTINATION bin)
|
||||
install(TARGETS osrm-contract DESTINATION bin)
|
||||
install(TARGETS osrm-prepare DESTINATION bin)
|
||||
install(TARGETS osrm-datastore DESTINATION bin)
|
||||
if (BUILD_ROUTED)
|
||||
install(TARGETS osrm-routed DESTINATION bin)
|
||||
endif()
|
||||
install(TARGETS osrm DESTINATION lib)
|
||||
install(TARGETS osrm_extract DESTINATION lib)
|
||||
install(TARGETS osrm_partition DESTINATION lib)
|
||||
install(TARGETS osrm_customize DESTINATION lib)
|
||||
install(TARGETS osrm_update DESTINATION lib)
|
||||
install(TARGETS osrm_contract DESTINATION lib)
|
||||
install(TARGETS osrm_store DESTINATION lib)
|
||||
install(TARGETS osrm_guidance DESTINATION lib)
|
||||
|
||||
|
||||
# Install profiles and support library to /usr/local/share/osrm/profiles by default
|
||||
set(DefaultProfilesDir profiles)
|
||||
install(DIRECTORY ${DefaultProfilesDir} DESTINATION share/osrm)
|
||||
|
||||
# Install data geojson files to /usr/local/share/osrm/data by default
|
||||
set(DefaultProfilesDir data)
|
||||
install(DIRECTORY ${DefaultProfilesDir} DESTINATION share/osrm)
|
||||
|
||||
# Setup exporting variables for pkgconfig and subproject
|
||||
#
|
||||
|
||||
if(BUILD_PACKAGE)
|
||||
include(CPackConfig)
|
||||
include(CPack)
|
||||
endif()
|
||||
|
||||
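# JOIN(<values> <glue> <output>): join the elements of the <values> list into a single string
# separated by <glue> and store it in <output> in the parent scope.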
function(JOIN VALUES GLUE OUTPUT)
|
||||
string (REPLACE ";" "${GLUE}" _TMP_STR "${VALUES}")
|
||||
set (${OUTPUT} "${_TMP_STR}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
JOIN("${OSRM_DEFINES}" " " TMP_OSRM_DEFINES)
|
||||
set(LibOSRM_CXXFLAGS "${OSRM_CXXFLAGS} ${TMP_OSRM_DEFINES}")
|
||||
set(LibOSRM_LDFLAGS "${OSRM_LDFLAGS}")
|
||||
|
||||
if(BUILD_AS_SUBPROJECT)
|
||||
set(LibOSRM_CXXFLAGS "${LibOSRM_CXXFLAGS}" PARENT_SCOPE)
|
||||
set(LibOSRM_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/include" PARENT_SCOPE)
|
||||
set(LibOSRM_LIBRARY_DIR "${CMAKE_CURRENT_BINARY_DIR}" PARENT_SCOPE)
|
||||
set(LibOSRM_LIBRARIES "osrm" PARENT_SCOPE)
|
||||
set(LibOSRM_DEPENDENT_LIBRARIES "${ENGINE_LIBRARIES}" PARENT_SCOPE)
|
||||
set(LibOSRM_INCLUDE_DIRS "${CMAKE_CURRENT_SOURCE_DIR}/include"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/include/osrm"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/third_party"
|
||||
"${DEPENDENCIES_INCLUDE_DIRS}" PARENT_SCOPE)
|
||||
set(LibOSRM_LIBRARY_DIRS "${LibOSRM_LIBRARY_DIR}" PARENT_SCOPE)
|
||||
endif()
|
||||
|
||||
# pkgconfig defines
|
||||
set(PKGCONFIG_OSRM_CXXFLAGS "${LibOSRM_CXXFLAGS}")
|
||||
set(PKGCONFIG_OSRM_LDFLAGS "${LibOSRM_LDFLAGS}")
|
||||
set(PKGCONFIG_LIBRARY_DIR "${CMAKE_INSTALL_PREFIX}/lib")
|
||||
set(PKGCONFIG_INCLUDE_DIR "${CMAKE_INSTALL_PREFIX}/include")
|
||||
|
||||
list(APPEND DEPENDENCIES_INCLUDE_DIRS "${PKGCONFIG_INCLUDE_DIR}")
|
||||
list(APPEND DEPENDENCIES_INCLUDE_DIRS "${PKGCONFIG_INCLUDE_DIR}/osrm")
|
||||
JOIN("-I${DEPENDENCIES_INCLUDE_DIRS}" " -I" PKGCONFIG_OSRM_INCLUDE_FLAGS)
|
||||
|
||||
# Boost uses imported targets, we need to use a generator expression to extract
|
||||
# the link libraries to be written to the pkg-config file.
|
||||
# Conan & TBB define dependencies as CMake targets too, that's why we do the same for them.
|
||||
foreach(engine_lib ${ENGINE_LIBRARIES})
|
||||
if("${engine_lib}" MATCHES "^Boost.*" OR "${engine_lib}" MATCHES "^CONAN_LIB.*" OR "${engine_lib}" MATCHES "^TBB.*")
|
||||
list(APPEND PKGCONFIG_DEPENDENT_LIBRARIES "$<TARGET_LINKER_FILE:${engine_lib}>")
|
||||
else()
|
||||
list(APPEND PKGCONFIG_DEPENDENT_LIBRARIES "${engine_lib}")
|
||||
endif()
|
||||
endforeach(engine_lib)
|
||||
JOIN("${PKGCONFIG_DEPENDENT_LIBRARIES}" " " PKGCONFIG_OSRM_DEPENDENT_LIBRARIES)
|
||||
|
||||
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/pkgconfig.in pkgconfig.configured @ONLY)
|
||||
file(GENERATE
|
||||
OUTPUT
|
||||
${PROJECT_BINARY_DIR}/libosrm.pc
|
||||
INPUT
|
||||
${PROJECT_BINARY_DIR}/pkgconfig.configured)
|
||||
|
||||
install(FILES ${PROJECT_BINARY_DIR}/libosrm.pc DESTINATION ${PKGCONFIG_LIBRARY_DIR}/pkgconfig)
|
||||
|
||||
# uninstall target
|
||||
configure_file(
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/cmake/cmake_uninstall.cmake"
|
||||
IMMEDIATE @ONLY)
|
||||
|
||||
add_custom_target(uninstall
|
||||
COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake/cmake_uninstall.cmake)
|
||||
|
||||
|
||||
# Modular build system: each directory registered here provides its own CMakeLists.txt
|
||||
add_subdirectory(unit_tests)
|
||||
add_subdirectory(src/benchmarks)
|
||||
|
||||
if (ENABLE_NODE_BINDINGS)
|
||||
add_subdirectory(src/nodejs)
|
||||
endif()
|
||||
|
||||
|
||||
if (ENABLE_FUZZING)
|
||||
# Requires libosrm being built with sanitizers; make configurable and default to ubsan
|
||||
set(FUZZ_SANITIZER "undefined" CACHE STRING "Sanitizer to be used for Fuzz testing")
|
||||
set_property(CACHE FUZZ_SANITIZER PROPERTY STRINGS "undefined" "integer" "address" "memory" "thread" "leak")
|
||||
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize-coverage=edge,indirect-calls,8bit-counters -fsanitize=address")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=address")
|
||||
set(OSRM_LDFLAGS "${OSRM_LDFLAGS} -fsanitize=address")
|
||||
|
||||
message(STATUS "Using -fsanitize=${FUZZ_SANITIZER} for Fuzz testing")
|
||||
|
||||
add_subdirectory(fuzz)
|
||||
endif ()
|
||||
|
||||
# add headers sanity check target that includes all headers independently
|
||||
set(check_headers_dir "${PROJECT_BINARY_DIR}/check-headers")
|
||||
file(GLOB_RECURSE headers_to_check
|
||||
${PROJECT_BINARY_DIR}/*.hpp
|
||||
${PROJECT_SOURCE_DIR}/include/*.hpp)
|
||||
foreach(header ${headers_to_check})
|
||||
if ("${header}" MATCHES ".*/include/nodejs/.*")
|
||||
# we do not check NodeJS bindings headers
|
||||
continue()
|
||||
endif()
|
||||
get_filename_component(filename ${header} NAME_WE)
|
||||
set(filename "${check_headers_dir}/${filename}.cpp")
|
||||
if (NOT EXISTS ${filename})
|
||||
file(WRITE ${filename} "#include \"${header}\"\n")
|
||||
endif()
|
||||
list(APPEND sources ${filename})
|
||||
endforeach()
|
||||
add_library(check-headers STATIC EXCLUDE_FROM_ALL ${sources})
|
||||
set_target_properties(check-headers PROPERTIES ARCHIVE_OUTPUT_DIRECTORY ${check_headers_dir})
|
||||
install(TARGETS osrm-routed DESTINATION bin)
|
||||
install(TARGETS OSRM DESTINATION lib)
|
||||
list(GET Boost_LIBRARIES 1 BOOST_LIBRARY_FIRST)
|
||||
get_filename_component(BOOST_LIBRARY_LISTING "${BOOST_LIBRARY_FIRST}" PATH)
|
||||
set(BOOST_LIBRARY_LISTING "-L${BOOST_LIBRARY_LISTING}")
|
||||
foreach (lib ${Boost_LIBRARIES})
|
||||
get_filename_component(BOOST_LIBRARY_NAME "${lib}" NAME_WE)
|
||||
string(REPLACE "lib" "" BOOST_LIBRARY_NAME ${BOOST_LIBRARY_NAME})
|
||||
set(BOOST_LIBRARY_LISTING "${BOOST_LIBRARY_LISTING} -l${BOOST_LIBRARY_NAME}")
|
||||
endforeach ()
|
||||
|
||||
configure_file(${CMAKE_SOURCE_DIR}/cmake/pkgconfig.in libosrm.pc @ONLY)
|
||||
install(FILES ${PROJECT_BINARY_DIR}/libosrm.pc DESTINATION lib/pkgconfig)
|
||||
|
||||
@ -1,3 +0,0 @@
# Code of conduct

Everyone is invited to participate in Project OSRM’s open source projects and public discussions: we want to create a welcoming and friendly environment. Harassment of participants or other unethical and unprofessional behavior will not be tolerated in our spaces. The [Contributor Covenant](http://contributor-covenant.org) applies to all projects under the Project-OSRM organization and we ask that you please read [the full text](http://contributor-covenant.org/version/1/2/0/).

@ -1,81 +0,0 @@
# Everyone

Please take some time to review our [code of conduct](CODE-OF-CONDUCT.md) to help guide your interactions with others on this project.

# User

Before you open a new issue, please search for older ones that cover the same issue.
In general "me too" comments/issues are frowned upon.
You can add a :+1: emoji reaction to the issue if you want to express interest in this.

# Developer

We use `clang-format` version `15` to consistently format the code base. There is a helper script under `scripts/format.sh`.
The format is automatically checked by the `mason-linux-release` job of a Travis CI build.
To save development time, a local hook `.git/hooks/pre-push`

```
#!/bin/sh

remote="$1"
if [ x"$remote" = xorigin ] ; then
    if [ $(git rev-parse --abbrev-ref HEAD) = master ] ; then
        echo "Rejected push to $remote/master" ; exit 1
    fi

    ./scripts/format.sh && ./scripts/error_on_dirty.sh
    if [ $? -ne 0 ] ; then
        echo "Unstaged format changes" ; exit 1
    fi
fi
```

can check the code format, modify the local repository, and reject the push if there are unstaged formatting changes.
The `pre-push` hook also rejects direct pushes to `origin/master`.

⚠️ `scripts/format.sh` checks all local files that match the `*.cpp` or `*.hpp` patterns.
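
To install the hook locally you can, as a minimal sketch, copy the snippet above into `.git/hooks/pre-push` and mark it executable (the heredoc body below is only a placeholder for the hook shown above):

```
# run from the repository root
cat > .git/hooks/pre-push <<'EOF'
#!/bin/sh
# paste the pre-push hook from the snippet above here
EOF
chmod +x .git/hooks/pre-push
```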

In general, changes that affect the API and/or increase the memory consumption need to be discussed first.
Often we don't include changes that would increase the memory consumption a lot if they are not generally usable (elevation data is a good example).

## Pull Request

Every pull request that changes the API needs to update the docs in `docs/http.md` and add an entry to `CHANGELOG.md`.
Breaking changes need to have a BREAKING prefix. See the [releasing documentation](docs/releasing.md) on how this affects the version.

Early feedback is also important.
You will see that a lot of the PRs have tags like `[not ready]` or `[wip]`.
We like to open PRs as soon as we start working on something to make it visible to the rest of the team.
If your work is going in entirely the wrong direction, there is a good chance someone will pick up on this before it is too late.
Everyone is encouraged to read PRs of other people and give feedback.

For every significant code change we require a pull request review before it is merged.
If your pull request modifies the API, this needs to be signed off by a team discussion.
This means you will need to find another member of the team with commit access and request a review of your pull request.

Once your pull request is reviewed you can merge it! If you don't have commit access, ping someone who has commit access.
If you do have commit access, there are in general two accepted styles of merging:

1. Make sure the branch is up to date with `master`. Run `git rebase master` to find out.
2. Once that is ensured you can either:
   - Click the nice green merge button (for a non-fast-forward merge)
   - Merge by hand using a fast-forward merge (see the sketch below)

Which style you use is a matter of personal preference. In general fast-forward merges are recommended because they create a sequential history that is easier to understand.
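
A minimal sketch of the fast-forward variant, assuming a hypothetical pull request branch named `my-feature` that only adds commits on top of `master`:

```
git checkout my-feature
git rebase master                 # make sure the branch sits directly on top of master

git checkout master
git merge --ff-only my-feature    # fast-forward, no merge commit is created
# then push master as usual
```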

# Maintainer

## Doing a release

There is an in-depth guide on how to push out a release once it is ready [here](docs/releasing.md).

## The API

Changes to the API need to be discussed and signed off by the team. Breaking changes even more so than additive changes.

## Milestones

Whether a pull request or an issue belongs to the current or the next milestone depends on the target version number.
Since we use semantic versioning we restrict breaking changes to major releases.
After a Release Candidate is released we usually don't change the API any more unless it is critical.
Bigger code changes after an RC has been released should also be avoided.
|
||||
|
||||
789
Contractor/Contractor.h
Normal file
789
Contractor/Contractor.h
Normal file
@ -0,0 +1,789 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef CONTRACTOR_H_INCLUDED
|
||||
#define CONTRACTOR_H_INCLUDED
|
||||
|
||||
#include "TemporaryStorage.h"
|
||||
#include "../DataStructures/BinaryHeap.h"
|
||||
#include "../DataStructures/DeallocatingVector.h"
|
||||
#include "../DataStructures/DynamicGraph.h"
|
||||
#include "../DataStructures/Percent.h"
|
||||
#include "../DataStructures/XORFastHash.h"
|
||||
#include "../DataStructures/XORFastHashStorage.h"
|
||||
#include "../Util/OpenMPWrapper.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../Util/StringUtil.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
class Contractor {
|
||||
|
||||
private:
|
||||
struct ContractorEdgeData {
|
||||
ContractorEdgeData() :
|
||||
distance(0), id(0), originalEdges(0), shortcut(0), forward(0), backward(0), originalViaNodeID(false) {}
|
||||
ContractorEdgeData( unsigned _distance, unsigned _originalEdges, unsigned _id, bool _shortcut, bool _forward, bool _backward) :
|
||||
distance(_distance), id(_id), originalEdges(std::min((unsigned)1<<28, _originalEdges) ), shortcut(_shortcut), forward(_forward), backward(_backward), originalViaNodeID(false) {}
|
||||
unsigned distance;
|
||||
unsigned id;
|
||||
unsigned originalEdges:28;
|
||||
bool shortcut:1;
|
||||
bool forward:1;
|
||||
bool backward:1;
|
||||
bool originalViaNodeID:1;
|
||||
} data;
|
||||
|
||||
struct _HeapData {
|
||||
short hop;
|
||||
bool target;
|
||||
_HeapData() : hop(0), target(false) {}
|
||||
_HeapData( short h, bool t ) : hop(h), target(t) {}
|
||||
};
|
||||
|
||||
typedef DynamicGraph< ContractorEdgeData > _DynamicGraph;
|
||||
// typedef BinaryHeap< NodeID, NodeID, int, _HeapData, ArrayStorage<NodeID, NodeID> > _Heap;
|
||||
typedef BinaryHeap< NodeID, NodeID, int, _HeapData, XORFastHashStorage<NodeID, NodeID> > _Heap;
|
||||
typedef _DynamicGraph::InputEdge _ContractorEdge;
|
||||
|
||||
struct _ThreadData {
|
||||
_Heap heap;
|
||||
std::vector< _ContractorEdge > insertedEdges;
|
||||
std::vector< NodeID > neighbours;
|
||||
_ThreadData( NodeID nodes ): heap( nodes ) { }
|
||||
};
|
||||
|
||||
struct _PriorityData {
|
||||
int depth;
|
||||
_PriorityData() : depth(0) { }
|
||||
};
|
||||
|
||||
struct _ContractionInformation {
|
||||
int edgesDeleted;
|
||||
int edgesAdded;
|
||||
int originalEdgesDeleted;
|
||||
int originalEdgesAdded;
|
||||
_ContractionInformation() : edgesDeleted(0), edgesAdded(0), originalEdgesDeleted(0), originalEdgesAdded(0) {}
|
||||
};
|
||||
|
||||
struct _RemainingNodeData {
|
||||
_RemainingNodeData() : id (0), isIndependent(false) {}
|
||||
NodeID id:31;
|
||||
bool isIndependent:1;
|
||||
};
|
||||
|
||||
struct _NodePartitionor {
|
||||
inline bool operator()(_RemainingNodeData & nodeData ) const {
|
||||
return !nodeData.isIndependent;
|
||||
}
|
||||
};
|
||||
|
||||
public:
|
||||
|
||||
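// Build the working graph from the input edge list: every input edge is inserted in both
// directions, self-loops are dropped, parallel edges are collapsed to their minimum weight,
// and opposite edges of equal weight are merged into a single bidirectional edge.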
template<class ContainerT >
|
||||
Contractor( int nodes, ContainerT& inputEdges) {
|
||||
std::vector< _ContractorEdge > edges;
|
||||
edges.reserve(inputEdges.size()*2);
|
||||
temp_edge_counter = 0;
|
||||
|
||||
typename ContainerT::deallocation_iterator diter = inputEdges.dbegin();
|
||||
typename ContainerT::deallocation_iterator dend = inputEdges.dend();
|
||||
|
||||
_ContractorEdge newEdge;
|
||||
while(diter!=dend) {
|
||||
newEdge.source = diter->source();
|
||||
newEdge.target = diter->target();
|
||||
newEdge.data = ContractorEdgeData( (std::max)((int)diter->weight(), 1 ), 1, diter->id(), false, diter->isForward(), diter->isBackward());
|
||||
|
||||
BOOST_ASSERT_MSG( newEdge.data.distance > 0, "edge distance < 1" );
|
||||
#ifndef NDEBUG
|
||||
if ( newEdge.data.distance > 24 * 60 * 60 * 10 ) {
|
||||
SimpleLogger().Write(logWARNING) <<
|
||||
"Edge weight large -> " << newEdge.data.distance;
|
||||
}
|
||||
#endif
|
||||
edges.push_back( newEdge );
|
||||
std::swap( newEdge.source, newEdge.target );
|
||||
newEdge.data.forward = diter->isBackward();
|
||||
newEdge.data.backward = diter->isForward();
|
||||
edges.push_back( newEdge );
|
||||
++diter;
|
||||
}
|
||||
//clear input vector and trim the current set of edges with the well-known swap trick
|
||||
inputEdges.clear();
|
||||
sort( edges.begin(), edges.end() );
|
||||
NodeID edge = 0;
|
||||
for ( NodeID i = 0; i < edges.size(); ) {
|
||||
const NodeID source = edges[i].source;
|
||||
const NodeID target = edges[i].target;
|
||||
const NodeID id = edges[i].data.id;
|
||||
//remove self-loops
|
||||
if ( source == target ) {
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
_ContractorEdge forwardEdge;
|
||||
_ContractorEdge backwardEdge;
|
||||
forwardEdge.source = backwardEdge.source = source;
|
||||
forwardEdge.target = backwardEdge.target = target;
|
||||
forwardEdge.data.forward = backwardEdge.data.backward = true;
|
||||
forwardEdge.data.backward = backwardEdge.data.forward = false;
|
||||
forwardEdge.data.shortcut = backwardEdge.data.shortcut = false;
|
||||
forwardEdge.data.id = backwardEdge.data.id = id;
|
||||
forwardEdge.data.originalEdges = backwardEdge.data.originalEdges = 1;
|
||||
forwardEdge.data.distance = backwardEdge.data.distance = std::numeric_limits< int >::max();
|
||||
//remove parallel edges
|
||||
while ( i < edges.size() && edges[i].source == source && edges[i].target == target ) {
|
||||
if ( edges[i].data.forward) {
|
||||
forwardEdge.data.distance = std::min( edges[i].data.distance, forwardEdge.data.distance );
|
||||
}
|
||||
if ( edges[i].data.backward) {
|
||||
backwardEdge.data.distance = std::min( edges[i].data.distance, backwardEdge.data.distance );
|
||||
}
|
||||
++i;
|
||||
}
|
||||
//merge edges (s,t) and (t,s) into bidirectional edge
|
||||
if ( forwardEdge.data.distance == backwardEdge.data.distance ) {
|
||||
if ( (int)forwardEdge.data.distance != std::numeric_limits< int >::max() ) {
|
||||
forwardEdge.data.backward = true;
|
||||
edges[edge++] = forwardEdge;
|
||||
}
|
||||
} else { //insert separate edges
|
||||
if ( ((int)forwardEdge.data.distance) != std::numeric_limits< int >::max() ) {
|
||||
edges[edge++] = forwardEdge;
|
||||
}
|
||||
if ( (int)backwardEdge.data.distance != std::numeric_limits< int >::max() ) {
|
||||
edges[edge++] = backwardEdge;
|
||||
}
|
||||
}
|
||||
}
|
||||
std::cout << "merged " << edges.size() - edge << " edges out of " << edges.size() << std::endl;
|
||||
edges.resize( edge );
|
||||
_graph = boost::make_shared<_DynamicGraph>( nodes, edges );
|
||||
edges.clear();
|
||||
std::vector<_ContractorEdge>().swap(edges);
|
||||
BOOST_ASSERT( 0 == edges.capacity() );
|
||||
// unsigned maxdegree = 0;
|
||||
// NodeID highestNode = 0;
|
||||
//
|
||||
// for(unsigned i = 0; i < _graph->GetNumberOfNodes(); ++i) {
|
||||
// unsigned degree = _graph->EndEdges(i) - _graph->BeginEdges(i);
|
||||
// if(degree > maxdegree) {
|
||||
// maxdegree = degree;
|
||||
// highestNode = i;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// SimpleLogger().Write() << "edges at node with id " << highestNode << " has degree " << maxdegree;
|
||||
// for(unsigned i = _graph->BeginEdges(highestNode); i < _graph->EndEdges(highestNode); ++i) {
|
||||
// SimpleLogger().Write() << " ->(" << highestNode << "," << _graph->GetTarget(i) << "); via: " << _graph->GetEdgeData(i).via;
|
||||
// }
|
||||
|
||||
//Create temporary file
|
||||
|
||||
// GetTemporaryFileName(temporaryEdgeStorageFilename);
|
||||
edge_storage_slot = TemporaryStorage::GetInstance().AllocateSlot();
|
||||
std::cout << "contractor finished initalization" << std::endl;
|
||||
}
|
||||
|
||||
~Contractor() {
|
||||
//Delete temporary file
|
||||
// remove(temporaryEdgeStorageFilename.c_str());
|
||||
TemporaryStorage::GetInstance().DeallocateSlot(edge_storage_slot);
|
||||
}
|
||||
|
||||
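// Run the contraction: repeatedly pick an independent set of nodes, contract those nodes in
// parallel, insert the resulting shortcut edges and update the priorities of the neighbours.
// Once roughly 65% of the nodes are contracted, the remaining graph is flushed, i.e. the
// surviving nodes are renumbered and the graph is rebuilt to reduce memory usage.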
void Run() {
|
||||
const NodeID numberOfNodes = _graph->GetNumberOfNodes();
|
||||
Percent p (numberOfNodes);
|
||||
|
||||
const unsigned maxThreads = omp_get_max_threads();
|
||||
std::vector < _ThreadData* > threadData;
|
||||
for ( unsigned threadNum = 0; threadNum < maxThreads; ++threadNum ) {
|
||||
threadData.push_back( new _ThreadData( numberOfNodes ) );
|
||||
}
|
||||
std::cout << "Contractor is using " << maxThreads << " threads" << std::endl;
|
||||
|
||||
NodeID numberOfContractedNodes = 0;
|
||||
std::vector< _RemainingNodeData > remainingNodes( numberOfNodes );
|
||||
std::vector< float > nodePriority( numberOfNodes );
|
||||
std::vector< _PriorityData > nodeData( numberOfNodes );
|
||||
|
||||
//initialize the variables
|
||||
#pragma omp parallel for schedule ( guided )
|
||||
for ( int x = 0; x < ( int ) numberOfNodes; ++x ) {
|
||||
remainingNodes[x].id = x;
|
||||
}
|
||||
|
||||
std::cout << "initializing elimination PQ ..." << std::flush;
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* data = threadData[omp_get_thread_num()];
|
||||
#pragma omp parallel for schedule ( guided )
|
||||
for ( int x = 0; x < ( int ) numberOfNodes; ++x ) {
|
||||
nodePriority[x] = _Evaluate( data, &nodeData[x], x );
|
||||
}
|
||||
}
|
||||
std::cout << "ok" << std::endl << "preprocessing " << numberOfNodes << " nodes ..." << std::flush;
|
||||
|
||||
bool flushedContractor = false;
|
||||
while ( numberOfNodes > 2 && numberOfContractedNodes < numberOfNodes ) {
|
||||
if(!flushedContractor && (numberOfContractedNodes > (numberOfNodes*0.65) ) ){
|
||||
DeallocatingVector<_ContractorEdge> newSetOfEdges; //this one is not explicitly cleared since it goes out of scope anyway
|
||||
std::cout << " [flush " << numberOfContractedNodes << " nodes] " << std::flush;
|
||||
|
||||
//Delete old heap data to free memory that we need for the coming operations
|
||||
BOOST_FOREACH(_ThreadData * data, threadData) {
|
||||
delete data;
|
||||
}
|
||||
threadData.clear();
|
||||
|
||||
//Create new priority array
|
||||
std::vector<float> newNodePriority(remainingNodes.size());
|
||||
//this map gives the old IDs from the new ones, necessary to get a consistent graph at the end of contraction
|
||||
oldNodeIDFromNewNodeIDMap.resize(remainingNodes.size());
|
||||
//this map gives the new IDs from the old ones, necessary to remap targets from the remaining graph
|
||||
std::vector<NodeID> newNodeIDFromOldNodeIDMap(numberOfNodes, UINT_MAX);
|
||||
|
||||
//build forward and backward renumbering map and remap ids in remainingNodes and Priorities.
|
||||
for(unsigned newNodeID = 0; newNodeID < remainingNodes.size(); ++newNodeID) {
|
||||
//create renumbering maps in both directions
|
||||
oldNodeIDFromNewNodeIDMap[newNodeID] = remainingNodes[newNodeID].id;
|
||||
newNodeIDFromOldNodeIDMap[remainingNodes[newNodeID].id] = newNodeID;
|
||||
newNodePriority[newNodeID] = nodePriority[remainingNodes[newNodeID].id];
|
||||
remainingNodes[newNodeID].id = newNodeID;
|
||||
}
|
||||
TemporaryStorage & tempStorage = TemporaryStorage::GetInstance();
|
||||
//walk over all nodes
|
||||
for(unsigned i = 0; i < _graph->GetNumberOfNodes(); ++i) {
|
||||
const NodeID start = i;
|
||||
for(_DynamicGraph::EdgeIterator currentEdge = _graph->BeginEdges(start); currentEdge < _graph->EndEdges(start); ++currentEdge) {
|
||||
_DynamicGraph::EdgeData & data = _graph->GetEdgeData(currentEdge);
|
||||
const NodeID target = _graph->GetTarget(currentEdge);
|
||||
if(UINT_MAX == newNodeIDFromOldNodeIDMap[i] ){
|
||||
//Save edges of this node w/o renumbering.
|
||||
tempStorage.WriteToSlot(edge_storage_slot, (char*)&start, sizeof(NodeID));
|
||||
tempStorage.WriteToSlot(edge_storage_slot, (char*)&target, sizeof(NodeID));
|
||||
tempStorage.WriteToSlot(edge_storage_slot, (char*)&data, sizeof(_DynamicGraph::EdgeData));
|
||||
++temp_edge_counter;
|
||||
} else {
|
||||
//node is not yet contracted.
|
||||
//add (renumbered) outgoing edges to new DynamicGraph.
|
||||
_ContractorEdge newEdge;
|
||||
newEdge.source = newNodeIDFromOldNodeIDMap[start];
|
||||
newEdge.target = newNodeIDFromOldNodeIDMap[target];
|
||||
newEdge.data = data;
|
||||
newEdge.data.originalViaNodeID = true;
|
||||
BOOST_ASSERT_MSG(
|
||||
UINT_MAX != newNodeIDFromOldNodeIDMap[start],
|
||||
"new start id not resolveable"
|
||||
);
|
||||
BOOST_ASSERT_MSG(
|
||||
UINT_MAX != newNodeIDFromOldNodeIDMap[target],
|
||||
"new target id not resolveable"
|
||||
);
|
||||
newSetOfEdges.push_back(newEdge);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Delete map from old NodeIDs to new ones.
|
||||
std::vector<NodeID>().swap(newNodeIDFromOldNodeIDMap);
|
||||
|
||||
//Replace old priorities array by new one
|
||||
nodePriority.swap(newNodePriority);
|
||||
//Delete old nodePriority vector
|
||||
std::vector<float>().swap(newNodePriority);
|
||||
//old Graph is removed
|
||||
_graph.reset();
|
||||
|
||||
//create new graph
|
||||
std::sort(newSetOfEdges.begin(), newSetOfEdges.end());
|
||||
_graph = boost::make_shared<_DynamicGraph>(remainingNodes.size(), newSetOfEdges);
|
||||
|
||||
newSetOfEdges.clear();
|
||||
flushedContractor = true;
|
||||
|
||||
//INFO: MAKE SURE THIS IS THE LAST OPERATION OF THE FLUSH!
|
||||
//reinitialize heaps and ThreadData objects with appropriate size
|
||||
for ( unsigned threadNum = 0; threadNum < maxThreads; ++threadNum ) {
|
||||
threadData.push_back( new _ThreadData( _graph->GetNumberOfNodes() ) );
|
||||
}
|
||||
}
|
||||
|
||||
const int last = ( int ) remainingNodes.size();
|
||||
#pragma omp parallel
|
||||
{
|
||||
//determine independent node set
|
||||
_ThreadData* const data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided )
|
||||
for ( int i = 0; i < last; ++i ) {
|
||||
const NodeID node = remainingNodes[i].id;
|
||||
remainingNodes[i].isIndependent = _IsIndependent( nodePriority/*, nodeData*/, data, node );
|
||||
}
|
||||
}
|
||||
_NodePartitionor functor;
|
||||
const std::vector < _RemainingNodeData >::const_iterator first = stable_partition( remainingNodes.begin(), remainingNodes.end(), functor );
|
||||
const int firstIndependent = first - remainingNodes.begin();
|
||||
//contract independent nodes
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided ) nowait
|
||||
for ( int position = firstIndependent ; position < last; ++position ) {
|
||||
NodeID x = remainingNodes[position].id;
|
||||
_Contract< false > ( data, x );
|
||||
//nodePriority[x] = -1;
|
||||
}
|
||||
|
||||
std::sort( data->insertedEdges.begin(), data->insertedEdges.end() );
|
||||
}
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided ) nowait
|
||||
for ( int position = firstIndependent ; position < last; ++position ) {
|
||||
NodeID x = remainingNodes[position].id;
|
||||
_DeleteIncomingEdges( data, x );
|
||||
}
|
||||
}
|
||||
//insert new edges
|
||||
for ( unsigned threadNum = 0; threadNum < maxThreads; ++threadNum ) {
|
||||
_ThreadData& data = *threadData[threadNum];
|
||||
BOOST_FOREACH(const _ContractorEdge& edge, data.insertedEdges) {
|
||||
_DynamicGraph::EdgeIterator currentEdgeID = _graph->FindEdge(edge.source, edge.target);
|
||||
if(currentEdgeID < _graph->EndEdges(edge.source) ) {
|
||||
_DynamicGraph::EdgeData & currentEdgeData = _graph->GetEdgeData(currentEdgeID);
|
||||
if( currentEdgeData.shortcut &&
|
||||
edge.data.forward == currentEdgeData.forward &&
|
||||
edge.data.backward == currentEdgeData.backward &&
|
||||
edge.data.distance < currentEdgeData.distance
|
||||
) {
|
||||
// found a duplicate edge with smaller weight, update it.
|
||||
currentEdgeData = edge.data;
|
||||
// currentEdgeData.distance = std::min(currentEdgeData.distance, edge.data.distance);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
_graph->InsertEdge( edge.source, edge.target, edge.data );
|
||||
}
|
||||
data.insertedEdges.clear();
|
||||
}
|
||||
//update priorities
|
||||
#pragma omp parallel
|
||||
{
|
||||
_ThreadData* data = threadData[omp_get_thread_num()];
|
||||
#pragma omp for schedule ( guided ) nowait
|
||||
for ( int position = firstIndependent ; position < last; ++position ) {
|
||||
NodeID x = remainingNodes[position].id;
|
||||
_UpdateNeighbours( nodePriority, nodeData, data, x );
|
||||
}
|
||||
}
|
||||
//remove contracted nodes from the pool
|
||||
numberOfContractedNodes += last - firstIndependent;
|
||||
remainingNodes.resize( firstIndependent );
|
||||
std::vector< _RemainingNodeData>( remainingNodes ).swap( remainingNodes );
|
||||
// unsigned maxdegree = 0;
|
||||
// unsigned avgdegree = 0;
|
||||
// unsigned mindegree = UINT_MAX;
|
||||
// unsigned quaddegree = 0;
|
||||
//
|
||||
// for(unsigned i = 0; i < remainingNodes.size(); ++i) {
|
||||
// unsigned degree = _graph->EndEdges(remainingNodes[i].first) - _graph->BeginEdges(remainingNodes[i].first);
|
||||
// if(degree > maxdegree)
|
||||
// maxdegree = degree;
|
||||
// if(degree < mindegree)
|
||||
// mindegree = degree;
|
||||
//
|
||||
// avgdegree += degree;
|
||||
// quaddegree += (degree*degree);
|
||||
// }
|
||||
//
|
||||
// avgdegree /= std::max((unsigned)1,(unsigned)remainingNodes.size() );
|
||||
// quaddegree /= std::max((unsigned)1,(unsigned)remainingNodes.size() );
|
||||
//
|
||||
// SimpleLogger().Write() << "rest: " << remainingNodes.size() << ", max: " << maxdegree << ", min: " << mindegree << ", avg: " << avgdegree << ", quad: " << quaddegree;
|
||||
|
||||
p.printStatus(numberOfContractedNodes);
|
||||
}
|
||||
BOOST_FOREACH(_ThreadData * data, threadData) {
|
||||
delete data;
|
||||
}
|
||||
threadData.clear();
|
||||
}
|
||||
|
||||
template< class Edge >
|
||||
inline void GetEdges( DeallocatingVector< Edge >& edges ) {
|
||||
Percent p (_graph->GetNumberOfNodes());
|
||||
SimpleLogger().Write() << "Getting edges of minimized graph";
|
||||
NodeID numberOfNodes = _graph->GetNumberOfNodes();
|
||||
if(_graph->GetNumberOfNodes()) {
|
||||
Edge newEdge;
|
||||
for ( NodeID node = 0; node < numberOfNodes; ++node ) {
|
||||
p.printStatus(node);
|
||||
for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ); edge < endEdges; ++edge ) {
|
||||
const NodeID target = _graph->GetTarget( edge );
|
||||
const _DynamicGraph::EdgeData& data = _graph->GetEdgeData( edge );
|
||||
if( !oldNodeIDFromNewNodeIDMap.empty() ) {
|
||||
newEdge.source = oldNodeIDFromNewNodeIDMap[node];
|
||||
newEdge.target = oldNodeIDFromNewNodeIDMap[target];
|
||||
} else {
|
||||
newEdge.source = node;
|
||||
newEdge.target = target;
|
||||
}
|
||||
BOOST_ASSERT_MSG(
|
||||
UINT_MAX != newEdge.source,
|
||||
"Source id invalid"
|
||||
);
|
||||
BOOST_ASSERT_MSG(
|
||||
UINT_MAX != newEdge.target,
|
||||
"Target id invalid"
|
||||
);
|
||||
newEdge.data.distance = data.distance;
|
||||
newEdge.data.shortcut = data.shortcut;
|
||||
if(
|
||||
!data.originalViaNodeID &&
|
||||
!oldNodeIDFromNewNodeIDMap.empty()
|
||||
) {
|
||||
newEdge.data.id = oldNodeIDFromNewNodeIDMap[data.id];
|
||||
} else {
|
||||
newEdge.data.id = data.id;
|
||||
}
|
||||
BOOST_ASSERT_MSG(
|
||||
newEdge.data.id != INT_MAX, //2^31
|
||||
"edge id invalid"
|
||||
);
|
||||
newEdge.data.forward = data.forward;
|
||||
newEdge.data.backward = data.backward;
|
||||
edges.push_back( newEdge );
|
||||
}
|
||||
}
|
||||
}
|
||||
_graph.reset();
|
||||
std::vector<NodeID>().swap(oldNodeIDFromNewNodeIDMap);
|
||||
BOOST_ASSERT( 0 == oldNodeIDFromNewNodeIDMap.capacity() );
|
||||
|
||||
TemporaryStorage & tempStorage = TemporaryStorage::GetInstance();
|
||||
//load the edges that were saved before renumbering; they already carry the original IDs, so no further renumbering is needed.
|
||||
NodeID start;
|
||||
NodeID target;
|
||||
_DynamicGraph::EdgeData data;
|
||||
|
||||
Edge restored_edge;
|
||||
for(unsigned i = 0; i < temp_edge_counter; ++i) {
|
||||
tempStorage.ReadFromSlot(edge_storage_slot, (char*)&start, sizeof(NodeID));
|
||||
tempStorage.ReadFromSlot(edge_storage_slot, (char*)&target, sizeof(NodeID));
|
||||
tempStorage.ReadFromSlot(edge_storage_slot, (char*)&data, sizeof(_DynamicGraph::EdgeData));
|
||||
restored_edge.source = start;
|
||||
restored_edge.target = target;
|
||||
restored_edge.data.distance = data.distance;
|
||||
restored_edge.data.shortcut = data.shortcut;
|
||||
restored_edge.data.id = data.id;
|
||||
restored_edge.data.forward = data.forward;
|
||||
restored_edge.data.backward = data.backward;
|
||||
edges.push_back( restored_edge );
|
||||
}
|
||||
tempStorage.DeallocateSlot(edge_storage_slot);
|
||||
}
|
||||
|
||||
private:
|
||||
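// Bounded local search used as the witness search during contraction: it stops after maxNodes
// settled nodes, once the settled distance exceeds maxDistance, or as soon as numTargets
// target nodes have been settled. Edges leading to middleNode (the node currently being
// contracted) are skipped.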
inline void _Dijkstra( const int maxDistance, const unsigned numTargets, const int maxNodes, _ThreadData* const data, const NodeID middleNode ){
|
||||
|
||||
_Heap& heap = data->heap;
|
||||
|
||||
int nodes = 0;
|
||||
unsigned targetsFound = 0;
|
||||
while ( heap.Size() > 0 ) {
|
||||
const NodeID node = heap.DeleteMin();
|
||||
const int distance = heap.GetKey( node );
|
||||
const short currentHop = heap.GetData( node ).hop+1;
|
||||
|
||||
if ( ++nodes > maxNodes )
|
||||
return;
|
||||
//Destination settled?
|
||||
if ( distance > maxDistance )
|
||||
return;
|
||||
|
||||
if ( heap.GetData( node ).target ) {
|
||||
++targetsFound;
|
||||
if ( targetsFound >= numTargets ) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
//iterate over all edges of node
|
||||
for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ); edge != endEdges; ++edge ) {
|
||||
const ContractorEdgeData& data = _graph->GetEdgeData( edge );
|
||||
if ( !data.forward ){
|
||||
continue;
|
||||
}
|
||||
const NodeID to = _graph->GetTarget( edge );
|
||||
if(middleNode == to) {
|
||||
continue;
|
||||
}
|
||||
const int toDistance = distance + data.distance;
|
||||
|
||||
//New Node discovered -> Add to Heap + Node Info Storage
|
||||
if ( !heap.WasInserted( to ) ) {
|
||||
heap.Insert( to, toDistance, _HeapData(currentHop, false) );
|
||||
}
|
||||
//Found a shorter Path -> Update distance
|
||||
else if ( toDistance < heap.GetKey( to ) ) {
|
||||
heap.DecreaseKey( to, toDistance );
|
||||
heap.GetData( to ).hop = currentHop;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
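// Compute the contraction priority of a node by simulating its contraction and combining the
// edge quotient, the original-edge quotient and the node's depth into a weighted sum.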
inline float _Evaluate( _ThreadData* const data, _PriorityData* const nodeData, const NodeID node){
|
||||
_ContractionInformation stats;
|
||||
|
||||
//perform simulated contraction
|
||||
_Contract< true> ( data, node, &stats );
|
||||
|
||||
// Result will contain the priority
|
||||
float result;
|
||||
if ( 0 == (stats.edgesDeleted*stats.originalEdgesDeleted) )
|
||||
result = 1 * nodeData->depth;
|
||||
else
|
||||
result = 2 * ((( float ) stats.edgesAdded ) / stats.edgesDeleted ) + 4 * ((( float ) stats.originalEdgesAdded ) / stats.originalEdgesDeleted ) + 1 * nodeData->depth;
|
||||
assert( result >= 0 );
|
||||
return result;
|
||||
}
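// Worked example for _Evaluate above (illustrative numbers): if the simulated
// contraction would delete 2 edges carrying 4 original edges, add 3 shortcuts
// carrying 6 original edges, and the node sits at depth 1, the priority is
// 2*(3/2) + 4*(6/4) + 1*1 = 10. Nodes with locally minimal priority are
// contracted first; the depth term penalizes nodes already deep in the hierarchy.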
|
||||
|
||||
template< bool Simulate >
|
||||
inline bool _Contract( _ThreadData* data, NodeID node, _ContractionInformation* stats = NULL ) {
|
||||
_Heap& heap = data->heap;
|
||||
int insertedEdgesSize = data->insertedEdges.size();
|
||||
std::vector< _ContractorEdge >& insertedEdges = data->insertedEdges;
|
||||
|
||||
for ( _DynamicGraph::EdgeIterator inEdge = _graph->BeginEdges( node ), endInEdges = _graph->EndEdges( node ); inEdge != endInEdges; ++inEdge ) {
|
||||
const ContractorEdgeData& inData = _graph->GetEdgeData( inEdge );
|
||||
const NodeID source = _graph->GetTarget( inEdge );
|
||||
if ( Simulate ) {
|
||||
assert( stats != NULL );
|
||||
++stats->edgesDeleted;
|
||||
stats->originalEdgesDeleted += inData.originalEdges;
|
||||
}
|
||||
if ( !inData.backward )
|
||||
continue;
|
||||
|
||||
heap.Clear();
|
||||
heap.Insert( source, 0, _HeapData() );
|
||||
int maxDistance = 0;
|
||||
unsigned numTargets = 0;
|
||||
|
||||
for ( _DynamicGraph::EdgeIterator outEdge = _graph->BeginEdges( node ), endOutEdges = _graph->EndEdges( node ); outEdge != endOutEdges; ++outEdge ) {
|
||||
const ContractorEdgeData& outData = _graph->GetEdgeData( outEdge );
|
||||
if ( !outData.forward ) {
|
||||
continue;
|
||||
}
|
||||
const NodeID target = _graph->GetTarget( outEdge );
|
||||
const int pathDistance = inData.distance + outData.distance;
|
||||
maxDistance = std::max( maxDistance, pathDistance );
|
||||
if ( !heap.WasInserted( target ) ) {
|
||||
heap.Insert( target, INT_MAX, _HeapData( 0, true ) );
|
||||
++numTargets;
|
||||
}
|
||||
}
|
||||
|
||||
if( Simulate ) {
|
||||
_Dijkstra( maxDistance, numTargets, 1000, data, node );
|
||||
} else {
|
||||
_Dijkstra( maxDistance, numTargets, 2000, data, node );
|
||||
}
|
||||
for ( _DynamicGraph::EdgeIterator outEdge = _graph->BeginEdges( node ), endOutEdges = _graph->EndEdges( node ); outEdge != endOutEdges; ++outEdge ) {
|
||||
const ContractorEdgeData& outData = _graph->GetEdgeData( outEdge );
|
||||
if ( !outData.forward ) {
|
||||
continue;
|
||||
}
|
||||
const NodeID target = _graph->GetTarget( outEdge );
|
||||
const int pathDistance = inData.distance + outData.distance;
|
||||
const int distance = heap.GetKey( target );
|
||||
if ( pathDistance < distance ) {
|
||||
if ( Simulate ) {
|
||||
assert( stats != NULL );
|
||||
stats->edgesAdded+=2;
|
||||
stats->originalEdgesAdded += 2* ( outData.originalEdges + inData.originalEdges );
|
||||
} else {
|
||||
_ContractorEdge newEdge;
|
||||
newEdge.source = source;
|
||||
newEdge.target = target;
|
||||
newEdge.data = ContractorEdgeData( pathDistance, outData.originalEdges + inData.originalEdges, node/*, 0, inData.turnInstruction*/, true, true, false);
|
||||
insertedEdges.push_back( newEdge );
|
||||
std::swap( newEdge.source, newEdge.target );
|
||||
newEdge.data.forward = false;
|
||||
newEdge.data.backward = true;
|
||||
insertedEdges.push_back( newEdge );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if ( !Simulate ) {
|
||||
for ( int i = insertedEdgesSize, iend = insertedEdges.size(); i < iend; ++i ) {
|
||||
bool found = false;
|
||||
for ( int other = i + 1 ; other < iend ; ++other ) {
|
||||
if ( insertedEdges[other].source != insertedEdges[i].source )
|
||||
continue;
|
||||
if ( insertedEdges[other].target != insertedEdges[i].target )
|
||||
continue;
|
||||
if ( insertedEdges[other].data.distance != insertedEdges[i].data.distance )
|
||||
continue;
|
||||
if ( insertedEdges[other].data.shortcut != insertedEdges[i].data.shortcut )
|
||||
continue;
|
||||
insertedEdges[other].data.forward |= insertedEdges[i].data.forward;
|
||||
insertedEdges[other].data.backward |= insertedEdges[i].data.backward;
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
if ( !found ) {
|
||||
insertedEdges[insertedEdgesSize++] = insertedEdges[i];
|
||||
}
|
||||
}
|
||||
insertedEdges.resize( insertedEdgesSize );
|
||||
}
|
||||
return true;
|
||||
}
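// Note on the non-simulating pass of _Contract above: every shortcut is pushed
// twice, once forward (source -> target) and once as the reversed backward copy,
// and the final loop merges entries that agree on source, target, distance and
// shortcut flag by OR-ing their forward/backward flags, so parallel shortcuts
// collapse into a single bidirectional edge.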
|
||||
|
||||
inline void _DeleteIncomingEdges( _ThreadData* data, const NodeID node ) {
|
||||
std::vector< NodeID >& neighbours = data->neighbours;
|
||||
neighbours.clear();
|
||||
|
||||
//find all neighbours
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( node ) ; e < _graph->EndEdges( node ) ; ++e ) {
|
||||
const NodeID u = _graph->GetTarget( e );
|
||||
if ( u != node )
|
||||
neighbours.push_back( u );
|
||||
}
|
||||
//eliminate duplicate entries ( forward + backward edges )
|
||||
std::sort( neighbours.begin(), neighbours.end() );
|
||||
neighbours.resize( std::unique( neighbours.begin(), neighbours.end() ) - neighbours.begin() );
|
||||
|
||||
for ( int i = 0, e = ( int ) neighbours.size(); i < e; ++i ) {
|
||||
_graph->DeleteEdgesTo( neighbours[i], node );
|
||||
}
|
||||
}
|
||||
|
||||
inline bool _UpdateNeighbours( std::vector< float > & priorities, std::vector< _PriorityData > & nodeData, _ThreadData* const data, const NodeID node) {
|
||||
std::vector< NodeID >& neighbours = data->neighbours;
|
||||
neighbours.clear();
|
||||
|
||||
//find all neighbours
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ) ; e < endEdges ; ++e ) {
|
||||
const NodeID u = _graph->GetTarget( e );
|
||||
if ( u == node )
|
||||
continue;
|
||||
neighbours.push_back( u );
|
||||
nodeData[u].depth = (std::max)(nodeData[node].depth + 1, nodeData[u].depth );
|
||||
}
|
||||
//eliminate duplicate entries ( forward + backward edges )
|
||||
std::sort( neighbours.begin(), neighbours.end() );
|
||||
neighbours.resize( std::unique( neighbours.begin(), neighbours.end() ) - neighbours.begin() );
|
||||
|
||||
BOOST_FOREACH(const NodeID u, neighbours) {
|
||||
priorities[u] = _Evaluate( data, &( nodeData )[u], u );
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
inline bool _IsIndependent( const std::vector< float >& priorities/*, const std::vector< _PriorityData >& nodeData*/, _ThreadData* const data, NodeID node ) const {
|
||||
const double priority = priorities[node];
|
||||
|
||||
std::vector< NodeID >& neighbours = data->neighbours;
|
||||
neighbours.clear();
|
||||
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( node ) ; e < _graph->EndEdges( node ) ; ++e ) {
|
||||
const NodeID target = _graph->GetTarget( e );
|
||||
if(node==target)
|
||||
continue;
|
||||
const double targetPriority = priorities[target];
|
||||
assert( targetPriority >= 0 );
|
||||
//found a neighbour with lower priority?
|
||||
if ( priority > targetPriority )
|
||||
return false;
|
||||
//tie breaking
|
||||
if ( std::abs(priority - targetPriority) < std::numeric_limits<double>::epsilon() && bias(node, target) ) {
|
||||
return false;
|
||||
}
|
||||
neighbours.push_back( target );
|
||||
}
|
||||
|
||||
std::sort( neighbours.begin(), neighbours.end() );
|
||||
neighbours.resize( std::unique( neighbours.begin(), neighbours.end() ) - neighbours.begin() );
|
||||
|
||||
//examine all neighbours that are at most 2 hops away
|
||||
BOOST_FOREACH(const NodeID u, neighbours) {
|
||||
for ( _DynamicGraph::EdgeIterator e = _graph->BeginEdges( u ) ; e < _graph->EndEdges( u ) ; ++e ) {
|
||||
const NodeID target = _graph->GetTarget( e );
|
||||
if(node==target)
|
||||
continue;
|
||||
|
||||
const double targetPriority = priorities[target];
|
||||
assert( targetPriority >= 0 );
|
||||
//found a neighbour with lower priority?
|
||||
if ( priority > targetPriority)
|
||||
return false;
|
||||
//tie breaking
|
||||
if ( std::abs(priority - targetPriority) < std::numeric_limits<double>::epsilon() && bias(node, target) ) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This bias function takes up 22 assembly instructions in total on X86
|
||||
*/
|
||||
inline bool bias(const NodeID a, const NodeID b) const {
|
||||
unsigned short hasha = fastHash(a);
|
||||
unsigned short hashb = fastHash(b);
|
||||
|
||||
//The compiler optimizes this to conditional register flags, without branching statements!
|
||||
if(hasha != hashb)
|
||||
return hasha < hashb;
|
||||
return a < b;
|
||||
}
|
||||
|
||||
boost::shared_ptr<_DynamicGraph> _graph;
|
||||
std::vector<_DynamicGraph::InputEdge> contractedEdges;
|
||||
unsigned edge_storage_slot;
|
||||
uint64_t temp_edge_counter;
|
||||
std::vector<NodeID> oldNodeIDFromNewNodeIDMap;
|
||||
XORFastHash fastHash;
|
||||
};
|
||||
|
||||
#endif // CONTRACTOR_H_INCLUDED
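The independence test above decides which nodes may be contracted in the same batch: a node is only taken if no node within two hops has a strictly smaller priority, with the hash-based bias() breaking exact ties. Below is a minimal standalone sketch of that rule under simplifying assumptions: the toy_* names are hypothetical, std::hash stands in for XORFastHash, exact float equality replaces the epsilon comparison, and the duplicate elimination of the neighbour list is omitted.

#include <cstddef>
#include <cstdint>
#include <functional>
#include <vector>

typedef std::vector< std::vector<std::uint32_t> > ToyGraph; // adjacency lists

// stand-in for the XORFastHash-based bias(): hash first, node id as final tie-break
static bool toy_bias(std::uint32_t a, std::uint32_t b) {
    const std::size_t ha = std::hash<std::uint32_t>()(a);
    const std::size_t hb = std::hash<std::uint32_t>()(b);
    return (ha != hb) ? (ha < hb) : (a < b);
}

// node "loses" against target if target has a smaller priority, or an equal one
// that wins the tie-break
static bool toy_loses_to(float own, float other, std::uint32_t node, std::uint32_t target) {
    return (own > other) || (own == other && toy_bias(node, target));
}

// a node may join the current contraction batch only if it does not lose to any
// node within two hops
static bool toy_is_independent(const ToyGraph &graph,
                               const std::vector<float> &priority,
                               std::uint32_t node) {
    for (std::size_t i = 0; i < graph[node].size(); ++i) {
        const std::uint32_t neighbour = graph[node][i];
        if (neighbour == node) continue;
        if (toy_loses_to(priority[node], priority[neighbour], node, neighbour)) return false;
        for (std::size_t j = 0; j < graph[neighbour].size(); ++j) {
            const std::uint32_t two_hop = graph[neighbour][j];
            if (two_hop == node) continue;
            if (toy_loses_to(priority[node], priority[two_hop], node, two_hop)) return false;
        }
    }
    return true;
}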
|
||||
628
Contractor/EdgeBasedGraphFactory.cpp
Normal file
@ -0,0 +1,628 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
|
||||
#include "EdgeBasedGraphFactory.h"
|
||||
#include "../Util/ComputeAngle.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
|
||||
//TODO: CompressionWorker
|
||||
//TODO: EdgeBasedEdgeGenerator
|
||||
|
||||
// template<class Work>
|
||||
// inline static void TraverseGraph(NodeBasedDynamicGraph & graph, Work & work) {
|
||||
|
||||
// }
|
||||
|
||||
EdgeBasedGraphFactory::EdgeBasedGraphFactory(
|
||||
int number_of_nodes,
|
||||
std::vector<ImportEdge> & input_edge_list,
|
||||
std::vector<NodeID> & barrier_node_list,
|
||||
std::vector<NodeID> & traffic_light_node_list,
|
||||
std::vector<TurnRestriction> & input_restrictions_list,
|
||||
std::vector<NodeInfo> & m_node_info_list,
|
||||
SpeedProfileProperties speed_profile
|
||||
) : speed_profile(speed_profile),
|
||||
m_turn_restrictions_count(0),
|
||||
m_node_info_list(m_node_info_list)
|
||||
{
|
||||
BOOST_FOREACH(const TurnRestriction & restriction, input_restrictions_list) {
|
||||
std::pair<NodeID, NodeID> restriction_source =
|
||||
std::make_pair(restriction.fromNode, restriction.viaNode);
|
||||
unsigned index;
|
||||
RestrictionMap::iterator restriction_iter;
|
||||
restriction_iter = m_restriction_map.find(restriction_source);
|
||||
if(restriction_iter == m_restriction_map.end()) {
|
||||
index = m_restriction_bucket_list.size();
|
||||
m_restriction_bucket_list.resize(index+1);
|
||||
m_restriction_map.emplace(restriction_source, index);
|
||||
} else {
|
||||
index = restriction_iter->second;
|
||||
//Map already contains an is_only_*-restriction
|
||||
if(m_restriction_bucket_list.at(index).begin()->second) {
|
||||
continue;
|
||||
} else if(restriction.flags.isOnly) {
|
||||
//We are going to insert an is_only_*-restriction. There can be only one.
|
||||
m_turn_restrictions_count -= m_restriction_bucket_list.at(index).size();
|
||||
m_restriction_bucket_list.at(index).clear();
|
||||
}
|
||||
}
|
||||
++m_turn_restrictions_count;
|
||||
m_restriction_bucket_list.at(index).push_back(
|
||||
std::make_pair( restriction.toNode, restriction.flags.isOnly)
|
||||
);
|
||||
}
|
||||
|
||||
m_barrier_nodes.insert(
|
||||
barrier_node_list.begin(),
|
||||
barrier_node_list.end()
|
||||
);
|
||||
|
||||
m_traffic_lights.insert(
|
||||
traffic_light_node_list.begin(),
|
||||
traffic_light_node_list.end()
|
||||
);
|
||||
|
||||
DeallocatingVector< NodeBasedEdge > edges_list;
|
||||
NodeBasedEdge edge;
|
||||
BOOST_FOREACH(const ImportEdge & import_edge, input_edge_list) {
|
||||
if(!import_edge.isForward()) {
|
||||
edge.source = import_edge.target();
|
||||
edge.target = import_edge.source();
|
||||
edge.data.backward = import_edge.isForward();
|
||||
edge.data.forward = import_edge.isBackward();
|
||||
} else {
|
||||
edge.source = import_edge.source();
|
||||
edge.target = import_edge.target();
|
||||
edge.data.forward = import_edge.isForward();
|
||||
edge.data.backward = import_edge.isBackward();
|
||||
}
|
||||
if(edge.source == edge.target) {
|
||||
continue;
|
||||
}
|
||||
edge.data.distance = (std::max)((int)import_edge.weight(), 1 );
|
||||
BOOST_ASSERT( edge.data.distance > 0 );
|
||||
edge.data.shortcut = false;
|
||||
edge.data.roundabout = import_edge.isRoundabout();
|
||||
edge.data.ignoreInGrid = import_edge.ignoreInGrid();
|
||||
edge.data.nameID = import_edge.name();
|
||||
edge.data.type = import_edge.type();
|
||||
edge.data.isAccessRestricted = import_edge.isAccessRestricted();
|
||||
edge.data.edgeBasedNodeID = edges_list.size();
|
||||
edge.data.contraFlow = import_edge.isContraFlow();
|
||||
edges_list.push_back( edge );
|
||||
if( edge.data.backward ) {
|
||||
std::swap( edge.source, edge.target );
|
||||
edge.data.forward = import_edge.isBackward();
|
||||
edge.data.backward = import_edge.isForward();
|
||||
edge.data.edgeBasedNodeID = edges_list.size();
|
||||
edges_list.push_back( edge );
|
||||
}
|
||||
}
|
||||
std::vector<ImportEdge>().swap(input_edge_list);
|
||||
std::sort( edges_list.begin(), edges_list.end() );
|
||||
m_node_based_graph = boost::make_shared<NodeBasedDynamicGraph>(
|
||||
number_of_nodes, edges_list
|
||||
);
|
||||
}
|
||||
|
||||
void EdgeBasedGraphFactory::GetEdgeBasedEdges(
|
||||
DeallocatingVector< EdgeBasedEdge >& output_edge_list
|
||||
) {
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == output_edge_list.size(),
|
||||
"Vector is not empty"
|
||||
);
|
||||
m_edge_based_edge_list.swap(output_edge_list);
|
||||
}
|
||||
|
||||
void EdgeBasedGraphFactory::GetEdgeBasedNodes( std::vector<EdgeBasedNode> & nodes) {
|
||||
#ifndef NDEBUG
|
||||
BOOST_FOREACH(const EdgeBasedNode & node, m_edge_based_node_list){
|
||||
BOOST_ASSERT(node.lat1 != INT_MAX); BOOST_ASSERT(node.lon1 != INT_MAX);
|
||||
BOOST_ASSERT(node.lat2 != INT_MAX); BOOST_ASSERT(node.lon2 != INT_MAX);
|
||||
}
|
||||
#endif
|
||||
nodes.swap(m_edge_based_node_list);
|
||||
}
|
||||
|
||||
NodeID EdgeBasedGraphFactory::CheckForEmanatingIsOnlyTurn(
|
||||
const NodeID u,
|
||||
const NodeID v
|
||||
) const {
|
||||
const std::pair < NodeID, NodeID > restriction_source = std::make_pair(u, v);
|
||||
RestrictionMap::const_iterator restriction_iter;
|
||||
restriction_iter = m_restriction_map.find(restriction_source);
|
||||
if (restriction_iter != m_restriction_map.end()) {
|
||||
const unsigned index = restriction_iter->second;
|
||||
BOOST_FOREACH(
|
||||
const RestrictionSource & restriction_target,
|
||||
m_restriction_bucket_list.at(index)
|
||||
) {
|
||||
if(restriction_target.second) {
|
||||
return restriction_target.first;
|
||||
}
|
||||
}
|
||||
}
|
||||
return UINT_MAX;
|
||||
}
|
||||
|
||||
bool EdgeBasedGraphFactory::CheckIfTurnIsRestricted(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w
|
||||
) const {
|
||||
//only add an edge if the turn is not a U-turn, except when it is at the end of a dead-end street.
|
||||
const std::pair < NodeID, NodeID > restriction_source = std::make_pair(u, v);
|
||||
RestrictionMap::const_iterator restriction_iter;
|
||||
restriction_iter = m_restriction_map.find(restriction_source);
|
||||
if (restriction_iter != m_restriction_map.end()) {
|
||||
const unsigned index = restriction_iter->second;
|
||||
BOOST_FOREACH(
|
||||
const RestrictionTarget & restriction_target,
|
||||
m_restriction_bucket_list.at(index)
|
||||
) {
|
||||
if(w == restriction_target.first) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void EdgeBasedGraphFactory::InsertEdgeBasedNode(
|
||||
EdgeIterator e1,
|
||||
NodeIterator u,
|
||||
NodeIterator v,
|
||||
bool belongsToTinyComponent) {
|
||||
EdgeData & data = m_node_based_graph->GetEdgeData(e1);
|
||||
EdgeBasedNode currentNode;
|
||||
currentNode.nameID = data.nameID;
|
||||
currentNode.lat1 = m_node_info_list[u].lat;
|
||||
currentNode.lon1 = m_node_info_list[u].lon;
|
||||
currentNode.lat2 = m_node_info_list[v].lat;
|
||||
currentNode.lon2 = m_node_info_list[v].lon;
|
||||
currentNode.belongsToTinyComponent = belongsToTinyComponent;
|
||||
currentNode.id = data.edgeBasedNodeID;
|
||||
currentNode.ignoreInGrid = data.ignoreInGrid;
|
||||
currentNode.weight = data.distance;
|
||||
m_edge_based_node_list.push_back(currentNode);
|
||||
}
|
||||
|
||||
|
||||
void EdgeBasedGraphFactory::FlushVectorToStream(
|
||||
std::ofstream & edge_data_file,
|
||||
std::vector<OriginalEdgeData> & original_edge_data_vector
|
||||
) const {
|
||||
edge_data_file.write(
|
||||
(char*)&(original_edge_data_vector[0]),
|
||||
original_edge_data_vector.size()*sizeof(OriginalEdgeData)
|
||||
);
|
||||
original_edge_data_vector.clear();
|
||||
}
|
||||
|
||||
void EdgeBasedGraphFactory::Run(
|
||||
const char * original_edge_data_filename,
|
||||
lua_State *lua_state
|
||||
) {
|
||||
SimpleLogger().Write() << "Compressing geometry of input graph";
|
||||
//TODO: iterate over all turns
|
||||
|
||||
//TODO: compress geometries
|
||||
|
||||
//TODO: update turn restrictions if concerned by compression
|
||||
|
||||
//TODO: do some compression statistics
|
||||
|
||||
|
||||
SimpleLogger().Write() << "Identifying components of the road network";
|
||||
|
||||
unsigned skipped_turns_counter = 0;
|
||||
unsigned node_based_edge_counter = 0;
|
||||
unsigned original_edges_counter = 0;
|
||||
|
||||
std::ofstream edge_data_file(
|
||||
original_edge_data_filename,
|
||||
std::ios::binary
|
||||
);
|
||||
|
||||
//writes a dummy value that is updated later
|
||||
edge_data_file.write(
|
||||
(char*)&original_edges_counter,
|
||||
sizeof(unsigned)
|
||||
);
|
||||
|
||||
//Run a BFS on the undirected graph and identify small components
|
||||
std::vector<unsigned> component_index_list;
|
||||
std::vector<NodeID> component_index_size;
|
||||
BFSCompentExplorer( component_index_list, component_index_size);
|
||||
|
||||
SimpleLogger().Write() <<
|
||||
"identified: " << component_index_size.size() << " many components";
|
||||
SimpleLogger().Write() <<
|
||||
"generating edge-expanded nodes";
|
||||
|
||||
Percent p(m_node_based_graph->GetNumberOfNodes());
|
||||
//loop over all edges and generate new set of nodes.
|
||||
for(
|
||||
NodeIterator u = 0, end = m_node_based_graph->GetNumberOfNodes();
|
||||
u < end;
|
||||
++u
|
||||
) {
|
||||
p.printIncrement();
|
||||
for(
|
||||
EdgeIterator e1 = m_node_based_graph->BeginEdges(u),
|
||||
last_edge = m_node_based_graph->EndEdges(u);
|
||||
e1 < last_edge;
|
||||
++e1
|
||||
) {
|
||||
NodeIterator v = m_node_based_graph->GetTarget(e1);
|
||||
|
||||
if(m_node_based_graph->GetEdgeData(e1).type != SHRT_MAX) {
|
||||
BOOST_ASSERT_MSG(e1 != UINT_MAX, "edge id invalid");
|
||||
BOOST_ASSERT_MSG(u != UINT_MAX, "source node invalid");
|
||||
BOOST_ASSERT_MSG(v != UINT_MAX, "target node invalid");
|
||||
//Note: edges that end on barrier nodes or on a turn restriction
|
||||
//may actually be in two distinct components. We choose the smallest
|
||||
const unsigned size_of_component = std::min(
|
||||
component_index_size[component_index_list[u]],
|
||||
component_index_size[component_index_list[v]]
|
||||
);
|
||||
|
||||
InsertEdgeBasedNode( e1, u, v, size_of_component < 1000 );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SimpleLogger().Write()
|
||||
<< "Generated " << m_edge_based_node_list.size() << " nodes in " <<
|
||||
"edge-expanded graph";
|
||||
SimpleLogger().Write() << "generating edge-expanded edges";
|
||||
|
||||
std::vector<NodeID>().swap(component_index_size);
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == component_index_size.capacity(),
|
||||
"component size vector not deallocated"
|
||||
);
|
||||
std::vector<NodeID>().swap(component_index_list);
|
||||
BOOST_ASSERT_MSG(
|
||||
0 == component_index_list.capacity(),
|
||||
"component index vector not deallocated"
|
||||
);
|
||||
std::vector<OriginalEdgeData> original_edge_data_vector;
|
||||
original_edge_data_vector.reserve(10000);
|
||||
|
||||
//Loop over all turns and generate new set of edges.
|
||||
//Three nested loops look super-linear, but we are dealing with a (kind of)
|
||||
//linear number of turns only.
|
||||
p.reinit(m_node_based_graph->GetNumberOfNodes());
|
||||
for(
|
||||
NodeIterator u = 0, end = m_node_based_graph->GetNumberOfNodes();
|
||||
u < end;
|
||||
++u
|
||||
) {
|
||||
for(
|
||||
EdgeIterator e1 = m_node_based_graph->BeginEdges(u),
|
||||
last_edge_u = m_node_based_graph->EndEdges(u);
|
||||
e1 < last_edge_u;
|
||||
++e1
|
||||
) {
|
||||
++node_based_edge_counter;
|
||||
const NodeIterator v = m_node_based_graph->GetTarget(e1);
|
||||
const NodeID to_node_of_only_restriction = CheckForEmanatingIsOnlyTurn(u, v);
|
||||
const bool is_barrier_node = ( m_barrier_nodes.find(v) != m_barrier_nodes.end() );
|
||||
|
||||
for(
|
||||
EdgeIterator e2 = m_node_based_graph->BeginEdges(v),
|
||||
last_edge_v = m_node_based_graph->EndEdges(v);
|
||||
e2 < last_edge_v;
|
||||
++e2
|
||||
) {
|
||||
const NodeIterator w = m_node_based_graph->GetTarget(e2);
|
||||
if(
|
||||
to_node_of_only_restriction != UINT_MAX &&
|
||||
w != to_node_of_only_restriction
|
||||
) {
|
||||
//We are at an only_-restriction but not at the right turn.
|
||||
++skipped_turns_counter;
|
||||
continue;
|
||||
}
|
||||
|
||||
if( is_barrier_node) {
|
||||
if(u != w) {
|
||||
++skipped_turns_counter;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
if ( (u == w) && (m_node_based_graph->GetOutDegree(v) > 1) ) {
|
||||
++skipped_turns_counter;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
//only add an edge if turn is not a U-turn except when it is
|
||||
//at the end of a dead-end street
|
||||
if (
|
||||
CheckIfTurnIsRestricted(u, v, w) &&
|
||||
(to_node_of_only_restriction == UINT_MAX) &&
|
||||
(w != to_node_of_only_restriction)
|
||||
) {
|
||||
++skipped_turns_counter;
|
||||
continue;
|
||||
}
|
||||
|
||||
//only add an edge if turn is not prohibited
|
||||
const EdgeData edge_data1 = m_node_based_graph->GetEdgeData(e1);
|
||||
const EdgeData edge_data2 = m_node_based_graph->GetEdgeData(e2);
|
||||
|
||||
BOOST_ASSERT(
|
||||
edge_data1.edgeBasedNodeID < m_node_based_graph->GetNumberOfEdges()
|
||||
);
|
||||
BOOST_ASSERT(
|
||||
edge_data2.edgeBasedNodeID < m_node_based_graph->GetNumberOfEdges()
|
||||
);
|
||||
BOOST_ASSERT(
|
||||
edge_data1.edgeBasedNodeID != edge_data2.edgeBasedNodeID
|
||||
);
|
||||
BOOST_ASSERT( edge_data1.forward );
|
||||
BOOST_ASSERT( edge_data2.forward );
|
||||
|
||||
// the following is the core of the loop.
|
||||
unsigned distance = edge_data1.distance;
|
||||
if( m_traffic_lights.find(v) != m_traffic_lights.end() ) {
|
||||
distance += speed_profile.trafficSignalPenalty;
|
||||
}
|
||||
const int turn_penalty = GetTurnPenalty(u, v, w, lua_state);
|
||||
TurnInstruction turnInstruction = AnalyzeTurn(u, v, w);
|
||||
if(turnInstruction == TurnInstructions.UTurn){
|
||||
distance += speed_profile.uTurnPenalty;
|
||||
}
|
||||
distance += turn_penalty;
|
||||
|
||||
original_edge_data_vector.push_back(
|
||||
OriginalEdgeData(
|
||||
v,
|
||||
edge_data2.nameID,
|
||||
turnInstruction
|
||||
)
|
||||
);
|
||||
++original_edges_counter;
|
||||
|
||||
if(original_edge_data_vector.size() > 100000) {
|
||||
FlushVectorToStream(
|
||||
edge_data_file,
|
||||
original_edge_data_vector
|
||||
);
|
||||
}
|
||||
|
||||
m_edge_based_edge_list.push_back(
|
||||
EdgeBasedEdge(
|
||||
edge_data1.edgeBasedNodeID,
|
||||
edge_data2.edgeBasedNodeID,
|
||||
m_edge_based_edge_list.size(),
|
||||
distance,
|
||||
true,
|
||||
false
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
p.printIncrement();
|
||||
}
|
||||
FlushVectorToStream( edge_data_file, original_edge_data_vector );
|
||||
|
||||
edge_data_file.seekp( std::ios::beg );
|
||||
edge_data_file.write( (char*)&original_edges_counter, sizeof(unsigned) );
|
||||
edge_data_file.close();
|
||||
|
||||
SimpleLogger().Write() <<
|
||||
"Generated " << m_edge_based_node_list.size() << " edge based nodes";
|
||||
SimpleLogger().Write() <<
|
||||
"Node-based graph contains " << node_based_edge_counter << " edges";
|
||||
SimpleLogger().Write() <<
|
||||
"Edge-expanded graph ...";
|
||||
SimpleLogger().Write() <<
|
||||
" contains " << m_edge_based_edge_list.size() << " edges";
|
||||
SimpleLogger().Write() <<
|
||||
" skips " << skipped_turns_counter << " turns, "
|
||||
"defined by " << m_turn_restrictions_count << " restrictions";
|
||||
}
|
||||
|
||||
int EdgeBasedGraphFactory::GetTurnPenalty(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w,
|
||||
lua_State *lua_state
|
||||
) const {
|
||||
const double angle = GetAngleBetweenThreeFixedPointCoordinates (
|
||||
m_node_info_list[u],
|
||||
m_node_info_list[v],
|
||||
m_node_info_list[w]
|
||||
);
|
||||
|
||||
if( speed_profile.has_turn_penalty_function ) {
|
||||
try {
|
||||
//call lua profile to compute turn penalty
|
||||
return luabind::call_function<int>(
|
||||
lua_state,
|
||||
"turn_function",
|
||||
180.-angle
|
||||
);
|
||||
} catch (const luabind::error &er) {
|
||||
SimpleLogger().Write(logWARNING) << er.what();
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
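//Note on the call above: the Lua turn_function receives 180 - angle, i.e. the
//deviation from driving straight through v (roughly 0 for straight on and
//approaching 180 for a full U-turn), assuming the angle helper returns the
//interior angle at v. If the Lua call throws, the penalty falls back to 0.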
|
||||
|
||||
TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w
|
||||
) const {
|
||||
if(u == w) {
|
||||
return TurnInstructions.UTurn;
|
||||
}
|
||||
|
||||
const EdgeIterator edge1 = m_node_based_graph->FindEdge(u, v);
|
||||
const EdgeIterator edge2 = m_node_based_graph->FindEdge(v, w);
|
||||
|
||||
const EdgeData & data1 = m_node_based_graph->GetEdgeData(edge1);
|
||||
const EdgeData & data2 = m_node_based_graph->GetEdgeData(edge2);
|
||||
|
||||
if(!data1.contraFlow && data2.contraFlow) {
|
||||
return TurnInstructions.EnterAgainstAllowedDirection;
|
||||
}
|
||||
if(data1.contraFlow && !data2.contraFlow) {
|
||||
return TurnInstructions.LeaveAgainstAllowedDirection;
|
||||
}
|
||||
|
||||
//roundabouts need to be handled explicitly
|
||||
if(data1.roundabout && data2.roundabout) {
|
||||
//Is a turn possible? If yes, we stay on the roundabout!
|
||||
if( 1 == m_node_based_graph->GetOutDegree(v) ) {
|
||||
//No turn possible.
|
||||
return TurnInstructions.NoTurn;
|
||||
}
|
||||
return TurnInstructions.StayOnRoundAbout;
|
||||
}
|
||||
//Does turn start or end on roundabout?
|
||||
if(data1.roundabout || data2.roundabout) {
|
||||
//We are entering the roundabout
|
||||
if( (!data1.roundabout) && data2.roundabout) {
|
||||
return TurnInstructions.EnterRoundAbout;
|
||||
}
|
||||
//We are leaving the roundabout
|
||||
if(data1.roundabout && (!data2.roundabout) ) {
|
||||
return TurnInstructions.LeaveRoundAbout;
|
||||
}
|
||||
}
|
||||
|
||||
//If street names stay the same and if we are certain that it is not
|
||||
//a segment of a roundabout, we skip it.
|
||||
if( data1.nameID == data2.nameID ) {
|
||||
//TODO: Here we should also do a small graph exploration to check for
|
||||
// more complex situations
|
||||
if( 0 != data1.nameID ) {
|
||||
return TurnInstructions.NoTurn;
|
||||
} else if (m_node_based_graph->GetOutDegree(v) <= 2) {
|
||||
return TurnInstructions.NoTurn;
|
||||
}
|
||||
}
|
||||
|
||||
const double angle = GetAngleBetweenThreeFixedPointCoordinates (
|
||||
m_node_info_list[u],
|
||||
m_node_info_list[v],
|
||||
m_node_info_list[w]
|
||||
);
|
||||
return TurnInstructions.GetTurnDirectionOfInstruction(angle);
|
||||
}
|
||||
|
||||
unsigned EdgeBasedGraphFactory::GetNumberOfNodes() const {
|
||||
return m_node_based_graph->GetNumberOfEdges();
|
||||
}
|
||||
|
||||
void EdgeBasedGraphFactory::BFSCompentExplorer(
|
||||
std::vector<unsigned> & component_index_list,
|
||||
std::vector<unsigned> & component_index_size
|
||||
) const {
|
||||
std::queue<std::pair<NodeID, NodeID> > bfs_queue;
|
||||
Percent p( m_node_based_graph->GetNumberOfNodes() );
|
||||
unsigned current_component, current_component_size;
|
||||
current_component = current_component_size = 0;
|
||||
|
||||
BOOST_ASSERT( component_index_list.empty() );
|
||||
BOOST_ASSERT( component_index_size.empty() );
|
||||
|
||||
component_index_list.resize(
|
||||
m_node_based_graph->GetNumberOfNodes(),
|
||||
UINT_MAX
|
||||
);
|
||||
|
||||
//put unexplored node with parent pointer into queue
|
||||
for( NodeID node = 0, end = m_node_based_graph->GetNumberOfNodes(); node < end; ++node) {
|
||||
if(UINT_MAX == component_index_list[node]) {
|
||||
bfs_queue.push(std::make_pair(node, node));
|
||||
//mark node as read
|
||||
component_index_list[node] = current_component;
|
||||
p.printIncrement();
|
||||
while(!bfs_queue.empty()) {
|
||||
//fetch element from BFS queue
|
||||
std::pair<NodeID, NodeID> current_queue_item = bfs_queue.front();
|
||||
bfs_queue.pop();
|
||||
|
||||
const NodeID v = current_queue_item.first; //current node
|
||||
const NodeID u = current_queue_item.second; //parent
|
||||
//increment size counter of current component
|
||||
++current_component_size;
|
||||
const bool is_barrier_node = (m_barrier_nodes.find(v) != m_barrier_nodes.end());
|
||||
if(!is_barrier_node) {
|
||||
const NodeID to_node_of_only_restriction = CheckForEmanatingIsOnlyTurn(u, v);
|
||||
|
||||
for(
|
||||
EdgeIterator e2 = m_node_based_graph->BeginEdges(v);
|
||||
e2 < m_node_based_graph->EndEdges(v);
|
||||
++e2
|
||||
) {
|
||||
NodeIterator w = m_node_based_graph->GetTarget(e2);
|
||||
|
||||
if(
|
||||
to_node_of_only_restriction != UINT_MAX &&
|
||||
w != to_node_of_only_restriction
|
||||
) {
|
||||
// At an only_-restriction but not at the right turn
|
||||
continue;
|
||||
}
|
||||
if( u != w ) {
|
||||
//only add an edge if turn is not a U-turn except
|
||||
//when it is at the end of a dead-end street.
|
||||
if (!CheckIfTurnIsRestricted(u, v, w) ) {
|
||||
//only add an edge if turn is not prohibited
|
||||
if(UINT_MAX == component_index_list[w]) {
|
||||
//insert next (node, parent) only if w has
|
||||
//not yet been explored
|
||||
//mark node as read
|
||||
component_index_list[w] = current_component;
|
||||
bfs_queue.push(std::make_pair(w,v));
|
||||
p.printIncrement();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
//push size into vector
|
||||
component_index_size.push_back(current_component_size);
|
||||
//reset counters;
|
||||
current_component_size = 0;
|
||||
++current_component;
|
||||
}
|
||||
}
|
||||
}
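The nested loops in Run() above are the core of the edge expansion: every directed node-based edge has already been given an edgeBasedNodeID, and every admissible turn (u, v, w) becomes an edge of the expanded graph whose weight is the weight of (u, v) plus the traffic-light, turn and U-turn penalties. Below is a minimal sketch of just that expansion rule, with hypothetical Toy* names; restrictions, barrier nodes, the Lua penalty and the output bookkeeping are all omitted, and turn_cost stands in for the combined per-turn penalties.

#include <cstddef>
#include <vector>

struct ToyNodeBasedEdge { unsigned source, target, weight, edge_based_node_id; };
struct ToyExpandedEdge  { unsigned from, to, weight; };

// every pair of edges (u,v), (v,w) meeting at v and not forming a plain U-turn
// yields one edge of the edge-expanded graph
std::vector<ToyExpandedEdge> ExpandTurns(const std::vector<ToyNodeBasedEdge> &edges,
                                         unsigned turn_cost) {
    std::vector<ToyExpandedEdge> result;
    for (std::size_t i = 0; i < edges.size(); ++i) {
        for (std::size_t j = 0; j < edges.size(); ++j) {
            if (edges[i].target != edges[j].source) continue; // edges must meet at v
            if (edges[i].source == edges[j].target) continue; // skip plain U-turns
            ToyExpandedEdge e = { edges[i].edge_based_node_id,
                                  edges[j].edge_based_node_id,
                                  edges[i].weight + turn_cost };
            result.push_back(e);
        }
    }
    return result;
}

The real Run() additionally streams an OriginalEdgeData record per turn to disk in batches of 100000 entries instead of keeping everything in memory.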
|
||||
172
Contractor/EdgeBasedGraphFactory.h
Normal file
@ -0,0 +1,172 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
// This class constructs the edge-expanded routing graph
|
||||
|
||||
#ifndef EDGEBASEDGRAPHFACTORY_H_
|
||||
#define EDGEBASEDGRAPHFACTORY_H_
|
||||
|
||||
#include "../typedefs.h"
|
||||
#include "../DataStructures/DeallocatingVector.h"
|
||||
#include "../DataStructures/DynamicGraph.h"
|
||||
#include "../DataStructures/EdgeBasedNode.h"
|
||||
#include "../DataStructures/HashTable.h"
|
||||
#include "../DataStructures/ImportEdge.h"
|
||||
#include "../DataStructures/OriginalEdgeData.h"
|
||||
#include "../DataStructures/Percent.h"
|
||||
#include "../DataStructures/QueryEdge.h"
|
||||
#include "../DataStructures/QueryNode.h"
|
||||
#include "../DataStructures/TurnInstructions.h"
|
||||
#include "../DataStructures/Restriction.h"
|
||||
#include "../Util/LuaUtil.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include "GeometryCompressor.h"
|
||||
|
||||
#include <boost/noncopyable.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/unordered_map.hpp>
|
||||
#include <boost/unordered_set.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <fstream>
|
||||
#include <queue>
|
||||
#include <vector>
|
||||
|
||||
class EdgeBasedGraphFactory : boost::noncopyable {
|
||||
public:
|
||||
struct SpeedProfileProperties{
|
||||
SpeedProfileProperties() :
|
||||
trafficSignalPenalty(0),
|
||||
uTurnPenalty(0),
|
||||
has_turn_penalty_function(false)
|
||||
{ }
|
||||
|
||||
int trafficSignalPenalty;
|
||||
int uTurnPenalty;
|
||||
bool has_turn_penalty_function;
|
||||
} speed_profile;
|
||||
|
||||
explicit EdgeBasedGraphFactory(
|
||||
int number_of_nodes,
|
||||
std::vector<ImportEdge> & input_edge_list,
|
||||
std::vector<NodeID> & barrier_node_list,
|
||||
std::vector<NodeID> & traffic_light_node_list,
|
||||
std::vector<TurnRestriction> & input_restrictions_list,
|
||||
std::vector<NodeInfo> & m_node_info_list,
|
||||
SpeedProfileProperties speed_profile
|
||||
);
|
||||
|
||||
void Run(const char * originalEdgeDataFilename, lua_State *myLuaState);
|
||||
|
||||
void GetEdgeBasedEdges( DeallocatingVector< EdgeBasedEdge >& edges );
|
||||
|
||||
void GetEdgeBasedNodes( std::vector< EdgeBasedNode> & nodes);
|
||||
|
||||
TurnInstruction AnalyzeTurn(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w
|
||||
) const;
|
||||
|
||||
int GetTurnPenalty(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w,
|
||||
lua_State *myLuaState
|
||||
) const;
|
||||
|
||||
unsigned GetNumberOfNodes() const;
|
||||
|
||||
private:
|
||||
struct NodeBasedEdgeData {
|
||||
int distance;
|
||||
unsigned edgeBasedNodeID;
|
||||
unsigned nameID;
|
||||
short type;
|
||||
bool isAccessRestricted:1;
|
||||
bool shortcut:1;
|
||||
bool forward:1;
|
||||
bool backward:1;
|
||||
bool roundabout:1;
|
||||
bool ignoreInGrid:1;
|
||||
bool contraFlow:1;
|
||||
};
|
||||
|
||||
unsigned m_turn_restrictions_count;
|
||||
|
||||
typedef DynamicGraph<NodeBasedEdgeData> NodeBasedDynamicGraph;
|
||||
typedef NodeBasedDynamicGraph::InputEdge NodeBasedEdge;
|
||||
typedef NodeBasedDynamicGraph::NodeIterator NodeIterator;
|
||||
typedef NodeBasedDynamicGraph::EdgeIterator EdgeIterator;
|
||||
typedef NodeBasedDynamicGraph::EdgeData EdgeData;
|
||||
typedef std::pair<NodeID, NodeID> RestrictionSource;
|
||||
typedef std::pair<NodeID, bool> RestrictionTarget;
|
||||
typedef std::vector<RestrictionTarget> EmanatingRestrictionsVector;
|
||||
typedef boost::unordered_map<RestrictionSource, unsigned > RestrictionMap;
|
||||
|
||||
std::vector<NodeInfo> m_node_info_list;
|
||||
std::vector<EmanatingRestrictionsVector> m_restriction_bucket_list;
|
||||
std::vector<EdgeBasedNode> m_edge_based_node_list;
|
||||
DeallocatingVector<EdgeBasedEdge> m_edge_based_edge_list;
|
||||
|
||||
boost::shared_ptr<NodeBasedDynamicGraph> m_node_based_graph;
|
||||
boost::unordered_set<NodeID> m_barrier_nodes;
|
||||
boost::unordered_set<NodeID> m_traffic_lights;
|
||||
|
||||
RestrictionMap m_restriction_map;
|
||||
|
||||
NodeID CheckForEmanatingIsOnlyTurn(
|
||||
const NodeID u,
|
||||
const NodeID v
|
||||
) const;
|
||||
|
||||
bool CheckIfTurnIsRestricted(
|
||||
const NodeID u,
|
||||
const NodeID v,
|
||||
const NodeID w
|
||||
) const;
|
||||
|
||||
void InsertEdgeBasedNode(
|
||||
NodeBasedDynamicGraph::EdgeIterator e1,
|
||||
NodeBasedDynamicGraph::NodeIterator u,
|
||||
NodeBasedDynamicGraph::NodeIterator v,
|
||||
bool belongsToTinyComponent
|
||||
);
|
||||
|
||||
void BFSCompentExplorer(
|
||||
std::vector<unsigned> & component_index_list,
|
||||
std::vector<unsigned> & component_index_size
|
||||
) const;
|
||||
|
||||
void FlushVectorToStream(
|
||||
std::ofstream & edge_data_file,
|
||||
std::vector<OriginalEdgeData> & original_edge_data_vector
|
||||
) const;
|
||||
};
|
||||
|
||||
#endif /* EDGEBASEDGRAPHFACTORY_H_ */
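The restriction bookkeeping declared above maps a (from, via) node pair to a bucket of (to, is_only) targets; CheckIfTurnIsRestricted scans the bucket for a matching target, and CheckForEmanatingIsOnlyTurn returns the mandated target of an only_* restriction, if any. Below is a minimal sketch of those two lookups with hypothetical Toy* names and a std::map in place of the boost::unordered_map.

#include <climits>
#include <cstddef>
#include <map>
#include <utility>
#include <vector>

typedef unsigned ToyNodeID;
typedef std::pair<ToyNodeID, bool> ToyRestrictionTarget;   // (to, is_only)
typedef std::map<std::pair<ToyNodeID, ToyNodeID>,          // (from, via)
                 std::vector<ToyRestrictionTarget> > ToyRestrictionBuckets;

// mirrors CheckIfTurnIsRestricted: is w listed as a target for (u, v)?
bool ToyTurnIsRestricted(const ToyRestrictionBuckets &buckets,
                         ToyNodeID u, ToyNodeID v, ToyNodeID w) {
    ToyRestrictionBuckets::const_iterator it = buckets.find(std::make_pair(u, v));
    if (it == buckets.end()) return false;
    for (std::size_t i = 0; i < it->second.size(); ++i)
        if (it->second[i].first == w) return true;
    return false;
}

// mirrors CheckForEmanatingIsOnlyTurn: if an only_* restriction starts at (u, v),
// return the single node the turn must go to, otherwise UINT_MAX
ToyNodeID ToyMandatoryTurnTarget(const ToyRestrictionBuckets &buckets,
                                 ToyNodeID u, ToyNodeID v) {
    ToyRestrictionBuckets::const_iterator it = buckets.find(std::make_pair(u, v));
    if (it != buckets.end())
        for (std::size_t i = 0; i < it->second.size(); ++i)
            if (it->second[i].second) return it->second[i].first;
    return UINT_MAX;
}

As the constructor above enforces, at most one only_* restriction is kept per (from, via) source, which is why returning the first flagged target is sufficient.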
|
||||
94
Contractor/GeometryCompressor.cpp
Normal file
@ -0,0 +1,94 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "GeometryCompressor.h"

int current_free_list_maximum = 0;
int UniqueNumber () { return ++current_free_list_maximum; }

GeometryCompressor::GeometryCompressor() {
    m_free_list.resize(100);
    IncreaseFreeList();
}

void GeometryCompressor::IncreaseFreeList() {
    m_compressed_geometries.resize(m_compressed_geometries.size() + 100);
    std::generate_n (m_free_list.rend(), 100, UniqueNumber);
}

void GeometryCompressor::AppendNodeIDsToGeomtry( NodeID node_id, NodeID contracted_node_id ) {
    //check if node_id already has a list
    boost::unordered_map<unsigned, unsigned>::const_iterator map_iterator;
    map_iterator = m_node_id_to_index_map.find( node_id );

    unsigned geometry_bucket_index = std::numeric_limits<unsigned>::max();
    if( m_node_id_to_index_map.end() == map_iterator ) {
        //if not, create one
        if( m_free_list.empty() ) {
            IncreaseFreeList();
        }
        geometry_bucket_index = m_free_list.back();
        m_free_list.pop_back();
    } else {
        geometry_bucket_index = map_iterator->second;
    }

    BOOST_ASSERT( std::numeric_limits<unsigned>::max() != geometry_bucket_index );
    BOOST_ASSERT( geometry_bucket_index < m_compressed_geometries.size() );

    //append contracted_node_id to m_compressed_geometries[node_id]
    m_compressed_geometries[geometry_bucket_index].push_back(contracted_node_id);

    //append m_compressed_geometries[contracted_node_id] to m_compressed_geometries[node_id]
    map_iterator = m_node_id_to_index_map.find(contracted_node_id);
    if ( m_node_id_to_index_map.end() != map_iterator) {
        const unsigned bucket_index_to_remove = map_iterator->second;
        BOOST_ASSERT( bucket_index_to_remove < m_compressed_geometries.size() );

        m_compressed_geometries[geometry_bucket_index].insert(
            m_compressed_geometries[geometry_bucket_index].end(),
            m_compressed_geometries[bucket_index_to_remove].begin(),
            m_compressed_geometries[bucket_index_to_remove].end()
        );
        //remove m_compressed_geometries[contracted_node_id], add to free list
        m_compressed_geometries[bucket_index_to_remove].clear();
        m_free_list.push_back(bucket_index_to_remove);
    }
}

void GeometryCompressor::PrintStatistics() const {
    unsigned compressed_node_count = 0;
    const unsigned surviving_node_count = m_compressed_geometries.size();

    BOOST_FOREACH(const std::vector<unsigned> & current_vector, m_compressed_geometries) {
        compressed_node_count += current_vector.size();
    }
    SimpleLogger().Write() <<
        "surv: " << surviving_node_count <<
        ", comp: " << compressed_node_count <<
        ", comp ratio: " << ((float)surviving_node_count/std::max(compressed_node_count, 1u) );
}
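GeometryCompressor is clearly still being wired up (geometry compression only appears as TODOs in EdgeBasedGraphFactory::Run, and m_node_id_to_index_map is never written above), so the following is only a sketch of the bucket bookkeeping the class appears to be heading towards, with hypothetical names rather than its current behaviour: every surviving node owns a bucket of absorbed node ids, and absorbing a node that already owns a bucket splices that bucket in and recycles its index through the free list.

#include <unordered_map>
#include <vector>

// Hypothetical standalone sketch, not the class above.
struct ToyCompressedGeometries {
    std::vector< std::vector<unsigned> > buckets;
    std::vector<unsigned> free_list;
    std::unordered_map<unsigned, unsigned> node_to_bucket;

    // record that node_id has absorbed contracted_node_id
    void Absorb(unsigned node_id, unsigned contracted_node_id) {
        unsigned index;
        std::unordered_map<unsigned, unsigned>::iterator it = node_to_bucket.find(node_id);
        if (it == node_to_bucket.end()) {
            if (free_list.empty()) {               // grow on demand
                free_list.push_back(buckets.size());
                buckets.push_back(std::vector<unsigned>());
            }
            index = free_list.back();
            free_list.pop_back();
            node_to_bucket[node_id] = index;
        } else {
            index = it->second;
        }
        buckets[index].push_back(contracted_node_id);

        // splice in the bucket the absorbed node may already own, recycle its index
        std::unordered_map<unsigned, unsigned>::iterator child =
            node_to_bucket.find(contracted_node_id);
        if (child != node_to_bucket.end() && child->second != index) {
            std::vector<unsigned> &victim = buckets[child->second];
            buckets[index].insert(buckets[index].end(), victim.begin(), victim.end());
            victim.clear();
            free_list.push_back(child->second);
            node_to_bucket.erase(child);
        }
    }
};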
|
||||
58
Contractor/GeometryCompressor.h
Normal file
@ -0,0 +1,58 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/unordered_map.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
#ifndef GEOMETRY_COMPRESSOR_H
|
||||
#define GEOMETRY_COMPRESSOR_H
|
||||
|
||||
class GeometryCompressor {
|
||||
public:
|
||||
GeometryCompressor();
|
||||
void AppendNodeIDsToGeomtry( NodeID node_id, NodeID contracted_node_id );
|
||||
void PrintStatistics() const;
|
||||
|
||||
private:
|
||||
|
||||
void IncreaseFreeList();
|
||||
|
||||
std::vector<std::vector<unsigned> > m_compressed_geometries;
|
||||
std::vector<unsigned> m_free_list;
|
||||
boost::unordered_map<unsigned, unsigned> m_node_id_to_index_map;
|
||||
};
|
||||
|
||||
|
||||
#endif //GEOMETRY_COMPRESSOR_H
|
||||
162
Contractor/TemporaryStorage.cpp
Normal file
@ -0,0 +1,162 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "TemporaryStorage.h"
|
||||
|
||||
TemporaryStorage::TemporaryStorage() {
|
||||
temp_directory = boost::filesystem::temp_directory_path();
|
||||
}
|
||||
|
||||
TemporaryStorage & TemporaryStorage::GetInstance(){
|
||||
static TemporaryStorage static_instance;
|
||||
return static_instance;
|
||||
}
|
||||
|
||||
TemporaryStorage::~TemporaryStorage() {
|
||||
RemoveAll();
|
||||
}
|
||||
|
||||
void TemporaryStorage::RemoveAll() {
|
||||
boost::mutex::scoped_lock lock(mutex);
|
||||
for(unsigned slot_id = 0; slot_id < stream_data_list.size(); ++slot_id) {
|
||||
DeallocateSlot(slot_id);
|
||||
}
|
||||
stream_data_list.clear();
|
||||
}
|
||||
|
||||
int TemporaryStorage::AllocateSlot() {
|
||||
boost::mutex::scoped_lock lock(mutex);
|
||||
try {
|
||||
stream_data_list.push_back(StreamData());
|
||||
} catch(boost::filesystem::filesystem_error & e) {
|
||||
Abort(e);
|
||||
}
|
||||
CheckIfTemporaryDeviceFull();
|
||||
return stream_data_list.size() - 1;
|
||||
}
|
||||
|
||||
void TemporaryStorage::DeallocateSlot(const int slot_id) {
|
||||
try {
|
||||
StreamData & data = stream_data_list[slot_id];
|
||||
boost::mutex::scoped_lock lock(*data.readWriteMutex);
|
||||
if(!boost::filesystem::exists(data.temp_path)) {
|
||||
return;
|
||||
}
|
||||
if(data.temp_file->is_open()) {
|
||||
data.temp_file->close();
|
||||
}
|
||||
|
||||
boost::filesystem::remove(data.temp_path);
|
||||
} catch(boost::filesystem::filesystem_error & e) {
|
||||
Abort(e);
|
||||
}
|
||||
}
|
||||
|
||||
void TemporaryStorage::WriteToSlot(
|
||||
const int slot_id,
|
||||
char * pointer,
|
||||
const std::size_t size
|
||||
) {
|
||||
try {
|
||||
StreamData & data = stream_data_list[slot_id];
|
||||
BOOST_ASSERT(data.write_mode);
|
||||
|
||||
boost::mutex::scoped_lock lock(*data.readWriteMutex);
|
||||
BOOST_ASSERT_MSG(
|
||||
data.write_mode,
|
||||
"Writing after first read is not allowed"
|
||||
);
|
||||
if( 1073741824 < data.buffer.size() ) {
|
||||
data.temp_file->write(&data.buffer[0], data.buffer.size());
|
||||
// data.temp_file->write(pointer, size);
|
||||
data.buffer.clear();
|
||||
CheckIfTemporaryDeviceFull();
|
||||
}
|
||||
data.buffer.insert(data.buffer.end(), pointer, pointer+size);
|
||||
|
||||
} catch(boost::filesystem::filesystem_error & e) {
|
||||
Abort(e);
|
||||
}
|
||||
}
|
||||
void TemporaryStorage::ReadFromSlot(
|
||||
const int slot_id,
|
||||
char * pointer,
|
||||
const std::size_t size
|
||||
) {
|
||||
try {
|
||||
StreamData & data = stream_data_list[slot_id];
|
||||
boost::mutex::scoped_lock lock(*data.readWriteMutex);
|
||||
if( data.write_mode ) {
|
||||
data.write_mode = false;
|
||||
data.temp_file->write(&data.buffer[0], data.buffer.size());
|
||||
data.buffer.clear();
|
||||
data.temp_file->seekg( data.temp_file->beg );
|
||||
BOOST_ASSERT( data.temp_file->beg == data.temp_file->tellg() );
|
||||
}
|
||||
BOOST_ASSERT( !data.write_mode );
|
||||
data.temp_file->read(pointer, size);
|
||||
} catch(boost::filesystem::filesystem_error & e) {
|
||||
Abort(e);
|
||||
}
|
||||
}
|
||||
|
||||
uint64_t TemporaryStorage::GetFreeBytesOnTemporaryDevice() {
|
||||
uint64_t value = -1;
|
||||
try {
|
||||
boost::filesystem::path p = boost::filesystem::temp_directory_path();
|
||||
boost::filesystem::space_info s = boost::filesystem::space( p );
|
||||
value = s.free;
|
||||
} catch(boost::filesystem::filesystem_error & e) {
|
||||
Abort(e);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
void TemporaryStorage::CheckIfTemporaryDeviceFull() {
|
||||
boost::filesystem::path p = boost::filesystem::temp_directory_path();
|
||||
boost::filesystem::space_info s = boost::filesystem::space( p );
|
||||
if( (1024*1024) > s.free ) {
|
||||
throw OSRMException("temporary device is full");
|
||||
}
|
||||
}
|
||||
|
||||
boost::filesystem::fstream::pos_type TemporaryStorage::Tell(const int slot_id) {
|
||||
boost::filesystem::fstream::pos_type position;
|
||||
try {
|
||||
StreamData & data = stream_data_list[slot_id];
|
||||
boost::mutex::scoped_lock lock(*data.readWriteMutex);
|
||||
position = data.temp_file->tellp();
|
||||
} catch(boost::filesystem::filesystem_error & e) {
|
||||
Abort(e);
|
||||
}
|
||||
return position;
|
||||
}
|
||||
|
||||
void TemporaryStorage::Abort(const boost::filesystem::filesystem_error& e) {
|
||||
RemoveAll();
|
||||
throw OSRMException(e.what());
|
||||
}
|
||||
118
Contractor/TemporaryStorage.h
Normal file
@ -0,0 +1,118 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef TEMPORARYSTORAGE_H_
|
||||
#define TEMPORARYSTORAGE_H_
|
||||
|
||||
#include "../Util/BoostFileSystemFix.h"
|
||||
#include "../Util/OSRMException.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/integer.hpp>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <boost/filesystem/fstream.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/thread/mutex.hpp>
|
||||
|
||||
#include <vector>
|
||||
#include <fstream>
|
||||
|
||||
/**
 * This class implements a singleton file storage for temporary data.
 * Temporary slots can be accessed by other objects through an integer slot id.
 * On destruction every slot gets deallocated.
 *
 * Access is sequential: there is no random access. Data is written in a
 * first phase and re-read in a second.
 */
|
||||
|
||||
static boost::filesystem::path temp_directory;
|
||||
static std::string TemporaryFilePattern("OSRM-%%%%-%%%%-%%%%");
|
||||
class TemporaryStorage {
|
||||
public:
|
||||
static TemporaryStorage & GetInstance();
|
||||
virtual ~TemporaryStorage();
|
||||
|
||||
int AllocateSlot();
|
||||
void DeallocateSlot(const int slot_id);
|
||||
void WriteToSlot(const int slot_id, char * pointer, const std::size_t size);
|
||||
void ReadFromSlot(const int slot_id, char * pointer, const std::size_t size);
|
||||
//returns the number of free bytes
|
||||
uint64_t GetFreeBytesOnTemporaryDevice();
|
||||
boost::filesystem::fstream::pos_type Tell(const int slot_id);
|
||||
void RemoveAll();
|
||||
private:
|
||||
TemporaryStorage();
|
||||
TemporaryStorage(TemporaryStorage const &){};
|
||||
|
||||
TemporaryStorage & operator=(TemporaryStorage const &) {
|
||||
return *this;
|
||||
}
|
||||
|
||||
void Abort(const boost::filesystem::filesystem_error& e);
|
||||
void CheckIfTemporaryDeviceFull();
|
||||
|
||||
struct StreamData {
|
||||
bool write_mode;
|
||||
boost::filesystem::path temp_path;
|
||||
boost::shared_ptr<boost::filesystem::fstream> temp_file;
|
||||
boost::shared_ptr<boost::mutex> readWriteMutex;
|
||||
std::vector<char> buffer;
|
||||
|
||||
StreamData() :
|
||||
write_mode(true),
|
||||
temp_path(
|
||||
boost::filesystem::unique_path(
|
||||
temp_directory.append(
|
||||
TemporaryFilePattern.begin(),
|
||||
TemporaryFilePattern.end()
|
||||
)
|
||||
)
|
||||
),
|
||||
temp_file(
|
||||
new boost::filesystem::fstream(
|
||||
temp_path,
|
||||
std::ios::in|std::ios::out|std::ios::trunc|std::ios::binary
|
||||
)
|
||||
),
|
||||
readWriteMutex(boost::make_shared<boost::mutex>())
|
||||
{
|
||||
if( temp_file->fail() ) {
|
||||
throw OSRMException("temporary file could not be created");
|
||||
}
|
||||
}
|
||||
};
|
||||
//vector of file streams that is used to store temporary data
|
||||
boost::mutex mutex;
|
||||
std::vector<StreamData> stream_data_list;
|
||||
};
|
||||
|
||||
#endif /* TEMPORARYSTORAGE_H_ */
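A minimal sketch of the intended slot lifecycle, mirroring how Contractor.h spills contracted edges and restores them later; the function name is hypothetical. All writes to a slot must precede the first read (WriteToSlot asserts on this), and the first ReadFromSlot flushes the in-memory buffer and rewinds the temporary file.

#include "TemporaryStorage.h"

// spill a value to a slot and restore it later
void SpillAndRestoreExample() {
    TemporaryStorage & storage = TemporaryStorage::GetInstance();
    const int slot_id = storage.AllocateSlot();

    unsigned value = 42;
    storage.WriteToSlot(slot_id, reinterpret_cast<char *>(&value), sizeof(value));

    unsigned restored = 0;
    storage.ReadFromSlot(slot_id, reinterpret_cast<char *>(&restored), sizeof(restored));
    // restored now holds 42; the first read flushed the write buffer and rewound the file

    storage.DeallocateSlot(slot_id);
}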
|
||||
290
DataStructures/BinaryHeap.h
Normal file
@ -0,0 +1,290 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef BINARY_HEAP_H
|
||||
#define BINARY_HEAP_H
|
||||
|
||||
//Not compatible with non contiguous node ids
|
||||
|
||||
#include <boost/unordered_map.hpp>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
#include <map>
|
||||
#include <vector>
|
||||
|
||||
template< typename NodeID, typename Key >
|
||||
class ArrayStorage {
|
||||
public:
|
||||
|
||||
ArrayStorage( size_t size ) : positions( new Key[size] ) {
|
||||
memset(positions, 0, size*sizeof(Key));
|
||||
}
|
||||
|
||||
~ArrayStorage() {
|
||||
delete[] positions;
|
||||
}
|
||||
|
||||
Key &operator[]( NodeID node ) {
|
||||
return positions[node];
|
||||
}
|
||||
|
||||
void Clear() {}
|
||||
|
||||
private:
|
||||
Key* positions;
|
||||
};
|
||||
|
||||
template< typename NodeID, typename Key >
|
||||
class MapStorage {
|
||||
public:
|
||||
|
||||
MapStorage( size_t ) {}
|
||||
|
||||
Key &operator[]( NodeID node ) {
|
||||
return nodes[node];
|
||||
}
|
||||
|
||||
void Clear() {
|
||||
nodes.clear();
|
||||
}
|
||||
|
||||
private:
|
||||
std::map< NodeID, Key > nodes;
|
||||
|
||||
};
|
||||
|
||||
template< typename NodeID, typename Key >
|
||||
class UnorderedMapStorage {
|
||||
typedef boost::unordered_map<NodeID, Key> UnorderedMapType;
|
||||
typedef typename UnorderedMapType::iterator UnorderedMapIterator;
|
||||
typedef typename UnorderedMapType::const_iterator UnorderedMapConstIterator;
|
||||
public:
|
||||
|
||||
UnorderedMapStorage( size_t ) {
|
||||
//hash table gets 1000 Buckets
|
||||
nodes.rehash(1000);
|
||||
}
|
||||
|
||||
Key & operator[]( const NodeID node ) {
|
||||
return nodes[node];
|
||||
}
|
||||
|
||||
void Clear() {
|
||||
nodes.clear();
|
||||
}
|
||||
|
||||
private:
|
||||
boost::unordered_map< NodeID, Key > nodes;
|
||||
};
|
||||
|
||||
template<
|
||||
typename NodeID,
|
||||
typename Key,
|
||||
typename Weight,
|
||||
typename Data,
|
||||
typename IndexStorage = ArrayStorage<NodeID, NodeID>
|
||||
>
|
||||
class BinaryHeap {
|
||||
private:
|
||||
BinaryHeap( const BinaryHeap& right );
|
||||
void operator=( const BinaryHeap& right );
|
||||
public:
|
||||
typedef Weight WeightType;
|
||||
typedef Data DataType;
|
||||
|
||||
BinaryHeap( size_t maxID )
|
||||
:
|
||||
nodeIndex( maxID )
|
||||
{
|
||||
Clear();
|
||||
}
|
||||
|
||||
void Clear() {
|
||||
heap.resize( 1 );
|
||||
insertedNodes.clear();
|
||||
heap[0].weight = std::numeric_limits< Weight >::min();
|
||||
nodeIndex.Clear();
|
||||
}
|
||||
|
||||
Key Size() const {
|
||||
return static_cast<Key>( heap.size() - 1 );
|
||||
}
|
||||
|
||||
bool Empty() const {
|
||||
return 0 == Size();
|
||||
}
|
||||
|
||||
void Insert( NodeID node, Weight weight, const Data &data ) {
|
||||
HeapElement element;
|
||||
element.index = static_cast<NodeID>(insertedNodes.size());
|
||||
element.weight = weight;
|
||||
const Key key = static_cast<Key>(heap.size());
|
||||
heap.push_back( element );
|
||||
insertedNodes.push_back( HeapNode( node, key, weight, data ) );
|
||||
nodeIndex[node] = element.index;
|
||||
Upheap( key );
|
||||
CheckHeap();
|
||||
}
|
||||
|
||||
Data& GetData( NodeID node ) {
|
||||
const Key index = nodeIndex[node];
|
||||
return insertedNodes[index].data;
|
||||
}
|
||||
|
||||
Weight& GetKey( NodeID node ) {
|
||||
const Key index = nodeIndex[node];
|
||||
return insertedNodes[index].weight;
|
||||
}
|
||||
|
||||
bool WasRemoved( const NodeID node ) {
|
||||
BOOST_ASSERT( WasInserted( node ) );
|
||||
const Key index = nodeIndex[node];
|
||||
return insertedNodes[index].key == 0;
|
||||
}
|
||||
|
||||
bool WasInserted( const NodeID node ) {
|
||||
const Key index = nodeIndex[node];
|
||||
if ( index >= static_cast<Key> (insertedNodes.size()) )
|
||||
return false;
|
||||
return insertedNodes[index].node == node;
|
||||
}
|
||||
|
||||
NodeID Min() const {
|
||||
BOOST_ASSERT( heap.size() > 1 );
|
||||
return insertedNodes[heap[1].index].node;
|
||||
}
|
||||
|
||||
NodeID DeleteMin() {
|
||||
BOOST_ASSERT( heap.size() > 1 );
|
||||
const Key removedIndex = heap[1].index;
|
||||
heap[1] = heap[heap.size()-1];
|
||||
heap.pop_back();
|
||||
if ( heap.size() > 1 )
|
||||
Downheap( 1 );
|
||||
insertedNodes[removedIndex].key = 0;
|
||||
CheckHeap();
|
||||
return insertedNodes[removedIndex].node;
|
||||
}
|
||||
|
||||
void DeleteAll() {
|
||||
for ( typename std::vector< HeapElement >::iterator i = heap.begin() + 1, iend = heap.end(); i != iend; ++i )
|
||||
insertedNodes[i->index].key = 0;
|
||||
heap.resize( 1 );
|
||||
heap[0].weight = (std::numeric_limits< Weight >::min)();
|
||||
}
|
||||
|
||||
void DecreaseKey( NodeID node, Weight weight ) {
|
||||
BOOST_ASSERT( UINT_MAX != node );
|
||||
const Key & index = nodeIndex[node];
|
||||
Key & key = insertedNodes[index].key;
|
||||
BOOST_ASSERT ( key >= 0 );
|
||||
|
||||
insertedNodes[index].weight = weight;
|
||||
heap[key].weight = weight;
|
||||
Upheap( key );
|
||||
CheckHeap();
|
||||
}
|
||||
|
||||
private:
|
||||
class HeapNode {
|
||||
public:
|
||||
HeapNode( NodeID n, Key k, Weight w, Data d )
|
||||
:
|
||||
node(n),
|
||||
key(k),
|
||||
weight(w),
|
||||
data(d)
|
||||
{ }
|
||||
|
||||
NodeID node;
|
||||
Key key;
|
||||
Weight weight;
|
||||
Data data;
|
||||
};
|
||||
struct HeapElement {
|
||||
Key index;
|
||||
Weight weight;
|
||||
};
|
||||
|
||||
std::vector< HeapNode > insertedNodes;
|
||||
std::vector< HeapElement > heap;
|
||||
IndexStorage nodeIndex;
|
||||
|
||||
void Downheap( Key key ) {
|
||||
const Key droppingIndex = heap[key].index;
|
||||
const Weight weight = heap[key].weight;
|
||||
Key nextKey = key << 1;
|
||||
while( nextKey < static_cast<Key>( heap.size() ) ){
|
||||
const Key nextKeyOther = nextKey + 1;
|
||||
if (
|
||||
( nextKeyOther < static_cast<Key>( heap.size() ) ) &&
|
||||
( heap[nextKey].weight > heap[nextKeyOther].weight )
|
||||
) {
|
||||
nextKey = nextKeyOther;
|
||||
}
|
||||
if ( weight <= heap[nextKey].weight ){
|
||||
break;
|
||||
}
|
||||
heap[key] = heap[nextKey];
|
||||
insertedNodes[heap[key].index].key = key;
|
||||
key = nextKey;
|
||||
nextKey <<= 1;
|
||||
}
|
||||
heap[key].index = droppingIndex;
|
||||
heap[key].weight = weight;
|
||||
insertedNodes[droppingIndex].key = key;
|
||||
}
|
||||
|
||||
void Upheap( Key key ) {
|
||||
const Key risingIndex = heap[key].index;
|
||||
const Weight weight = heap[key].weight;
|
||||
Key nextKey = key >> 1;
|
||||
while ( heap[nextKey].weight > weight ) {
|
||||
BOOST_ASSERT( nextKey != 0 );
|
||||
heap[key] = heap[nextKey];
|
||||
insertedNodes[heap[key].index].key = key;
|
||||
key = nextKey;
|
||||
nextKey >>= 1;
|
||||
}
|
||||
heap[key].index = risingIndex;
|
||||
heap[key].weight = weight;
|
||||
insertedNodes[risingIndex].key = key;
|
||||
}
|
||||
|
||||
void CheckHeap() {
|
||||
#ifndef NDEBUG
|
||||
for ( Key i = 2; i < (Key) heap.size(); ++i ) {
|
||||
BOOST_ASSERT( heap[i].weight >= heap[i >> 1].weight );
|
||||
}
|
||||
#endif
|
||||
}
|
||||
};
|
||||
|
||||
#endif //BINARY_HEAP_H
|
||||
99
DataStructures/ConcurrentQueue.h
Normal file
99
DataStructures/ConcurrentQueue.h
Normal file
@ -0,0 +1,99 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef CONCURRENTQUEUE_H_
|
||||
#define CONCURRENTQUEUE_H_
|
||||
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/bind.hpp>
|
||||
#include <boost/circular_buffer.hpp>
|
||||
#include <boost/thread/condition.hpp>
|
||||
#include <boost/thread/mutex.hpp>
|
||||
#include <boost/thread/thread.hpp>
|
||||
|
||||
template<typename Data>
|
||||
class ConcurrentQueue {
|
||||
|
||||
public:
|
||||
ConcurrentQueue(const size_t max_size) : m_internal_queue(max_size) { }
|
||||
|
||||
inline void push(const Data & data) {
|
||||
boost::mutex::scoped_lock lock(m_mutex);
|
||||
m_not_full.wait(
|
||||
lock,
|
||||
boost::bind(&ConcurrentQueue<Data>::is_not_full, this)
|
||||
);
|
||||
m_internal_queue.push_back(data);
|
||||
lock.unlock();
|
||||
m_not_empty.notify_one();
|
||||
}
|
||||
|
||||
inline bool empty() const {
|
||||
return m_internal_queue.empty();
|
||||
}
|
||||
|
||||
inline void wait_and_pop(Data & popped_value) {
|
||||
boost::mutex::scoped_lock lock(m_mutex);
|
||||
m_not_empty.wait(
|
||||
lock,
|
||||
boost::bind(&ConcurrentQueue<Data>::is_not_empty, this)
|
||||
);
|
||||
popped_value = m_internal_queue.front();
|
||||
m_internal_queue.pop_front();
|
||||
lock.unlock();
|
||||
m_not_full.notify_one();
|
||||
}
|
||||
|
||||
inline bool try_pop(Data& popped_value) {
|
||||
boost::mutex::scoped_lock lock(m_mutex);
|
||||
if(m_internal_queue.empty()) {
|
||||
return false;
|
||||
}
|
||||
popped_value=m_internal_queue.front();
|
||||
m_internal_queue.pop_front();
|
||||
lock.unlock();
|
||||
m_not_full.notify_one();
|
||||
return true;
|
||||
}
|
||||
|
||||
private:
|
||||
inline bool is_not_empty() const {
|
||||
return !m_internal_queue.empty();
|
||||
}
|
||||
|
||||
inline bool is_not_full() const {
|
||||
return m_internal_queue.size() < m_internal_queue.capacity();
|
||||
}
|
||||
|
||||
boost::circular_buffer<Data> m_internal_queue;
|
||||
boost::mutex m_mutex;
|
||||
boost::condition m_not_empty;
|
||||
boost::condition m_not_full;
|
||||
};
|
||||
|
||||
#endif /* CONCURRENTQUEUE_H_ */
|
||||
162
DataStructures/Coordinate.cpp
Normal file
162
DataStructures/Coordinate.cpp
Normal file
@ -0,0 +1,162 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
#include "../Util/StringUtil.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <cmath>
|
||||
#include <climits>
|
||||
|
||||
FixedPointCoordinate::FixedPointCoordinate()
|
||||
: lat(INT_MIN),
|
||||
lon(INT_MIN)
|
||||
{ }
|
||||
|
||||
FixedPointCoordinate::FixedPointCoordinate(int lat, int lon)
|
||||
: lat(lat),
|
||||
lon(lon)
|
||||
{ }
|
||||
|
||||
void FixedPointCoordinate::Reset() {
|
||||
lat = INT_MIN;
|
||||
lon = INT_MIN;
|
||||
}
|
||||
bool FixedPointCoordinate::isSet() const {
|
||||
return (INT_MIN != lat) && (INT_MIN != lon);
|
||||
}
|
||||
bool FixedPointCoordinate::isValid() const {
|
||||
if(
|
||||
lat > 90*COORDINATE_PRECISION ||
|
||||
lat < -90*COORDINATE_PRECISION ||
|
||||
lon > 180*COORDINATE_PRECISION ||
|
||||
lon < -180*COORDINATE_PRECISION
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool FixedPointCoordinate::operator==(const FixedPointCoordinate & other) const {
|
||||
return lat == other.lat && lon == other.lon;
|
||||
}
|
||||
|
||||
double FixedPointCoordinate::ApproximateDistance(
|
||||
const int lat1,
|
||||
const int lon1,
|
||||
const int lat2,
|
||||
const int lon2
|
||||
) {
|
||||
BOOST_ASSERT(lat1 != INT_MIN);
|
||||
BOOST_ASSERT(lon1 != INT_MIN);
|
||||
BOOST_ASSERT(lat2 != INT_MIN);
|
||||
BOOST_ASSERT(lon2 != INT_MIN);
|
||||
double RAD = 0.017453292519943295769236907684886;
|
||||
double lt1 = lat1/COORDINATE_PRECISION;
|
||||
double ln1 = lon1/COORDINATE_PRECISION;
|
||||
double lt2 = lat2/COORDINATE_PRECISION;
|
||||
double ln2 = lon2/COORDINATE_PRECISION;
|
||||
double dlat1=lt1*(RAD);
|
||||
|
||||
double dlong1=ln1*(RAD);
|
||||
double dlat2=lt2*(RAD);
|
||||
double dlong2=ln2*(RAD);
|
||||
|
||||
double dLong=dlong1-dlong2;
|
||||
double dLat=dlat1-dlat2;
|
||||
|
||||
double aHarv= pow(sin(dLat/2.0),2.0)+cos(dlat1)*cos(dlat2)*pow(sin(dLong/2.),2);
|
||||
double cHarv=2.*atan2(sqrt(aHarv),sqrt(1.0-aHarv));
|
||||
//earth radius varies between 6,356.750-6,378.135 km (3,949.901-3,963.189mi)
|
||||
//The IUGG value for the equatorial radius is 6378.137 km (3963.19 miles)
|
||||
const double earth=6372797.560856;
|
||||
double distance=earth*cHarv;
|
||||
return distance;
|
||||
}
|
||||
|
||||
double FixedPointCoordinate::ApproximateDistance(
|
||||
const FixedPointCoordinate &c1,
|
||||
const FixedPointCoordinate &c2
|
||||
) {
|
||||
return ApproximateDistance( c1.lat, c1.lon, c2.lat, c2.lon );
|
||||
}
|
||||
|
||||
double FixedPointCoordinate::ApproximateEuclideanDistance(
|
||||
const FixedPointCoordinate &c1,
|
||||
const FixedPointCoordinate &c2
|
||||
) {
|
||||
BOOST_ASSERT(c1.lat != INT_MIN);
|
||||
BOOST_ASSERT(c1.lon != INT_MIN);
|
||||
BOOST_ASSERT(c2.lat != INT_MIN);
|
||||
BOOST_ASSERT(c2.lon != INT_MIN);
|
||||
const double RAD = 0.017453292519943295769236907684886;
|
||||
const double lat1 = (c1.lat/COORDINATE_PRECISION)*RAD;
|
||||
const double lon1 = (c1.lon/COORDINATE_PRECISION)*RAD;
|
||||
const double lat2 = (c2.lat/COORDINATE_PRECISION)*RAD;
|
||||
const double lon2 = (c2.lon/COORDINATE_PRECISION)*RAD;
|
||||
|
||||
const double x = (lon2-lon1) * cos((lat1+lat2)/2.);
|
||||
const double y = (lat2-lat1);
|
||||
const double earthRadius = 6372797.560856;
|
||||
const double d = sqrt(x*x + y*y) * earthRadius;
|
||||
return d;
|
||||
}
|
||||
|
||||
void FixedPointCoordinate::convertInternalLatLonToString(
|
||||
const int value,
|
||||
std::string & output
|
||||
) {
|
||||
char buffer[100];
|
||||
buffer[11] = 0; // zero termination
|
||||
char* string = printInt< 11, 6 >( buffer, value );
|
||||
output = string;
|
||||
}
|
||||
|
||||
void FixedPointCoordinate::convertInternalCoordinateToString(
|
||||
const FixedPointCoordinate & coord,
|
||||
std::string & output
|
||||
) {
|
||||
std::string tmp;
|
||||
tmp.reserve(23);
|
||||
convertInternalLatLonToString(coord.lon, tmp);
|
||||
output = tmp;
|
||||
output += ",";
|
||||
convertInternalLatLonToString(coord.lat, tmp);
|
||||
output += tmp;
|
||||
}
|
||||
|
||||
void FixedPointCoordinate::convertInternalReversedCoordinateToString(
|
||||
const FixedPointCoordinate & coord,
|
||||
std::string & output
|
||||
) {
|
||||
std::string tmp;
|
||||
tmp.reserve(23);
|
||||
convertInternalLatLonToString(coord.lat, tmp);
|
||||
output = tmp;
|
||||
output += ",";
|
||||
convertInternalLatLonToString(coord.lon, tmp);
|
||||
output += tmp;
|
||||
}
|
||||
324
DataStructures/DeallocatingVector.h
Normal file
324
DataStructures/DeallocatingVector.h
Normal file
@ -0,0 +1,324 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef DEALLOCATINGVECTOR_H_
|
||||
#define DEALLOCATINGVECTOR_H_
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <cstring>
|
||||
#include <vector>
|
||||
|
||||
#if __cplusplus > 199711L
|
||||
#define DEALLOCATION_VECTOR_NULL_PTR nullptr
|
||||
#else
|
||||
#define DEALLOCATION_VECTOR_NULL_PTR NULL
|
||||
#endif
|
||||
|
||||
|
||||
template<typename ElementT, std::size_t bucketSizeC = 8388608/sizeof(ElementT), bool DeallocateC = false>
|
||||
class DeallocatingVectorIterator : public std::iterator<std::random_access_iterator_tag, ElementT> {
|
||||
protected:
|
||||
|
||||
class DeallocatingVectorIteratorState {
|
||||
private:
|
||||
//make constructors explicit, so we do not mix random access and deallocation iterators.
|
||||
DeallocatingVectorIteratorState();
|
||||
public:
|
||||
explicit DeallocatingVectorIteratorState(const DeallocatingVectorIteratorState &r) : /*mData(r.mData),*/ mIndex(r.mIndex), mBucketList(r.mBucketList) {}
|
||||
explicit DeallocatingVectorIteratorState(const std::size_t idx, std::vector<ElementT *> & input_list) : /*mData(DEALLOCATION_VECTOR_NULL_PTR),*/ mIndex(idx), mBucketList(input_list) {
|
||||
}
|
||||
std::size_t mIndex;
|
||||
std::vector<ElementT *> & mBucketList;
|
||||
|
||||
inline bool operator!=(const DeallocatingVectorIteratorState &other) {
|
||||
return mIndex != other.mIndex;
|
||||
}
|
||||
|
||||
inline bool operator==(const DeallocatingVectorIteratorState &other) {
|
||||
return mIndex == other.mIndex;
|
||||
}
|
||||
|
||||
bool operator<(const DeallocatingVectorIteratorState &other) const {
|
||||
return mIndex < other.mIndex;
|
||||
}
|
||||
|
||||
bool operator>(const DeallocatingVectorIteratorState &other) const {
|
||||
return mIndex > other.mIndex;
|
||||
}
|
||||
|
||||
bool operator>=(const DeallocatingVectorIteratorState &other) const {
|
||||
return mIndex >= other.mIndex;
|
||||
}
|
||||
|
||||
//This is a hack to make assignment operator possible with reference member
|
||||
inline DeallocatingVectorIteratorState& operator= (const DeallocatingVectorIteratorState &a) {
|
||||
if (this != &a) {
|
||||
this->DeallocatingVectorIteratorState::~DeallocatingVectorIteratorState(); // explicit non-virtual destructor
|
||||
new (this) DeallocatingVectorIteratorState(a); // placement new
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
};
|
||||
|
||||
DeallocatingVectorIteratorState mState;
|
||||
|
||||
public:
|
||||
typedef std::random_access_iterator_tag iterator_category;
|
||||
typedef typename std::iterator<std::random_access_iterator_tag, ElementT>::value_type value_type;
|
||||
typedef typename std::iterator<std::random_access_iterator_tag, ElementT>::difference_type difference_type;
|
||||
typedef typename std::iterator<std::random_access_iterator_tag, ElementT>::reference reference;
|
||||
typedef typename std::iterator<std::random_access_iterator_tag, ElementT>::pointer pointer;
|
||||
|
||||
DeallocatingVectorIterator() {}
|
||||
|
||||
template<typename T2>
|
||||
DeallocatingVectorIterator(const DeallocatingVectorIterator<T2> & r) : mState(r.mState) {}
|
||||
|
||||
DeallocatingVectorIterator(std::size_t idx, std::vector<ElementT *> & input_list) : mState(idx, input_list) {}
|
||||
DeallocatingVectorIterator(const DeallocatingVectorIteratorState & r) : mState(r) {}
|
||||
|
||||
template<typename T2>
|
||||
DeallocatingVectorIterator& operator=(const DeallocatingVectorIterator<T2> &r) {
|
||||
if(DeallocateC) BOOST_ASSERT(false);
|
||||
mState = r.mState; return *this;
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator& operator++() { //prefix
|
||||
++mState.mIndex;
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator& operator--() { //prefix
|
||||
if(DeallocateC) BOOST_ASSERT(false);
|
||||
--mState.mIndex;
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator operator++(int) { //postfix
|
||||
DeallocatingVectorIteratorState _myState(mState);
|
||||
mState.mIndex++;
|
||||
return DeallocatingVectorIterator(_myState);
|
||||
}
|
||||
inline DeallocatingVectorIterator operator--(int) { //postfix
|
||||
if(DeallocateC) BOOST_ASSERT(false);
|
||||
DeallocatingVectorIteratorState _myState(mState);
|
||||
mState.mIndex--;
|
||||
return DeallocatingVectorIterator(_myState);
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator operator+(const difference_type& n) const {
|
||||
DeallocatingVectorIteratorState _myState(mState);
|
||||
_myState.mIndex+=n;
|
||||
return DeallocatingVectorIterator(_myState);
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator& operator+=(const difference_type& n) {
|
||||
mState.mIndex+=n; return *this;
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator operator-(const difference_type& n) const {
|
||||
if(DeallocateC) BOOST_ASSERT(false);
|
||||
DeallocatingVectorIteratorState _myState(mState);
|
||||
_myState.mIndex-=n;
|
||||
return DeallocatingVectorIterator(_myState);
|
||||
}
|
||||
|
||||
inline DeallocatingVectorIterator& operator-=(const difference_type &n) const {
|
||||
if(DeallocateC) BOOST_ASSERT(false);
|
||||
mState.mIndex-=n; return *this;
|
||||
}
|
||||
|
||||
inline reference operator*() const {
|
||||
std::size_t _bucket = mState.mIndex/bucketSizeC;
|
||||
std::size_t _index = mState.mIndex%bucketSizeC;
|
||||
return (mState.mBucketList[_bucket][_index]);
|
||||
}
|
||||
|
||||
inline pointer operator->() const {
|
||||
std::size_t _bucket = mState.mIndex/bucketSizeC;
|
||||
std::size_t _index = mState.mIndex%bucketSizeC;
|
||||
return &(mState.mBucketList[_bucket][_index]);
|
||||
}
|
||||
|
||||
inline bool operator!=(const DeallocatingVectorIterator & other) {
|
||||
return mState != other.mState;
|
||||
}
|
||||
|
||||
inline bool operator==(const DeallocatingVectorIterator & other) {
|
||||
return mState == other.mState;
|
||||
}
|
||||
|
||||
inline bool operator<(const DeallocatingVectorIterator & other) const {
|
||||
return mState < other.mState;
|
||||
}
|
||||
|
||||
inline bool operator>(const DeallocatingVectorIterator & other) const {
|
||||
return mState > other.mState;
|
||||
}
|
||||
|
||||
inline bool operator>=(const DeallocatingVectorIterator & other) const {
|
||||
return mState >= other.mState;
|
||||
}
|
||||
|
||||
difference_type operator-(const DeallocatingVectorIterator & other) {
|
||||
if(DeallocateC) BOOST_ASSERT(false);
|
||||
return mState.mIndex-other.mState.mIndex;
|
||||
}
|
||||
};
|
||||
|
||||
template<typename ElementT, std::size_t bucketSizeC = 8388608/sizeof(ElementT) >
|
||||
class DeallocatingVector {
|
||||
private:
|
||||
std::size_t mCurrentSize;
|
||||
std::vector<ElementT *> mBucketList;
|
||||
|
||||
public:
|
||||
typedef ElementT value_type;
|
||||
typedef DeallocatingVectorIterator<ElementT, bucketSizeC, false> iterator;
|
||||
typedef DeallocatingVectorIterator<ElementT, bucketSizeC, false> const_iterator;
|
||||
|
||||
//this iterator deallocates all buckets that have been visited. Iterators to visited objects become invalid.
|
||||
typedef DeallocatingVectorIterator<ElementT, bucketSizeC, true> deallocation_iterator;
|
||||
|
||||
DeallocatingVector() : mCurrentSize(0) {
|
||||
//initial bucket
|
||||
mBucketList.push_back(new ElementT[bucketSizeC]);
|
||||
}
|
||||
|
||||
~DeallocatingVector() {
|
||||
clear();
|
||||
}
|
||||
|
||||
inline void swap(DeallocatingVector<ElementT, bucketSizeC> & other) {
|
||||
std::swap(mCurrentSize, other.mCurrentSize);
|
||||
mBucketList.swap(other.mBucketList);
|
||||
}
|
||||
|
||||
inline void clear() {
|
||||
//Delete[]'ing ptr's to all Buckets
|
||||
for(unsigned i = 0; i < mBucketList.size(); ++i) {
|
||||
if(DEALLOCATION_VECTOR_NULL_PTR != mBucketList[i]) {
|
||||
delete[] mBucketList[i];
|
||||
mBucketList[i] = DEALLOCATION_VECTOR_NULL_PTR;
|
||||
}
|
||||
}
|
||||
//Removing all ptrs from vector
|
||||
std::vector<ElementT *>().swap(mBucketList);
|
||||
mCurrentSize = 0;
|
||||
}
|
||||
|
||||
inline void push_back(const ElementT & element) {
|
||||
std::size_t _capacity = capacity();
|
||||
if(mCurrentSize == _capacity) {
|
||||
mBucketList.push_back(new ElementT[bucketSizeC]);
|
||||
}
|
||||
|
||||
std::size_t _index = size()%bucketSizeC;
|
||||
mBucketList.back()[_index] = element;
|
||||
++mCurrentSize;
|
||||
}
|
||||
|
||||
inline void reserve(const std::size_t) const {
|
||||
//don't do anything
|
||||
}
|
||||
|
||||
inline void resize(const std::size_t new_size) {
|
||||
if(new_size > mCurrentSize) {
|
||||
while(capacity() < new_size) {
|
||||
mBucketList.push_back(new ElementT[bucketSizeC]);
|
||||
}
|
||||
mCurrentSize = new_size;
|
||||
}
|
||||
if(new_size < mCurrentSize) {
|
||||
std::size_t number_of_necessary_buckets = 1+(new_size / bucketSizeC);
|
||||
|
||||
for(unsigned i = number_of_necessary_buckets; i < mBucketList.size(); ++i) {
|
||||
delete[] mBucketList[i];
|
||||
}
|
||||
mBucketList.resize(number_of_necessary_buckets);
|
||||
mCurrentSize = new_size;
|
||||
}
|
||||
}
|
||||
|
||||
inline std::size_t size() const {
|
||||
return mCurrentSize;
|
||||
}
|
||||
|
||||
inline std::size_t capacity() const {
|
||||
return mBucketList.size() * bucketSizeC;
|
||||
}
|
||||
|
||||
inline iterator begin() {
|
||||
return iterator(static_cast<std::size_t>(0), mBucketList);
|
||||
}
|
||||
|
||||
inline iterator end() {
|
||||
return iterator(size(), mBucketList);
|
||||
}
|
||||
|
||||
inline deallocation_iterator dbegin() {
|
||||
return deallocation_iterator(static_cast<std::size_t>(0), mBucketList);
|
||||
}
|
||||
|
||||
inline deallocation_iterator dend() {
|
||||
return deallocation_iterator(size(), mBucketList);
|
||||
}
|
||||
|
||||
inline const_iterator begin() const {
|
||||
return const_iterator(static_cast<std::size_t>(0), mBucketList);
|
||||
}
|
||||
|
||||
inline const_iterator end() const {
|
||||
return const_iterator(size(), mBucketList);
|
||||
}
|
||||
|
||||
inline ElementT & operator[](const std::size_t index) {
|
||||
std::size_t _bucket = index / bucketSizeC;
|
||||
std::size_t _index = index % bucketSizeC;
|
||||
return (mBucketList[_bucket][_index]);
|
||||
}
|
||||
|
||||
const inline ElementT & operator[](const std::size_t index) const {
|
||||
std::size_t _bucket = index / bucketSizeC;
|
||||
std::size_t _index = index % bucketSizeC;
|
||||
return (mBucketList[_bucket][_index]);
|
||||
}
|
||||
|
||||
inline ElementT & back() {
|
||||
std::size_t _bucket = mCurrentSize / bucketSizeC;
|
||||
std::size_t _index = mCurrentSize % bucketSizeC;
|
||||
return (mBucketList[_bucket][_index]);
|
||||
}
|
||||
|
||||
const inline ElementT & back() const {
|
||||
std::size_t _bucket = mCurrentSize / bucketSizeC;
|
||||
std::size_t _index = mCurrentSize % bucketSizeC;
|
||||
return (mBucketList[_bucket][_index]);
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* DEALLOCATINGVECTOR_H_ */
|
||||
247
DataStructures/DynamicGraph.h
Normal file
247
DataStructures/DynamicGraph.h
Normal file
@ -0,0 +1,247 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef DYNAMICGRAPH_H_INCLUDED
|
||||
#define DYNAMICGRAPH_H_INCLUDED
|
||||
|
||||
#include "../DataStructures/DeallocatingVector.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/integer.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
template< typename EdgeDataT>
|
||||
class DynamicGraph {
|
||||
public:
|
||||
typedef EdgeDataT EdgeData;
|
||||
typedef unsigned NodeIterator;
|
||||
typedef unsigned EdgeIterator;
|
||||
|
||||
class InputEdge {
|
||||
public:
|
||||
NodeIterator source;
|
||||
NodeIterator target;
|
||||
EdgeDataT data;
|
||||
bool operator<( const InputEdge& right ) const {
|
||||
if ( source != right.source )
|
||||
return source < right.source;
|
||||
return target < right.target;
|
||||
}
|
||||
};
|
||||
|
||||
//Constructs an empty graph with a given number of nodes.
|
||||
DynamicGraph( int32_t nodes ) : m_numNodes(nodes), m_numEdges(0) {
|
||||
m_nodes.reserve( m_numNodes );
|
||||
m_nodes.resize( m_numNodes );
|
||||
|
||||
m_edges.reserve( m_numNodes * 1.1 );
|
||||
m_edges.resize( m_numNodes );
|
||||
}
|
||||
|
||||
template<class ContainerT>
|
||||
DynamicGraph( const int32_t nodes, const ContainerT &graph ) {
|
||||
m_numNodes = nodes;
|
||||
m_numEdges = ( EdgeIterator ) graph.size();
|
||||
m_nodes.reserve( m_numNodes +1);
|
||||
m_nodes.resize( m_numNodes +1);
|
||||
EdgeIterator edge = 0;
|
||||
EdgeIterator position = 0;
|
||||
for ( NodeIterator node = 0; node < m_numNodes; ++node ) {
|
||||
EdgeIterator lastEdge = edge;
|
||||
while ( edge < m_numEdges && graph[edge].source == node ) {
|
||||
++edge;
|
||||
}
|
||||
m_nodes[node].firstEdge = position;
|
||||
m_nodes[node].edges = edge - lastEdge;
|
||||
position += m_nodes[node].edges;
|
||||
}
|
||||
m_nodes.back().firstEdge = position;
|
||||
m_edges.reserve( position * 1.1 );
|
||||
m_edges.resize( position );
|
||||
edge = 0;
|
||||
for ( NodeIterator node = 0; node < m_numNodes; ++node ) {
|
||||
for ( EdgeIterator i = m_nodes[node].firstEdge, e = m_nodes[node].firstEdge + m_nodes[node].edges; i != e; ++i ) {
|
||||
m_edges[i].target = graph[edge].target;
|
||||
m_edges[i].data = graph[edge].data;
|
||||
BOOST_ASSERT_MSG(
|
||||
graph[edge].data.distance > 0,
|
||||
"edge distance invalid"
|
||||
);
|
||||
++edge;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
~DynamicGraph(){ }
|
||||
|
||||
unsigned GetNumberOfNodes() const {
|
||||
return m_numNodes;
|
||||
}
|
||||
|
||||
unsigned GetNumberOfEdges() const {
|
||||
return m_numEdges;
|
||||
}
|
||||
|
||||
unsigned GetOutDegree( const NodeIterator n ) const {
|
||||
return m_nodes[n].edges;
|
||||
}
|
||||
|
||||
NodeIterator GetTarget( const EdgeIterator e ) const {
|
||||
return NodeIterator( m_edges[e].target );
|
||||
}
|
||||
|
||||
void SetTarget( const EdgeIterator e, const NodeIterator n ) {
|
||||
m_edges[e].target = n;
|
||||
}
|
||||
|
||||
EdgeDataT &GetEdgeData( const EdgeIterator e ) {
|
||||
return m_edges[e].data;
|
||||
}
|
||||
|
||||
const EdgeDataT &GetEdgeData( const EdgeIterator e ) const {
|
||||
return m_edges[e].data;
|
||||
}
|
||||
|
||||
EdgeIterator BeginEdges( const NodeIterator n ) const {
|
||||
return EdgeIterator( m_nodes[n].firstEdge );
|
||||
}
|
||||
|
||||
EdgeIterator EndEdges( const NodeIterator n ) const {
|
||||
return EdgeIterator( m_nodes[n].firstEdge + m_nodes[n].edges );
|
||||
}
|
||||
|
||||
//adds an edge. Invalidates edge iterators for the source node
|
||||
EdgeIterator InsertEdge( const NodeIterator from, const NodeIterator to, const EdgeDataT &data ) {
|
||||
Node &node = m_nodes[from];
|
||||
EdgeIterator newFirstEdge = node.edges + node.firstEdge;
|
||||
if ( newFirstEdge >= m_edges.size() || !isDummy( newFirstEdge ) ) {
|
||||
if ( node.firstEdge != 0 && isDummy( node.firstEdge - 1 ) ) {
|
||||
node.firstEdge--;
|
||||
m_edges[node.firstEdge] = m_edges[node.firstEdge + node.edges];
|
||||
} else {
|
||||
EdgeIterator newFirstEdge = ( EdgeIterator ) m_edges.size();
|
||||
unsigned newSize = node.edges * 1.1 + 2;
|
||||
EdgeIterator requiredCapacity = newSize + m_edges.size();
|
||||
EdgeIterator oldCapacity = m_edges.capacity();
|
||||
if ( requiredCapacity >= oldCapacity ) {
|
||||
m_edges.reserve( requiredCapacity * 1.1 );
|
||||
}
|
||||
m_edges.resize( m_edges.size() + newSize );
|
||||
for ( EdgeIterator i = 0; i < node.edges; ++i ) {
|
||||
m_edges[newFirstEdge + i ] = m_edges[node.firstEdge + i];
|
||||
makeDummy( node.firstEdge + i );
|
||||
}
|
||||
for ( EdgeIterator i = node.edges + 1; i < newSize; ++i )
|
||||
makeDummy( newFirstEdge + i );
|
||||
node.firstEdge = newFirstEdge;
|
||||
}
|
||||
}
|
||||
Edge &edge = m_edges[node.firstEdge + node.edges];
|
||||
edge.target = to;
|
||||
edge.data = data;
|
||||
++m_numEdges;
|
||||
++node.edges;
|
||||
return EdgeIterator( node.firstEdge + node.edges );
|
||||
}
|
||||
|
||||
//removes an edge. Invalidates edge iterators for the source node
|
||||
void DeleteEdge( const NodeIterator source, const EdgeIterator e ) {
|
||||
Node &node = m_nodes[source];
|
||||
#pragma omp atomic
|
||||
--m_numEdges;
|
||||
--node.edges;
|
||||
BOOST_ASSERT(UINT_MAX != node.edges);
|
||||
const unsigned last = node.firstEdge + node.edges;
|
||||
BOOST_ASSERT( UINT_MAX != last);
|
||||
//swap with last edge
|
||||
m_edges[e] = m_edges[last];
|
||||
makeDummy( last );
|
||||
}
|
||||
|
||||
//removes all edges (source,target)
|
||||
int32_t DeleteEdgesTo( const NodeIterator source, const NodeIterator target ) {
|
||||
int32_t deleted = 0;
|
||||
for ( EdgeIterator i = BeginEdges( source ), iend = EndEdges( source ); i < iend - deleted; ++i ) {
|
||||
if ( m_edges[i].target == target ) {
|
||||
do {
|
||||
deleted++;
|
||||
m_edges[i] = m_edges[iend - deleted];
|
||||
makeDummy( iend - deleted );
|
||||
} while ( i < iend - deleted && m_edges[i].target == target );
|
||||
}
|
||||
}
|
||||
|
||||
#pragma omp atomic
|
||||
m_numEdges -= deleted;
|
||||
m_nodes[source].edges -= deleted;
|
||||
|
||||
return deleted;
|
||||
}
|
||||
|
||||
//searches for a specific edge
|
||||
EdgeIterator FindEdge( const NodeIterator from, const NodeIterator to ) const {
|
||||
for ( EdgeIterator i = BeginEdges( from ), iend = EndEdges( from ); i != iend; ++i ) {
|
||||
if ( to == m_edges[i].target ) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return EndEdges( from );
|
||||
}
|
||||
|
||||
protected:
|
||||
|
||||
bool isDummy( const EdgeIterator edge ) const {
|
||||
return m_edges[edge].target == (std::numeric_limits< NodeIterator >::max)();
|
||||
}
|
||||
|
||||
void makeDummy( const EdgeIterator edge ) {
|
||||
m_edges[edge].target = (std::numeric_limits< NodeIterator >::max)();
|
||||
}
|
||||
|
||||
struct Node {
|
||||
//index of the first edge
|
||||
EdgeIterator firstEdge;
|
||||
//amount of edges
|
||||
unsigned edges;
|
||||
};
|
||||
|
||||
struct Edge {
|
||||
NodeIterator target;
|
||||
EdgeDataT data;
|
||||
};
|
||||
|
||||
NodeIterator m_numNodes;
|
||||
EdgeIterator m_numEdges;
|
||||
|
||||
std::vector< Node > m_nodes;
|
||||
DeallocatingVector< Edge > m_edges;
|
||||
};
|
||||
|
||||
#endif // DYNAMICGRAPH_H_INCLUDED
|
||||
176
DataStructures/EdgeBasedNode.h
Normal file
176
DataStructures/EdgeBasedNode.h
Normal file
@ -0,0 +1,176 @@
|
||||
#ifndef EDGE_BASED_NODE_H
|
||||
#define EDGE_BASED_NODE_H
|
||||
|
||||
#include <cmath>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
#include "../Util/MercatorUtil.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
// An EdgeBasedNode represents a node in the edge-expanded graph.
|
||||
struct EdgeBasedNode {
|
||||
|
||||
EdgeBasedNode() :
|
||||
id(INT_MAX),
|
||||
lat1(INT_MAX),
|
||||
lat2(INT_MAX),
|
||||
lon1(INT_MAX),
|
||||
lon2(INT_MAX >> 1),
|
||||
belongsToTinyComponent(false),
|
||||
nameID(UINT_MAX),
|
||||
weight(UINT_MAX >> 1),
|
||||
ignoreInGrid(false)
|
||||
{ }
|
||||
|
||||
// Computes:
|
||||
// - the distance from the given query location to nearest point on this edge (and returns it)
|
||||
// - the location on this edge which is nearest to the query location
|
||||
// - the ratio ps:pq, where p and q are the end points of this edge, and s is the perpendicular foot of
|
||||
// the query location on the line defined by p and q.
|
||||
double ComputePerpendicularDistance(
|
||||
const FixedPointCoordinate& query_location,
|
||||
FixedPointCoordinate & nearest_location,
|
||||
double & ratio,
|
||||
double precision = COORDINATE_PRECISION
|
||||
) const {
|
||||
BOOST_ASSERT( query_location.isValid() );
|
||||
|
||||
const double epsilon = 1.0/precision;
|
||||
|
||||
if( ignoreInGrid ) {
|
||||
return std::numeric_limits<double>::max();
|
||||
}
|
||||
|
||||
// p, q : the end points of the underlying edge
|
||||
const Point p(lat2y(lat1/COORDINATE_PRECISION), lon1/COORDINATE_PRECISION);
|
||||
const Point q(lat2y(lat2/COORDINATE_PRECISION), lon2/COORDINATE_PRECISION);
|
||||
|
||||
// r : query location
|
||||
const Point r(lat2y(query_location.lat/COORDINATE_PRECISION),
|
||||
query_location.lon/COORDINATE_PRECISION);
|
||||
|
||||
const Point foot = ComputePerpendicularFoot(p, q, r, epsilon);
|
||||
ratio = ComputeRatio(p, q, foot, epsilon);
|
||||
|
||||
BOOST_ASSERT( !std::isnan(ratio) );
|
||||
|
||||
nearest_location = ComputeNearestPointOnSegment(foot, ratio);
|
||||
|
||||
BOOST_ASSERT( nearest_location.isValid() );
|
||||
|
||||
// TODO: Replace with euclidean approximation when k-NN search is done
|
||||
// const double approximated_distance = FixedPointCoordinate::ApproximateEuclideanDistance(
|
||||
const double approximated_distance = FixedPointCoordinate::ApproximateDistance(query_location, nearest_location);
|
||||
|
||||
BOOST_ASSERT( 0.0 <= approximated_distance );
|
||||
return approximated_distance;
|
||||
}
|
||||
|
||||
bool operator<(const EdgeBasedNode & other) const {
|
||||
return other.id < id;
|
||||
}
|
||||
|
||||
bool operator==(const EdgeBasedNode & other) const {
|
||||
return id == other.id;
|
||||
}
|
||||
|
||||
// Returns the midpoint of the underlying edge.
|
||||
inline FixedPointCoordinate Centroid() const {
|
||||
return FixedPointCoordinate((lat1+lat2)/2, (lon1+lon2)/2);
|
||||
}
|
||||
|
||||
NodeID id;
|
||||
|
||||
// The coordinates of the end-points of the underlying edge.
|
||||
int lat1;
|
||||
int lat2;
|
||||
int lon1;
|
||||
int lon2:31;
|
||||
|
||||
bool belongsToTinyComponent:1;
|
||||
NodeID nameID;
|
||||
|
||||
// The weight of the underlying edge.
|
||||
unsigned weight:31;
|
||||
|
||||
bool ignoreInGrid:1;
|
||||
|
||||
private:
|
||||
|
||||
typedef std::pair<double,double> Point;
|
||||
|
||||
// Compute the perpendicular foot of point r on the line defined by p and q.
|
||||
Point ComputePerpendicularFoot(const Point &p, const Point &q, const Point &r, double epsilon) const {
|
||||
|
||||
// the projection of r onto the line pq
|
||||
double foot_x, foot_y;
|
||||
|
||||
const bool is_parallel_to_y_axis = std::abs(q.first - p.first) < epsilon;
|
||||
|
||||
if( is_parallel_to_y_axis ) {
|
||||
foot_x = q.first;
|
||||
foot_y = r.second;
|
||||
} else {
|
||||
// the slope of the line through (a|b) and (c|d)
|
||||
const double m = (q.second - p.second) / (q.first - p.first);
|
||||
|
||||
// Projection of (x|y) onto the line joining (a|b) and (c|d).
|
||||
foot_x = ((r.first + (m*r.second)) + (m*m*p.first - m*p.second))/(1.0 + m*m);
|
||||
foot_y = p.second + m*(foot_x - p.first);
|
||||
}
|
||||
|
||||
return Point(foot_x, foot_y);
|
||||
}
|
||||
|
||||
// Compute the ratio of the line segment pr to line segment pq.
|
||||
double ComputeRatio(const Point & p, const Point & q, const Point & r, double epsilon) const {
|
||||
|
||||
const bool is_parallel_to_x_axis = std::abs(q.second-p.second) < epsilon;
|
||||
const bool is_parallel_to_y_axis = std::abs(q.first -p.first ) < epsilon;
|
||||
|
||||
double ratio;
|
||||
|
||||
if( !is_parallel_to_y_axis ) {
|
||||
ratio = (r.first - p.first)/(q.first - p.first);
|
||||
} else if( !is_parallel_to_x_axis ) {
|
||||
ratio = (r.second - p.second)/(q.second - p.second);
|
||||
} else {
|
||||
// (a|b) and (c|d) are essentially the same point
|
||||
// by convention, we set the ratio to 0 in this case
|
||||
//ratio = ((lat2 == query_location.lat) && (lon2 == query_location.lon)) ? 1. : 0.;
|
||||
ratio = 0.0;
|
||||
}
|
||||
|
||||
// Round to integer if the ratio is close to 0 or 1.
|
||||
if( std::abs(ratio) <= epsilon ) {
|
||||
ratio = 0.0;
|
||||
} else if( std::abs(ratio-1.0) <= epsilon ) {
|
||||
ratio = 1.0;
|
||||
}
|
||||
|
||||
return ratio;
|
||||
}
|
||||
|
||||
// Computes the point on the segment pq which is nearest to a point r = p + lambda * (q-p).
|
||||
// p and q are the end points of the underlying edge.
|
||||
FixedPointCoordinate ComputeNearestPointOnSegment(const Point & r, double lambda) const {
|
||||
|
||||
if( lambda <= 0.0 ) {
|
||||
return FixedPointCoordinate(lat1, lon1);
|
||||
} else if( lambda >= 1.0 ) {
|
||||
return FixedPointCoordinate(lat2, lon2);
|
||||
}
|
||||
|
||||
// r lies between p and q
|
||||
return FixedPointCoordinate(
|
||||
y2lat(r.first)*COORDINATE_PRECISION,
|
||||
r.second*COORDINATE_PRECISION
|
||||
);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
#endif //EDGE_BASED_NODE_H
|
||||
68
DataStructures/HashTable.h
Normal file
68
DataStructures/HashTable.h
Normal file
@ -0,0 +1,68 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef HASH_TABLE_H
|
||||
#define HASH_TABLE_H
|
||||
|
||||
#include <boost/ref.hpp>
|
||||
#include <boost/unordered_map.hpp>
|
||||
|
||||
template<typename KeyT, typename ValueT>
|
||||
class HashTable : public boost::unordered_map<KeyT, ValueT> {
|
||||
private:
|
||||
typedef boost::unordered_map<KeyT, ValueT> super;
|
||||
public:
|
||||
static ValueT default_value;
|
||||
|
||||
HashTable() : super() { }
|
||||
|
||||
HashTable(const unsigned size) : super(size) { }
|
||||
|
||||
inline void Add( KeyT const & key, ValueT const & value) {
|
||||
super::emplace(std::make_pair(key, value));
|
||||
}
|
||||
|
||||
inline const ValueT Find(KeyT const & key) const {
|
||||
typename super::const_iterator iter = super::find(key);
|
||||
if( iter == super::end() ) {
|
||||
return boost::cref(default_value);
|
||||
}
|
||||
return boost::cref(iter->second);
|
||||
}
|
||||
|
||||
inline bool Holds( KeyT const & key) const {
|
||||
if( super::find(key) == super::end() ) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
template<typename KeyT, typename ValueT>
|
||||
ValueT HashTable<KeyT, ValueT>::default_value;
|
||||
|
||||
#endif /* HASH_TABLE_H */
|
||||
88
DataStructures/HilbertValue.cpp
Normal file
88
DataStructures/HilbertValue.cpp
Normal file
@ -0,0 +1,88 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "HilbertValue.h"
|
||||
|
||||
uint64_t HilbertCode::operator() (
|
||||
const FixedPointCoordinate & current_coordinate
|
||||
) const {
|
||||
unsigned location[2];
|
||||
location[0] = current_coordinate.lat+( 90*COORDINATE_PRECISION);
|
||||
location[1] = current_coordinate.lon+(180*COORDINATE_PRECISION);
|
||||
|
||||
TransposeCoordinate(location);
|
||||
return BitInterleaving(location[0], location[1]);
|
||||
}
|
||||
|
||||
uint64_t HilbertCode::BitInterleaving(const uint32_t latitude, const uint32_t longitude) const
|
||||
{
|
||||
uint64_t result = 0;
|
||||
for(int8_t index = 31; index >= 0; --index){
|
||||
result |= (latitude >> index) & 1;
|
||||
result <<= 1;
|
||||
result |= (longitude >> index) & 1;
|
||||
if(0 != index){
|
||||
result <<= 1;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
void HilbertCode::TransposeCoordinate( uint32_t * X) const
|
||||
{
|
||||
uint32_t M = 1 << (32-1), P, Q, t;
|
||||
int i;
|
||||
// Inverse undo
|
||||
for( Q = M; Q > 1; Q >>= 1 ) {
|
||||
P=Q-1;
|
||||
for( i = 0; i < 2; ++i ) {
|
||||
|
||||
const bool condition = (X[i] & Q);
|
||||
if( condition ) {
|
||||
X[0] ^= P; // invert
|
||||
} else {
|
||||
t = (X[0]^X[i]) & P;
|
||||
X[0] ^= t;
|
||||
X[i] ^= t;
|
||||
}
|
||||
} // exchange
|
||||
}
|
||||
// Gray encode
|
||||
for( i = 1; i < 2; ++i ) {
|
||||
X[i] ^= X[i-1];
|
||||
}
|
||||
t=0;
|
||||
for( Q = M; Q > 1; Q >>= 1 ) {
|
||||
const bool condition = (X[2-1] & Q);
|
||||
if( condition ) {
|
||||
t ^= Q-1;
|
||||
}
|
||||
} //check if this for loop is wrong
|
||||
for( i = 0; i < 2; ++i ) {
|
||||
X[i] ^= t;
|
||||
}
|
||||
}
|
||||
@ -1,6 +1,6 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2017, Project OSRM contributors
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
@ -25,25 +25,26 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef OSRM_ENGINE_BEARING_HPP
|
||||
#define OSRM_ENGINE_BEARING_HPP
|
||||
#ifndef HILBERTVALUE_H_
|
||||
#define HILBERTVALUE_H_
|
||||
|
||||
namespace osrm::engine
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/integer.hpp>
|
||||
#include <boost/noncopyable.hpp>
|
||||
|
||||
// computes a 64 bit value that corresponds to the hilbert space filling curve
|
||||
|
||||
class HilbertCode : boost::noncopyable
|
||||
{
|
||||
|
||||
struct Bearing
|
||||
{
|
||||
short bearing;
|
||||
short range;
|
||||
|
||||
bool IsValid() const { return bearing >= 0 && bearing <= 360 && range >= 0 && range <= 180; }
|
||||
public:
|
||||
uint64_t operator()
|
||||
(
|
||||
const FixedPointCoordinate & current_coordinate
|
||||
) const;
|
||||
private:
|
||||
inline uint64_t BitInterleaving( const uint32_t a, const uint32_t b) const;
|
||||
inline void TransposeCoordinate( uint32_t * X) const;
|
||||
};
|
||||
|
||||
inline bool operator==(const Bearing lhs, const Bearing rhs)
|
||||
{
|
||||
return lhs.bearing == rhs.bearing && lhs.range == rhs.range;
|
||||
}
|
||||
inline bool operator!=(const Bearing lhs, const Bearing rhs) { return !(lhs == rhs); }
|
||||
} // namespace osrm::engine
|
||||
|
||||
#endif
|
||||
#endif /* HILBERTVALUE_H_ */
|
||||
185
DataStructures/ImportEdge.h
Normal file
185
DataStructures/ImportEdge.h
Normal file
@ -0,0 +1,185 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef IMPORT_EDGE_H
|
||||
#define IMPORT_EDGE_H
|
||||
|
||||
#include "../Util/OSRMException.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
class NodeBasedEdge {
|
||||
|
||||
public:
|
||||
bool operator< (const NodeBasedEdge& e) const {
|
||||
if (source() == e.source()) {
|
||||
if (target() == e.target()) {
|
||||
if (weight() == e.weight()) {
|
||||
return (isForward() && isBackward() &&
|
||||
((! e.isForward()) || (! e.isBackward())));
|
||||
}
|
||||
return (weight() < e.weight());
|
||||
}
|
||||
return (target() < e.target());
|
||||
}
|
||||
return (source() < e.source());
|
||||
}
|
||||
|
||||
explicit NodeBasedEdge(
|
||||
NodeID s,
|
||||
NodeID t,
|
||||
NodeID n,
|
||||
EdgeWeight w,
|
||||
bool f,
|
||||
bool b,
|
||||
short ty,
|
||||
bool ra,
|
||||
bool ig,
|
||||
bool ar,
|
||||
bool cf
|
||||
) : _source(s),
|
||||
_target(t),
|
||||
_name(n),
|
||||
_weight(w),
|
||||
_type(ty),
|
||||
forward(f),
|
||||
backward(b),
|
||||
_roundabout(ra),
|
||||
_ignoreInGrid(ig),
|
||||
_accessRestricted(ar),
|
||||
_contraFlow(cf)
|
||||
{
|
||||
if(ty < 0) {
|
||||
throw OSRMException("negative edge type");
|
||||
}
|
||||
}
|
||||
|
||||
NodeID target() const {return _target; }
|
||||
NodeID source() const {return _source; }
|
||||
NodeID name() const { return _name; }
|
||||
EdgeWeight weight() const {return _weight; }
|
||||
short type() const {
|
||||
BOOST_ASSERT_MSG(_type >= 0, "type of ImportEdge invalid");
|
||||
return _type; }
|
||||
bool isBackward() const { return backward; }
|
||||
bool isForward() const { return forward; }
|
||||
bool isLocatable() const { return _type != 14; }
|
||||
bool isRoundabout() const { return _roundabout; }
|
||||
bool ignoreInGrid() const { return _ignoreInGrid; }
|
||||
bool isAccessRestricted() const { return _accessRestricted; }
|
||||
bool isContraFlow() const { return _contraFlow; }
|
||||
|
||||
//TODO: names need to be fixed.
|
||||
NodeID _source;
|
||||
NodeID _target;
|
||||
NodeID _name;
|
||||
EdgeWeight _weight;
|
||||
short _type;
|
||||
bool forward:1;
|
||||
bool backward:1;
|
||||
bool _roundabout:1;
|
||||
bool _ignoreInGrid:1;
|
||||
bool _accessRestricted:1;
|
||||
bool _contraFlow:1;
|
||||
|
||||
private:
|
||||
NodeBasedEdge() { }
|
||||
};
|
||||
|
||||
class EdgeBasedEdge {
|
||||
|
||||
public:
|
||||
bool operator< (const EdgeBasedEdge& e) const {
|
||||
if (source() == e.source()) {
|
||||
if (target() == e.target()) {
|
||||
if (weight() == e.weight()) {
|
||||
return (isForward() && isBackward() &&
|
||||
((! e.isForward()) || (! e.isBackward())));
|
||||
}
|
||||
return (weight() < e.weight());
|
||||
}
|
||||
return (target() < e.target());
|
||||
}
|
||||
return (source() < e.source());
|
||||
}
|
||||
|
||||
template<class EdgeT>
|
||||
EdgeBasedEdge(const EdgeT & myEdge ) :
|
||||
m_source(myEdge.source),
|
||||
m_target(myEdge.target),
|
||||
m_edgeID(myEdge.data.via),
|
||||
m_weight(myEdge.data.distance),
|
||||
m_forward(myEdge.data.forward),
|
||||
m_backward(myEdge.data.backward)
|
||||
{ }
|
||||
|
||||
/** Default constructor. target and weight are set to 0.*/
|
||||
EdgeBasedEdge() :
|
||||
m_source(0),
|
||||
m_target(0),
|
||||
m_edgeID(0),
|
||||
m_weight(0),
|
||||
m_forward(false),
|
||||
m_backward(false)
|
||||
{ }
|
||||
|
||||
explicit EdgeBasedEdge(
|
||||
const NodeID s,
|
||||
const NodeID t,
|
||||
const NodeID v,
|
||||
const EdgeWeight w,
|
||||
const bool f,
|
||||
const bool b
|
||||
) :
|
||||
m_source(s),
|
||||
m_target(t),
|
||||
m_edgeID(v),
|
||||
m_weight(w),
|
||||
m_forward(f),
|
||||
m_backward(b)
|
||||
{}
|
||||
|
||||
NodeID target() const { return m_target; }
|
||||
NodeID source() const { return m_source; }
|
||||
EdgeWeight weight() const { return m_weight; }
|
||||
NodeID id() const { return m_edgeID; }
|
||||
bool isBackward() const { return m_backward; }
|
||||
bool isForward() const { return m_forward; }
|
||||
|
||||
private:
|
||||
NodeID m_source;
|
||||
NodeID m_target;
|
||||
NodeID m_edgeID;
|
||||
EdgeWeight m_weight:30;
|
||||
bool m_forward:1;
|
||||
bool m_backward:1;
|
||||
};
|
||||
|
||||
typedef NodeBasedEdge ImportEdge;
|
||||
|
||||
#endif /* IMPORT_EDGE_H */
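The comparison operator above gives NodeBasedEdge a strict ordering by source, then target, then weight, with a final tie-break on the direction flags, so an edge list can be sorted directly. A minimal sketch under that assumption follows; the argument values and the include path are illustrative only and are not taken from the diff.

// Hypothetical sketch, not part of the diff: sort ImportEdges with the
// ordering defined above. Assumes the repository include layout and the
// NodeID/EdgeWeight typedefs from ../typedefs.h.
#include "DataStructures/ImportEdge.h"

#include <algorithm>
#include <vector>

int main()
{
    std::vector<ImportEdge> edges;
    //                         src t  name w   fwd   bwd    type roundabout grid   restricted contraflow
    edges.push_back(ImportEdge(2,  3, 0,   10, true, false, 1,   false,     false, false,     false));
    edges.push_back(ImportEdge(1,  4, 0,   20, true, true,  1,   false,     false, false,     false));

    // After sorting, edges that share a source node are adjacent, which is
    // convenient when grouping edges by their source node.
    std::sort(edges.begin(), edges.end());
    return 0;
}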
77 DataStructures/ImportNode.h Normal file
@ -0,0 +1,77 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef IMPORTNODE_H_
|
||||
#define IMPORTNODE_H_
|
||||
|
||||
#include "QueryNode.h"
|
||||
#include "../DataStructures/HashTable.h"
|
||||
|
||||
|
||||
struct ExternalMemoryNode : NodeInfo {
|
||||
ExternalMemoryNode(
|
||||
int lat,
|
||||
int lon,
|
||||
unsigned int id,
|
||||
bool bollard,
|
||||
bool traffic_light
|
||||
) :
|
||||
NodeInfo(lat, lon, id),
|
||||
bollard(bollard),
|
||||
trafficLight(traffic_light)
|
||||
{ }
|
||||
ExternalMemoryNode() : bollard(false), trafficLight(false) {}
|
||||
|
||||
static ExternalMemoryNode min_value() {
|
||||
return ExternalMemoryNode(0,0,0, false, false);
|
||||
}
|
||||
static ExternalMemoryNode max_value() {
|
||||
return ExternalMemoryNode(
|
||||
std::numeric_limits<int>::max(),
|
||||
std::numeric_limits<int>::max(),
|
||||
std::numeric_limits<unsigned>::max(),
|
||||
false,
|
||||
false
|
||||
);
|
||||
}
|
||||
NodeID key() const {
|
||||
return id;
|
||||
}
|
||||
bool bollard;
|
||||
bool trafficLight;
|
||||
};
|
||||
|
||||
struct ImportNode : public ExternalMemoryNode {
|
||||
HashTable<std::string, std::string> keyVals;
|
||||
|
||||
inline void Clear() {
|
||||
keyVals.clear();
|
||||
lat = 0; lon = 0; id = 0; bollard = false; trafficLight = false;
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* IMPORTNODE_H_ */
102 DataStructures/InputReaderFactory.h Normal file
@ -0,0 +1,102 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
|
||||
#ifndef INPUTREADERFACTORY_H
|
||||
#define INPUTREADERFACTORY_H
|
||||
|
||||
#include <bzlib.h>
|
||||
#include <libxml/xmlreader.h>
|
||||
|
||||
struct BZ2Context {
|
||||
FILE* file;
|
||||
BZFILE* bz2;
|
||||
int error;
|
||||
int nUnused;
|
||||
char unused[BZ_MAX_UNUSED];
|
||||
};
|
||||
|
||||
int readFromBz2Stream( void* pointer, char* buffer, int len ) {
|
||||
void *unusedTmpVoid=NULL;
|
||||
char *unusedTmp=NULL;
|
||||
BZ2Context* context = (BZ2Context*) pointer;
|
||||
int read = 0;
|
||||
while(0 == read && !(BZ_STREAM_END == context->error && 0 == context->nUnused && feof(context->file))) {
|
||||
read = BZ2_bzRead(&context->error, context->bz2, buffer, len);
|
||||
if(BZ_OK == context->error) {
|
||||
return read;
|
||||
} else if(BZ_STREAM_END == context->error) {
|
||||
BZ2_bzReadGetUnused(&context->error, context->bz2, &unusedTmpVoid, &context->nUnused);
|
||||
if(BZ_OK != context->error) {std::cerr << "Could not BZ2_bzReadGetUnused" <<std::endl; exit(-1);};
|
||||
unusedTmp = (char*)unusedTmpVoid;
|
||||
for(int i=0;i<context->nUnused;i++) {
|
||||
context->unused[i] = unusedTmp[i];
|
||||
}
|
||||
BZ2_bzReadClose(&context->error, context->bz2);
|
||||
if(BZ_OK != context->error) {std::cerr << "Could not BZ2_bzReadClose" <<std::endl; exit(-1);};
|
||||
context->error = BZ_STREAM_END; // set to the stream end for next call to this function
|
||||
if(0 == context->nUnused && feof(context->file)) {
|
||||
return read;
|
||||
} else {
|
||||
context->bz2 = BZ2_bzReadOpen(&context->error, context->file, 0, 0, context->unused, context->nUnused);
|
||||
if(NULL == context->bz2){std::cerr << "Could not open file" <<std::endl; exit(-1);};
|
||||
}
|
||||
} else { std::cerr << "Could not read bz2 file" << std::endl; exit(-1); }
|
||||
}
|
||||
return read;
|
||||
}
|
||||
|
||||
int closeBz2Stream( void *pointer )
|
||||
{
|
||||
BZ2Context* context = (BZ2Context*) pointer;
|
||||
fclose( context->file );
|
||||
delete context;
|
||||
return 0;
|
||||
}
|
||||
|
||||
xmlTextReaderPtr inputReaderFactory( const char* name )
|
||||
{
|
||||
std::string inputName(name);
|
||||
|
||||
if(inputName.find(".osm.bz2")!=std::string::npos)
|
||||
{
|
||||
BZ2Context* context = new BZ2Context();
|
||||
context->error = false;
|
||||
context->file = fopen( name, "r" );
|
||||
int error;
|
||||
context->bz2 = BZ2_bzReadOpen( &error, context->file, 0, 0, context->unused, context->nUnused );
|
||||
if ( context->bz2 == NULL || context->file == NULL ) {
|
||||
delete context;
|
||||
return NULL;
|
||||
}
|
||||
return xmlReaderForIO( readFromBz2Stream, closeBz2Stream, (void*) context, NULL, NULL, 0 );
|
||||
} else {
|
||||
return xmlNewTextReaderFilename(name);
|
||||
}
|
||||
}
|
||||
|
||||
#endif // INPUTREADERFACTORY_H
89 DataStructures/LRUCache.h Normal file
@ -0,0 +1,89 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef LRUCACHE_H
|
||||
#define LRUCACHE_H
|
||||
|
||||
#include <list>
|
||||
#include <boost/unordered_map.hpp>
|
||||
|
||||
template<typename KeyT, typename ValueT>
|
||||
class LRUCache {
|
||||
private:
|
||||
struct CacheEntry {
|
||||
CacheEntry(KeyT k, ValueT v) : key(k), value(v) {}
|
||||
KeyT key;
|
||||
ValueT value;
|
||||
};
|
||||
unsigned capacity;
|
||||
std::list<CacheEntry> itemsInCache;
|
||||
boost::unordered_map<KeyT, typename std::list<CacheEntry>::iterator > positionMap;
|
||||
public:
|
||||
LRUCache(unsigned c) : capacity(c) {}
|
||||
|
||||
bool Holds(KeyT key) {
|
||||
if(positionMap.find(key) != positionMap.end()) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void Insert(const KeyT key, ValueT &value) {
|
||||
itemsInCache.push_front(CacheEntry(key, value));
|
||||
positionMap.insert(std::make_pair(key, itemsInCache.begin()));
|
||||
if(itemsInCache.size() > capacity) {
|
||||
positionMap.erase(itemsInCache.back().key);
|
||||
itemsInCache.pop_back();
|
||||
}
|
||||
}
|
||||
|
||||
void Insert(const KeyT key, ValueT value) {
|
||||
itemsInCache.push_front(CacheEntry(key, value));
|
||||
positionMap.insert(std::make_pair(key, itemsInCache.begin()));
|
||||
if(itemsInCache.size() > capacity) {
|
||||
positionMap.erase(itemsInCache.back().key);
|
||||
itemsInCache.pop_back();
|
||||
}
|
||||
}
|
||||
|
||||
bool Fetch(const KeyT key, ValueT& result) {
|
||||
if(Holds(key)) {
|
||||
CacheEntry e = *(positionMap.find(key)->second);
|
||||
result = e.value;
|
||||
|
||||
//move to front
|
||||
itemsInCache.splice(positionMap.find(key)->second, itemsInCache, itemsInCache.begin());
|
||||
positionMap.find(key)->second = itemsInCache.begin();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
unsigned Size() const {
|
||||
return itemsInCache.size();
|
||||
}
|
||||
};
|
||||
#endif //LRUCACHE_H
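A brief usage sketch of the cache above (not part of the diff; the keys and values are illustrative, and Boost's unordered_map is assumed to be available):

// Hypothetical sketch: insert three entries into a capacity-two LRUCache and
// observe that only two survive. Rvalue arguments are used so that the
// by-value Insert overload is selected unambiguously.
#include "DataStructures/LRUCache.h"

#include <iostream>
#include <string>

int main()
{
    LRUCache<int, std::string> cache(2);
    cache.Insert(1, std::string("first"));
    cache.Insert(2, std::string("second"));
    cache.Insert(3, std::string("third"));   // evicts the oldest entry (key 1)

    std::string value;
    if (cache.Fetch(3, value))
    {
        std::cout << "3 -> " << value << std::endl;
    }
    std::cout << "entries cached: " << cache.Size() << std::endl;   // prints 2
    return 0;
}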
58 DataStructures/OriginalEdgeData.h Normal file
@ -0,0 +1,58 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef ORIGINAL_EDGE_DATA_H
#define ORIGINAL_EDGE_DATA_H

#include "TurnInstructions.h"
#include "../typedefs.h"

#include <climits>

struct OriginalEdgeData{
    explicit OriginalEdgeData(
        NodeID via_node,
        unsigned name_id,
        TurnInstruction turn_instruction
    ) :
        via_node(via_node),
        name_id(name_id),
        turn_instruction(turn_instruction)
    { }

    OriginalEdgeData() :
        via_node(UINT_MAX),
        name_id(UINT_MAX),
        turn_instruction(UCHAR_MAX)
    { }

    NodeID via_node;
    unsigned name_id;
    TurnInstruction turn_instruction;
};

#endif //ORIGINAL_EDGE_DATA_H
99 DataStructures/Percent.h Normal file
@ -0,0 +1,99 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef PERCENT_H
|
||||
#define PERCENT_H
|
||||
|
||||
#include "../Util/OpenMPWrapper.h"
|
||||
#include <iostream>
|
||||
|
||||
class Percent {
|
||||
public:
|
||||
/**
|
||||
* Constructor.
|
||||
* @param maxValue the value that corresponds to 100%
|
||||
* @param step the progress is shown in steps of 'step' percent
|
||||
*/
|
||||
Percent(unsigned maxValue, unsigned step = 5) {
|
||||
reinit(maxValue, step);
|
||||
}
|
||||
|
||||
/** Reinitializes this object. */
|
||||
void reinit(unsigned maxValue, unsigned step = 5) {
|
||||
_maxValue = maxValue;
|
||||
_current_value = 0;
|
||||
_intervalPercent = _maxValue / 100;
|
||||
_nextThreshold = _intervalPercent;
|
||||
_lastPercent = 0;
|
||||
_step = step;
|
||||
}
|
||||
|
||||
/** If there has been significant progress, display it. */
|
||||
void printStatus(unsigned currentValue) {
|
||||
if (currentValue >= _nextThreshold) {
|
||||
_nextThreshold += _intervalPercent;
|
||||
printPercent( currentValue / (double)_maxValue * 100 );
|
||||
}
|
||||
if (currentValue + 1 == _maxValue)
|
||||
std::cout << " 100%" << std::endl;
|
||||
}
|
||||
|
||||
void printIncrement() {
|
||||
#pragma omp atomic
|
||||
++_current_value;
|
||||
printStatus(_current_value);
|
||||
}
|
||||
|
||||
void printAddition(const unsigned addition) {
|
||||
#pragma omp atomic
|
||||
_current_value += addition;
|
||||
printStatus(_current_value);
|
||||
}
|
||||
private:
|
||||
unsigned _current_value;
|
||||
unsigned _maxValue;
|
||||
unsigned _intervalPercent;
|
||||
unsigned _nextThreshold;
|
||||
unsigned _lastPercent;
|
||||
unsigned _step;
|
||||
|
||||
/** Displays the new progress. */
|
||||
void printPercent(double percent) {
|
||||
while (percent >= _lastPercent+_step) {
|
||||
_lastPercent+=_step;
|
||||
if (_lastPercent % 10 == 0) {
|
||||
std::cout << " " << _lastPercent << "% ";
|
||||
}
|
||||
else {
|
||||
std::cout << ".";
|
||||
}
|
||||
std::cout.flush();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#endif // PERCENT_H
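The progress printer above can be driven either with an absolute counter via printStatus() or from parallel loops via printIncrement(). A small sketch (not part of the diff; it assumes the repository include layout and, for the atomic pragma, an OpenMP-capable compiler):

// Hypothetical sketch: report progress over 1000 units of work in 5% steps.
#include "DataStructures/Percent.h"

int main()
{
    const unsigned number_of_items = 1000;
    Percent progress(number_of_items);       // 100% corresponds to 1000 items

    for (unsigned i = 0; i < number_of_items; ++i)
    {
        // ... one unit of work ...
        progress.printStatus(i);             // prints "10% 20% ..." with dots in between
    }
    return 0;
}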
104 DataStructures/PhantomNodes.h Normal file
@ -0,0 +1,104 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef PHANTOMNODES_H_
|
||||
#define PHANTOMNODES_H_
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include "../typedefs.h"
|
||||
|
||||
struct PhantomNode {
|
||||
PhantomNode() :
|
||||
edgeBasedNode(UINT_MAX),
|
||||
nodeBasedEdgeNameID(UINT_MAX),
|
||||
weight1(INT_MAX),
|
||||
weight2(INT_MAX),
|
||||
ratio(0.)
|
||||
{ }
|
||||
|
||||
NodeID edgeBasedNode;
|
||||
unsigned nodeBasedEdgeNameID;
|
||||
int weight1;
|
||||
int weight2;
|
||||
double ratio;
|
||||
FixedPointCoordinate location;
|
||||
void Reset() {
|
||||
edgeBasedNode = UINT_MAX;
|
||||
nodeBasedEdgeNameID = UINT_MAX;
|
||||
weight1 = INT_MAX;
|
||||
weight2 = INT_MAX;
|
||||
ratio = 0.;
|
||||
location.Reset();
|
||||
}
|
||||
bool isBidirected() const {
|
||||
return weight2 != INT_MAX;
|
||||
}
|
||||
bool isValid(const unsigned numberOfNodes) const {
|
||||
return location.isValid() && (edgeBasedNode < numberOfNodes) && (weight1 != INT_MAX) && (ratio >= 0.) && (ratio <= 1.) && (nodeBasedEdgeNameID != UINT_MAX);
|
||||
}
|
||||
|
||||
bool operator==(const PhantomNode & other) const {
|
||||
return location == other.location;
|
||||
}
|
||||
};
|
||||
|
||||
struct PhantomNodes {
|
||||
PhantomNode startPhantom;
|
||||
PhantomNode targetPhantom;
|
||||
void Reset() {
|
||||
startPhantom.Reset();
|
||||
targetPhantom.Reset();
|
||||
}
|
||||
|
||||
bool PhantomsAreOnSameNodeBasedEdge() const {
|
||||
return (startPhantom.edgeBasedNode == targetPhantom.edgeBasedNode);
|
||||
}
|
||||
|
||||
bool AtLeastOnePhantomNodeIsUINTMAX() const {
|
||||
return !(startPhantom.edgeBasedNode == UINT_MAX || targetPhantom.edgeBasedNode == UINT_MAX);
|
||||
}
|
||||
|
||||
bool PhantomNodesHaveEqualLocation() const {
|
||||
return startPhantom == targetPhantom;
|
||||
}
|
||||
};
|
||||
|
||||
inline std::ostream& operator<<(std::ostream &out, const PhantomNodes & pn){
|
||||
out << "Node1: " << pn.startPhantom.edgeBasedNode << std::endl;
|
||||
out << "Node2: " << pn.targetPhantom.edgeBasedNode << std::endl;
|
||||
out << "startCoord: " << pn.startPhantom.location << std::endl;
|
||||
out << "targetCoord: " << pn.targetPhantom.location << std::endl;
|
||||
return out;
|
||||
}
|
||||
|
||||
inline std::ostream& operator<<(std::ostream &out, const PhantomNode & pn){
|
||||
out << "node: " << pn.edgeBasedNode << ", name: " << pn.nodeBasedEdgeNameID << ", w1: " << pn.weight1 << ", w2: " << pn.weight2 << ", ratio: " << pn.ratio << ", loc: " << pn.location;
|
||||
return out;
|
||||
}
|
||||
|
||||
#endif /* PHANTOMNODES_H_ */
66 DataStructures/QueryEdge.h Normal file
@ -0,0 +1,66 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef QUERYEDGE_H_
#define QUERYEDGE_H_

#include "../typedefs.h"

#include <climits>

struct QueryEdge {
    NodeID source;
    NodeID target;
    struct EdgeData {
        NodeID id:31;
        bool shortcut:1;
        int distance:30;
        bool forward:1;
        bool backward:1;
    } data;

    bool operator<( const QueryEdge& right ) const {
        if ( source != right.source ) {
            return source < right.source;
        }
        return target < right.target;
    }

    bool operator== ( const QueryEdge& right ) const {
        return (
            source == right.source &&
            target == right.target &&
            data.distance == right.data.distance &&
            data.shortcut == right.data.shortcut &&
            data.forward == right.data.forward &&
            data.backward == right.data.backward &&
            data.id == right.data.id
        );
    }
};

#endif /* QUERYEDGE_H_ */
86 DataStructures/QueryNode.h Normal file
@ -0,0 +1,86 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef _NODE_COORDS_H
|
||||
#define _NODE_COORDS_H
|
||||
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
#include <cstddef>
|
||||
#include <climits>
|
||||
|
||||
#include <limits>
|
||||
|
||||
struct NodeInfo {
|
||||
typedef NodeID key_type; //type of NodeID
|
||||
typedef int value_type; //type of lat,lons
|
||||
|
||||
NodeInfo(int _lat, int _lon, NodeID _id) : lat(_lat), lon(_lon), id(_id) {}
|
||||
NodeInfo() : lat(INT_MAX), lon(INT_MAX), id(UINT_MAX) {}
|
||||
int lat;
|
||||
int lon;
|
||||
NodeID id;
|
||||
|
||||
static NodeInfo min_value() {
|
||||
return NodeInfo(
|
||||
-90*COORDINATE_PRECISION,
|
||||
-180*COORDINATE_PRECISION,
|
||||
std::numeric_limits<NodeID>::min()
|
||||
);
|
||||
}
|
||||
|
||||
static NodeInfo max_value() {
|
||||
return NodeInfo(
|
||||
90*COORDINATE_PRECISION,
|
||||
180*COORDINATE_PRECISION,
|
||||
std::numeric_limits<NodeID>::max()
|
||||
);
|
||||
}
|
||||
|
||||
value_type operator[](const std::size_t n) const {
|
||||
switch(n) {
|
||||
case 1:
|
||||
return lat;
|
||||
break;
|
||||
case 0:
|
||||
return lon;
|
||||
break;
|
||||
default:
|
||||
BOOST_ASSERT_MSG(false, "should not happen");
|
||||
return UINT_MAX;
|
||||
break;
|
||||
}
|
||||
BOOST_ASSERT_MSG(false, "should not happen");
|
||||
return UINT_MAX;
|
||||
}
|
||||
};
|
||||
|
||||
#endif //_NODE_COORDS_H
80 DataStructures/RawRouteData.h Normal file
@ -0,0 +1,80 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef RAWROUTEDATA_H_
|
||||
#define RAWROUTEDATA_H_
|
||||
|
||||
#include "../DataStructures/PhantomNodes.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <limits>
|
||||
|
||||
#include <vector>
|
||||
|
||||
struct PathData {
|
||||
PathData() :
|
||||
node(UINT_MAX),
|
||||
name_id(UINT_MAX),
|
||||
durationOfSegment(UINT_MAX),
|
||||
turnInstruction(UCHAR_MAX)
|
||||
{ }
|
||||
|
||||
PathData(
|
||||
NodeID no,
|
||||
unsigned na,
|
||||
unsigned tu,
|
||||
unsigned dur
|
||||
) :
|
||||
node(no),
|
||||
name_id(na),
|
||||
durationOfSegment(dur),
|
||||
turnInstruction(tu)
|
||||
{ }
|
||||
NodeID node;
|
||||
unsigned name_id;
|
||||
unsigned durationOfSegment;
|
||||
short turnInstruction;
|
||||
};
|
||||
|
||||
struct RawRouteData {
|
||||
std::vector< std::vector<PathData> > unpacked_path_segments;
|
||||
std::vector< PathData > unpacked_alternative;
|
||||
std::vector< PhantomNodes > segmentEndCoordinates;
|
||||
std::vector< FixedPointCoordinate > rawViaNodeCoordinates;
|
||||
unsigned checkSum;
|
||||
int lengthOfShortestPath;
|
||||
int lengthOfAlternativePath;
|
||||
RawRouteData() :
|
||||
checkSum(UINT_MAX),
|
||||
lengthOfShortestPath(INT_MAX),
|
||||
lengthOfAlternativePath(INT_MAX)
|
||||
{ }
|
||||
};
|
||||
|
||||
#endif /* RAWROUTEDATA_H_ */
149 DataStructures/Restriction.h Normal file
@ -0,0 +1,149 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef RESTRICTION_H_
|
||||
#define RESTRICTION_H_
|
||||
|
||||
#include "../typedefs.h"
|
||||
#include <climits>
|
||||
|
||||
struct TurnRestriction {
|
||||
NodeID viaNode;
|
||||
NodeID fromNode;
|
||||
NodeID toNode;
|
||||
struct Bits { //mostly unused
|
||||
Bits()
|
||||
:
|
||||
isOnly(false),
|
||||
unused1(false),
|
||||
unused2(false),
|
||||
unused3(false),
|
||||
unused4(false),
|
||||
unused5(false),
|
||||
unused6(false),
|
||||
unused7(false)
|
||||
{ }
|
||||
|
||||
bool isOnly:1;
|
||||
bool unused1:1;
|
||||
bool unused2:1;
|
||||
bool unused3:1;
|
||||
bool unused4:1;
|
||||
bool unused5:1;
|
||||
bool unused6:1;
|
||||
bool unused7:1;
|
||||
} flags;
|
||||
|
||||
TurnRestriction(NodeID viaNode) :
|
||||
viaNode(viaNode),
|
||||
fromNode(UINT_MAX),
|
||||
toNode(UINT_MAX) {
|
||||
|
||||
}
|
||||
|
||||
TurnRestriction(const bool isOnly = false) :
|
||||
viaNode(UINT_MAX),
|
||||
fromNode(UINT_MAX),
|
||||
toNode(UINT_MAX) {
|
||||
flags.isOnly = isOnly;
|
||||
}
|
||||
};
|
||||
|
||||
struct InputRestrictionContainer {
|
||||
EdgeID fromWay;
|
||||
EdgeID toWay;
|
||||
unsigned viaNode;
|
||||
TurnRestriction restriction;
|
||||
|
||||
InputRestrictionContainer(
|
||||
EdgeID fromWay,
|
||||
EdgeID toWay,
|
||||
NodeID vn,
|
||||
unsigned vw
|
||||
) :
|
||||
fromWay(fromWay),
|
||||
toWay(toWay),
|
||||
viaNode(vw)
|
||||
{
|
||||
restriction.viaNode = vn;
|
||||
}
|
||||
InputRestrictionContainer(
|
||||
bool isOnly = false
|
||||
) :
|
||||
fromWay(UINT_MAX),
|
||||
toWay(UINT_MAX),
|
||||
viaNode(UINT_MAX)
|
||||
{
|
||||
restriction.flags.isOnly = isOnly;
|
||||
}
|
||||
|
||||
static InputRestrictionContainer min_value() {
|
||||
return InputRestrictionContainer(0, 0, 0, 0);
|
||||
}
|
||||
static InputRestrictionContainer max_value() {
|
||||
return InputRestrictionContainer(
|
||||
UINT_MAX,
|
||||
UINT_MAX,
|
||||
UINT_MAX,
|
||||
UINT_MAX
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
struct CmpRestrictionContainerByFrom {
|
||||
typedef InputRestrictionContainer value_type;
|
||||
inline bool operator()(
|
||||
const InputRestrictionContainer & a,
|
||||
const InputRestrictionContainer & b
|
||||
) const {
|
||||
return a.fromWay < b.fromWay;
|
||||
}
|
||||
inline value_type max_value() const {
|
||||
return InputRestrictionContainer::max_value();
|
||||
}
|
||||
inline value_type min_value() const {
|
||||
return InputRestrictionContainer::min_value();
|
||||
}
|
||||
};
|
||||
|
||||
struct CmpRestrictionContainerByTo {
|
||||
typedef InputRestrictionContainer value_type;
|
||||
inline bool operator()(
|
||||
const InputRestrictionContainer & a,
|
||||
const InputRestrictionContainer & b
|
||||
) const {
|
||||
return a.toWay < b.toWay;
|
||||
}
|
||||
value_type max_value() const {
|
||||
return InputRestrictionContainer::max_value();
|
||||
}
|
||||
value_type min_value() const {
|
||||
return InputRestrictionContainer::min_value();
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* RESTRICTION_H_ */
68 DataStructures/SearchEngine.h Normal file
@ -0,0 +1,68 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef SEARCHENGINE_H
|
||||
#define SEARCHENGINE_H
|
||||
|
||||
#include "SearchEngineData.h"
|
||||
#include "PhantomNodes.h"
|
||||
#include "QueryEdge.h"
|
||||
#include "../RoutingAlgorithms/AlternativePathRouting.h"
|
||||
#include "../RoutingAlgorithms/ShortestPathRouting.h"
|
||||
|
||||
#include "../Util/StringUtil.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
#include <climits>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
template<class DataFacadeT>
|
||||
class SearchEngine {
|
||||
private:
|
||||
DataFacadeT * facade;
|
||||
SearchEngineData engine_working_data;
|
||||
public:
|
||||
ShortestPathRouting<DataFacadeT> shortest_path;
|
||||
AlternativeRouting <DataFacadeT> alternative_path;
|
||||
|
||||
SearchEngine( DataFacadeT * facade )
|
||||
:
|
||||
facade (facade),
|
||||
shortest_path (facade, engine_working_data),
|
||||
alternative_path (facade, engine_working_data)
|
||||
{}
|
||||
|
||||
~SearchEngine() {}
|
||||
|
||||
};
|
||||
|
||||
#endif // SEARCHENGINE_H
|
||||
@ -1,6 +1,6 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2017, Project OSRM contributors
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
@ -25,51 +25,67 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef OSRM_UPDATER_UPDATER_CONFIG_HPP
|
||||
#define OSRM_UPDATER_UPDATER_CONFIG_HPP
|
||||
#include "SearchEngineData.h"
|
||||
|
||||
#include "storage/io_config.hpp"
|
||||
#include "storage/storage_config.hpp"
|
||||
|
||||
#include <chrono>
|
||||
#include <filesystem>
|
||||
#include <string>
|
||||
|
||||
namespace osrm::updater
|
||||
void SearchEngineData::InitializeOrClearFirstThreadLocalStorage(const unsigned number_of_nodes)
|
||||
{
|
||||
|
||||
struct UpdaterConfig final : storage::IOConfig
|
||||
{
|
||||
UpdaterConfig()
|
||||
: IOConfig({".osrm.ebg",
|
||||
".osrm.turn_weight_penalties",
|
||||
".osrm.turn_duration_penalties",
|
||||
".osrm.turn_penalties_index",
|
||||
".osrm.nbg_nodes",
|
||||
".osrm.ebg_nodes",
|
||||
".osrm.geometry",
|
||||
".osrm.fileIndex",
|
||||
".osrm.properties",
|
||||
".osrm.restrictions",
|
||||
".osrm.enw"},
|
||||
{},
|
||||
{".osrm.datasource_names"}),
|
||||
valid_now(0)
|
||||
if (forwardHeap.get())
|
||||
{
|
||||
forwardHeap->Clear();
|
||||
}
|
||||
else
|
||||
{
|
||||
forwardHeap.reset(new QueryHeap(number_of_nodes));
|
||||
}
|
||||
|
||||
void UseDefaultOutputNames(const std::filesystem::path &base)
|
||||
if (backwardHeap.get())
|
||||
{
|
||||
IOConfig::UseDefaultOutputNames(base);
|
||||
backwardHeap->Clear();
|
||||
}
|
||||
else
|
||||
{
|
||||
backwardHeap.reset(new QueryHeap(number_of_nodes));
|
||||
}
|
||||
}
|
||||
|
||||
void SearchEngineData::InitializeOrClearSecondThreadLocalStorage(const unsigned number_of_nodes)
|
||||
{
|
||||
if (forwardHeap2.get())
|
||||
{
|
||||
forwardHeap2->Clear();
|
||||
}
|
||||
else
|
||||
{
|
||||
forwardHeap2.reset(new QueryHeap(number_of_nodes));
|
||||
}
|
||||
|
||||
double log_edge_updates_factor = 0.0;
|
||||
std::time_t valid_now;
|
||||
if (backwardHeap2.get())
|
||||
{
|
||||
backwardHeap2->Clear();
|
||||
}
|
||||
else
|
||||
{
|
||||
backwardHeap2.reset(new QueryHeap(number_of_nodes));
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<std::string> segment_speed_lookup_paths;
|
||||
std::vector<std::string> turn_penalty_lookup_paths;
|
||||
std::string tz_file_path;
|
||||
};
|
||||
} // namespace osrm::updater
|
||||
void SearchEngineData::InitializeOrClearThirdThreadLocalStorage(const unsigned number_of_nodes)
|
||||
{
|
||||
if (forwardHeap3.get())
|
||||
{
|
||||
forwardHeap3->Clear();
|
||||
}
|
||||
else
|
||||
{
|
||||
forwardHeap3.reset(new QueryHeap(number_of_nodes));
|
||||
}
|
||||
|
||||
#endif // EXTRACTOR_OPTIONS_HPP
|
||||
if (backwardHeap3.get())
|
||||
{
|
||||
backwardHeap3->Clear();
|
||||
}
|
||||
else
|
||||
{
|
||||
backwardHeap3.reset(new QueryHeap(number_of_nodes));
|
||||
}
|
||||
}
67 DataStructures/SearchEngineData.h Normal file
@ -0,0 +1,67 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef SEARCH_ENGINE_DATA_H
|
||||
#define SEARCH_ENGINE_DATA_H
|
||||
|
||||
#include "BinaryHeap.h"
|
||||
#include "QueryEdge.h"
|
||||
#include "StaticGraph.h"
|
||||
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/thread.hpp>
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
struct _HeapData {
|
||||
NodeID parent;
|
||||
_HeapData( NodeID p ) : parent(p) { }
|
||||
};
|
||||
|
||||
// typedef StaticGraph<QueryEdge::EdgeData> QueryGraph;
|
||||
|
||||
struct SearchEngineData {
|
||||
typedef BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> > QueryHeap;
|
||||
typedef boost::thread_specific_ptr<QueryHeap> SearchEngineHeapPtr;
|
||||
|
||||
static SearchEngineHeapPtr forwardHeap;
|
||||
static SearchEngineHeapPtr backwardHeap;
|
||||
static SearchEngineHeapPtr forwardHeap2;
|
||||
static SearchEngineHeapPtr backwardHeap2;
|
||||
static SearchEngineHeapPtr forwardHeap3;
|
||||
static SearchEngineHeapPtr backwardHeap3;
|
||||
|
||||
void InitializeOrClearFirstThreadLocalStorage(const unsigned number_of_nodes);
|
||||
|
||||
void InitializeOrClearSecondThreadLocalStorage(const unsigned number_of_nodes);
|
||||
|
||||
void InitializeOrClearThirdThreadLocalStorage(const unsigned number_of_nodes);
|
||||
};
|
||||
|
||||
#endif // SEARCH_ENGINE_DATA_H
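The six heap handles above are static boost::thread_specific_ptr members, so some translation unit still has to provide their definitions; the InitializeOrClear*ThreadLocalStorage methods shown in the hunk further up then allocate one QueryHeap per thread on first use. A minimal sketch of what such definitions look like (assumed for illustration, not copied from the diff):

// Hypothetical sketch of the out-of-line definitions required for the static
// thread-local heap pointers declared in SearchEngineData.h.
#include "SearchEngineData.h"

SearchEngineData::SearchEngineHeapPtr SearchEngineData::forwardHeap;
SearchEngineData::SearchEngineHeapPtr SearchEngineData::backwardHeap;
SearchEngineData::SearchEngineHeapPtr SearchEngineData::forwardHeap2;
SearchEngineData::SearchEngineHeapPtr SearchEngineData::backwardHeap2;
SearchEngineData::SearchEngineHeapPtr SearchEngineData::forwardHeap3;
SearchEngineData::SearchEngineHeapPtr SearchEngineData::backwardHeap3;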
81 DataStructures/SegmentInformation.h Normal file
@ -0,0 +1,81 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef SEGMENTINFORMATION_H_
|
||||
#define SEGMENTINFORMATION_H_
|
||||
|
||||
#include "TurnInstructions.h"
|
||||
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
// Struct fits everything in one cache line
|
||||
struct SegmentInformation {
|
||||
FixedPointCoordinate location;
|
||||
NodeID name_id;
|
||||
unsigned duration;
|
||||
double length;
|
||||
short bearing; //more than enough [0..3600] fits into 12 bits
|
||||
TurnInstruction turn_instruction;
|
||||
bool necessary;
|
||||
|
||||
explicit SegmentInformation(
|
||||
const FixedPointCoordinate & location,
|
||||
const NodeID name_id,
|
||||
const unsigned duration,
|
||||
const double length,
|
||||
const TurnInstruction turn_instruction,
|
||||
const bool necessary
|
||||
) :
|
||||
location(location),
|
||||
name_id(name_id),
|
||||
duration(duration),
|
||||
length(length),
|
||||
bearing(0),
|
||||
turn_instruction(turn_instruction),
|
||||
necessary(necessary)
|
||||
{ }
|
||||
|
||||
explicit SegmentInformation(
|
||||
const FixedPointCoordinate & location,
|
||||
const NodeID name_id,
|
||||
const unsigned duration,
|
||||
const double length,
|
||||
const TurnInstruction turn_instruction
|
||||
) :
|
||||
location(location),
|
||||
name_id(name_id),
|
||||
duration(duration),
|
||||
length(length),
|
||||
bearing(0),
|
||||
turn_instruction(turn_instruction),
|
||||
necessary(turn_instruction != 0)
|
||||
{ }
|
||||
};
|
||||
|
||||
#endif /* SEGMENTINFORMATION_H_ */
250 DataStructures/SharedMemoryFactory.h Normal file
@ -0,0 +1,250 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef SHARED_MEMORY_FACTORY_H
|
||||
#define SHARED_MEMORY_FACTORY_H
|
||||
|
||||
#include "../Util/OSRMException.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <boost/noncopyable.hpp>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <boost/filesystem/fstream.hpp>
|
||||
#include <boost/integer.hpp>
|
||||
#include <boost/interprocess/mapped_region.hpp>
|
||||
#include <boost/interprocess/xsi_shared_memory.hpp>
|
||||
|
||||
#ifdef __linux__
|
||||
#include <sys/ipc.h>
|
||||
#include <sys/shm.h>
|
||||
#endif
|
||||
|
||||
#include <cstring>
|
||||
|
||||
#include <algorithm>
|
||||
#include <exception>
|
||||
|
||||
struct OSRMLockFile {
|
||||
boost::filesystem::path operator()() {
|
||||
boost::filesystem::path temp_dir =
|
||||
boost::filesystem::temp_directory_path();
|
||||
boost::filesystem::path lock_file = temp_dir / "osrm.lock";
|
||||
return lock_file;
|
||||
}
|
||||
};
|
||||
|
||||
class SharedMemory : boost::noncopyable {
|
||||
|
||||
//Remove shared memory on destruction
|
||||
class shm_remove : boost::noncopyable {
|
||||
private:
|
||||
int m_shmid;
|
||||
bool m_initialized;
|
||||
public:
|
||||
void SetID(int shmid) {
|
||||
m_shmid = shmid;
|
||||
m_initialized = true;
|
||||
}
|
||||
|
||||
shm_remove() : m_shmid(INT_MIN), m_initialized(false) {}
|
||||
|
||||
~shm_remove(){
|
||||
if(m_initialized) {
|
||||
SimpleLogger().Write(logDEBUG) <<
|
||||
"automatic memory deallocation";
|
||||
if(!boost::interprocess::xsi_shared_memory::remove(m_shmid)) {
|
||||
SimpleLogger().Write(logDEBUG) << "could not deallocate id " << m_shmid;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public:
|
||||
void * Ptr() const {
|
||||
return region.get_address();
|
||||
}
|
||||
|
||||
template<typename IdentifierT >
|
||||
SharedMemory(
|
||||
const boost::filesystem::path & lock_file,
|
||||
const IdentifierT id,
|
||||
const uint64_t size = 0,
|
||||
bool read_write = false,
|
||||
bool remove_prev = true
|
||||
) : key(
|
||||
lock_file.string().c_str(),
|
||||
id
|
||||
) {
|
||||
if( 0 == size ){ //read_only
|
||||
shm = boost::interprocess::xsi_shared_memory (
|
||||
boost::interprocess::open_only,
|
||||
key
|
||||
);
|
||||
|
||||
region = boost::interprocess::mapped_region (
|
||||
shm,
|
||||
(
|
||||
read_write ?
|
||||
boost::interprocess::read_write :
|
||||
boost::interprocess::read_only
|
||||
)
|
||||
);
|
||||
} else { //writeable pointer
|
||||
//remove previously allocated mem
|
||||
if( remove_prev ) {
|
||||
Remove(key);
|
||||
}
|
||||
shm = boost::interprocess::xsi_shared_memory (
|
||||
boost::interprocess::open_or_create,
|
||||
key,
|
||||
size
|
||||
);
|
||||
#ifdef __linux__
|
||||
if( -1 == shmctl(shm.get_shmid(), SHM_LOCK, 0) ) {
|
||||
if( ENOMEM == errno ) {
|
||||
SimpleLogger().Write(logWARNING) <<
|
||||
"could not lock shared memory to RAM";
|
||||
}
|
||||
}
|
||||
#endif
|
||||
region = boost::interprocess::mapped_region (
|
||||
shm,
|
||||
boost::interprocess::read_write
|
||||
);
|
||||
|
||||
remover.SetID( shm.get_shmid() );
|
||||
SimpleLogger().Write(logDEBUG) <<
|
||||
"writeable memory allocated " << size << " bytes";
|
||||
}
|
||||
}
|
||||
|
||||
template<typename IdentifierT >
|
||||
static bool RegionExists(
|
||||
const IdentifierT id
|
||||
) {
|
||||
bool result = true;
|
||||
try {
|
||||
OSRMLockFile lock_file;
|
||||
boost::interprocess::xsi_key key( lock_file().string().c_str(), id );
|
||||
result = RegionExists(key);
|
||||
} catch(...) {
|
||||
result = false;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
template<typename IdentifierT >
|
||||
static bool Remove(
|
||||
const IdentifierT id
|
||||
) {
|
||||
OSRMLockFile lock_file;
|
||||
boost::interprocess::xsi_key key( lock_file().string().c_str(), id );
|
||||
return Remove(key);
|
||||
}
|
||||
|
||||
private:
|
||||
static bool RegionExists( const boost::interprocess::xsi_key &key ) {
|
||||
bool result = true;
|
||||
try {
|
||||
boost::interprocess::xsi_shared_memory shm(
|
||||
boost::interprocess::open_only,
|
||||
key
|
||||
);
|
||||
} catch(...) {
|
||||
result = false;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
static bool Remove(
|
||||
const boost::interprocess::xsi_key &key
|
||||
) {
|
||||
bool ret = false;
|
||||
try{
|
||||
SimpleLogger().Write(logDEBUG) << "deallocating prev memory";
|
||||
boost::interprocess::xsi_shared_memory xsi(
|
||||
boost::interprocess::open_only,
|
||||
key
|
||||
);
|
||||
ret = boost::interprocess::xsi_shared_memory::remove(xsi.get_shmid());
|
||||
} catch(const boost::interprocess::interprocess_exception &e){
|
||||
if(e.get_error_code() != boost::interprocess::not_found_error) {
|
||||
throw;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
boost::interprocess::xsi_key key;
|
||||
boost::interprocess::xsi_shared_memory shm;
|
||||
boost::interprocess::mapped_region region;
|
||||
shm_remove remover;
|
||||
};
|
||||
|
||||
template<class LockFileT = OSRMLockFile>
|
||||
class SharedMemoryFactory_tmpl : boost::noncopyable {
|
||||
public:
|
||||
|
||||
template<typename IdentifierT >
|
||||
static SharedMemory * Get(
|
||||
const IdentifierT & id,
|
||||
const uint64_t size = 0,
|
||||
bool read_write = false,
|
||||
bool remove_prev = true
|
||||
) {
|
||||
try {
|
||||
LockFileT lock_file;
|
||||
if(!boost::filesystem::exists(lock_file()) ) {
|
||||
if( 0 == size ) {
|
||||
throw OSRMException("lock file does not exist, exiting");
|
||||
} else {
|
||||
boost::filesystem::ofstream ofs(lock_file());
|
||||
ofs.close();
|
||||
}
|
||||
}
|
||||
return new SharedMemory(
|
||||
lock_file(),
|
||||
id,
|
||||
size,
|
||||
read_write,
|
||||
remove_prev
|
||||
);
|
||||
} catch(const boost::interprocess::interprocess_exception &e){
|
||||
SimpleLogger().Write(logWARNING) <<
|
||||
"caught exception: " << e.what() <<
|
||||
", code " << e.get_error_code();
|
||||
throw OSRMException(e.what());
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
SharedMemoryFactory_tmpl() {}
|
||||
};
|
||||
|
||||
typedef SharedMemoryFactory_tmpl<> SharedMemoryFactory;
|
||||
|
||||
#endif /* SHARED_MEMORY_POINTER_FACTORY_H */
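A usage sketch for the factory above (not part of the diff; the region identifier is illustrative, and Boost.Interprocess with XSI shared memory support on a POSIX system is assumed):

// Hypothetical sketch: create a writeable 1 KiB region, then map the same
// region a second time read-only through the lock-file/identifier pair.
#include "DataStructures/SharedMemoryFactory.h"

int main()
{
    const int region_id = 42;                                     // illustrative id only

    SharedMemory * writer = SharedMemoryFactory::Get(region_id, 1024, true);
    static_cast<char *>(writer->Ptr())[0] = 'x';

    SharedMemory * reader = SharedMemoryFactory::Get(region_id);  // size 0 => read-only mapping
    const bool roundtrip_ok = ('x' == static_cast<char *>(reader->Ptr())[0]);

    delete reader;
    delete writer;   // shm_remove deallocates the segment on destruction
    return roundtrip_ok ? 0 : 1;
}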
139 DataStructures/SharedMemoryVectorWrapper.h Normal file
@ -0,0 +1,139 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef SHARED_MEMORY_VECTOR_WRAPPER_H
|
||||
#define SHARED_MEMORY_VECTOR_WRAPPER_H
|
||||
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/type_traits.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <iterator>
|
||||
#include <vector>
|
||||
|
||||
template<typename DataT>
|
||||
class ShMemIterator : public std::iterator<std::input_iterator_tag, DataT> {
|
||||
DataT * p;
|
||||
public:
|
||||
ShMemIterator(DataT * x) : p(x) {}
|
||||
ShMemIterator(const ShMemIterator & mit) : p(mit.p) {}
|
||||
ShMemIterator& operator++() {
|
||||
++p;
|
||||
return *this;
|
||||
}
|
||||
ShMemIterator operator++(int) {
|
||||
ShMemIterator tmp(*this);
|
||||
operator++();
|
||||
return tmp;
|
||||
}
|
||||
ShMemIterator operator+(std::ptrdiff_t diff) {
|
||||
ShMemIterator tmp(p+diff);
|
||||
return tmp;
|
||||
}
|
||||
bool operator==(const ShMemIterator& rhs) {
|
||||
return p==rhs.p;
|
||||
}
|
||||
bool operator!=(const ShMemIterator& rhs) {
|
||||
return p!=rhs.p;
|
||||
}
|
||||
DataT& operator*() {
|
||||
return *p;
|
||||
}
|
||||
};
|
||||
|
||||
template<typename DataT>
|
||||
class SharedMemoryWrapper {
|
||||
private:
|
||||
DataT * m_ptr;
|
||||
std::size_t m_size;
|
||||
|
||||
public:
|
||||
SharedMemoryWrapper() :
|
||||
m_ptr(NULL),
|
||||
m_size(0)
|
||||
{ }
|
||||
|
||||
SharedMemoryWrapper(DataT * ptr, std::size_t size) :
|
||||
m_ptr(ptr),
|
||||
m_size(size)
|
||||
{ }
|
||||
|
||||
void swap( SharedMemoryWrapper<DataT> & other ) {
|
||||
BOOST_ASSERT_MSG(m_size != 0 || other.size() != 0, "size invalid");
|
||||
std::swap( m_size, other.m_size);
|
||||
std::swap( m_ptr , other.m_ptr );
|
||||
}
|
||||
|
||||
// void SetData(const DataT * ptr, const std::size_t size) {
|
||||
// BOOST_ASSERT_MSG( 0 == m_size, "vector not empty");
|
||||
// BOOST_ASSERT_MSG( 0 < size , "new vector empty");
|
||||
// m_ptr.reset(ptr);
|
||||
// m_size = size;
|
||||
// }
|
||||
|
||||
DataT & at(const std::size_t index) {
|
||||
return m_ptr[index];
|
||||
}
|
||||
|
||||
const DataT & at(const std::size_t index) const {
|
||||
return m_ptr[index];
|
||||
}
|
||||
|
||||
ShMemIterator<DataT> begin() const {
|
||||
return ShMemIterator<DataT>(m_ptr);
|
||||
}
|
||||
|
||||
ShMemIterator<DataT> end() const {
|
||||
return ShMemIterator<DataT>(m_ptr+m_size);
|
||||
}
|
||||
|
||||
std::size_t size() const { return m_size; }
|
||||
|
||||
DataT & operator[](const unsigned index) {
|
||||
BOOST_ASSERT_MSG(index < m_size, "invalid size");
|
||||
return m_ptr[index];
|
||||
}
|
||||
|
||||
const DataT & operator[](const unsigned index) const {
|
||||
BOOST_ASSERT_MSG(index < m_size, "invalid size");
|
||||
return m_ptr[index];
|
||||
}
|
||||
};
|
||||
|
||||
template<typename DataT, bool UseSharedMemory>
|
||||
struct ShM {
|
||||
typedef typename boost::conditional<
|
||||
UseSharedMemory,
|
||||
SharedMemoryWrapper<DataT>,
|
||||
std::vector<DataT>
|
||||
>::type vector;
|
||||
};
|
||||
|
||||
|
||||
#endif //SHARED_MEMORY_VECTOR_WRAPPER_H
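ShM<DataT, UseSharedMemory>::vector is the switch that lets the same container code run either on a plain std::vector or on a non-owning view over an already mapped block. A small self-contained sketch of both instantiations follows; the static array only stands in for a region that would normally come from SharedMemoryFactory.

#include <cassert>

void shm_vector_wrapper_example() {
    // Heap-backed flavour: resolves to std::vector<int>.
    ShM<int, false>::vector heap_vector;
    heap_vector.push_back(7);
    assert(1 == heap_vector.size());

    // Shared-memory flavour: resolves to SharedMemoryWrapper<int>, a
    // non-owning (pointer, size) view over memory that lives elsewhere.
    static int mapped_block[3] = {1, 2, 3};    // stand-in for a mapped region
    ShM<int, true>::vector shm_vector(mapped_block, 3);

    assert(3 == shm_vector.size());
    assert(2 == shm_vector[1]);

    int sum = 0;
    for (ShMemIterator<int> it = shm_vector.begin(); it != shm_vector.end(); ++it) {
        sum += *it;
    }
    assert(6 == sum);
}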
204 DataStructures/StaticGraph.h Normal file
@ -0,0 +1,204 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef STATICGRAPH_H_INCLUDED
|
||||
#define STATICGRAPH_H_INCLUDED
|
||||
|
||||
#include "../DataStructures/Percent.h"
|
||||
#include "../DataStructures/SharedMemoryVectorWrapper.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
|
||||
template< typename EdgeDataT, bool UseSharedMemory = false>
|
||||
class StaticGraph {
|
||||
public:
|
||||
typedef NodeID NodeIterator;
|
||||
typedef NodeID EdgeIterator;
|
||||
typedef EdgeDataT EdgeData;
|
||||
class InputEdge {
|
||||
public:
|
||||
EdgeDataT data;
|
||||
NodeIterator source;
|
||||
NodeIterator target;
|
||||
bool operator<( const InputEdge& right ) const {
|
||||
if ( source != right.source ) {
|
||||
return source < right.source;
|
||||
}
|
||||
return target < right.target;
|
||||
}
|
||||
};
|
||||
|
||||
struct _StrNode {
|
||||
//index of the first edge
|
||||
EdgeIterator firstEdge;
|
||||
};
|
||||
|
||||
struct _StrEdge {
|
||||
NodeID target;
|
||||
EdgeDataT data;
|
||||
};
|
||||
|
||||
StaticGraph( const int nodes, std::vector< InputEdge > &graph ) {
|
||||
std::sort( graph.begin(), graph.end() );
|
||||
_numNodes = nodes;
|
||||
_numEdges = ( EdgeIterator ) graph.size();
|
||||
_nodes.resize( _numNodes + 1);
|
||||
EdgeIterator edge = 0;
|
||||
EdgeIterator position = 0;
|
||||
for ( NodeIterator node = 0; node <= _numNodes; ++node ) {
|
||||
EdgeIterator lastEdge = edge;
|
||||
while ( edge < _numEdges && graph[edge].source == node )
|
||||
++edge;
|
||||
_nodes[node].firstEdge = position; //=edge
|
||||
position += edge - lastEdge; //remove
|
||||
}
|
||||
_edges.resize( position ); //(edge)
|
||||
edge = 0;
|
||||
for ( NodeIterator node = 0; node < _numNodes; ++node ) {
|
||||
for ( EdgeIterator i = _nodes[node].firstEdge, e = _nodes[node+1].firstEdge; i != e; ++i ) {
|
||||
_edges[i].target = graph[edge].target;
|
||||
_edges[i].data = graph[edge].data;
|
||||
assert(_edges[i].data.distance > 0);
|
||||
edge++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
StaticGraph(
|
||||
typename ShM<_StrNode, UseSharedMemory>::vector & nodes,
|
||||
typename ShM<_StrEdge, UseSharedMemory>::vector & edges
|
||||
) {
|
||||
_numNodes = nodes.size()-1;
|
||||
_numEdges = edges.size();
|
||||
|
||||
_nodes.swap(nodes);
|
||||
_edges.swap(edges);
|
||||
|
||||
#ifndef NDEBUG
|
||||
Percent p(GetNumberOfNodes());
|
||||
for(unsigned u = 0; u < GetNumberOfNodes(); ++u) {
|
||||
for(unsigned eid = BeginEdges(u); eid < EndEdges(u); ++eid) {
|
||||
unsigned v = GetTarget(eid);
|
||||
EdgeData & data = GetEdgeData(eid);
|
||||
if(data.shortcut) {
|
||||
unsigned eid2 = FindEdgeInEitherDirection(u, data.id);
|
||||
if(eid2 == UINT_MAX) {
|
||||
SimpleLogger().Write(logWARNING) <<
|
||||
"cannot find first segment of edge (" <<
|
||||
u << "," << data.id << "," << v << ")";
|
||||
|
||||
data.shortcut = false;
|
||||
}
|
||||
eid2 = FindEdgeInEitherDirection(data.id, v);
|
||||
if(eid2 == UINT_MAX) {
|
||||
SimpleLogger().Write(logWARNING) <<
|
||||
"cannot find second segment of edge (" <<
|
||||
u << "," << data.id << "," << v << ")";
|
||||
data.shortcut = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
p.printIncrement();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
unsigned GetNumberOfNodes() const {
|
||||
return _numNodes;
|
||||
}
|
||||
|
||||
unsigned GetNumberOfEdges() const {
|
||||
return _numEdges;
|
||||
}
|
||||
|
||||
unsigned GetOutDegree( const NodeIterator n ) const {
|
||||
return EndEdges(n) - BeginEdges(n);
|
||||
}
|
||||
|
||||
inline NodeIterator GetTarget( const EdgeIterator e ) const {
|
||||
return NodeIterator( _edges[e].target );
|
||||
}
|
||||
|
||||
inline EdgeDataT &GetEdgeData( const EdgeIterator e ) {
|
||||
return _edges[e].data;
|
||||
}
|
||||
|
||||
const EdgeDataT &GetEdgeData( const EdgeIterator e ) const {
|
||||
return _edges[e].data;
|
||||
}
|
||||
|
||||
EdgeIterator BeginEdges( const NodeIterator n ) const {
|
||||
return EdgeIterator( _nodes[n].firstEdge );
|
||||
}
|
||||
|
||||
EdgeIterator EndEdges( const NodeIterator n ) const {
|
||||
return EdgeIterator( _nodes[n+1].firstEdge );
|
||||
}
|
||||
|
||||
//searches for a specific edge
|
||||
EdgeIterator FindEdge( const NodeIterator from, const NodeIterator to ) const {
|
||||
EdgeIterator smallestEdge = SPECIAL_EDGEID;
|
||||
EdgeWeight smallestWeight = UINT_MAX;
|
||||
for ( EdgeIterator edge = BeginEdges( from ); edge < EndEdges(from); edge++ ) {
|
||||
const NodeID target = GetTarget(edge);
|
||||
const EdgeWeight weight = GetEdgeData(edge).distance;
|
||||
if(target == to && weight < smallestWeight) {
|
||||
smallestEdge = edge; smallestWeight = weight;
|
||||
}
|
||||
}
|
||||
return smallestEdge;
|
||||
}
|
||||
|
||||
EdgeIterator FindEdgeInEitherDirection( const NodeIterator from, const NodeIterator to ) const {
|
||||
EdgeIterator tmp = FindEdge( from, to );
|
||||
return (UINT_MAX != tmp ? tmp : FindEdge( to, from ));
|
||||
}
|
||||
|
||||
EdgeIterator FindEdgeIndicateIfReverse( const NodeIterator from, const NodeIterator to, bool & result ) const {
|
||||
EdgeIterator tmp = FindEdge( from, to );
|
||||
if(UINT_MAX == tmp) {
|
||||
tmp = FindEdge( to, from );
|
||||
if(UINT_MAX != tmp) {
|
||||
result = true;
|
||||
}
|
||||
}
|
||||
return tmp;
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
NodeIterator _numNodes;
|
||||
EdgeIterator _numEdges;
|
||||
|
||||
typename ShM< _StrNode, UseSharedMemory >::vector _nodes;
|
||||
typename ShM< _StrEdge, UseSharedMemory >::vector _edges;
|
||||
};
|
||||
|
||||
#endif // STATICGRAPH_H_INCLUDED
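The first constructor above sorts the input edges and lays them out as a compact adjacency array: one firstEdge offset per node plus a flat edge array. A small sketch that builds a three-node graph and walks the out-edges of node 0; ExampleEdgeData is a made-up payload (distance is the only member the constructor checks), and NodeID is assumed to be the unsigned typedef from typedefs.h.

#include <vector>

struct ExampleEdgeData {
    int distance;      // only member inspected by the constructor's assert
};

void static_graph_example() {
    typedef StaticGraph<ExampleEdgeData> Graph;

    // Three directed edges over nodes {0,1,2}: 0->1, 0->2 and 1->2.
    std::vector<Graph::InputEdge> edges(3);
    edges[0].source = 0; edges[0].target = 1; edges[0].data.distance = 5;
    edges[1].source = 0; edges[1].target = 2; edges[1].data.distance = 9;
    edges[2].source = 1; edges[2].target = 2; edges[2].data.distance = 3;

    Graph graph(3, edges);    // sorts the edges and builds the offset arrays

    // Enumerate the out-edges of node 0 through the firstEdge offsets.
    for (Graph::EdgeIterator e = graph.BeginEdges(0); e < graph.EndEdges(0); ++e) {
        Graph::NodeIterator target = graph.GetTarget(e);
        int distance = graph.GetEdgeData(e).distance;
        (void) target; (void) distance;
    }
}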
230 DataStructures/StaticKDTree.h Normal file
@ -0,0 +1,230 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
// KD Tree coded by Christian Vetter, Monav Project
|
||||
|
||||
#ifndef STATICKDTREE_H_INCLUDED
|
||||
#define STATICKDTREE_H_INCLUDED
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <vector>
|
||||
#include <algorithm>
|
||||
#include <stack>
|
||||
#include <limits>
|
||||
|
||||
namespace KDTree {
|
||||
|
||||
#define KDTREE_BASESIZE (8)
|
||||
|
||||
template< unsigned k, typename T >
|
||||
class BoundingBox {
|
||||
public:
|
||||
BoundingBox() {
|
||||
for ( unsigned dim = 0; dim < k; ++dim ) {
|
||||
min[dim] = std::numeric_limits< T >::min();
|
||||
max[dim] = std::numeric_limits< T >::max();
|
||||
}
|
||||
}
|
||||
|
||||
T min[k];
|
||||
T max[k];
|
||||
};
|
||||
|
||||
struct NoData {};
|
||||
|
||||
template< unsigned k, typename T >
|
||||
class EuclidianMetric {
|
||||
public:
|
||||
double operator() ( const T left[k], const T right[k] ) {
|
||||
double result = 0;
|
||||
for ( unsigned i = 0; i < k; ++i ) {
|
||||
double temp = (double)left[i] - (double)right[i];
|
||||
result += temp * temp;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
double operator() ( const BoundingBox< k, T > &box, const T point[k] ) {
|
||||
T nearest[k];
|
||||
for ( unsigned dim = 0; dim < k; ++dim ) {
|
||||
if ( point[dim] < box.min[dim] )
|
||||
nearest[dim] = box.min[dim];
|
||||
else if ( point[dim] > box.max[dim] )
|
||||
nearest[dim] = box.max[dim];
|
||||
else
|
||||
nearest[dim] = point[dim];
|
||||
}
|
||||
return operator() ( point, nearest );
|
||||
}
|
||||
};
|
||||
|
||||
template < unsigned k, typename T, typename Data = NoData, typename Metric = EuclidianMetric< k, T > >
|
||||
class StaticKDTree {
|
||||
public:
|
||||
|
||||
struct InputPoint {
|
||||
T coordinates[k];
|
||||
Data data;
|
||||
bool operator==( const InputPoint& right )
|
||||
{
|
||||
for ( int i = 0; i < k; i++ ) {
|
||||
if ( coordinates[i] != right.coordinates[i] )
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
StaticKDTree( std::vector< InputPoint > * points ){
|
||||
BOOST_ASSERT( k > 0 );
|
||||
BOOST_ASSERT ( points->size() > 0 );
|
||||
size = points->size();
|
||||
kdtree = new InputPoint[size];
|
||||
for ( Iterator i = 0; i != size; ++i ) {
|
||||
kdtree[i] = points->at(i);
|
||||
for ( unsigned dim = 0; dim < k; ++dim ) {
|
||||
if ( kdtree[i].coordinates[dim] < boundingBox.min[dim] )
|
||||
boundingBox.min[dim] = kdtree[i].coordinates[dim];
|
||||
if ( kdtree[i].coordinates[dim] > boundingBox.max[dim] )
|
||||
boundingBox.max[dim] = kdtree[i].coordinates[dim];
|
||||
}
|
||||
}
|
||||
std::stack< Tree > s;
|
||||
s.push ( Tree ( 0, size, 0 ) );
|
||||
while ( !s.empty() ) {
|
||||
Tree tree = s.top();
|
||||
s.pop();
|
||||
|
||||
if ( tree.right - tree.left < KDTREE_BASESIZE )
|
||||
continue;
|
||||
|
||||
Iterator middle = tree.left + ( tree.right - tree.left ) / 2;
|
||||
std::nth_element( kdtree + tree.left, kdtree + middle, kdtree + tree.right, Less( tree.dimension ) );
|
||||
s.push( Tree( tree.left, middle, ( tree.dimension + 1 ) % k ) );
|
||||
s.push( Tree( middle + 1, tree.right, ( tree.dimension + 1 ) % k ) );
|
||||
}
|
||||
}
|
||||
|
||||
~StaticKDTree(){
|
||||
delete[] kdtree;
|
||||
}
|
||||
|
||||
bool NearestNeighbor( InputPoint* result, const InputPoint& point ) {
|
||||
Metric distance;
|
||||
bool found = false;
|
||||
double nearestDistance = std::numeric_limits< T >::max();
|
||||
std::stack< NNTree > s;
|
||||
s.push ( NNTree ( 0, size, 0, boundingBox ) );
|
||||
while ( !s.empty() ) {
|
||||
NNTree tree = s.top();
|
||||
s.pop();
|
||||
|
||||
if ( distance( tree.box, point.coordinates ) >= nearestDistance )
|
||||
continue;
|
||||
|
||||
if ( tree.right - tree.left < KDTREE_BASESIZE ) {
|
||||
for ( unsigned i = tree.left; i < tree.right; i++ ) {
|
||||
double newDistance = distance( kdtree[i].coordinates, point.coordinates );
|
||||
if ( newDistance < nearestDistance ) {
|
||||
nearestDistance = newDistance;
|
||||
*result = kdtree[i];
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
Iterator middle = tree.left + ( tree.right - tree.left ) / 2;
|
||||
|
||||
double newDistance = distance( kdtree[middle].coordinates, point.coordinates );
|
||||
if ( newDistance < nearestDistance ) {
|
||||
nearestDistance = newDistance;
|
||||
*result = kdtree[middle];
|
||||
found = true;
|
||||
}
|
||||
|
||||
Less comperator( tree.dimension );
|
||||
if ( !comperator( point, kdtree[middle] ) ) {
|
||||
NNTree first( middle + 1, tree.right, ( tree.dimension + 1 ) % k, tree.box );
|
||||
NNTree second( tree.left, middle, ( tree.dimension + 1 ) % k, tree.box );
|
||||
first.box.min[tree.dimension] = kdtree[middle].coordinates[tree.dimension];
|
||||
second.box.max[tree.dimension] = kdtree[middle].coordinates[tree.dimension];
|
||||
s.push( second );
|
||||
s.push( first );
|
||||
}
|
||||
else {
|
||||
NNTree first( middle + 1, tree.right, ( tree.dimension + 1 ) % k, tree.box );
|
||||
NNTree second( tree.left, middle, ( tree.dimension + 1 ) % k, tree.box );
|
||||
first.box.min[tree.dimension] = kdtree[middle].coordinates[tree.dimension];
|
||||
second.box.max[tree.dimension] = kdtree[middle].coordinates[tree.dimension];
|
||||
s.push( first );
|
||||
s.push( second );
|
||||
}
|
||||
}
|
||||
return found;
|
||||
}
|
||||
|
||||
private:
|
||||
typedef unsigned Iterator;
|
||||
struct Tree {
|
||||
Iterator left;
|
||||
Iterator right;
|
||||
unsigned dimension;
|
||||
Tree() {}
|
||||
Tree( Iterator l, Iterator r, unsigned d ): left( l ), right( r ), dimension( d ) {}
|
||||
};
|
||||
struct NNTree {
|
||||
Iterator left;
|
||||
Iterator right;
|
||||
unsigned dimension;
|
||||
BoundingBox< k, T > box;
|
||||
NNTree() {}
|
||||
NNTree( Iterator l, Iterator r, unsigned d, const BoundingBox< k, T >& b ): left( l ), right( r ), dimension( d ), box ( b ) {}
|
||||
};
|
||||
class Less {
|
||||
public:
|
||||
Less( unsigned d ) {
|
||||
dimension = d;
|
||||
BOOST_ASSERT( dimension < k );
|
||||
}
|
||||
|
||||
bool operator() ( const InputPoint& left, const InputPoint& right ) {
|
||||
BOOST_ASSERT( dimension < k );
|
||||
return left.coordinates[dimension] < right.coordinates[dimension];
|
||||
}
|
||||
private:
|
||||
unsigned dimension;
|
||||
};
|
||||
|
||||
BoundingBox< k, T > boundingBox;
|
||||
InputPoint* kdtree;
|
||||
Iterator size;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // STATICKDTREE_H_INCLUDED
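A short query sketch for the tree above: two-dimensional integer points with the default NoData payload and Euclidean metric. With fewer than KDTREE_BASESIZE points everything lands in the linear base case, so even a tiny input exercises NearestNeighbor.

#include <vector>

void kdtree_example() {
    typedef KDTree::StaticKDTree<2, int> Tree;   // 2-D tree over int coordinates

    std::vector<Tree::InputPoint> points(3);
    points[0].coordinates[0] = 0;  points[0].coordinates[1] = 0;
    points[1].coordinates[0] = 10; points[1].coordinates[1] = 10;
    points[2].coordinates[0] = 3;  points[2].coordinates[1] = 4;

    Tree tree(&points);    // copies the points and builds the tree in place

    Tree::InputPoint query, nearest;
    query.coordinates[0] = 2;
    query.coordinates[1] = 3;
    if (tree.NearestNeighbor(&nearest, query)) {
        // nearest now holds the closest input point, here (3,4).
    }
}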
937 DataStructures/StaticRTree.h Normal file
@ -0,0 +1,937 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef STATICRTREE_H_
|
||||
#define STATICRTREE_H_
|
||||
|
||||
#include "DeallocatingVector.h"
|
||||
#include "HilbertValue.h"
|
||||
#include "PhantomNodes.h"
|
||||
#include "SharedMemoryFactory.h"
|
||||
#include "SharedMemoryVectorWrapper.h"
|
||||
|
||||
#include "../Util/MercatorUtil.h"
|
||||
#include "../Util/OSRMException.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../Util/TimingUtil.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/bind.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <boost/filesystem/fstream.hpp>
|
||||
#include <boost/algorithm/minmax.hpp>
|
||||
#include <boost/algorithm/minmax_element.hpp>
|
||||
#include <boost/range/algorithm_ext/erase.hpp>
|
||||
#include <boost/noncopyable.hpp>
|
||||
#include <boost/thread.hpp>
|
||||
#include <boost/type_traits.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
#include <queue>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
//tuning parameters
|
||||
const static uint32_t RTREE_BRANCHING_FACTOR = 50;
|
||||
const static uint32_t RTREE_LEAF_NODE_SIZE = 1170;
|
||||
|
||||
// Implements a static, i.e. packed, R-tree
|
||||
|
||||
static boost::thread_specific_ptr<boost::filesystem::ifstream> thread_local_rtree_stream;
|
||||
|
||||
template<class DataT, bool UseSharedMemory = false>
|
||||
class StaticRTree : boost::noncopyable {
|
||||
public:
|
||||
struct RectangleInt2D {
|
||||
RectangleInt2D() :
|
||||
min_lon(INT_MAX),
|
||||
max_lon(INT_MIN),
|
||||
min_lat(INT_MAX),
|
||||
max_lat(INT_MIN) {}
|
||||
|
||||
int32_t min_lon, max_lon;
|
||||
int32_t min_lat, max_lat;
|
||||
|
||||
inline void InitializeMBRectangle(
|
||||
const DataT * objects,
|
||||
const uint32_t element_count
|
||||
) {
|
||||
for(uint32_t i = 0; i < element_count; ++i) {
|
||||
min_lon = std::min(
|
||||
min_lon, std::min(objects[i].lon1, objects[i].lon2)
|
||||
);
|
||||
max_lon = std::max(
|
||||
max_lon, std::max(objects[i].lon1, objects[i].lon2)
|
||||
);
|
||||
|
||||
min_lat = std::min(
|
||||
min_lat, std::min(objects[i].lat1, objects[i].lat2)
|
||||
);
|
||||
max_lat = std::max(
|
||||
max_lat, std::max(objects[i].lat1, objects[i].lat2)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
inline void AugmentMBRectangle(const RectangleInt2D & other) {
|
||||
min_lon = std::min(min_lon, other.min_lon);
|
||||
max_lon = std::max(max_lon, other.max_lon);
|
||||
min_lat = std::min(min_lat, other.min_lat);
|
||||
max_lat = std::max(max_lat, other.max_lat);
|
||||
}
|
||||
|
||||
inline FixedPointCoordinate Centroid() const {
|
||||
FixedPointCoordinate centroid;
|
||||
//The coordinates of the midpoints are given by:
|
||||
//x = (x1 + x2) /2 and y = (y1 + y2) /2.
|
||||
centroid.lon = (min_lon + max_lon)/2;
|
||||
centroid.lat = (min_lat + max_lat)/2;
|
||||
return centroid;
|
||||
}
|
||||
|
||||
inline bool Intersects(const RectangleInt2D & other) const {
|
||||
FixedPointCoordinate upper_left (other.max_lat, other.min_lon);
|
||||
FixedPointCoordinate upper_right(other.max_lat, other.max_lon);
|
||||
FixedPointCoordinate lower_right(other.min_lat, other.max_lon);
|
||||
FixedPointCoordinate lower_left (other.min_lat, other.min_lon);
|
||||
|
||||
return (
|
||||
Contains(upper_left ) ||
|
||||
Contains(upper_right) ||
|
||||
Contains(lower_right) ||
|
||||
Contains(lower_left )
|
||||
);
|
||||
}
|
||||
|
||||
inline double GetMinDist(const FixedPointCoordinate & location) const {
|
||||
bool is_contained = Contains(location);
|
||||
if (is_contained) {
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
double min_dist = std::numeric_limits<double>::max();
|
||||
min_dist = std::min(
|
||||
min_dist,
|
||||
FixedPointCoordinate::ApproximateDistance(
|
||||
location.lat,
|
||||
location.lon,
|
||||
max_lat,
|
||||
min_lon
|
||||
)
|
||||
);
|
||||
min_dist = std::min(
|
||||
min_dist,
|
||||
FixedPointCoordinate::ApproximateDistance(
|
||||
location.lat,
|
||||
location.lon,
|
||||
max_lat,
|
||||
max_lon
|
||||
)
|
||||
);
|
||||
min_dist = std::min(
|
||||
min_dist,
|
||||
FixedPointCoordinate::ApproximateDistance(
|
||||
location.lat,
|
||||
location.lon,
|
||||
min_lat,
|
||||
max_lon
|
||||
)
|
||||
);
|
||||
min_dist = std::min(
|
||||
min_dist,
|
||||
FixedPointCoordinate::ApproximateDistance(
|
||||
location.lat,
|
||||
location.lon,
|
||||
min_lat,
|
||||
min_lon
|
||||
)
|
||||
);
|
||||
return min_dist;
|
||||
}
|
||||
|
||||
inline double GetMinMaxDist(const FixedPointCoordinate & location) const {
|
||||
double min_max_dist = std::numeric_limits<double>::max();
|
||||
//Get minmax distance to each of the four sides
|
||||
FixedPointCoordinate upper_left (max_lat, min_lon);
|
||||
FixedPointCoordinate upper_right(max_lat, max_lon);
|
||||
FixedPointCoordinate lower_right(min_lat, max_lon);
|
||||
FixedPointCoordinate lower_left (min_lat, min_lon);
|
||||
|
||||
min_max_dist = std::min(
|
||||
min_max_dist,
|
||||
std::max(
|
||||
FixedPointCoordinate::ApproximateDistance(location, upper_left ),
|
||||
FixedPointCoordinate::ApproximateDistance(location, upper_right)
|
||||
)
|
||||
);
|
||||
|
||||
min_max_dist = std::min(
|
||||
min_max_dist,
|
||||
std::max(
|
||||
FixedPointCoordinate::ApproximateDistance(location, upper_right),
|
||||
FixedPointCoordinate::ApproximateDistance(location, lower_right)
|
||||
)
|
||||
);
|
||||
|
||||
min_max_dist = std::min(
|
||||
min_max_dist,
|
||||
std::max(
|
||||
FixedPointCoordinate::ApproximateDistance(location, lower_right),
|
||||
FixedPointCoordinate::ApproximateDistance(location, lower_left )
|
||||
)
|
||||
);
|
||||
|
||||
min_max_dist = std::min(
|
||||
min_max_dist,
|
||||
std::max(
|
||||
FixedPointCoordinate::ApproximateDistance(location, lower_left ),
|
||||
FixedPointCoordinate::ApproximateDistance(location, upper_left )
|
||||
)
|
||||
);
|
||||
return min_max_dist;
|
||||
}
|
||||
|
||||
inline bool Contains(const FixedPointCoordinate & location) const {
|
||||
bool lats_contained =
|
||||
(location.lat > min_lat) && (location.lat < max_lat);
|
||||
bool lons_contained =
|
||||
(location.lon > min_lon) && (location.lon < max_lon);
|
||||
return lats_contained && lons_contained;
|
||||
}
|
||||
|
||||
inline friend std::ostream & operator<< (
|
||||
std::ostream & out,
|
||||
const RectangleInt2D & rect
|
||||
) {
|
||||
out << rect.min_lat/COORDINATE_PRECISION << ","
|
||||
<< rect.min_lon/COORDINATE_PRECISION << " "
|
||||
<< rect.max_lat/COORDINATE_PRECISION << ","
|
||||
<< rect.max_lon/COORDINATE_PRECISION;
|
||||
return out;
|
||||
}
|
||||
};
|
||||
|
||||
typedef RectangleInt2D RectangleT;
|
||||
|
||||
struct TreeNode {
|
||||
TreeNode() : child_count(0), child_is_on_disk(false) {}
|
||||
RectangleT minimum_bounding_rectangle;
|
||||
uint32_t child_count:31;
|
||||
bool child_is_on_disk:1;
|
||||
uint32_t children[RTREE_BRANCHING_FACTOR];
|
||||
};
|
||||
|
||||
private:
|
||||
|
||||
struct WrappedInputElement {
|
||||
explicit WrappedInputElement(
|
||||
const uint32_t _array_index,
|
||||
const uint64_t _hilbert_value
|
||||
) : m_array_index(_array_index), m_hilbert_value(_hilbert_value) {}
|
||||
|
||||
WrappedInputElement() : m_array_index(UINT_MAX), m_hilbert_value(0) {}
|
||||
|
||||
uint32_t m_array_index;
|
||||
uint64_t m_hilbert_value;
|
||||
|
||||
inline bool operator<(const WrappedInputElement & other) const {
|
||||
return m_hilbert_value < other.m_hilbert_value;
|
||||
}
|
||||
};
|
||||
|
||||
struct LeafNode {
|
||||
LeafNode() : object_count(0) {}
|
||||
uint32_t object_count;
|
||||
DataT objects[RTREE_LEAF_NODE_SIZE];
|
||||
};
|
||||
|
||||
struct QueryCandidate {
|
||||
explicit QueryCandidate(
|
||||
const uint32_t n_id,
|
||||
const double dist
|
||||
) : node_id(n_id), min_dist(dist) {}
|
||||
QueryCandidate() : node_id(UINT_MAX), min_dist(std::numeric_limits<double>::max()) {}
|
||||
uint32_t node_id;
|
||||
double min_dist;
|
||||
inline bool operator<(const QueryCandidate & other) const {
|
||||
return min_dist < other.min_dist;
|
||||
}
|
||||
};
|
||||
|
||||
typename ShM<TreeNode, UseSharedMemory>::vector m_search_tree;
|
||||
uint64_t m_element_count;
|
||||
|
||||
const std::string m_leaf_node_filename;
|
||||
public:
|
||||
//Construct a packed Hilbert-R-Tree with Kamel-Faloutsos algorithm [1]
|
||||
explicit StaticRTree(
|
||||
std::vector<DataT> & input_data_vector,
|
||||
const std::string tree_node_filename,
|
||||
const std::string leaf_node_filename
|
||||
)
|
||||
: m_element_count(input_data_vector.size()),
|
||||
m_leaf_node_filename(leaf_node_filename)
|
||||
{
|
||||
SimpleLogger().Write() <<
|
||||
"constructing r-tree of " << m_element_count <<
|
||||
" elements";
|
||||
|
||||
double time1 = get_timestamp();
|
||||
std::vector<WrappedInputElement> input_wrapper_vector(m_element_count);
|
||||
|
||||
HilbertCode get_hilbert_number;
|
||||
|
||||
//generate auxiliary vector of hilbert-values
|
||||
#pragma omp parallel for schedule(guided)
|
||||
for(uint64_t element_counter = 0; element_counter < m_element_count; ++element_counter) {
|
||||
input_wrapper_vector[element_counter].m_array_index = element_counter;
|
||||
//Get Hilbert-Value for centroid in mercartor projection
|
||||
DataT & current_element = input_data_vector[element_counter];
|
||||
FixedPointCoordinate current_centroid = current_element.Centroid();
|
||||
current_centroid.lat = COORDINATE_PRECISION*lat2y(current_centroid.lat/COORDINATE_PRECISION);
|
||||
|
||||
uint64_t current_hilbert_value = get_hilbert_number(current_centroid);
|
||||
input_wrapper_vector[element_counter].m_hilbert_value = current_hilbert_value;
|
||||
}
|
||||
|
||||
//open leaf file
|
||||
boost::filesystem::ofstream leaf_node_file(leaf_node_filename, std::ios::binary);
|
||||
leaf_node_file.write((char*) &m_element_count, sizeof(uint64_t));
|
||||
|
||||
//sort the hilbert-value representatives
|
||||
std::sort(input_wrapper_vector.begin(), input_wrapper_vector.end());
|
||||
std::vector<TreeNode> tree_nodes_in_level;
|
||||
|
||||
//pack M elements into leaf node and write to leaf file
|
||||
uint64_t processed_objects_count = 0;
|
||||
while(processed_objects_count < m_element_count) {
|
||||
|
||||
LeafNode current_leaf;
|
||||
TreeNode current_node;
|
||||
//SimpleLogger().Write() << "reading " << tree_size << " tree nodes in " << (sizeof(TreeNode)*tree_size) << " bytes";
|
||||
for(uint32_t current_element_index = 0; RTREE_LEAF_NODE_SIZE > current_element_index; ++current_element_index) {
|
||||
if(m_element_count > (processed_objects_count + current_element_index)) {
|
||||
uint32_t index_of_next_object = input_wrapper_vector[processed_objects_count + current_element_index].m_array_index;
|
||||
current_leaf.objects[current_element_index] = input_data_vector[index_of_next_object];
|
||||
++current_leaf.object_count;
|
||||
}
|
||||
}
|
||||
|
||||
//generate tree node that resemble the objects in leaf and store it for next level
|
||||
current_node.minimum_bounding_rectangle.InitializeMBRectangle(current_leaf.objects, current_leaf.object_count);
|
||||
current_node.child_is_on_disk = true;
|
||||
current_node.children[0] = tree_nodes_in_level.size();
|
||||
tree_nodes_in_level.push_back(current_node);
|
||||
|
||||
//write leaf_node to leaf node file
|
||||
leaf_node_file.write((char*)&current_leaf, sizeof(current_leaf));
|
||||
processed_objects_count += current_leaf.object_count;
|
||||
}
|
||||
|
||||
//close leaf file
|
||||
leaf_node_file.close();
|
||||
|
||||
uint32_t processing_level = 0;
|
||||
while(1 < tree_nodes_in_level.size()) {
|
||||
std::vector<TreeNode> tree_nodes_in_next_level;
|
||||
uint32_t processed_tree_nodes_in_level = 0;
|
||||
while(processed_tree_nodes_in_level < tree_nodes_in_level.size()) {
|
||||
TreeNode parent_node;
|
||||
//pack RTREE_BRANCHING_FACTOR elements into tree_nodes each
|
||||
for(
|
||||
uint32_t current_child_node_index = 0;
|
||||
RTREE_BRANCHING_FACTOR > current_child_node_index;
|
||||
++current_child_node_index
|
||||
) {
|
||||
if(processed_tree_nodes_in_level < tree_nodes_in_level.size()) {
|
||||
TreeNode & current_child_node = tree_nodes_in_level[processed_tree_nodes_in_level];
|
||||
//add tree node to parent entry
|
||||
parent_node.children[current_child_node_index] = m_search_tree.size();
|
||||
m_search_tree.push_back(current_child_node);
|
||||
//augment MBR of parent
|
||||
parent_node.minimum_bounding_rectangle.AugmentMBRectangle(current_child_node.minimum_bounding_rectangle);
|
||||
//increase counters
|
||||
++parent_node.child_count;
|
||||
++processed_tree_nodes_in_level;
|
||||
}
|
||||
}
|
||||
tree_nodes_in_next_level.push_back(parent_node);
|
||||
}
|
||||
tree_nodes_in_level.swap(tree_nodes_in_next_level);
|
||||
++processing_level;
|
||||
}
|
||||
BOOST_ASSERT_MSG(1 == tree_nodes_in_level.size(), "tree broken, more than one root node");
|
||||
//last remaining entry is the root node, store it
|
||||
m_search_tree.push_back(tree_nodes_in_level[0]);
|
||||
|
||||
//reverse and renumber tree to have root at index 0
|
||||
std::reverse(m_search_tree.begin(), m_search_tree.end());
|
||||
|
||||
#pragma omp parallel for schedule(guided)
|
||||
for(uint32_t i = 0; i < m_search_tree.size(); ++i) {
|
||||
TreeNode & current_tree_node = m_search_tree[i];
|
||||
for(uint32_t j = 0; j < current_tree_node.child_count; ++j) {
|
||||
const uint32_t old_id = current_tree_node.children[j];
|
||||
const uint32_t new_id = m_search_tree.size() - old_id - 1;
|
||||
current_tree_node.children[j] = new_id;
|
||||
}
|
||||
}
|
||||
|
||||
//open tree file
|
||||
boost::filesystem::ofstream tree_node_file(
|
||||
tree_node_filename,
|
||||
std::ios::binary
|
||||
);
|
||||
|
||||
uint32_t size_of_tree = m_search_tree.size();
|
||||
BOOST_ASSERT_MSG(0 < size_of_tree, "tree empty");
|
||||
tree_node_file.write((char *)&size_of_tree, sizeof(uint32_t));
|
||||
tree_node_file.write((char *)&m_search_tree[0], sizeof(TreeNode)*size_of_tree);
|
||||
//close tree node file.
|
||||
tree_node_file.close();
|
||||
double time2 = get_timestamp();
|
||||
SimpleLogger().Write() <<
|
||||
"finished r-tree construction in " << (time2-time1) << " seconds";
|
||||
}
|
||||
|
||||
//Read-only operation for queries
|
||||
explicit StaticRTree(
|
||||
const boost::filesystem::path & node_file,
|
||||
const boost::filesystem::path & leaf_file
|
||||
) : m_leaf_node_filename(leaf_file.string()) {
|
||||
//open tree node file and load into RAM.
|
||||
|
||||
if ( !boost::filesystem::exists( node_file ) ) {
|
||||
throw OSRMException("ram index file does not exist");
|
||||
}
|
||||
if ( 0 == boost::filesystem::file_size( node_file ) ) {
|
||||
throw OSRMException("ram index file is empty");
|
||||
}
|
||||
boost::filesystem::ifstream tree_node_file( node_file, std::ios::binary );
|
||||
|
||||
uint32_t tree_size = 0;
|
||||
tree_node_file.read((char*)&tree_size, sizeof(uint32_t));
|
||||
|
||||
m_search_tree.resize(tree_size);
|
||||
tree_node_file.read((char*)&m_search_tree[0], sizeof(TreeNode)*tree_size);
|
||||
tree_node_file.close();
|
||||
//open leaf node file and store thread specific pointer
|
||||
if ( !boost::filesystem::exists( leaf_file ) ) {
|
||||
throw OSRMException("mem index file does not exist");
|
||||
}
|
||||
if ( 0 == boost::filesystem::file_size( leaf_file ) ) {
|
||||
throw OSRMException("mem index file is empty");
|
||||
}
|
||||
|
||||
boost::filesystem::ifstream leaf_node_file( leaf_file, std::ios::binary );
|
||||
leaf_node_file.read((char*)&m_element_count, sizeof(uint64_t));
|
||||
leaf_node_file.close();
|
||||
|
||||
//SimpleLogger().Write() << tree_size << " nodes in search tree";
|
||||
//SimpleLogger().Write() << m_element_count << " elements in leafs";
|
||||
}
|
||||
|
||||
explicit StaticRTree(
|
||||
TreeNode * tree_node_ptr,
|
||||
const uint32_t number_of_nodes,
|
||||
const boost::filesystem::path & leaf_file
|
||||
) : m_search_tree(tree_node_ptr, number_of_nodes),
|
||||
m_leaf_node_filename(leaf_file.string())
|
||||
{
|
||||
//open leaf node file and store thread specific pointer
|
||||
if ( !boost::filesystem::exists( leaf_file ) ) {
|
||||
throw OSRMException("mem index file does not exist");
|
||||
}
|
||||
if ( 0 == boost::filesystem::file_size( leaf_file ) ) {
|
||||
throw OSRMException("mem index file is empty");
|
||||
}
|
||||
|
||||
boost::filesystem::ifstream leaf_node_file( leaf_file, std::ios::binary );
|
||||
leaf_node_file.read((char*)&m_element_count, sizeof(uint64_t));
|
||||
leaf_node_file.close();
|
||||
|
||||
if( thread_local_rtree_stream.get() ) {
|
||||
thread_local_rtree_stream->close();
|
||||
}
|
||||
|
||||
//SimpleLogger().Write() << tree_size << " nodes in search tree";
|
||||
//SimpleLogger().Write() << m_element_count << " elements in leafs";
|
||||
}
|
||||
//Read-only operation for queries
|
||||
/*
|
||||
inline void FindKNearestPhantomNodesForCoordinate(
|
||||
const FixedPointCoordinate & location,
|
||||
const unsigned zoom_level,
|
||||
const unsigned candidate_count,
|
||||
std::vector<std::pair<PhantomNode, double> > & result_vector
|
||||
) const {
|
||||
|
||||
bool ignore_tiny_components = (zoom_level <= 14);
|
||||
DataT nearest_edge;
|
||||
|
||||
uint32_t io_count = 0;
|
||||
uint32_t explored_tree_nodes_count = 0;
|
||||
SimpleLogger().Write() << "searching for coordinate " << input_coordinate;
|
||||
double min_dist = std::numeric_limits<double>::max();
|
||||
double min_max_dist = std::numeric_limits<double>::max();
|
||||
bool found_a_nearest_edge = false;
|
||||
|
||||
FixedPointCoordinate nearest, current_start_coordinate, current_end_coordinate;
|
||||
|
||||
//initialize queue with root element
|
||||
std::priority_queue<QueryCandidate> traversal_queue;
|
||||
traversal_queue.push(QueryCandidate(0, m_search_tree[0].minimum_bounding_rectangle.GetMinDist(input_coordinate)));
|
||||
BOOST_ASSERT_MSG(std::numeric_limits<double>::epsilon() > (0. - traversal_queue.top().min_dist), "Root element in NN Search has min dist != 0.");
|
||||
|
||||
while(!traversal_queue.empty()) {
|
||||
const QueryCandidate current_query_node = traversal_queue.top(); traversal_queue.pop();
|
||||
|
||||
++explored_tree_nodes_count;
|
||||
bool prune_downward = (current_query_node.min_dist >= min_max_dist);
|
||||
bool prune_upward = (current_query_node.min_dist >= min_dist);
|
||||
if( !prune_downward && !prune_upward ) { //downward pruning
|
||||
TreeNode & current_tree_node = m_search_tree[current_query_node.node_id];
|
||||
if (current_tree_node.child_is_on_disk) {
|
||||
LeafNode current_leaf_node;
|
||||
LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node);
|
||||
++io_count;
|
||||
for(uint32_t i = 0; i < current_leaf_node.object_count; ++i) {
|
||||
DataT & current_edge = current_leaf_node.objects[i];
|
||||
if(ignore_tiny_components && current_edge.belongsToTinyComponent) {
|
||||
continue;
|
||||
}
|
||||
|
||||
double current_ratio = 0.;
|
||||
double current_perpendicular_distance = current_edge.ComputePerpendicularDistance(
|
||||
input_coordinate,
|
||||
nearest,
|
||||
current_ratio
|
||||
);
|
||||
|
||||
if(
|
||||
current_perpendicular_distance < min_dist
|
||||
&& !DoubleEpsilonCompare(
|
||||
current_perpendicular_distance,
|
||||
min_dist
|
||||
)
|
||||
) { //found a new minimum
|
||||
min_dist = current_perpendicular_distance;
|
||||
result_phantom_node.edgeBasedNode = current_edge.id;
|
||||
result_phantom_node.nodeBasedEdgeNameID = current_edge.nameID;
|
||||
result_phantom_node.weight1 = current_edge.weight;
|
||||
result_phantom_node.weight2 = INT_MAX;
|
||||
result_phantom_node.location = nearest;
|
||||
current_start_coordinate.lat = current_edge.lat1;
|
||||
current_start_coordinate.lon = current_edge.lon1;
|
||||
current_end_coordinate.lat = current_edge.lat2;
|
||||
current_end_coordinate.lon = current_edge.lon2;
|
||||
nearest_edge = current_edge;
|
||||
found_a_nearest_edge = true;
|
||||
} else if(
|
||||
DoubleEpsilonCompare(current_perpendicular_distance, min_dist) &&
|
||||
1 == abs(current_edge.id - result_phantom_node.edgeBasedNode )
|
||||
&& EdgesAreEquivalent(
|
||||
current_start_coordinate,
|
||||
FixedPointCoordinate(
|
||||
current_edge.lat1,
|
||||
current_edge.lon1
|
||||
),
|
||||
FixedPointCoordinate(
|
||||
current_edge.lat2,
|
||||
current_edge.lon2
|
||||
),
|
||||
current_end_coordinate
|
||||
)
|
||||
) {
|
||||
result_phantom_node.edgeBasedNode = std::min(current_edge.id, result_phantom_node.edgeBasedNode);
|
||||
result_phantom_node.weight2 = current_edge.weight;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//traverse children, prune if global mindist is smaller than local one
|
||||
for (uint32_t i = 0; i < current_tree_node.child_count; ++i) {
|
||||
const int32_t child_id = current_tree_node.children[i];
|
||||
TreeNode & child_tree_node = m_search_tree[child_id];
|
||||
RectangleT & child_rectangle = child_tree_node.minimum_bounding_rectangle;
|
||||
const double current_min_dist = child_rectangle.GetMinDist(input_coordinate);
|
||||
const double current_min_max_dist = child_rectangle.GetMinMaxDist(input_coordinate);
|
||||
if( current_min_max_dist < min_max_dist ) {
|
||||
min_max_dist = current_min_max_dist;
|
||||
}
|
||||
if (current_min_dist > min_max_dist) {
|
||||
continue;
|
||||
}
|
||||
if (current_min_dist > min_dist) { //upward pruning
|
||||
continue;
|
||||
}
|
||||
traversal_queue.push(QueryCandidate(child_id, current_min_dist));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const double distance_to_edge =
|
||||
ApproximateDistance (
|
||||
FixedPointCoordinate(nearest_edge.lat1, nearest_edge.lon1),
|
||||
result_phantom_node.location
|
||||
);
|
||||
|
||||
const double length_of_edge =
|
||||
ApproximateDistance(
|
||||
FixedPointCoordinate(nearest_edge.lat1, nearest_edge.lon1),
|
||||
FixedPointCoordinate(nearest_edge.lat2, nearest_edge.lon2)
|
||||
);
|
||||
|
||||
const double ratio = (found_a_nearest_edge ?
|
||||
std::min(1., distance_to_edge/ length_of_edge ) : 0 );
|
||||
result_phantom_node.weight1 *= ratio;
|
||||
if(INT_MAX != result_phantom_node.weight2) {
|
||||
result_phantom_node.weight2 *= (1.-ratio);
|
||||
}
|
||||
result_phantom_node.ratio = ratio;
|
||||
|
||||
//Hack to fix rounding errors and wandering via nodes.
|
||||
if(std::abs(input_coordinate.lon - result_phantom_node.location.lon) == 1) {
|
||||
result_phantom_node.location.lon = input_coordinate.lon;
|
||||
}
|
||||
if(std::abs(input_coordinate.lat - result_phantom_node.location.lat) == 1) {
|
||||
result_phantom_node.location.lat = input_coordinate.lat;
|
||||
}
|
||||
|
||||
SimpleLogger().Write() << "mindist: " << min_distphantom_node.isBidirected() ? "yes" : "no");
|
||||
return found_a_nearest_edge;
|
||||
|
||||
}
|
||||
|
||||
*/
|
||||
bool LocateClosestEndPointForCoordinate(
|
||||
const FixedPointCoordinate & input_coordinate,
|
||||
FixedPointCoordinate & result_coordinate,
|
||||
const unsigned zoom_level
|
||||
) {
|
||||
bool ignore_tiny_components = (zoom_level <= 14);
|
||||
DataT nearest_edge;
|
||||
double min_dist = std::numeric_limits<double>::max();
|
||||
double min_max_dist = std::numeric_limits<double>::max();
|
||||
bool found_a_nearest_edge = false;
|
||||
|
||||
//initialize queue with root element
|
||||
std::priority_queue<QueryCandidate> traversal_queue;
|
||||
double current_min_dist = m_search_tree[0].minimum_bounding_rectangle.GetMinDist(input_coordinate);
|
||||
traversal_queue.push(
|
||||
QueryCandidate(0, current_min_dist)
|
||||
);
|
||||
|
||||
BOOST_ASSERT_MSG(
|
||||
std::numeric_limits<double>::epsilon() > (0. - traversal_queue.top().min_dist),
|
||||
"Root element in NN Search has min dist != 0."
|
||||
);
|
||||
|
||||
while(!traversal_queue.empty()) {
|
||||
const QueryCandidate current_query_node = traversal_queue.top();
|
||||
traversal_queue.pop();
|
||||
|
||||
const bool prune_downward = (current_query_node.min_dist >= min_max_dist);
|
||||
const bool prune_upward = (current_query_node.min_dist >= min_dist);
|
||||
if( !prune_downward && !prune_upward ) { //downward pruning
|
||||
TreeNode & current_tree_node = m_search_tree[current_query_node.node_id];
|
||||
if (current_tree_node.child_is_on_disk) {
|
||||
LeafNode current_leaf_node;
|
||||
LoadLeafFromDisk(
|
||||
current_tree_node.children[0],
|
||||
current_leaf_node
|
||||
);
|
||||
for(uint32_t i = 0; i < current_leaf_node.object_count; ++i) {
|
||||
const DataT & current_edge = current_leaf_node.objects[i];
|
||||
if(
|
||||
ignore_tiny_components &&
|
||||
current_edge.belongsToTinyComponent
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
double current_minimum_distance = FixedPointCoordinate::ApproximateDistance(
|
||||
input_coordinate.lat,
|
||||
input_coordinate.lon,
|
||||
current_edge.lat1,
|
||||
current_edge.lon1
|
||||
);
|
||||
if( current_minimum_distance < min_dist ) {
|
||||
//found a new minimum
|
||||
min_dist = current_minimum_distance;
|
||||
result_coordinate.lat = current_edge.lat1;
|
||||
result_coordinate.lon = current_edge.lon1;
|
||||
found_a_nearest_edge = true;
|
||||
}
|
||||
|
||||
current_minimum_distance = FixedPointCoordinate::ApproximateDistance(
|
||||
input_coordinate.lat,
|
||||
input_coordinate.lon,
|
||||
current_edge.lat2,
|
||||
current_edge.lon2
|
||||
);
|
||||
|
||||
if( current_minimum_distance < min_dist ) {
|
||||
//found a new minimum
|
||||
min_dist = current_minimum_distance;
|
||||
result_coordinate.lat = current_edge.lat2;
|
||||
result_coordinate.lon = current_edge.lon2;
|
||||
found_a_nearest_edge = true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//traverse children, prune if global mindist is smaller than local one
|
||||
for (uint32_t i = 0; i < current_tree_node.child_count; ++i) {
|
||||
const int32_t child_id = current_tree_node.children[i];
|
||||
const TreeNode & child_tree_node = m_search_tree[child_id];
|
||||
const RectangleT & child_rectangle = child_tree_node.minimum_bounding_rectangle;
|
||||
const double current_min_dist = child_rectangle.GetMinDist(input_coordinate);
|
||||
const double current_min_max_dist = child_rectangle.GetMinMaxDist(input_coordinate);
|
||||
if( current_min_max_dist < min_max_dist ) {
|
||||
min_max_dist = current_min_max_dist;
|
||||
}
|
||||
if (current_min_dist > min_max_dist) {
|
||||
continue;
|
||||
}
|
||||
if (current_min_dist > min_dist) { //upward pruning
|
||||
continue;
|
||||
}
|
||||
traversal_queue.push(
|
||||
QueryCandidate(child_id, current_min_dist)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return found_a_nearest_edge;
|
||||
}
|
||||
|
||||
bool FindPhantomNodeForCoordinate(
|
||||
const FixedPointCoordinate & input_coordinate,
|
||||
PhantomNode & result_phantom_node,
|
||||
const unsigned zoom_level
|
||||
) {
|
||||
|
||||
bool ignore_tiny_components = (zoom_level <= 14);
|
||||
DataT nearest_edge;
|
||||
|
||||
// uint32_t io_count = 0;
|
||||
uint32_t explored_tree_nodes_count = 0;
|
||||
//SimpleLogger().Write() << "searching for coordinate " << input_coordinate;
|
||||
double min_dist = std::numeric_limits<double>::max();
|
||||
double min_max_dist = std::numeric_limits<double>::max();
|
||||
bool found_a_nearest_edge = false;
|
||||
|
||||
FixedPointCoordinate nearest, current_start_coordinate, current_end_coordinate;
|
||||
|
||||
//initialize queue with root element
|
||||
std::priority_queue<QueryCandidate> traversal_queue;
|
||||
double current_min_dist = m_search_tree[0].minimum_bounding_rectangle.GetMinDist(input_coordinate);
|
||||
traversal_queue.push( QueryCandidate(0, current_min_dist) );
|
||||
|
||||
BOOST_ASSERT_MSG(
|
||||
std::numeric_limits<double>::epsilon() > (0. - traversal_queue.top().min_dist),
|
||||
"Root element in NN Search has min dist != 0."
|
||||
);
|
||||
|
||||
LeafNode current_leaf_node;
|
||||
while(!traversal_queue.empty()) {
|
||||
const QueryCandidate current_query_node = traversal_queue.top(); traversal_queue.pop();
|
||||
|
||||
++explored_tree_nodes_count;
|
||||
bool prune_downward = (current_query_node.min_dist >= min_max_dist);
|
||||
bool prune_upward = (current_query_node.min_dist >= min_dist);
|
||||
if( !prune_downward && !prune_upward ) { //downward pruning
|
||||
TreeNode & current_tree_node = m_search_tree[current_query_node.node_id];
|
||||
if (current_tree_node.child_is_on_disk) {
|
||||
LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node);
|
||||
// ++io_count;
|
||||
for(uint32_t i = 0; i < current_leaf_node.object_count; ++i) {
|
||||
DataT & current_edge = current_leaf_node.objects[i];
|
||||
if(ignore_tiny_components && current_edge.belongsToTinyComponent) {
|
||||
continue;
|
||||
}
|
||||
|
||||
double current_ratio = 0.;
|
||||
double current_perpendicular_distance = current_edge.ComputePerpendicularDistance(
|
||||
input_coordinate,
|
||||
nearest,
|
||||
current_ratio
|
||||
);
|
||||
|
||||
BOOST_ASSERT( 0. <= current_perpendicular_distance );
|
||||
|
||||
if(
|
||||
( current_perpendicular_distance < min_dist ) &&
|
||||
!DoubleEpsilonCompare(
|
||||
current_perpendicular_distance,
|
||||
min_dist
|
||||
)
|
||||
) { //found a new minimum
|
||||
min_dist = current_perpendicular_distance;
|
||||
result_phantom_node.edgeBasedNode = current_edge.id;
|
||||
result_phantom_node.nodeBasedEdgeNameID = current_edge.nameID;
|
||||
result_phantom_node.weight1 = current_edge.weight;
|
||||
result_phantom_node.weight2 = INT_MAX;
|
||||
result_phantom_node.location = nearest;
|
||||
current_start_coordinate.lat = current_edge.lat1;
|
||||
current_start_coordinate.lon = current_edge.lon1;
|
||||
current_end_coordinate.lat = current_edge.lat2;
|
||||
current_end_coordinate.lon = current_edge.lon2;
|
||||
nearest_edge = current_edge;
|
||||
found_a_nearest_edge = true;
|
||||
} else
|
||||
if( DoubleEpsilonCompare(current_perpendicular_distance, min_dist) &&
|
||||
( 1 == abs(current_edge.id - result_phantom_node.edgeBasedNode ) ) &&
|
||||
EdgesAreEquivalent(
|
||||
current_start_coordinate,
|
||||
FixedPointCoordinate(
|
||||
current_edge.lat1,
|
||||
current_edge.lon1
|
||||
),
|
||||
FixedPointCoordinate(
|
||||
current_edge.lat2,
|
||||
current_edge.lon2
|
||||
),
|
||||
current_end_coordinate
|
||||
)
|
||||
) {
|
||||
|
||||
BOOST_ASSERT_MSG(current_edge.id != result_phantom_node.edgeBasedNode, "IDs not different");
|
||||
result_phantom_node.weight2 = current_edge.weight;
|
||||
if(current_edge.id < result_phantom_node.edgeBasedNode) {
|
||||
result_phantom_node.edgeBasedNode = current_edge.id;
|
||||
std::swap(result_phantom_node.weight1, result_phantom_node.weight2);
|
||||
std::swap(current_end_coordinate, current_start_coordinate);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//traverse children, prune if global mindist is smaller than local one
|
||||
for (uint32_t i = 0; i < current_tree_node.child_count; ++i) {
|
||||
const int32_t child_id = current_tree_node.children[i];
|
||||
TreeNode & child_tree_node = m_search_tree[child_id];
|
||||
RectangleT & child_rectangle = child_tree_node.minimum_bounding_rectangle;
|
||||
const double current_min_dist = child_rectangle.GetMinDist(input_coordinate);
|
||||
const double current_min_max_dist = child_rectangle.GetMinMaxDist(input_coordinate);
|
||||
if( current_min_max_dist < min_max_dist ) {
|
||||
min_max_dist = current_min_max_dist;
|
||||
}
|
||||
if( current_min_dist > min_max_dist ) {
|
||||
continue;
|
||||
}
|
||||
if( current_min_dist > min_dist ) { //upward pruning
|
||||
continue;
|
||||
}
|
||||
traversal_queue.push(QueryCandidate(child_id, current_min_dist));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Hack to fix rounding errors and wandering via nodes.
|
||||
if(std::abs(input_coordinate.lon - result_phantom_node.location.lon) == 1) {
|
||||
result_phantom_node.location.lon = input_coordinate.lon;
|
||||
}
|
||||
if(std::abs(input_coordinate.lat - result_phantom_node.location.lat) == 1) {
|
||||
result_phantom_node.location.lat = input_coordinate.lat;
|
||||
}
|
||||
|
||||
double ratio = 0.;
|
||||
|
||||
if( found_a_nearest_edge) {
|
||||
const double distance_1 = FixedPointCoordinate::ApproximateDistance(
|
||||
current_start_coordinate,
|
||||
result_phantom_node.location
|
||||
);
|
||||
|
||||
const double distance_2 = FixedPointCoordinate::ApproximateDistance(
|
||||
current_start_coordinate,
|
||||
current_end_coordinate
|
||||
);
|
||||
|
||||
ratio = distance_1/distance_2;
|
||||
ratio = std::min(1., ratio);
|
||||
}
|
||||
|
||||
result_phantom_node.weight1 *= ratio;
|
||||
if(INT_MAX != result_phantom_node.weight2) {
|
||||
result_phantom_node.weight2 *= (1.-ratio);
|
||||
}
|
||||
result_phantom_node.ratio = ratio;
|
||||
|
||||
return found_a_nearest_edge;
|
||||
}
|
||||
|
||||
private:
|
||||
inline void LoadLeafFromDisk(const uint32_t leaf_id, LeafNode& result_node) {
|
||||
if(
|
||||
!thread_local_rtree_stream.get() ||
|
||||
!thread_local_rtree_stream->is_open()
|
||||
) {
|
||||
thread_local_rtree_stream.reset(
|
||||
new boost::filesystem::ifstream(
|
||||
m_leaf_node_filename,
|
||||
std::ios::in | std::ios::binary
|
||||
)
|
||||
);
|
||||
}
|
||||
if(!thread_local_rtree_stream->good()) {
|
||||
thread_local_rtree_stream->clear(std::ios::goodbit);
|
||||
SimpleLogger().Write(logDEBUG) << "Resetting stale filestream";
|
||||
}
|
||||
uint64_t seek_pos = sizeof(uint64_t) + leaf_id*sizeof(LeafNode);
|
||||
thread_local_rtree_stream->seekg(seek_pos);
|
||||
thread_local_rtree_stream->read((char *)&result_node, sizeof(LeafNode));
|
||||
}
|
||||
|
||||
inline bool EdgesAreEquivalent(
|
||||
const FixedPointCoordinate & a,
|
||||
const FixedPointCoordinate & b,
|
||||
const FixedPointCoordinate & c,
|
||||
const FixedPointCoordinate & d
|
||||
) const {
|
||||
return (a == b && c == d) || (a == c && b == d) || (a == d && b == c);
|
||||
}
|
||||
|
||||
inline bool DoubleEpsilonCompare(const double d1, const double d2) const {
|
||||
return (std::abs(d1 - d2) < std::numeric_limits<double>::epsilon() );
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
//[1] "On Packing R-Trees"; I. Kamel, C. Faloutsos; 1993; DOI: 10.1145/170088.170403
|
||||
//[2] "Nearest Neighbor Queries", N. Roussopulos et al; 1995; DOI: 10.1145/223784.223794
|
||||
|
||||
|
||||
#endif /* STATICRTREE_H_ */
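The first constructor above bulk-loads the tree in the Kamel-Faloutsos style [1]: each element is reduced to the Hilbert value of its centroid, the elements are sorted by that value, runs of RTREE_LEAF_NODE_SIZE consecutive entries become leaves, and parent levels of up to RTREE_BRANCHING_FACTOR children are stacked until a single root remains. The toy sketch below reproduces only that packing arithmetic (no rectangles, no leaf-file I/O) to make the resulting level structure visible; it is an illustration, not part of the class.

#include <algorithm>
#include <cstdint>
#include <vector>

// Returns the number of tree nodes per level (leaves first, root last) when
// a sequence of Hilbert values is sorted and packed with the given capacities.
// Assumes a non-empty input, matching the class above.
std::vector<std::size_t> pack_by_hilbert_value(std::vector<uint64_t> hilbert_values,
                                               std::size_t leaf_capacity,
                                               std::size_t branching_factor) {
    std::sort(hilbert_values.begin(), hilbert_values.end());

    std::vector<std::size_t> nodes_per_level;
    std::size_t current = (hilbert_values.size() + leaf_capacity - 1) / leaf_capacity;
    nodes_per_level.push_back(current);          // number of leaf nodes

    while (current > 1) {
        current = (current + branching_factor - 1) / branching_factor;
        nodes_per_level.push_back(current);      // next inner level, up to the root
    }
    return nodes_per_level;
}
// pack_by_hilbert_value(values, 1170, 50) mirrors RTREE_LEAF_NODE_SIZE and
// RTREE_BRANCHING_FACTOR from the class above.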
97 DataStructures/TurnInstructions.h Normal file
@ -0,0 +1,97 @@
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef TURNINSTRUCTIONS_H_
|
||||
#define TURNINSTRUCTIONS_H_
|
||||
|
||||
#include <boost/noncopyable.hpp>
|
||||
|
||||
typedef unsigned char TurnInstruction;
|
||||
|
||||
//This is a hack until c++0x is available enough to use scoped enums
|
||||
struct TurnInstructionsClass : boost::noncopyable {
|
||||
|
||||
const static TurnInstruction NoTurn = 0; //Give no instruction at all
|
||||
const static TurnInstruction GoStraight = 1; //Tell user to go straight!
|
||||
const static TurnInstruction TurnSlightRight = 2;
|
||||
const static TurnInstruction TurnRight = 3;
|
||||
const static TurnInstruction TurnSharpRight = 4;
|
||||
const static TurnInstruction UTurn = 5;
|
||||
const static TurnInstruction TurnSharpLeft = 6;
|
||||
const static TurnInstruction TurnLeft = 7;
|
||||
const static TurnInstruction TurnSlightLeft = 8;
|
||||
const static TurnInstruction ReachViaPoint = 9;
|
||||
const static TurnInstruction HeadOn = 10;
|
||||
const static TurnInstruction EnterRoundAbout = 11;
|
||||
const static TurnInstruction LeaveRoundAbout = 12;
|
||||
const static TurnInstruction StayOnRoundAbout = 13;
|
||||
const static TurnInstruction StartAtEndOfStreet = 14;
|
||||
const static TurnInstruction ReachedYourDestination = 15;
|
||||
const static TurnInstruction EnterAgainstAllowedDirection = 16;
|
||||
const static TurnInstruction LeaveAgainstAllowedDirection = 17;
|
||||
|
||||
const static TurnInstruction AccessRestrictionFlag = 128;
|
||||
const static TurnInstruction InverseAccessRestrictionFlag = 0x7f; // ~128 does not work without a warning.
|
||||
|
||||
const static int AccessRestrictionPenalty = 1 << 15; //unrelated to the bit set in the restriction flag
|
||||
|
||||
static inline TurnInstruction GetTurnDirectionOfInstruction( const double angle ) {
|
||||
if(angle >= 23 && angle < 67) {
|
||||
return TurnSharpRight;
|
||||
}
|
||||
if (angle >= 67 && angle < 113) {
|
||||
return TurnRight;
|
||||
}
|
||||
if (angle >= 113 && angle < 158) {
|
||||
return TurnSlightRight;
|
||||
}
|
||||
if (angle >= 158 && angle < 202) {
|
||||
return GoStraight;
|
||||
}
|
||||
if (angle >= 202 && angle < 248) {
|
||||
return TurnSlightLeft;
|
||||
}
|
||||
if (angle >= 248 && angle < 292) {
|
||||
return TurnLeft;
|
||||
}
|
||||
if (angle >= 292 && angle < 336) {
|
||||
return TurnSharpLeft;
|
||||
}
|
||||
return UTurn;
|
||||
}
|
||||
|
||||
static inline bool TurnIsNecessary ( const short turnInstruction ) {
|
||||
if(NoTurn == turnInstruction || StayOnRoundAbout == turnInstruction)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
static TurnInstructionsClass TurnInstructions;
|
||||
|
||||
#endif /* TURNINSTRUCTIONS_H_ */
|
||||
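To make the angle buckets above concrete: the argument is the turn angle in degrees, with 180 meaning "keep going straight", so values near 0 or 360 fall through to UTurn. A tiny driver, illustrative only (the include path is an assumption):

#include "DataStructures/TurnInstructions.h"
#include <iostream>

int main() {
    // 180 deg -> GoStraight (1), 90 deg -> TurnRight (3), 350 deg -> UTurn (5)
    std::cout << (int)TurnInstructionsClass::GetTurnDirectionOfInstruction(180.0) << " "
              << (int)TurnInstructionsClass::GetTurnDirectionOfInstruction( 90.0) << " "
              << (int)TurnInstructionsClass::GetTurnDirectionOfInstruction(350.0) << std::endl;
    return 0;
}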
102  DataStructures/XORFastHash.h  Normal file
@ -0,0 +1,102 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef FASTXORHASH_H_
|
||||
#define FASTXORHASH_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
|
||||
/*
|
||||
This is an implementation of Tabulation hashing, which has surprising properties like universality.
|
||||
The space requirement is 2*2^16 = 256 kb of memory, which fits into L2 cache.
|
||||
Evaluation boils down to 10 or fewer assembly instructions on any recent x86 CPU:
|
||||
|
||||
1: movq table2(%rip), %rdx
|
||||
2: movl %edi, %eax
|
||||
3: movzwl %di, %edi
|
||||
4: shrl $16, %eax
|
||||
5: movzwl %ax, %eax
|
||||
6: movzbl (%rdx,%rax), %eax
|
||||
7: movq table1(%rip), %rdx
|
||||
8: xorb (%rdx,%rdi), %al
|
||||
9: movzbl %al, %eax
|
||||
10: ret
|
||||
|
||||
*/
|
||||
class XORFastHash { //65k entries
|
||||
std::vector<unsigned short> table1;
|
||||
std::vector<unsigned short> table2;
|
||||
public:
|
||||
XORFastHash() {
|
||||
table1.resize(2 << 16);
|
||||
table2.resize(2 << 16);
|
||||
for(unsigned i = 0; i < (2 << 16); ++i) {
|
||||
table1[i] = i; table2[i] = i;
|
||||
}
|
||||
std::random_shuffle(table1.begin(), table1.end());
|
||||
std::random_shuffle(table2.begin(), table2.end());
|
||||
}
|
||||
|
||||
inline unsigned short operator()(const unsigned originalValue) const {
|
||||
unsigned short lsb = ((originalValue) & 0xffff);
|
||||
unsigned short msb = (((originalValue) >> 16) & 0xffff);
|
||||
return table1[lsb] ^ table2[msb];
|
||||
}
|
||||
};
|
||||
|
||||
class XORMiniHash { //256 entries
|
||||
std::vector<unsigned char> table1;
|
||||
std::vector<unsigned char> table2;
|
||||
std::vector<unsigned char> table3;
|
||||
std::vector<unsigned char> table4;
|
||||
|
||||
public:
|
||||
XORMiniHash() {
|
||||
table1.resize(1 << 8);
|
||||
table2.resize(1 << 8);
|
||||
table3.resize(1 << 8);
|
||||
table4.resize(1 << 8);
|
||||
for(unsigned i = 0; i < (1 << 8); ++i) {
|
||||
table1[i] = i; table2[i] = i;
|
||||
table3[i] = i; table4[i] = i;
|
||||
}
|
||||
std::random_shuffle(table1.begin(), table1.end());
|
||||
std::random_shuffle(table2.begin(), table2.end());
|
||||
std::random_shuffle(table3.begin(), table3.end());
|
||||
std::random_shuffle(table4.begin(), table4.end());
|
||||
}
|
||||
unsigned char operator()(const unsigned originalValue) const {
|
||||
unsigned char byte1 = ((originalValue) & 0xff);
|
||||
unsigned char byte2 = ((originalValue >> 8) & 0xff);
|
||||
unsigned char byte3 = ((originalValue >> 16) & 0xff);
|
||||
unsigned char byte4 = ((originalValue >> 24) & 0xff);
|
||||
return table1[byte1] ^ table2[byte2] ^ table3[byte3] ^ table4[byte4];
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* FASTXORHASH_H_ */
|
||||
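A minimal usage sketch for the tabulation hash above (illustrative only; the include path is an assumption):

#include "DataStructures/XORFastHash.h"
#include <iostream>

int main() {
    XORFastHash hash;              // fills and shuffles the two lookup tables
    const unsigned node_id = 4711u;
    // Two table lookups plus one XOR produce an unsigned short bucket index.
    std::cout << hash(node_id) << std::endl;
    return 0;
}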
87  DataStructures/XORFastHashStorage.h  Normal file
@ -0,0 +1,87 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef XORFASTHASHSTORAGE_H_
|
||||
#define XORFASTHASHSTORAGE_H_
|
||||
|
||||
#include "XORFastHash.h"
|
||||
|
||||
#include <climits>
|
||||
#include <vector>
|
||||
#include <bitset>
|
||||
|
||||
template< typename NodeID, typename Key >
|
||||
class XORFastHashStorage {
|
||||
public:
|
||||
struct HashCell{
|
||||
Key key;
|
||||
NodeID id;
|
||||
unsigned time;
|
||||
HashCell() : key(UINT_MAX), id(UINT_MAX), time(UINT_MAX) {}
|
||||
|
||||
HashCell(const HashCell & other) : key(other.key), id(other.id), time(other.time) { }
|
||||
|
||||
inline operator Key() const {
|
||||
return key;
|
||||
}
|
||||
|
||||
inline void operator=(const Key & keyToInsert) {
|
||||
key = keyToInsert;
|
||||
}
|
||||
};
|
||||
|
||||
XORFastHashStorage( size_t ) : positions(2<<16), currentTimestamp(0) { }
|
||||
|
||||
inline HashCell& operator[]( const NodeID node ) {
|
||||
unsigned short position = fastHash(node);
|
||||
while((positions[position].time == currentTimestamp) && (positions[position].id != node)){
|
||||
++position %= (2<<16);
|
||||
}
|
||||
|
||||
positions[position].id = node;
|
||||
positions[position].time = currentTimestamp;
|
||||
return positions[position];
|
||||
}
|
||||
|
||||
inline void Clear() {
|
||||
++currentTimestamp;
|
||||
if(UINT_MAX == currentTimestamp) {
|
||||
positions.clear();
|
||||
positions.resize((2<<16));
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
XORFastHashStorage() : positions(2<<16), currentTimestamp(0) {}
|
||||
std::vector<HashCell> positions;
|
||||
XORFastHash fastHash;
|
||||
unsigned currentTimestamp;
|
||||
};
|
||||
|
||||
|
||||
#endif /* XORFASTHASHSTORAGE_H_ */
|
||||
|
||||
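XORFastHashStorage is used like an array indexed by NodeID: operator[] probes from the hashed position until it finds the node's own cell or a cell left over from an older generation, and Clear() just bumps the timestamp instead of wiping memory. A small illustrative driver (include path and template arguments are assumptions):

#include "DataStructures/XORFastHashStorage.h"
#include <iostream>

int main() {
    // NodeID and Key are plain unsigned ints here; the size argument is ignored as written.
    XORFastHashStorage<unsigned, unsigned> storage(0);
    storage[42] = 7u;                   // HashCell::operator=(const Key &)
    const unsigned key = storage[42];   // HashCell::operator Key()
    std::cout << key << std::endl;      // 7
    storage.Clear();                    // O(1): only the timestamp advances
    return 0;
}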
68  Descriptors/BaseDescriptor.h  Normal file
@ -0,0 +1,68 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef BASE_DESCRIPTOR_H_
|
||||
#define BASE_DESCRIPTOR_H_
|
||||
|
||||
#include "../DataStructures/PhantomNodes.h"
|
||||
#include "../DataStructures/RawRouteData.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Reply.h>
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
struct DescriptorConfig {
|
||||
DescriptorConfig() :
|
||||
instructions(true),
|
||||
geometry(true),
|
||||
encode_geometry(true),
|
||||
zoom_level(18)
|
||||
{ }
|
||||
bool instructions;
|
||||
bool geometry;
|
||||
bool encode_geometry;
|
||||
unsigned short zoom_level;
|
||||
};
|
||||
|
||||
template<class DataFacadeT>
|
||||
class BaseDescriptor {
|
||||
public:
|
||||
BaseDescriptor() { }
|
||||
//Maybe someone can explain the pure virtual destructor thing to me (dennis)
|
||||
virtual ~BaseDescriptor() { }
|
||||
virtual void Run(
|
||||
const RawRouteData & rawRoute,
|
||||
const PhantomNodes & phantomNodes,
|
||||
DataFacadeT * facade,
|
||||
http::Reply & reply
|
||||
) = 0;
|
||||
virtual void SetConfig(const DescriptorConfig & config) = 0;
|
||||
};
|
||||
|
||||
#endif /* BASE_DESCRIPTOR_H_ */
|
||||
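The two pure virtuals above are all a concrete descriptor has to provide; GPXDescriptor and JSONDescriptor further down in this diff are the real implementations. For orientation, a do-nothing descriptor would look roughly like this (illustrative skeleton, include path assumed):

#include "Descriptors/BaseDescriptor.h"

template<class DataFacadeT>
class NullDescriptor : public BaseDescriptor<DataFacadeT> {
    DescriptorConfig config;
public:
    void SetConfig(const DescriptorConfig & c) { config = c; }
    void Run(
        const RawRouteData &,
        const PhantomNodes &,
        DataFacadeT *,
        http::Reply & reply
    ) {
        reply.content.push_back("{}");  // emit an empty document
    }
};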
143  Descriptors/DescriptionFactory.cpp  Normal file
@ -0,0 +1,143 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "DescriptionFactory.h"
|
||||
|
||||
DescriptionFactory::DescriptionFactory() : entireLength(0) { }
|
||||
|
||||
DescriptionFactory::~DescriptionFactory() { }
|
||||
|
||||
inline double DescriptionFactory::DegreeToRadian(const double degree) const {
|
||||
return degree * (M_PI/180);
|
||||
}
|
||||
|
||||
inline double DescriptionFactory::RadianToDegree(const double radian) const {
|
||||
return radian * (180/M_PI);
|
||||
}
|
||||
|
||||
double DescriptionFactory::GetBearing(
|
||||
const FixedPointCoordinate & A,
|
||||
const FixedPointCoordinate & B
|
||||
) const {
|
||||
double delta_long = DegreeToRadian(B.lon/COORDINATE_PRECISION - A.lon/COORDINATE_PRECISION);
|
||||
|
||||
const double lat1 = DegreeToRadian(A.lat/COORDINATE_PRECISION);
|
||||
const double lat2 = DegreeToRadian(B.lat/COORDINATE_PRECISION);
|
||||
|
||||
const double y = sin(delta_long) * cos(lat2);
|
||||
const double x = cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(delta_long);
|
||||
double result = RadianToDegree(atan2(y, x));
|
||||
while(result < 0.) {
|
||||
result += 360.;
|
||||
}
|
||||
while(result >= 360.) {
|
||||
result -= 360.;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
void DescriptionFactory::SetStartSegment(const PhantomNode & start) {
|
||||
start_phantom = start;
|
||||
AppendSegment(
|
||||
start.location,
|
||||
PathData(0, start.nodeBasedEdgeNameID, 10, start.weight1)
|
||||
);
|
||||
}
|
||||
|
||||
void DescriptionFactory::SetEndSegment(const PhantomNode & target) {
|
||||
target_phantom = target;
|
||||
pathDescription.push_back(
|
||||
SegmentInformation(
|
||||
target.location,
|
||||
target.nodeBasedEdgeNameID,
|
||||
0,
|
||||
target.weight1,
|
||||
0,
|
||||
true
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
void DescriptionFactory::AppendSegment(
|
||||
const FixedPointCoordinate & coordinate,
|
||||
const PathData & data
|
||||
) {
|
||||
if(
|
||||
( 1 == pathDescription.size()) &&
|
||||
( pathDescription.back().location == coordinate)
|
||||
) {
|
||||
pathDescription.back().name_id = data.name_id;
|
||||
} else {
|
||||
pathDescription.push_back(
|
||||
SegmentInformation(
|
||||
coordinate,
|
||||
data.name_id,
|
||||
data.durationOfSegment,
|
||||
0,
|
||||
data.turnInstruction
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
void DescriptionFactory::AppendEncodedPolylineString(
|
||||
const bool return_encoded,
|
||||
std::vector<std::string> & output
|
||||
) {
|
||||
std::string temp;
|
||||
if(return_encoded) {
|
||||
polyline_compressor.printEncodedString(pathDescription, temp);
|
||||
} else {
|
||||
polyline_compressor.printUnencodedString(pathDescription, temp);
|
||||
}
|
||||
output.push_back(temp);
|
||||
}
|
||||
|
||||
void DescriptionFactory::AppendEncodedPolylineString(
|
||||
std::vector<std::string> &output
|
||||
) const {
|
||||
std::string temp;
|
||||
polyline_compressor.printEncodedString(pathDescription, temp);
|
||||
output.push_back(temp);
|
||||
}
|
||||
|
||||
void DescriptionFactory::AppendUnencodedPolylineString(
|
||||
std::vector<std::string>& output
|
||||
) const {
|
||||
std::string temp;
|
||||
polyline_compressor.printUnencodedString(pathDescription, temp);
|
||||
output.push_back(temp);
|
||||
}
|
||||
|
||||
void DescriptionFactory::BuildRouteSummary(
|
||||
const double distance,
|
||||
const unsigned time
|
||||
) {
|
||||
summary.startName = start_phantom.nodeBasedEdgeNameID;
|
||||
summary.destName = target_phantom.nodeBasedEdgeNameID;
|
||||
summary.BuildDurationAndLengthStrings(distance, time);
|
||||
}
|
||||
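GetBearing above is the standard forward-azimuth formula, applied after dividing the fixed-point lat/lon by COORDINATE_PRECISION and normalising the result to [0, 360). A standalone sketch on plain degree inputs, illustrative only:

#include <cmath>
#include <iostream>

static double Bearing(double lat1, double lon1, double lat2, double lon2) {
    const double to_rad = M_PI / 180.;
    const double d_lon = (lon2 - lon1) * to_rad;
    lat1 *= to_rad; lat2 *= to_rad;
    const double y = std::sin(d_lon) * std::cos(lat2);
    const double x = std::cos(lat1) * std::sin(lat2)
                   - std::sin(lat1) * std::cos(lat2) * std::cos(d_lon);
    double result = std::atan2(y, x) * 180. / M_PI;
    while (result < 0.)    result += 360.;
    while (result >= 360.) result -= 360.;
    return result;
}

int main() {
    std::cout << Bearing(0., 0., 1., 0.) << std::endl;  // due north -> 0
    std::cout << Bearing(0., 0., 0., 1.) << std::endl;  // due east  -> 90
    return 0;
}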
213  Descriptors/DescriptionFactory.h  Normal file
@ -0,0 +1,213 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef DESCRIPTIONFACTORY_H_
|
||||
#define DESCRIPTIONFACTORY_H_
|
||||
|
||||
#include "../Algorithms/DouglasPeucker.h"
|
||||
#include "../Algorithms/PolylineCompressor.h"
|
||||
#include "../DataStructures/PhantomNodes.h"
|
||||
#include "../DataStructures/RawRouteData.h"
|
||||
#include "../DataStructures/SegmentInformation.h"
|
||||
#include "../DataStructures/TurnInstructions.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
/* This class is fed with all way segments in consecutive order
|
||||
* and produces the description plus the encoded polyline */
|
||||
|
||||
class DescriptionFactory {
|
||||
DouglasPeucker polyline_generalizer;
|
||||
PolylineCompressor polyline_compressor;
|
||||
PhantomNode start_phantom, target_phantom;
|
||||
|
||||
double DegreeToRadian(const double degree) const;
|
||||
double RadianToDegree(const double degree) const;
|
||||
public:
|
||||
struct RouteSummary {
|
||||
std::string lengthString;
|
||||
std::string durationString;
|
||||
unsigned startName;
|
||||
unsigned destName;
|
||||
RouteSummary() :
|
||||
lengthString("0"),
|
||||
durationString("0"),
|
||||
startName(0),
|
||||
destName(0)
|
||||
{}
|
||||
|
||||
void BuildDurationAndLengthStrings(
|
||||
const double distance,
|
||||
const unsigned time
|
||||
) {
|
||||
//compute distance/duration for route summary
|
||||
intToString(round(distance), lengthString);
|
||||
int travel_time = time/10;
|
||||
intToString(std::max(travel_time, 1), durationString);
|
||||
}
|
||||
} summary;
|
||||
|
||||
double entireLength;
|
||||
|
||||
//I know, declaring this public is considered bad. I'm lazy
|
||||
std::vector <SegmentInformation> pathDescription;
|
||||
DescriptionFactory();
|
||||
virtual ~DescriptionFactory();
|
||||
double GetBearing(const FixedPointCoordinate& C, const FixedPointCoordinate& B) const;
|
||||
void AppendEncodedPolylineString(std::vector<std::string> &output) const;
|
||||
void AppendUnencodedPolylineString(std::vector<std::string> &output) const;
|
||||
void AppendSegment(const FixedPointCoordinate & coordinate, const PathData & data);
|
||||
void BuildRouteSummary(const double distance, const unsigned time);
|
||||
void SetStartSegment(const PhantomNode & start_phantom);
|
||||
void SetEndSegment(const PhantomNode & start_phantom);
|
||||
void AppendEncodedPolylineString(
|
||||
const bool return_encoded,
|
||||
std::vector<std::string> & output
|
||||
);
|
||||
|
||||
template<class DataFacadeT>
|
||||
void Run(
|
||||
const DataFacadeT * facade,
|
||||
const unsigned zoomLevel
|
||||
) {
|
||||
if( pathDescription.empty() ) {
|
||||
return;
|
||||
}
|
||||
|
||||
/** starts at index 1 */
|
||||
pathDescription[0].length = 0;
|
||||
for(unsigned i = 1; i < pathDescription.size(); ++i) {
|
||||
pathDescription[i].length = FixedPointCoordinate::ApproximateEuclideanDistance(pathDescription[i-1].location, pathDescription[i].location);
|
||||
}
|
||||
|
||||
// std::string string0 = facade->GetEscapedNameForNameID(pathDescription[0].name_id);
|
||||
// std::string string1;
|
||||
|
||||
|
||||
/*Simplify turn instructions
|
||||
Input :
|
||||
10. Turn left on B 36 for 20 km
|
||||
11. Continue on B 35; B 36 for 2 km
|
||||
12. Continue on B 36 for 13 km
|
||||
|
||||
becomes:
|
||||
10. Turn left on B 36 for 35 km
|
||||
*/
|
||||
//TODO: rework to check only end and start of string.
|
||||
// stl string is way too expensive
|
||||
|
||||
// unsigned lastTurn = 0;
|
||||
// for(unsigned i = 1; i < pathDescription.size(); ++i) {
|
||||
// string1 = sEngine.GetEscapedNameForNameID(pathDescription[i].name_id);
|
||||
// if(TurnInstructionsClass::GoStraight == pathDescription[i].turn_instruction) {
|
||||
// if(std::string::npos != string0.find(string1+";")
|
||||
// || std::string::npos != string0.find(";"+string1)
|
||||
// || std::string::npos != string0.find(string1+" ;")
|
||||
// || std::string::npos != string0.find("; "+string1)
|
||||
// ){
|
||||
// SimpleLogger().Write() << "->next correct: " << string0 << " contains " << string1;
|
||||
// for(; lastTurn != i; ++lastTurn)
|
||||
// pathDescription[lastTurn].name_id = pathDescription[i].name_id;
|
||||
// pathDescription[i].turn_instruction = TurnInstructionsClass::NoTurn;
|
||||
// } else if(std::string::npos != string1.find(string0+";")
|
||||
// || std::string::npos != string1.find(";"+string0)
|
||||
// || std::string::npos != string1.find(string0+" ;")
|
||||
// || std::string::npos != string1.find("; "+string0)
|
||||
// ){
|
||||
// SimpleLogger().Write() << "->prev correct: " << string1 << " contains " << string0;
|
||||
// pathDescription[i].name_id = pathDescription[i-1].name_id;
|
||||
// pathDescription[i].turn_instruction = TurnInstructionsClass::NoTurn;
|
||||
// }
|
||||
// }
|
||||
// if (TurnInstructionsClass::NoTurn != pathDescription[i].turn_instruction) {
|
||||
// lastTurn = i;
|
||||
// }
|
||||
// string0 = string1;
|
||||
// }
|
||||
|
||||
double lengthOfSegment = 0;
|
||||
unsigned durationOfSegment = 0;
|
||||
unsigned indexOfSegmentBegin = 0;
|
||||
|
||||
for(unsigned i = 1; i < pathDescription.size(); ++i) {
|
||||
entireLength += pathDescription[i].length;
|
||||
lengthOfSegment += pathDescription[i].length;
|
||||
durationOfSegment += pathDescription[i].duration;
|
||||
pathDescription[indexOfSegmentBegin].length = lengthOfSegment;
|
||||
pathDescription[indexOfSegmentBegin].duration = durationOfSegment;
|
||||
|
||||
|
||||
if(TurnInstructionsClass::NoTurn != pathDescription[i].turn_instruction) {
|
||||
BOOST_ASSERT(pathDescription[i].necessary);
|
||||
lengthOfSegment = 0;
|
||||
durationOfSegment = 0;
|
||||
indexOfSegmentBegin = i;
|
||||
}
|
||||
}
|
||||
|
||||
//Post-processing to remove empty or nearly empty path segments
|
||||
if(std::numeric_limits<double>::epsilon() > pathDescription.back().length) {
|
||||
if(pathDescription.size() > 2){
|
||||
pathDescription.pop_back();
|
||||
pathDescription.back().necessary = true;
|
||||
pathDescription.back().turn_instruction = TurnInstructions.NoTurn;
|
||||
target_phantom.nodeBasedEdgeNameID = (pathDescription.end()-2)->name_id;
|
||||
}
|
||||
} else {
|
||||
pathDescription[indexOfSegmentBegin].duration *= (1.-target_phantom.ratio);
|
||||
}
|
||||
if(std::numeric_limits<double>::epsilon() > pathDescription[0].length) {
|
||||
if(pathDescription.size() > 2) {
|
||||
pathDescription.erase(pathDescription.begin());
|
||||
pathDescription[0].turn_instruction = TurnInstructions.HeadOn;
|
||||
pathDescription[0].necessary = true;
|
||||
start_phantom.nodeBasedEdgeNameID = pathDescription[0].name_id;
|
||||
}
|
||||
} else {
|
||||
pathDescription[0].duration *= start_phantom.ratio;
|
||||
}
|
||||
|
||||
//Generalize poly line
|
||||
polyline_generalizer.Run(pathDescription, zoomLevel);
|
||||
|
||||
//fix whatever else still needs fixing: assign a bearing to each necessary segment
|
||||
for(unsigned i = 0; i < pathDescription.size()-1 && pathDescription.size() >= 2; ++i){
|
||||
if(pathDescription[i].necessary) {
|
||||
double angle = GetBearing(pathDescription[i].location, pathDescription[i+1].location);
|
||||
pathDescription[i].bearing = angle*10;
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* DESCRIPTIONFACTORY_H_ */
|
||||
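The intended call order is easiest to see in JSONDescriptor further down in this diff: set the start segment, append one segment per path node, set the end segment, then Run() to compute per-segment lengths, simplify the geometry and assign bearings. A non-standalone outline for a single-leg route (illustrative; facade, route_leg and phantoms come from the caller):

#include "Descriptors/DescriptionFactory.h"
#include <boost/foreach.hpp>

template<class DataFacadeT>
void DescribeSingleLeg(
    DataFacadeT * facade,
    const std::vector<PathData> & route_leg,
    const PhantomNodes & phantoms
) {
    DescriptionFactory factory;
    factory.SetStartSegment(phantoms.startPhantom);
    BOOST_FOREACH(const PathData & path_data, route_leg) {
        factory.AppendSegment(facade->GetCoordinateOfNode(path_data.node), path_data);
    }
    factory.SetEndSegment(phantoms.targetPhantom);
    factory.Run(facade, 18);                              // lengths, generalization, bearings
    std::vector<std::string> geometry;
    factory.AppendEncodedPolylineString(true, geometry);  // encoded polyline
    factory.BuildRouteSummary(factory.entireLength, 0);   // distance/duration strings
}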
106  Descriptors/GPXDescriptor.h  Normal file
@ -0,0 +1,106 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef GPX_DESCRIPTOR_H_
|
||||
#define GPX_DESCRIPTOR_H_
|
||||
|
||||
#include "BaseDescriptor.h"
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
template<class DataFacadeT>
|
||||
class GPXDescriptor : public BaseDescriptor<DataFacadeT> {
|
||||
private:
|
||||
DescriptorConfig config;
|
||||
FixedPointCoordinate current;
|
||||
|
||||
std::string tmp;
|
||||
public:
|
||||
void SetConfig(const DescriptorConfig & c) { config = c; }
|
||||
|
||||
//TODO: reorder parameters
|
||||
void Run(
|
||||
const RawRouteData &raw_route,
|
||||
const PhantomNodes &phantom_node_list,
|
||||
DataFacadeT * facade,
|
||||
http::Reply & reply
|
||||
) {
|
||||
reply.content.push_back("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
|
||||
reply.content.push_back(
|
||||
"<gpx creator=\"OSRM Routing Engine\" version=\"1.1\" "
|
||||
"xmlns=\"http://www.topografix.com/GPX/1/1\" "
|
||||
"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" "
|
||||
"xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 gpx.xsd"
|
||||
"\">");
|
||||
reply.content.push_back(
|
||||
"<metadata><copyright author=\"Project OSRM\"><license>Data (c)"
|
||||
" OpenStreetMap contributors (ODbL)</license></copyright>"
|
||||
"</metadata>");
|
||||
reply.content.push_back("<rte>");
|
||||
bool found_route = (raw_route.lengthOfShortestPath != INT_MAX) &&
|
||||
(raw_route.unpacked_path_segments[0].size());
|
||||
if( found_route ) {
|
||||
FixedPointCoordinate::convertInternalLatLonToString(
|
||||
phantom_node_list.startPhantom.location.lat,
|
||||
tmp
|
||||
);
|
||||
reply.content.push_back("<rtept lat=\"" + tmp + "\" ");
|
||||
FixedPointCoordinate::convertInternalLatLonToString(
|
||||
phantom_node_list.startPhantom.location.lon,
|
||||
tmp
|
||||
);
|
||||
reply.content.push_back("lon=\"" + tmp + "\"></rtept>");
|
||||
|
||||
for(unsigned i=0; i < raw_route.unpacked_path_segments.size(); ++i){
|
||||
BOOST_FOREACH(
|
||||
const PathData & pathData,
|
||||
raw_route.unpacked_path_segments[i]
|
||||
) {
|
||||
current = facade->GetCoordinateOfNode(pathData.node);
|
||||
|
||||
FixedPointCoordinate::convertInternalLatLonToString(current.lat, tmp);
|
||||
reply.content.push_back("<rtept lat=\"" + tmp + "\" ");
|
||||
FixedPointCoordinate::convertInternalLatLonToString(current.lon, tmp);
|
||||
reply.content.push_back("lon=\"" + tmp + "\"></rtept>");
|
||||
}
|
||||
}
|
||||
// Add the via point or the end coordinate
|
||||
FixedPointCoordinate::convertInternalLatLonToString(
|
||||
phantom_node_list.targetPhantom.location.lat,
|
||||
tmp
|
||||
);
|
||||
reply.content.push_back("<rtept lat=\"" + tmp + "\" ");
|
||||
FixedPointCoordinate::convertInternalLatLonToString(
|
||||
phantom_node_list.targetPhantom.location.lon,
|
||||
tmp
|
||||
);
|
||||
reply.content.push_back("lon=\"" + tmp + "\"></rtept>");
|
||||
}
|
||||
reply.content.push_back("</rte></gpx>");
|
||||
}
|
||||
};
|
||||
#endif // GPX_DESCRIPTOR_H_
|
||||
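Pieced together from the string literals above, a successful reply body has roughly this shape (coordinates are made up; one <rtept> is emitted per path node):

<?xml version="1.0" encoding="UTF-8"?>
<gpx creator="OSRM Routing Engine" version="1.1" xmlns="http://www.topografix.com/GPX/1/1" ...>
<metadata><copyright author="Project OSRM"><license>Data (c) OpenStreetMap contributors (ODbL)</license></copyright></metadata>
<rte>
<rtept lat="52.517037" lon="13.388860"></rtept>
...
<rtept lat="52.520035" lon="13.394141"></rtept>
</rte></gpx>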
510  Descriptors/JSONDescriptor.h  Normal file
@ -0,0 +1,510 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef JSON_DESCRIPTOR_H_
|
||||
#define JSON_DESCRIPTOR_H_
|
||||
|
||||
#include "BaseDescriptor.h"
|
||||
#include "DescriptionFactory.h"
|
||||
#include "../Algorithms/ObjectToBase64.h"
|
||||
#include "../DataStructures/SegmentInformation.h"
|
||||
#include "../DataStructures/TurnInstructions.h"
|
||||
#include "../Util/Azimuth.h"
|
||||
#include "../Util/StringUtil.h"
|
||||
|
||||
#include <boost/bind.hpp>
|
||||
#include <boost/lambda/lambda.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
template<class DataFacadeT>
|
||||
class JSONDescriptor : public BaseDescriptor<DataFacadeT> {
|
||||
private:
|
||||
DataFacadeT * facade;
|
||||
DescriptorConfig config;
|
||||
DescriptionFactory description_factory;
|
||||
DescriptionFactory alternate_descriptionFactory;
|
||||
FixedPointCoordinate current;
|
||||
unsigned entered_restricted_area_count;
|
||||
struct RoundAbout{
|
||||
RoundAbout() :
|
||||
start_index(INT_MAX),
|
||||
name_id(INT_MAX),
|
||||
leave_at_exit(INT_MAX)
|
||||
{}
|
||||
int start_index;
|
||||
int name_id;
|
||||
int leave_at_exit;
|
||||
} roundAbout;
|
||||
|
||||
struct Segment {
|
||||
Segment() : name_id(-1), length(-1), position(-1) {}
|
||||
Segment(int n, int l, int p) : name_id(n), length(l), position(p) {}
|
||||
int name_id;
|
||||
int length;
|
||||
int position;
|
||||
};
|
||||
std::vector<Segment> shortest_path_segments, alternative_path_segments;
|
||||
std::vector<unsigned> shortest_leg_end_indices, alternative_leg_end_indices;
|
||||
|
||||
struct RouteNames {
|
||||
std::string shortestPathName1;
|
||||
std::string shortestPathName2;
|
||||
std::string alternativePathName1;
|
||||
std::string alternativePathName2;
|
||||
};
|
||||
|
||||
public:
|
||||
JSONDescriptor() :
|
||||
facade(NULL),
|
||||
entered_restricted_area_count(0)
|
||||
{
|
||||
shortest_leg_end_indices.push_back(0);
|
||||
alternative_leg_end_indices.push_back(0);
|
||||
}
|
||||
|
||||
void SetConfig(const DescriptorConfig & c) { config = c; }
|
||||
|
||||
int DescribeLeg(
|
||||
const std::vector<PathData> & route_leg,
|
||||
const PhantomNodes & leg_phantoms
|
||||
) {
|
||||
int added_element_count = 0;
|
||||
//Get all the coordinates for the computed route
|
||||
FixedPointCoordinate current_coordinate;
|
||||
BOOST_FOREACH(const PathData & path_data, route_leg) {
|
||||
current_coordinate = facade->GetCoordinateOfNode(path_data.node);
|
||||
description_factory.AppendSegment(current_coordinate, path_data );
|
||||
++added_element_count;
|
||||
}
|
||||
// description_factory.SetEndSegment( leg_phantoms.targetPhantom );
|
||||
++added_element_count;
|
||||
BOOST_ASSERT( (int)(route_leg.size() + 1) == added_element_count );
|
||||
return added_element_count;
|
||||
}
|
||||
|
||||
void Run(
|
||||
const RawRouteData & raw_route,
|
||||
const PhantomNodes & phantom_nodes,
|
||||
// TODO: move facade initialization to c'tor
|
||||
DataFacadeT * f,
|
||||
http::Reply & reply
|
||||
) {
|
||||
facade = f;
|
||||
reply.content.push_back(
|
||||
"{\"status\":"
|
||||
);
|
||||
|
||||
if(INT_MAX == raw_route.lengthOfShortestPath) {
|
||||
//We do not need to do much, if there is no route ;-)
|
||||
reply.content.push_back(
|
||||
"207,\"status_message\": \"Cannot find route between points\"}"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
description_factory.SetStartSegment(phantom_nodes.startPhantom);
|
||||
reply.content.push_back("0,"
|
||||
"\"status_message\": \"Found route between points\",");
|
||||
|
||||
BOOST_ASSERT( raw_route.unpacked_path_segments.size() == raw_route.segmentEndCoordinates.size() );
|
||||
for( unsigned i = 0; i < raw_route.unpacked_path_segments.size(); ++i ) {
|
||||
const int added_segments = DescribeLeg(
|
||||
raw_route.unpacked_path_segments[i],
|
||||
raw_route.segmentEndCoordinates[i]
|
||||
);
|
||||
BOOST_ASSERT( 0 < added_segments );
|
||||
shortest_leg_end_indices.push_back(
|
||||
added_segments + shortest_leg_end_indices.back()
|
||||
);
|
||||
}
|
||||
description_factory.SetEndSegment(phantom_nodes.targetPhantom);
|
||||
description_factory.Run(facade, config.zoom_level);
|
||||
|
||||
reply.content.push_back("\"route_geometry\": ");
|
||||
if(config.geometry) {
|
||||
description_factory.AppendEncodedPolylineString(
|
||||
config.encode_geometry,
|
||||
reply.content
|
||||
);
|
||||
} else {
|
||||
reply.content.push_back("[]");
|
||||
}
|
||||
|
||||
reply.content.push_back(",\"route_instructions\": [");
|
||||
if(config.instructions) {
|
||||
BuildTextualDescription(
|
||||
description_factory,
|
||||
reply,
|
||||
raw_route.lengthOfShortestPath,
|
||||
facade,
|
||||
shortest_path_segments
|
||||
);
|
||||
}
|
||||
reply.content.push_back("],");
|
||||
description_factory.BuildRouteSummary(
|
||||
description_factory.entireLength,
|
||||
raw_route.lengthOfShortestPath
|
||||
);
|
||||
|
||||
reply.content.push_back("\"route_summary\":");
|
||||
reply.content.push_back("{");
|
||||
reply.content.push_back("\"total_distance\":");
|
||||
reply.content.push_back(description_factory.summary.lengthString);
|
||||
reply.content.push_back(","
|
||||
"\"total_time\":");
|
||||
reply.content.push_back(description_factory.summary.durationString);
|
||||
reply.content.push_back(","
|
||||
"\"start_point\":\"");
|
||||
reply.content.push_back(
|
||||
facade->GetEscapedNameForNameID(description_factory.summary.startName)
|
||||
);
|
||||
reply.content.push_back("\","
|
||||
"\"end_point\":\"");
|
||||
reply.content.push_back(
|
||||
facade->GetEscapedNameForNameID(description_factory.summary.destName)
|
||||
);
|
||||
reply.content.push_back("\"");
|
||||
reply.content.push_back("}");
|
||||
reply.content.push_back(",");
|
||||
|
||||
//only one alternative route is computed at this time, so this is hardcoded
|
||||
|
||||
if(raw_route.lengthOfAlternativePath != INT_MAX) {
|
||||
alternate_descriptionFactory.SetStartSegment(phantom_nodes.startPhantom);
|
||||
//Get all the coordinates for the computed route
|
||||
BOOST_FOREACH(const PathData & path_data, raw_route.unpacked_alternative) {
|
||||
current = facade->GetCoordinateOfNode(path_data.node);
|
||||
alternate_descriptionFactory.AppendSegment(current, path_data );
|
||||
}
|
||||
alternate_descriptionFactory.SetEndSegment(phantom_nodes.targetPhantom);
|
||||
}
|
||||
alternate_descriptionFactory.Run(facade, config.zoom_level);
|
||||
|
||||
// //give an array of alternative routes
|
||||
reply.content.push_back("\"alternative_geometries\": [");
|
||||
if(config.geometry && INT_MAX != raw_route.lengthOfAlternativePath) {
|
||||
//Generate the linestrings for each alternative
|
||||
alternate_descriptionFactory.AppendEncodedPolylineString(
|
||||
config.encode_geometry,
|
||||
reply.content
|
||||
);
|
||||
}
|
||||
reply.content.push_back("],");
|
||||
reply.content.push_back("\"alternative_instructions\":[");
|
||||
if(INT_MAX != raw_route.lengthOfAlternativePath) {
|
||||
reply.content.push_back("[");
|
||||
//Generate instructions for each alternative
|
||||
if(config.instructions) {
|
||||
BuildTextualDescription(
|
||||
alternate_descriptionFactory,
|
||||
reply,
|
||||
raw_route.lengthOfAlternativePath,
|
||||
facade,
|
||||
alternative_path_segments
|
||||
);
|
||||
}
|
||||
reply.content.push_back("]");
|
||||
}
|
||||
reply.content.push_back("],");
|
||||
reply.content.push_back("\"alternative_summaries\":[");
|
||||
if(INT_MAX != raw_route.lengthOfAlternativePath) {
|
||||
//Generate route summary (length, duration) for each alternative
|
||||
alternate_descriptionFactory.BuildRouteSummary(
|
||||
alternate_descriptionFactory.entireLength,
|
||||
raw_route.lengthOfAlternativePath
|
||||
);
|
||||
reply.content.push_back("{");
|
||||
reply.content.push_back("\"total_distance\":");
|
||||
reply.content.push_back(
|
||||
alternate_descriptionFactory.summary.lengthString
|
||||
);
|
||||
reply.content.push_back(","
|
||||
"\"total_time\":");
|
||||
reply.content.push_back(
|
||||
alternate_descriptionFactory.summary.durationString
|
||||
);
|
||||
reply.content.push_back(","
|
||||
"\"start_point\":\"");
|
||||
reply.content.push_back(
|
||||
facade->GetEscapedNameForNameID(
|
||||
description_factory.summary.startName
|
||||
)
|
||||
);
|
||||
reply.content.push_back("\","
|
||||
"\"end_point\":\"");
|
||||
reply.content.push_back(facade->GetEscapedNameForNameID(description_factory.summary.destName));
|
||||
reply.content.push_back("\"");
|
||||
reply.content.push_back("}");
|
||||
}
|
||||
reply.content.push_back("],");
|
||||
|
||||
// //Get Names for both routes
|
||||
RouteNames routeNames;
|
||||
GetRouteNames(shortest_path_segments, alternative_path_segments, facade, routeNames);
|
||||
|
||||
reply.content.push_back("\"route_name\":[\"");
|
||||
reply.content.push_back(routeNames.shortestPathName1);
|
||||
reply.content.push_back("\",\"");
|
||||
reply.content.push_back(routeNames.shortestPathName2);
|
||||
reply.content.push_back("\"],"
|
||||
"\"alternative_names\":[");
|
||||
reply.content.push_back("[\"");
|
||||
reply.content.push_back(routeNames.alternativePathName1);
|
||||
reply.content.push_back("\",\"");
|
||||
reply.content.push_back(routeNames.alternativePathName2);
|
||||
reply.content.push_back("\"]");
|
||||
reply.content.push_back("],");
|
||||
//list all viapoints so that the client may display it
|
||||
reply.content.push_back("\"via_points\":[");
|
||||
|
||||
BOOST_ASSERT( !raw_route.segmentEndCoordinates.empty() );
|
||||
|
||||
std::string tmp;
|
||||
FixedPointCoordinate::convertInternalReversedCoordinateToString(
|
||||
raw_route.segmentEndCoordinates.front().startPhantom.location,
|
||||
tmp
|
||||
);
|
||||
reply.content.push_back("[");
|
||||
reply.content.push_back(tmp);
|
||||
reply.content.push_back("]");
|
||||
|
||||
BOOST_FOREACH(const PhantomNodes & nodes, raw_route.segmentEndCoordinates) {
|
||||
tmp.clear();
|
||||
FixedPointCoordinate::convertInternalReversedCoordinateToString(
|
||||
nodes.targetPhantom.location,
|
||||
tmp
|
||||
);
|
||||
reply.content.push_back(",[");
|
||||
reply.content.push_back(tmp);
|
||||
reply.content.push_back("]");
|
||||
}
|
||||
|
||||
reply.content.push_back("],");
|
||||
reply.content.push_back("\"via_indices\":[");
|
||||
BOOST_FOREACH(const unsigned index, shortest_leg_end_indices) {
|
||||
tmp.clear();
|
||||
intToString(index, tmp);
|
||||
reply.content.push_back(tmp);
|
||||
if( index != shortest_leg_end_indices.back() ) {
|
||||
reply.content.push_back(",");
|
||||
}
|
||||
}
|
||||
reply.content.push_back("],\"alternative_indices\":[");
|
||||
if(INT_MAX != raw_route.lengthOfAlternativePath) {
|
||||
reply.content.push_back("0,");
|
||||
tmp.clear();
|
||||
intToString(alternate_descriptionFactory.pathDescription.size(), tmp);
|
||||
reply.content.push_back(tmp);
|
||||
}
|
||||
|
||||
reply.content.push_back("],");
|
||||
reply.content.push_back("\"hint_data\": {");
|
||||
reply.content.push_back("\"checksum\":");
|
||||
intToString(raw_route.checkSum, tmp);
|
||||
reply.content.push_back(tmp);
|
||||
reply.content.push_back(", \"locations\": [");
|
||||
|
||||
std::string hint;
|
||||
for(unsigned i = 0; i < raw_route.segmentEndCoordinates.size(); ++i) {
|
||||
reply.content.push_back("\"");
|
||||
EncodeObjectToBase64(raw_route.segmentEndCoordinates[i].startPhantom, hint);
|
||||
reply.content.push_back(hint);
|
||||
reply.content.push_back("\", ");
|
||||
}
|
||||
EncodeObjectToBase64(raw_route.segmentEndCoordinates.back().targetPhantom, hint);
|
||||
reply.content.push_back("\"");
|
||||
reply.content.push_back(hint);
|
||||
reply.content.push_back("\"]");
|
||||
reply.content.push_back("}}");
|
||||
}
|
||||
|
||||
// construct route names
|
||||
void GetRouteNames(
|
||||
std::vector<Segment> & shortest_path_segments,
|
||||
std::vector<Segment> & alternative_path_segments,
|
||||
const DataFacadeT * facade,
|
||||
RouteNames & routeNames
|
||||
) {
|
||||
|
||||
Segment shortestSegment1, shortestSegment2;
|
||||
Segment alternativeSegment1, alternativeSegment2;
|
||||
|
||||
if(0 < shortest_path_segments.size()) {
|
||||
sort(shortest_path_segments.begin(), shortest_path_segments.end(), boost::bind(&Segment::length, _1) > boost::bind(&Segment::length, _2) );
|
||||
shortestSegment1 = shortest_path_segments[0];
|
||||
if(0 < alternative_path_segments.size()) {
|
||||
sort(alternative_path_segments.begin(), alternative_path_segments.end(), boost::bind(&Segment::length, _1) > boost::bind(&Segment::length, _2) );
|
||||
alternativeSegment1 = alternative_path_segments[0];
|
||||
}
|
||||
std::vector<Segment> shortestDifference(shortest_path_segments.size());
|
||||
std::vector<Segment> alternativeDifference(alternative_path_segments.size());
|
||||
std::set_difference(shortest_path_segments.begin(), shortest_path_segments.end(), alternative_path_segments.begin(), alternative_path_segments.end(), shortestDifference.begin(), boost::bind(&Segment::name_id, _1) < boost::bind(&Segment::name_id, _2) );
|
||||
int size_of_difference = shortestDifference.size();
|
||||
if(0 < size_of_difference ) {
|
||||
int i = 0;
|
||||
while( i < size_of_difference && shortestDifference[i].name_id == shortest_path_segments[0].name_id) {
|
||||
++i;
|
||||
}
|
||||
if(i < size_of_difference ) {
|
||||
shortestSegment2 = shortestDifference[i];
|
||||
}
|
||||
}
|
||||
|
||||
std::set_difference(alternative_path_segments.begin(), alternative_path_segments.end(), shortest_path_segments.begin(), shortest_path_segments.end(), alternativeDifference.begin(), boost::bind(&Segment::name_id, _1) < boost::bind(&Segment::name_id, _2) );
|
||||
size_of_difference = alternativeDifference.size();
|
||||
if(0 < size_of_difference ) {
|
||||
int i = 0;
|
||||
while( i < size_of_difference && alternativeDifference[i].name_id == alternative_path_segments[0].name_id) {
|
||||
++i;
|
||||
}
|
||||
if(i < size_of_difference ) {
|
||||
alternativeSegment2 = alternativeDifference[i];
|
||||
}
|
||||
}
|
||||
if(shortestSegment1.position > shortestSegment2.position)
|
||||
std::swap(shortestSegment1, shortestSegment2);
|
||||
|
||||
if(alternativeSegment1.position > alternativeSegment2.position)
|
||||
std::swap(alternativeSegment1, alternativeSegment2);
|
||||
|
||||
routeNames.shortestPathName1 = facade->GetEscapedNameForNameID(
|
||||
shortestSegment1.name_id
|
||||
);
|
||||
routeNames.shortestPathName2 = facade->GetEscapedNameForNameID(
|
||||
shortestSegment2.name_id
|
||||
);
|
||||
|
||||
routeNames.alternativePathName1 = facade->GetEscapedNameForNameID(
|
||||
alternativeSegment1.name_id
|
||||
);
|
||||
routeNames.alternativePathName2 = facade->GetEscapedNameForNameID(
|
||||
alternativeSegment2.name_id
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: reorder parameters
|
||||
inline void BuildTextualDescription(
|
||||
DescriptionFactory & description_factory,
|
||||
http::Reply & reply,
|
||||
const int route_length,
|
||||
const DataFacadeT * facade,
|
||||
std::vector<Segment> & route_segments_list
|
||||
) {
|
||||
//Segment information has following format:
|
||||
//["instruction","streetname",length,position,time,"length","earth_direction",azimuth]
|
||||
//Example: ["Turn left","High Street",200,4,10,"200m","NE",22.5]
|
||||
//See also: http://developers.cloudmade.com/wiki/navengine/JSON_format
|
||||
unsigned prefixSumOfNecessarySegments = 0;
|
||||
roundAbout.leave_at_exit = 0;
|
||||
roundAbout.name_id = 0;
|
||||
std::string tmpDist, tmpLength, tmpDuration, tmpBearing, tmpInstruction;
|
||||
//Fetch data from Factory and generate a string from it.
|
||||
BOOST_FOREACH(const SegmentInformation & segment, description_factory.pathDescription) {
|
||||
TurnInstruction current_instruction = segment.turn_instruction & TurnInstructions.InverseAccessRestrictionFlag;
|
||||
entered_restricted_area_count += (current_instruction != segment.turn_instruction);
|
||||
if(TurnInstructions.TurnIsNecessary( current_instruction) ) {
|
||||
if(TurnInstructions.EnterRoundAbout == current_instruction) {
|
||||
roundAbout.name_id = segment.name_id;
|
||||
roundAbout.start_index = prefixSumOfNecessarySegments;
|
||||
} else {
|
||||
if(0 != prefixSumOfNecessarySegments){
|
||||
reply.content.push_back(",");
|
||||
}
|
||||
reply.content.push_back("[\"");
|
||||
if(TurnInstructions.LeaveRoundAbout == current_instruction) {
|
||||
intToString(TurnInstructions.EnterRoundAbout, tmpInstruction);
|
||||
reply.content.push_back(tmpInstruction);
|
||||
reply.content.push_back("-");
|
||||
intToString(roundAbout.leave_at_exit+1, tmpInstruction);
|
||||
reply.content.push_back(tmpInstruction);
|
||||
roundAbout.leave_at_exit = 0;
|
||||
} else {
|
||||
intToString(current_instruction, tmpInstruction);
|
||||
reply.content.push_back(tmpInstruction);
|
||||
}
|
||||
|
||||
reply.content.push_back("\",\"");
|
||||
reply.content.push_back(facade->GetEscapedNameForNameID(segment.name_id));
|
||||
reply.content.push_back("\",");
|
||||
intToString(segment.length, tmpDist);
|
||||
reply.content.push_back(tmpDist);
|
||||
reply.content.push_back(",");
|
||||
intToString(prefixSumOfNecessarySegments, tmpLength);
|
||||
reply.content.push_back(tmpLength);
|
||||
reply.content.push_back(",");
|
||||
intToString(segment.duration/10, tmpDuration);
|
||||
reply.content.push_back(tmpDuration);
|
||||
reply.content.push_back(",\"");
|
||||
intToString(segment.length, tmpLength);
|
||||
reply.content.push_back(tmpLength);
|
||||
reply.content.push_back("m\",\"");
|
||||
double bearing_value = round(segment.bearing/10.);
|
||||
reply.content.push_back(Azimuth::Get(bearing_value));
|
||||
reply.content.push_back("\",");
|
||||
intToString(bearing_value, tmpBearing);
|
||||
reply.content.push_back(tmpBearing);
|
||||
reply.content.push_back("]");
|
||||
|
||||
route_segments_list.push_back(
|
||||
Segment(
|
||||
segment.name_id,
|
||||
segment.length,
|
||||
route_segments_list.size()
|
||||
)
|
||||
);
|
||||
}
|
||||
} else if(TurnInstructions.StayOnRoundAbout == current_instruction) {
|
||||
++roundAbout.leave_at_exit;
|
||||
}
|
||||
if(segment.necessary)
|
||||
++prefixSumOfNecessarySegments;
|
||||
}
|
||||
if(INT_MAX != route_length) {
|
||||
reply.content.push_back(",[\"");
|
||||
intToString(TurnInstructions.ReachedYourDestination, tmpInstruction);
|
||||
reply.content.push_back(tmpInstruction);
|
||||
reply.content.push_back("\",\"");
|
||||
reply.content.push_back("\",");
|
||||
reply.content.push_back("0");
|
||||
reply.content.push_back(",");
|
||||
intToString(prefixSumOfNecessarySegments-1, tmpLength);
|
||||
reply.content.push_back(tmpLength);
|
||||
reply.content.push_back(",");
|
||||
reply.content.push_back("0");
|
||||
reply.content.push_back(",\"");
|
||||
reply.content.push_back("\",\"");
|
||||
reply.content.push_back(Azimuth::Get(0.0));
|
||||
reply.content.push_back("\",");
|
||||
reply.content.push_back("0.0");
|
||||
reply.content.push_back("]");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* JSON_DESCRIPTOR_H_ */
|
||||
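For orientation, the fragments pushed into reply.content above assemble into a JSON document of this overall shape (key order follows the code, values elided):

{"status": 0,
 "status_message": "Found route between points",
 "route_geometry": ...,
 "route_instructions": [...],
 "route_summary": {"total_distance": ..., "total_time": ..., "start_point": "...", "end_point": "..."},
 "alternative_geometries": [...],
 "alternative_instructions": [...],
 "alternative_summaries": [...],
 "route_name": ["...", "..."],
 "alternative_names": [["...", "..."]],
 "via_points": [[lat, lon], ...],
 "via_indices": [...],
 "alternative_indices": [...],
 "hint_data": {"checksum": ..., "locations": ["...", ...]}}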
3  Docs/webclient.txt  Normal file
@ -0,0 +1,3 @@
The JavaScript-based web client is a separate project, available at

https://github.com/DennisSchiefer/Project-OSRM-Web
44  Doxyfile.in
@ -1,44 +0,0 @@
|
||||
PROJECT_NAME = "Project OSRM"
|
||||
PROJECT_BRIEF = "Open Source Routing Machine"
|
||||
BUILTIN_STL_SUPPORT = YES
|
||||
|
||||
EXTRACT_ALL = YES
|
||||
EXTRACT_PRIVATE = YES
|
||||
EXTRACT_PACKAGE = YES
|
||||
EXTRACT_STATIC = YES
|
||||
EXTRACT_LOCAL_CLASSES = YES
|
||||
EXTRACT_ANON_NSPACES = YES
|
||||
|
||||
QUIET = YES
|
||||
|
||||
INPUT = @CMAKE_CURRENT_SOURCE_DIR@
|
||||
USE_MDFILE_AS_MAINPAGE = @CMAKE_CURRENT_SOURCE_DIR@/README.md
|
||||
FILE_PATTERNS = *.h *.hpp *.c *.cc *.cpp *.md
|
||||
RECURSIVE = YES
|
||||
|
||||
EXCLUDE = @CMAKE_CURRENT_SOURCE_DIR@/third_party \
|
||||
@CMAKE_CURRENT_SOURCE_DIR@/build \
|
||||
@CMAKE_CURRENT_SOURCE_DIR@/node_modules \
|
||||
@CMAKE_CURRENT_SOURCE_DIR@/unit_tests \
|
||||
@CMAKE_CURRENT_SOURCE_DIR@/benchmarks \
|
||||
@CMAKE_CURRENT_SOURCE_DIR@/features
|
||||
|
||||
SOURCE_BROWSER = YES
|
||||
|
||||
CLANG_ASSISTED_PARSING = NO
|
||||
|
||||
HTML_COLORSTYLE_HUE = 217
|
||||
HTML_COLORSTYLE_SAT = 71
|
||||
HTML_COLORSTYLE_GAMMA = 50
|
||||
|
||||
GENERATE_TREEVIEW = YES
|
||||
|
||||
HAVE_DOT = @DOXYGEN_DOT_FOUND@
|
||||
CALL_GRAPH = YES
|
||||
CALLER_GRAPH = YES
|
||||
|
||||
DOT_IMAGE_FORMAT = svg
|
||||
INTERACTIVE_SVG = YES
|
||||
DOT_GRAPH_MAX_NODES = 500
|
||||
DOT_TRANSPARENT = YES
|
||||
DOT_MULTI_TARGETS = YES
|
||||
139  Extractor/BaseParser.cpp  Normal file
@ -0,0 +1,139 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "BaseParser.h"
|
||||
#include "ExtractionWay.h"
|
||||
#include "ScriptingEnvironment.h"
|
||||
|
||||
#include "../DataStructures/ImportNode.h"
|
||||
#include "../Util/LuaUtil.h"
|
||||
#include "../Util/OSRMException.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <boost/algorithm/string/regex.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/regex.hpp>
|
||||
|
||||
BaseParser::BaseParser(
|
||||
ExtractorCallbacks * extractor_callbacks,
|
||||
ScriptingEnvironment & scripting_environment
|
||||
) : extractor_callbacks(extractor_callbacks),
|
||||
lua_state(scripting_environment.getLuaStateForThreadID(0)),
|
||||
scripting_environment(scripting_environment),
|
||||
use_turn_restrictions(true)
|
||||
{
|
||||
ReadUseRestrictionsSetting();
|
||||
ReadRestrictionExceptions();
|
||||
}
|
||||
|
||||
void BaseParser::ReadUseRestrictionsSetting() {
|
||||
if( 0 != luaL_dostring( lua_state, "return use_turn_restrictions\n") ) {
|
||||
throw OSRMException("ERROR occured in scripting block");
|
||||
}
|
||||
if( lua_isboolean( lua_state, -1) ) {
|
||||
use_turn_restrictions = lua_toboolean(lua_state, -1);
|
||||
}
|
||||
if( use_turn_restrictions ) {
|
||||
SimpleLogger().Write() << "Using turn restrictions";
|
||||
} else {
|
||||
SimpleLogger().Write() << "Ignoring turn restrictions";
|
||||
}
|
||||
}
|
||||
|
||||
void BaseParser::ReadRestrictionExceptions() {
|
||||
if(lua_function_exists(lua_state, "get_exceptions" )) {
|
||||
//get list of turn restriction exceptions
|
||||
luabind::call_function<void>(
|
||||
lua_state,
|
||||
"get_exceptions",
|
||||
boost::ref(restriction_exceptions)
|
||||
);
|
||||
const unsigned exception_count = restriction_exceptions.size();
|
||||
SimpleLogger().Write() <<
|
||||
"Found " << exception_count << " exceptions to turn restrictions:";
|
||||
BOOST_FOREACH(const std::string & str, restriction_exceptions) {
|
||||
SimpleLogger().Write() << " " << str;
|
||||
}
|
||||
} else {
|
||||
SimpleLogger().Write() << "Found no exceptions to turn restrictions";
|
||||
}
|
||||
}
|
||||
|
||||
void BaseParser::report_errors(lua_State *L, const int status) const {
|
||||
if( 0!=status ) {
|
||||
std::cerr << "-- " << lua_tostring(L, -1) << std::endl;
|
||||
lua_pop(L, 1); // remove error message
|
||||
}
|
||||
}
|
||||
|
||||
void BaseParser::ParseNodeInLua(ImportNode& n, lua_State* local_lua_state) {
|
||||
luabind::call_function<void>(
|
||||
local_lua_state,
|
||||
"node_function",
|
||||
boost::ref(n)
|
||||
);
|
||||
}
|
||||
|
||||
void BaseParser::ParseWayInLua(ExtractionWay& w, lua_State* local_lua_state) {
|
||||
luabind::call_function<void>(
|
||||
local_lua_state,
|
||||
"way_function",
|
||||
boost::ref(w)
|
||||
);
|
||||
}
|
||||
|
||||
bool BaseParser::ShouldIgnoreRestriction(
|
||||
const std::string & except_tag_string
|
||||
) const {
|
||||
//should this restriction be ignored? yes if there's an overlap between:
|
||||
// a) the list of modes in the except tag of the restriction
|
||||
// (except_tag_string), eg: except=bus;bicycle
|
||||
//  b) the hierarchy of modes defined by the Lua profile,
|
||||
// eg: [access, vehicle, bicycle]
|
||||
|
||||
if( except_tag_string.empty() ) {
|
||||
return false;
|
||||
}
|
||||
|
||||
//Be warned, this is quadratic work here, but we assume that
|
||||
//only a few exceptions are actually defined.
|
||||
std::vector<std::string> exceptions;
|
||||
boost::algorithm::split_regex(exceptions, except_tag_string, boost::regex("[;][ ]*"));
|
||||
BOOST_FOREACH(std::string& current_string, exceptions) {
|
||||
std::vector<std::string>::const_iterator string_iterator;
|
||||
string_iterator = std::find(
|
||||
restriction_exceptions.begin(),
|
||||
restriction_exceptions.end(),
|
||||
current_string
|
||||
);
|
||||
if( restriction_exceptions.end() != string_iterator ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
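For readers following the restriction handling, the overlap test in ShouldIgnoreRestriction above can be reproduced in isolation. The sketch below is a minimal standalone illustration using only the standard library; the tokenizer replaces Boost's split_regex and the sample mode lists are invented for the example, so none of the names here come from the diff itself.

#include <algorithm>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split "except=bus;bicycle" style values on ';' and trim surrounding spaces.
static std::vector<std::string> SplitExceptTag(const std::string &tag)
{
    std::vector<std::string> tokens;
    std::stringstream ss(tag);
    std::string token;
    while (std::getline(ss, token, ';'))
    {
        const std::size_t begin = token.find_first_not_of(' ');
        const std::size_t end = token.find_last_not_of(' ');
        if (begin != std::string::npos)
        {
            tokens.push_back(token.substr(begin, end - begin + 1));
        }
    }
    return tokens;
}

// Mirrors the logic of BaseParser::ShouldIgnoreRestriction: ignore the
// restriction if any excepted mode is also listed by the profile.
static bool ShouldIgnore(const std::string &except_tag,
                         const std::vector<std::string> &profile_exceptions)
{
    if (except_tag.empty())
    {
        return false;
    }
    for (const std::string &mode : SplitExceptTag(except_tag))
    {
        if (std::find(profile_exceptions.begin(), profile_exceptions.end(), mode) !=
            profile_exceptions.end())
        {
            return true;
        }
    }
    return false;
}

int main()
{
    const std::vector<std::string> profile_exceptions = {"motorcar", "motor_vehicle", "vehicle"};
    std::cout << ShouldIgnore("bus; bicycle", profile_exceptions) << "\n"; // 0: no overlap, keep restriction
    std::cout << ShouldIgnore("motorcar", profile_exceptions) << "\n";     // 1: overlap, ignore restriction
    return 0;
}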
74
Extractor/BaseParser.h
Normal file
@ -0,0 +1,74 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef BASEPARSER_H_
|
||||
#define BASEPARSER_H_
|
||||
|
||||
extern "C" {
|
||||
#include <lua.h>
|
||||
#include <lauxlib.h>
|
||||
#include <lualib.h>
|
||||
}
|
||||
|
||||
#include <boost/noncopyable.hpp>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
class ExtractorCallbacks;
|
||||
class ScriptingEnvironment;
|
||||
struct ExtractionWay;
|
||||
struct ImportNode;
|
||||
|
||||
class BaseParser : boost::noncopyable {
|
||||
public:
|
||||
BaseParser(
|
||||
ExtractorCallbacks * extractor_callbacks,
|
||||
ScriptingEnvironment & scripting_environment
|
||||
);
|
||||
virtual ~BaseParser() {}
|
||||
virtual bool ReadHeader() = 0;
|
||||
virtual bool Parse() = 0;
|
||||
|
||||
virtual void ParseNodeInLua(ImportNode & n, lua_State* thread_lua_state);
|
||||
virtual void ParseWayInLua(ExtractionWay & n, lua_State* thread_lua_state);
|
||||
virtual void report_errors(lua_State * lua_state, const int status) const;
|
||||
|
||||
protected:
|
||||
virtual void ReadUseRestrictionsSetting();
|
||||
virtual void ReadRestrictionExceptions();
|
||||
virtual bool ShouldIgnoreRestriction(
|
||||
const std::string & except_tag_string
|
||||
) const;
|
||||
|
||||
ExtractorCallbacks * extractor_callbacks;
|
||||
lua_State * lua_state;
|
||||
ScriptingEnvironment & scripting_environment;
|
||||
std::vector<std::string> restriction_exceptions;
|
||||
bool use_turn_restrictions;
|
||||
};
|
||||
|
||||
#endif /* BASEPARSER_H_ */
|
||||
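BaseParser leaves ReadHeader and Parse pure virtual, so concrete input formats (such as the PBF parser later in this diff) fill them in while inheriting the Lua plumbing. As a hedged sketch of that subclassing pattern only: the class names and trivial bodies below are illustrative stand-ins without the Lua and OSRM dependencies, not code from the diff.

#include <iostream>

// Standalone stand-in for the BaseParser interface, reduced to the two
// hooks every concrete parser must provide.
class ParserInterface
{
  public:
    virtual ~ParserInterface() {}
    virtual bool ReadHeader() = 0;
    virtual bool Parse() = 0;
};

// Hypothetical file-format parser implementing the two pure virtuals.
class ToyParser : public ParserInterface
{
  public:
    virtual bool ReadHeader() { std::cout << "header ok\n"; return true; }
    virtual bool Parse() { std::cout << "parsed 0 entities\n"; return true; }
};

int main()
{
    ToyParser parser;
    if (parser.ReadHeader())
    {
        parser.Parse();
    }
    return 0;
}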
460
Extractor/ExtractionContainers.cpp
Normal file
@ -0,0 +1,460 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "ExtractionContainers.h"
|
||||
#include "ExtractionWay.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../Util/TimingUtil.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <boost/filesystem/fstream.hpp>
|
||||
|
||||
#include <stxxl/sort>
|
||||
|
||||
ExtractionContainers::ExtractionContainers() {
|
||||
//Check if stxxl can be instantiated
|
||||
stxxl::vector<unsigned> dummy_vector;
|
||||
name_list.push_back("");
|
||||
}
|
||||
|
||||
ExtractionContainers::~ExtractionContainers() {
|
||||
used_node_id_list.clear();
|
||||
all_nodes_list.clear();
|
||||
all_edges_list.clear();
|
||||
name_list.clear();
|
||||
restrictions_list.clear();
|
||||
way_start_end_id_list.clear();
|
||||
}
|
||||
|
||||
void ExtractionContainers::PrepareData(
|
||||
const std::string & output_file_name,
|
||||
const std::string & restrictions_file_name
|
||||
) {
|
||||
try {
|
||||
unsigned number_of_used_nodes = 0;
|
||||
unsigned number_of_used_edges = 0;
|
||||
double time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] Sorting used nodes ... " << std::flush;
|
||||
stxxl::sort(
|
||||
used_node_id_list.begin(),
|
||||
used_node_id_list.end(),
|
||||
Cmp(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
|
||||
time = get_timestamp();
|
||||
std::cout << "[extractor] Erasing duplicate nodes ... " << std::flush;
|
||||
stxxl::vector<NodeID>::iterator NewEnd = std::unique ( used_node_id_list.begin(),used_node_id_list.end() ) ;
|
||||
used_node_id_list.resize ( NewEnd - used_node_id_list.begin() );
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] Sorting all nodes ... " << std::flush;
|
||||
stxxl::sort(
|
||||
all_nodes_list.begin(),
|
||||
all_nodes_list.end(),
|
||||
CmpNodeByID(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] Sorting used ways ... " << std::flush;
|
||||
stxxl::sort(
|
||||
way_start_end_id_list.begin(),
|
||||
way_start_end_id_list.end(),
|
||||
CmpWayByID(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
|
||||
std::cout << "[extractor] Sorting restrctns. by from... " << std::flush;
|
||||
stxxl::sort(
|
||||
restrictions_list.begin(),
|
||||
restrictions_list.end(),
|
||||
CmpRestrictionContainerByFrom(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
|
||||
std::cout << "[extractor] Fixing restriction starts ... " << std::flush;
|
||||
STXXLRestrictionsVector::iterator restrictions_iterator = restrictions_list.begin();
|
||||
STXXLWayIDStartEndVector::iterator way_start_and_end_iterator = way_start_end_id_list.begin();
|
||||
|
||||
while(
|
||||
way_start_and_end_iterator != way_start_end_id_list.end() &&
|
||||
restrictions_iterator != restrictions_list.end()
|
||||
) {
|
||||
|
||||
if(way_start_and_end_iterator->wayID < restrictions_iterator->fromWay){
|
||||
++way_start_and_end_iterator;
|
||||
continue;
|
||||
}
|
||||
|
||||
if(way_start_and_end_iterator->wayID > restrictions_iterator->fromWay) {
|
||||
++restrictions_iterator;
|
||||
continue;
|
||||
}
|
||||
|
||||
BOOST_ASSERT(way_start_and_end_iterator->wayID == restrictions_iterator->fromWay);
|
||||
NodeID via_node_id = restrictions_iterator->restriction.viaNode;
|
||||
|
||||
if(way_start_and_end_iterator->firstStart == via_node_id) {
|
||||
restrictions_iterator->restriction.fromNode = way_start_and_end_iterator->firstTarget;
|
||||
} else if(way_start_and_end_iterator->firstTarget == via_node_id) {
|
||||
restrictions_iterator->restriction.fromNode = way_start_and_end_iterator->firstStart;
|
||||
} else if(way_start_and_end_iterator->lastStart == via_node_id) {
|
||||
restrictions_iterator->restriction.fromNode = way_start_and_end_iterator->lastTarget;
|
||||
} else if(way_start_and_end_iterator->lastTarget == via_node_id) {
|
||||
restrictions_iterator->restriction.fromNode = way_start_and_end_iterator->lastStart;
|
||||
}
|
||||
++restrictions_iterator;
|
||||
}
|
||||
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] Sorting restrctns. by to ... " << std::flush;
|
||||
stxxl::sort(
|
||||
restrictions_list.begin(),
|
||||
restrictions_list.end(),
|
||||
CmpRestrictionContainerByTo(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
|
||||
time = get_timestamp();
|
||||
unsigned usableRestrictionsCounter(0);
|
||||
std::cout << "[extractor] Fixing restriction ends ... " << std::flush;
|
||||
restrictions_iterator = restrictions_list.begin();
|
||||
way_start_and_end_iterator = way_start_end_id_list.begin();
|
||||
while(
|
||||
way_start_and_end_iterator != way_start_end_id_list.end() &&
|
||||
restrictions_iterator != restrictions_list.end()
|
||||
) {
|
||||
if(way_start_and_end_iterator->wayID < restrictions_iterator->toWay){
|
||||
++way_start_and_end_iterator;
|
||||
continue;
|
||||
}
|
||||
if(way_start_and_end_iterator->wayID > restrictions_iterator->toWay) {
|
||||
++restrictions_iterator;
|
||||
continue;
|
||||
}
|
||||
NodeID via_node_id = restrictions_iterator->restriction.viaNode;
|
||||
if(way_start_and_end_iterator->lastStart == via_node_id) {
|
||||
restrictions_iterator->restriction.toNode = way_start_and_end_iterator->lastTarget;
|
||||
} else if(way_start_and_end_iterator->lastTarget == via_node_id) {
|
||||
restrictions_iterator->restriction.toNode = way_start_and_end_iterator->lastStart;
|
||||
} else if(way_start_and_end_iterator->firstStart == via_node_id) {
|
||||
restrictions_iterator->restriction.toNode = way_start_and_end_iterator->firstTarget;
|
||||
} else if(way_start_and_end_iterator->firstTarget == via_node_id) {
|
||||
restrictions_iterator->restriction.toNode = way_start_and_end_iterator->firstStart;
|
||||
}
|
||||
|
||||
if(
|
||||
UINT_MAX != restrictions_iterator->restriction.fromNode &&
|
||||
UINT_MAX != restrictions_iterator->restriction.toNode
|
||||
) {
|
||||
++usableRestrictionsCounter;
|
||||
}
|
||||
++restrictions_iterator;
|
||||
}
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
SimpleLogger().Write() << "usable restrictions: " << usableRestrictionsCounter;
|
||||
//serialize restrictions
|
||||
std::ofstream restrictions_out_stream;
|
||||
restrictions_out_stream.open(restrictions_file_name.c_str(), std::ios::binary);
|
||||
restrictions_out_stream.write((char*)&uuid, sizeof(UUID));
|
||||
restrictions_out_stream.write(
|
||||
(char*)&usableRestrictionsCounter,
|
||||
sizeof(unsigned)
|
||||
);
|
||||
for(
|
||||
restrictions_iterator = restrictions_list.begin();
|
||||
restrictions_iterator != restrictions_list.end();
|
||||
++restrictions_iterator
|
||||
) {
|
||||
if(
|
||||
UINT_MAX != restrictions_iterator->restriction.fromNode &&
|
||||
UINT_MAX != restrictions_iterator->restriction.toNode
|
||||
) {
|
||||
restrictions_out_stream.write(
|
||||
(char *)&(restrictions_iterator->restriction),
|
||||
sizeof(TurnRestriction)
|
||||
);
|
||||
}
|
||||
}
|
||||
restrictions_out_stream.close();
|
||||
|
||||
std::ofstream file_out_stream;
|
||||
file_out_stream.open(output_file_name.c_str(), std::ios::binary);
|
||||
file_out_stream.write((char*)&uuid, sizeof(UUID));
|
||||
file_out_stream.write((char*)&number_of_used_nodes, sizeof(unsigned));
|
||||
time = get_timestamp();
|
||||
std::cout << "[extractor] Confirming/Writing used nodes ... " << std::flush;
|
||||
|
||||
//identify all used nodes by a merging step of two sorted lists
|
||||
STXXLNodeVector::iterator node_iterator = all_nodes_list.begin();
|
||||
STXXLNodeIDVector::iterator node_id_iterator = used_node_id_list.begin();
|
||||
while(
|
||||
node_id_iterator != used_node_id_list.end() &&
|
||||
node_iterator != all_nodes_list.end()
|
||||
) {
|
||||
if(*node_id_iterator < node_iterator->id){
|
||||
++node_id_iterator;
|
||||
continue;
|
||||
}
|
||||
if(*node_id_iterator > node_iterator->id) {
|
||||
++node_iterator;
|
||||
continue;
|
||||
}
|
||||
BOOST_ASSERT( *node_id_iterator == node_iterator->id);
|
||||
|
||||
file_out_stream.write(
|
||||
(char*)&(*node_iterator),
|
||||
sizeof(ExternalMemoryNode)
|
||||
);
|
||||
|
||||
++number_of_used_nodes;
|
||||
++node_id_iterator;
|
||||
++node_iterator;
|
||||
}
|
||||
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
|
||||
std::cout << "[extractor] setting number of nodes ... " << std::flush;
|
||||
std::ios::pos_type previous_file_position = file_out_stream.tellp();
|
||||
file_out_stream.seekp(sizeof(UUID), std::ios::beg);
|
||||
file_out_stream.write((char*)&number_of_used_nodes, sizeof(unsigned));
|
||||
file_out_stream.seekp(previous_file_position);
|
||||
|
||||
std::cout << "ok" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
// Sort edges by start.
|
||||
std::cout << "[extractor] Sorting edges by start ... " << std::flush;
|
||||
stxxl::sort(
|
||||
all_edges_list.begin(),
|
||||
all_edges_list.end(),
|
||||
CmpEdgeByStartID(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] Setting start coords ... " << std::flush;
|
||||
file_out_stream.write((char*)&number_of_used_edges, sizeof(unsigned));
|
||||
// Traverse list of edges and nodes in parallel and set start coord
|
||||
node_iterator = all_nodes_list.begin();
|
||||
STXXLEdgeVector::iterator edge_iterator = all_edges_list.begin();
|
||||
while(
|
||||
edge_iterator != all_edges_list.end() &&
|
||||
node_iterator != all_nodes_list.end()
|
||||
) {
|
||||
if(edge_iterator->start < node_iterator->id){
|
||||
++edge_iterator;
|
||||
continue;
|
||||
}
|
||||
if(edge_iterator->start > node_iterator->id) {
|
||||
++node_iterator;
|
||||
continue;
|
||||
}
|
||||
|
||||
BOOST_ASSERT(edge_iterator->start == node_iterator->id);
|
||||
edge_iterator->startCoord.lat = node_iterator->lat;
|
||||
edge_iterator->startCoord.lon = node_iterator->lon;
|
||||
++edge_iterator;
|
||||
}
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
// Sort Edges by target
|
||||
std::cout << "[extractor] Sorting edges by target ... " << std::flush;
|
||||
stxxl::sort(
|
||||
all_edges_list.begin(),
|
||||
all_edges_list.end(),
|
||||
CmpEdgeByTargetID(),
|
||||
4294967296
|
||||
);
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] Setting target coords ... " << std::flush;
|
||||
// Traverse list of edges and nodes in parallel and set target coord
|
||||
node_iterator = all_nodes_list.begin();
|
||||
edge_iterator = all_edges_list.begin();
|
||||
|
||||
while(
|
||||
edge_iterator != all_edges_list.end() &&
|
||||
node_iterator != all_nodes_list.end()
|
||||
) {
|
||||
if(edge_iterator->target < node_iterator->id){
|
||||
++edge_iterator;
|
||||
continue;
|
||||
}
|
||||
if(edge_iterator->target > node_iterator->id) {
|
||||
++node_iterator;
|
||||
continue;
|
||||
}
|
||||
BOOST_ASSERT(edge_iterator->target == node_iterator->id);
|
||||
if(edge_iterator->startCoord.lat != INT_MIN && edge_iterator->startCoord.lon != INT_MIN) {
|
||||
edge_iterator->targetCoord.lat = node_iterator->lat;
|
||||
edge_iterator->targetCoord.lon = node_iterator->lon;
|
||||
|
||||
const double distance = FixedPointCoordinate::ApproximateDistance(
|
||||
edge_iterator->startCoord.lat,
|
||||
edge_iterator->startCoord.lon,
|
||||
node_iterator->lat,
|
||||
node_iterator->lon
|
||||
);
|
||||
|
||||
BOOST_ASSERT(edge_iterator->speed != -1);
|
||||
const double weight = ( distance * 10. ) / (edge_iterator->speed / 3.6);
|
||||
int integer_weight = std::max( 1, (int)std::floor((edge_iterator->isDurationSet ? edge_iterator->speed : weight)+.5) );
|
||||
int integer_distance = std::max( 1, (int)distance );
|
||||
short zero = 0;
|
||||
short one = 1;
|
||||
|
||||
file_out_stream.write((char*)&edge_iterator->start, sizeof(unsigned));
|
||||
file_out_stream.write((char*)&edge_iterator->target, sizeof(unsigned));
|
||||
file_out_stream.write((char*)&integer_distance, sizeof(int));
|
||||
switch(edge_iterator->direction) {
|
||||
case ExtractionWay::notSure:
|
||||
file_out_stream.write((char*)&zero, sizeof(short));
|
||||
break;
|
||||
case ExtractionWay::oneway:
|
||||
file_out_stream.write((char*)&one, sizeof(short));
|
||||
break;
|
||||
case ExtractionWay::bidirectional:
|
||||
file_out_stream.write((char*)&zero, sizeof(short));
|
||||
|
||||
break;
|
||||
case ExtractionWay::opposite:
|
||||
file_out_stream.write((char*)&one, sizeof(short));
|
||||
break;
|
||||
default:
|
||||
throw OSRMException("edge has broken direction");
|
||||
break;
|
||||
}
|
||||
file_out_stream.write(
|
||||
(char*)&integer_weight, sizeof(int)
|
||||
);
|
||||
BOOST_ASSERT(edge_iterator->type >= 0);
|
||||
file_out_stream.write(
|
||||
(char*)&edge_iterator->type,
|
||||
sizeof(short)
|
||||
);
|
||||
file_out_stream.write(
|
||||
(char*)&edge_iterator->nameID,
|
||||
sizeof(unsigned)
|
||||
);
|
||||
file_out_stream.write(
|
||||
(char*)&edge_iterator->isRoundabout,
|
||||
sizeof(bool)
|
||||
);
|
||||
file_out_stream.write(
|
||||
(char*)&edge_iterator->ignoreInGrid,
|
||||
sizeof(bool)
|
||||
);
|
||||
file_out_stream.write(
|
||||
(char*)&edge_iterator->isAccessRestricted,
|
||||
sizeof(bool)
|
||||
);
|
||||
file_out_stream.write(
|
||||
(char*)&edge_iterator->isContraFlow,
|
||||
sizeof(bool)
|
||||
);
|
||||
++number_of_used_edges;
|
||||
}
|
||||
++edge_iterator;
|
||||
}
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
std::cout << "[extractor] setting number of edges ... " << std::flush;
|
||||
|
||||
file_out_stream.seekp(previous_file_position);
|
||||
file_out_stream.write((char*)&number_of_used_edges, sizeof(unsigned));
|
||||
file_out_stream.close();
|
||||
std::cout << "ok" << std::endl;
|
||||
time = get_timestamp();
|
||||
|
||||
std::cout << "[extractor] writing street name index ... " << std::flush;
|
||||
std::string name_file_streamName = (output_file_name + ".names");
|
||||
boost::filesystem::ofstream name_file_stream(
|
||||
name_file_streamName,
|
||||
std::ios::binary
|
||||
);
|
||||
|
||||
//write number of names
|
||||
const unsigned number_of_names = name_list.size()+1;
|
||||
name_file_stream.write((char *)&(number_of_names), sizeof(unsigned));
|
||||
|
||||
//compute total number of chars
|
||||
unsigned total_number_of_chars = 0;
|
||||
BOOST_FOREACH(const std::string & temp_string, name_list) {
|
||||
total_number_of_chars += temp_string.length();
|
||||
}
|
||||
//write total number of chars
|
||||
name_file_stream.write(
|
||||
(char *)&(total_number_of_chars),
|
||||
sizeof(unsigned)
|
||||
);
|
||||
//write prefix sums
|
||||
unsigned name_lengths_prefix_sum = 0;
|
||||
BOOST_FOREACH(const std::string & temp_string, name_list) {
|
||||
name_file_stream.write(
|
||||
(char *)&(name_lengths_prefix_sum),
|
||||
sizeof(unsigned)
|
||||
);
|
||||
name_lengths_prefix_sum += temp_string.length();
|
||||
}
|
||||
//duplicate on purpose!
|
||||
name_file_stream.write(
|
||||
(char *)&(name_lengths_prefix_sum),
|
||||
sizeof(unsigned)
|
||||
);
|
||||
|
||||
//write all chars consecutively
|
||||
BOOST_FOREACH(const std::string & temp_string, name_list) {
|
||||
const unsigned string_length = temp_string.length();
|
||||
name_file_stream.write(temp_string.c_str(), string_length);
|
||||
}
|
||||
|
||||
name_file_stream.close();
|
||||
std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl;
|
||||
SimpleLogger().Write() << "Processed " <<
|
||||
number_of_used_nodes << " nodes and " <<
|
||||
number_of_used_edges << " edges";
|
||||
|
||||
} catch ( const std::exception& e ) {
|
||||
std::cerr << "Caught Execption:" << e.what() << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
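The edge weight written in the "Setting target coords" block above is worth unpacking: distance is in metres and speed in km/h, so distance / (speed / 3.6) is travel time in seconds, and the factor 10 stores it in tenths of a second. A small standalone check of the same arithmetic, with assumed example values that are not taken from the diff:

#include <algorithm>
#include <cmath>
#include <iostream>

int main()
{
    const double distance = 250.0; // metres (assumed example value)
    const double speed = 50.0;     // km/h  (assumed example value)

    // Same formula as ExtractionContainers::PrepareData: travel time in
    // deciseconds = distance[m] / (speed[km/h] / 3.6) * 10.
    const double weight = (distance * 10.) / (speed / 3.6);
    const int integer_weight = std::max(1, (int)std::floor(weight + .5));
    const int integer_distance = std::max(1, (int)distance);

    std::cout << "weight (deciseconds): " << integer_weight << "\n"; // 180, i.e. 18.0 s
    std::cout << "distance (m): " << integer_distance << "\n";       // 250
    return 0;
}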
65
Extractor/ExtractionContainers.h
Normal file
@ -0,0 +1,65 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef EXTRACTIONCONTAINERS_H_
|
||||
#define EXTRACTIONCONTAINERS_H_
|
||||
|
||||
#include "InternalExtractorEdge.h"
|
||||
#include "ExtractorStructs.h"
|
||||
#include "../DataStructures/Restriction.h"
|
||||
#include "../Util/UUID.h"
|
||||
|
||||
#include <stxxl/vector>
|
||||
|
||||
class ExtractionContainers {
|
||||
public:
|
||||
typedef stxxl::vector<NodeID> STXXLNodeIDVector;
|
||||
typedef stxxl::vector<ExternalMemoryNode> STXXLNodeVector;
|
||||
typedef stxxl::vector<InternalExtractorEdge> STXXLEdgeVector;
|
||||
typedef stxxl::vector<std::string> STXXLStringVector;
|
||||
typedef stxxl::vector<InputRestrictionContainer> STXXLRestrictionsVector;
|
||||
typedef stxxl::vector<_WayIDStartAndEndEdge> STXXLWayIDStartEndVector;
|
||||
|
||||
STXXLNodeIDVector used_node_id_list;
|
||||
STXXLNodeVector all_nodes_list;
|
||||
STXXLEdgeVector all_edges_list;
|
||||
STXXLStringVector name_list;
|
||||
STXXLRestrictionsVector restrictions_list;
|
||||
STXXLWayIDStartEndVector way_start_end_id_list;
|
||||
const UUID uuid;
|
||||
|
||||
ExtractionContainers();
|
||||
|
||||
virtual ~ExtractionContainers();
|
||||
|
||||
void PrepareData(
|
||||
const std::string & output_file_name,
|
||||
const std::string & restrictions_file_name
|
||||
);
|
||||
};
|
||||
|
||||
#endif /* EXTRACTIONCONTAINERS_H_ */
|
||||
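PrepareData repeatedly walks two ID-sorted sequences in lockstep: restrictions against way endpoints, used node IDs against all nodes, and edges against nodes. The sketch below shows that merge-join pattern on plain std::vectors; the stxxl vectors in the class above behave the same way for this purpose, and the ID values here are made up for illustration.

#include <iostream>
#include <vector>

int main()
{
    // Both lists must already be sorted by ID, as PrepareData ensures via stxxl::sort.
    const std::vector<unsigned> used_ids = {2, 5, 7, 9};
    const std::vector<unsigned> all_ids = {1, 2, 3, 5, 6, 7, 8};

    std::vector<unsigned>::const_iterator id_it = used_ids.begin();
    std::vector<unsigned>::const_iterator node_it = all_ids.begin();

    // Advance whichever side is behind; act on a match.
    while (id_it != used_ids.end() && node_it != all_ids.end())
    {
        if (*id_it < *node_it) { ++id_it; continue; }
        if (*id_it > *node_it) { ++node_it; continue; }
        std::cout << "matched node " << *node_it << "\n"; // prints 2, 5, 7
        ++id_it;
        ++node_it;
    }
    return 0;
}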
87
Extractor/ExtractionHelperFunctions.h
Normal file
@ -0,0 +1,87 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef EXTRACTIONHELPERFUNCTIONS_H_
|
||||
#define EXTRACTIONHELPERFUNCTIONS_H_
|
||||
|
||||
#include "../Util/StringUtil.h"
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <boost/algorithm/string_regex.hpp>
|
||||
#include <boost/regex.hpp>
|
||||
#include <climits>
|
||||
|
||||
namespace qi = boost::spirit::qi;
|
||||
|
||||
//TODO: Move into LUA
|
||||
|
||||
inline bool durationIsValid(const std::string &s) {
|
||||
boost::regex e ("((\\d|\\d\\d):(\\d|\\d\\d):(\\d|\\d\\d))|((\\d|\\d\\d):(\\d|\\d\\d))|(\\d|\\d\\d)",boost::regex_constants::icase|boost::regex_constants::perl);
|
||||
|
||||
std::vector< std::string > result;
|
||||
boost::algorithm::split_regex( result, s, boost::regex( ":" ) ) ;
|
||||
bool matched = regex_match(s, e);
|
||||
return matched;
|
||||
}
|
||||
|
||||
inline unsigned parseDuration(const std::string &s) {
|
||||
unsigned hours = 0;
|
||||
unsigned minutes = 0;
|
||||
unsigned seconds = 0;
|
||||
boost::regex e ("((\\d|\\d\\d):(\\d|\\d\\d):(\\d|\\d\\d))|((\\d|\\d\\d):(\\d|\\d\\d))|(\\d|\\d\\d)",boost::regex_constants::icase|boost::regex_constants::perl);
|
||||
|
||||
std::vector< std::string > result;
|
||||
boost::algorithm::split_regex( result, s, boost::regex( ":" ) ) ;
|
||||
bool matched = regex_match(s, e);
|
||||
if(matched) {
|
||||
if(1 == result.size()) {
|
||||
minutes = stringToInt(result[0]);
|
||||
}
|
||||
if(2 == result.size()) {
|
||||
minutes = stringToInt(result[1]);
|
||||
hours = stringToInt(result[0]);
|
||||
}
|
||||
if(3 == result.size()) {
|
||||
seconds = stringToInt(result[2]);
|
||||
minutes = stringToInt(result[1]);
|
||||
hours = stringToInt(result[0]);
|
||||
}
|
||||
return 10*(3600*hours+60*minutes+seconds);
|
||||
}
|
||||
return UINT_MAX;
|
||||
}
|
||||
|
||||
inline int parseMaxspeed(std::string input) { //call-by-value on purpose.
|
||||
boost::algorithm::to_lower(input);
|
||||
int n = stringToInt(input);
|
||||
if (input.find("mph") != std::string::npos || input.find("mp/h") != std::string::npos) {
|
||||
n = (n*1609)/1000;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
#endif /* EXTRACTIONHELPERFUNCTIONS_H_ */
|
||||
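parseDuration above returns OSM duration= values in tenths of a second, matching the decisecond edge weights written by ExtractionContainers. A standalone re-implementation of the same arithmetic follows; it uses std::stoi instead of the project's stringToInt and omits the regex validation, so the helper name and the sample inputs are assumptions made for the example.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Same arithmetic as parseDuration: "HH:MM:SS", "HH:MM" or "MM",
// returned in tenths of a second. Validation is omitted for brevity.
static unsigned ToDeciseconds(const std::string &value)
{
    std::vector<unsigned> parts;
    std::stringstream ss(value);
    std::string token;
    while (std::getline(ss, token, ':'))
    {
        parts.push_back(std::stoi(token));
    }
    unsigned hours = 0, minutes = 0, seconds = 0;
    if (parts.size() == 1) { minutes = parts[0]; }
    if (parts.size() == 2) { hours = parts[0]; minutes = parts[1]; }
    if (parts.size() == 3) { hours = parts[0]; minutes = parts[1]; seconds = parts[2]; }
    return 10 * (3600 * hours + 60 * minutes + seconds);
}

int main()
{
    std::cout << ToDeciseconds("15") << "\n";       // 9000   (15 min)
    std::cout << ToDeciseconds("01:30") << "\n";    // 54000  (1 h 30 min)
    std::cout << ToDeciseconds("00:02:30") << "\n"; // 1500   (2 min 30 s)
    return 0;
}

parseMaxspeed applies the analogous imperial conversion, n * 1609 / 1000, when the tag value contains "mph" or "mp/h".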
78
Extractor/ExtractionWay.h
Normal file
@ -0,0 +1,78 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef EXTRACTION_WAY_H
|
||||
#define EXTRACTION_WAY_H
|
||||
|
||||
#include "../DataStructures/HashTable.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
struct ExtractionWay {
|
||||
ExtractionWay() {
|
||||
Clear();
|
||||
}
|
||||
|
||||
inline void Clear(){
|
||||
id = UINT_MAX;
|
||||
nameID = UINT_MAX;
|
||||
path.clear();
|
||||
keyVals.clear();
|
||||
direction = ExtractionWay::notSure;
|
||||
speed = -1;
|
||||
backward_speed = -1;
|
||||
duration = -1;
|
||||
type = -1;
|
||||
access = true;
|
||||
roundabout = false;
|
||||
isAccessRestricted = false;
|
||||
ignoreInGrid = false;
|
||||
}
|
||||
|
||||
enum Directions {
|
||||
notSure = 0, oneway, bidirectional, opposite
|
||||
};
|
||||
unsigned id;
|
||||
unsigned nameID;
|
||||
double speed;
|
||||
double backward_speed;
|
||||
double duration;
|
||||
Directions direction;
|
||||
std::string name;
|
||||
short type;
|
||||
bool access;
|
||||
bool roundabout;
|
||||
bool isAccessRestricted;
|
||||
bool ignoreInGrid;
|
||||
std::vector< NodeID > path;
|
||||
HashTable<std::string, std::string> keyVals;
|
||||
};
|
||||
|
||||
|
||||
#endif //EXTRACTION_WAY_H
|
||||
155
Extractor/ExtractorCallbacks.cpp
Normal file
@ -0,0 +1,155 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "ExtractionContainers.h"
|
||||
#include "ExtractionHelperFunctions.h"
|
||||
#include "ExtractionWay.h"
|
||||
#include "ExtractorCallbacks.h"
|
||||
|
||||
#include "../DataStructures/Restriction.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <cfloat>
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
#include <boost/algorithm/string/regex.hpp>
|
||||
#include <boost/regex.hpp>
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
ExtractorCallbacks::ExtractorCallbacks()
|
||||
:
|
||||
stringMap(NULL),
|
||||
externalMemory(NULL)
|
||||
{ }
|
||||
|
||||
ExtractorCallbacks::ExtractorCallbacks(
|
||||
ExtractionContainers * ext,
|
||||
StringMap * strMap
|
||||
) :
|
||||
stringMap(strMap),
|
||||
externalMemory(ext)
|
||||
{ }
|
||||
|
||||
ExtractorCallbacks::~ExtractorCallbacks() { }
|
||||
|
||||
/** warning: caller needs to take care of synchronization! */
|
||||
void ExtractorCallbacks::nodeFunction(const ExternalMemoryNode &n) {
|
||||
if(n.lat <= 85*COORDINATE_PRECISION && n.lat >= -85*COORDINATE_PRECISION) {
|
||||
externalMemory->all_nodes_list.push_back(n);
|
||||
}
|
||||
}
|
||||
|
||||
bool ExtractorCallbacks::restrictionFunction(const InputRestrictionContainer &r) {
|
||||
externalMemory->restrictions_list.push_back(r);
|
||||
return true;
|
||||
}
|
||||
|
||||
/** warning: caller needs to take care of synchronization! */
|
||||
void ExtractorCallbacks::wayFunction(ExtractionWay &parsed_way) {
|
||||
if((0 < parsed_way.speed) || (0 < parsed_way.duration)) { //Only true if the way is specified by the speed profile
|
||||
if(UINT_MAX == parsed_way.id){
|
||||
SimpleLogger().Write(logDEBUG) <<
|
||||
"found bogus way with id: " << parsed_way.id <<
|
||||
" of size " << parsed_way.path.size();
|
||||
return;
|
||||
}
|
||||
|
||||
if(0 < parsed_way.duration) {
|
||||
//TODO: iterate all way segments and set duration corresponding to the length of each segment
|
||||
parsed_way.speed = parsed_way.duration/(parsed_way.path.size()-1);
|
||||
}
|
||||
|
||||
if(std::numeric_limits<double>::epsilon() >= std::abs(-1. - parsed_way.speed)){
|
||||
SimpleLogger().Write(logDEBUG) <<
|
||||
"found way with bogus speed, id: " << parsed_way.id;
|
||||
return;
|
||||
}
|
||||
|
||||
//Get the unique identifier for the street name
|
||||
const StringMap::const_iterator & string_map_iterator = stringMap->find(parsed_way.name);
|
||||
if(stringMap->end() == string_map_iterator) {
|
||||
parsed_way.nameID = externalMemory->name_list.size();
|
||||
externalMemory->name_list.push_back(parsed_way.name);
|
||||
stringMap->insert(std::make_pair(parsed_way.name, parsed_way.nameID));
|
||||
} else {
|
||||
parsed_way.nameID = string_map_iterator->second;
|
||||
}
|
||||
|
||||
if(ExtractionWay::opposite == parsed_way.direction) {
|
||||
std::reverse( parsed_way.path.begin(), parsed_way.path.end() );
|
||||
parsed_way.direction = ExtractionWay::oneway;
|
||||
}
|
||||
|
||||
const bool split_bidirectional_edge = (parsed_way.backward_speed > 0) && (parsed_way.speed != parsed_way.backward_speed);
|
||||
|
||||
for(std::vector< NodeID >::size_type n = 0; n < parsed_way.path.size()-1; ++n) {
|
||||
externalMemory->all_edges_list.push_back(
|
||||
InternalExtractorEdge(parsed_way.path[n],
|
||||
parsed_way.path[n+1],
|
||||
parsed_way.type,
|
||||
(split_bidirectional_edge ? ExtractionWay::oneway : parsed_way.direction),
|
||||
parsed_way.speed,
|
||||
parsed_way.nameID,
|
||||
parsed_way.roundabout,
|
||||
parsed_way.ignoreInGrid,
|
||||
(0 < parsed_way.duration),
|
||||
parsed_way.isAccessRestricted
|
||||
)
|
||||
);
|
||||
externalMemory->used_node_id_list.push_back(parsed_way.path[n]);
|
||||
}
|
||||
externalMemory->used_node_id_list.push_back(parsed_way.path.back());
|
||||
|
||||
//The following information is needed to identify start and end segments of restrictions
|
||||
externalMemory->way_start_end_id_list.push_back(_WayIDStartAndEndEdge(parsed_way.id, parsed_way.path[0], parsed_way.path[1], parsed_way.path[parsed_way.path.size()-2], parsed_way.path.back()));
|
||||
|
||||
if(split_bidirectional_edge) { //Only true if the way should be split
|
||||
std::reverse( parsed_way.path.begin(), parsed_way.path.end() );
|
||||
for(std::vector< NodeID >::size_type n = 0; n < parsed_way.path.size()-1; ++n) {
|
||||
externalMemory->all_edges_list.push_back(
|
||||
InternalExtractorEdge(parsed_way.path[n],
|
||||
parsed_way.path[n+1],
|
||||
parsed_way.type,
|
||||
ExtractionWay::oneway,
|
||||
parsed_way.backward_speed,
|
||||
parsed_way.nameID,
|
||||
parsed_way.roundabout,
|
||||
parsed_way.ignoreInGrid,
|
||||
(0 < parsed_way.duration),
|
||||
parsed_way.isAccessRestricted,
|
||||
(ExtractionWay::oneway == parsed_way.direction)
|
||||
)
|
||||
);
|
||||
}
|
||||
externalMemory->way_start_end_id_list.push_back(_WayIDStartAndEndEdge(parsed_way.id, parsed_way.path[0], parsed_way.path[1], parsed_way.path[parsed_way.path.size()-2], parsed_way.path.back()));
|
||||
}
|
||||
}
|
||||
}
|
||||
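wayFunction deduplicates street names through the StringMap: the first occurrence of a name is appended to name_list and receives the next index as its nameID, and later occurrences reuse the stored index. A minimal standalone version of that lookup-or-insert step is shown below; it swaps boost::unordered_map for std::unordered_map, and the street names are invented for the example.

#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

int main()
{
    std::unordered_map<std::string, unsigned> string_map; // stands in for StringMap
    std::vector<std::string> name_list;                   // stands in for externalMemory->name_list

    const char *names[] = {"Main Street", "High Street", "Main Street"};
    for (const char *raw_name : names)
    {
        const std::string name(raw_name);
        unsigned name_id;
        std::unordered_map<std::string, unsigned>::const_iterator it = string_map.find(name);
        if (it == string_map.end())
        {
            // First time this name appears: append it and remember its index.
            name_id = name_list.size();
            name_list.push_back(name);
            string_map.insert(std::make_pair(name, name_id));
        }
        else
        {
            name_id = it->second;
        }
        std::cout << name << " -> nameID " << name_id << "\n"; // 0, 1, 0
    }
    return 0;
}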
68
Extractor/ExtractorCallbacks.h
Normal file
@ -0,0 +1,68 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef EXTRACTORCALLBACKS_H_
|
||||
#define EXTRACTORCALLBACKS_H_
|
||||
|
||||
#include "ExtractorStructs.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/unordered_map.hpp>
|
||||
#include <string>
|
||||
|
||||
class ExtractionContainers;
|
||||
struct ExtractionWay;
|
||||
struct InputRestrictionContainer;
|
||||
|
||||
typedef boost::unordered_map<std::string, NodeID> StringMap;
|
||||
|
||||
class ExtractorCallbacks{
|
||||
private:
|
||||
|
||||
StringMap * stringMap;
|
||||
ExtractionContainers * externalMemory;
|
||||
|
||||
ExtractorCallbacks();
|
||||
public:
|
||||
explicit ExtractorCallbacks(
|
||||
ExtractionContainers * ext,
|
||||
StringMap * strMap
|
||||
);
|
||||
|
||||
~ExtractorCallbacks();
|
||||
|
||||
/** warning: caller needs to take care of synchronization! */
|
||||
void nodeFunction(const ExternalMemoryNode &n);
|
||||
|
||||
bool restrictionFunction(const InputRestrictionContainer &r);
|
||||
|
||||
/** warning: caller needs to take care of synchronization! */
|
||||
void wayFunction(ExtractionWay &w);
|
||||
|
||||
};
|
||||
|
||||
#endif /* EXTRACTORCALLBACKS_H_ */
|
||||
130
Extractor/ExtractorStructs.h
Normal file
@ -0,0 +1,130 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef EXTRACTORSTRUCTS_H_
|
||||
#define EXTRACTORSTRUCTS_H_
|
||||
|
||||
#include "../DataStructures/HashTable.h"
|
||||
#include "../DataStructures/ImportNode.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <string>
|
||||
|
||||
struct ExtractorRelation {
|
||||
ExtractorRelation() : type(unknown){}
|
||||
enum {
|
||||
unknown = 0, ferry, turnRestriction
|
||||
} type;
|
||||
HashTable<std::string, std::string> keyVals;
|
||||
};
|
||||
|
||||
struct _WayIDStartAndEndEdge {
|
||||
unsigned wayID;
|
||||
NodeID firstStart;
|
||||
NodeID firstTarget;
|
||||
NodeID lastStart;
|
||||
NodeID lastTarget;
|
||||
_WayIDStartAndEndEdge()
|
||||
:
|
||||
wayID(UINT_MAX),
|
||||
firstStart(UINT_MAX),
|
||||
firstTarget(UINT_MAX),
|
||||
lastStart(UINT_MAX),
|
||||
lastTarget(UINT_MAX)
|
||||
{ }
|
||||
|
||||
explicit _WayIDStartAndEndEdge(
|
||||
unsigned w,
|
||||
NodeID fs,
|
||||
NodeID ft,
|
||||
NodeID ls,
|
||||
NodeID lt
|
||||
) :
|
||||
wayID(w),
|
||||
firstStart(fs),
|
||||
firstTarget(ft),
|
||||
lastStart(ls),
|
||||
lastTarget(lt)
|
||||
{ }
|
||||
|
||||
static _WayIDStartAndEndEdge min_value() {
|
||||
return _WayIDStartAndEndEdge((std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)());
|
||||
}
|
||||
static _WayIDStartAndEndEdge max_value() {
|
||||
return _WayIDStartAndEndEdge((std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)());
|
||||
}
|
||||
};
|
||||
|
||||
struct CmpWayByID {
|
||||
typedef _WayIDStartAndEndEdge value_type;
|
||||
bool operator ()(
|
||||
const _WayIDStartAndEndEdge & a,
|
||||
const _WayIDStartAndEndEdge & b
|
||||
) const {
|
||||
return a.wayID < b.wayID;
|
||||
}
|
||||
value_type max_value() {
|
||||
return _WayIDStartAndEndEdge::max_value();
|
||||
}
|
||||
value_type min_value() {
|
||||
return _WayIDStartAndEndEdge::min_value();
|
||||
}
|
||||
};
|
||||
|
||||
struct Cmp {
|
||||
typedef NodeID value_type;
|
||||
bool operator ()(
|
||||
const NodeID a,
|
||||
const NodeID b
|
||||
) const {
|
||||
return a < b;
|
||||
}
|
||||
value_type max_value() {
|
||||
return 0xffffffff;
|
||||
}
|
||||
value_type min_value() {
|
||||
return 0x0;
|
||||
}
|
||||
};
|
||||
|
||||
struct CmpNodeByID {
|
||||
typedef ExternalMemoryNode value_type;
|
||||
bool operator () (
|
||||
const ExternalMemoryNode & a,
|
||||
const ExternalMemoryNode & b
|
||||
) const {
|
||||
return a.id < b.id;
|
||||
}
|
||||
value_type max_value() {
|
||||
return ExternalMemoryNode::max_value();
|
||||
}
|
||||
value_type min_value() {
|
||||
return ExternalMemoryNode::min_value();
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* EXTRACTORSTRUCTS_H_ */
|
||||
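The comparators above (Cmp, CmpNodeByID, CmpWayByID) all follow the shape stxxl::sort expects: a strict-weak-ordering operator() plus min_value()/max_value() sentinels that the external sorter needs in addition to the comparison itself. Below is a compact standalone comparator in the same shape, exercised with std::sort since stxxl is not needed to show the structure; the Record struct and its values are invented for the example.

#include <algorithm>
#include <iostream>
#include <limits>
#include <vector>

struct Record
{
    unsigned id;
};

// Same shape as the comparators in ExtractorStructs.h: operator() plus
// sentinel values. A call such as
//   stxxl::sort(begin, end, CmpRecordByID(), memory_budget)
// consumes exactly this interface; std::sort only needs operator().
struct CmpRecordByID
{
    typedef Record value_type;
    bool operator()(const Record &a, const Record &b) const { return a.id < b.id; }
    value_type max_value() const { return Record{std::numeric_limits<unsigned>::max()}; }
    value_type min_value() const { return Record{std::numeric_limits<unsigned>::min()}; }
};

int main()
{
    std::vector<Record> records = {{7}, {2}, {5}};
    std::sort(records.begin(), records.end(), CmpRecordByID());
    for (const Record &r : records)
    {
        std::cout << r.id << "\n"; // 2 5 7
    }
    return 0;
}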
207
Extractor/InternalExtractorEdge.h
Normal file
@ -0,0 +1,207 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#ifndef INTERNAL_EXTRACTOR_EDGE_H
|
||||
#define INTERNAL_EXTRACTOR_EDGE_H
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
struct InternalExtractorEdge {
|
||||
InternalExtractorEdge()
|
||||
:
|
||||
start(0),
|
||||
target(0),
|
||||
type(0),
|
||||
direction(0),
|
||||
speed(0),
|
||||
nameID(0),
|
||||
isRoundabout(false),
|
||||
ignoreInGrid(false),
|
||||
isDurationSet(false),
|
||||
isAccessRestricted(false),
|
||||
isContraFlow(false)
|
||||
{ }
|
||||
|
||||
explicit InternalExtractorEdge(NodeID start, NodeID target)
|
||||
:
|
||||
start(start),
|
||||
target(target),
|
||||
type(0),
|
||||
direction(0),
|
||||
speed(0),
|
||||
nameID(0),
|
||||
isRoundabout(false),
|
||||
ignoreInGrid(false),
|
||||
isDurationSet(false),
|
||||
isAccessRestricted(false),
|
||||
isContraFlow(false)
|
||||
{ }
|
||||
|
||||
explicit InternalExtractorEdge(
|
||||
NodeID start,
|
||||
NodeID target,
|
||||
short type,
|
||||
short d,
|
||||
double speed
|
||||
) :
|
||||
start(start),
|
||||
target(target),
|
||||
type(type),
|
||||
direction(d),
|
||||
speed(speed),
|
||||
nameID(0),
|
||||
isRoundabout(false),
|
||||
ignoreInGrid(false),
|
||||
isDurationSet(false),
|
||||
isAccessRestricted(false),
|
||||
isContraFlow(false)
|
||||
{ }
|
||||
|
||||
explicit InternalExtractorEdge(
|
||||
NodeID start,
|
||||
NodeID target,
|
||||
short type,
|
||||
short direction,
|
||||
double speed,
|
||||
unsigned nameID,
|
||||
bool isRoundabout,
|
||||
bool ignoreInGrid,
|
||||
bool isDurationSet,
|
||||
bool isAccessRestricted
|
||||
) :
|
||||
start(start),
|
||||
target(target),
|
||||
type(type),
|
||||
direction(direction),
|
||||
speed(speed),
|
||||
nameID(nameID),
|
||||
isRoundabout(isRoundabout),
|
||||
ignoreInGrid(ignoreInGrid),
|
||||
isDurationSet(isDurationSet),
|
||||
isAccessRestricted(isAccessRestricted),
|
||||
isContraFlow(false)
|
||||
{
|
||||
BOOST_ASSERT(0 <= type);
|
||||
}
|
||||
|
||||
explicit InternalExtractorEdge(
|
||||
NodeID start,
|
||||
NodeID target,
|
||||
short type,
|
||||
short direction,
|
||||
double speed,
|
||||
unsigned nameID,
|
||||
bool isRoundabout,
|
||||
bool ignoreInGrid,
|
||||
bool isDurationSet,
|
||||
bool isAccessRestricted,
|
||||
bool isContraFlow
|
||||
) :
|
||||
start(start),
|
||||
target(target),
|
||||
type(type),
|
||||
direction(direction),
|
||||
speed(speed),
|
||||
nameID(nameID),
|
||||
isRoundabout(isRoundabout),
|
||||
ignoreInGrid(ignoreInGrid),
|
||||
isDurationSet(isDurationSet),
|
||||
isAccessRestricted(isAccessRestricted),
|
||||
isContraFlow(isContraFlow)
|
||||
{
|
||||
BOOST_ASSERT(0 <= type);
|
||||
}
|
||||
|
||||
// necessary static util functions for stxxl's sorting
|
||||
static InternalExtractorEdge min_value() {
|
||||
return InternalExtractorEdge(0,0);
|
||||
}
|
||||
static InternalExtractorEdge max_value() {
|
||||
return InternalExtractorEdge(
|
||||
std::numeric_limits<unsigned>::max(),
|
||||
std::numeric_limits<unsigned>::max()
|
||||
);
|
||||
}
|
||||
|
||||
NodeID start;
|
||||
NodeID target;
|
||||
short type;
|
||||
short direction;
|
||||
double speed;
|
||||
unsigned nameID;
|
||||
bool isRoundabout;
|
||||
bool ignoreInGrid;
|
||||
bool isDurationSet;
|
||||
bool isAccessRestricted;
|
||||
bool isContraFlow;
|
||||
|
||||
FixedPointCoordinate startCoord;
|
||||
FixedPointCoordinate targetCoord;
|
||||
};
|
||||
|
||||
struct CmpEdgeByStartID {
|
||||
typedef InternalExtractorEdge value_type;
|
||||
bool operator ()(
|
||||
const InternalExtractorEdge & a,
|
||||
const InternalExtractorEdge & b
|
||||
) const {
|
||||
return a.start < b.start;
|
||||
}
|
||||
|
||||
value_type max_value() {
|
||||
return InternalExtractorEdge::max_value();
|
||||
}
|
||||
|
||||
value_type min_value() {
|
||||
return InternalExtractorEdge::min_value();
|
||||
}
|
||||
};
|
||||
|
||||
struct CmpEdgeByTargetID {
|
||||
typedef InternalExtractorEdge value_type;
|
||||
|
||||
bool operator ()(
|
||||
const InternalExtractorEdge & a,
|
||||
const InternalExtractorEdge & b
|
||||
) const {
|
||||
return a.target < b.target;
|
||||
}
|
||||
|
||||
value_type max_value() {
|
||||
return InternalExtractorEdge::max_value();
|
||||
}
|
||||
|
||||
value_type min_value() {
|
||||
return InternalExtractorEdge::min_value();
|
||||
}
|
||||
};
|
||||
|
||||
#endif //INTERNAL_EXTRACTOR_EDGE_H
|
||||
533
Extractor/PBFParser.cpp
Normal file
@ -0,0 +1,533 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
*/
|
||||
|
||||
#include "PBFParser.h"
|
||||
|
||||
#include "ExtractionWay.h"
|
||||
#include "ExtractorCallbacks.h"
|
||||
#include "ScriptingEnvironment.h"
|
||||
|
||||
#include "../DataStructures/HashTable.h"
|
||||
#include "../DataStructures/Restriction.h"
|
||||
#include "../Util/MachineInfo.h"
|
||||
#include "../Util/OpenMPWrapper.h"
|
||||
#include "../Util/OSRMException.h"
|
||||
#include "../Util/SimpleLogger.h"
|
||||
#include "../typedefs.h"
|
||||
|
||||
#include <osrm/Coordinate.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/make_shared.hpp>
|
||||
#include <boost/ref.hpp>
|
||||
|
||||
#include <zlib.h>
|
||||
|
||||
PBFParser::PBFParser(
|
||||
const char * fileName,
|
||||
ExtractorCallbacks * extractor_callbacks,
|
||||
ScriptingEnvironment& scripting_environment
|
||||
) : BaseParser( extractor_callbacks, scripting_environment ) {
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
//TODO: What is the bottleneck here? Filling the queue or reading the stuff from disk?
|
||||
//NOTE: With Lua scripting enabled, parsing dominates; I/O is essentially free.
|
||||
|
||||
// Max 2500 items in queue, hardcoded.
|
||||
threadDataQueue = boost::make_shared<ConcurrentQueue<_ThreadData*> >( 2500 );
|
||||
input.open(fileName, std::ios::in | std::ios::binary);
|
||||
|
||||
if (!input) {
|
||||
throw OSRMException("pbf file not found.");
|
||||
}
|
||||
|
||||
blockCount = 0;
|
||||
groupCount = 0;
|
||||
}
|
||||
|
||||
PBFParser::~PBFParser() {
|
||||
if(input.is_open()) {
|
||||
input.close();
|
||||
}
|
||||
|
||||
// Clean up any leftover ThreadData objects in the queue
|
||||
_ThreadData* thread_data;
|
||||
while (threadDataQueue->try_pop(thread_data))
|
||||
{
|
||||
delete thread_data;
|
||||
}
|
||||
google::protobuf::ShutdownProtobufLibrary();
|
||||
|
||||
SimpleLogger().Write(logDEBUG) <<
|
||||
"parsed " << blockCount <<
|
||||
" blocks from pbf with " << groupCount <<
|
||||
" groups";
|
||||
}
|
||||
|
||||
inline bool PBFParser::ReadHeader() {
|
||||
_ThreadData initData;
|
||||
/** read Header */
|
||||
if(!readPBFBlobHeader(input, &initData)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(readBlob(input, &initData)) {
|
||||
if(!initData.PBFHeaderBlock.ParseFromArray(&(initData.charBuffer[0]), initData.charBuffer.size() ) ) {
|
||||
std::cerr << "[error] Header not parseable!" << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
for(int i = 0, featureSize = initData.PBFHeaderBlock.required_features_size(); i < featureSize; ++i) {
|
||||
const std::string& feature = initData.PBFHeaderBlock.required_features( i );
|
||||
bool supported = false;
|
||||
if ( "OsmSchema-V0.6" == feature ) {
|
||||
supported = true;
|
||||
}
|
||||
else if ( "DenseNodes" == feature ) {
|
||||
supported = true;
|
||||
}
|
||||
|
||||
if ( !supported ) {
|
||||
std::cerr << "[error] required feature not supported: " << feature.data() << std::endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
std::cerr << "[error] blob not loaded!" << std::endl;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
inline void PBFParser::ReadData() {
|
||||
bool keepRunning = true;
|
||||
do {
|
||||
_ThreadData *threadData = new _ThreadData();
|
||||
keepRunning = readNextBlock(input, threadData);
|
||||
|
||||
if (keepRunning) {
|
||||
threadDataQueue->push(threadData);
|
||||
} else {
|
||||
threadDataQueue->push(NULL); // No more data to read, parse stops when NULL encountered
|
||||
delete threadData;
|
||||
}
|
||||
} while(keepRunning);
|
||||
}
|
||||
|
||||
inline void PBFParser::ParseData() {
|
||||
while (true) {
|
||||
_ThreadData *threadData;
|
||||
threadDataQueue->wait_and_pop(threadData);
|
||||
if( NULL==threadData ) {
|
||||
SimpleLogger().Write() << "Parse Data Thread Finished";
|
||||
threadDataQueue->push(NULL); // Signal end of data for other threads
|
||||
break;
|
||||
}
|
||||
|
||||
loadBlock(threadData);
|
||||
|
||||
for(int i = 0, groupSize = threadData->PBFprimitiveBlock.primitivegroup_size(); i < groupSize; ++i) {
|
||||
threadData->currentGroupID = i;
|
||||
loadGroup(threadData);
|
||||
|
||||
if(threadData->entityTypeIndicator == TypeNode) {
|
||||
parseNode(threadData);
|
||||
}
|
||||
if(threadData->entityTypeIndicator == TypeWay) {
|
||||
parseWay(threadData);
|
||||
}
|
||||
if(threadData->entityTypeIndicator == TypeRelation) {
|
||||
parseRelation(threadData);
|
||||
}
|
||||
if(threadData->entityTypeIndicator == TypeDenseNode) {
|
||||
parseDenseNode(threadData);
|
||||
}
|
||||
}
|
||||
|
||||
delete threadData;
|
||||
threadData = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
inline bool PBFParser::Parse() {
|
||||
// Start the read and parse threads
|
||||
boost::thread readThread(boost::bind(&PBFParser::ReadData, this));
|
||||
|
||||
//Open several parse threads that are synchronized before call to
|
||||
boost::thread parseThread(boost::bind(&PBFParser::ParseData, this));
|
||||
|
||||
// Wait for the threads to finish
|
||||
readThread.join();
|
||||
parseThread.join();
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
inline void PBFParser::parseDenseNode(_ThreadData * threadData) {
|
||||
const OSMPBF::DenseNodes& dense = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).dense();
|
||||
int denseTagIndex = 0;
|
||||
int64_t m_lastDenseID = 0;
|
||||
int64_t m_lastDenseLatitude = 0;
|
||||
int64_t m_lastDenseLongitude = 0;
|
||||
|
||||
const int number_of_nodes = dense.id_size();
|
||||
std::vector<ImportNode> extracted_nodes_vector(number_of_nodes);
|
||||
for(int i = 0; i < number_of_nodes; ++i) {
|
||||
m_lastDenseID += dense.id( i );
|
||||
m_lastDenseLatitude += dense.lat( i );
|
||||
m_lastDenseLongitude += dense.lon( i );
|
||||
extracted_nodes_vector[i].id = m_lastDenseID;
|
||||
extracted_nodes_vector[i].lat = COORDINATE_PRECISION*( ( double ) m_lastDenseLatitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lat_offset() ) / NANO;
|
||||
extracted_nodes_vector[i].lon = COORDINATE_PRECISION*( ( double ) m_lastDenseLongitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lon_offset() ) / NANO;
|
||||
while (denseTagIndex < dense.keys_vals_size()) {
|
||||
const int tagValue = dense.keys_vals( denseTagIndex );
|
||||
if( 0 == tagValue ) {
|
||||
++denseTagIndex;
|
||||
break;
|
||||
}
|
||||
const int keyValue = dense.keys_vals ( denseTagIndex+1 );
|
||||
const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(tagValue);
|
||||
const std::string & value = threadData->PBFprimitiveBlock.stringtable().s(keyValue);
|
||||
extracted_nodes_vector[i].keyVals.emplace(key, value);
|
||||
denseTagIndex += 2;
|
||||
}
|
||||
}
|
||||
|
||||
#pragma omp parallel for schedule ( guided )
|
||||
for(int i = 0; i < number_of_nodes; ++i) {
|
||||
ImportNode & import_node = extracted_nodes_vector[i];
|
||||
ParseNodeInLua(
|
||||
import_node,
|
||||
scripting_environment.getLuaStateForThreadID(omp_get_thread_num())
|
||||
);
|
||||
}
|
||||
|
||||
BOOST_FOREACH(const ImportNode &import_node, extracted_nodes_vector) {
|
||||
extractor_callbacks->nodeFunction(import_node);
|
||||
}
|
||||
}
|
||||
|
||||
inline void PBFParser::parseNode(_ThreadData * ) {
|
||||
throw OSRMException(
|
||||
"Parsing of simple nodes not supported. PBF should use dense nodes"
|
||||
);
|
||||
}
|
||||
|
||||
inline void PBFParser::parseRelation(_ThreadData * threadData) {
|
||||
//TODO: leave early, if relation is not a restriction
|
||||
//TODO: reuse rawRestriction container
|
||||
if( !use_turn_restrictions ) {
|
||||
return;
|
||||
}
|
||||
const OSMPBF::PrimitiveGroup& group = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID );
|
||||
|
||||
for(int i = 0, relation_size = group.relations_size(); i < relation_size; ++i ) {
|
||||
std::string except_tag_string;
|
||||
const OSMPBF::Relation& inputRelation = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).relations(i);
|
||||
bool isRestriction = false;
|
||||
bool isOnlyRestriction = false;
|
||||
for(int k = 0, endOfKeys = inputRelation.keys_size(); k < endOfKeys; ++k) {
|
||||
const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputRelation.keys(k));
|
||||
const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputRelation.vals(k));
|
||||
if ("type" == key) {
|
||||
if( "restriction" == val) {
|
||||
isRestriction = true;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if ( ("restriction" == key) && (val.find("only_") == 0) )
|
||||
{
|
||||
isOnlyRestriction = true;
|
||||
}
|
||||
if ("except" == key)
|
||||
{
|
||||
except_tag_string = val;
|
||||
}
|
||||
}
|
||||
|
||||
if( isRestriction && ShouldIgnoreRestriction(except_tag_string) ) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if(isRestriction) {
|
||||
int64_t lastRef = 0;
|
||||
InputRestrictionContainer currentRestrictionContainer(isOnlyRestriction);
|
||||
for(
|
||||
int rolesIndex = 0, last_role = inputRelation.roles_sid_size();
|
||||
rolesIndex < last_role;
|
||||
++rolesIndex
|
||||
) {
|
||||
const std::string & role = threadData->PBFprimitiveBlock.stringtable().s( inputRelation.roles_sid( rolesIndex ) );
|
||||
lastRef += inputRelation.memids(rolesIndex);
|
||||
|
||||
if(!("from" == role || "to" == role || "via" == role)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
switch(inputRelation.types(rolesIndex)) {
|
||||
case 0: //node
|
||||
if("from" == role || "to" == role) { //Only via should be a node
|
||||
continue;
|
||||
}
|
||||
assert("via" == role);
|
||||
if(UINT_MAX != currentRestrictionContainer.viaNode) {
|
||||
currentRestrictionContainer.viaNode = UINT_MAX;
|
||||
}
|
||||
assert(UINT_MAX == currentRestrictionContainer.viaNode);
|
||||
currentRestrictionContainer.restriction.viaNode = lastRef;
|
||||
break;
|
||||
case 1: //way
|
||||
assert("from" == role || "to" == role || "via" == role);
|
||||
if("from" == role) {
|
||||
currentRestrictionContainer.fromWay = lastRef;
|
||||
}
|
||||
if ("to" == role) {
|
||||
currentRestrictionContainer.toWay = lastRef;
|
||||
}
|
||||
if ("via" == role) {
|
||||
assert(currentRestrictionContainer.restriction.toNode == UINT_MAX);
|
||||
currentRestrictionContainer.viaNode = lastRef;
|
||||
}
|
||||
break;
|
||||
case 2: //relation, not used. relations relating to relations are evil.
|
||||
continue;
|
||||
assert(false);
|
||||
break;
|
||||
|
||||
default: //should not happen
|
||||
assert(false);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(!extractor_callbacks->restrictionFunction(currentRestrictionContainer)) {
|
||||
std::cerr << "[PBFParser] relation not parsed" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
inline void PBFParser::parseWay(_ThreadData * threadData) {
|
||||
const int number_of_ways = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways_size();
|
||||
std::vector<ExtractionWay> parsed_way_vector(number_of_ways);
|
||||
for(int i = 0; i < number_of_ways; ++i) {
|
||||
const OSMPBF::Way& inputWay = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways( i );
|
||||
parsed_way_vector[i].id = inputWay.id();
|
||||
unsigned pathNode(0);
|
||||
const int number_of_referenced_nodes = inputWay.refs_size();
|
||||
for(int j = 0; j < number_of_referenced_nodes; ++j) {
|
||||
pathNode += inputWay.refs(j);
|
||||
parsed_way_vector[i].path.push_back(pathNode);
|
||||
}
|
||||
assert(inputWay.keys_size() == inputWay.vals_size());
|
||||
const int number_of_keys = inputWay.keys_size();
|
||||
for(int j = 0; j < number_of_keys; ++j) {
|
||||
const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputWay.keys(j));
|
||||
const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputWay.vals(j));
|
||||
parsed_way_vector[i].keyVals.emplace(key, val);
|
||||
}
|
||||
}
|
||||
|
||||
#pragma omp parallel for schedule ( guided )
|
||||
for(int i = 0; i < number_of_ways; ++i) {
|
||||
ExtractionWay & extraction_way = parsed_way_vector[i];
|
||||
if (2 > extraction_way.path.size())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
ParseWayInLua(
|
||||
extraction_way,
|
||||
scripting_environment.getLuaStateForThreadID( omp_get_thread_num())
|
||||
);
|
||||
}
|
||||
|
||||
BOOST_FOREACH(ExtractionWay & extraction_way, parsed_way_vector) {
|
||||
if (2 > extraction_way.path.size())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
extractor_callbacks->wayFunction(extraction_way);
|
||||
}
|
||||
}
|
||||
|
||||
inline void PBFParser::loadGroup(_ThreadData * threadData) {
|
||||
#ifndef NDEBUG
|
||||
++groupCount;
|
||||
#endif
|
||||
|
||||
const OSMPBF::PrimitiveGroup& group = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID );
|
||||
threadData->entityTypeIndicator = TypeDummy;
|
||||
if ( 0 != group.nodes_size() ) {
|
||||
threadData->entityTypeIndicator = TypeNode;
|
||||
}
|
||||
if ( 0 != group.ways_size() ) {
|
||||
threadData->entityTypeIndicator = TypeWay;
|
||||
}
|
||||
if ( 0 != group.relations_size() ) {
|
||||
threadData->entityTypeIndicator = TypeRelation;
|
||||
}
|
||||
if ( group.has_dense() ) {
|
||||
threadData->entityTypeIndicator = TypeDenseNode;
|
||||
assert( 0 != group.dense().id_size() );
|
||||
}
|
||||
assert( threadData->entityTypeIndicator != TypeDummy );
|
||||
}
|
||||
|
||||
inline void PBFParser::loadBlock(_ThreadData * threadData) {
|
||||
++blockCount;
|
||||
threadData->currentGroupID = 0;
|
||||
threadData->currentEntityID = 0;
|
||||
}
|
||||
|
||||
inline bool PBFParser::readPBFBlobHeader(std::fstream& stream, _ThreadData * threadData) {
|
||||
int size(0);
|
||||
stream.read((char *)&size, sizeof(int));
|
||||
size = swapEndian(size);
|
||||
if(stream.eof()) {
|
||||
return false;
|
||||
}
|
||||
if ( size > MAX_BLOB_HEADER_SIZE || size < 0 ) {
|
||||
return false;
|
||||
}
|
||||
char *data = new char[size];
|
||||
stream.read(data, size*sizeof(data[0]));
|
||||
|
||||
bool dataSuccessfullyParsed = (threadData->PBFBlobHeader).ParseFromArray( data, size);
|
||||
delete[] data;
|
||||
return dataSuccessfullyParsed;
|
||||
}
|
||||
|
||||
inline bool PBFParser::unpackZLIB(std::fstream &, _ThreadData * threadData) {
|
||||
unsigned rawSize = threadData->PBFBlob.raw_size();
|
||||
char* unpackedDataArray = new char[rawSize];
|
||||
z_stream compressedDataStream;
|
||||
compressedDataStream.next_in = ( unsigned char* ) threadData->PBFBlob.zlib_data().data();
|
||||
compressedDataStream.avail_in = threadData->PBFBlob.zlib_data().size();
|
||||
compressedDataStream.next_out = ( unsigned char* ) unpackedDataArray;
|
||||
compressedDataStream.avail_out = rawSize;
|
||||
compressedDataStream.zalloc = Z_NULL;
|
||||
compressedDataStream.zfree = Z_NULL;
|
||||
compressedDataStream.opaque = Z_NULL;
|
||||
int ret = inflateInit( &compressedDataStream );
|
||||
if ( ret != Z_OK ) {
|
||||
std::cerr << "[error] failed to init zlib stream" << std::endl;
|
||||
delete[] unpackedDataArray;
|
||||
return false;
|
||||
}
|
||||
|
||||
ret = inflate( &compressedDataStream, Z_FINISH );
|
||||
if ( ret != Z_STREAM_END ) {
|
||||
std::cerr << "[error] failed to inflate zlib stream" << std::endl;
|
||||
std::cerr << "[error] Error type: " << ret << std::endl;
|
||||
delete[] unpackedDataArray;
|
||||
return false;
|
||||
}
|
||||
|
||||
ret = inflateEnd( &compressedDataStream );
|
||||
if ( ret != Z_OK ) {
|
||||
std::cerr << "[error] failed to deinit zlib stream" << std::endl;
|
||||
delete[] unpackedDataArray;
|
||||
return false;
|
||||
}
|
||||
|
||||
threadData->charBuffer.clear(); threadData->charBuffer.resize(rawSize);
|
||||
std::copy(unpackedDataArray, unpackedDataArray + rawSize, threadData->charBuffer.begin());
|
||||
delete[] unpackedDataArray;
|
||||
return true;
|
||||
}
|
||||
|
||||
inline bool PBFParser::unpackLZMA(std::fstream &, _ThreadData * ) {
|
||||
return false;
|
||||
}
|
||||
|
||||
inline bool PBFParser::readBlob(std::fstream& stream, _ThreadData * threadData) {
|
||||
if(stream.eof()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const int size = threadData->PBFBlobHeader.datasize();
|
||||
if ( size < 0 || size > MAX_BLOB_SIZE ) {
|
||||
std::cerr << "[error] invalid Blob size:" << size << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
char* data = new char[size];
|
||||
stream.read(data, sizeof(data[0])*size);
|
||||
|
||||
if ( !threadData->PBFBlob.ParseFromArray( data, size ) ) {
|
||||
std::cerr << "[error] failed to parse blob" << std::endl;
|
||||
delete[] data;
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( threadData->PBFBlob.has_raw() ) {
|
||||
const std::string& data = threadData->PBFBlob.raw();
|
||||
threadData->charBuffer.clear();
|
||||
threadData->charBuffer.resize( data.size() );
|
||||
std::copy(data.begin(), data.end(), threadData->charBuffer.begin());
|
||||
} else if ( threadData->PBFBlob.has_zlib_data() ) {
|
||||
if ( !unpackZLIB(stream, threadData) ) {
|
||||
std::cerr << "[error] zlib data encountered that could not be unpacked" << std::endl;
|
||||
delete[] data;
|
||||
return false;
|
||||
}
|
||||
} else if ( threadData->PBFBlob.has_lzma_data() ) {
|
||||
if ( !unpackLZMA(stream, threadData) ) {
|
||||
std::cerr << "[error] lzma data encountered that could not be unpacked" << std::endl;
|
||||
}
|
||||
delete[] data;
|
||||
return false;
|
||||
} else {
|
||||
std::cerr << "[error] Blob contains no data" << std::endl;
|
||||
delete[] data;
|
||||
return false;
|
||||
}
|
||||
delete[] data;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool PBFParser::readNextBlock(std::fstream& stream, _ThreadData * threadData) {
|
||||
if(stream.eof()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( !readPBFBlobHeader(stream, threadData) ){
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( threadData->PBFBlobHeader.type() != "OSMData" ) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( !readBlob(stream, threadData) ) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( !threadData->PBFprimitiveBlock.ParseFromArray( &(threadData->charBuffer[0]), threadData-> charBuffer.size() ) ) {
|
||||
std::cerr << "failed to parse PrimitiveBlock" << std::endl;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
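Editor's note: PBFParser::Parse() above pairs one producer thread (ReadData) with one consumer thread (ParseData) over a bounded ConcurrentQueue, using a NULL pointer as the end-of-input sentinel that each consumer re-pushes for the next one. The ConcurrentQueue itself is not part of this diff, so the following self-contained sketch (an illustration only, not OSRM code; it returns the popped value instead of using an out parameter) shows the same sentinel-terminated pattern with standard C++ primitives.

    // Illustrative sketch of the bounded, sentinel-terminated producer/consumer
    // pattern used by PBFParser. Names and the 2500 bound mirror the code above;
    // the queue implementation is my own stand-in, not the real ConcurrentQueue.
    #include <condition_variable>
    #include <iostream>
    #include <mutex>
    #include <queue>
    #include <thread>

    template <typename T> class BoundedQueue {
      public:
        explicit BoundedQueue(std::size_t capacity) : capacity_(capacity) {}

        void push(T value) {
            std::unique_lock<std::mutex> lock(mutex_);
            not_full_.wait(lock, [&] { return queue_.size() < capacity_; });
            queue_.push(std::move(value));
            not_empty_.notify_one();
        }

        T wait_and_pop() {
            std::unique_lock<std::mutex> lock(mutex_);
            not_empty_.wait(lock, [&] { return !queue_.empty(); });
            T value = std::move(queue_.front());
            queue_.pop();
            not_full_.notify_one();
            return value;
        }

      private:
        std::size_t capacity_;
        std::queue<T> queue_;
        std::mutex mutex_;
        std::condition_variable not_empty_, not_full_;
    };

    int main() {
        BoundedQueue<int *> queue(2500);  // same bound the parser hardcodes

        std::thread reader([&] {          // plays the role of ReadData()
            for (int i = 0; i < 10; ++i) {
                queue.push(new int(i));
            }
            queue.push(NULL);             // sentinel: no more blocks to read
        });

        std::thread parser([&] {          // plays the role of ParseData()
            while (int *block = queue.wait_and_pop()) {
                std::cout << "parsed block " << *block << '\n';
                delete block;
            }
        });

        reader.join();
        parser.join();
    }

In the real parser the consumer pushes the sentinel back before breaking, so several ParseData threads could in principle drain the same queue and all terminate.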
103 Extractor/PBFParser.h Normal file
@ -0,0 +1,103 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef PBFPARSER_H_
#define PBFPARSER_H_

#include "BaseParser.h"
#include "../DataStructures/ConcurrentQueue.h"

#include <boost/shared_ptr.hpp>

#include <osmpbf/fileformat.pb.h>
#include <osmpbf/osmformat.pb.h>

#include <fstream>

class PBFParser : public BaseParser {

    enum EntityType {
        TypeDummy = 0,
        TypeNode = 1,
        TypeWay = 2,
        TypeRelation = 4,
        TypeDenseNode = 8
    };

    struct _ThreadData {
        int currentGroupID;
        int currentEntityID;
        EntityType entityTypeIndicator;

        OSMPBF::BlobHeader PBFBlobHeader;
        OSMPBF::Blob PBFBlob;

        OSMPBF::HeaderBlock PBFHeaderBlock;
        OSMPBF::PrimitiveBlock PBFprimitiveBlock;

        std::vector<char> charBuffer;
    };

public:
    PBFParser(
        const char * fileName,
        ExtractorCallbacks* ec,
        ScriptingEnvironment& se
    );
    virtual ~PBFParser();

    inline bool ReadHeader();
    inline bool Parse();

private:
    inline void ReadData();
    inline void ParseData();
    inline void parseDenseNode (_ThreadData * threadData);
    inline void parseNode      (_ThreadData * threadData);
    inline void parseRelation  (_ThreadData * threadData);
    inline void parseWay       (_ThreadData * threadData);

    inline void loadGroup (_ThreadData * threadData);
    inline void loadBlock (_ThreadData * threadData);
    inline bool readPBFBlobHeader(std::fstream & stream, _ThreadData * threadData);
    inline bool unpackZLIB (std::fstream & stream, _ThreadData * threadData);
    inline bool unpackLZMA (std::fstream & stream, _ThreadData * threadData);
    inline bool readBlob   (std::fstream & stream, _ThreadData * threadData);
    inline bool readNextBlock(std::fstream & stream, _ThreadData * threadData);

    static const int NANO = 1000 * 1000 * 1000;
    static const int MAX_BLOB_HEADER_SIZE = 64 * 1024;
    static const int MAX_BLOB_SIZE = 32 * 1024 * 1024;

    unsigned groupCount;
    unsigned blockCount;

    std::fstream input; // the input stream to parse
    boost::shared_ptr<ConcurrentQueue < _ThreadData* > > threadDataQueue;
};

#endif /* PBFPARSER_H_ */
128 Extractor/ScriptingEnvironment.cpp Normal file
@ -0,0 +1,128 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "ScriptingEnvironment.h"

#include "ExtractionHelperFunctions.h"
#include "ExtractionWay.h"
#include "../DataStructures/ImportNode.h"
#include "../Util/LuaUtil.h"
#include "../Util/OpenMPWrapper.h"
#include "../Util/OSRMException.h"
#include "../Util/SimpleLogger.h"
#include "../typedefs.h"

ScriptingEnvironment::ScriptingEnvironment() {}
ScriptingEnvironment::ScriptingEnvironment(const char * fileName) {
    SimpleLogger().Write() << "Using script " << fileName;

    // Create a new lua state
    for(int i = 0; i < omp_get_max_threads(); ++i) {
        luaStateVector.push_back(luaL_newstate());
    }

    // Connect LuaBind to this lua state for all threads
    #pragma omp parallel
    {
        lua_State * myLuaState = getLuaStateForThreadID(omp_get_thread_num());
        luabind::open(myLuaState);
        //open utility libraries string library;
        luaL_openlibs(myLuaState);

        luaAddScriptFolderToLoadPath( myLuaState, fileName );

        // Add our function to the state's global scope
        luabind::module(myLuaState) [
            luabind::def("print", LUA_print<std::string>),
            luabind::def("parseMaxspeed", parseMaxspeed),
            luabind::def("durationIsValid", durationIsValid),
            luabind::def("parseDuration", parseDuration)
        ];

        luabind::module(myLuaState) [
            luabind::class_<HashTable<std::string, std::string> >("keyVals")
                .def("Add", &HashTable<std::string, std::string>::Add)
                .def("Find", &HashTable<std::string, std::string>::Find)
                .def("Holds", &HashTable<std::string, std::string>::Holds)
        ];

        luabind::module(myLuaState) [
            luabind::class_<ImportNode>("Node")
                .def(luabind::constructor<>())
                .def_readwrite("lat", &ImportNode::lat)
                .def_readwrite("lon", &ImportNode::lon)
                .def_readonly("id", &ImportNode::id)
                .def_readwrite("bollard", &ImportNode::bollard)
                .def_readwrite("traffic_light", &ImportNode::trafficLight)
                .def_readwrite("tags", &ImportNode::keyVals)
        ];

        luabind::module(myLuaState) [
            luabind::class_<ExtractionWay>("Way")
                .def(luabind::constructor<>())
                .def_readonly("id", &ExtractionWay::id)
                .def_readwrite("name", &ExtractionWay::name)
                .def_readwrite("speed", &ExtractionWay::speed)
                .def_readwrite("backward_speed", &ExtractionWay::backward_speed)
                .def_readwrite("duration", &ExtractionWay::duration)
                .def_readwrite("type", &ExtractionWay::type)
                .def_readwrite("access", &ExtractionWay::access)
                .def_readwrite("roundabout", &ExtractionWay::roundabout)
                .def_readwrite("is_access_restricted", &ExtractionWay::isAccessRestricted)
                .def_readwrite("ignore_in_grid", &ExtractionWay::ignoreInGrid)
                .def_readwrite("tags", &ExtractionWay::keyVals)
                .def_readwrite("direction", &ExtractionWay::direction)
                .enum_("constants") [
                    luabind::value("notSure", 0),
                    luabind::value("oneway", 1),
                    luabind::value("bidirectional", 2),
                    luabind::value("opposite", 3)
                ]
        ];

        // fails on c++11/OS X 10.9
        luabind::module(myLuaState) [
            luabind::class_<std::vector<std::string> >("vector")
                .def("Add", static_cast<void (std::vector<std::string>::*)(const std::string&)>(&std::vector<std::string>::push_back)
                )
        ];

        if(0 != luaL_dofile(myLuaState, fileName) ) {
            throw OSRMException("ERROR occured in scripting block");
        }
    }
}

ScriptingEnvironment::~ScriptingEnvironment() {
    for(unsigned i = 0; i < luaStateVector.size(); ++i) {
        // luaStateVector[i];
    }
}

lua_State * ScriptingEnvironment::getLuaStateForThreadID(const int id) {
    return luaStateVector[id];
}
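Editor's note: the constructor above allocates one lua_State per OpenMP thread so that profile scripts can run in parallel without a global lock. The stand-alone sketch below (my own illustration; the Resource struct is a placeholder for lua_State, not OSRM code) shows only that "one resource per thread index" pattern.

    // Sketch of the one-resource-per-OpenMP-thread pattern used by
    // ScriptingEnvironment. Compile with -fopenmp.
    #include <omp.h>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    struct Resource { int owner; };   // stand-in for a lua_State

    int main() {
        std::vector<Resource *> per_thread_resource;
        for (int i = 0; i < omp_get_max_threads(); ++i) {
            per_thread_resource.push_back(new Resource());
        }

    #pragma omp parallel
        {
            // each thread only ever touches its own slot, so no mutex is needed
            Resource * mine = per_thread_resource[omp_get_thread_num()];
            mine->owner = omp_get_thread_num();
            std::printf("thread %d initialised its resource\n", mine->owner);
        }

        for (std::size_t i = 0; i < per_thread_resource.size(); ++i) {
            delete per_thread_resource[i];
        }
    }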
46 Extractor/ScriptingEnvironment.h Normal file
@ -0,0 +1,46 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef SCRIPTINGENVIRONMENT_H_
#define SCRIPTINGENVIRONMENT_H_

#include <vector>

struct lua_State;

class ScriptingEnvironment {
public:
    ScriptingEnvironment();
    ScriptingEnvironment(const char * fileName);
    virtual ~ScriptingEnvironment();

    lua_State * getLuaStateForThreadID(const int);

    std::vector<lua_State *> luaStateVector;
};

#endif /* SCRIPTINGENVIRONMENT_H_ */
288 Extractor/XMLParser.cpp Normal file
@ -0,0 +1,288 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#include "XMLParser.h"

#include "ExtractionWay.h"
#include "../DataStructures/HashTable.h"
#include "../DataStructures/ImportNode.h"
#include "../DataStructures/InputReaderFactory.h"
#include "../DataStructures/Restriction.h"
#include "../Util/SimpleLogger.h"
#include "../Util/StringUtil.h"
#include "../typedefs.h"

#include <osrm/Coordinate.h>

#include <boost/ref.hpp>

XMLParser::XMLParser(const char * filename, ExtractorCallbacks* ec, ScriptingEnvironment& se) : BaseParser(ec, se) {
    inputReader = inputReaderFactory(filename);
}

bool XMLParser::ReadHeader() {
    return (xmlTextReaderRead( inputReader ) == 1);
}
bool XMLParser::Parse() {
    while ( xmlTextReaderRead( inputReader ) == 1 ) {
        const int type = xmlTextReaderNodeType( inputReader );

        //1 is Element
        if ( type != 1 ) {
            continue;
        }

        xmlChar* currentName = xmlTextReaderName( inputReader );
        if ( currentName == NULL ) {
            continue;
        }

        if ( xmlStrEqual( currentName, ( const xmlChar* ) "node" ) == 1 ) {
            ImportNode n = _ReadXMLNode();
            ParseNodeInLua( n, lua_state );
            extractor_callbacks->nodeFunction(n);
            // if(!extractor_callbacks->nodeFunction(n))
            //     std::cerr << "[XMLParser] dense node not parsed" << std::endl;
        }

        if ( xmlStrEqual( currentName, ( const xmlChar* ) "way" ) == 1 ) {
            ExtractionWay way = _ReadXMLWay( );
            ParseWayInLua( way, lua_state );
            extractor_callbacks->wayFunction(way);
            // if(!extractor_callbacks->wayFunction(way))
            //     std::cerr << "[PBFParser] way not parsed" << std::endl;
        }
        if( use_turn_restrictions ) {
            if ( xmlStrEqual( currentName, ( const xmlChar* ) "relation" ) == 1 ) {
                InputRestrictionContainer r = _ReadXMLRestriction();
                if(r.fromWay != UINT_MAX) {
                    if(!extractor_callbacks->restrictionFunction(r)) {
                        std::cerr << "[XMLParser] restriction not parsed" << std::endl;
                    }
                }
            }
        }
        xmlFree( currentName );
    }
    return true;
}

InputRestrictionContainer XMLParser::_ReadXMLRestriction() {
    InputRestrictionContainer restriction;
    std::string except_tag_string;

    if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) {
        const int depth = xmlTextReaderDepth( inputReader );
        while ( xmlTextReaderRead( inputReader ) == 1 ) {
            const int childType = xmlTextReaderNodeType( inputReader );
            if ( childType != 1 && childType != 15 ) {
                continue;
            }
            const int childDepth = xmlTextReaderDepth( inputReader );
            xmlChar* childName = xmlTextReaderName( inputReader );
            if ( childName == NULL ) {
                continue;
            }
            if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "relation" ) == 1 ) {
                xmlFree( childName );
                break;
            }
            if ( childType != 1 ) {
                xmlFree( childName );
                continue;
            }

            if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) {
                xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" );
                xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" );
                if ( k != NULL && value != NULL ) {
                    if(xmlStrEqual(k, ( const xmlChar* ) "restriction" )){
                        if(0 == std::string((const char *) value).find("only_")) {
                            restriction.restriction.flags.isOnly = true;
                        }
                    }
                    if ( xmlStrEqual(k, (const xmlChar *) "except") ) {
                        except_tag_string = (const char*) value;
                    }
                }

                if ( k != NULL ) {
                    xmlFree( k );
                }
                if ( value != NULL ) {
                    xmlFree( value );
                }
            } else if ( xmlStrEqual( childName, ( const xmlChar* ) "member" ) == 1 ) {
                xmlChar* ref = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "ref" );
                if ( ref != NULL ) {
                    xmlChar * role = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "role" );
                    xmlChar * type = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "type" );

                    if(xmlStrEqual(role, (const xmlChar *) "to") && xmlStrEqual(type, (const xmlChar *) "way")) {
                        restriction.toWay = stringToUint((const char*) ref);
                    }
                    if(xmlStrEqual(role, (const xmlChar *) "from") && xmlStrEqual(type, (const xmlChar *) "way")) {
                        restriction.fromWay = stringToUint((const char*) ref);
                    }
                    if(xmlStrEqual(role, (const xmlChar *) "via") && xmlStrEqual(type, (const xmlChar *) "node")) {
                        restriction.restriction.viaNode = stringToUint((const char*) ref);
                    }

                    if(NULL != type) {
                        xmlFree( type );
                    }
                    if(NULL != role) {
                        xmlFree( role );
                    }
                    if(NULL != ref) {
                        xmlFree( ref );
                    }
                }
            }
            xmlFree( childName );
        }
    }

    if( ShouldIgnoreRestriction(except_tag_string) ) {
        restriction.fromWay = UINT_MAX; //workaround to ignore the restriction
    }
    return restriction;
}

ExtractionWay XMLParser::_ReadXMLWay() {
    ExtractionWay way;
    if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) {
        const int depth = xmlTextReaderDepth( inputReader );
        while ( xmlTextReaderRead( inputReader ) == 1 ) {
            const int childType = xmlTextReaderNodeType( inputReader );
            if ( childType != 1 && childType != 15 ) {
                continue;
            }
            const int childDepth = xmlTextReaderDepth( inputReader );
            xmlChar* childName = xmlTextReaderName( inputReader );
            if ( childName == NULL ) {
                continue;
            }

            if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "way" ) == 1 ) {
                xmlChar* id = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "id" );
                way.id = stringToUint((char*)id);
                xmlFree(id);
                xmlFree( childName );
                break;
            }
            if ( childType != 1 ) {
                xmlFree( childName );
                continue;
            }

            if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) {
                xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" );
                xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" );
                // cout << "->k=" << k << ", v=" << value << endl;
                if ( k != NULL && value != NULL ) {
                    way.keyVals.Add(std::string( (char *) k ), std::string( (char *) value));
                }
                if ( k != NULL ) {
                    xmlFree( k );
                }
                if ( value != NULL ) {
                    xmlFree( value );
                }
            } else if ( xmlStrEqual( childName, ( const xmlChar* ) "nd" ) == 1 ) {
                xmlChar* ref = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "ref" );
                if ( ref != NULL ) {
                    way.path.push_back( stringToUint(( const char* ) ref ) );
                    xmlFree( ref );
                }
            }
            xmlFree( childName );
        }
    }
    return way;
}

ImportNode XMLParser::_ReadXMLNode() {
    ImportNode node;

    xmlChar* attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "lat" );
    if ( attribute != NULL ) {
        node.lat = static_cast<NodeID>(COORDINATE_PRECISION*atof(( const char* ) attribute ) );
        xmlFree( attribute );
    }
    attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "lon" );
    if ( attribute != NULL ) {
        node.lon = static_cast<NodeID>(COORDINATE_PRECISION*atof(( const char* ) attribute ));
        xmlFree( attribute );
    }
    attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "id" );
    if ( attribute != NULL ) {
        node.id = stringToUint(( const char* ) attribute );
        xmlFree( attribute );
    }

    if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) {
        const int depth = xmlTextReaderDepth( inputReader );
        while ( xmlTextReaderRead( inputReader ) == 1 ) {
            const int childType = xmlTextReaderNodeType( inputReader );
            // 1 = Element, 15 = EndElement
            if ( childType != 1 && childType != 15 ) {
                continue;
            }
            const int childDepth = xmlTextReaderDepth( inputReader );
            xmlChar* childName = xmlTextReaderName( inputReader );
            if ( childName == NULL ) {
                continue;
            }

            if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "node" ) == 1 ) {
                xmlFree( childName );
                break;
            }
            if ( childType != 1 ) {
                xmlFree( childName );
                continue;
            }

            if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) {
                xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" );
                xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" );
                if ( k != NULL && value != NULL ) {
                    node.keyVals.Add(std::string( reinterpret_cast<char*>(k) ), std::string( reinterpret_cast<char*>(value)));
                }
                if ( k != NULL ) {
                    xmlFree( k );
                }
                if ( value != NULL ) {
                    xmlFree( value );
                }
            }

            xmlFree( childName );
        }
    }
    return node;
}
@ -1,6 +1,6 @@
/*

Copyright (c) 2017, Project OSRM contributors
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
@ -25,28 +25,30 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef CONTRACTOR_CONTRACTOR_HPP
#define CONTRACTOR_CONTRACTOR_HPP
#ifndef XMLPARSER_H_
#define XMLPARSER_H_

#include "contractor/contractor_config.hpp"
#include "ExtractorCallbacks.h"
#include "BaseParser.h"

namespace osrm::contractor
{
#include <libxml/xmlreader.h>

/// Base class of osrm-contract
class Contractor
{
public:
explicit Contractor(const ContractorConfig &config_) : config{config_} {}

Contractor(const Contractor &) = delete;
Contractor &operator=(const Contractor &) = delete;
class XMLParser : public BaseParser {
public:
XMLParser(
const char* filename,
ExtractorCallbacks* ec,
ScriptingEnvironment& se
);
bool ReadHeader();
bool Parse();

int Run();

private:
ContractorConfig config;
private:
InputRestrictionContainer _ReadXMLRestriction();
ExtractionWay _ReadXMLWay();
ImportNode _ReadXMLNode();
xmlTextReaderPtr inputReader;
};
} // namespace osrm::contractor

#endif // PROCESSING_CHAIN_HPP
#endif /* XMLPARSER_H_ */
7 Gemfile Normal file
@ -0,0 +1,7 @@
source "http://rubygems.org"

gem "cucumber"
gem "rake"
gem "osmlib-base"
gem "sys-proctable"
gem "rspec-expectations"
30 Gemfile.lock Normal file
@ -0,0 +1,30 @@
GEM
  remote: http://rubygems.org/
  specs:
    builder (3.2.2)
    cucumber (1.3.8)
      builder (>= 2.1.2)
      diff-lcs (>= 1.1.3)
      gherkin (~> 2.12.1)
      multi_json (>= 1.7.5, < 2.0)
      multi_test (>= 0.0.2)
    diff-lcs (1.2.4)
    gherkin (2.12.1)
      multi_json (~> 1.3)
    multi_json (1.8.0)
    multi_test (0.0.2)
    osmlib-base (0.1.4)
    rake (10.1.0)
    rspec-expectations (2.14.3)
      diff-lcs (>= 1.1.3, < 2.0)
    sys-proctable (0.9.3)

PLATFORMS
  ruby

DEPENDENCIES
  cucumber
  osmlib-base
  rake
  rspec-expectations
  sys-proctable
84 Include/osrm/Coordinate.h Normal file
@ -0,0 +1,84 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef FIXED_POINT_COORDINATE_H_
#define FIXED_POINT_COORDINATE_H_

#include <iostream>

static const double COORDINATE_PRECISION = 1000000.;

struct FixedPointCoordinate {
    int lat;
    int lon;

    FixedPointCoordinate();
    explicit FixedPointCoordinate (int lat, int lon);
    void Reset();
    bool isSet() const;
    bool isValid() const;
    bool operator==(const FixedPointCoordinate & other) const;

    static double ApproximateDistance(
        const int lat1,
        const int lon1,
        const int lat2,
        const int lon2
    );

    static double ApproximateDistance(
        const FixedPointCoordinate & c1,
        const FixedPointCoordinate & c2
    );

    static double ApproximateEuclideanDistance(
        const FixedPointCoordinate & c1,
        const FixedPointCoordinate & c2
    );

    static void convertInternalLatLonToString(
        const int value,
        std::string & output
    );

    static void convertInternalCoordinateToString(
        const FixedPointCoordinate & coord,
        std::string & output
    );

    static void convertInternalReversedCoordinateToString(
        const FixedPointCoordinate & coord,
        std::string & output
    );
};

inline std::ostream & operator<<(std::ostream & out, const FixedPointCoordinate & c){
    out << "(" << c.lat << "," << c.lon << ")";
    return out;
}

#endif /* FIXED_POINT_COORDINATE_H_ */
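Editor's note: Coordinate.h stores latitude and longitude as integers scaled by COORDINATE_PRECISION (1e6), the same convention the parsers above use when filling ImportNode. The short sketch below is an illustration only; it uses a local stand-in struct rather than FixedPointCoordinate so it is self-contained (the member functions declared above are implemented elsewhere in the tree), and the helper names are my own.

    // Sketch: the fixed-point encoding convention behind FixedPointCoordinate.
    #include <cmath>
    #include <iostream>

    static const double COORDINATE_PRECISION = 1000000.;   // same constant as the header

    struct LatLonFixed {   // stand-in for FixedPointCoordinate's two int members
        int lat;
        int lon;
    };

    static int toFixed(double degrees) {
        // lround avoids the truncation bias of a plain cast
        return static_cast<int>(std::lround(degrees * COORDINATE_PRECISION));
    }

    static double toDegrees(int fixed) {
        return fixed / COORDINATE_PRECISION;
    }

    int main() {
        LatLonFixed berlin = { toFixed(52.517037), toFixed(13.388860) };
        std::cout << "(" << berlin.lat << "," << berlin.lon << ")\n";   // (52517037,13388860)
        std::cout << toDegrees(berlin.lat) << ", " << toDegrees(berlin.lon) << "\n";
    }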
@ -1,6 +1,6 @@
/*

Copyright (c) 2017, Project OSRM contributors
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
@ -25,14 +25,21 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef OSRM_APPROACH_HPP
#define OSRM_APPROACH_HPP
#ifndef HTTP_HEADER_H
#define HTTP_HEADER_H

#include "engine/approach.hpp"
#include <string>

namespace osrm
{
using engine::Approach;
namespace http {

struct Header {
std::string name;
std::string value;
void Clear() {
name.clear();
value.clear();
}
};
}

#endif
#endif //HTTP_HEADER_H
73 Include/osrm/Reply.h Normal file
@ -0,0 +1,73 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef REPLY_H
#define REPLY_H

#include "Header.h"

#include <boost/asio.hpp>

#include <vector>

namespace http {

const char okHTML[] = "";
const char badRequestHTML[] = "<html><head><title>Bad Request</title></head><body><h1>400 Bad Request</h1></body></html>";
const char internalServerErrorHTML[] = "<html><head><title>Internal Server Error</title></head><body><h1>500 Internal Server Error</h1></body></html>";
const char seperators[] = { ':', ' ' };
const char crlf[] = { '\r', '\n' };
const std::string okString = "HTTP/1.0 200 OK\r\n";
const std::string badRequestString = "HTTP/1.0 400 Bad Request\r\n";
const std::string internalServerErrorString = "HTTP/1.0 500 Internal Server Error\r\n";

class Reply {
public:
    enum status_type {
        ok = 200,
        badRequest = 400,
        internalServerError = 500
    } status;

    std::vector<Header> headers;
    std::vector<boost::asio::const_buffer> toBuffers();
    std::vector<boost::asio::const_buffer> HeaderstoBuffers();
    std::vector<std::string> content;
    static Reply StockReply(status_type status);
    void setSize(const unsigned size);
    void SetUncompressedSize();

    Reply();
private:
    static std::string ToString(Reply::status_type status);
    boost::asio::const_buffer ToBuffer(Reply::status_type status);
};

}

#endif //REPLY_H
125 Include/osrm/RouteParameters.h Normal file
@ -0,0 +1,125 @@
/*

Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef ROUTE_PARAMETERS_H
#define ROUTE_PARAMETERS_H

#include <osrm/Coordinate.h>

#include <boost/fusion/container/vector.hpp>
#include <boost/fusion/sequence/intrinsic.hpp>
#include <boost/fusion/include/at_c.hpp>

#include <string>
#include <vector>

struct RouteParameters {
    RouteParameters() :
        zoomLevel(18),
        printInstructions(false),
        alternateRoute(true),
        geometry(true),
        compression(true),
        deprecatedAPI(false),
        checkSum(-1)
    { }

    short zoomLevel;
    bool printInstructions;
    bool alternateRoute;
    bool geometry;
    bool compression;
    bool deprecatedAPI;
    unsigned checkSum;
    std::string service;
    std::string outputFormat;
    std::string jsonpParameter;
    std::string language;
    std::vector<std::string> hints;
    std::vector<FixedPointCoordinate> coordinates;

    void setZoomLevel(const short i) {
        if (18 >= i && 0 <= i) {
            zoomLevel = i;
        }
    }

    void setAlternateRouteFlag(const bool b) {
        alternateRoute = b;
    }

    void setDeprecatedAPIFlag(const std::string &) {
        deprecatedAPI = true;
    }

    void setChecksum(const unsigned c) {
        checkSum = c;
    }

    void setInstructionFlag(const bool b) {
        printInstructions = b;
    }

    void setService( const std::string & s) {
        service = s;
    }

    void setOutputFormat(const std::string & s) {
        outputFormat = s;
    }

    void setJSONpParameter(const std::string & s) {
        jsonpParameter = s;
    }

    void addHint(const std::string & s) {
        hints.resize( coordinates.size() );
        if( !hints.empty() ) {
            hints.back() = s;
        }
    }

    void setLanguage(const std::string & s) {
        language = s;
    }

    void setGeometryFlag(const bool b) {
        geometry = b;
    }

    void setCompressionFlag(const bool b) {
        compression = b;
    }

    void addCoordinate(const boost::fusion::vector < double, double > & arg_) {
        int lat = COORDINATE_PRECISION*boost::fusion::at_c < 0 > (arg_);
        int lon = COORDINATE_PRECISION*boost::fusion::at_c < 1 > (arg_);
        coordinates.push_back(FixedPointCoordinate(lat, lon));
    }
};

#endif /*ROUTE_PARAMETERS_H*/
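Editor's note: RouteParameters is a plain value object filled by the HTTP query parser; addCoordinate takes a boost::fusion::vector<double, double> in degrees and converts it to the fixed-point representation via COORDINATE_PRECISION. The sketch below is an illustration only: it assumes the header above and Boost are on the include path and that the program is linked against OSRM's coordinate implementation (the FixedPointCoordinate constructor is not header-only); the waypoint values are made up.

    // Sketch: filling RouteParameters the way a query parser would.
    #include <osrm/RouteParameters.h>

    #include <boost/fusion/container/vector.hpp>

    #include <iostream>

    int main() {
        RouteParameters params;
        params.setService("viaroute");
        params.setZoomLevel(18);               // values outside 0..18 are ignored
        params.setInstructionFlag(true);

        // two waypoints, given in degrees and converted internally via COORDINATE_PRECISION
        params.addCoordinate(boost::fusion::vector<double, double>(52.517037, 13.388860));
        params.addCoordinate(boost::fusion::vector<double, double>(52.520079, 13.404954));
        params.addHint("");                    // one (empty) hint per coordinate

        std::cout << params.coordinates.size() << " coordinates, zoom "
                  << params.zoomLevel << std::endl;
    }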
@ -1,6 +1,6 @@
/*

Copyright (c) 2017, Project OSRM contributors
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
@ -25,20 +25,14 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef OSRM_ENGINE_APPROACH_HPP
#define OSRM_ENGINE_APPROACH_HPP
#ifndef SERVER_PATH_H
#define SERVER_PATH_H

#include <cstdint>
#include <boost/unordered_map.hpp>
#include <boost/filesystem.hpp>

namespace osrm::engine
{
#include <string>

enum class Approach : std::uint8_t
{
CURB = 0,
UNRESTRICTED = 1,
OPPOSITE = 2
typedef boost::unordered_map<const std::string, boost::filesystem::path> ServerPaths;

};
} // namespace osrm::engine
#endif
#endif //SERVER_PATH_H
@ -1,4 +1,4 @@
Copyright (c) 2017, Project OSRM contributors
Copyright (c) 2013, Project OSRM, Dennis Luxen, others
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
Some files were not shown because too many files have changed in this diff.