Compare commits
No commits in common. "master" and "v4.7.1" have entirely different histories.
7
.babelrc
7
.babelrc
@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"plugins": ["transform-class-properties"],
|
|
||||||
"presets": [
|
|
||||||
"@babel/preset-env",
|
|
||||||
"@babel/preset-react"
|
|
||||||
]
|
|
||||||
}
|
|
@ -2,89 +2,53 @@
|
|||||||
Language: Cpp
|
Language: Cpp
|
||||||
# BasedOnStyle: LLVM
|
# BasedOnStyle: LLVM
|
||||||
AccessModifierOffset: -2
|
AccessModifierOffset: -2
|
||||||
AlignAfterOpenBracket: Align
|
ConstructorInitializerIndentWidth: 4
|
||||||
AlignConsecutiveAssignments: false
|
|
||||||
AlignConsecutiveDeclarations: false
|
|
||||||
AlignEscapedNewlinesLeft: false
|
AlignEscapedNewlinesLeft: false
|
||||||
AlignOperands: true
|
|
||||||
AlignTrailingComments: true
|
AlignTrailingComments: true
|
||||||
AllowAllParametersOfDeclarationOnNextLine: true
|
AllowAllParametersOfDeclarationOnNextLine: true
|
||||||
AllowShortBlocksOnASingleLine: true
|
|
||||||
AllowShortCaseLabelsOnASingleLine: false
|
|
||||||
AllowShortFunctionsOnASingleLine: All
|
|
||||||
AllowShortIfStatementsOnASingleLine: false
|
AllowShortIfStatementsOnASingleLine: false
|
||||||
AllowShortLoopsOnASingleLine: false
|
AllowShortLoopsOnASingleLine: false
|
||||||
AlwaysBreakAfterDefinitionReturnType: None
|
AllowShortFunctionsOnASingleLine: true
|
||||||
AlwaysBreakAfterReturnType: None
|
|
||||||
AlwaysBreakBeforeMultilineStrings: false
|
|
||||||
AlwaysBreakTemplateDeclarations: false
|
AlwaysBreakTemplateDeclarations: false
|
||||||
BinPackArguments: false
|
AlwaysBreakBeforeMultilineStrings: false
|
||||||
BinPackParameters: false
|
|
||||||
BraceWrapping:
|
|
||||||
AfterClass: true
|
|
||||||
AfterControlStatement: true
|
|
||||||
AfterEnum: true
|
|
||||||
AfterFunction: true
|
|
||||||
AfterNamespace: true
|
|
||||||
AfterObjCDeclaration: true
|
|
||||||
AfterStruct: true
|
|
||||||
AfterUnion: true
|
|
||||||
BeforeCatch: true
|
|
||||||
BeforeElse: true
|
|
||||||
IndentBraces: true
|
|
||||||
BreakBeforeBinaryOperators: false
|
BreakBeforeBinaryOperators: false
|
||||||
BreakBeforeBraces: Allman
|
|
||||||
BreakBeforeTernaryOperators: true
|
BreakBeforeTernaryOperators: true
|
||||||
BreakConstructorInitializersBeforeComma: false
|
BreakConstructorInitializersBeforeComma: false
|
||||||
|
BinPackParameters: false
|
||||||
ColumnLimit: 100
|
ColumnLimit: 100
|
||||||
CommentPragmas: '^ IWYU pragma:'
|
|
||||||
ConstructorInitializerAllOnOneLineOrOnePerLine: false
|
ConstructorInitializerAllOnOneLineOrOnePerLine: false
|
||||||
ConstructorInitializerIndentWidth: 4
|
DerivePointerBinding: false
|
||||||
ContinuationIndentWidth: 4
|
|
||||||
Cpp11BracedListStyle: true
|
|
||||||
DerivePointerAlignment: false
|
|
||||||
DisableFormat: false
|
|
||||||
ExperimentalAutoDetectBinPacking: false
|
ExperimentalAutoDetectBinPacking: false
|
||||||
ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ]
|
|
||||||
IncludeCategories:
|
|
||||||
- Regex: '^<'
|
|
||||||
Priority: 3
|
|
||||||
- Regex: '^"(osrm|util|engine|extract|contract)/'
|
|
||||||
Priority: 2
|
|
||||||
- Regex: '.*'
|
|
||||||
Priority: 1
|
|
||||||
IndentCaseLabels: false
|
IndentCaseLabels: false
|
||||||
IndentWidth: 4
|
|
||||||
IndentWrappedFunctionNames: false
|
|
||||||
KeepEmptyLinesAtTheStartOfBlocks: true
|
|
||||||
MacroBlockBegin: ''
|
|
||||||
MacroBlockEnd: ''
|
|
||||||
MaxEmptyLinesToKeep: 1
|
MaxEmptyLinesToKeep: 1
|
||||||
|
KeepEmptyLinesAtTheStartOfBlocks: true
|
||||||
NamespaceIndentation: None
|
NamespaceIndentation: None
|
||||||
ObjCBlockIndentWidth: 2
|
|
||||||
ObjCSpaceAfterProperty: false
|
ObjCSpaceAfterProperty: false
|
||||||
ObjCSpaceBeforeProtocolList: true
|
ObjCSpaceBeforeProtocolList: true
|
||||||
PenaltyBreakBeforeFirstCallParameter: 19
|
PenaltyBreakBeforeFirstCallParameter: 19
|
||||||
PenaltyBreakComment: 300
|
PenaltyBreakComment: 300
|
||||||
PenaltyBreakFirstLessLess: 120
|
|
||||||
PenaltyBreakString: 1000
|
PenaltyBreakString: 1000
|
||||||
PenaltyExcessCharacter: 1000000
|
PenaltyBreakFirstLessLess: 120
|
||||||
|
PenaltyExcessCharacter: 1000
|
||||||
PenaltyReturnTypeOnItsOwnLine: 60
|
PenaltyReturnTypeOnItsOwnLine: 60
|
||||||
PointerAlignment: Right
|
PointerBindsToType: false
|
||||||
ReflowComments: true
|
|
||||||
SortIncludes: true
|
|
||||||
SpaceAfterCStyleCast: false
|
|
||||||
SpaceBeforeAssignmentOperators: true
|
|
||||||
SpaceBeforeParens: ControlStatements
|
|
||||||
SpaceInEmptyParentheses: false
|
|
||||||
SpacesBeforeTrailingComments: 1
|
SpacesBeforeTrailingComments: 1
|
||||||
SpacesInAngles: false
|
Cpp11BracedListStyle: true
|
||||||
SpacesInContainerLiterals: true
|
|
||||||
SpacesInCStyleCastParentheses: false
|
|
||||||
SpacesInParentheses: false
|
|
||||||
SpacesInSquareBrackets: false
|
|
||||||
Standard: Cpp11
|
Standard: Cpp11
|
||||||
|
IndentWidth: 4
|
||||||
TabWidth: 8
|
TabWidth: 8
|
||||||
UseTab: Never
|
UseTab: Never
|
||||||
|
BreakBeforeBraces: Allman
|
||||||
|
IndentFunctionDeclarationAfterType: false
|
||||||
|
SpacesInParentheses: false
|
||||||
|
SpacesInAngles: false
|
||||||
|
SpaceInEmptyParentheses: false
|
||||||
|
SpacesInCStyleCastParentheses: false
|
||||||
|
SpacesInContainerLiterals: true
|
||||||
|
SpaceBeforeAssignmentOperators: true
|
||||||
|
ContinuationIndentWidth: 4
|
||||||
|
CommentPragmas: '^ IWYU pragma:'
|
||||||
|
ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ]
|
||||||
|
SpaceBeforeParens: ControlStatements
|
||||||
...
|
...
|
||||||
|
|
||||||
|
101
.clang-tidy
101
.clang-tidy
@ -1,101 +0,0 @@
|
|||||||
---
|
|
||||||
Checks: >
|
|
||||||
bugprone-*,
|
|
||||||
-bugprone-narrowing-conversions,
|
|
||||||
-bugprone-easily-swappable-parameters,
|
|
||||||
-bugprone-branch-clone,
|
|
||||||
-bugprone-misplaced-widening-cast,
|
|
||||||
-bugprone-exception-escape,
|
|
||||||
-bugprone-implicit-widening-of-multiplication-result,
|
|
||||||
-bugprone-integer-division,
|
|
||||||
-bugprone-reserved-identifier,
|
|
||||||
-bugprone-unhandled-self-assignment,
|
|
||||||
-bugprone-forward-declaration-namespace,
|
|
||||||
-bugprone-sizeof-expression,
|
|
||||||
-bugprone-throw-keyword-missing,
|
|
||||||
-bugprone-chained-comparison,
|
|
||||||
-bugprone-incorrect-enable-if,
|
|
||||||
-bugprone-switch-missing-default-case,
|
|
||||||
-bugprone-empty-catch,
|
|
||||||
-bugprone-unchecked-optional-access,
|
|
||||||
-clang-analyzer-*,
|
|
||||||
-clang-diagnostic-deprecated-declarations,
|
|
||||||
-clang-diagnostic-constant-conversion,
|
|
||||||
cppcoreguidelines-avoid-goto,
|
|
||||||
cppcoreguidelines-no-malloc,
|
|
||||||
cppcoreguidelines-virtual-class-destructor,
|
|
||||||
google-*,
|
|
||||||
-google-build-explicit-make-pair,
|
|
||||||
-google-build-using-namespace,
|
|
||||||
-google-explicit-constructor,
|
|
||||||
-google-default-arguments,
|
|
||||||
-google-readability-braces-around-statements,
|
|
||||||
-google-readability-casting,
|
|
||||||
-google-readability-namespace-comments,
|
|
||||||
-google-readability-function,
|
|
||||||
-google-readability-todo,
|
|
||||||
-google-runtime-int,
|
|
||||||
-google-build-namespaces,
|
|
||||||
-google-runtime-references,
|
|
||||||
-google-readability-function-size,
|
|
||||||
llvm-*,
|
|
||||||
-llvm-namespace-comment,
|
|
||||||
-llvm-qualified-auto,
|
|
||||||
-llvm-include-order,
|
|
||||||
-llvm-else-after-return,
|
|
||||||
-llvm-header-guard,
|
|
||||||
-llvm-twine-local,
|
|
||||||
misc-*,
|
|
||||||
-misc-argument-comment,
|
|
||||||
-misc-const-correctness,
|
|
||||||
-misc-non-private-member-variables-in-classes,
|
|
||||||
-misc-unconventional-assign-operator,
|
|
||||||
-misc-no-recursion,
|
|
||||||
-misc-misplaced-const,
|
|
||||||
-misc-definitions-in-headers,
|
|
||||||
-misc-unused-parameters,
|
|
||||||
-misc-include-cleaner,
|
|
||||||
modernize-concat-nested-namespaces,
|
|
||||||
modernize-use-using,
|
|
||||||
performance-*,
|
|
||||||
-performance-no-int-to-ptr,
|
|
||||||
-performance-enum-size,
|
|
||||||
-performance-avoid-endl,
|
|
||||||
readability-*,
|
|
||||||
-readability-avoid-const-params-in-decls,
|
|
||||||
-readability-braces-around-statements,
|
|
||||||
-readability-container-size-empty,
|
|
||||||
-readability-convert-member-functions-to-static,
|
|
||||||
-readability-const-return-type,
|
|
||||||
-readability-function-cognitive-complexity,
|
|
||||||
-readability-function-size,
|
|
||||||
-readability-identifier-naming,
|
|
||||||
-readability-implicit-bool-conversion,
|
|
||||||
-readability-magic-numbers,
|
|
||||||
-readability-else-after-return,
|
|
||||||
-readability-inconsistent-declaration-parameter-name,
|
|
||||||
-readability-isolate-declaration,
|
|
||||||
-readability-identifier-length,
|
|
||||||
-readability-redundant-declaration,
|
|
||||||
-readability-uppercase-literal-suffix,
|
|
||||||
-readability-named-parameter,
|
|
||||||
-readability-qualified-auto,
|
|
||||||
-readability-suspicious-call-argument,
|
|
||||||
-readability-redundant-access-specifiers,
|
|
||||||
-readability-redundant-member-init,
|
|
||||||
-readability-static-definition-in-anonymous-namespace,
|
|
||||||
-readability-use-anyofallof,
|
|
||||||
-readability-simplify-boolean-expr,
|
|
||||||
-readability-make-member-function-const,
|
|
||||||
-readability-redundant-string-init,
|
|
||||||
-readability-non-const-parameter,
|
|
||||||
-readability-redundant-inline-specifier,
|
|
||||||
-readability-avoid-nested-conditional-operator,
|
|
||||||
-readability-avoid-return-with-void-value,
|
|
||||||
-readability-redundant-casting,
|
|
||||||
-readability-static-accessed-through-instance
|
|
||||||
|
|
||||||
WarningsAsErrors: '*'
|
|
||||||
HeaderFilterRegex: '.*'
|
|
||||||
|
|
||||||
|
|
14
.cncc.style
14
.cncc.style
@ -1,14 +0,0 @@
|
|||||||
# Kind-specific patterns to check AST nodes against. Both python-clang and
|
|
||||||
# libclang docs explain CursorKind, with differences in detail. See also:
|
|
||||||
# - https://github.com/llvm-mirror/clang/blob/aca4fe314a55cacae29e1548cb7bfd2119c6df4c/bindings/python/clang/cindex.py#L599
|
|
||||||
# - http://clang.llvm.org/doxygen/group__CINDEX.html#gaaccc432245b4cd9f2d470913f9ef0013
|
|
||||||
# - https://docs.python.org/2/library/re.html#regular-expression-syntax
|
|
||||||
|
|
||||||
class_decl: '^([A-Z]+[a-z]+)+$'
|
|
||||||
struct_decl: '^([A-Z]+[a-z]+)+$'
|
|
||||||
field_decl: '^[a-z_]+$'
|
|
||||||
var_decl: '^[a-z]+[a-z0-9_]*$'
|
|
||||||
parm_decl: '^[a-z]*[a-z0-9_]*$'
|
|
||||||
namespace: '^[a-z_]*$'
|
|
||||||
cxx_method: '^([A-Z]+[a-z]+)+$'
|
|
||||||
function_decl: '^[a-z]+([A-Z]+[a-z]+)*$'
|
|
@ -1,2 +0,0 @@
|
|||||||
test
|
|
||||||
build
|
|
@ -1,30 +0,0 @@
|
|||||||
# EditorConfig is awesome: http://EditorConfig.org
|
|
||||||
#
|
|
||||||
# NOTE: Keep settings in sync with the master .clang-format file
|
|
||||||
#
|
|
||||||
# top-most EditorConfig file
|
|
||||||
root = true
|
|
||||||
|
|
||||||
# CMake configuration files
|
|
||||||
[{CMakeLists.txt,CMakeSettings.json,*.cmake}]
|
|
||||||
indent_size = 2
|
|
||||||
indent_style = space
|
|
||||||
trim_trailing_whitespace = true
|
|
||||||
|
|
||||||
# CI configuration files
|
|
||||||
[{.travis.yml,appveyor.yml}]
|
|
||||||
indent_size = 2
|
|
||||||
indent_style = space
|
|
||||||
trim_trailing_whitespace = true
|
|
||||||
|
|
||||||
# Unix shell scripts
|
|
||||||
[*.sh]
|
|
||||||
end_of_line = lf
|
|
||||||
indent_style = space
|
|
||||||
trim_trailing_whitespace = true
|
|
||||||
|
|
||||||
# Windows shell scripts
|
|
||||||
[*.bat]
|
|
||||||
end_of_line = crlf
|
|
||||||
indent_style = space
|
|
||||||
trim_trailing_whitespace = true
|
|
@ -1,2 +0,0 @@
|
|||||||
features/support/flatbuffers.js
|
|
||||||
features/support/fbresult_generated.js
|
|
28
.eslintrc
28
.eslintrc
@ -1,28 +0,0 @@
|
|||||||
{
|
|
||||||
"rules": {
|
|
||||||
"indent": [
|
|
||||||
2,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"quotes": [
|
|
||||||
1,
|
|
||||||
"single"
|
|
||||||
],
|
|
||||||
"linebreak-style": [
|
|
||||||
2,
|
|
||||||
"unix"
|
|
||||||
],
|
|
||||||
"semi": [
|
|
||||||
2,
|
|
||||||
"always"
|
|
||||||
],
|
|
||||||
"no-console": [
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"env": {
|
|
||||||
"es6": true,
|
|
||||||
"node": true
|
|
||||||
},
|
|
||||||
"extends": "eslint:recommended"
|
|
||||||
}
|
|
18
.gitattributes
vendored
18
.gitattributes
vendored
@ -1,18 +0,0 @@
|
|||||||
# Set the default behavior, in case people don't have core.autocrlf set.
|
|
||||||
* text=auto
|
|
||||||
|
|
||||||
# Explicitly declare text files you want to always be normalized and converted
|
|
||||||
# to native line endings on checkout.
|
|
||||||
*.cpp text
|
|
||||||
*.hpp text
|
|
||||||
|
|
||||||
# Declare files that will always have CRLF line endings on checkout.
|
|
||||||
*.bat text eol=crlf
|
|
||||||
*.cmd text eol=crlf
|
|
||||||
*.ps1 text eol=crlf
|
|
||||||
|
|
||||||
# Declare files that will always have LF line endings on checkout.
|
|
||||||
*.sh text eol=lf
|
|
||||||
|
|
||||||
# https://eslint.org/docs/latest/rules/linebreak-style#using-this-rule-with-version-control-systems
|
|
||||||
*.js text eol=lf
|
|
@ -1,65 +0,0 @@
|
|||||||
name: Build and Publish Docker Image
|
|
||||||
|
|
||||||
on:
|
|
||||||
release:
|
|
||||||
types: [published, prereleased]
|
|
||||||
|
|
||||||
env:
|
|
||||||
IMAGE_NAME: openharbor/osrm-backend
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
publish:
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
docker-base-image: ["debian", "alpine"]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
|
|
||||||
- name: Docker meta
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v4
|
|
||||||
with:
|
|
||||||
images: ${{ env.IMAGE_NAME }}
|
|
||||||
|
|
||||||
- name: Docker meta - debug
|
|
||||||
id: metadebug
|
|
||||||
uses: docker/metadata-action@v4
|
|
||||||
with:
|
|
||||||
images: ${{ env.IMAGE_NAME }}
|
|
||||||
flavor: |
|
|
||||||
latest=true
|
|
||||||
suffix=-debug,onlatest=true
|
|
||||||
|
|
||||||
- name: Log in to DockerHub
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build and push debug image
|
|
||||||
uses: docker/build-push-action@v4
|
|
||||||
with:
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64,linux/arm64,linux/ppc64le,linux/riscv64
|
|
||||||
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
|
|
||||||
tags: ${{ steps.metadebug.outputs.tags }}
|
|
||||||
build-args: |
|
|
||||||
DOCKER_TAG=${{ join(steps.metadebug.outputs.tags) }}-${{ matrix.docker-base-image }}
|
|
||||||
|
|
||||||
- name: Build and push normal image
|
|
||||||
uses: docker/build-push-action@v4
|
|
||||||
with:
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64,linux/arm64,linux/ppc64le,linux/riscv64
|
|
||||||
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
|
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
|
||||||
build-args: |
|
|
||||||
DOCKER_TAG=${{ join(steps.meta.outputs.tags) }}-${{ matrix.docker-base-image }}
|
|
30
.github/ISSUE_TEMPLATE/bug_report.md
vendored
30
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -1,30 +0,0 @@
|
|||||||
---
|
|
||||||
name: Bug Report
|
|
||||||
about: Report issue with osrm-backend
|
|
||||||
labels: Bug Report
|
|
||||||
---
|
|
||||||
|
|
||||||
# Issue
|
|
||||||
|
|
||||||
Please describe the issue you are seeing with OSRM.
|
|
||||||
Images are a good way to illustrate your problem.
|
|
||||||
|
|
||||||
**Note**: If your issue relates to the demo site (https://map.project-osrm.org) or routing provided on openstreetmap.org, be aware that they use separate [profile settings](https://github.com/fossgis-routing-server/cbf-routing-profiles) from those provided by default in `osrm-backend`.
|
|
||||||
If your issue relates to the demo site or openstreetmap.org behaviour, please check these profiles first to see if they explain the behaviour before creating an issue here.
|
|
||||||
|
|
||||||
# Steps to reproduce
|
|
||||||
|
|
||||||
Please provide the steps required to reproduce your problem.
|
|
||||||
- `osrm-backend` version being used
|
|
||||||
- OSM extract that was processed
|
|
||||||
- Processing commands (e.g. CH vs MLD processing)
|
|
||||||
- Server queries
|
|
||||||
|
|
||||||
If you're reporting an issue with https://map.project-osrm.org, please provide a link to the problematic request.
|
|
||||||
|
|
||||||
# Specifications
|
|
||||||
|
|
||||||
Please provide details of your development environment.
|
|
||||||
- Library/dependency versions
|
|
||||||
- Operating system
|
|
||||||
- Hardware
|
|
10
.github/ISSUE_TEMPLATE/feature.md
vendored
10
.github/ISSUE_TEMPLATE/feature.md
vendored
@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature Request
|
|
||||||
about: Request a new feature in osrm-backend
|
|
||||||
labels: Feature Request
|
|
||||||
---
|
|
||||||
|
|
||||||
# Feature
|
|
||||||
|
|
||||||
Please describe the feature you would like to see in OSRM.
|
|
||||||
Images are often a good way to illustrate your requested feature.
|
|
19
.github/PULL_REQUEST_TEMPLATE.md
vendored
19
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -1,19 +0,0 @@
|
|||||||
# Issue
|
|
||||||
|
|
||||||
What issue is this PR targeting? If there is no issue that addresses the problem, please open a corresponding issue and link it here.
|
|
||||||
|
|
||||||
Please read our [documentation](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/releasing.md) on release and version management.
|
|
||||||
If your PR is still work in progress please attach the relevant label.
|
|
||||||
|
|
||||||
## Tasklist
|
|
||||||
|
|
||||||
- [ ] CHANGELOG.md entry ([How to write a changelog entry](http://keepachangelog.com/en/1.0.0/#how))
|
|
||||||
- [ ] update relevant [Wiki pages](https://github.com/Project-OSRM/osrm-backend/wiki)
|
|
||||||
- [ ] add tests (see [testing documentation](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/testing.md))
|
|
||||||
- [ ] review
|
|
||||||
- [ ] adjust for comments
|
|
||||||
- [ ] cherry pick to release branch
|
|
||||||
|
|
||||||
## Requirements / Relations
|
|
||||||
|
|
||||||
Link any requirements here. Other pull requests this PR is based on?
|
|
84
.github/workflows/osrm-backend-docker.yml
vendored
84
.github/workflows/osrm-backend-docker.yml
vendored
@ -1,84 +0,0 @@
|
|||||||
name: build and publish container image
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
- 'v*'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
publish:
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
docker-base-image: ["debian", "alpine"]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v1
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v1
|
|
||||||
|
|
||||||
- name: Docker meta
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v3
|
|
||||||
with:
|
|
||||||
images: ghcr.io/${{ github.repository }}
|
|
||||||
|
|
||||||
- name: Docker meta - debug
|
|
||||||
id: metadebug
|
|
||||||
uses: docker/metadata-action@v3
|
|
||||||
with:
|
|
||||||
images: ghcr.io/${{ github.repository }}
|
|
||||||
flavor: |
|
|
||||||
latest=true
|
|
||||||
suffix=-debug,onlatest=true
|
|
||||||
|
|
||||||
- name: Docker meta - assertions
|
|
||||||
id: metaassertions
|
|
||||||
uses: docker/metadata-action@v3
|
|
||||||
with:
|
|
||||||
images: ghcr.io/${{ github.repository }}
|
|
||||||
flavor: |
|
|
||||||
latest=true
|
|
||||||
suffix=-assertions,onlatest=true
|
|
||||||
|
|
||||||
- name: Log in to GitHub Docker Registry
|
|
||||||
uses: docker/login-action@v1
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.actor }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build container image - debug
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
|
|
||||||
tags: ${{ steps.metadebug.outputs.tags }}
|
|
||||||
build-args: |
|
|
||||||
DOCKER_TAG=${{ join(steps.metadebug.outputs.tags ) }}-${{ matrix.docker-base-image }}
|
|
||||||
|
|
||||||
|
|
||||||
- name: Build container image - assertions
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
|
|
||||||
tags: ${{ steps.metaassertions.outputs.tags }}
|
|
||||||
build-args: |
|
|
||||||
DOCKER_TAG=${{ join(steps.metaassertions.outputs.tags ) }}-${{ matrix.docker-base-image }}
|
|
||||||
|
|
||||||
# build and publish "normal" image as last to get it listed on top
|
|
||||||
- name: Build container image - normal
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
|
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
|
||||||
build-args: |
|
|
||||||
DOCKER_TAG=${{ join(steps.meta.outputs.tags ) }}-${{ matrix.docker-base-image }}
|
|
786
.github/workflows/osrm-backend.yml
vendored
786
.github/workflows/osrm-backend.yml
vendored
@ -1,786 +0,0 @@
|
|||||||
name: osrm-backend CI
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
tags:
|
|
||||||
- v[1-9]+.[0-9]+.[0-9]+
|
|
||||||
- v[1-9]+.[0-9]+.[0-9]+-[a-zA-Z]+.[0-9]+
|
|
||||||
- v[1-9]+.[0-9]+-[0-9a-zA-Z]+
|
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
|
|
||||||
env:
|
|
||||||
CCACHE_TEMPDIR: /tmp/.ccache-temp
|
|
||||||
CCACHE_COMPRESS: 1
|
|
||||||
CASHER_TIME_OUT: 599 # one second less than 10m to avoid 10m timeout error: https://github.com/Project-OSRM/osrm-backend/issues/2742
|
|
||||||
CMAKE_VERSION: 3.21.2
|
|
||||||
ENABLE_NODE_BINDINGS: "ON"
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
windows-release-node:
|
|
||||||
needs: format-taginfo-docs
|
|
||||||
runs-on: windows-2022
|
|
||||||
continue-on-error: false
|
|
||||||
env:
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- run: pip install "conan<2.0.0"
|
|
||||||
- run: conan --version
|
|
||||||
- run: cmake --version
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 18
|
|
||||||
- run: node --version
|
|
||||||
- run: npm --version
|
|
||||||
- name: Prepare environment
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
|
|
||||||
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
|
|
||||||
- run: npm install --ignore-scripts
|
|
||||||
- run: npm link --ignore-scripts
|
|
||||||
- name: Build
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir build
|
|
||||||
cd build
|
|
||||||
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
|
|
||||||
cmake --build . --config Release
|
|
||||||
|
|
||||||
# TODO: MSVC goes out of memory when building our tests
|
|
||||||
# - name: Run tests
|
|
||||||
# shell: bash
|
|
||||||
# run: |
|
|
||||||
# cd build
|
|
||||||
# cmake --build . --config Release --target tests
|
|
||||||
# # TODO: run tests
|
|
||||||
# - name: Run node tests
|
|
||||||
# shell: bash
|
|
||||||
# run: |
|
|
||||||
# ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
|
|
||||||
|
|
||||||
# mkdir -p test/data/ch
|
|
||||||
# cp test/data/monaco.osrm* test/data/ch/
|
|
||||||
# ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
|
|
||||||
|
|
||||||
# ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
|
|
||||||
# node test/nodejs/index.js
|
|
||||||
- name: Build Node package
|
|
||||||
shell: bash
|
|
||||||
run: ./scripts/ci/node_package.sh
|
|
||||||
- name: Publish Node package
|
|
||||||
if: ${{ env.PUBLISH == 'On' }}
|
|
||||||
uses: ncipollo/release-action@v1
|
|
||||||
with:
|
|
||||||
allowUpdates: true
|
|
||||||
artifactErrorsFailBuild: true
|
|
||||||
artifacts: build/stage/**/*.tar.gz
|
|
||||||
omitBody: true
|
|
||||||
omitBodyDuringUpdate: true
|
|
||||||
omitName: true
|
|
||||||
omitNameDuringUpdate: true
|
|
||||||
replacesArtifacts: true
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
format-taginfo-docs:
|
|
||||||
runs-on: ubuntu-22.04
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Use Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 18
|
|
||||||
- name: Enable Node.js cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ~/.npm
|
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-node-
|
|
||||||
- name: Prepare environment
|
|
||||||
run: |
|
|
||||||
npm ci --ignore-scripts
|
|
||||||
clang-format-15 --version
|
|
||||||
- name: Run checks
|
|
||||||
run: |
|
|
||||||
./scripts/check_taginfo.py taginfo.json profiles/car.lua
|
|
||||||
./scripts/format.sh && ./scripts/error_on_dirty.sh
|
|
||||||
node ./scripts/validate_changelog.js
|
|
||||||
npm run docs && ./scripts/error_on_dirty.sh
|
|
||||||
npm audit --production
|
|
||||||
|
|
||||||
docker-image-matrix:
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
docker-base-image: ["debian", "alpine"]
|
|
||||||
needs: format-taginfo-docs
|
|
||||||
runs-on: ubuntu-22.04
|
|
||||||
continue-on-error: false
|
|
||||||
steps:
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: Enable osm.pbf cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: berlin-latest.osm.pbf
|
|
||||||
key: v1-berlin-osm-pbf
|
|
||||||
restore-keys: |
|
|
||||||
v1-berlin-osm-pbf
|
|
||||||
- name: Docker build
|
|
||||||
run: |
|
|
||||||
docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
|
|
||||||
- name: Test Docker image
|
|
||||||
run: |
|
|
||||||
if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
|
|
||||||
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
|
|
||||||
fi
|
|
||||||
TAG=osrm-backend-local
|
|
||||||
# when `--memory-swap` value equals `--memory` it means container won't use swap
|
|
||||||
# see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
|
|
||||||
MEMORY_ARGS="--memory=1g --memory-swap=1g"
|
|
||||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
|
|
||||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
|
|
||||||
if [ ! -s "${PWD}/berlin-latest.geojson" ]
|
|
||||||
then
|
|
||||||
>&2 echo "No berlin-latest.geojson found"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
# removing `.osrm.nbg` to check that whole pipeline works without it
|
|
||||||
rm -rf "${PWD}/berlin-latest.osrm.nbg"
|
|
||||||
|
|
||||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
|
|
||||||
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
|
|
||||||
docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
|
|
||||||
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
|
|
||||||
docker stop osrm-container
|
|
||||||
|
|
||||||
build-test-publish:
|
|
||||||
needs: format-taginfo-docs
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- name: gcc-13-debug-cov
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Debug
|
|
||||||
CCOMPILER: gcc-13
|
|
||||||
CUCUMBER_TIMEOUT: 20000
|
|
||||||
CXXCOMPILER: g++-13
|
|
||||||
ENABLE_COVERAGE: ON
|
|
||||||
|
|
||||||
- name: clang-18-debug-asan-ubsan
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Debug
|
|
||||||
CCOMPILER: clang-18
|
|
||||||
CUCUMBER_TIMEOUT: 20000
|
|
||||||
CXXCOMPILER: clang++-18
|
|
||||||
ENABLE_SANITIZER: ON
|
|
||||||
TARGET_ARCH: x86_64-asan-ubsan
|
|
||||||
OSRM_CONNECTION_RETRIES: 10
|
|
||||||
OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5
|
|
||||||
|
|
||||||
- name: clang-18-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang-18
|
|
||||||
CXXCOMPILER: clang++-18
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_LTO: OFF
|
|
||||||
|
|
||||||
- name: clang-18-debug
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Debug
|
|
||||||
CCOMPILER: clang-18
|
|
||||||
CXXCOMPILER: clang++-18
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_LTO: OFF
|
|
||||||
|
|
||||||
- name: clang-18-debug-clang-tidy
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Debug
|
|
||||||
CCOMPILER: clang-18
|
|
||||||
CXXCOMPILER: clang++-18
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_CLANG_TIDY: ON
|
|
||||||
|
|
||||||
|
|
||||||
- name: clang-17-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang-17
|
|
||||||
CXXCOMPILER: clang++-17
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_LTO: OFF
|
|
||||||
|
|
||||||
- name: clang-16-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang-16
|
|
||||||
CXXCOMPILER: clang++-16
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_LTO: OFF
|
|
||||||
|
|
||||||
- name: conan-linux-debug-asan-ubsan
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang-18
|
|
||||||
CXXCOMPILER: clang++-18
|
|
||||||
ENABLE_CONAN: ON
|
|
||||||
ENABLE_SANITIZER: ON
|
|
||||||
ENABLE_LTO: OFF
|
|
||||||
|
|
||||||
- name: conan-linux-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 18
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang-18
|
|
||||||
CXXCOMPILER: clang++-18
|
|
||||||
ENABLE_CONAN: ON
|
|
||||||
ENABLE_LTO: OFF
|
|
||||||
|
|
||||||
- name: gcc-14-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: gcc-14
|
|
||||||
CXXCOMPILER: g++-14
|
|
||||||
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
|
|
||||||
|
|
||||||
- name: gcc-13-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: gcc-13
|
|
||||||
CXXCOMPILER: g++-13
|
|
||||||
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
|
|
||||||
|
|
||||||
- name: gcc-12-release
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-22.04
|
|
||||||
BUILD_TOOLS: ON
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: gcc-12
|
|
||||||
CXXCOMPILER: g++-12
|
|
||||||
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
|
|
||||||
|
|
||||||
- name: conan-linux-release-node
|
|
||||||
build_node_package: true
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang-16
|
|
||||||
CXXCOMPILER: clang++-16
|
|
||||||
ENABLE_CONAN: ON
|
|
||||||
NODE_PACKAGE_TESTS_ONLY: ON
|
|
||||||
|
|
||||||
- name: conan-linux-debug-node
|
|
||||||
build_node_package: true
|
|
||||||
continue-on-error: false
|
|
||||||
node: 20
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
BUILD_TYPE: Debug
|
|
||||||
CCOMPILER: clang-16
|
|
||||||
CXXCOMPILER: clang++-16
|
|
||||||
ENABLE_CONAN: ON
|
|
||||||
NODE_PACKAGE_TESTS_ONLY: ON
|
|
||||||
|
|
||||||
- name: conan-macos-x64-release-node
|
|
||||||
build_node_package: true
|
|
||||||
continue-on-error: true
|
|
||||||
node: 20
|
|
||||||
runs-on: macos-13 # x86_64
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang
|
|
||||||
CXXCOMPILER: clang++
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_ASSERTIONS: ON
|
|
||||||
ENABLE_CONAN: ON
|
|
||||||
|
|
||||||
- name: conan-macos-arm64-release-node
|
|
||||||
build_node_package: true
|
|
||||||
continue-on-error: true
|
|
||||||
node: 20
|
|
||||||
runs-on: macos-14 # arm64
|
|
||||||
BUILD_TYPE: Release
|
|
||||||
CCOMPILER: clang
|
|
||||||
CXXCOMPILER: clang++
|
|
||||||
CUCUMBER_TIMEOUT: 60000
|
|
||||||
ENABLE_ASSERTIONS: ON
|
|
||||||
ENABLE_CONAN: ON
|
|
||||||
|
|
||||||
name: ${{ matrix.name}}
|
|
||||||
continue-on-error: ${{ matrix.continue-on-error }}
|
|
||||||
runs-on: ${{ matrix.runs-on }}
|
|
||||||
env:
|
|
||||||
BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }}
|
|
||||||
BUILD_TYPE: ${{ matrix.BUILD_TYPE }}
|
|
||||||
BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }}
|
|
||||||
CCOMPILER: ${{ matrix.CCOMPILER }}
|
|
||||||
CFLAGS: ${{ matrix.CFLAGS }}
|
|
||||||
CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }}
|
|
||||||
CXXCOMPILER: ${{ matrix.CXXCOMPILER }}
|
|
||||||
CXXFLAGS: ${{ matrix.CXXFLAGS }}
|
|
||||||
ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }}
|
|
||||||
ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }}
|
|
||||||
ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }}
|
|
||||||
ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }}
|
|
||||||
ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }}
|
|
||||||
NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }}
|
|
||||||
TARGET_ARCH: ${{ matrix.TARGET_ARCH }}
|
|
||||||
OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }}
|
|
||||||
OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }}
|
|
||||||
ENABLE_LTO: ${{ matrix.ENABLE_LTO }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Build machine architecture
|
|
||||||
run: uname -m
|
|
||||||
- name: Use Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: ${{ matrix.node }}
|
|
||||||
- name: Enable Node.js cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ~/.npm
|
|
||||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-node-
|
|
||||||
- name: Enable compiler cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ~/.ccache
|
|
||||||
key: ccache-${{ matrix.name }}-${{ github.sha }}
|
|
||||||
restore-keys: |
|
|
||||||
ccache-${{ matrix.name }}-
|
|
||||||
- name: Enable Conan cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ~/.conan
|
|
||||||
key: v9-conan-${{ matrix.name }}-${{ github.sha }}
|
|
||||||
restore-keys: |
|
|
||||||
v9-conan-${{ matrix.name }}-
|
|
||||||
- name: Enable test cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ${{github.workspace}}/test/cache
|
|
||||||
key: v4-test-${{ matrix.name }}-${{ github.sha }}
|
|
||||||
restore-keys: |
|
|
||||||
v4-test-${{ matrix.name }}-
|
|
||||||
- name: Prepare environment
|
|
||||||
run: |
|
|
||||||
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
|
|
||||||
mkdir -p $HOME/.ccache
|
|
||||||
|
|
||||||
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
|
|
||||||
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
|
|
||||||
echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV
|
|
||||||
echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV
|
|
||||||
if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then
|
|
||||||
# We can only set this after checkout once we know the workspace directory
|
|
||||||
echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV
|
|
||||||
echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV
|
|
||||||
echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
|
||||||
echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV
|
|
||||||
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
|
|
||||||
echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
# See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041
|
|
||||||
# We need it to be able to access system folders while restoring cached Boost below
|
|
||||||
- name: Give tar root ownership
|
|
||||||
if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
|
|
||||||
run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar
|
|
||||||
- name: Cache Boost
|
|
||||||
if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
|
|
||||||
id: cache-boost
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
/usr/local/include/boost
|
|
||||||
/usr/local/lib/libboost*
|
|
||||||
key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }}
|
|
||||||
restore-keys: |
|
|
||||||
v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }}
|
|
||||||
|
|
||||||
- name: Install Boost
|
|
||||||
if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
|
|
||||||
run: |
|
|
||||||
BOOST_VERSION="1.85.0"
|
|
||||||
BOOST_VERSION_FLAVOR="${BOOST_VERSION}-b2-nodocs"
|
|
||||||
wget -q https://github.com/boostorg/boost/releases/download/boost-${BOOST_VERSION}/boost-${BOOST_VERSION_FLAVOR}.tar.gz
|
|
||||||
tar xzf boost-${BOOST_VERSION_FLAVOR}.tar.gz
|
|
||||||
cd boost-${BOOST_VERSION}
|
|
||||||
sudo ./bootstrap.sh
|
|
||||||
sudo ./b2 install
|
|
||||||
cd ..
|
|
||||||
sudo rm -rf boost-${BOOST_VERSION}*
|
|
||||||
|
|
||||||
- name: Install dev dependencies
|
|
||||||
run: |
|
|
||||||
python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages
|
|
||||||
|
|
||||||
# workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499
|
|
||||||
# and that's why CI cannot find conan executable installed above
|
|
||||||
if [[ "${RUNNER_OS}" == "macOS" ]]; then
|
|
||||||
echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH
|
|
||||||
fi
|
|
||||||
|
|
||||||
# ccache
|
|
||||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
|
||||||
sudo apt-get update -y && sudo apt-get install ccache
|
|
||||||
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
|
|
||||||
brew install ccache
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Linux dev packages
|
|
||||||
if [ "${ENABLE_CONAN}" != "ON" ]; then
|
|
||||||
sudo apt-get update -y
|
|
||||||
sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev
|
|
||||||
if [[ "${CCOMPILER}" != clang-* ]]; then
|
|
||||||
sudo apt-get install -y ${CXXCOMPILER}
|
|
||||||
fi
|
|
||||||
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
|
|
||||||
sudo apt-get install -y lcov
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# TBB
|
|
||||||
TBB_VERSION=2021.12.0
|
|
||||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
|
||||||
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz"
|
|
||||||
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
|
|
||||||
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz"
|
|
||||||
fi
|
|
||||||
wget --tries 5 ${TBB_URL} -O onetbb.tgz
|
|
||||||
tar zxvf onetbb.tgz
|
|
||||||
sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/
|
|
||||||
sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/
|
|
||||||
- name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18
|
|
||||||
if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }}
|
|
||||||
run: |
|
|
||||||
sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq
|
|
||||||
|
|
||||||
conan config init
|
|
||||||
yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml"
|
|
||||||
- name: Add Apple-clang 16 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Apple-clang 16
|
|
||||||
if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.runs-on == 'macos-14' }}
|
|
||||||
run: |
|
|
||||||
sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_darwin_arm64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq
|
|
||||||
|
|
||||||
conan config init
|
|
||||||
yq eval '.compiler.apple-clang.version += ["16.0"]' -i "$HOME/.conan/settings.yml"
|
|
||||||
- name: Prepare build
|
|
||||||
run: |
|
|
||||||
mkdir ${OSRM_BUILD_DIR}
|
|
||||||
ccache --max-size=256M
|
|
||||||
npm ci --ignore-scripts
|
|
||||||
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
|
|
||||||
lcov --directory . --zerocounters # clean cached files
|
|
||||||
fi
|
|
||||||
echo "CC=${CCOMPILER}" >> $GITHUB_ENV
|
|
||||||
echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV
|
|
||||||
if [[ "${RUNNER_OS}" == "macOS" ]]; then
|
|
||||||
# missing from GCC path, needed for conan builds of libiconv, for example.
|
|
||||||
sudo xcode-select --switch /Library/Developer/CommandLineTools
|
|
||||||
echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV
|
|
||||||
echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Build and install OSRM
|
|
||||||
run: |
|
|
||||||
echo "Using ${JOBS} jobs"
|
|
||||||
pushd ${OSRM_BUILD_DIR}
|
|
||||||
|
|
||||||
ccache --zero-stats
|
|
||||||
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
|
|
||||||
-DENABLE_CONAN=${ENABLE_CONAN:-OFF} \
|
|
||||||
-DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \
|
|
||||||
-DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \
|
|
||||||
-DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \
|
|
||||||
-DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \
|
|
||||||
-DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
|
|
||||||
-DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \
|
|
||||||
-DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \
|
|
||||||
-DENABLE_CCACHE=ON \
|
|
||||||
-DENABLE_LTO=${ENABLE_LTO:-ON} \
|
|
||||||
-DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR}
|
|
||||||
make --jobs=${JOBS}
|
|
||||||
|
|
||||||
if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then
|
|
||||||
make tests --jobs=${JOBS}
|
|
||||||
make benchmarks --jobs=${JOBS}
|
|
||||||
|
|
||||||
sudo make install
|
|
||||||
if [[ "${RUNNER_OS}" == "Linux" ]]; then
|
|
||||||
echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
popd
|
|
||||||
- name: Build example
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }}
|
|
||||||
run: |
|
|
||||||
mkdir example/build && pushd example/build
|
|
||||||
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE}
|
|
||||||
make --jobs=${JOBS}
|
|
||||||
popd
|
|
||||||
- name: Run all tests
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }}
|
|
||||||
run: |
|
|
||||||
make -C test/data benchmark
|
|
||||||
|
|
||||||
# macOS SIP strips the linker path. Reset this inside the running shell
|
|
||||||
export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }}
|
|
||||||
./example/build/osrm-example test/data/mld/monaco.osrm
|
|
||||||
|
|
||||||
# All tests assume to be run from the build directory
|
|
||||||
pushd ${OSRM_BUILD_DIR}
|
|
||||||
for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done
|
|
||||||
if [ -z "${ENABLE_SANITIZER}" ]; then
|
|
||||||
npm run nodejs-tests
|
|
||||||
fi
|
|
||||||
popd
|
|
||||||
npm test
|
|
||||||
|
|
||||||
- name: Use Node 18
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 18
|
|
||||||
- name: Run Node package tests on Node 18
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
|
||||||
run: |
|
|
||||||
node --version
|
|
||||||
npm run nodejs-tests
|
|
||||||
- name: Use Node 20
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
- name: Run Node package tests on Node 20
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
|
||||||
run: |
|
|
||||||
node --version
|
|
||||||
npm run nodejs-tests
|
|
||||||
- name: Use Node latest
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: latest
|
|
||||||
- name: Run Node package tests on Node-latest
|
|
||||||
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
|
|
||||||
run: |
|
|
||||||
node --version
|
|
||||||
npm run nodejs-tests
|
|
||||||
|
|
||||||
- name: Upload test logs
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
if: failure()
|
|
||||||
with:
|
|
||||||
name: logs
|
|
||||||
path: test/logs/
|
|
||||||
|
|
||||||
# - name: Generate code coverage
|
|
||||||
# if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
|
|
||||||
# run: |
|
|
||||||
# lcov --directory . --capture --output-file coverage.info # capture coverage info
|
|
||||||
# lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system
|
|
||||||
# lcov --list coverage.info #debug info
|
|
||||||
|
|
||||||
# # Uploading report to CodeCov
|
|
||||||
# - name: Upload code coverage
|
|
||||||
# if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
|
|
||||||
# uses: codecov/codecov-action@v4
|
|
||||||
# with:
|
|
||||||
# files: coverage.info
|
|
||||||
# name: codecov-osrm-backend
|
|
||||||
# fail_ci_if_error: true
|
|
||||||
# verbose: true
|
|
||||||
- name: Build Node package
|
|
||||||
if: ${{ matrix.build_node_package }}
|
|
||||||
run: ./scripts/ci/node_package.sh
|
|
||||||
- name: Publish Node package
|
|
||||||
if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }}
|
|
||||||
uses: ncipollo/release-action@v1
|
|
||||||
with:
|
|
||||||
allowUpdates: true
|
|
||||||
artifactErrorsFailBuild: true
|
|
||||||
artifacts: build/stage/**/*.tar.gz
|
|
||||||
omitBody: true
|
|
||||||
omitBodyDuringUpdate: true
|
|
||||||
omitName: true
|
|
||||||
omitNameDuringUpdate: true
|
|
||||||
replacesArtifacts: true
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- name: Show CCache statistics
|
|
||||||
run: |
|
|
||||||
ccache -p
|
|
||||||
ccache -s
|
|
||||||
|
|
||||||
benchmarks:
|
|
||||||
if: github.event_name == 'pull_request'
|
|
||||||
needs: [format-taginfo-docs]
|
|
||||||
runs-on: self-hosted
|
|
||||||
env:
|
|
||||||
CCOMPILER: clang-16
|
|
||||||
CXXCOMPILER: clang++-16
|
|
||||||
CC: clang-16
|
|
||||||
CXX: clang++-16
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
|
||||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
|
||||||
RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }}
|
|
||||||
steps:
|
|
||||||
- name: Checkout PR Branch
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ github.head_ref }}
|
|
||||||
path: pr
|
|
||||||
- name: Activate virtualenv
|
|
||||||
run: |
|
|
||||||
python3 -m venv .venv
|
|
||||||
source .venv/bin/activate
|
|
||||||
echo PATH=$PATH >> $GITHUB_ENV
|
|
||||||
pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4"
|
|
||||||
- name: Prepare data
|
|
||||||
run: |
|
|
||||||
if [ "$RUN_BIG_BENCHMARK" = "true" ]; then
|
|
||||||
rm -rf ~/data.osm.pbf
|
|
||||||
wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet
|
|
||||||
gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv
|
|
||||||
else
|
|
||||||
if [ ! -f "~/data.osm.pbf" ]; then
|
|
||||||
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf
|
|
||||||
else
|
|
||||||
echo "Using cached data.osm.pbf"
|
|
||||||
fi
|
|
||||||
gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv
|
|
||||||
fi
|
|
||||||
- name: Prepare environment
|
|
||||||
run: |
|
|
||||||
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
|
|
||||||
mkdir -p $HOME/.ccache
|
|
||||||
ccache --zero-stats
|
|
||||||
ccache --max-size=256M
|
|
||||||
- name: Checkout Base Branch
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.base.ref }}
|
|
||||||
path: base
|
|
||||||
- name: Build Base Branch
|
|
||||||
run: |
|
|
||||||
cd base
|
|
||||||
npm ci --ignore-scripts
|
|
||||||
cd ..
|
|
||||||
mkdir base/build
|
|
||||||
cd base/build
|
|
||||||
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
|
|
||||||
make -j$(nproc)
|
|
||||||
make -j$(nproc) benchmarks
|
|
||||||
cd ..
|
|
||||||
make -C test/data
|
|
||||||
- name: Build PR Branch
|
|
||||||
run: |
|
|
||||||
cd pr
|
|
||||||
npm ci --ignore-scripts
|
|
||||||
cd ..
|
|
||||||
mkdir -p pr/build
|
|
||||||
cd pr/build
|
|
||||||
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
|
|
||||||
make -j$(nproc)
|
|
||||||
make -j$(nproc) benchmarks
|
|
||||||
cd ..
|
|
||||||
make -C test/data
|
|
||||||
# we run benchmarks in tmpfs to avoid impact of disk IO
|
|
||||||
- name: Create folder for tmpfs
|
|
||||||
run: |
|
|
||||||
# if by any chance it was mounted before(e.g. due to previous job failed), unmount it
|
|
||||||
sudo umount ~/benchmarks | true
|
|
||||||
rm -rf ~/benchmarks
|
|
||||||
mkdir -p ~/benchmarks
|
|
||||||
# see https://llvm.org/docs/Benchmarking.html
|
|
||||||
- name: Run PR Benchmarks
|
|
||||||
run: |
|
|
||||||
sudo cset shield -c 2-3 -k on
|
|
||||||
sudo mount -t tmpfs -o size=4g none ~/benchmarks
|
|
||||||
cp -rf pr/build ~/benchmarks/build
|
|
||||||
cp -rf pr/lib ~/benchmarks/lib
|
|
||||||
mkdir -p ~/benchmarks/test
|
|
||||||
cp -rf pr/test/data ~/benchmarks/test/data
|
|
||||||
cp -rf pr/profiles ~/benchmarks/profiles
|
|
||||||
|
|
||||||
sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
|
|
||||||
sudo umount ~/benchmarks
|
|
||||||
sudo cset shield --reset
|
|
||||||
- name: Run Base Benchmarks
|
|
||||||
run: |
|
|
||||||
sudo cset shield -c 2-3 -k on
|
|
||||||
sudo mount -t tmpfs -o size=4g none ~/benchmarks
|
|
||||||
cp -rf base/build ~/benchmarks/build
|
|
||||||
cp -rf base/lib ~/benchmarks/lib
|
|
||||||
mkdir -p ~/benchmarks/test
|
|
||||||
cp -rf base/test/data ~/benchmarks/test/data
|
|
||||||
cp -rf base/profiles ~/benchmarks/profiles
|
|
||||||
|
|
||||||
# TODO: remove it when base branch will have this file at needed location
|
|
||||||
if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then
|
|
||||||
cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json
|
|
||||||
fi
|
|
||||||
# we intentionally use scripts from PR branch to be able to update them and see results in the same PR
|
|
||||||
sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
|
|
||||||
sudo umount ~/benchmarks
|
|
||||||
sudo cset shield --reset
|
|
||||||
- name: Post Benchmark Results
|
|
||||||
run: |
|
|
||||||
python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
|
|
||||||
- name: Show CCache statistics
|
|
||||||
run: |
|
|
||||||
ccache -p
|
|
||||||
ccache -s
|
|
||||||
|
|
||||||
ci-complete:
|
|
||||||
runs-on: ubuntu-22.04
|
|
||||||
needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks]
|
|
||||||
steps:
|
|
||||||
- run: echo "CI complete"
|
|
||||||
|
|
29
.github/workflows/stale.yml
vendored
29
.github/workflows/stale.yml
vendored
@ -1,29 +0,0 @@
|
|||||||
name: 'Close stale issues'
|
|
||||||
on:
|
|
||||||
# NOTE: uncomment if you want to test changes to this file in PRs CI
|
|
||||||
# pull_request:
|
|
||||||
# branches:
|
|
||||||
# - master
|
|
||||||
schedule:
|
|
||||||
- cron: '30 1 * * *' # every day at 1:30am
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
stale:
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
steps:
|
|
||||||
- uses: actions/stale@v9
|
|
||||||
with:
|
|
||||||
operations-per-run: 3000
|
|
||||||
stale-issue-message: 'This issue seems to be stale. It will be closed in 30 days if no further activity occurs.'
|
|
||||||
stale-pr-message: 'This PR seems to be stale. Is it still relevant?'
|
|
||||||
days-before-issue-stale: 180 # 6 months
|
|
||||||
days-before-issue-close: 30 # 1 month
|
|
||||||
days-before-pr-stale: 180 # 6 months
|
|
||||||
days-before-pr-close: -1 # never close PRs
|
|
||||||
exempt-issue-labels: 'Do Not Stale,Feature Request,Performance,Bug Report,CI,Starter Task,Refactor,Guidance'
|
|
||||||
|
|
||||||
|
|
||||||
|
|
39
.gitignore
vendored
39
.gitignore
vendored
@ -2,8 +2,6 @@
|
|||||||
#############################
|
#############################
|
||||||
osrm-deps
|
osrm-deps
|
||||||
|
|
||||||
.ycm_extra_conf.py
|
|
||||||
|
|
||||||
# Compiled source #
|
# Compiled source #
|
||||||
###################
|
###################
|
||||||
*.com
|
*.com
|
||||||
@ -41,12 +39,9 @@ Thumbs.db
|
|||||||
|
|
||||||
# build related files #
|
# build related files #
|
||||||
#######################
|
#######################
|
||||||
/_build*
|
|
||||||
/build/
|
/build/
|
||||||
/example/build/
|
/util/fingerprint_impl.hpp
|
||||||
/test/data/monaco.osrm*
|
/util/git_sha.cpp
|
||||||
/test/data/ch
|
|
||||||
/test/data/mld
|
|
||||||
/cmake/postinst
|
/cmake/postinst
|
||||||
|
|
||||||
# Eclipse related files #
|
# Eclipse related files #
|
||||||
@ -56,16 +51,11 @@ Thumbs.db
|
|||||||
.cproject
|
.cproject
|
||||||
.project
|
.project
|
||||||
|
|
||||||
# Visual Studio (Code) related files #
|
# stxxl related files #
|
||||||
######################################
|
#######################
|
||||||
/.vs*
|
.stxxl
|
||||||
/*.local.bat
|
stxxl.log
|
||||||
/CMakeSettings.json
|
stxxl.errlog
|
||||||
/.cache
|
|
||||||
|
|
||||||
# Jetbrains related files #
|
|
||||||
###########################
|
|
||||||
.idea/
|
|
||||||
|
|
||||||
# Compiled Binary Files #
|
# Compiled Binary Files #
|
||||||
####################################
|
####################################
|
||||||
@ -85,23 +75,8 @@ Thumbs.db
|
|||||||
###################
|
###################
|
||||||
/sandbox/
|
/sandbox/
|
||||||
|
|
||||||
# Test related files #
|
|
||||||
######################
|
|
||||||
/test/profile.lua
|
/test/profile.lua
|
||||||
/test/cache
|
|
||||||
/test/speeds.csv
|
|
||||||
/test/penalties.csv
|
|
||||||
node_modules
|
|
||||||
|
|
||||||
# Deprecated config file #
|
# Deprecated config file #
|
||||||
##########################
|
##########################
|
||||||
/server.ini
|
/server.ini
|
||||||
|
|
||||||
*.swp
|
|
||||||
|
|
||||||
# local lua debugging file
|
|
||||||
debug.lua
|
|
||||||
|
|
||||||
# node-osrm artifacts
|
|
||||||
lib/binding
|
|
||||||
|
|
||||||
|
16
.npmignore
16
.npmignore
@ -1,16 +0,0 @@
|
|||||||
*
|
|
||||||
!README.md
|
|
||||||
!CHANGELOG.md
|
|
||||||
!CONTRIBUTING.MD
|
|
||||||
!LICENCE.TXT
|
|
||||||
!package.json
|
|
||||||
!package-lock.json
|
|
||||||
!yarn.lock
|
|
||||||
!docs
|
|
||||||
!example
|
|
||||||
!taginfo.json
|
|
||||||
!lib/*.js
|
|
||||||
!profiles/*
|
|
||||||
!profiles/lib/*
|
|
||||||
!profiles/examples/*
|
|
||||||
!scripts/node_install.sh
|
|
65
.travis.yml
Normal file
65
.travis.yml
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
language: cpp
|
||||||
|
compiler:
|
||||||
|
- gcc
|
||||||
|
# - clang
|
||||||
|
# Make sure CMake is installed
|
||||||
|
install:
|
||||||
|
- sudo apt-add-repository -y ppa:ubuntu-toolchain-r/test
|
||||||
|
- sudo add-apt-repository -y ppa:boost-latest/ppa
|
||||||
|
- sudo apt-get update >/dev/null
|
||||||
|
- sudo apt-get -q install protobuf-compiler libprotoc-dev libprotobuf7 libprotobuf-dev libbz2-dev libstxxl-dev libstxxl1 libxml2-dev libzip-dev lua5.1 liblua5.1-0-dev rubygems libtbb-dev
|
||||||
|
- sudo apt-get -q install g++-4.8
|
||||||
|
- sudo apt-get install libboost1.54-all-dev
|
||||||
|
- sudo apt-get install libgdal-dev
|
||||||
|
# luabind
|
||||||
|
- curl https://gist.githubusercontent.com/DennisOSRM/f2eb7b948e6fe1ae319e/raw/install-luabind.sh | sudo bash
|
||||||
|
# osmosis
|
||||||
|
- curl -s https://gist.githubusercontent.com/DennisOSRM/803a64a9178ec375069f/raw/ | sudo bash
|
||||||
|
# cmake
|
||||||
|
- curl -s https://gist.githubusercontent.com/DennisOSRM/5fad9bee5c7f09fd7fc9/raw/ | sudo bash
|
||||||
|
# osmpbf library
|
||||||
|
- curl -s https://gist.githubusercontent.com/DennisOSRM/13b1b4fe38a57ead850e/raw/install_osmpbf.sh | sudo bash
|
||||||
|
before_script:
|
||||||
|
- rvm use 1.9.3
|
||||||
|
- gem install bundler
|
||||||
|
- bundle install
|
||||||
|
- mkdir build
|
||||||
|
- cd build
|
||||||
|
- cmake .. $CMAKEOPTIONS -DBUILD_TOOLS=1
|
||||||
|
script:
|
||||||
|
- make
|
||||||
|
- make tests
|
||||||
|
- make benchmarks
|
||||||
|
- ./algorithm-tests
|
||||||
|
- ./datastructure-tests
|
||||||
|
- cd ..
|
||||||
|
- cucumber -p verify
|
||||||
|
after_script:
|
||||||
|
# - cd ..
|
||||||
|
# - cucumber -p verify
|
||||||
|
branches:
|
||||||
|
only:
|
||||||
|
- master
|
||||||
|
- develop
|
||||||
|
cache:
|
||||||
|
- bundler
|
||||||
|
- apt
|
||||||
|
env:
|
||||||
|
- CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=g++-4.8" OSRM_PORT=5000 OSRM_TIMEOUT=60
|
||||||
|
- CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_COMPILER=g++-4.8" OSRM_PORT=5010 OSRM_TIMEOUT=60
|
||||||
|
- CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON -DCMAKE_CXX_COMPILER=g++-4.8" OSRM_PORT=5020 OSRM_TIMEOUT=60
|
||||||
|
notifications:
|
||||||
|
slack: mapbox:4A6euphDwfxAQnhLurXbu6A1
|
||||||
|
irc:
|
||||||
|
channels:
|
||||||
|
- irc.oftc.net#osrm
|
||||||
|
on_success: change
|
||||||
|
on_failure: always
|
||||||
|
use_notice: true
|
||||||
|
skip_join: false
|
||||||
|
|
||||||
|
recipients:
|
||||||
|
- patrick@mapbox.com
|
||||||
|
email:
|
||||||
|
on_success: change
|
||||||
|
on_failure: always
|
1102
CHANGELOG.md
1102
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
951
CMakeLists.txt
951
CMakeLists.txt
File diff suppressed because it is too large
Load Diff
@ -1,3 +0,0 @@
|
|||||||
# Code of conduct
|
|
||||||
|
|
||||||
Everyone is invited to participate in Project OSRM’s open source projects and public discussions: we want to create a welcoming and friendly environment. Harassment of participants or other unethical and unprofessional behavior will not be tolerated in our spaces. The [Contributor Covenant](http://contributor-covenant.org) applies to all projects under the Project-OSRM organization and we ask that you please read [the full text](http://contributor-covenant.org/version/1/2/0/).
|
|
@ -1,81 +0,0 @@
|
|||||||
# Everyone
|
|
||||||
|
|
||||||
Please take some time to review our [code of conduct](CODE-OF-CONDUCT.md) to help guide your interactions with others on this project.
|
|
||||||
|
|
||||||
# User
|
|
||||||
|
|
||||||
Before you open a new issue, please search for older ones that cover the same issue.
|
|
||||||
In general "me too" comments/issues are frowned upon.
|
|
||||||
You can add a :+1: emoji reaction to the issue if you want to express interest in this.
|
|
||||||
|
|
||||||
# Developer
|
|
||||||
|
|
||||||
We use `clang-format` version `15` to consistently format the code base. There is a helper script under `scripts/format.sh`.
|
|
||||||
The format is automatically checked by the `mason-linux-release` job of a Travis CI build.
|
|
||||||
To save development time a local hook `.git/hooks/pre-push`
|
|
||||||
```
|
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
remote="$1"
|
|
||||||
if [ x"$remote" = xorigin ] ; then
|
|
||||||
if [ $(git rev-parse --abbrev-ref HEAD) = master ] ; then
|
|
||||||
echo "Rejected push to $remote/master" ; exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
./scripts/format.sh && ./scripts/error_on_dirty.sh
|
|
||||||
if [ $? -ne 0 ] ; then
|
|
||||||
echo "Unstaged format changes" ; exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
```
|
|
||||||
could check code format, modify a local repository and reject push due to unstaged formatting changes.
|
|
||||||
Also `pre-push` hook rejects direct pushes to `origin/master`.
|
|
||||||
|
|
||||||
⚠️ `scripts/format.sh` checks all local files that match `*.cpp` or `*.hpp` patterns.
|
|
||||||
|
|
||||||
|
|
||||||
In general changes that affect the API and/or increase the memory consumption need to be discussed first.
|
|
||||||
Often we don't include changes that would increase the memory consumption a lot if they are not generally usable (e.g. elevation data is a good example).
|
|
||||||
|
|
||||||
## Pull Request
|
|
||||||
|
|
||||||
Every pull-request that changes the API needs to update the docs in `docs/http.md` and add an entry to `CHANGELOG.md`.
|
|
||||||
Breaking changes need to have a BREAKING prefix. See the [releasing documentation](docs/releasing.md) on how this affects the version.
|
|
||||||
|
|
||||||
Early feedback is also important.
|
|
||||||
You will see that a lot of the PR have tags like `[not ready]` or `[wip]`.
|
|
||||||
We like to open PRs as soon as we are starting to work on something to make it visible to the rest of the team.
|
|
||||||
If your work is going in entirely the wrong direction, there is a good chance someone will pick up on this before it is too late.
|
|
||||||
Everyone is encouraged to read PRs of other people and give feedback.
|
|
||||||
|
|
||||||
For every significant code change we require a pull request review before it is merged.
|
|
||||||
If your pull request modifies the API this need to be signed of by a team discussion.
|
|
||||||
This means you will need to find another member of the team with commit access and request a review of your pull request.
|
|
||||||
|
|
||||||
Once your pull request is reviewed you can merge it! If you don't have commit access, ping someone that has commit access.
|
|
||||||
If you do have commit access there are in general two accepted styles to merging:
|
|
||||||
|
|
||||||
1. Make sure the branch is up to date with `master`. Run `git rebase master` to find out.
|
|
||||||
2. Once that is ensured you can either:
|
|
||||||
- Click the nice green merge button (for a non-fast-forward merge)
|
|
||||||
- Merge by hand using a fast-forward merge
|
|
||||||
|
|
||||||
Which merge you prefer is up to personal preference. In general it is recommended to use fast-forward merges because it creates a history that is sequential and easier to understand.
|
|
||||||
|
|
||||||
# Maintainer
|
|
||||||
|
|
||||||
## Doing a release
|
|
||||||
|
|
||||||
There is an in-depth guide around how to push out a release once it is ready [here](docs/releasing.md).
|
|
||||||
|
|
||||||
## The API
|
|
||||||
|
|
||||||
Changes to the API need to be discussed and signed off by the team. Breaking changes even more so than additive changes.
|
|
||||||
|
|
||||||
## Milestones
|
|
||||||
|
|
||||||
If a pull request or an issue is applicable for the current or next milestone, depends on the target version number.
|
|
||||||
Since we use semantic versioning we restrict breaking changes to major releases.
|
|
||||||
After a Release Candidate is released we usually don't change the API anymore if it is not critical.
|
|
||||||
Bigger code changes after a RC was released should also be avoided.
|
|
||||||
|
|
44
Doxyfile.in
44
Doxyfile.in
@ -1,44 +0,0 @@
|
|||||||
PROJECT_NAME = "Project OSRM"
|
|
||||||
PROJECT_BRIEF = "Open Source Routing Machine"
|
|
||||||
BUILTIN_STL_SUPPORT = YES
|
|
||||||
|
|
||||||
EXTRACT_ALL = YES
|
|
||||||
EXTRACT_PRIVATE = YES
|
|
||||||
EXTRACT_PACKAGE = YES
|
|
||||||
EXTRACT_STATIC = YES
|
|
||||||
EXTRACT_LOCAL_CLASSES = YES
|
|
||||||
EXTRACT_ANON_NSPACES = YES
|
|
||||||
|
|
||||||
QUIET = YES
|
|
||||||
|
|
||||||
INPUT = @CMAKE_CURRENT_SOURCE_DIR@
|
|
||||||
USE_MDFILE_AS_MAINPAGE = @CMAKE_CURRENT_SOURCE_DIR@/README.md
|
|
||||||
FILE_PATTERNS = *.h *.hpp *.c *.cc *.cpp *.md
|
|
||||||
RECURSIVE = YES
|
|
||||||
|
|
||||||
EXCLUDE = @CMAKE_CURRENT_SOURCE_DIR@/third_party \
|
|
||||||
@CMAKE_CURRENT_SOURCE_DIR@/build \
|
|
||||||
@CMAKE_CURRENT_SOURCE_DIR@/node_modules \
|
|
||||||
@CMAKE_CURRENT_SOURCE_DIR@/unit_tests \
|
|
||||||
@CMAKE_CURRENT_SOURCE_DIR@/benchmarks \
|
|
||||||
@CMAKE_CURRENT_SOURCE_DIR@/features
|
|
||||||
|
|
||||||
SOURCE_BROWSER = YES
|
|
||||||
|
|
||||||
CLANG_ASSISTED_PARSING = NO
|
|
||||||
|
|
||||||
HTML_COLORSTYLE_HUE = 217
|
|
||||||
HTML_COLORSTYLE_SAT = 71
|
|
||||||
HTML_COLORSTYLE_GAMMA = 50
|
|
||||||
|
|
||||||
GENERATE_TREEVIEW = YES
|
|
||||||
|
|
||||||
HAVE_DOT = @DOXYGEN_DOT_FOUND@
|
|
||||||
CALL_GRAPH = YES
|
|
||||||
CALLER_GRAPH = YES
|
|
||||||
|
|
||||||
DOT_IMAGE_FORMAT = svg
|
|
||||||
INTERACTIVE_SVG = YES
|
|
||||||
DOT_GRAPH_MAX_NODES = 500
|
|
||||||
DOT_TRANSPARENT = YES
|
|
||||||
DOT_MULTI_TARGETS = YES
|
|
7
Gemfile
Normal file
7
Gemfile
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
source "http://rubygems.org"
|
||||||
|
|
||||||
|
gem "cucumber"
|
||||||
|
gem "rake"
|
||||||
|
gem "osmlib-base"
|
||||||
|
gem "sys-proctable"
|
||||||
|
gem "rspec-expectations"
|
35
Gemfile.lock
Normal file
35
Gemfile.lock
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
GEM
|
||||||
|
remote: http://rubygems.org/
|
||||||
|
specs:
|
||||||
|
builder (3.2.2)
|
||||||
|
cucumber (2.0.0)
|
||||||
|
builder (>= 2.1.2)
|
||||||
|
cucumber-core (~> 1.1.3)
|
||||||
|
diff-lcs (>= 1.1.3)
|
||||||
|
gherkin (~> 2.12)
|
||||||
|
multi_json (>= 1.7.5, < 2.0)
|
||||||
|
multi_test (>= 0.1.2)
|
||||||
|
cucumber-core (1.1.3)
|
||||||
|
gherkin (~> 2.12.0)
|
||||||
|
diff-lcs (1.2.5)
|
||||||
|
gherkin (2.12.2)
|
||||||
|
multi_json (~> 1.3)
|
||||||
|
multi_json (1.11.0)
|
||||||
|
multi_test (0.1.2)
|
||||||
|
osmlib-base (0.1.4)
|
||||||
|
rake (10.4.2)
|
||||||
|
rspec-expectations (3.2.1)
|
||||||
|
diff-lcs (>= 1.2.0, < 2.0)
|
||||||
|
rspec-support (~> 3.2.0)
|
||||||
|
rspec-support (3.2.2)
|
||||||
|
sys-proctable (0.9.8)
|
||||||
|
|
||||||
|
PLATFORMS
|
||||||
|
ruby
|
||||||
|
|
||||||
|
DEPENDENCIES
|
||||||
|
cucumber
|
||||||
|
osmlib-base
|
||||||
|
rake
|
||||||
|
rspec-expectations
|
||||||
|
sys-proctable
|
@ -1,4 +1,4 @@
|
|||||||
Copyright (c) 2017, Project OSRM contributors
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
All rights reserved.
|
All rights reserved.
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
Redistribution and use in source and binary forms, with or without modification,
|
173
README.md
173
README.md
@ -1,171 +1,25 @@
|
|||||||
## Open Source Routing Machine
|
## About
|
||||||
|
|
||||||
|
The Open Source Routing Machine is a high performance routing engine written in C++11 designed to run on OpenStreetMap data.
|
||||||
|
|
||||||
[](https://github.com/Project-OSRM/osrm-backend/actions/workflows/osrm-backend.yml) [](https://codecov.io/gh/Project-OSRM/osrm-backend) [](https://discord.gg/es9CdcCXcb)
|
## Current build status
|
||||||
|
|
||||||
High performance routing engine written in C++ designed to run on OpenStreetMap data.
|
| build config | branch | status |
|
||||||
|
|:-------------|:--------|:------------|
|
||||||
|
| Linux | master | [](https://travis-ci.org/Project-OSRM/osrm-backend) |
|
||||||
|
| Linux | develop | [](https://travis-ci.org/Project-OSRM/osrm-backend) |
|
||||||
|
| Windows | master/develop | [](https://ci.appveyor.com/project/DennisOSRM/osrm-backend) |
|
||||||
|
| LUAbind fork | master | [](https://travis-ci.org/DennisOSRM/luabind) |
|
||||||
|
|
||||||
The following services are available via HTTP API, C++ library interface and NodeJs wrapper:
|
## Building
|
||||||
- Nearest - Snaps coordinates to the street network and returns the nearest matches
|
|
||||||
- Route - Finds the fastest route between coordinates
|
|
||||||
- Table - Computes the duration or distances of the fastest route between all pairs of supplied coordinates
|
|
||||||
- Match - Snaps noisy GPS traces to the road network in the most plausible way
|
|
||||||
- Trip - Solves the Traveling Salesman Problem using a greedy heuristic
|
|
||||||
- Tile - Generates Mapbox Vector Tiles with internal routing metadata
|
|
||||||
|
|
||||||
To quickly try OSRM use our [demo server](http://map.project-osrm.org) which comes with both the backend and a frontend on top.
|
For instructions on how to [build](https://github.com/Project-OSRM/osrm-backend/wiki/Building-OSRM) and [run OSRM](https://github.com/Project-OSRM/osrm-backend/wiki/Running-OSRM), please consult [the Wiki](https://github.com/Project-OSRM/osrm-backend/wiki).
|
||||||
|
|
||||||
For a quick introduction about how the road network is represented in OpenStreetMap and how to map specific road network features have a look at [the OSM wiki on routing](https://wiki.openstreetmap.org/wiki/Routing) or [this guide about mapping for navigation](https://web.archive.org/web/20221206013651/https://labs.mapbox.com/mapping/mapping-for-navigation/).
|
To quickly try OSRM use our [free and daily updated online service](http://map.project-osrm.org)
|
||||||
|
|
||||||
Related [Project-OSRM](https://github.com/Project-OSRM) repositories:
|
|
||||||
- [osrm-frontend](https://github.com/Project-OSRM/osrm-frontend) - User-facing frontend with map. The demo server runs this on top of the backend
|
|
||||||
- [osrm-text-instructions](https://github.com/Project-OSRM/osrm-text-instructions) - Text instructions from OSRM route response
|
|
||||||
- [osrm-backend-docker](https://github.com/project-osrm/osrm-backend/pkgs/container/osrm-backend) - Ready to use Docker images
|
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
### Full documentation
|
See the Wiki's [server API documentation](https://github.com/Project-OSRM/osrm-backend/wiki/Server-api) as well as the [library API documentation](https://github.com/Project-OSRM/osrm-backend/wiki/Library-api)
|
||||||
|
|
||||||
- [Hosted documentation](http://project-osrm.org)
|
|
||||||
- [osrm-routed HTTP API documentation](docs/http.md)
|
|
||||||
- [libosrm API documentation](docs/libosrm.md)
|
|
||||||
|
|
||||||
## Contact
|
|
||||||
|
|
||||||
- Discord: [join](https://discord.gg/es9CdcCXcb)
|
|
||||||
- IRC: `irc.oftc.net`, channel: `#osrm` ([Webchat](https://webchat.oftc.net))
|
|
||||||
- Mailinglist: `https://lists.openstreetmap.org/listinfo/osrm-talk`
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
The easiest and quickest way to setup your own routing engine is to use Docker images we provide.
|
|
||||||
|
|
||||||
There are two pre-processing pipelines available:
|
|
||||||
- Contraction Hierarchies (CH)
|
|
||||||
- Multi-Level Dijkstra (MLD)
|
|
||||||
|
|
||||||
we recommend using MLD by default except for special use-cases such as very large distance matrices where CH is still a better fit for the time being.
|
|
||||||
In the following we explain the MLD pipeline.
|
|
||||||
If you want to use the CH pipeline instead replace `osrm-partition` and `osrm-customize` with a single `osrm-contract` and change the algorithm option for `osrm-routed` to `--algorithm ch`.
|
|
||||||
|
|
||||||
### Using Docker
|
|
||||||
|
|
||||||
We base our Docker images ([backend](https://github.com/Project-OSRM/osrm-backend/pkgs/container/osrm-backend), [frontend](https://hub.docker.com/r/osrm/osrm-frontend/)) on Debian and make sure they are as lightweight as possible. Older backend versions can be found on [Docker Hub](https://hub.docker.com/r/osrm/osrm-backend/).
|
|
||||||
|
|
||||||
Download OpenStreetMap extracts for example from [Geofabrik](http://download.geofabrik.de/)
|
|
||||||
|
|
||||||
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
|
|
||||||
|
|
||||||
Pre-process the extract with the car profile and start a routing engine HTTP server on port 5000
|
|
||||||
|
|
||||||
docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-extract -p /opt/car.lua /data/berlin-latest.osm.pbf || echo "osrm-extract failed"
|
|
||||||
|
|
||||||
The flag `-v "${PWD}:/data"` creates the directory `/data` inside the docker container and makes the current working directory `"${PWD}"` available there. The file `/data/berlin-latest.osm.pbf` inside the container is referring to `"${PWD}/berlin-latest.osm.pbf"` on the host.
|
|
||||||
|
|
||||||
docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-partition /data/berlin-latest.osrm || echo "osrm-partition failed"
|
|
||||||
docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-customize /data/berlin-latest.osrm || echo "osrm-customize failed"
|
|
||||||
|
|
||||||
Note there is no `berlin-latest.osrm` file, but multiple `berlin-latest.osrm.*` files, i.e. `berlin-latest.osrm` is not file path, but "base" path referring to set of files and there is an option to omit this `.osrm` suffix completely(e.g. `osrm-partition /data/berlin-latest`).
|
|
||||||
|
|
||||||
docker run -t -i -p 5000:5000 -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-routed --algorithm mld /data/berlin-latest.osrm
|
|
||||||
|
|
||||||
Make requests against the HTTP server
|
|
||||||
|
|
||||||
curl "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
|
|
||||||
|
|
||||||
Optionally start a user-friendly frontend on port 9966, and open it up in your browser
|
|
||||||
|
|
||||||
docker run -p 9966:9966 osrm/osrm-frontend
|
|
||||||
xdg-open 'http://127.0.0.1:9966'
|
|
||||||
|
|
||||||
In case Docker complains about not being able to connect to the Docker daemon make sure you are in the `docker` group.
|
|
||||||
|
|
||||||
sudo usermod -aG docker $USER
|
|
||||||
|
|
||||||
After adding yourself to the `docker` group make sure to log out and back in again with your terminal.
|
|
||||||
|
|
||||||
We support the following images in the Container Registry:
|
|
||||||
|
|
||||||
Name | Description
|
|
||||||
-----|------
|
|
||||||
`latest` | `master` compiled with release flag
|
|
||||||
`latest-assertions` | `master` compiled with with release flag, assertions enabled and debug symbols
|
|
||||||
`latest-debug` | `master` compiled with debug flag
|
|
||||||
`<tag>` | specific tag compiled with release flag
|
|
||||||
`<tag>-debug` | specific tag compiled with debug flag
|
|
||||||
|
|
||||||
### Building from Source
|
|
||||||
|
|
||||||
The following targets Ubuntu 22.04.
|
|
||||||
For instructions how to build on different distributions, macOS or Windows see our [Wiki](https://github.com/Project-OSRM/osrm-backend/wiki).
|
|
||||||
|
|
||||||
Install dependencies
|
|
||||||
|
|
||||||
```bash
|
|
||||||
sudo apt install build-essential git cmake pkg-config \
|
|
||||||
libbz2-dev libxml2-dev libzip-dev libboost-all-dev \
|
|
||||||
lua5.2 liblua5.2-dev libtbb-dev
|
|
||||||
```
|
|
||||||
|
|
||||||
Compile and install OSRM binaries
|
|
||||||
|
|
||||||
```bash
|
|
||||||
mkdir -p build
|
|
||||||
cd build
|
|
||||||
cmake ..
|
|
||||||
cmake --build .
|
|
||||||
sudo cmake --build . --target install
|
|
||||||
```
|
|
||||||
|
|
||||||
### Request Against the Demo Server
|
|
||||||
|
|
||||||
Read the [API usage policy](https://github.com/Project-OSRM/osrm-backend/wiki/Demo-server).
|
|
||||||
|
|
||||||
Simple query with instructions and alternatives on Berlin:
|
|
||||||
|
|
||||||
```
|
|
||||||
curl "https://router.project-osrm.org/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true&alternatives=true"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Using the Node.js Bindings
|
|
||||||
|
|
||||||
The Node.js bindings provide read-only access to the routing engine.
|
|
||||||
We provide API documentation and examples [here](docs/nodejs/api.md).
|
|
||||||
|
|
||||||
You will need a modern `libstdc++` toolchain (`>= GLIBCXX_3.4.26`) for binary compatibility if you want to use the pre-built binaries.
|
|
||||||
For older Ubuntu systems you can upgrade your standard library for example with:
|
|
||||||
|
|
||||||
```
|
|
||||||
sudo add-apt-repository ppa:ubuntu-toolchain-r/test
|
|
||||||
sudo apt-get update -y
|
|
||||||
sudo apt-get install -y libstdc++-9-dev
|
|
||||||
```
|
|
||||||
|
|
||||||
You can install the Node.js bindings via `npm install @project-osrm/osrm` or from this repository either via
|
|
||||||
|
|
||||||
npm install
|
|
||||||
|
|
||||||
which will check and use pre-built binaries if they're available for this release and your Node version, or via
|
|
||||||
|
|
||||||
npm install --build-from-source
|
|
||||||
|
|
||||||
to always force building the Node.js bindings from source.
|
|
||||||
|
|
||||||
#### Unscoped packages
|
|
||||||
|
|
||||||
Prior to v5.27.0, the `osrm` Node package was unscoped. If you are upgrading from an old package, you will need to do the following:
|
|
||||||
|
|
||||||
```
|
|
||||||
npm uninstall osrm --save
|
|
||||||
npm install @project-osrm/osrm --save
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Package docs
|
|
||||||
|
|
||||||
For usage details have a look [these API docs](docs/nodejs/api.md).
|
|
||||||
|
|
||||||
An exemplary implementation by a 3rd party with Docker and Node.js can be found [here](https://github.com/door2door-io/osrm-express-server-demo).
|
|
||||||
|
|
||||||
|
|
||||||
## References in publications
|
## References in publications
|
||||||
|
|
||||||
@ -189,3 +43,4 @@ When using the code in a (scientific) publication, please cite
|
|||||||
address = {New York, NY, USA},
|
address = {New York, NY, USA},
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
190
Rakefile
Normal file
190
Rakefile
Normal file
@ -0,0 +1,190 @@
|
|||||||
|
require 'OSM/StreamParser'
|
||||||
|
require 'socket'
|
||||||
|
require 'digest/sha1'
|
||||||
|
require 'cucumber/rake/task'
|
||||||
|
require 'sys/proctable'
|
||||||
|
|
||||||
|
BUILD_FOLDER = 'build'
|
||||||
|
DATA_FOLDER = 'sandbox'
|
||||||
|
PROFILE = 'bicycle'
|
||||||
|
OSRM_PORT = 5000
|
||||||
|
PROFILES_FOLDER = '../profiles'
|
||||||
|
|
||||||
|
Cucumber::Rake::Task.new do |t|
|
||||||
|
t.cucumber_opts = %w{--format pretty}
|
||||||
|
end
|
||||||
|
|
||||||
|
areas = {
|
||||||
|
:kbh => { :country => 'denmark', :bbox => 'top=55.6972 left=12.5222 right=12.624 bottom=55.6376' },
|
||||||
|
:frd => { :country => 'denmark', :bbox => 'top=55.7007 left=12.4765 bottom=55.6576 right=12.5698' },
|
||||||
|
:regh => { :country => 'denmark', :bbox => 'top=56.164 left=11.792 bottom=55.403 right=12.731' },
|
||||||
|
:denmark => { :country => 'denmark', :bbox => nil },
|
||||||
|
:skaane => { :country => 'sweden', :bbox => 'top=56.55 left=12.4 bottom=55.3 right=14.6' }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
osm_data_area_name = ARGV[1] ? ARGV[1].to_s.to_sym : :kbh
|
||||||
|
raise "Unknown data area." unless areas[osm_data_area_name]
|
||||||
|
osm_data_country = areas[osm_data_area_name][:country]
|
||||||
|
osm_data_area_bbox = areas[osm_data_area_name][:bbox]
|
||||||
|
|
||||||
|
|
||||||
|
task osm_data_area_name.to_sym {} #define empty task to prevent rake from whining. will break if area has same name as a task
|
||||||
|
|
||||||
|
|
||||||
|
def each_process name, &block
|
||||||
|
Sys::ProcTable.ps do |process|
|
||||||
|
if process.comm.strip == name.strip && process.state != 'zombie'
|
||||||
|
yield process.pid.to_i, process.state.strip
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def up?
|
||||||
|
find_pid('osrm-routed') != nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def find_pid name
|
||||||
|
each_process(name) { |pid,state| return pid.to_i }
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def wait_for_shutdown name
|
||||||
|
timeout = 10
|
||||||
|
(timeout*10).times do
|
||||||
|
return if find_pid(name) == nil
|
||||||
|
sleep 0.1
|
||||||
|
end
|
||||||
|
raise "*** Could not terminate #{name}."
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
desc "Rebuild and run tests."
|
||||||
|
task :default => [:build]
|
||||||
|
|
||||||
|
desc "Build using CMake."
|
||||||
|
task :build do
|
||||||
|
if Dir.exists? BUILD_FOLDER
|
||||||
|
Dir.chdir BUILD_FOLDER do
|
||||||
|
system "make"
|
||||||
|
end
|
||||||
|
else
|
||||||
|
system "mkdir build; cd build; cmake ..; make"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Setup config files."
|
||||||
|
task :setup do
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Download OSM data."
|
||||||
|
task :download do
|
||||||
|
Dir.mkdir "#{DATA_FOLDER}" unless File.exist? "#{DATA_FOLDER}"
|
||||||
|
puts "Downloading..."
|
||||||
|
puts "curl http://download.geofabrik.de/europe/#{osm_data_country}-latest.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf"
|
||||||
|
raise "Error while downloading data." unless system "curl http://download.geofabrik.de/europe/#{osm_data_country}-latest.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf"
|
||||||
|
if osm_data_area_bbox
|
||||||
|
puts "Cropping and converting to protobuffer..."
|
||||||
|
raise "Error while cropping data." unless system "osmosis --read-pbf file=#{DATA_FOLDER}/#{osm_data_country}.osm.pbf --bounding-box #{osm_data_area_bbox} --write-pbf file=#{DATA_FOLDER}/#{osm_data_area_name}.osm.pbf omitmetadata=true"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Crop OSM data"
|
||||||
|
task :crop do
|
||||||
|
if osm_data_area_bbox
|
||||||
|
raise "Error while cropping data." unless system "osmosis --read-pbf file=#{DATA_FOLDER}/#{osm_data_country}.osm.pbf --bounding-box #{osm_data_area_bbox} --write-pbf file=#{DATA_FOLDER}/#{osm_data_area_name}.osm.pbf omitmetadata=true"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Reprocess OSM data."
|
||||||
|
task :process => [:extract,:prepare] do
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Extract OSM data."
|
||||||
|
task :extract do
|
||||||
|
Dir.chdir DATA_FOLDER do
|
||||||
|
raise "Error while extracting data." unless system "../#{BUILD_FOLDER}/osrm-extract #{osm_data_area_name}.osm.pbf --profile ../profiles/#{PROFILE}.lua"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Prepare OSM data."
|
||||||
|
task :prepare do
|
||||||
|
Dir.chdir DATA_FOLDER do
|
||||||
|
raise "Error while preparing data." unless system "../#{BUILD_FOLDER}/osrm-prepare #{osm_data_area_name}.osrm --profile ../profiles/#{PROFILE}.lua"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Delete preprocessing files."
|
||||||
|
task :clean do
|
||||||
|
File.delete *Dir.glob("#{DATA_FOLDER}/*.osrm")
|
||||||
|
File.delete *Dir.glob("#{DATA_FOLDER}/*.osrm.*")
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Run all cucumber test"
|
||||||
|
task :test do
|
||||||
|
system "cucumber"
|
||||||
|
puts
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Run the routing server in the terminal. Press Ctrl-C to stop."
|
||||||
|
task :run do
|
||||||
|
Dir.chdir DATA_FOLDER do
|
||||||
|
system "../#{BUILD_FOLDER}/osrm-routed #{osm_data_area_name}.osrm --port #{OSRM_PORT}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Launch the routing server in the background. Use rake:down to stop it."
|
||||||
|
task :up do
|
||||||
|
Dir.chdir DATA_FOLDER do
|
||||||
|
abort("Already up.") if up?
|
||||||
|
pipe = IO.popen("../#{BUILD_FOLDER}/osrm-routed #{osm_data_area_name}.osrm --port #{OSRM_PORT} 1>>osrm-routed.log 2>>osrm-routed.log")
|
||||||
|
timeout = 5
|
||||||
|
(timeout*10).times do
|
||||||
|
begin
|
||||||
|
socket = TCPSocket.new('localhost', OSRM_PORT)
|
||||||
|
socket.puts 'ping'
|
||||||
|
rescue Errno::ECONNREFUSED
|
||||||
|
sleep 0.1
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Stop the routing server."
|
||||||
|
task :down do
|
||||||
|
pid = find_pid 'osrm-routed'
|
||||||
|
if pid
|
||||||
|
Process.kill 'TERM', pid
|
||||||
|
else
|
||||||
|
puts "Already down."
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Kill all osrm-extract, osrm-prepare and osrm-routed processes."
|
||||||
|
task :kill do
|
||||||
|
each_process('osrm-routed') { |pid,state| Process.kill 'KILL', pid }
|
||||||
|
each_process('osrm-prepare') { |pid,state| Process.kill 'KILL', pid }
|
||||||
|
each_process('osrm-extract') { |pid,state| Process.kill 'KILL', pid }
|
||||||
|
wait_for_shutdown 'osrm-routed'
|
||||||
|
wait_for_shutdown 'osrm-prepare'
|
||||||
|
wait_for_shutdown 'osrm-extract'
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Get PIDs of all osrm-extract, osrm-prepare and osrm-routed processes."
|
||||||
|
task :pid do
|
||||||
|
each_process 'osrm-routed' do |pid,state|
|
||||||
|
puts "#{pid}\t#{state}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
desc "Stop, reprocess and restart."
|
||||||
|
task :update => [:down,:process,:up] do
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
desc "Remove test cache files."
|
||||||
|
task :sweep do
|
||||||
|
system "rm test/cache/*"
|
||||||
|
end
|
||||||
|
|
@ -1,16 +1,37 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
#ifndef BAYES_CLASSIFIER_HPP
|
#ifndef BAYES_CLASSIFIER_HPP
|
||||||
#define BAYES_CLASSIFIER_HPP
|
#define BAYES_CLASSIFIER_HPP
|
||||||
|
|
||||||
#include <cmath>
|
#include <cmath>
|
||||||
|
|
||||||
#include <utility>
|
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
#include <numbers>
|
|
||||||
|
|
||||||
namespace osrm::engine::map_matching
|
|
||||||
{
|
|
||||||
|
|
||||||
struct NormalDistribution
|
struct NormalDistribution
|
||||||
{
|
{
|
||||||
NormalDistribution(const double mean, const double standard_deviation)
|
NormalDistribution(const double mean, const double standard_deviation)
|
||||||
@ -18,11 +39,11 @@ struct NormalDistribution
|
|||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME implement log-probability version since it's faster
|
// FIXME implement log-probability version since its faster
|
||||||
double Density(const double val) const
|
double density_function(const double val) const
|
||||||
{
|
{
|
||||||
const double x = val - mean;
|
const double x = val - mean;
|
||||||
return 1.0 / (std::sqrt(2 * std::numbers::pi) * standard_deviation) *
|
return 1.0 / (std::sqrt(2. * M_PI) * standard_deviation) *
|
||||||
std::exp(-x * x / (standard_deviation * standard_deviation));
|
std::exp(-x * x / (standard_deviation * standard_deviation));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -37,8 +58,8 @@ struct LaplaceDistribution
|
|||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME implement log-probability version since it's faster
|
// FIXME implement log-probability version since its faster
|
||||||
double Density(const double val) const
|
double density_function(const double val) const
|
||||||
{
|
{
|
||||||
const double x = std::abs(val - location);
|
const double x = std::abs(val - location);
|
||||||
return 1.0 / (2. * scale) * std::exp(-x / scale);
|
return 1.0 / (2. * scale) * std::exp(-x / scale);
|
||||||
@ -59,11 +80,11 @@ class BayesClassifier
|
|||||||
};
|
};
|
||||||
using ClassificationT = std::pair<ClassLabel, double>;
|
using ClassificationT = std::pair<ClassLabel, double>;
|
||||||
|
|
||||||
BayesClassifier(PositiveDistributionT positive_distribution,
|
BayesClassifier(const PositiveDistributionT &positive_distribution,
|
||||||
NegativeDistributionT negative_distribution,
|
const NegativeDistributionT &negative_distribution,
|
||||||
const double positive_apriori_probability)
|
const double positive_apriori_probability)
|
||||||
: positive_distribution(std::move(positive_distribution)),
|
: positive_distribution(positive_distribution),
|
||||||
negative_distribution(std::move(negative_distribution)),
|
negative_distribution(negative_distribution),
|
||||||
positive_apriori_probability(positive_apriori_probability),
|
positive_apriori_probability(positive_apriori_probability),
|
||||||
negative_apriori_probability(1. - positive_apriori_probability)
|
negative_apriori_probability(1. - positive_apriori_probability)
|
||||||
{
|
{
|
||||||
@ -73,9 +94,9 @@ class BayesClassifier
|
|||||||
ClassificationT classify(const ValueT &v) const
|
ClassificationT classify(const ValueT &v) const
|
||||||
{
|
{
|
||||||
const double positive_postpriori =
|
const double positive_postpriori =
|
||||||
positive_apriori_probability * positive_distribution.Density(v);
|
positive_apriori_probability * positive_distribution.density_function(v);
|
||||||
const double negative_postpriori =
|
const double negative_postpriori =
|
||||||
negative_apriori_probability * negative_distribution.Density(v);
|
negative_apriori_probability * negative_distribution.density_function(v);
|
||||||
const double norm = positive_postpriori + negative_postpriori;
|
const double norm = positive_postpriori + negative_postpriori;
|
||||||
|
|
||||||
if (positive_postpriori > negative_postpriori)
|
if (positive_postpriori > negative_postpriori)
|
||||||
@ -92,6 +113,5 @@ class BayesClassifier
|
|||||||
double positive_apriori_probability;
|
double positive_apriori_probability;
|
||||||
double negative_apriori_probability;
|
double negative_apriori_probability;
|
||||||
};
|
};
|
||||||
} // namespace osrm::engine::map_matching
|
|
||||||
|
|
||||||
#endif // BAYES_CLASSIFIER_HPP
|
#endif // BAYES_CLASSIFIER_HPP
|
174
algorithms/bfs_components.hpp
Normal file
174
algorithms/bfs_components.hpp
Normal file
@ -0,0 +1,174 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef BFS_COMPONENTS_HPP_
|
||||||
|
#define BFS_COMPONENTS_HPP_
|
||||||
|
|
||||||
|
#include "../typedefs.h"
|
||||||
|
#include "../data_structures/restriction_map.hpp"
|
||||||
|
|
||||||
|
#include <queue>
|
||||||
|
#include <unordered_set>
|
||||||
|
|
||||||
|
// Explores the components of the given graph while respecting turn restrictions
|
||||||
|
// and barriers.
|
||||||
|
template <typename GraphT> class BFSComponentExplorer
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
BFSComponentExplorer(const GraphT &dynamic_graph,
|
||||||
|
const RestrictionMap &restrictions,
|
||||||
|
const std::unordered_set<NodeID> &barrier_nodes)
|
||||||
|
: m_graph(dynamic_graph), m_restriction_map(restrictions), m_barrier_nodes(barrier_nodes)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(m_graph.GetNumberOfNodes() > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*!
|
||||||
|
* Returns the size of the component that the node belongs to.
|
||||||
|
*/
|
||||||
|
unsigned int GetComponentSize(const NodeID node) const
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(node < m_component_index_list.size());
|
||||||
|
|
||||||
|
return m_component_index_size[m_component_index_list[node]];
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned int GetNumberOfComponents() { return m_component_index_size.size(); }
|
||||||
|
|
||||||
|
/*!
|
||||||
|
* Computes the component sizes.
|
||||||
|
*/
|
||||||
|
void run()
|
||||||
|
{
|
||||||
|
std::queue<std::pair<NodeID, NodeID>> bfs_queue;
|
||||||
|
unsigned current_component = 0;
|
||||||
|
|
||||||
|
BOOST_ASSERT(m_component_index_list.empty());
|
||||||
|
BOOST_ASSERT(m_component_index_size.empty());
|
||||||
|
|
||||||
|
unsigned num_nodes = m_graph.GetNumberOfNodes();
|
||||||
|
|
||||||
|
m_component_index_list.resize(num_nodes, std::numeric_limits<unsigned>::max());
|
||||||
|
|
||||||
|
BOOST_ASSERT(num_nodes > 0);
|
||||||
|
|
||||||
|
// put unexplorered node with parent pointer into queue
|
||||||
|
for (NodeID node = 0; node < num_nodes; ++node)
|
||||||
|
{
|
||||||
|
if (std::numeric_limits<unsigned>::max() == m_component_index_list[node])
|
||||||
|
{
|
||||||
|
unsigned size = ExploreComponent(bfs_queue, node, current_component);
|
||||||
|
|
||||||
|
// push size into vector
|
||||||
|
m_component_index_size.emplace_back(size);
|
||||||
|
++current_component;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
    /*!
     * Explores the component that starts at 'node' using BFS, assigning
     * 'current_component' to every node reached. Traversal respects barrier
     * nodes (not expanded), only_-turn restrictions, and restricted turns;
     * U-turns are skipped unless at a dead end.
     *
     * \return the number of nodes assigned to this component
     */
    unsigned ExploreComponent(std::queue<std::pair<NodeID, NodeID>> &bfs_queue,
                              NodeID node,
                              unsigned current_component)
    {
        /*
           Graphical representation of variables:

           u               v               w
           *---------->*---------->*
                           e2
        */

        bfs_queue.emplace(node, node);
        // mark node as read
        m_component_index_list[node] = current_component;

        // BUGFIX: start the counter at 0. Every node of the component
        // (including the root pushed above) is popped from the queue exactly
        // once and counted below; the previous initial value of 1 counted the
        // root node twice, overstating every component size by one.
        unsigned current_component_size = 0;

        while (!bfs_queue.empty())
        {
            // fetch element from BFS queue
            std::pair<NodeID, NodeID> current_queue_item = bfs_queue.front();
            bfs_queue.pop();

            const NodeID v = current_queue_item.first;  // current node
            const NodeID u = current_queue_item.second; // parent
            // increment size counter of current component
            ++current_component_size;
            if (m_barrier_nodes.find(v) != m_barrier_nodes.end())
            {
                // barrier nodes are counted but never expanded
                continue;
            }
            const NodeID to_node_of_only_restriction =
                m_restriction_map.CheckForEmanatingIsOnlyTurn(u, v);

            for (auto e2 : m_graph.GetAdjacentEdgeRange(v))
            {
                const NodeID w = m_graph.GetTarget(e2);

                if (to_node_of_only_restriction != std::numeric_limits<unsigned>::max() &&
                    w != to_node_of_only_restriction)
                {
                    // At an only_-restriction but not at the right turn
                    continue;
                }

                if (u != w)
                {
                    // only add an edge if turn is not a U-turn except
                    // when it is at the end of a dead-end street.
                    if (!m_restriction_map.CheckIfTurnIsRestricted(u, v, w))
                    {
                        // only add an edge if turn is not prohibited
                        if (std::numeric_limits<unsigned>::max() == m_component_index_list[w])
                        {
                            // insert next (node, parent) only if w has
                            // not yet been explored
                            // mark node as read
                            m_component_index_list[w] = current_component;
                            bfs_queue.emplace(w, v);
                        }
                    }
                }
            }
        }

        return current_component_size;
    }
|
|
||||||
|
    // Maps each node id to the id of the component it belongs to.
    std::vector<unsigned> m_component_index_list;
    // Maps each component id to the number of nodes in that component.
    // NOTE(review): the element type is NodeID although it stores a count —
    // presumably NodeID is an unsigned alias; confirm against its typedef.
    std::vector<NodeID> m_component_index_size;

    const GraphT &m_graph;
    const RestrictionMap &m_restriction_map;
    const std::unordered_set<NodeID> &m_barrier_nodes;
};

#endif // BFS_COMPONENTS_HPP_
|
145
algorithms/crc32_processor.hpp
Normal file
145
algorithms/crc32_processor.hpp
Normal file
@ -0,0 +1,145 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef ITERATOR_BASED_CRC32_H
|
||||||
|
#define ITERATOR_BASED_CRC32_H
|
||||||
|
|
||||||
|
#if defined(__x86_64__) && !defined(__MINGW64__)
|
||||||
|
#include <cpuid.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#include <boost/crc.hpp> // for boost::crc_32_type
|
||||||
|
|
||||||
|
#include <iterator>
|
||||||
|
|
||||||
|
// Computes a CRC32 checksum over an iterator range, using the SSE4.2
// hardware CRC32 instruction when the CPU supports it and Boost.CRC
// (CRC-32C, see the crc_processor polynomial below) as software fallback.
class IteratorbasedCRC32
{
  public:
    // True when the hardware SSE4.2 implementation is in use.
    bool using_hardware() const { return use_hardware_implementation; }

    IteratorbasedCRC32() : crc(0) { use_hardware_implementation = detect_hardware_support(); }

    // Folds the raw bytes of every element in [iter, end) into the checksum
    // and returns the result.
    // NOTE(review): the local 'crc' below shadows the member of the same
    // name. The hardware path accumulates in the member, the software path in
    // crc_processor — both persist across calls; confirm whether repeated
    // invocations are intended to continue one running checksum.
    template <class Iterator> unsigned operator()(Iterator iter, const Iterator end)
    {
        unsigned crc = 0;
        while (iter != end)
        {
            using value_type = typename std::iterator_traits<Iterator>::value_type;
            // reinterpret each element as a plain byte buffer
            const char *data = reinterpret_cast<const char *>(&(*iter));

            if (use_hardware_implementation)
            {
                crc = compute_in_hardware(data, sizeof(value_type));
            }
            else
            {
                crc = compute_in_software(data, sizeof(value_type));
            }
            ++iter;
        }
        return crc;
    }

  private:
    // Checks the CPUID feature flags (ECX of leaf 1) for SSE4.2 support.
    bool detect_hardware_support() const
    {
        static const int sse42_bit = 0x00100000;
        const unsigned ecx = cpuid();
        const bool sse42_found = (ecx & sse42_bit) != 0;
        return sse42_found;
    }

    // Software fallback: feeds the bytes through the Boost.CRC processor.
    unsigned compute_in_software(const char *str, unsigned len)
    {
        crc_processor.process_bytes(str, len);
        return crc_processor.checksum();
    }

    // adapted from http://byteworm.com/2010/10/13/crc32/
    // Hardware path: the raw opcode bytes below encode the SSE4.2 CRC32
    // instruction. On non-x86_64 builds this is a no-op that returns the
    // member 'crc' unchanged.
    unsigned compute_in_hardware(const char *str, unsigned len)
    {
#if defined(__x86_64__)
        unsigned q = len / sizeof(unsigned);
        unsigned r = len % sizeof(unsigned);
        unsigned *p = (unsigned *)str;

        // crc=0;
        // process the input one 32-bit word at a time
        while (q--)
        {
            __asm__ __volatile__(".byte 0xf2, 0xf, 0x38, 0xf1, 0xf1;"
                                 : "=S"(crc)
                                 : "0"(crc), "c"(*p));
            ++p;
        }

        // then handle the remaining tail bytes individually
        str = reinterpret_cast<char *>(p);
        while (r--)
        {
            __asm__ __volatile__(".byte 0xf2, 0xf, 0x38, 0xf1, 0xf1;"
                                 : "=S"(crc)
                                 : "0"(crc), "c"(*str));
            ++str;
        }
#endif
        return crc;
    }

    // Returns the ECX feature register of CPUID leaf 1.
    inline unsigned cpuid() const
    {
        unsigned eax = 0, ebx = 0, ecx = 0, edx = 0;
        // on X64 this calls hardware cpuid(.) instr. otherwise a dummy impl.
        __get_cpuid(1, &eax, &ebx, &ecx, &edx);
        return ecx;
    }

#if defined(__MINGW64__) || defined(_MSC_VER) || !defined(__x86_64__)
    // Dummy replacement for <cpuid.h> on platforms that lack it; reports no
    // SSE4.2 support so the software fallback is selected.
    inline void
    __get_cpuid(int param, unsigned *eax, unsigned *ebx, unsigned *ecx, unsigned *edx) const
    {
        *ecx = 0;
    }
#endif

    // CRC-32C (Castagnoli) configuration — matches the hardware instruction.
    boost::crc_optimal<32, 0x1EDC6F41, 0x0, 0x0, true, true> crc_processor;
    unsigned crc;                     // accumulator used by the hardware path
    bool use_hardware_implementation; // decided once in the constructor
};
|
|
||||||
|
struct RangebasedCRC32
|
||||||
|
{
|
||||||
|
template <typename Iteratable> unsigned operator()(const Iteratable &iterable)
|
||||||
|
{
|
||||||
|
return crc32(std::begin(iterable), std::end(iterable));
|
||||||
|
}
|
||||||
|
|
||||||
|
bool using_hardware() const { return crc32.using_hardware(); }
|
||||||
|
|
||||||
|
private:
|
||||||
|
IteratorbasedCRC32 crc32;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* ITERATOR_BASED_CRC32_H */
|
164
algorithms/douglas_peucker.cpp
Normal file
164
algorithms/douglas_peucker.cpp
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "douglas_peucker.hpp"
|
||||||
|
|
||||||
|
#include "../data_structures/segment_information.hpp"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <cmath>
|
||||||
|
#include <algorithm>
|
||||||
|
#include <iterator>
|
||||||
|
|
||||||
|
namespace
|
||||||
|
{
|
||||||
|
struct CoordinatePairCalculator
|
||||||
|
{
|
||||||
|
CoordinatePairCalculator() = delete;
|
||||||
|
CoordinatePairCalculator(const FixedPointCoordinate &coordinate_a,
|
||||||
|
const FixedPointCoordinate &coordinate_b)
|
||||||
|
{
|
||||||
|
// initialize distance calculator with two fixed coordinates a, b
|
||||||
|
const float RAD = 0.017453292519943295769236907684886f;
|
||||||
|
first_lat = (coordinate_a.lat / COORDINATE_PRECISION) * RAD;
|
||||||
|
first_lon = (coordinate_a.lon / COORDINATE_PRECISION) * RAD;
|
||||||
|
second_lat = (coordinate_b.lat / COORDINATE_PRECISION) * RAD;
|
||||||
|
second_lon = (coordinate_b.lon / COORDINATE_PRECISION) * RAD;
|
||||||
|
}
|
||||||
|
|
||||||
|
int operator()(FixedPointCoordinate &other) const
|
||||||
|
{
|
||||||
|
// set third coordinate c
|
||||||
|
const float RAD = 0.017453292519943295769236907684886f;
|
||||||
|
const float earth_radius = 6372797.560856f;
|
||||||
|
const float float_lat1 = (other.lat / COORDINATE_PRECISION) * RAD;
|
||||||
|
const float float_lon1 = (other.lon / COORDINATE_PRECISION) * RAD;
|
||||||
|
|
||||||
|
// compute distance (a,c)
|
||||||
|
const float x_value_1 = (first_lon - float_lon1) * cos((float_lat1 + first_lat) / 2.f);
|
||||||
|
const float y_value_1 = first_lat - float_lat1;
|
||||||
|
const float dist1 = std::hypot(x_value_1, y_value_1) * earth_radius;
|
||||||
|
|
||||||
|
// compute distance (b,c)
|
||||||
|
const float x_value_2 = (second_lon - float_lon1) * cos((float_lat1 + second_lat) / 2.f);
|
||||||
|
const float y_value_2 = second_lat - float_lat1;
|
||||||
|
const float dist2 = std::hypot(x_value_2, y_value_2) * earth_radius;
|
||||||
|
|
||||||
|
// return the minimum
|
||||||
|
return static_cast<int>(std::min(dist1, dist2));
|
||||||
|
}
|
||||||
|
|
||||||
|
float first_lat;
|
||||||
|
float first_lon;
|
||||||
|
float second_lat;
|
||||||
|
float second_lon;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
void DouglasPeucker::Run(std::vector<SegmentInformation> &input_geometry, const unsigned zoom_level)
|
||||||
|
{
|
||||||
|
Run(std::begin(input_geometry), std::end(input_geometry), zoom_level);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*!
 * Marks entries of [begin, end) as 'necessary' according to the
 * (Ramer-)Douglas-Peucker generalization for the given zoom level.
 * Entries that are already 'necessary' act as fixed borders that partition
 * the geometry into sub-ranges which are simplified independently.
 */
void DouglasPeucker::Run(RandomAccessIt begin, RandomAccessIt end, const unsigned zoom_level)
{
    const auto size = std::distance(begin, end);
    if (size < 2)
    {
        // fewer than two points: nothing to generalize
        return;
    }

    // the first and last point always survive generalization
    begin->necessary = true;
    std::prev(end)->necessary = true;

    {
        BOOST_ASSERT_MSG(zoom_level < DOUGLAS_PEUCKER_THRESHOLDS.size(), "unsupported zoom level");
        RandomAccessIt left_border = begin;
        RandomAccessIt right_border = std::next(begin);
        // Sweep over array and identify those ranges that need to be checked
        do
        {
            // traverse list until new border element found
            if (right_border->necessary)
            {
                // sanity checks
                BOOST_ASSERT(left_border->necessary);
                BOOST_ASSERT(right_border->necessary);
                recursion_stack.emplace(left_border, right_border);
                left_border = right_border;
            }
            ++right_border;
        } while (right_border != end);
    }

    // mark locations as 'necessary' by divide-and-conquer
    while (!recursion_stack.empty())
    {
        // pop next element
        const GeometryRange pair = recursion_stack.top();
        recursion_stack.pop();
        // sanity checks
        BOOST_ASSERT_MSG(pair.first->necessary, "left border must be necessary");
        BOOST_ASSERT_MSG(pair.second->necessary, "right border must be necessary");
        BOOST_ASSERT_MSG(std::distance(pair.second, end) > 0, "right border outside of geometry");
        BOOST_ASSERT_MSG(std::distance(pair.first, pair.second) >= 0,
                         "left border on the wrong side");

        int max_int_distance = 0;
        auto farthest_entry_it = pair.second;
        // approximate distance of interior points to the range's end points
        const CoordinatePairCalculator dist_calc(pair.first->location, pair.second->location);

        // sweep over range to find the maximum
        for (auto it = std::next(pair.first); it != pair.second; ++it)
        {
            const int distance = dist_calc(it->location);
            // found new feasible maximum?
            if (distance > max_int_distance && distance > DOUGLAS_PEUCKER_THRESHOLDS[zoom_level])
            {
                farthest_entry_it = it;
                max_int_distance = distance;
            }
        }

        // check if maximum violates a zoom level dependent threshold
        if (max_int_distance > DOUGLAS_PEUCKER_THRESHOLDS[zoom_level])
        {
            // mark idx as necessary
            farthest_entry_it->necessary = true;
            // recurse on both halves if they still contain interior points
            if (1 < std::distance(pair.first, farthest_entry_it))
            {
                recursion_stack.emplace(pair.first, farthest_entry_it);
            }
            if (1 < std::distance(farthest_entry_it, pair.second))
            {
                recursion_stack.emplace(farthest_entry_it, pair.second);
            }
        }
    }
}
|
81
algorithms/douglas_peucker.hpp
Normal file
81
algorithms/douglas_peucker.hpp
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2013, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef DOUGLAS_PEUCKER_HPP_
|
||||||
|
#define DOUGLAS_PEUCKER_HPP_
|
||||||
|
|
||||||
|
#include "../data_structures/segment_information.hpp"
|
||||||
|
|
||||||
|
#include <array>
|
||||||
|
#include <stack>
|
||||||
|
#include <utility>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
/* This class object computes the bitvector of indicating generalized input
 * points according to the (Ramer-)Douglas-Peucker algorithm.
 *
 * Input is vector of pairs. Each pair consists of the point information and a
 * bit indicating if the points is present in the generalization.
 * Note: points may also be pre-selected*/

// Per-zoom-level distance thresholds (same integer unit the distance
// calculator produces) — index is the zoom level.
// NOTE(review): 'static' in a header gives each translation unit its own
// copy of this array; presumably intentional here, but worth confirming.
static const std::array<int, 19> DOUGLAS_PEUCKER_THRESHOLDS{{
    512440, // z0
    256720, // z1
    122560, // z2
    56780,  // z3
    28800,  // z4
    14400,  // z5
    7200,   // z6
    3200,   // z7
    2400,   // z8
    1000,   // z9
    600,    // z10
    120,    // z11
    60,     // z12
    45,     // z13
    36,     // z14
    20,     // z15
    8,      // z16
    6,      // z17
    4       // z18
}};
|
|
||||||
|
// Driver object for the Douglas-Peucker generalization (see the .cpp file).
class DouglasPeucker
{
  public:
    using RandomAccessIt = std::vector<SegmentInformation>::iterator;

    using GeometryRange = std::pair<RandomAccessIt, RandomAccessIt>;
    // Stack to simulate the recursion
    std::stack<GeometryRange> recursion_stack;

    // NOTE(review): this second 'public:' is redundant — the section above is
    // already public, which also makes recursion_stack part of the public
    // interface.
  public:
    // Marks entries of [begin, end) as 'necessary' for the given zoom level.
    void Run(RandomAccessIt begin, RandomAccessIt end, const unsigned zoom_level);
    // Convenience overload over a whole geometry vector.
    void Run(std::vector<SegmentInformation> &input_geometry, const unsigned zoom_level);
};
|
||||||
|
|
||||||
|
#endif /* DOUGLAS_PEUCKER_HPP_ */
|
91
algorithms/object_encoder.hpp
Normal file
91
algorithms/object_encoder.hpp
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef OBJECT_ENCODER_HPP
|
||||||
|
#define OBJECT_ENCODER_HPP
|
||||||
|
|
||||||
|
#include "../util/string_util.hpp"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
#include <boost/archive/iterators/base64_from_binary.hpp>
|
||||||
|
#include <boost/archive/iterators/binary_from_base64.hpp>
|
||||||
|
#include <boost/archive/iterators/transform_width.hpp>
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
struct ObjectEncoder
|
||||||
|
{
|
||||||
|
using base64_t = boost::archive::iterators::base64_from_binary<
|
||||||
|
boost::archive::iterators::transform_width<const char *, 6, 8>>;
|
||||||
|
|
||||||
|
using binary_t = boost::archive::iterators::transform_width<
|
||||||
|
boost::archive::iterators::binary_from_base64<std::string::const_iterator>,
|
||||||
|
8,
|
||||||
|
6>;
|
||||||
|
|
||||||
|
template <class ObjectT> static void EncodeToBase64(const ObjectT &object, std::string &encoded)
|
||||||
|
{
|
||||||
|
const char *char_ptr_to_object = reinterpret_cast<const char *>(&object);
|
||||||
|
std::vector<unsigned char> data(sizeof(object));
|
||||||
|
std::copy(char_ptr_to_object, char_ptr_to_object + sizeof(ObjectT), data.begin());
|
||||||
|
|
||||||
|
unsigned char number_of_padded_chars = 0; // is in {0,1,2};
|
||||||
|
while (data.size() % 3 != 0)
|
||||||
|
{
|
||||||
|
++number_of_padded_chars;
|
||||||
|
data.push_back(0x00);
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT_MSG(0 == data.size() % 3, "base64 input data size is not a multiple of 3!");
|
||||||
|
encoded.resize(sizeof(ObjectT));
|
||||||
|
encoded.assign(base64_t(&data[0]),
|
||||||
|
base64_t(&data[0] + (data.size() - number_of_padded_chars)));
|
||||||
|
replaceAll(encoded, "+", "-");
|
||||||
|
replaceAll(encoded, "/", "_");
|
||||||
|
}
|
||||||
|
|
||||||
|
template <class ObjectT> static void DecodeFromBase64(const std::string &input, ObjectT &object)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
std::string encoded(input);
|
||||||
|
// replace "-" with "+" and "_" with "/"
|
||||||
|
replaceAll(encoded, "-", "+");
|
||||||
|
replaceAll(encoded, "_", "/");
|
||||||
|
|
||||||
|
std::copy(binary_t(encoded.begin()), binary_t(encoded.begin() + encoded.length() - 1),
|
||||||
|
reinterpret_cast<char *>(&object));
|
||||||
|
}
|
||||||
|
catch (...)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* OBJECT_ENCODER_HPP */
|
128
algorithms/polyline_compressor.cpp
Normal file
128
algorithms/polyline_compressor.cpp
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "polyline_compressor.hpp"
|
||||||
|
#include "../data_structures/segment_information.hpp"
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
std::string PolylineCompressor::encode_vector(std::vector<int> &numbers) const
|
||||||
|
{
|
||||||
|
std::string output;
|
||||||
|
const auto end = numbers.size();
|
||||||
|
for (std::size_t i = 0; i < end; ++i)
|
||||||
|
{
|
||||||
|
numbers[i] <<= 1;
|
||||||
|
if (numbers[i] < 0)
|
||||||
|
{
|
||||||
|
numbers[i] = ~(numbers[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const int number : numbers)
|
||||||
|
{
|
||||||
|
output += encode_number(number);
|
||||||
|
}
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string PolylineCompressor::encode_number(int number_to_encode) const
|
||||||
|
{
|
||||||
|
std::string output;
|
||||||
|
while (number_to_encode >= 0x20)
|
||||||
|
{
|
||||||
|
const int next_value = (0x20 | (number_to_encode & 0x1f)) + 63;
|
||||||
|
output += static_cast<char>(next_value);
|
||||||
|
number_to_encode >>= 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
number_to_encode += 63;
|
||||||
|
output += static_cast<char>(number_to_encode);
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string
|
||||||
|
PolylineCompressor::get_encoded_string(const std::vector<SegmentInformation> &polyline) const
|
||||||
|
{
|
||||||
|
if (polyline.empty())
|
||||||
|
{
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<int> delta_numbers;
|
||||||
|
delta_numbers.reserve((polyline.size() - 1) * 2);
|
||||||
|
FixedPointCoordinate previous_coordinate = {0, 0};
|
||||||
|
for (const auto &segment : polyline)
|
||||||
|
{
|
||||||
|
if (segment.necessary)
|
||||||
|
{
|
||||||
|
const int lat_diff = segment.location.lat - previous_coordinate.lat;
|
||||||
|
const int lon_diff = segment.location.lon - previous_coordinate.lon;
|
||||||
|
delta_numbers.emplace_back(lat_diff);
|
||||||
|
delta_numbers.emplace_back(lon_diff);
|
||||||
|
previous_coordinate = segment.location;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return encode_vector(delta_numbers);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<FixedPointCoordinate> PolylineCompressor::decode_string(const std::string &geometry_string) const
|
||||||
|
{
|
||||||
|
std::vector<FixedPointCoordinate> new_coordinates;
|
||||||
|
int index = 0, len = geometry_string.size();
|
||||||
|
int lat = 0, lng = 0;
|
||||||
|
|
||||||
|
while (index < len)
|
||||||
|
{
|
||||||
|
int b, shift = 0, result = 0;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
b = geometry_string.at(index++) - 63;
|
||||||
|
result |= (b & 0x1f) << shift;
|
||||||
|
shift += 5;
|
||||||
|
} while (b >= 0x20);
|
||||||
|
int dlat = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
|
||||||
|
lat += dlat;
|
||||||
|
|
||||||
|
shift = 0;
|
||||||
|
result = 0;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
b = geometry_string.at(index++) - 63;
|
||||||
|
result |= (b & 0x1f) << shift;
|
||||||
|
shift += 5;
|
||||||
|
} while (b >= 0x20);
|
||||||
|
int dlng = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
|
||||||
|
lng += dlng;
|
||||||
|
|
||||||
|
FixedPointCoordinate p;
|
||||||
|
p.lat = COORDINATE_PRECISION * (((double) lat / 1E6));
|
||||||
|
p.lon = COORDINATE_PRECISION * (((double) lng / 1E6));
|
||||||
|
new_coordinates.push_back(p);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new_coordinates;
|
||||||
|
}
|
51
algorithms/polyline_compressor.hpp
Normal file
51
algorithms/polyline_compressor.hpp
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2013, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef POLYLINECOMPRESSOR_H_
|
||||||
|
#define POLYLINECOMPRESSOR_H_
|
||||||
|
|
||||||
|
struct SegmentInformation;
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
// Encodes/decodes coordinate polylines using an encoded-polyline format
// (zig-zag sign encoding plus base-63 varints).
class PolylineCompressor
{
  private:
    // Zig-zag encodes 'numbers' in place and concatenates their encodings.
    std::string encode_vector(std::vector<int> &numbers) const;

    // Encodes one (already zig-zagged) value as a base-63 varint.
    std::string encode_number(const int number_to_encode) const;

  public:
    // Returns the encoded representation of all 'necessary' polyline entries.
    std::string get_encoded_string(const std::vector<SegmentInformation> &polyline) const;

    // Decodes an encoded polyline string back into absolute coordinates.
    std::vector<FixedPointCoordinate> decode_string(const std::string &geometry_string) const;
};
|
|
||||||
|
#endif /* POLYLINECOMPRESSOR_H_ */
|
56
algorithms/polyline_formatter.cpp
Normal file
56
algorithms/polyline_formatter.cpp
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "polyline_formatter.hpp"
|
||||||
|
|
||||||
|
#include "polyline_compressor.hpp"
|
||||||
|
#include "../data_structures/segment_information.hpp"
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
osrm::json::String
|
||||||
|
PolylineFormatter::printEncodedString(const std::vector<SegmentInformation> &polyline) const
|
||||||
|
{
|
||||||
|
return osrm::json::String(PolylineCompressor().get_encoded_string(polyline));
|
||||||
|
}
|
||||||
|
|
||||||
|
osrm::json::Array
|
||||||
|
PolylineFormatter::printUnencodedString(const std::vector<SegmentInformation> &polyline) const
|
||||||
|
{
|
||||||
|
osrm::json::Array json_geometry_array;
|
||||||
|
for (const auto &segment : polyline)
|
||||||
|
{
|
||||||
|
if (segment.necessary)
|
||||||
|
{
|
||||||
|
osrm::json::Array json_coordinate;
|
||||||
|
json_coordinate.values.push_back(segment.location.lat / COORDINATE_PRECISION);
|
||||||
|
json_coordinate.values.push_back(segment.location.lon / COORDINATE_PRECISION);
|
||||||
|
json_geometry_array.values.push_back(json_coordinate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return json_geometry_array;
|
||||||
|
}
|
45
algorithms/polyline_formatter.hpp
Normal file
45
algorithms/polyline_formatter.hpp
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef POLYLINE_FORMATTER_HPP
|
||||||
|
#define POLYLINE_FORMATTER_HPP
|
||||||
|
|
||||||
|
struct SegmentInformation;
|
||||||
|
|
||||||
|
#include <osrm/json_container.hpp>
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
struct PolylineFormatter
|
||||||
|
{
|
||||||
|
osrm::json::String printEncodedString(const std::vector<SegmentInformation> &polyline) const;
|
||||||
|
|
||||||
|
osrm::json::Array printUnencodedString(const std::vector<SegmentInformation> &polyline) const;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* POLYLINE_FORMATTER_HPP */
|
162
algorithms/route_name_extraction.hpp
Normal file
162
algorithms/route_name_extraction.hpp
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef EXTRACT_ROUTE_NAMES_H
|
||||||
|
#define EXTRACT_ROUTE_NAMES_H
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
struct RouteNames
|
||||||
|
{
|
||||||
|
std::string shortest_path_name_1;
|
||||||
|
std::string shortest_path_name_2;
|
||||||
|
std::string alternative_path_name_1;
|
||||||
|
std::string alternative_path_name_2;
|
||||||
|
};
|
||||||
|
|
||||||
|
// construct routes names
|
||||||
|
template <class DataFacadeT, class SegmentT> struct ExtractRouteNames
|
||||||
|
{
|
||||||
|
private:
|
||||||
|
SegmentT PickNextLongestSegment(const std::vector<SegmentT> &segment_list,
|
||||||
|
const unsigned blocked_name_id) const
|
||||||
|
{
|
||||||
|
SegmentT result_segment;
|
||||||
|
result_segment.length = 0;
|
||||||
|
|
||||||
|
for (const SegmentT &segment : segment_list)
|
||||||
|
{
|
||||||
|
if (segment.name_id != blocked_name_id && segment.length > result_segment.length &&
|
||||||
|
segment.name_id != 0)
|
||||||
|
{
|
||||||
|
result_segment = segment;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result_segment;
|
||||||
|
}
|
||||||
|
|
||||||
|
public:
|
||||||
|
RouteNames operator()(std::vector<SegmentT> &shortest_path_segments,
|
||||||
|
std::vector<SegmentT> &alternative_path_segments,
|
||||||
|
const DataFacadeT *facade) const
|
||||||
|
{
|
||||||
|
RouteNames route_names;
|
||||||
|
|
||||||
|
SegmentT shortest_segment_1, shortest_segment_2;
|
||||||
|
SegmentT alternative_segment_1, alternative_segment_2;
|
||||||
|
|
||||||
|
auto length_comperator = [](const SegmentT &a, const SegmentT &b)
|
||||||
|
{
|
||||||
|
return a.length > b.length;
|
||||||
|
};
|
||||||
|
auto name_id_comperator = [](const SegmentT &a, const SegmentT &b)
|
||||||
|
{
|
||||||
|
return a.name_id < b.name_id;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (shortest_path_segments.empty())
|
||||||
|
{
|
||||||
|
return route_names;
|
||||||
|
}
|
||||||
|
|
||||||
|
// pick the longest segment for the shortest path.
|
||||||
|
std::sort(shortest_path_segments.begin(), shortest_path_segments.end(), length_comperator);
|
||||||
|
shortest_segment_1 = shortest_path_segments[0];
|
||||||
|
if (!alternative_path_segments.empty())
|
||||||
|
{
|
||||||
|
std::sort(alternative_path_segments.begin(), alternative_path_segments.end(),
|
||||||
|
length_comperator);
|
||||||
|
|
||||||
|
// also pick the longest segment for the alternative path
|
||||||
|
alternative_segment_1 = alternative_path_segments[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
// compute the set difference (for shortest path) depending on names between shortest and
|
||||||
|
// alternative
|
||||||
|
std::vector<SegmentT> shortest_path_set_difference(shortest_path_segments.size());
|
||||||
|
std::sort(shortest_path_segments.begin(), shortest_path_segments.end(), name_id_comperator);
|
||||||
|
std::sort(alternative_path_segments.begin(), alternative_path_segments.end(),
|
||||||
|
name_id_comperator);
|
||||||
|
std::set_difference(shortest_path_segments.begin(), shortest_path_segments.end(),
|
||||||
|
alternative_path_segments.begin(), alternative_path_segments.end(),
|
||||||
|
shortest_path_set_difference.begin(), name_id_comperator);
|
||||||
|
|
||||||
|
std::sort(shortest_path_set_difference.begin(), shortest_path_set_difference.end(),
|
||||||
|
length_comperator);
|
||||||
|
shortest_segment_2 =
|
||||||
|
PickNextLongestSegment(shortest_path_set_difference, shortest_segment_1.name_id);
|
||||||
|
|
||||||
|
// compute the set difference (for alternative path) depending on names between shortest and
|
||||||
|
// alternative
|
||||||
|
// vectors are still sorted, no need to do again
|
||||||
|
BOOST_ASSERT(std::is_sorted(shortest_path_segments.begin(), shortest_path_segments.end(),
|
||||||
|
name_id_comperator));
|
||||||
|
BOOST_ASSERT(std::is_sorted(alternative_path_segments.begin(),
|
||||||
|
alternative_path_segments.end(), name_id_comperator));
|
||||||
|
|
||||||
|
std::vector<SegmentT> alternative_path_set_difference(alternative_path_segments.size());
|
||||||
|
std::set_difference(alternative_path_segments.begin(), alternative_path_segments.end(),
|
||||||
|
shortest_path_segments.begin(), shortest_path_segments.end(),
|
||||||
|
alternative_path_set_difference.begin(), name_id_comperator);
|
||||||
|
|
||||||
|
std::sort(alternative_path_set_difference.begin(), alternative_path_set_difference.end(),
|
||||||
|
length_comperator);
|
||||||
|
|
||||||
|
if (!alternative_path_segments.empty())
|
||||||
|
{
|
||||||
|
alternative_segment_2 = PickNextLongestSegment(alternative_path_set_difference,
|
||||||
|
alternative_segment_1.name_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
// move the segments into the order in which they occur.
|
||||||
|
if (shortest_segment_1.position > shortest_segment_2.position)
|
||||||
|
{
|
||||||
|
std::swap(shortest_segment_1, shortest_segment_2);
|
||||||
|
}
|
||||||
|
if (alternative_segment_1.position > alternative_segment_2.position)
|
||||||
|
{
|
||||||
|
std::swap(alternative_segment_1, alternative_segment_2);
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetching names for the selected segments
|
||||||
|
route_names.shortest_path_name_1 = facade->get_name_for_id(shortest_segment_1.name_id);
|
||||||
|
route_names.shortest_path_name_2 = facade->get_name_for_id(shortest_segment_2.name_id);
|
||||||
|
|
||||||
|
route_names.alternative_path_name_1 =
|
||||||
|
facade->get_name_for_id(alternative_segment_1.name_id);
|
||||||
|
route_names.alternative_path_name_2 =
|
||||||
|
facade->get_name_for_id(alternative_segment_2.name_id);
|
||||||
|
|
||||||
|
return route_names;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // EXTRACT_ROUTE_NAMES_H
|
239
algorithms/tiny_components.hpp
Normal file
239
algorithms/tiny_components.hpp
Normal file
@ -0,0 +1,239 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef TINY_COMPONENTS_HPP
|
||||||
|
#define TINY_COMPONENTS_HPP
|
||||||
|
|
||||||
|
#include "../typedefs.h"
|
||||||
|
#include "../data_structures/deallocating_vector.hpp"
|
||||||
|
#include "../data_structures/import_edge.hpp"
|
||||||
|
#include "../data_structures/query_node.hpp"
|
||||||
|
#include "../data_structures/percent.hpp"
|
||||||
|
#include "../data_structures/restriction.hpp"
|
||||||
|
#include "../data_structures/restriction_map.hpp"
|
||||||
|
#include "../data_structures/turn_instructions.hpp"
|
||||||
|
|
||||||
|
#include "../util/integer_range.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
#include "../util/std_hash.hpp"
|
||||||
|
#include "../util/timing_util.hpp"
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <tbb/parallel_sort.h>
|
||||||
|
|
||||||
|
#include <cstdint>
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
#include <stack>
|
||||||
|
#include <unordered_map>
|
||||||
|
#include <unordered_set>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
template <typename GraphT> class TarjanSCC
|
||||||
|
{
|
||||||
|
struct TarjanStackFrame
|
||||||
|
{
|
||||||
|
explicit TarjanStackFrame(NodeID v, NodeID parent) : v(v), parent(parent) {}
|
||||||
|
NodeID v;
|
||||||
|
NodeID parent;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct TarjanNode
|
||||||
|
{
|
||||||
|
TarjanNode() : index(SPECIAL_NODEID), low_link(SPECIAL_NODEID), on_stack(false) {}
|
||||||
|
unsigned index;
|
||||||
|
unsigned low_link;
|
||||||
|
bool on_stack;
|
||||||
|
};
|
||||||
|
|
||||||
|
std::vector<unsigned> components_index;
|
||||||
|
std::vector<NodeID> component_size_vector;
|
||||||
|
std::shared_ptr<GraphT> m_node_based_graph;
|
||||||
|
std::unordered_set<NodeID> barrier_node_set;
|
||||||
|
RestrictionMap m_restriction_map;
|
||||||
|
std::size_t size_one_counter;
|
||||||
|
|
||||||
|
public:
|
||||||
|
template <class ContainerT>
|
||||||
|
TarjanSCC(std::shared_ptr<GraphT> graph,
|
||||||
|
const RestrictionMap &restrictions,
|
||||||
|
const ContainerT &barrier_node_list)
|
||||||
|
: components_index(graph->GetNumberOfNodes(), SPECIAL_NODEID), m_node_based_graph(graph),
|
||||||
|
m_restriction_map(restrictions), size_one_counter(0)
|
||||||
|
{
|
||||||
|
barrier_node_set.insert(std::begin(barrier_node_list), std::end(barrier_node_list));
|
||||||
|
BOOST_ASSERT(m_node_based_graph->GetNumberOfNodes() > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
void run()
|
||||||
|
{
|
||||||
|
TIMER_START(SCC_RUN);
|
||||||
|
const NodeID max_node_id = m_node_based_graph->GetNumberOfNodes();
|
||||||
|
|
||||||
|
// The following is a hack to distinguish between stuff that happens
|
||||||
|
// before the recursive call and stuff that happens after
|
||||||
|
std::stack<TarjanStackFrame> recursion_stack;
|
||||||
|
// true = stuff before, false = stuff after call
|
||||||
|
std::stack<NodeID> tarjan_stack;
|
||||||
|
std::vector<TarjanNode> tarjan_node_list(max_node_id);
|
||||||
|
unsigned component_index = 0, size_of_current_component = 0;
|
||||||
|
unsigned index = 0;
|
||||||
|
std::vector<bool> processing_node_before_recursion(max_node_id, true);
|
||||||
|
for (const NodeID node : osrm::irange(0u, max_node_id))
|
||||||
|
{
|
||||||
|
if (SPECIAL_NODEID == components_index[node])
|
||||||
|
{
|
||||||
|
recursion_stack.emplace(TarjanStackFrame(node, node));
|
||||||
|
}
|
||||||
|
|
||||||
|
while (!recursion_stack.empty())
|
||||||
|
{
|
||||||
|
TarjanStackFrame currentFrame = recursion_stack.top();
|
||||||
|
const NodeID u = currentFrame.parent;
|
||||||
|
const NodeID v = currentFrame.v;
|
||||||
|
recursion_stack.pop();
|
||||||
|
|
||||||
|
const bool before_recursion = processing_node_before_recursion[v];
|
||||||
|
|
||||||
|
if (before_recursion && tarjan_node_list[v].index != UINT_MAX)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (before_recursion)
|
||||||
|
{
|
||||||
|
// Mark frame to handle tail of recursion
|
||||||
|
recursion_stack.emplace(currentFrame);
|
||||||
|
processing_node_before_recursion[v] = false;
|
||||||
|
|
||||||
|
// Mark essential information for SCC
|
||||||
|
tarjan_node_list[v].index = index;
|
||||||
|
tarjan_node_list[v].low_link = index;
|
||||||
|
tarjan_stack.push(v);
|
||||||
|
tarjan_node_list[v].on_stack = true;
|
||||||
|
++index;
|
||||||
|
|
||||||
|
const NodeID to_node_of_only_restriction =
|
||||||
|
m_restriction_map.CheckForEmanatingIsOnlyTurn(u, v);
|
||||||
|
|
||||||
|
for (const auto current_edge : m_node_based_graph->GetAdjacentEdgeRange(v))
|
||||||
|
{
|
||||||
|
const auto vprime = m_node_based_graph->GetTarget(current_edge);
|
||||||
|
|
||||||
|
// Traverse outgoing edges
|
||||||
|
if (barrier_node_set.find(v) != barrier_node_set.end() && u != vprime)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (to_node_of_only_restriction != std::numeric_limits<unsigned>::max() &&
|
||||||
|
vprime == to_node_of_only_restriction)
|
||||||
|
{
|
||||||
|
// At an only_-restriction but not at the right turn
|
||||||
|
// continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (m_restriction_map.CheckIfTurnIsRestricted(u, v, vprime))
|
||||||
|
{
|
||||||
|
// continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SPECIAL_NODEID == tarjan_node_list[vprime].index)
|
||||||
|
{
|
||||||
|
recursion_stack.emplace(TarjanStackFrame(vprime, v));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (tarjan_node_list[vprime].on_stack &&
|
||||||
|
tarjan_node_list[vprime].index < tarjan_node_list[v].low_link)
|
||||||
|
{
|
||||||
|
tarjan_node_list[v].low_link = tarjan_node_list[vprime].index;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
processing_node_before_recursion[v] = true;
|
||||||
|
tarjan_node_list[currentFrame.parent].low_link =
|
||||||
|
std::min(tarjan_node_list[currentFrame.parent].low_link,
|
||||||
|
tarjan_node_list[v].low_link);
|
||||||
|
// after recursion, lets do cycle checking
|
||||||
|
// Check if we found a cycle. This is the bottom part of the recursion
|
||||||
|
if (tarjan_node_list[v].low_link == tarjan_node_list[v].index)
|
||||||
|
{
|
||||||
|
NodeID vprime;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
vprime = tarjan_stack.top();
|
||||||
|
tarjan_stack.pop();
|
||||||
|
tarjan_node_list[vprime].on_stack = false;
|
||||||
|
components_index[vprime] = component_index;
|
||||||
|
++size_of_current_component;
|
||||||
|
} while (v != vprime);
|
||||||
|
|
||||||
|
component_size_vector.emplace_back(size_of_current_component);
|
||||||
|
|
||||||
|
if (size_of_current_component > 1000)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "large component [" << component_index
|
||||||
|
<< "]=" << size_of_current_component;
|
||||||
|
}
|
||||||
|
|
||||||
|
++component_index;
|
||||||
|
size_of_current_component = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
TIMER_STOP(SCC_RUN);
|
||||||
|
SimpleLogger().Write() << "SCC run took: " << TIMER_MSEC(SCC_RUN) / 1000. << "s";
|
||||||
|
|
||||||
|
size_one_counter = std::count_if(component_size_vector.begin(), component_size_vector.end(),
|
||||||
|
[](unsigned value)
|
||||||
|
{
|
||||||
|
return 1 == value;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
std::size_t get_number_of_components() const { return component_size_vector.size(); }
|
||||||
|
|
||||||
|
std::size_t get_size_one_count() const { return size_one_counter; }
|
||||||
|
|
||||||
|
unsigned get_component_size(const NodeID node) const
|
||||||
|
{
|
||||||
|
return component_size_vector[components_index[node]];
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned get_component_id(const NodeID node) const { return components_index[node]; }
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* TINY_COMPONENTS_HPP */
|
113
appveyor-build.bat
Normal file
113
appveyor-build.bat
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
@ECHO OFF
|
||||||
|
SETLOCAL
|
||||||
|
SET EL=0
|
||||||
|
|
||||||
|
ECHO platform^: %platform%
|
||||||
|
:: HARDCODE "x64" as it is uppercase on AppVeyor and download from S3 is case sensitive
|
||||||
|
SET DEPSPKG=osrm-deps-win-x64-14.0.7z
|
||||||
|
|
||||||
|
:: local development
|
||||||
|
IF "%computername%"=="MB" GOTO SKIPDL
|
||||||
|
|
||||||
|
IF EXIST %DEPSPKG% DEL %DEPSPKG%
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
ECHO downloading %DEPSPKG%
|
||||||
|
powershell Invoke-WebRequest https://mapbox.s3.amazonaws.com/windows-builds/windows-deps/$env:DEPSPKG -OutFile C:\projects\osrm\$env:DEPSPKG
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
7z -y x %DEPSPKG% | %windir%\system32\FIND "ing archive"
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
:SKIPDL
|
||||||
|
|
||||||
|
IF EXIST build rd /s /q build
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
mkdir build
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
cd build
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
SET OSRMDEPSDIR=c:\projects\osrm\osrm-deps
|
||||||
|
set PREFIX=%OSRMDEPSDIR%/libs
|
||||||
|
set BOOST_ROOT=%OSRMDEPSDIR%/boost
|
||||||
|
set TBB_INSTALL_DIR=%OSRMDEPSDIR%/tbb
|
||||||
|
set TBB_ARCH_PLATFORM=intel64/vc14
|
||||||
|
|
||||||
|
ECHO calling cmake ....
|
||||||
|
cmake .. ^
|
||||||
|
-G "Visual Studio 14 Win64" ^
|
||||||
|
-DBOOST_ROOT=%BOOST_ROOT% ^
|
||||||
|
-DBoost_ADDITIONAL_VERSIONS=1.57 ^
|
||||||
|
-DBoost_USE_MULTITHREADED=ON ^
|
||||||
|
-DBoost_USE_STATIC_LIBS=ON ^
|
||||||
|
-DCMAKE_BUILD_TYPE=%CONFIGURATION% ^
|
||||||
|
-DCMAKE_INSTALL_PREFIX=%PREFIX%
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
ECHO building ...
|
||||||
|
msbuild OSRM.sln ^
|
||||||
|
/p:Configuration=%Configuration% ^
|
||||||
|
/p:Platform=x64 ^
|
||||||
|
/t:rebuild ^
|
||||||
|
/p:BuildInParallel=true ^
|
||||||
|
/m:%NUMBER_OF_PROCESSORS% ^
|
||||||
|
/toolsversion:14.0 ^
|
||||||
|
/p:PlatformToolset=v140 ^
|
||||||
|
/clp:Verbosity=normal ^
|
||||||
|
/nologo ^
|
||||||
|
/flp1:logfile=build_errors.txt;errorsonly ^
|
||||||
|
/flp2:logfile=build_warnings.txt;warningsonly
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
ECHO ========= TODO^: CREATE PACKAGES ==========
|
||||||
|
|
||||||
|
CD c:\projects\osrm\build\%Configuration%
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
SET PATH=c:\projects\osrm\osrm-deps\libs\bin;%PATH%
|
||||||
|
|
||||||
|
ECHO running datastructure-tests.exe ...
|
||||||
|
datastructure-tests.exe
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
ECHO running algorithm-tests.exe ...
|
||||||
|
algorithm-tests.exe
|
||||||
|
IF %ERRORLEVEL% NEQ 0 GOTO ERROR
|
||||||
|
|
||||||
|
GOTO DONE
|
||||||
|
|
||||||
|
:ERROR
|
||||||
|
SET EL=%ERRORLEVEL%
|
||||||
|
ECHO ============== ERROR ===============
|
||||||
|
|
||||||
|
:DONE
|
||||||
|
ECHO ============= DONE ===============
|
||||||
|
CD C:\projects\osrm
|
||||||
|
EXIT /b %EL%
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
- cd c:/projects/osrm
|
||||||
|
- mkdir build
|
||||||
|
- cd build
|
||||||
|
- echo Running cmake...
|
||||||
|
- call "%VS120COMNTOOLS%\..\..\VC\vcvarsall.bat" x86_amd64
|
||||||
|
- SET PATH=C:\Program Files (x86)\MSBuild\12.0\bin\;%PATH%
|
||||||
|
- SET P=c:/projects/osrm
|
||||||
|
- set TBB_INSTALL_DIR=%P%/tbb
|
||||||
|
- set TBB_ARCH_PLATFORM=intel64/vc12
|
||||||
|
- cmake .. -G "Visual Studio 14 Win64" -DCMAKE_BUILD_TYPE=%Configuration% -DCMAKE_INSTALL_PREFIX=%P%/libs -DBOOST_ROOT=%P%/boost_min -DBoost_ADDITIONAL_VERSIONS=1.57 -DBoost_USE_STATIC_LIBS=ON
|
||||||
|
- SET PLATFORM_TOOLSET=v140
|
||||||
|
- SET TOOLS_VERSION=14.0
|
||||||
|
- msbuild /p:Platform=x64 /clp:Verbosity=minimal /toolsversion:%TOOLS_VERSION% /p:PlatformToolset=%PLATFORM_TOOLSET% /nologo OSRM.sln
|
||||||
|
- msbuild /p:Platform=x64 /clp:Verbosity=minimal /toolsversion:%TOOLS_VERSION% /p:PlatformToolset=%PLATFORM_TOOLSET% /nologo tests.vcxproj
|
||||||
|
- cd %Configuration%
|
||||||
|
- if "%APPVEYOR_REPO_BRANCH%"=="develop" (7z a %P%/osrm_%Configuration%.zip *.exe *.pdb %P%/libs/bin/*.dll -tzip)
|
||||||
|
- cd ..\..\profiles
|
||||||
|
- echo disk=c:\temp\stxxl,10000,wincall > .stxxl.txt
|
||||||
|
- if "%APPVEYOR_REPO_BRANCH%"=="develop" (7z a %P%/osrm_%Configuration%.zip * -tzip)
|
||||||
|
- set PATH=%PATH%;c:/projects/osrm/libs/bin
|
||||||
|
- cd c:/projects/osrm/build/%Configuration%
|
||||||
|
- datastructure-tests.exe
|
||||||
|
- algorithm-tests.exe
|
48
appveyor.yml
Normal file
48
appveyor.yml
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
environment:
|
||||||
|
matrix:
|
||||||
|
- configuration: Release
|
||||||
|
# - configuration: Debug
|
||||||
|
|
||||||
|
# scripts that are called at very beginning, before repo cloning
|
||||||
|
init:
|
||||||
|
- git config --global core.autocrlf input
|
||||||
|
|
||||||
|
os: Visual Studio 2015 RC
|
||||||
|
|
||||||
|
# clone directory
|
||||||
|
clone_folder: c:\projects\osrm
|
||||||
|
|
||||||
|
platform: x64
|
||||||
|
|
||||||
|
install:
|
||||||
|
- set PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH%
|
||||||
|
- CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
|
||||||
|
|
||||||
|
build_script:
|
||||||
|
- CALL appveyor-build.bat
|
||||||
|
|
||||||
|
test: off
|
||||||
|
|
||||||
|
artifacts:
|
||||||
|
- path: osrm_Release.zip
|
||||||
|
name: osrm_Release.zip
|
||||||
|
# - path: osrm_Debug.zip
|
||||||
|
# name: osrm_Debug.zip
|
||||||
|
|
||||||
|
deploy:
|
||||||
|
provider: FTP
|
||||||
|
server:
|
||||||
|
secure: ef7oiQTTXFGt8NdNiOHm/uRFVrUttzyFbIlnaeHhQvw=
|
||||||
|
username:
|
||||||
|
secure: Bw+Se2GTJxA6+GtRkEc//tQSBHOuFIuJHBjFwR9cD+8=
|
||||||
|
password:
|
||||||
|
secure: eqwESZqxMXC/j5mOCpaXuw==
|
||||||
|
folder: /
|
||||||
|
enable_ssl: true
|
||||||
|
active_mode: false
|
||||||
|
|
||||||
|
# notifications:
|
||||||
|
# - provider: HipChat
|
||||||
|
# auth_token:
|
||||||
|
# secure: boLE7BjcahdIUxv9jkN7U3F8iOASF+MkhtctlVoWJoo=
|
||||||
|
# room: Directions
|
184
benchmarks/static_rtree.cpp
Normal file
184
benchmarks/static_rtree.cpp
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "../data_structures/original_edge_data.hpp"
|
||||||
|
#include "../data_structures/query_node.hpp"
|
||||||
|
#include "../data_structures/shared_memory_vector_wrapper.hpp"
|
||||||
|
#include "../data_structures/static_rtree.hpp"
|
||||||
|
#include "../util/boost_filesystem_2_fix.hpp"
|
||||||
|
#include "../data_structures/edge_based_node.hpp"
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <random>
|
||||||
|
|
||||||
|
// Choosen by a fair W20 dice roll (this value is completely arbitrary)
|
||||||
|
constexpr unsigned RANDOM_SEED = 13;
|
||||||
|
constexpr int32_t WORLD_MIN_LAT = -90 * COORDINATE_PRECISION;
|
||||||
|
constexpr int32_t WORLD_MAX_LAT = 90 * COORDINATE_PRECISION;
|
||||||
|
constexpr int32_t WORLD_MIN_LON = -180 * COORDINATE_PRECISION;
|
||||||
|
constexpr int32_t WORLD_MAX_LON = 180 * COORDINATE_PRECISION;
|
||||||
|
|
||||||
|
using RTreeLeaf = EdgeBasedNode;
|
||||||
|
using FixedPointCoordinateListPtr = std::shared_ptr<std::vector<FixedPointCoordinate>>;
|
||||||
|
using BenchStaticRTree = StaticRTree<RTreeLeaf, ShM<FixedPointCoordinate, false>::vector, false>;
|
||||||
|
|
||||||
|
FixedPointCoordinateListPtr LoadCoordinates(const boost::filesystem::path &nodes_file)
|
||||||
|
{
|
||||||
|
boost::filesystem::ifstream nodes_input_stream(nodes_file, std::ios::binary);
|
||||||
|
|
||||||
|
QueryNode current_node;
|
||||||
|
unsigned coordinate_count = 0;
|
||||||
|
nodes_input_stream.read((char *)&coordinate_count, sizeof(unsigned));
|
||||||
|
auto coords = std::make_shared<std::vector<FixedPointCoordinate>>(coordinate_count);
|
||||||
|
for (unsigned i = 0; i < coordinate_count; ++i)
|
||||||
|
{
|
||||||
|
nodes_input_stream.read((char *)¤t_node, sizeof(QueryNode));
|
||||||
|
coords->at(i) = FixedPointCoordinate(current_node.lat, current_node.lon);
|
||||||
|
BOOST_ASSERT((std::abs(coords->at(i).lat) >> 30) == 0);
|
||||||
|
BOOST_ASSERT((std::abs(coords->at(i).lon) >> 30) == 0);
|
||||||
|
}
|
||||||
|
nodes_input_stream.close();
|
||||||
|
return coords;
|
||||||
|
}
|
||||||
|
|
||||||
|
void Benchmark(BenchStaticRTree &rtree, unsigned num_queries)
|
||||||
|
{
|
||||||
|
std::mt19937 mt_rand(RANDOM_SEED);
|
||||||
|
std::uniform_int_distribution<> lat_udist(WORLD_MIN_LAT, WORLD_MAX_LAT);
|
||||||
|
std::uniform_int_distribution<> lon_udist(WORLD_MIN_LON, WORLD_MAX_LON);
|
||||||
|
std::vector<FixedPointCoordinate> queries;
|
||||||
|
for (unsigned i = 0; i < num_queries; i++)
|
||||||
|
{
|
||||||
|
queries.emplace_back(FixedPointCoordinate(lat_udist(mt_rand), lon_udist(mt_rand)));
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const unsigned num_results = 5;
|
||||||
|
std::cout << "#### IncrementalFindPhantomNodeForCoordinate : " << num_results
|
||||||
|
<< " phantom nodes"
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
TIMER_START(query_phantom);
|
||||||
|
std::vector<PhantomNode> phantom_node_vector;
|
||||||
|
for (const auto &q : queries)
|
||||||
|
{
|
||||||
|
phantom_node_vector.clear();
|
||||||
|
rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 3, num_results);
|
||||||
|
phantom_node_vector.clear();
|
||||||
|
rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 17, num_results);
|
||||||
|
}
|
||||||
|
TIMER_STOP(query_phantom);
|
||||||
|
|
||||||
|
std::cout << "Took " << TIMER_MSEC(query_phantom) << " msec for " << num_queries
|
||||||
|
<< " queries."
|
||||||
|
<< "\n";
|
||||||
|
std::cout << TIMER_MSEC(query_phantom) / ((double)num_queries) << " msec/query."
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
std::cout << "#### LocateClosestEndPointForCoordinate"
|
||||||
|
<< "\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
TIMER_START(query_endpoint);
|
||||||
|
FixedPointCoordinate result;
|
||||||
|
for (const auto &q : queries)
|
||||||
|
{
|
||||||
|
rtree.LocateClosestEndPointForCoordinate(q, result, 3);
|
||||||
|
}
|
||||||
|
TIMER_STOP(query_endpoint);
|
||||||
|
|
||||||
|
std::cout << "Took " << TIMER_MSEC(query_endpoint) << " msec for " << num_queries << " queries."
|
||||||
|
<< "\n";
|
||||||
|
std::cout << TIMER_MSEC(query_endpoint) / ((double)num_queries) << " msec/query."
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
std::cout << "#### FindPhantomNodeForCoordinate"
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
TIMER_START(query_node);
|
||||||
|
for (const auto &q : queries)
|
||||||
|
{
|
||||||
|
PhantomNode phantom;
|
||||||
|
rtree.FindPhantomNodeForCoordinate(q, phantom, 3);
|
||||||
|
}
|
||||||
|
TIMER_STOP(query_node);
|
||||||
|
|
||||||
|
std::cout << "Took " << TIMER_MSEC(query_node) << " msec for " << num_queries << " queries."
|
||||||
|
<< "\n";
|
||||||
|
std::cout << TIMER_MSEC(query_node) / ((double)num_queries) << " msec/query."
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
{
|
||||||
|
const unsigned num_results = 1;
|
||||||
|
std::cout << "#### IncrementalFindPhantomNodeForCoordinate : " << num_results
|
||||||
|
<< " phantom nodes"
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
TIMER_START(query_phantom);
|
||||||
|
std::vector<PhantomNode> phantom_node_vector;
|
||||||
|
for (const auto &q : queries)
|
||||||
|
{
|
||||||
|
phantom_node_vector.clear();
|
||||||
|
rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 3, num_results);
|
||||||
|
phantom_node_vector.clear();
|
||||||
|
rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 17, num_results);
|
||||||
|
}
|
||||||
|
TIMER_STOP(query_phantom);
|
||||||
|
|
||||||
|
std::cout << "Took " << TIMER_MSEC(query_phantom) << " msec for " << num_queries
|
||||||
|
<< " queries."
|
||||||
|
<< "\n";
|
||||||
|
std::cout << TIMER_MSEC(query_phantom) / ((double)num_queries) << " msec/query."
|
||||||
|
<< "\n";
|
||||||
|
|
||||||
|
std::cout << "#### LocateClosestEndPointForCoordinate"
|
||||||
|
<< "\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
int main(int argc, char **argv)
|
||||||
|
{
|
||||||
|
if (argc < 4)
|
||||||
|
{
|
||||||
|
std::cout << "./rtree-bench file.ramIndex file.fileIndx file.nodes"
|
||||||
|
<< "\n";
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const char *ramPath = argv[1];
|
||||||
|
const char *filePath = argv[2];
|
||||||
|
const char *nodesPath = argv[3];
|
||||||
|
|
||||||
|
auto coords = LoadCoordinates(nodesPath);
|
||||||
|
|
||||||
|
BenchStaticRTree rtree(ramPath, filePath, coords);
|
||||||
|
|
||||||
|
Benchmark(rtree, 10000);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
19
build-local.bat
Normal file
19
build-local.bat
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
@ECHO OFF
|
||||||
|
|
||||||
|
SET PLATFORM=x64
|
||||||
|
SET CONFIGURATION=Release
|
||||||
|
|
||||||
|
WHERE msbuild
|
||||||
|
IF %ERRORLEVEL% EQU 0 GOTO RUNBUILD
|
||||||
|
|
||||||
|
SET PATH=C:\mb\windows-builds-64\tmp-bin\cmake-3.1.0-win32-x86\bin;%PATH%
|
||||||
|
SET PATH=C:\Program Files\7-Zip;%PATH%
|
||||||
|
ECHO activating VS command prompt ...
|
||||||
|
SET PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH%
|
||||||
|
CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
|
||||||
|
|
||||||
|
:RUNBUILD
|
||||||
|
|
||||||
|
powershell Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted -Force
|
||||||
|
CALL appveyor-build.bat
|
||||||
|
EXIT /b %ERRORLEVEL%
|
@ -1,44 +0,0 @@
|
|||||||
IF(NOT CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
|
||||||
MESSAGE(FATAL_ERROR "Cannot configure CPack to generate Debian/RPM packages on non-linux systems.")
|
|
||||||
ENDIF()
|
|
||||||
string(TOLOWER "${CMAKE_PROJECT_NAME}" CPACK_PACKAGE_NAME)
|
|
||||||
SET(CPACK_PACKAGE_VERSION_MAJOR ${OSRM_VERSION_MAJOR})
|
|
||||||
SET(CPACK_PACKAGE_VERSION_MINOR ${OSRM_VERSION_MINOR})
|
|
||||||
SET(CPACK_PACKAGE_VERSION_PATCH ${OSRM_VERSION_PATCH})
|
|
||||||
SET(CPACK_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}")
|
|
||||||
|
|
||||||
SET(CPACK_INCLUDE_TOPLEVEL_DIRECTORY "FALSE")
|
|
||||||
SET(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_SOURCE_DIR}/README.md")
|
|
||||||
SET(CPACK_PACKAGE_DESCRIPTION_SUMMARY "Open Source Routing Machine (OSRM) is a high-performance routing engine. It combines sophisticated routing algorithms with the open and free data of the OpenStreetMap.")
|
|
||||||
SET(CPACK_PACKAGE_CONTACT "Project OSRM <info@project-osrm.org>")
|
|
||||||
SET(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_SOURCE_DIR}/LICENSE.TXT")
|
|
||||||
|
|
||||||
SET(CPACK_STRIP_FILES "TRUE")
|
|
||||||
file(GLOB_RECURSE ProfileGlob ${CMAKE_SOURCE_DIR}/profiles/*)
|
|
||||||
install(FILES ${ProfileGlob} DESTINATION "share/doc/${CPACK_PACKAGE_NAME}/profiles")
|
|
||||||
|
|
||||||
find_program(DPKG_PROGRAM dpkg DOC "dpkg program of Debian-based systems")
|
|
||||||
if(DPKG_PROGRAM)
|
|
||||||
SET(CPACK_GENERATOR "DEB")
|
|
||||||
execute_process(
|
|
||||||
COMMAND ${DPKG_PROGRAM} --print-architecture
|
|
||||||
OUTPUT_VARIABLE CPACK_DEBIAN_PACKAGE_ARCHITECTURE
|
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
|
||||||
)
|
|
||||||
SET(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}_${CPACK_PACKAGE_VERSION}_${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
|
|
||||||
SET(CPACK_DEBIAN_PACKAGE_SHLIBDEPS "ON")
|
|
||||||
else(DPKG_PROGRAM)
|
|
||||||
find_program(RPM_PROGRAM rpm DOC "rpm RPM-based systems")
|
|
||||||
find_program(RPMBUILD_PROGRAM rpm DOC "rpm RPM-based systems")
|
|
||||||
if(RPMBUILD_PROGRAM)
|
|
||||||
SET(CPACK_GENERATOR "RPM")
|
|
||||||
execute_process(
|
|
||||||
COMMAND ${RPM_PROGRAM} --eval %{_arch}
|
|
||||||
OUTPUT_VARIABLE CPACK_RPM_PACKAGE_ARCHITECTURE
|
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
|
||||||
)
|
|
||||||
SET(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}_${CPACK_PACKAGE_VERSION}.${CPACK_RPM_PACKAGE_ARCHITECTURE}")
|
|
||||||
# Exclude /usr/lib64/pkgconfig directory given that it is already owned by the pkg-config rpm package.
|
|
||||||
SET(CPACK_RPM_EXCLUDE_FROM_AUTO_FILELIST_ADDITION "/usr/${CMAKE_INSTALL_LIBDIR}/pkgconfig")
|
|
||||||
endif(RPMBUILD_PROGRAM)
|
|
||||||
endif(DPKG_PROGRAM)
|
|
44
cmake/CPackDebianConfig.cmake
Normal file
44
cmake/CPackDebianConfig.cmake
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
IF(NOT CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||||
|
MESSAGE(FATAL_ERROR "Cannot configure CPack to generate Debian packages on non-linux systems.")
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
INCLUDE(FindDebArch)
|
||||||
|
|
||||||
|
SET(CPACK_RESOURCE_FILE_README "${CMAKE_SOURCE_DIR}/README.md")
|
||||||
|
SET(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_SOURCE_DIR}/LICENCE.TXT")
|
||||||
|
SET(CPACK_PACKAGE_DESCRIPTION_FILE "${CPACK_RESOURCE_FILE_README}")
|
||||||
|
SET(CPACK_PACKAGE_VERSION_MAJOR "0")
|
||||||
|
SET(CPACK_PACKAGE_VERSION_MINOR "4")
|
||||||
|
SET(CPACK_PACKAGE_VERSION_PATCH "3")
|
||||||
|
|
||||||
|
SET(CPACK_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}")
|
||||||
|
|
||||||
|
string(TOLOWER "${CMAKE_PROJECT_NAME}" LOWER_PROJECT_NAME)
|
||||||
|
SET(CPACK_PACKAGE_FILE_NAME "${LOWER_PROJECT_NAME}_${CPACK_PACKAGE_VERSION}_${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
|
||||||
|
SET(CPACK_SOURCE_PACKAGE_FILE_NAME "${LOWER_PROJECT_NAME}_${CPACK_PACKAGE_VERSION}_orig")
|
||||||
|
SET(CPACK_PACKAGE_DESCRIPTION_SUMMARY "Open Source Routing Machine (OSRM).")
|
||||||
|
SET(CPACK_PACKAGE_DESCRIPTION "Open Source Routing Machine (OSRM) is a routing engine.")
|
||||||
|
|
||||||
|
# To create a proper Debian/Ubuntu package, the following CMake
|
||||||
|
# options should be used:
|
||||||
|
|
||||||
|
SET(CPACK_STRIP_FILES "TRUE")
|
||||||
|
SET(CPACK_INCLUDE_TOPLEVEL_DIRECTORY "FALSE")
|
||||||
|
SET(CPACK_GENERATOR "DEB")
|
||||||
|
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_NAME "${CPACK_PACKAGE_NAME}${VERSION_SUFFIX}")
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}${CPACK_PACKAGE_REVISION}")
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_MAINTAINER "Dennis Luxen <info@project-osrm.org>")
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_PRIORITY "optional")
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_SECTION "devel")
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_DESCRIPTION "Open Source Routing Machine (OSRM) is a high-performance routing engine.
|
||||||
|
It combines sophisticated routing algorithms with the open and free data of the OpenStreetMap."
|
||||||
|
)
|
||||||
|
SET(CPACK_DEBIAN_PACKAGE_DEPENDS "libc6-dev, libprotobuf-dev, libosmpbf-dev, libbz2-1.0, libstxxl1, libxml2, libzip2, liblua5.1-0, libtbb2, libboost-all-dev")
|
||||||
|
|
||||||
|
file(GLOB_RECURSE ProfileGlob ${CMAKE_SOURCE_DIR}/profiles/*)
|
||||||
|
install(FILES ${ProfileGlob} DESTINATION "share/doc/${LOWER_PROJECT_NAME}/profiles")
|
||||||
|
CONFIGURE_FILE (${CMAKE_SOURCE_DIR}/cmake/postinst.in postinst)
|
||||||
|
set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA "${CMAKE_CURRENT_BINARY_DIR}/postinst;${CMAKE_CURRENT_BINARY_DIR}/copyright;")
|
||||||
|
|
||||||
|
MESSAGE(STATUS "Debian Package: ${CPACK_DEBIAN_PACKAGE_NAME} (${CPACK_DEBIAN_PACKAGE_VERSION}) [${CPACK_PACKAGE_FILE_NAME}.deb]")
|
29
cmake/CheckCXXCompilerFlag.cmake
Normal file
29
cmake/CheckCXXCompilerFlag.cmake
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
# - Check whether the CXX compiler supports a given flag.
|
||||||
|
# CHECK_CXX_COMPILER_FLAG(<flag> <var>)
|
||||||
|
# <flag> - the compiler flag
|
||||||
|
# <var> - variable to store the result
|
||||||
|
# This internally calls the check_cxx_source_compiles macro. See help
|
||||||
|
# for CheckCXXSourceCompiles for a listing of variables that can
|
||||||
|
# modify the build.
|
||||||
|
|
||||||
|
# Copyright (c) 2006, Alexander Neundorf, <neundorf@kde.org>
|
||||||
|
#
|
||||||
|
# Redistribution and use is allowed according to the terms of the BSD license.
|
||||||
|
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
|
||||||
|
|
||||||
|
|
||||||
|
INCLUDE(CheckCXXSourceCompiles)
|
||||||
|
|
||||||
|
MACRO (CHECK_CXX_COMPILER_FLAG _FLAG _RESULT)
|
||||||
|
SET(SAFE_CMAKE_REQUIRED_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS}")
|
||||||
|
SET(CMAKE_REQUIRED_DEFINITIONS "${_FLAG}")
|
||||||
|
CHECK_CXX_SOURCE_COMPILES("int main() { return 0;}" ${_RESULT}
|
||||||
|
# Some compilers do not fail with a bad flag
|
||||||
|
FAIL_REGEX "unrecognized .*option" # GNU
|
||||||
|
FAIL_REGEX "ignoring unknown option" # MSVC
|
||||||
|
FAIL_REGEX "[Uu]nknown option" # HP
|
||||||
|
FAIL_REGEX "[Ww]arning: [Oo]ption" # SunPro
|
||||||
|
FAIL_REGEX "command option .* is not recognized" # XL
|
||||||
|
)
|
||||||
|
SET (CMAKE_REQUIRED_DEFINITIONS "${SAFE_CMAKE_REQUIRED_DEFINITIONS}")
|
||||||
|
ENDMACRO (CHECK_CXX_COMPILER_FLAG)
|
@ -1,205 +0,0 @@
|
|||||||
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
|
|
||||||
# file Copyright.txt or https://cmake.org/licensing for details.
|
|
||||||
|
|
||||||
#.rst:
|
|
||||||
# FindLua
|
|
||||||
# -------
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# Locate Lua library This module defines
|
|
||||||
#
|
|
||||||
# ::
|
|
||||||
#
|
|
||||||
# LUA_FOUND - if false, do not try to link to Lua
|
|
||||||
# LUA_LIBRARIES - both lua and lualib
|
|
||||||
# LUA_INCLUDE_DIR - where to find lua.h
|
|
||||||
# LUA_VERSION_STRING - the version of Lua found
|
|
||||||
# LUA_VERSION_MAJOR - the major version of Lua
|
|
||||||
# LUA_VERSION_MINOR - the minor version of Lua
|
|
||||||
# LUA_VERSION_PATCH - the patch version of Lua
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# Note that the expected include convention is
|
|
||||||
#
|
|
||||||
# ::
|
|
||||||
#
|
|
||||||
# #include "lua.h"
|
|
||||||
#
|
|
||||||
# and not
|
|
||||||
#
|
|
||||||
# ::
|
|
||||||
#
|
|
||||||
# #include <lua/lua.h>
|
|
||||||
#
|
|
||||||
# This is because, the lua location is not standardized and may exist in
|
|
||||||
# locations other than lua/
|
|
||||||
|
|
||||||
if(NOT PKG_CONFIG_FOUND)
|
|
||||||
include(CMakeFindDependencyMacro)
|
|
||||||
find_dependency(PkgConfig)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
unset(_lua_include_subdirs)
|
|
||||||
unset(_lua_library_names)
|
|
||||||
unset(_lua_append_versions)
|
|
||||||
|
|
||||||
# this is a function only to have all the variables inside go away automatically
|
|
||||||
function(_lua_set_version_vars)
|
|
||||||
set(LUA_VERSIONS5 5.4 5.3 5.2 5.1 5.0)
|
|
||||||
|
|
||||||
if (Lua_FIND_VERSION_EXACT)
|
|
||||||
if (Lua_FIND_VERSION_COUNT GREATER 1)
|
|
||||||
set(_lua_append_versions ${Lua_FIND_VERSION_MAJOR}.${Lua_FIND_VERSION_MINOR})
|
|
||||||
endif ()
|
|
||||||
elseif (Lua_FIND_VERSION)
|
|
||||||
# once there is a different major version supported this should become a loop
|
|
||||||
if (NOT Lua_FIND_VERSION_MAJOR GREATER 5)
|
|
||||||
if (Lua_FIND_VERSION_COUNT EQUAL 1)
|
|
||||||
set(_lua_append_versions ${LUA_VERSIONS5})
|
|
||||||
else ()
|
|
||||||
foreach (subver IN LISTS LUA_VERSIONS5)
|
|
||||||
if (NOT subver VERSION_LESS ${Lua_FIND_VERSION})
|
|
||||||
list(APPEND _lua_append_versions ${subver})
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
else ()
|
|
||||||
# once there is a different major version supported this should become a loop
|
|
||||||
set(_lua_append_versions ${LUA_VERSIONS5})
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
list(APPEND _lua_include_subdirs "include/lua" "include")
|
|
||||||
|
|
||||||
foreach (ver IN LISTS _lua_append_versions)
|
|
||||||
string(REGEX MATCH "^([0-9]+)\\.([0-9]+)$" _ver "${ver}")
|
|
||||||
list(APPEND _lua_include_subdirs
|
|
||||||
include/lua${CMAKE_MATCH_1}${CMAKE_MATCH_2}
|
|
||||||
include/lua${CMAKE_MATCH_1}.${CMAKE_MATCH_2}
|
|
||||||
include/lua-${CMAKE_MATCH_1}.${CMAKE_MATCH_2}
|
|
||||||
)
|
|
||||||
list(APPEND _lua_library_names
|
|
||||||
lua${CMAKE_MATCH_1}${CMAKE_MATCH_2}
|
|
||||||
lua${CMAKE_MATCH_1}.${CMAKE_MATCH_2}
|
|
||||||
lua-${CMAKE_MATCH_1}.${CMAKE_MATCH_2}
|
|
||||||
lua.${CMAKE_MATCH_1}.${CMAKE_MATCH_2}
|
|
||||||
)
|
|
||||||
pkg_check_modules(LUA QUIET "lua${ver}")
|
|
||||||
list(APPEND _lua_include_subdirs ${LUA_INCLUDE_DIRS})
|
|
||||||
list(APPEND _lua_library_names ${LUA_LIBRARIES})
|
|
||||||
list(APPEND _lua_library_dirs ${LUA_LIBRARY_DIRS})
|
|
||||||
endforeach ()
|
|
||||||
|
|
||||||
set(_lua_include_subdirs "${_lua_include_subdirs}" PARENT_SCOPE)
|
|
||||||
set(_lua_library_names "${_lua_library_names}" PARENT_SCOPE)
|
|
||||||
set(_lua_append_versions "${_lua_append_versions}" PARENT_SCOPE)
|
|
||||||
set(_lua_library_dirs "${_lua_library_dirs}" PARENT_SCOPE)
|
|
||||||
endfunction(_lua_set_version_vars)
|
|
||||||
|
|
||||||
function(_lua_check_header_version _hdr_file)
|
|
||||||
# At least 5.[012] have different ways to express the version
|
|
||||||
# so all of them need to be tested. Lua 5.2 defines LUA_VERSION
|
|
||||||
# and LUA_RELEASE as joined by the C preprocessor, so avoid those.
|
|
||||||
file(STRINGS "${_hdr_file}" lua_version_strings
|
|
||||||
REGEX "^#define[ \t]+LUA_(RELEASE[ \t]+\"Lua [0-9]|VERSION([ \t]+\"Lua [0-9]|_[MR])).*")
|
|
||||||
|
|
||||||
string(REGEX REPLACE ".*;#define[ \t]+LUA_VERSION_MAJOR[ \t]+\"([0-9])\"[ \t]*;.*" "\\1" LUA_VERSION_MAJOR ";${lua_version_strings};")
|
|
||||||
if (LUA_VERSION_MAJOR MATCHES "^[0-9]+$")
|
|
||||||
string(REGEX REPLACE ".*;#define[ \t]+LUA_VERSION_MINOR[ \t]+\"([0-9])\"[ \t]*;.*" "\\1" LUA_VERSION_MINOR ";${lua_version_strings};")
|
|
||||||
string(REGEX REPLACE ".*;#define[ \t]+LUA_VERSION_RELEASE[ \t]+\"([0-9])\"[ \t]*;.*" "\\1" LUA_VERSION_PATCH ";${lua_version_strings};")
|
|
||||||
set(LUA_VERSION_STRING "${LUA_VERSION_MAJOR}.${LUA_VERSION_MINOR}.${LUA_VERSION_PATCH}")
|
|
||||||
else ()
|
|
||||||
string(REGEX REPLACE ".*;#define[ \t]+LUA_RELEASE[ \t]+\"Lua ([0-9.]+)\"[ \t]*;.*" "\\1" LUA_VERSION_STRING ";${lua_version_strings};")
|
|
||||||
if (NOT LUA_VERSION_STRING MATCHES "^[0-9.]+$")
|
|
||||||
string(REGEX REPLACE ".*;#define[ \t]+LUA_VERSION[ \t]+\"Lua ([0-9.]+)\"[ \t]*;.*" "\\1" LUA_VERSION_STRING ";${lua_version_strings};")
|
|
||||||
endif ()
|
|
||||||
string(REGEX REPLACE "^([0-9]+)\\.[0-9.]*$" "\\1" LUA_VERSION_MAJOR "${LUA_VERSION_STRING}")
|
|
||||||
string(REGEX REPLACE "^[0-9]+\\.([0-9]+)[0-9.]*$" "\\1" LUA_VERSION_MINOR "${LUA_VERSION_STRING}")
|
|
||||||
string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.([0-9]).*" "\\1" LUA_VERSION_PATCH "${LUA_VERSION_STRING}")
|
|
||||||
endif ()
|
|
||||||
foreach (ver IN LISTS _lua_append_versions)
|
|
||||||
if (ver STREQUAL "${LUA_VERSION_MAJOR}.${LUA_VERSION_MINOR}")
|
|
||||||
set(LUA_VERSION_MAJOR ${LUA_VERSION_MAJOR} PARENT_SCOPE)
|
|
||||||
set(LUA_VERSION_MINOR ${LUA_VERSION_MINOR} PARENT_SCOPE)
|
|
||||||
set(LUA_VERSION_PATCH ${LUA_VERSION_PATCH} PARENT_SCOPE)
|
|
||||||
set(LUA_VERSION_STRING ${LUA_VERSION_STRING} PARENT_SCOPE)
|
|
||||||
return()
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
endfunction(_lua_check_header_version)
|
|
||||||
|
|
||||||
_lua_set_version_vars()
|
|
||||||
|
|
||||||
if (LUA_INCLUDE_DIR AND EXISTS "${LUA_INCLUDE_DIR}/lua.h")
|
|
||||||
_lua_check_header_version("${LUA_INCLUDE_DIR}/lua.h")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (NOT LUA_VERSION_STRING)
|
|
||||||
foreach (subdir IN LISTS _lua_include_subdirs)
|
|
||||||
unset(LUA_INCLUDE_PREFIX CACHE)
|
|
||||||
find_path(LUA_INCLUDE_PREFIX ${subdir}/lua.h
|
|
||||||
HINTS
|
|
||||||
ENV LUA_DIR
|
|
||||||
PATHS
|
|
||||||
~/Library/Frameworks
|
|
||||||
/Library/Frameworks
|
|
||||||
/sw # Fink
|
|
||||||
/opt/local # DarwinPorts
|
|
||||||
/opt/csw # Blastwave
|
|
||||||
/opt
|
|
||||||
)
|
|
||||||
if (LUA_INCLUDE_PREFIX)
|
|
||||||
_lua_check_header_version("${LUA_INCLUDE_PREFIX}/${subdir}/lua.h")
|
|
||||||
if (LUA_VERSION_STRING)
|
|
||||||
set(LUA_INCLUDE_DIR "${LUA_INCLUDE_PREFIX}/${subdir}")
|
|
||||||
break()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
endif ()
|
|
||||||
unset(_lua_include_subdirs)
|
|
||||||
unset(_lua_append_versions)
|
|
||||||
|
|
||||||
find_library(LUA_LIBRARY
|
|
||||||
NAMES ${_lua_library_names} lua
|
|
||||||
HINTS
|
|
||||||
ENV LUA_DIR
|
|
||||||
PATH_SUFFIXES lib
|
|
||||||
PATHS
|
|
||||||
${_lua_library_dirs}
|
|
||||||
~/Library/Frameworks
|
|
||||||
/Library/Frameworks
|
|
||||||
/sw
|
|
||||||
/opt/local
|
|
||||||
/opt/csw
|
|
||||||
/opt
|
|
||||||
)
|
|
||||||
unset(_lua_library_names)
|
|
||||||
|
|
||||||
if (LUA_LIBRARY)
|
|
||||||
# include the math library for Unix
|
|
||||||
if (UNIX AND NOT APPLE AND NOT BEOS)
|
|
||||||
find_library(LUA_MATH_LIBRARY m)
|
|
||||||
set(LUA_LIBRARIES "${LUA_LIBRARY};${LUA_MATH_LIBRARY}")
|
|
||||||
|
|
||||||
# include dl library for statically-linked Lua library
|
|
||||||
get_filename_component(LUA_LIB_EXT ${LUA_LIBRARY} EXT)
|
|
||||||
if(LUA_LIB_EXT STREQUAL CMAKE_STATIC_LIBRARY_SUFFIX)
|
|
||||||
list(APPEND LUA_LIBRARIES ${CMAKE_DL_LIBS})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# For Windows and Mac, don't need to explicitly include the math library
|
|
||||||
else ()
|
|
||||||
set(LUA_LIBRARIES "${LUA_LIBRARY}")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
include(FindPackageHandleStandardArgs)
|
|
||||||
# handle the QUIETLY and REQUIRED arguments and set LUA_FOUND to TRUE if
|
|
||||||
# all listed variables are TRUE
|
|
||||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Lua
|
|
||||||
REQUIRED_VARS LUA_LIBRARIES LUA_INCLUDE_DIR
|
|
||||||
VERSION_VAR LUA_VERSION_STRING)
|
|
||||||
|
|
||||||
mark_as_advanced(LUA_INCLUDE_DIR LUA_LIBRARY LUA_MATH_LIBRARY)
|
|
82
cmake/FindLua52.cmake
Normal file
82
cmake/FindLua52.cmake
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
# Locate Lua library
|
||||||
|
# This module defines
|
||||||
|
# LUA52_FOUND, if false, do not try to link to Lua
|
||||||
|
# LUA_LIBRARIES
|
||||||
|
# LUA_INCLUDE_DIR, where to find lua.h
|
||||||
|
# LUA_VERSION_STRING, the version of Lua found (since CMake 2.8.8)
|
||||||
|
#
|
||||||
|
# Note that the expected include convention is
|
||||||
|
# #include "lua.h"
|
||||||
|
# and not
|
||||||
|
# #include <lua/lua.h>
|
||||||
|
# This is because, the lua location is not standardized and may exist
|
||||||
|
# in locations other than lua/
|
||||||
|
|
||||||
|
#=============================================================================
|
||||||
|
# Copyright 2007-2009 Kitware, Inc.
|
||||||
|
# Copyright 2013 for Project-OSRM, Lua5.1 => Lua5.2
|
||||||
|
#
|
||||||
|
# Distributed under the OSI-approved BSD License (the "License");
|
||||||
|
# see accompanying file Copyright.txt for details.
|
||||||
|
#
|
||||||
|
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||||
|
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||||
|
# See the License for more information.
|
||||||
|
#=============================================================================
|
||||||
|
# (To distribute this file outside of CMake, substitute the full
|
||||||
|
# License text for the above reference.)
|
||||||
|
|
||||||
|
find_path(LUA_INCLUDE_DIR lua.h
|
||||||
|
HINTS
|
||||||
|
ENV LUA_DIR
|
||||||
|
PATH_SUFFIXES include/lua52 include/lua5.2 include/lua-5.2 include/lua include
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/sw # Fink
|
||||||
|
/opt/local # DarwinPorts
|
||||||
|
/opt/csw # Blastwave
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
find_library(LUA_LIBRARY
|
||||||
|
NAMES lua52 lua5.2 lua-5.2 lua
|
||||||
|
HINTS
|
||||||
|
ENV LUA_DIR
|
||||||
|
PATH_SUFFIXES lib
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/sw
|
||||||
|
/opt/local
|
||||||
|
/opt/csw
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
if(LUA_LIBRARY)
|
||||||
|
# include the math library for Unix
|
||||||
|
if(UNIX AND NOT APPLE AND NOT BEOS)
|
||||||
|
find_library(LUA_MATH_LIBRARY m)
|
||||||
|
set( LUA_LIBRARIES "${LUA_LIBRARY};${LUA_MATH_LIBRARY}" CACHE STRING "Lua Libraries")
|
||||||
|
# For Windows and Mac, don't need to explicitly include the math library
|
||||||
|
else()
|
||||||
|
set( LUA_LIBRARIES "${LUA_LIBRARY}" CACHE STRING "Lua Libraries")
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(LUA_INCLUDE_DIR AND EXISTS "${LUA_INCLUDE_DIR}/lua.h")
|
||||||
|
file(STRINGS "${LUA_INCLUDE_DIR}/lua.h" lua_version_str REGEX "^#define[ \t]+LUA_RELEASE[ \t]+\"Lua .+\"")
|
||||||
|
|
||||||
|
string(REGEX REPLACE "^#define[ \t]+LUA_RELEASE[ \t]+\"Lua ([^\"]+)\".*" "\\1" LUA_VERSION_STRING "${lua_version_str}")
|
||||||
|
unset(lua_version_str)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
include(FindPackageHandleStandardArgs)
|
||||||
|
# handle the QUIETLY and REQUIRED arguments and set LUA_FOUND to TRUE if
|
||||||
|
# all listed variables are TRUE
|
||||||
|
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Lua52
|
||||||
|
REQUIRED_VARS LUA_LIBRARIES LUA_INCLUDE_DIR
|
||||||
|
VERSION_VAR LUA_VERSION_STRING)
|
||||||
|
|
||||||
|
mark_as_advanced(LUA_INCLUDE_DIR LUA_LIBRARIES LUA_LIBRARY LUA_MATH_LIBRARY)
|
||||||
|
|
93
cmake/FindLuaJIT.cmake
Normal file
93
cmake/FindLuaJIT.cmake
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
# Locate Lua library
|
||||||
|
# This module defines
|
||||||
|
# LUAJIT_FOUND, if false, do not try to link to Lua
|
||||||
|
# LUAJIT_LIBRARIES
|
||||||
|
# LUAJIT_INCLUDE_DIR, where to find lua.h
|
||||||
|
#
|
||||||
|
# Note that the expected include convention is
|
||||||
|
# #include "lua.h"
|
||||||
|
# and not
|
||||||
|
# #include <lua/lua.h>
|
||||||
|
# This is because, the lua location is not standardized and may exist
|
||||||
|
# in locations other than lua/
|
||||||
|
|
||||||
|
#=============================================================================
|
||||||
|
# Copyright 2007-2009 Kitware, Inc.
|
||||||
|
#
|
||||||
|
# Distributed under the OSI-approved BSD License (the "License");
|
||||||
|
# see accompanying file Copyright.txt for details.
|
||||||
|
#
|
||||||
|
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||||
|
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||||
|
# See the License for more information.
|
||||||
|
#=============================================================================
|
||||||
|
# (To distributed this file outside of CMake, substitute the full
|
||||||
|
# License text for the above reference.)
|
||||||
|
#
|
||||||
|
# ################
|
||||||
|
# 2010 - modified for cronkite to find luajit instead of lua, as it was before.
|
||||||
|
#
|
||||||
|
|
||||||
|
if ( NOT LuaJIT_FIND_VERSION )
|
||||||
|
MESSAGE(FATAL_ERROR "You need to specify a version of libluajit to use")
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
IF( NOT LUAJIT_FIND_QUIETLY )
|
||||||
|
MESSAGE(STATUS "Looking for LuaJIT ${LuaJIT_FIND_VERSION}")
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
FIND_PATH(LUAJIT_INCLUDE_DIR lua.h
|
||||||
|
HINTS
|
||||||
|
$ENV{LUAJIT_DIR}
|
||||||
|
PATH_SUFFIXES include/luajit-2.0 include/luajit2.0 include/luajit include
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/sw # Fink
|
||||||
|
/opt/local # DarwinPorts
|
||||||
|
/opt/csw # Blastwave
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
FIND_LIBRARY(LUAJIT_LIBRARY
|
||||||
|
NAMES luajit-${LuaJIT_FIND_VERSION_MAJOR}${LuaJIT_FIND_VERSION_MINOR} luajit-${LuaJIT_FIND_VERSION}
|
||||||
|
HINTS
|
||||||
|
$ENV{LUAJIT_DIR}
|
||||||
|
PATH_SUFFIXES lib64 lib
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/sw
|
||||||
|
/opt/local
|
||||||
|
/opt/csw
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
IF(LUAJIT_LIBRARY)
|
||||||
|
# include the math library for Unix
|
||||||
|
IF(UNIX AND NOT APPLE)
|
||||||
|
FIND_LIBRARY(LUAJIT_MATH_LIBRARY m)
|
||||||
|
SET( LUAJIT_LIBRARIES "${LUAJIT_LIBRARY};${LUAJIT_MATH_LIBRARY}" CACHE STRING "Lua Libraries")
|
||||||
|
# For Windows and Mac, don't need to explicitly include the math library
|
||||||
|
ELSE(UNIX AND NOT APPLE)
|
||||||
|
SET( LUAJIT_LIBRARIES "${LUAJIT_LIBRARY}" CACHE STRING "Lua Libraries")
|
||||||
|
ENDIF(UNIX AND NOT APPLE)
|
||||||
|
ENDIF(LUAJIT_LIBRARY)
|
||||||
|
|
||||||
|
INCLUDE(FindPackageHandleStandardArgs)
|
||||||
|
# handle the QUIETLY and REQUIRED arguments and set LUAJIT_FOUND to TRUE if
|
||||||
|
# all listed variables are TRUE
|
||||||
|
FIND_PACKAGE_HANDLE_STANDARD_ARGS(LUAJIT DEFAULT_MSG LUAJIT_LIBRARIES LUAJIT_INCLUDE_DIR)
|
||||||
|
|
||||||
|
IF( NOT LUAJIT_FIND_QUIETLY )
|
||||||
|
IF( LUAJIT_FOUND AND LUAJIT_LIBRARIES)
|
||||||
|
MESSAGE(STATUS "Found LuaJIT: ${LUAJIT_LIBRARY}" )
|
||||||
|
MARK_AS_ADVANCED(LUAJIT_INCLUDE_DIR LUAJIT_LIBRARIES LUAJIT_LIBRARY LUAJIT_MATH_LIBRARY)
|
||||||
|
ELSE()
|
||||||
|
SET ( LUAJIT_FOUND FALSE )
|
||||||
|
ENDIF()
|
||||||
|
ENDIF()
|
75
cmake/FindLuabind.cmake
Normal file
75
cmake/FindLuabind.cmake
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
# Locate Luabind library
|
||||||
|
# This module defines
|
||||||
|
# LUABIND_FOUND, if false, do not try to link to Luabind
|
||||||
|
# LUABIND_LIBRARIES
|
||||||
|
# LUABIND_INCLUDE_DIR, where to find luabind.hpp
|
||||||
|
#
|
||||||
|
# Note that the expected include convention is
|
||||||
|
# #include <luabind/luabind.hpp>
|
||||||
|
# and not
|
||||||
|
# #include <luabind.hpp>
|
||||||
|
|
||||||
|
IF( NOT LUABIND_FIND_QUIETLY )
|
||||||
|
MESSAGE(STATUS "Looking for Luabind...")
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
FIND_PATH(LUABIND_INCLUDE_DIR luabind.hpp
|
||||||
|
HINTS
|
||||||
|
$ENV{LUABIND_DIR}
|
||||||
|
PATH_SUFFIXES luabind include/luabind include
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local # DarwinPorts
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
FIND_LIBRARY(LUABIND_LIBRARY
|
||||||
|
NAMES luabind luabind09
|
||||||
|
HINTS
|
||||||
|
$ENV{LUABIND_DIR}
|
||||||
|
PATH_SUFFIXES lib64 lib
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
FIND_LIBRARY(LUABIND_LIBRARY_DBG
|
||||||
|
NAMES luabindd
|
||||||
|
HINTS
|
||||||
|
$ENV{LUABIND_DIR}
|
||||||
|
PATH_SUFFIXES lib64 lib
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
IF(LUABIND_LIBRARY)
|
||||||
|
SET( LUABIND_LIBRARIES "${LUABIND_LIBRARY}" CACHE STRING "Luabind Libraries")
|
||||||
|
ENDIF(LUABIND_LIBRARY)
|
||||||
|
|
||||||
|
INCLUDE(FindPackageHandleStandardArgs)
|
||||||
|
# handle the QUIETLY and REQUIRED arguments and set LUABIND_FOUND to TRUE if
|
||||||
|
# all listed variables are TRUE
|
||||||
|
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Luabind DEFAULT_MSG LUABIND_LIBRARIES LUABIND_INCLUDE_DIR)
|
||||||
|
|
||||||
|
IF( NOT LUABIND_FIND_QUIETLY )
|
||||||
|
IF( LUABIND_FOUND )
|
||||||
|
MESSAGE(STATUS "Found Luabind: ${LUABIND_LIBRARY}" )
|
||||||
|
ENDIF()
|
||||||
|
IF( LUABIND_LIBRARY_DBG )
|
||||||
|
MESSAGE(STATUS "Luabind debug library availible: ${LUABIND_LIBRARY_DBG}")
|
||||||
|
ENDIF()
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
MARK_AS_ADVANCED(LUABIND_INCLUDE_DIR LUABIND_LIBRARIES LUABIND_LIBRARY LUABIND_LIBRARY_DBG)
|
54
cmake/FindOSMPBF.cmake
Normal file
54
cmake/FindOSMPBF.cmake
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
# Locate OSMPBF library
|
||||||
|
# This module defines
|
||||||
|
# OSMPBF_FOUND, if false, do not try to link to OSMPBF
|
||||||
|
# OSMPBF_LIBRARIES
|
||||||
|
# OSMPBF_INCLUDE_DIR, where to find OSMPBF.hpp
|
||||||
|
#
|
||||||
|
# Note that the expected include convention is
|
||||||
|
# #include <osmpbf/osmpbf.h>
|
||||||
|
# and not
|
||||||
|
# #include <osmpbf.h>
|
||||||
|
|
||||||
|
IF( NOT OSMPBF_FIND_QUIETLY )
|
||||||
|
MESSAGE(STATUS "Looking for OSMPBF...")
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
FIND_PATH(OSMPBF_INCLUDE_DIR osmpbf.h
|
||||||
|
HINTS
|
||||||
|
$ENV{OSMPBF_DIR}
|
||||||
|
PATH_SUFFIXES OSMPBF include/osmpbf include
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local # DarwinPorts
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
FIND_LIBRARY(OSMPBF_LIBRARY
|
||||||
|
NAMES osmpbf
|
||||||
|
HINTS
|
||||||
|
$ENV{OSMPBF_DIR}
|
||||||
|
PATH_SUFFIXES lib64 lib
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
INCLUDE(FindPackageHandleStandardArgs)
|
||||||
|
# handle the QUIETLY and REQUIRED arguments and set OSMPBF_FOUND to TRUE if
|
||||||
|
# all listed variables are TRUE
|
||||||
|
FIND_PACKAGE_HANDLE_STANDARD_ARGS(OSMPBF DEFAULT_MSG OSMPBF_LIBRARY OSMPBF_INCLUDE_DIR)
|
||||||
|
|
||||||
|
IF( NOT OSMPBF_FIND_QUIETLY )
|
||||||
|
IF( OSMPBF_FOUND )
|
||||||
|
MESSAGE(STATUS "Found OSMPBF: ${OSMPBF_LIBRARY}" )
|
||||||
|
ENDIF()
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
#MARK_AS_ADVANCED(OSMPBF_INCLUDE_DIR OSMPBF_LIBRARIES OSMPBF_LIBRARY OSMPBF_LIBRARY_DBG)
|
51
cmake/FindSTXXL.cmake
Normal file
51
cmake/FindSTXXL.cmake
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# Locate STXXL library
|
||||||
|
# This module defines
|
||||||
|
# STXXL_FOUND, if false, do not try to link to libstxxl
|
||||||
|
# STXXL_LIBRARY
|
||||||
|
# STXXL_INCLUDE_DIR, where to find stxxl.h
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
IF( NOT STXXL_FIND_QUIETLY )
|
||||||
|
MESSAGE(STATUS "Looking for STXXL...")
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
FIND_PATH(STXXL_INCLUDE_DIR stxxl.h
|
||||||
|
HINTS
|
||||||
|
$ENV{STXXL_DIR}
|
||||||
|
PATH_SUFFIXES stxxl include/stxxl/stxxl include/stxxl include
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local # DarwinPorts
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
FIND_LIBRARY(STXXL_LIBRARY
|
||||||
|
NAMES stxxl
|
||||||
|
HINTS
|
||||||
|
$ENV{STXXL_DIR}
|
||||||
|
PATH_SUFFIXES lib64 lib
|
||||||
|
PATHS
|
||||||
|
~/Library/Frameworks
|
||||||
|
/Library/Frameworks
|
||||||
|
/usr/local
|
||||||
|
/usr
|
||||||
|
/opt/local
|
||||||
|
/opt
|
||||||
|
)
|
||||||
|
|
||||||
|
INCLUDE(FindPackageHandleStandardArgs)
|
||||||
|
# handle the QUIETLY and REQUIRED arguments and set STXXL_FOUND to TRUE if
|
||||||
|
# all listed variables are TRUE
|
||||||
|
FIND_PACKAGE_HANDLE_STANDARD_ARGS(STXXL DEFAULT_MSG STXXL_LIBRARY STXXL_INCLUDE_DIR)
|
||||||
|
|
||||||
|
IF( NOT STXXL_FIND_QUIETLY )
|
||||||
|
IF( STXXL_FOUND )
|
||||||
|
MESSAGE(STATUS "Found STXXL: ${STXXL_LIBRARY}" )
|
||||||
|
ENDIF()
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
MARK_AS_ADVANCED(STXXL_INCLUDE_DIR STXXL_LIBRARY)
|
@ -1,456 +1,283 @@
|
|||||||
# - Find ThreadingBuildingBlocks include dirs and libraries
|
# Locate Intel Threading Building Blocks include paths and libraries
|
||||||
# Use this module by invoking find_package with the form:
|
# FindTBB.cmake can be found at https://code.google.com/p/findtbb/
|
||||||
# find_package(TBB
|
# Written by Hannes Hofmann <hannes.hofmann _at_ informatik.uni-erlangen.de>
|
||||||
# [REQUIRED] # Fail with error if TBB is not found
|
# Improvements by Gino van den Bergen <gino _at_ dtecta.com>,
|
||||||
# ) #
|
# Florian Uhlig <F.Uhlig _at_ gsi.de>,
|
||||||
# Once done, this will define
|
# Jiri Marsik <jiri.marsik89 _at_ gmail.com>
|
||||||
|
|
||||||
|
# The MIT License
|
||||||
#
|
#
|
||||||
# TBB_FOUND - system has TBB
|
# Copyright (c) 2011 Hannes Hofmann
|
||||||
# TBB_INCLUDE_DIRS - the TBB include directories
|
|
||||||
# TBB_LIBRARIES - TBB libraries to be lined, doesn't include malloc or
|
|
||||||
# malloc proxy
|
|
||||||
# TBB::tbb - imported target for the TBB library
|
|
||||||
#
|
#
|
||||||
# TBB_VERSION_MAJOR - Major Product Version Number
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
# TBB_VERSION_MINOR - Minor Product Version Number
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
# TBB_INTERFACE_VERSION - Engineering Focused Version Number
|
# in the Software without restriction, including without limitation the rights
|
||||||
# TBB_COMPATIBLE_INTERFACE_VERSION - The oldest major interface version
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
# still supported. This uses the engineering
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
# focused interface version numbers.
|
# furnished to do so, subject to the following conditions:
|
||||||
#
|
#
|
||||||
# TBB_MALLOC_FOUND - system has TBB malloc library
|
# The above copyright notice and this permission notice shall be included in
|
||||||
# TBB_MALLOC_INCLUDE_DIRS - the TBB malloc include directories
|
# all copies or substantial portions of the Software.
|
||||||
# TBB_MALLOC_LIBRARIES - The TBB malloc libraries to be lined
|
|
||||||
# TBB::malloc - imported target for the TBB malloc library
|
|
||||||
#
|
#
|
||||||
# TBB_MALLOC_PROXY_FOUND - system has TBB malloc proxy library
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
# TBB_MALLOC_PROXY_INCLUDE_DIRS = the TBB malloc proxy include directories
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
# TBB_MALLOC_PROXY_LIBRARIES - The TBB malloc proxy libraries to be lined
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
# TBB::malloc_proxy - imported target for the TBB malloc proxy library
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
|
||||||
|
# GvdB: This module uses the environment variable TBB_ARCH_PLATFORM which defines architecture and compiler.
|
||||||
|
# e.g. "ia32/vc8" or "em64t/cc4.1.0_libc2.4_kernel2.6.16.21"
|
||||||
|
# TBB_ARCH_PLATFORM is set by the build script tbbvars[.bat|.sh|.csh], which can be found
|
||||||
|
# in the TBB installation directory (TBB_INSTALL_DIR).
|
||||||
#
|
#
|
||||||
|
# GvdB: Mac OS X distribution places libraries directly in lib directory.
|
||||||
#
|
#
|
||||||
# This module reads hints about search locations from variables:
|
# For backwards compatibility, you may explicitely set the CMake variables TBB_ARCHITECTURE and TBB_COMPILER.
|
||||||
# ENV TBB_ARCH_PLATFORM - for eg. set it to "mic" for Xeon Phi builds
|
# TBB_ARCHITECTURE [ ia32 | em64t | itanium ]
|
||||||
# ENV TBB_ROOT or just TBB_ROOT - root directory of tbb installation
|
# which architecture to use
|
||||||
# ENV TBB_BUILD_PREFIX - specifies the build prefix for user built tbb
|
# TBB_COMPILER e.g. vc9 or cc3.2.3_libc2.3.2_kernel2.4.21 or cc4.0.1_os10.4.9
|
||||||
# libraries. Should be specified with ENV TBB_ROOT
|
# which compiler to use (detected automatically on Windows)
|
||||||
# and optionally...
|
|
||||||
# ENV TBB_BUILD_DIR - if build directory is different than ${TBB_ROOT}/build
|
# This module respects
|
||||||
#
|
# TBB_INSTALL_DIR or $ENV{TBB21_INSTALL_DIR} or $ENV{TBB_INSTALL_DIR}
|
||||||
#
|
|
||||||
# Modified by Robert Maynard from the original OGRE source
|
# This module defines
|
||||||
#
|
# TBB_INCLUDE_DIRS, where to find task_scheduler_init.h, etc.
|
||||||
#-------------------------------------------------------------------
|
# TBB_LIBRARY_DIRS, where to find libtbb, libtbbmalloc
|
||||||
# This file is part of the CMake build system for OGRE
|
# TBB_DEBUG_LIBRARY_DIRS, where to find libtbb_debug, libtbbmalloc_debug
|
||||||
# (Object-oriented Graphics Rendering Engine)
|
# TBB_INSTALL_DIR, the base TBB install directory
|
||||||
# For the latest info, see http://www.ogre3d.org/
|
# TBB_LIBRARIES, the libraries to link against to use TBB.
|
||||||
#
|
# TBB_DEBUG_LIBRARIES, the libraries to link against to use TBB with debug symbols.
|
||||||
# The contents of this file are placed in the public domain. Feel
|
# TBB_FOUND, If false, don't try to use TBB.
|
||||||
# free to make use of it in any way you like.
|
# TBB_INTERFACE_VERSION, as defined in tbb/tbb_stddef.h
|
||||||
#-------------------------------------------------------------------
|
|
||||||
#
|
|
||||||
#=============================================================================
|
|
||||||
# Copyright 2010-2012 Kitware, Inc.
|
|
||||||
# Copyright 2012 Rolf Eike Beer <eike@sf-mail.de>
|
|
||||||
#
|
|
||||||
# Distributed under the OSI-approved BSD License (the "License");
|
|
||||||
# see accompanying file Copyright.txt for details.
|
|
||||||
#
|
|
||||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
|
||||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
|
||||||
# See the License for more information.
|
|
||||||
#=============================================================================
|
|
||||||
# (To distribute this file outside of CMake, substitute the full
|
|
||||||
# License text for the above reference.)
|
|
||||||
|
|
||||||
|
|
||||||
#=============================================================================
|
if (WIN32)
|
||||||
# FindTBB helper functions and macros
|
# has em64t/vc8 em64t/vc9
|
||||||
#
|
# has ia32/vc7.1 ia32/vc8 ia32/vc9
|
||||||
|
set(_TBB_DEFAULT_INSTALL_DIR "C:/Program Files/Intel/TBB" "C:/Program Files (x86)/Intel/TBB")
|
||||||
|
set(_TBB_LIB_NAME "tbb")
|
||||||
|
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||||
|
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||||
|
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||||
|
if (MSVC71)
|
||||||
|
set (_TBB_COMPILER "vc7.1")
|
||||||
|
endif(MSVC71)
|
||||||
|
if (MSVC80)
|
||||||
|
set(_TBB_COMPILER "vc8")
|
||||||
|
endif(MSVC80)
|
||||||
|
if (MSVC90)
|
||||||
|
set(_TBB_COMPILER "vc9")
|
||||||
|
endif(MSVC90)
|
||||||
|
if(MSVC10)
|
||||||
|
set(_TBB_COMPILER "vc10")
|
||||||
|
endif(MSVC10)
|
||||||
|
# Todo: add other Windows compilers such as ICL.
|
||||||
|
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||||
|
endif (WIN32)
|
||||||
|
|
||||||
# Use TBBConfig.cmake if possible.
|
if (UNIX)
|
||||||
|
if (APPLE)
|
||||||
|
# MAC
|
||||||
|
set(_TBB_DEFAULT_INSTALL_DIR "/Library/Frameworks/Intel_TBB.framework/Versions")
|
||||||
|
# libs: libtbb.dylib, libtbbmalloc.dylib, *_debug
|
||||||
|
set(_TBB_LIB_NAME "tbb")
|
||||||
|
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||||
|
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||||
|
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||||
|
# default flavor on apple: ia32/cc4.0.1_os10.4.9
|
||||||
|
# Jiri: There is no reason to presume there is only one flavor and
|
||||||
|
# that user's setting of variables should be ignored.
|
||||||
|
if(NOT TBB_COMPILER)
|
||||||
|
set(_TBB_COMPILER "cc4.0.1_os10.4.9")
|
||||||
|
elseif (NOT TBB_COMPILER)
|
||||||
|
set(_TBB_COMPILER ${TBB_COMPILER})
|
||||||
|
endif(NOT TBB_COMPILER)
|
||||||
|
if(NOT TBB_ARCHITECTURE)
|
||||||
|
set(_TBB_ARCHITECTURE "ia32")
|
||||||
|
elseif(NOT TBB_ARCHITECTURE)
|
||||||
|
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||||
|
endif(NOT TBB_ARCHITECTURE)
|
||||||
|
else (APPLE)
|
||||||
|
# LINUX
|
||||||
|
set(_TBB_DEFAULT_INSTALL_DIR "/opt/intel/tbb" "/usr/local/include" "/usr/include")
|
||||||
|
set(_TBB_LIB_NAME "tbb")
|
||||||
|
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||||
|
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||||
|
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||||
|
# has em64t/cc3.2.3_libc2.3.2_kernel2.4.21 em64t/cc3.3.3_libc2.3.3_kernel2.6.5 em64t/cc3.4.3_libc2.3.4_kernel2.6.9 em64t/cc4.1.0_libc2.4_kernel2.6.16.21
|
||||||
|
# has ia32/*
|
||||||
|
# has itanium/*
|
||||||
|
set(_TBB_COMPILER ${TBB_COMPILER})
|
||||||
|
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||||
|
endif (APPLE)
|
||||||
|
endif (UNIX)
|
||||||
|
|
||||||
set(_tbb_find_quiet)
|
if (CMAKE_SYSTEM MATCHES "SunOS.*")
|
||||||
if (TBB_FIND_QUIETLY)
|
# SUN
|
||||||
set(_tbb_find_quiet QUIET)
|
# not yet supported
|
||||||
endif ()
|
# has em64t/cc3.4.3_kernel5.10
|
||||||
set(_tbb_find_components)
|
# has ia32/*
|
||||||
set(_tbb_find_optional_components)
|
endif (CMAKE_SYSTEM MATCHES "SunOS.*")
|
||||||
foreach (_tbb_find_component IN LISTS TBB_FIND_COMPONENTS)
|
|
||||||
if (TBB_FIND_REQUIRED_${_tbb_find_component})
|
|
||||||
list(APPEND _tbb_find_components "${_tbb_find_component}")
|
|
||||||
else ()
|
|
||||||
list(APPEND _tbb_find_optional_components "${_tbb_find_component}")
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
unset(_tbb_find_component)
|
|
||||||
find_package(TBB CONFIG ${_tbb_find_quiet}
|
|
||||||
COMPONENTS ${_tbb_find_components}
|
|
||||||
OPTIONAL_COMPONENTS ${_tbb_find_optional_components})
|
|
||||||
unset(_tbb_find_quiet)
|
|
||||||
unset(_tbb_find_components)
|
|
||||||
unset(_tbb_find_optional_components)
|
|
||||||
if (TBB_FOUND)
|
|
||||||
return ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
#====================================================
|
|
||||||
# Fix the library path in case it is a linker script
|
|
||||||
#====================================================
|
|
||||||
function(tbb_extract_real_library library real_library)
|
|
||||||
if(NOT UNIX OR NOT EXISTS ${library})
|
|
||||||
set(${real_library} "${library}" PARENT_SCOPE)
|
|
||||||
return()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
#Read in the first 4 bytes and see if they are the ELF magic number
|
|
||||||
set(_elf_magic "7f454c46")
|
|
||||||
file(READ ${library} _hex_data OFFSET 0 LIMIT 4 HEX)
|
|
||||||
if(_hex_data STREQUAL _elf_magic)
|
|
||||||
#we have opened a elf binary so this is what
|
|
||||||
#we should link to
|
|
||||||
set(${real_library} "${library}" PARENT_SCOPE)
|
|
||||||
return()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
file(READ ${library} _data OFFSET 0 LIMIT 1024)
|
|
||||||
if("${_data}" MATCHES "INPUT \\(([^(]+)\\)")
|
|
||||||
#extract out the .so name from REGEX MATCH command
|
|
||||||
set(_proper_so_name "${CMAKE_MATCH_1}")
|
|
||||||
|
|
||||||
#construct path to the real .so which is presumed to be in the same directory
|
|
||||||
#as the input file
|
|
||||||
get_filename_component(_so_dir "${library}" DIRECTORY)
|
|
||||||
set(${real_library} "${_so_dir}/${_proper_so_name}" PARENT_SCOPE)
|
|
||||||
else()
|
|
||||||
#unable to determine what this library is so just hope everything works
|
|
||||||
#and pass it unmodified.
|
|
||||||
set(${real_library} "${library}" PARENT_SCOPE)
|
|
||||||
endif()
|
|
||||||
endfunction()
|
|
||||||
|
|
||||||
#===============================================
|
|
||||||
# Do the final processing for the package find.
|
|
||||||
#===============================================
|
|
||||||
macro(findpkg_finish PREFIX TARGET_NAME)
|
|
||||||
if (${PREFIX}_INCLUDE_DIR AND ${PREFIX}_LIBRARY)
|
|
||||||
set(${PREFIX}_FOUND TRUE)
|
|
||||||
set (${PREFIX}_INCLUDE_DIRS ${${PREFIX}_INCLUDE_DIR})
|
|
||||||
set (${PREFIX}_LIBRARIES ${${PREFIX}_LIBRARY})
|
|
||||||
else ()
|
|
||||||
if (${PREFIX}_FIND_REQUIRED AND NOT ${PREFIX}_FIND_QUIETLY)
|
|
||||||
message(FATAL_ERROR "Required library ${PREFIX} not found.")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (NOT TARGET "TBB::${TARGET_NAME}")
|
|
||||||
if (${PREFIX}_LIBRARY_RELEASE)
|
|
||||||
tbb_extract_real_library(${${PREFIX}_LIBRARY_RELEASE} real_release)
|
|
||||||
endif ()
|
|
||||||
if (${PREFIX}_LIBRARY_DEBUG)
|
|
||||||
tbb_extract_real_library(${${PREFIX}_LIBRARY_DEBUG} real_debug)
|
|
||||||
endif ()
|
|
||||||
add_library(TBB::${TARGET_NAME} UNKNOWN IMPORTED)
|
|
||||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
|
||||||
INTERFACE_INCLUDE_DIRECTORIES "${${PREFIX}_INCLUDE_DIR}")
|
|
||||||
if (${PREFIX}_LIBRARY_DEBUG AND ${PREFIX}_LIBRARY_RELEASE)
|
|
||||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
|
||||||
IMPORTED_LOCATION "${real_release}"
|
|
||||||
IMPORTED_LOCATION_DEBUG "${real_debug}"
|
|
||||||
IMPORTED_LOCATION_RELEASE "${real_release}")
|
|
||||||
elseif (${PREFIX}_LIBRARY_RELEASE)
|
|
||||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
|
||||||
IMPORTED_LOCATION "${real_release}")
|
|
||||||
elseif (${PREFIX}_LIBRARY_DEBUG)
|
|
||||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
|
||||||
IMPORTED_LOCATION "${real_debug}")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
#mark the following variables as internal variables
|
|
||||||
mark_as_advanced(${PREFIX}_INCLUDE_DIR
|
|
||||||
${PREFIX}_LIBRARY
|
|
||||||
${PREFIX}_LIBRARY_DEBUG
|
|
||||||
${PREFIX}_LIBRARY_RELEASE)
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
#===============================================
|
|
||||||
# Generate debug names from given release names
|
|
||||||
#===============================================
|
|
||||||
macro(get_debug_names PREFIX)
|
|
||||||
foreach(i ${${PREFIX}})
|
|
||||||
set(${PREFIX}_DEBUG ${${PREFIX}_DEBUG} ${i}d ${i}D ${i}_d ${i}_D ${i}_debug ${i})
|
|
||||||
endforeach()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
#===============================================
|
|
||||||
# See if we have env vars to help us find tbb
|
|
||||||
#===============================================
|
|
||||||
macro(getenv_path VAR)
|
|
||||||
set(ENV_${VAR} $ENV{${VAR}})
|
|
||||||
# replace won't work if var is blank
|
|
||||||
if (ENV_${VAR})
|
|
||||||
string( REGEX REPLACE "\\\\" "/" ENV_${VAR} ${ENV_${VAR}} )
|
|
||||||
endif ()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
#===============================================
|
|
||||||
# Couple a set of release AND debug libraries
|
|
||||||
#===============================================
|
|
||||||
macro(make_library_set PREFIX)
|
|
||||||
if (${PREFIX}_RELEASE AND ${PREFIX}_DEBUG)
|
|
||||||
set(${PREFIX} optimized ${${PREFIX}_RELEASE} debug ${${PREFIX}_DEBUG})
|
|
||||||
elseif (${PREFIX}_RELEASE)
|
|
||||||
set(${PREFIX} ${${PREFIX}_RELEASE})
|
|
||||||
elseif (${PREFIX}_DEBUG)
|
|
||||||
set(${PREFIX} ${${PREFIX}_DEBUG})
|
|
||||||
endif ()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
|
|
||||||
#=============================================================================
|
#-- Clear the public variables
|
||||||
# Now to actually find TBB
|
set (TBB_FOUND "NO")
|
||||||
#
|
|
||||||
|
|
||||||
# Get path, convert backslashes as ${ENV_${var}}
|
|
||||||
getenv_path(TBB_ROOT)
|
|
||||||
|
|
||||||
# initialize search paths
|
|
||||||
set(TBB_PREFIX_PATH ${TBB_ROOT} ${ENV_TBB_ROOT})
|
|
||||||
set(TBB_INC_SEARCH_PATH "")
|
|
||||||
set(TBB_LIB_SEARCH_PATH "")
|
|
||||||
|
|
||||||
|
|
||||||
# If user built from sources
|
#-- Find TBB install dir and set ${_TBB_INSTALL_DIR} and cached ${TBB_INSTALL_DIR}
|
||||||
set(TBB_BUILD_PREFIX $ENV{TBB_BUILD_PREFIX})
|
# first: use CMake variable TBB_INSTALL_DIR
|
||||||
if (TBB_BUILD_PREFIX AND ENV_TBB_ROOT)
|
if (TBB_INSTALL_DIR)
|
||||||
getenv_path(TBB_BUILD_DIR)
|
set (_TBB_INSTALL_DIR ${TBB_INSTALL_DIR})
|
||||||
if (NOT ENV_TBB_BUILD_DIR)
|
endif (TBB_INSTALL_DIR)
|
||||||
set(ENV_TBB_BUILD_DIR ${ENV_TBB_ROOT}/build)
|
# second: use environment variable
|
||||||
endif ()
|
if (NOT _TBB_INSTALL_DIR)
|
||||||
|
if (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
|
||||||
# include directory under ${ENV_TBB_ROOT}/include
|
set (_TBB_INSTALL_DIR $ENV{TBB_INSTALL_DIR})
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH
|
endif (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
|
||||||
${ENV_TBB_BUILD_DIR}/${TBB_BUILD_PREFIX}_release
|
# Intel recommends setting TBB21_INSTALL_DIR
|
||||||
${ENV_TBB_BUILD_DIR}/${TBB_BUILD_PREFIX}_debug)
|
if (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
|
||||||
endif ()
|
set (_TBB_INSTALL_DIR $ENV{TBB21_INSTALL_DIR})
|
||||||
|
endif (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
|
||||||
|
if (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
|
||||||
|
set (_TBB_INSTALL_DIR $ENV{TBB22_INSTALL_DIR})
|
||||||
|
endif (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
|
||||||
|
if (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
|
||||||
|
set (_TBB_INSTALL_DIR $ENV{TBB30_INSTALL_DIR})
|
||||||
|
endif (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
|
||||||
|
endif (NOT _TBB_INSTALL_DIR)
|
||||||
|
# third: try to find path automatically
|
||||||
|
if (NOT _TBB_INSTALL_DIR)
|
||||||
|
if (_TBB_DEFAULT_INSTALL_DIR)
|
||||||
|
set (_TBB_INSTALL_DIR ${_TBB_DEFAULT_INSTALL_DIR})
|
||||||
|
endif (_TBB_DEFAULT_INSTALL_DIR)
|
||||||
|
endif (NOT _TBB_INSTALL_DIR)
|
||||||
|
# sanity check
|
||||||
|
if (NOT _TBB_INSTALL_DIR)
|
||||||
|
message ("ERROR: Unable to find Intel TBB install directory. ${_TBB_INSTALL_DIR}")
|
||||||
|
else (NOT _TBB_INSTALL_DIR)
|
||||||
|
# finally: set the cached CMake variable TBB_INSTALL_DIR
|
||||||
|
if (NOT TBB_INSTALL_DIR)
|
||||||
|
set (TBB_INSTALL_DIR ${_TBB_INSTALL_DIR} CACHE PATH "Intel TBB install directory")
|
||||||
|
mark_as_advanced(TBB_INSTALL_DIR)
|
||||||
|
endif (NOT TBB_INSTALL_DIR)
|
||||||
|
|
||||||
|
|
||||||
# For Windows, let's assume that the user might be using the precompiled
|
#-- A macro to rewrite the paths of the library. This is necessary, because
|
||||||
# TBB packages from the main website. These use a rather awkward directory
|
# find_library() always found the em64t/vc9 version of the TBB libs
|
||||||
# structure (at least for automatically finding the right files) depending
|
macro(TBB_CORRECT_LIB_DIR var_name)
|
||||||
# on platform and compiler, but we'll do our best to accommodate it.
|
# if (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
|
||||||
# Not adding the same effort for the precompiled linux builds, though. Those
|
string(REPLACE em64t "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
|
||||||
# have different versions for CC compiler versions and linux kernels which
|
# endif (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
|
||||||
# will never adequately match the user's setup, so there is no feasible way
|
string(REPLACE ia32 "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
|
||||||
# to detect the "best" version to use. The user will have to manually
|
string(REPLACE vc7.1 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||||
# select the right files. (Chances are the distributions are shipping their
|
string(REPLACE vc8 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||||
# custom version of tbb, anyway, so the problem is probably nonexistent.)
|
string(REPLACE vc9 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||||
if (WIN32 AND MSVC)
|
string(REPLACE vc10 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||||
set(COMPILER_PREFIX "vc7.1")
|
endmacro(TBB_CORRECT_LIB_DIR var_content)
|
||||||
if (MSVC_VERSION EQUAL 1400)
|
|
||||||
set(COMPILER_PREFIX "vc8")
|
|
||||||
elseif(MSVC_VERSION EQUAL 1500)
|
|
||||||
set(COMPILER_PREFIX "vc9")
|
|
||||||
elseif(MSVC_VERSION EQUAL 1600)
|
|
||||||
set(COMPILER_PREFIX "vc10")
|
|
||||||
elseif(MSVC_VERSION EQUAL 1700)
|
|
||||||
set(COMPILER_PREFIX "vc11")
|
|
||||||
elseif(MSVC_VERSION EQUAL 1800)
|
|
||||||
set(COMPILER_PREFIX "vc12")
|
|
||||||
elseif(MSVC_VERSION GREATER_EQUAL 1900)
|
|
||||||
set(COMPILER_PREFIX "vc14")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# for each prefix path, add ia32/64\${COMPILER_PREFIX}\lib to the lib search path
|
|
||||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
|
||||||
if (CMAKE_CL_64)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia64/${COMPILER_PREFIX}/lib)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia64/${COMPILER_PREFIX})
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/intel64/${COMPILER_PREFIX}/lib)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/intel64/${COMPILER_PREFIX})
|
|
||||||
else ()
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia32/${COMPILER_PREFIX}/lib)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia32/${COMPILER_PREFIX})
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# For OS X binary distribution, choose libc++ based libraries for Mavericks (10.9)
|
|
||||||
# and above and AppleClang
|
|
||||||
if (CMAKE_SYSTEM_NAME STREQUAL "Darwin" AND
|
|
||||||
NOT CMAKE_SYSTEM_VERSION VERSION_LESS 13.0)
|
|
||||||
set (USE_LIBCXX OFF)
|
|
||||||
cmake_policy(GET CMP0025 POLICY_VAR)
|
|
||||||
|
|
||||||
if (POLICY_VAR STREQUAL "NEW")
|
|
||||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
|
||||||
set (USE_LIBCXX ON)
|
|
||||||
endif ()
|
|
||||||
else ()
|
|
||||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
|
||||||
set (USE_LIBCXX ON)
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (USE_LIBCXX)
|
|
||||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
|
||||||
list (APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/libc++ ${dir}/libc++/lib)
|
|
||||||
endforeach ()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# check compiler ABI
|
|
||||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
|
||||||
set(COMPILER_PREFIX)
|
|
||||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.8)
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.8")
|
|
||||||
endif()
|
|
||||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.7)
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.7")
|
|
||||||
endif()
|
|
||||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.4)
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.4")
|
|
||||||
endif()
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.1")
|
|
||||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
|
||||||
set(COMPILER_PREFIX)
|
|
||||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.0) # Complete guess
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.8")
|
|
||||||
endif()
|
|
||||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 3.6)
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.7")
|
|
||||||
endif()
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.4")
|
|
||||||
else() # Assume compatibility with 4.4 for other compilers
|
|
||||||
list(APPEND COMPILER_PREFIX "gcc4.4")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# if platform architecture is explicitly specified
|
|
||||||
set(TBB_ARCH_PLATFORM $ENV{TBB_ARCH_PLATFORM})
|
|
||||||
if (TBB_ARCH_PLATFORM)
|
|
||||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/${TBB_ARCH_PLATFORM}/lib)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/${TBB_ARCH_PLATFORM})
|
|
||||||
endforeach ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
|
||||||
foreach (prefix IN LISTS COMPILER_PREFIX)
|
|
||||||
if (CMAKE_SIZEOF_VOID_P EQUAL 8)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/intel64)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/intel64/${prefix})
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/intel64/lib)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/intel64/${prefix}/lib)
|
|
||||||
else ()
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia32)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia32/${prefix})
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia32/lib)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia32/${prefix}/lib)
|
|
||||||
endif ()
|
|
||||||
endforeach()
|
|
||||||
endforeach ()
|
|
||||||
|
|
||||||
# add general search paths
|
|
||||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
|
||||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib ${dir}/Lib ${dir}/lib/tbb
|
|
||||||
${dir}/Libs)
|
|
||||||
list(APPEND TBB_INC_SEARCH_PATH ${dir}/include ${dir}/Include
|
|
||||||
${dir}/include/tbb)
|
|
||||||
endforeach ()
|
|
||||||
|
|
||||||
set(TBB_LIBRARY_NAMES tbb)
|
|
||||||
get_debug_names(TBB_LIBRARY_NAMES)
|
|
||||||
|
|
||||||
|
|
||||||
|
#-- Look for include directory and set ${TBB_INCLUDE_DIR}
|
||||||
|
set (TBB_INC_SEARCH_DIR ${_TBB_INSTALL_DIR}/include)
|
||||||
|
# Jiri: tbbvars now sets the CPATH environment variable to the directory
|
||||||
|
# containing the headers.
|
||||||
find_path(TBB_INCLUDE_DIR
|
find_path(TBB_INCLUDE_DIR
|
||||||
NAMES tbb/tbb.h
|
tbb/task_scheduler_init.h
|
||||||
PATHS ${TBB_INC_SEARCH_PATH})
|
PATHS ${TBB_INC_SEARCH_DIR} ENV CPATH
|
||||||
|
)
|
||||||
|
mark_as_advanced(TBB_INCLUDE_DIR)
|
||||||
|
|
||||||
find_library(TBB_LIBRARY_RELEASE
|
|
||||||
NAMES ${TBB_LIBRARY_NAMES}
|
|
||||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
|
||||||
find_library(TBB_LIBRARY_DEBUG
|
|
||||||
NAMES ${TBB_LIBRARY_NAMES_DEBUG}
|
|
||||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
|
||||||
make_library_set(TBB_LIBRARY)
|
|
||||||
|
|
||||||
findpkg_finish(TBB tbb)
|
#-- Look for libraries
|
||||||
|
# GvdB: $ENV{TBB_ARCH_PLATFORM} is set by the build script tbbvars[.bat|.sh|.csh]
|
||||||
|
if (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
|
||||||
|
set (_TBB_LIBRARY_DIR
|
||||||
|
${_TBB_INSTALL_DIR}/lib/$ENV{TBB_ARCH_PLATFORM}
|
||||||
|
${_TBB_INSTALL_DIR}/$ENV{TBB_ARCH_PLATFORM}/lib
|
||||||
|
)
|
||||||
|
endif (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
|
||||||
|
# Jiri: This block isn't mutually exclusive with the previous one
|
||||||
|
# (hence no else), instead I test if the user really specified
|
||||||
|
# the variables in question.
|
||||||
|
if ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
|
||||||
|
# HH: deprecated
|
||||||
|
message(STATUS "[Warning] FindTBB.cmake: The use of TBB_ARCHITECTURE and TBB_COMPILER is deprecated and may not be supported in future versions. Please set \$ENV{TBB_ARCH_PLATFORM} (using tbbvars.[bat|csh|sh]).")
|
||||||
|
# Jiri: It doesn't hurt to look in more places, so I store the hints from
|
||||||
|
# ENV{TBB_ARCH_PLATFORM} and the TBB_ARCHITECTURE and TBB_COMPILER
|
||||||
|
# variables and search them both.
|
||||||
|
set (_TBB_LIBRARY_DIR "${_TBB_INSTALL_DIR}/${_TBB_ARCHITECTURE}/${_TBB_COMPILER}/lib" ${_TBB_LIBRARY_DIR})
|
||||||
|
endif ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
|
||||||
|
|
||||||
|
# GvdB: Mac OS X distribution places libraries directly in lib directory.
|
||||||
|
list(APPEND _TBB_LIBRARY_DIR ${_TBB_INSTALL_DIR}/lib)
|
||||||
|
|
||||||
|
# Jiri: No reason not to check the default paths. From recent versions,
|
||||||
|
# tbbvars has started exporting the LIBRARY_PATH and LD_LIBRARY_PATH
|
||||||
|
# variables, which now point to the directories of the lib files.
|
||||||
|
# It all makes more sense to use the ${_TBB_LIBRARY_DIR} as a HINTS
|
||||||
|
# argument instead of the implicit PATHS as it isn't hard-coded
|
||||||
|
# but computed by system introspection. Searching the LIBRARY_PATH
|
||||||
|
# and LD_LIBRARY_PATH environment variables is now even more important
|
||||||
|
# that tbbvars doesn't export TBB_ARCH_PLATFORM and it facilitates
|
||||||
|
# the use of TBB built from sources.
|
||||||
|
find_library(TBB_LIBRARY ${_TBB_LIB_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||||
|
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||||
|
find_library(TBB_MALLOC_LIBRARY ${_TBB_LIB_MALLOC_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||||
|
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||||
|
|
||||||
|
#Extract path from TBB_LIBRARY name
|
||||||
|
get_filename_component(TBB_LIBRARY_DIR ${TBB_LIBRARY} PATH)
|
||||||
|
|
||||||
|
#TBB_CORRECT_LIB_DIR(TBB_LIBRARY)
|
||||||
|
#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY)
|
||||||
|
mark_as_advanced(TBB_LIBRARY TBB_MALLOC_LIBRARY)
|
||||||
|
|
||||||
|
#-- Look for debug libraries
|
||||||
|
# Jiri: Changed the same way as for the release libraries.
|
||||||
|
find_library(TBB_LIBRARY_DEBUG ${_TBB_LIB_DEBUG_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||||
|
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||||
|
find_library(TBB_MALLOC_LIBRARY_DEBUG ${_TBB_LIB_MALLOC_DEBUG_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||||
|
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||||
|
|
||||||
|
# Jiri: Self-built TBB stores the debug libraries in a separate directory.
|
||||||
|
# Extract path from TBB_LIBRARY_DEBUG name
|
||||||
|
get_filename_component(TBB_LIBRARY_DEBUG_DIR ${TBB_LIBRARY_DEBUG} PATH)
|
||||||
|
|
||||||
|
#TBB_CORRECT_LIB_DIR(TBB_LIBRARY_DEBUG)
|
||||||
|
#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY_DEBUG)
|
||||||
|
mark_as_advanced(TBB_LIBRARY_DEBUG TBB_MALLOC_LIBRARY_DEBUG)
|
||||||
|
|
||||||
|
|
||||||
|
if (TBB_INCLUDE_DIR)
|
||||||
|
if (TBB_LIBRARY)
|
||||||
|
set (TBB_FOUND "YES")
|
||||||
|
set (TBB_LIBRARIES ${TBB_LIBRARY} ${TBB_MALLOC_LIBRARY} ${TBB_LIBRARIES})
|
||||||
|
set (TBB_DEBUG_LIBRARIES ${TBB_LIBRARY_DEBUG} ${TBB_MALLOC_LIBRARY_DEBUG} ${TBB_DEBUG_LIBRARIES})
|
||||||
|
set (TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR} CACHE PATH "TBB include directory" FORCE)
|
||||||
|
set (TBB_LIBRARY_DIRS ${TBB_LIBRARY_DIR} CACHE PATH "TBB library directory" FORCE)
|
||||||
|
# Jiri: Self-built TBB stores the debug libraries in a separate directory.
|
||||||
|
set (TBB_DEBUG_LIBRARY_DIRS ${TBB_LIBRARY_DEBUG_DIR} CACHE PATH "TBB debug library directory" FORCE)
|
||||||
|
mark_as_advanced(TBB_INCLUDE_DIRS TBB_LIBRARY_DIRS TBB_DEBUG_LIBRARY_DIRS TBB_LIBRARIES TBB_DEBUG_LIBRARIES)
|
||||||
|
message(STATUS "Found Intel TBB")
|
||||||
|
endif (TBB_LIBRARY)
|
||||||
|
endif (TBB_INCLUDE_DIR)
|
||||||
|
|
||||||
#if we haven't found TBB no point on going any further
|
|
||||||
if (NOT TBB_FOUND)
|
if (NOT TBB_FOUND)
|
||||||
return()
|
message("ERROR: Intel TBB NOT found!")
|
||||||
endif ()
|
message(STATUS "Looked for Threading Building Blocks in ${_TBB_INSTALL_DIR}")
|
||||||
|
# do only throw fatal, if this pkg is REQUIRED
|
||||||
|
if (TBB_FIND_REQUIRED)
|
||||||
|
message(FATAL_ERROR "Could NOT find TBB library.")
|
||||||
|
endif (TBB_FIND_REQUIRED)
|
||||||
|
endif (NOT TBB_FOUND)
|
||||||
|
|
||||||
#=============================================================================
|
endif (NOT _TBB_INSTALL_DIR)
|
||||||
# Look for TBB's malloc package
|
|
||||||
set(TBB_MALLOC_LIBRARY_NAMES tbbmalloc)
|
|
||||||
get_debug_names(TBB_MALLOC_LIBRARY_NAMES)
|
|
||||||
|
|
||||||
find_path(TBB_MALLOC_INCLUDE_DIR
|
if (TBB_FOUND)
|
||||||
NAMES tbb/tbb.h
|
set(TBB_INTERFACE_VERSION 0)
|
||||||
PATHS ${TBB_INC_SEARCH_PATH})
|
FILE(READ "${TBB_INCLUDE_DIRS}/tbb/tbb_stddef.h" _TBB_VERSION_CONTENTS)
|
||||||
|
STRING(REGEX REPLACE ".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1" TBB_INTERFACE_VERSION "${_TBB_VERSION_CONTENTS}")
|
||||||
find_library(TBB_MALLOC_LIBRARY_RELEASE
|
set(TBB_INTERFACE_VERSION "${TBB_INTERFACE_VERSION}")
|
||||||
NAMES ${TBB_MALLOC_LIBRARY_NAMES}
|
endif (TBB_FOUND)
|
||||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
|
||||||
find_library(TBB_MALLOC_LIBRARY_DEBUG
|
|
||||||
NAMES ${TBB_MALLOC_LIBRARY_NAMES_DEBUG}
|
|
||||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
|
||||||
make_library_set(TBB_MALLOC_LIBRARY)
|
|
||||||
|
|
||||||
findpkg_finish(TBB_MALLOC tbbmalloc)
|
|
||||||
|
|
||||||
#=============================================================================
|
|
||||||
# Look for TBB's malloc proxy package
|
|
||||||
set(TBB_MALLOC_PROXY_LIBRARY_NAMES tbbmalloc_proxy)
|
|
||||||
get_debug_names(TBB_MALLOC_PROXY_LIBRARY_NAMES)
|
|
||||||
|
|
||||||
find_path(TBB_MALLOC_PROXY_INCLUDE_DIR
|
|
||||||
NAMES tbb/tbbmalloc_proxy.h
|
|
||||||
PATHS ${TBB_INC_SEARCH_PATH})
|
|
||||||
|
|
||||||
find_library(TBB_MALLOC_PROXY_LIBRARY_RELEASE
|
|
||||||
NAMES ${TBB_MALLOC_PROXY_LIBRARY_NAMES}
|
|
||||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
|
||||||
find_library(TBB_MALLOC_PROXY_LIBRARY_DEBUG
|
|
||||||
NAMES ${TBB_MALLOC_PROXY_LIBRARY_NAMES_DEBUG}
|
|
||||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
|
||||||
make_library_set(TBB_MALLOC_PROXY_LIBRARY)
|
|
||||||
|
|
||||||
findpkg_finish(TBB_MALLOC_PROXY tbbmalloc_proxy)
|
|
||||||
|
|
||||||
|
|
||||||
#=============================================================================
|
|
||||||
#parse all the version numbers from tbb
|
|
||||||
if(NOT TBB_VERSION)
|
|
||||||
if (EXISTS "${TBB_INCLUDE_DIR}/oneapi/tbb/version.h")
|
|
||||||
file(STRINGS
|
|
||||||
"${TBB_INCLUDE_DIR}/oneapi/tbb/version.h"
|
|
||||||
TBB_VERSION_CONTENTS
|
|
||||||
REGEX "VERSION")
|
|
||||||
else()
|
|
||||||
#only read the start of the file
|
|
||||||
file(STRINGS
|
|
||||||
"${TBB_INCLUDE_DIR}/tbb/tbb_stddef.h"
|
|
||||||
TBB_VERSION_CONTENTS
|
|
||||||
REGEX "VERSION")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
string(REGEX REPLACE
|
|
||||||
".*#define TBB_VERSION_MAJOR ([0-9]+).*" "\\1"
|
|
||||||
TBB_VERSION_MAJOR "${TBB_VERSION_CONTENTS}")
|
|
||||||
|
|
||||||
string(REGEX REPLACE
|
|
||||||
".*#define TBB_VERSION_MINOR ([0-9]+).*" "\\1"
|
|
||||||
TBB_VERSION_MINOR "${TBB_VERSION_CONTENTS}")
|
|
||||||
|
|
||||||
string(REGEX REPLACE
|
|
||||||
".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1"
|
|
||||||
TBB_INTERFACE_VERSION "${TBB_VERSION_CONTENTS}")
|
|
||||||
|
|
||||||
string(REGEX REPLACE
|
|
||||||
".*#define TBB_COMPATIBLE_INTERFACE_VERSION ([0-9]+).*" "\\1"
|
|
||||||
TBB_COMPATIBLE_INTERFACE_VERSION "${TBB_VERSION_CONTENTS}")
|
|
||||||
|
|
||||||
endif()
|
|
||||||
|
10
cmake/FingerPrint-Config.cmake
Normal file
10
cmake/FingerPrint-Config.cmake
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
set(OLDFILE ${SOURCE_DIR}/util/fingerprint_impl.hpp)
|
||||||
|
if (EXISTS ${OLDFILE})
|
||||||
|
file(REMOVE_RECURSE ${OLDFILE})
|
||||||
|
endif()
|
||||||
|
file(MD5 ${SOURCE_DIR}/prepare.cpp MD5PREPARE)
|
||||||
|
file(MD5 ${SOURCE_DIR}/data_structures/static_rtree.hpp MD5RTREE)
|
||||||
|
file(MD5 ${SOURCE_DIR}/util/graph_loader.hpp MD5GRAPH)
|
||||||
|
file(MD5 ${SOURCE_DIR}/server/data_structures/internal_datafacade.hpp MD5OBJECTS)
|
||||||
|
|
||||||
|
CONFIGURE_FILE(${SOURCE_DIR}/util/fingerprint_impl.hpp.in ${SOURCE_DIR}/util/fingerprint_impl.hpp)
|
123
cmake/GetGitRevisionDescription.cmake
Normal file
123
cmake/GetGitRevisionDescription.cmake
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
# - Returns a version string from Git
|
||||||
|
#
|
||||||
|
# These functions force a re-configure on each git commit so that you can
|
||||||
|
# trust the values of the variables in your build system.
|
||||||
|
#
|
||||||
|
# get_git_head_revision(<refspecvar> <hashvar> [<additional arguments to git describe> ...])
|
||||||
|
#
|
||||||
|
# Returns the refspec and sha hash of the current head revision
|
||||||
|
#
|
||||||
|
# git_describe(<var> [<additional arguments to git describe> ...])
|
||||||
|
#
|
||||||
|
# Returns the results of git describe on the source tree, and adjusting
|
||||||
|
# the output so that it tests false if an error occurs.
|
||||||
|
#
|
||||||
|
# git_get_exact_tag(<var> [<additional arguments to git describe> ...])
|
||||||
|
#
|
||||||
|
# Returns the results of git describe --exact-match on the source tree,
|
||||||
|
# and adjusting the output so that it tests false if there was no exact
|
||||||
|
# matching tag.
|
||||||
|
#
|
||||||
|
# Requires CMake 2.6 or newer (uses the 'function' command)
|
||||||
|
#
|
||||||
|
# Original Author:
|
||||||
|
# 2009-2010 Ryan Pavlik <rpavlik@iastate.edu> <abiryan@ryand.net>
|
||||||
|
# http://academic.cleardefinition.com
|
||||||
|
# Iowa State University HCI Graduate Program/VRAC
|
||||||
|
#
|
||||||
|
# Copyright Iowa State University 2009-2010.
|
||||||
|
# Distributed under the Boost Software License, Version 1.0.
|
||||||
|
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||||
|
# http://www.boost.org/LICENSE_1_0.txt)
|
||||||
|
|
||||||
|
if(__get_git_revision_description)
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
set(__get_git_revision_description YES)
|
||||||
|
|
||||||
|
# We must run the following at "include" time, not at function call time,
|
||||||
|
# to find the path to this module rather than the path to a calling list file
|
||||||
|
get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH)
|
||||||
|
|
||||||
|
function(get_git_head_revision _refspecvar _hashvar)
|
||||||
|
set(GIT_PARENT_DIR "${CMAKE_SOURCE_DIR}")
|
||||||
|
set(GIT_DIR "${GIT_PARENT_DIR}/.git")
|
||||||
|
while(NOT EXISTS "${GIT_DIR}") # .git dir not found, search parent directories
|
||||||
|
set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}")
|
||||||
|
get_filename_component(GIT_PARENT_DIR ${GIT_PARENT_DIR} PATH)
|
||||||
|
if(GIT_PARENT_DIR STREQUAL GIT_PREVIOUS_PARENT)
|
||||||
|
# We have reached the root directory, we are not in git
|
||||||
|
set(${_refspecvar} "GITDIR-NOTFOUND" PARENT_SCOPE)
|
||||||
|
set(${_hashvar} "GITDIR-NOTFOUND" PARENT_SCOPE)
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
set(GIT_DIR "${GIT_PARENT_DIR}/.git")
|
||||||
|
endwhile()
|
||||||
|
set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data")
|
||||||
|
if(NOT EXISTS "${GIT_DATA}")
|
||||||
|
file(MAKE_DIRECTORY "${GIT_DATA}")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(NOT EXISTS "${GIT_DIR}/HEAD")
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
set(HEAD_FILE "${GIT_DATA}/HEAD")
|
||||||
|
configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY)
|
||||||
|
|
||||||
|
configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in"
|
||||||
|
"${GIT_DATA}/grabRef.cmake"
|
||||||
|
@ONLY)
|
||||||
|
include("${GIT_DATA}/grabRef.cmake")
|
||||||
|
|
||||||
|
set(${_refspecvar} "${HEAD_REF}" PARENT_SCOPE)
|
||||||
|
set(${_hashvar} "${HEAD_HASH}" PARENT_SCOPE)
|
||||||
|
endfunction()
|
||||||
|
|
||||||
|
function(git_describe _var)
|
||||||
|
if(NOT GIT_FOUND)
|
||||||
|
find_package(Git QUIET)
|
||||||
|
endif()
|
||||||
|
get_git_head_revision(refspec hash)
|
||||||
|
if(NOT GIT_FOUND)
|
||||||
|
set(${_var} "GIT-NOTFOUND" PARENT_SCOPE)
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
if(NOT hash)
|
||||||
|
set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE)
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
# TODO sanitize
|
||||||
|
#if((${ARGN}" MATCHES "&&") OR
|
||||||
|
# (ARGN MATCHES "||") OR
|
||||||
|
# (ARGN MATCHES "\\;"))
|
||||||
|
# message("Please report the following error to the project!")
|
||||||
|
# message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}")
|
||||||
|
#endif()
|
||||||
|
|
||||||
|
#message(STATUS "Arguments to execute_process: ${ARGN}")
|
||||||
|
|
||||||
|
execute_process(COMMAND
|
||||||
|
"${GIT_EXECUTABLE}"
|
||||||
|
describe
|
||||||
|
${hash}
|
||||||
|
${ARGN}
|
||||||
|
WORKING_DIRECTORY
|
||||||
|
"${CMAKE_SOURCE_DIR}"
|
||||||
|
RESULT_VARIABLE
|
||||||
|
res
|
||||||
|
OUTPUT_VARIABLE
|
||||||
|
out
|
||||||
|
ERROR_QUIET
|
||||||
|
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
|
if(NOT res EQUAL 0)
|
||||||
|
set(out "${out}-${res}-NOTFOUND")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
set(${_var} "${out}" PARENT_SCOPE)
|
||||||
|
endfunction()
|
||||||
|
|
||||||
|
function(git_get_exact_tag _var)
|
||||||
|
git_describe(out --exact-match ${ARGN})
|
||||||
|
set(${_var} "${out}" PARENT_SCOPE)
|
||||||
|
endfunction()
|
38
cmake/GetGitRevisionDescription.cmake.in
Normal file
38
cmake/GetGitRevisionDescription.cmake.in
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
#
|
||||||
|
# Internal file for GetGitRevisionDescription.cmake
|
||||||
|
#
|
||||||
|
# Requires CMake 2.6 or newer (uses the 'function' command)
|
||||||
|
#
|
||||||
|
# Original Author:
|
||||||
|
# 2009-2010 Ryan Pavlik <rpavlik@iastate.edu> <abiryan@ryand.net>
|
||||||
|
# http://academic.cleardefinition.com
|
||||||
|
# Iowa State University HCI Graduate Program/VRAC
|
||||||
|
#
|
||||||
|
# Copyright Iowa State University 2009-2010.
|
||||||
|
# Distributed under the Boost Software License, Version 1.0.
|
||||||
|
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||||
|
# http://www.boost.org/LICENSE_1_0.txt)
|
||||||
|
|
||||||
|
set(HEAD_HASH)
|
||||||
|
|
||||||
|
file(READ "@HEAD_FILE@" HEAD_CONTENTS LIMIT 1024)
|
||||||
|
|
||||||
|
string(STRIP "${HEAD_CONTENTS}" HEAD_CONTENTS)
|
||||||
|
if(HEAD_CONTENTS MATCHES "ref")
|
||||||
|
# named branch
|
||||||
|
string(REPLACE "ref: " "" HEAD_REF "${HEAD_CONTENTS}")
|
||||||
|
if(EXISTS "@GIT_DIR@/${HEAD_REF}")
|
||||||
|
configure_file("@GIT_DIR@/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
|
||||||
|
elseif(EXISTS "@GIT_DIR@/logs/${HEAD_REF}")
|
||||||
|
configure_file("@GIT_DIR@/logs/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
|
||||||
|
set(HEAD_HASH "${HEAD_REF}")
|
||||||
|
endif()
|
||||||
|
else()
|
||||||
|
# detached HEAD
|
||||||
|
configure_file("@GIT_DIR@/HEAD" "@GIT_DATA@/head-ref" COPYONLY)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(NOT HEAD_HASH)
|
||||||
|
file(READ "@GIT_DATA@/head-ref" HEAD_HASH LIMIT 1024)
|
||||||
|
string(STRIP "${HEAD_HASH}" HEAD_HASH)
|
||||||
|
endif()
|
@ -1,290 +0,0 @@
|
|||||||
# https://github.com/sbellus/json-cmake/blob/9913da8800b95322d393894d3525d634568f305e/JSONParser.cmake
|
|
||||||
# MIT Licensed - https://github.com/sbellus/json-cmake/blob/master/LICENSE
|
|
||||||
|
|
||||||
cmake_minimum_required(VERSION 3.18)
|
|
||||||
|
|
||||||
if (DEFINED JSonParserGuard)
|
|
||||||
return()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
set(JSonParserGuard yes)
|
|
||||||
|
|
||||||
macro(sbeParseJson prefix jsonString)
|
|
||||||
cmake_policy(PUSH)
|
|
||||||
|
|
||||||
set(json_string "${${jsonString}}")
|
|
||||||
string(LENGTH "${json_string}" json_jsonLen)
|
|
||||||
set(json_index 0)
|
|
||||||
set(json_AllVariables ${prefix})
|
|
||||||
set(json_ArrayNestingLevel 0)
|
|
||||||
set(json_MaxArrayNestingLevel 0)
|
|
||||||
|
|
||||||
_sbeParse(${prefix})
|
|
||||||
|
|
||||||
unset(json_index)
|
|
||||||
unset(json_AllVariables)
|
|
||||||
unset(json_jsonLen)
|
|
||||||
unset(json_string)
|
|
||||||
unset(json_value)
|
|
||||||
unset(json_inValue)
|
|
||||||
unset(json_name)
|
|
||||||
unset(json_inName)
|
|
||||||
unset(json_newPrefix)
|
|
||||||
unset(json_reservedWord)
|
|
||||||
unset(json_arrayIndex)
|
|
||||||
unset(json_char)
|
|
||||||
unset(json_end)
|
|
||||||
unset(json_ArrayNestingLevel)
|
|
||||||
foreach(json_nestingLevel RANGE ${json_MaxArrayNestingLevel})
|
|
||||||
unset(json_${json_nestingLevel}_arrayIndex)
|
|
||||||
endforeach()
|
|
||||||
unset(json_nestingLevel)
|
|
||||||
unset(json_MaxArrayNestingLevel)
|
|
||||||
|
|
||||||
cmake_policy(POP)
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(sbeClearJson prefix)
|
|
||||||
foreach(json_var ${${prefix}})
|
|
||||||
unset(${json_var})
|
|
||||||
endforeach()
|
|
||||||
|
|
||||||
unset(${prefix})
|
|
||||||
unset(json_var)
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(sbePrintJson prefix)
|
|
||||||
foreach(json_var ${${prefix}})
|
|
||||||
message("${json_var} = ${${json_var}}")
|
|
||||||
endforeach()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeParse prefix)
|
|
||||||
|
|
||||||
while(${json_index} LESS ${json_jsonLen})
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
if("\"" STREQUAL "${json_char}")
|
|
||||||
_sbeParseNameValue(${prefix})
|
|
||||||
elseif("{" STREQUAL "${json_char}")
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
_sbeParseObject(${prefix})
|
|
||||||
elseif("[" STREQUAL "${json_char}")
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
_sbeParseArray(${prefix})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(${json_index} LESS ${json_jsonLen})
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
else()
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if ("}" STREQUAL "${json_char}" OR "]" STREQUAL "${json_char}")
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
endwhile()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeParseNameValue prefix)
|
|
||||||
set(json_name "")
|
|
||||||
set(json_inName no)
|
|
||||||
|
|
||||||
while(${json_index} LESS ${json_jsonLen})
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
# check if name ends
|
|
||||||
if("\"" STREQUAL "${json_char}" AND json_inName)
|
|
||||||
set(json_inName no)
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
if(NOT ${json_index} LESS ${json_jsonLen})
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
set(json_newPrefix ${prefix}.${json_name})
|
|
||||||
set(json_name "")
|
|
||||||
|
|
||||||
if(":" STREQUAL "${json_char}")
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
if(NOT ${json_index} LESS ${json_jsonLen})
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
if("\"" STREQUAL "${json_char}")
|
|
||||||
_sbeParseValue(${json_newPrefix})
|
|
||||||
break()
|
|
||||||
elseif("{" STREQUAL "${json_char}")
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
_sbeParseObject(${json_newPrefix})
|
|
||||||
break()
|
|
||||||
elseif("[" STREQUAL "${json_char}")
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
_sbeParseArray(${json_newPrefix})
|
|
||||||
break()
|
|
||||||
else()
|
|
||||||
# reserved word starts
|
|
||||||
_sbeParseReservedWord(${json_newPrefix})
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
else()
|
|
||||||
# name without value
|
|
||||||
list(APPEND ${json_AllVariables} ${json_newPrefix})
|
|
||||||
set(${json_newPrefix} "")
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(json_inName)
|
|
||||||
# remove escapes
|
|
||||||
if("\\" STREQUAL "${json_char}")
|
|
||||||
math(EXPR json_index "${json_index} + 1")
|
|
||||||
if(NOT ${json_index} LESS ${json_jsonLen})
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
set(json_name "${json_name}${json_char}")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# check if name starts
|
|
||||||
if("\"" STREQUAL "${json_char}" AND NOT json_inName)
|
|
||||||
set(json_inName yes)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
endwhile()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeParseReservedWord prefix)
|
|
||||||
set(json_reservedWord "")
|
|
||||||
set(json_end no)
|
|
||||||
while(${json_index} LESS ${json_jsonLen} AND NOT json_end)
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
if("," STREQUAL "${json_char}" OR "}" STREQUAL "${json_char}" OR "]" STREQUAL "${json_char}")
|
|
||||||
set(json_end yes)
|
|
||||||
else()
|
|
||||||
set(json_reservedWord "${json_reservedWord}${json_char}")
|
|
||||||
math(EXPR json_index "${json_index} + 1")
|
|
||||||
endif()
|
|
||||||
endwhile()
|
|
||||||
|
|
||||||
list(APPEND ${json_AllVariables} ${prefix})
|
|
||||||
string(STRIP "${json_reservedWord}" json_reservedWord)
|
|
||||||
set(${prefix} ${json_reservedWord})
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeParseValue prefix)
|
|
||||||
cmake_policy(SET CMP0054 NEW) # turn off implicit expansions in if statement
|
|
||||||
|
|
||||||
set(json_value "")
|
|
||||||
set(json_inValue no)
|
|
||||||
|
|
||||||
while(${json_index} LESS ${json_jsonLen})
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
# check if json_value ends, it is ended by "
|
|
||||||
if("\"" STREQUAL "${json_char}" AND json_inValue)
|
|
||||||
set(json_inValue no)
|
|
||||||
|
|
||||||
set(${prefix} ${json_value})
|
|
||||||
list(APPEND ${json_AllVariables} ${prefix})
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(json_inValue)
|
|
||||||
# if " is escaped consume
|
|
||||||
if("\\" STREQUAL "${json_char}")
|
|
||||||
math(EXPR json_index "${json_index} + 1")
|
|
||||||
if(NOT ${json_index} LESS ${json_jsonLen})
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
if(NOT "\"" STREQUAL "${json_char}")
|
|
||||||
# if it is not " then copy also escape character
|
|
||||||
set(json_char "\\${json_char}")
|
|
||||||
endif()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
_sbeAddEscapedCharacter("${json_char}")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# check if value starts
|
|
||||||
if("\"" STREQUAL "${json_char}" AND NOT json_inValue)
|
|
||||||
set(json_inValue yes)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
math(EXPR json_index "${json_index} + 1")
|
|
||||||
endwhile()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeAddEscapedCharacter char)
|
|
||||||
string(CONCAT json_value "${json_value}" "${char}")
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeParseObject prefix)
|
|
||||||
_sbeParse(${prefix})
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeParseArray prefix)
|
|
||||||
math(EXPR json_ArrayNestingLevel "${json_ArrayNestingLevel} + 1")
|
|
||||||
set(json_${json_ArrayNestingLevel}_arrayIndex 0)
|
|
||||||
|
|
||||||
set(${prefix} "")
|
|
||||||
list(APPEND ${json_AllVariables} ${prefix})
|
|
||||||
|
|
||||||
while(${json_index} LESS ${json_jsonLen})
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
if("\"" STREQUAL "${json_char}")
|
|
||||||
# simple value
|
|
||||||
list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex})
|
|
||||||
_sbeParseValue(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex})
|
|
||||||
elseif("{" STREQUAL "${json_char}")
|
|
||||||
# object
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex})
|
|
||||||
_sbeParseObject(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex})
|
|
||||||
else()
|
|
||||||
list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex})
|
|
||||||
_sbeParseReservedWord(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(NOT ${json_index} LESS ${json_jsonLen})
|
|
||||||
break()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
|
|
||||||
if("]" STREQUAL "${json_char}")
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
break()
|
|
||||||
elseif("," STREQUAL "${json_char}")
|
|
||||||
math(EXPR json_${json_ArrayNestingLevel}_arrayIndex "${json_${json_ArrayNestingLevel}_arrayIndex} + 1")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
_sbeMoveToNextNonEmptyCharacter()
|
|
||||||
endwhile()
|
|
||||||
|
|
||||||
if(${json_MaxArrayNestingLevel} LESS ${json_ArrayNestingLevel})
|
|
||||||
set(json_MaxArrayNestingLevel ${json_ArrayNestingLevel})
|
|
||||||
endif()
|
|
||||||
math(EXPR json_ArrayNestingLevel "${json_ArrayNestingLevel} - 1")
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
macro(_sbeMoveToNextNonEmptyCharacter)
|
|
||||||
math(EXPR json_index "${json_index} + 1")
|
|
||||||
if(${json_index} LESS ${json_jsonLen})
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
while(${json_char} MATCHES "[ \t\n\r]" AND ${json_index} LESS ${json_jsonLen})
|
|
||||||
math(EXPR json_index "${json_index} + 1")
|
|
||||||
string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
|
|
||||||
endwhile()
|
|
||||||
endif()
|
|
||||||
endmacro()
|
|
40
cmake/check_luabind.cmake
Normal file
40
cmake/check_luabind.cmake
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
INCLUDE (CheckCXXSourceCompiles)
|
||||||
|
unset(LUABIND_WORKS CACHE)
|
||||||
|
unset(LUABIND51_WORKS CACHE)
|
||||||
|
set (LUABIND_CHECK_SRC "#include \"lua.h\"\n#include <luabind/luabind.hpp>\n int main() { lua_State *myLuaState = luaL_newstate(); luabind::open(myLuaState); return 0;}")
|
||||||
|
set (CMAKE_TRY_COMPILE_CONFIGURATION ${CMAKE_BUILD_TYPE})
|
||||||
|
set (CMAKE_REQUIRED_INCLUDES "${Boost_INCLUDE_DIR};${LUABIND_INCLUDE_DIR};${LUA_INCLUDE_DIR}")
|
||||||
|
set (CMAKE_REQUIRED_LIBRARIES "${LUABIND_LIBRARY};${LUA_LIBRARY}")
|
||||||
|
|
||||||
|
find_package(Lua52)
|
||||||
|
if(NOT APPLE)
|
||||||
|
find_package(LuaJIT 5.2)
|
||||||
|
endif()
|
||||||
|
if(LUA52_FOUND)
|
||||||
|
set (CMAKE_REQUIRED_INCLUDES "${Boost_INCLUDE_DIR};${LUABIND_INCLUDE_DIR};${LUA_INCLUDE_DIR}")
|
||||||
|
set (CMAKE_REQUIRED_LIBRARIES "${LUABIND_LIBRARY};${LUA_LIBRARY}")
|
||||||
|
CHECK_CXX_SOURCE_COMPILES("${LUABIND_CHECK_SRC}" LUABIND_WORKS)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(LUABIND_WORKS)
|
||||||
|
message(STATUS "Luabind/Lua5.2 combination working with ${LUA_LIBRARY}")
|
||||||
|
else()
|
||||||
|
message(STATUS "Luabind/Lua5.2 not feasible, falling back to Lua 5.1.")
|
||||||
|
unset(LUA_FOUND CACHE)
|
||||||
|
unset(LUA_INCLUDE_DIR CACHE)
|
||||||
|
unset(LUA_LIBRARY CACHE)
|
||||||
|
find_package(Lua51 REQUIRED)
|
||||||
|
if(NOT APPLE)
|
||||||
|
find_package(LuaJIT 5.1)
|
||||||
|
endif()
|
||||||
|
set (CMAKE_REQUIRED_INCLUDES "${Boost_INCLUDE_DIR};${LUABIND_INCLUDE_DIR};${LUA_INCLUDE_DIR}")
|
||||||
|
set (CMAKE_REQUIRED_LIBRARIES "${LUABIND_LIBRARY};${LUA_LIBRARY}")
|
||||||
|
|
||||||
|
CHECK_CXX_SOURCE_COMPILES("${LUABIND_CHECK_SRC}" LUABIND51_WORKS)
|
||||||
|
|
||||||
|
if(LUABIND51_WORKS)
|
||||||
|
message(STATUS "Luabind works with Lua 5.1 at ${LUA_LIBRARY}")
|
||||||
|
else()
|
||||||
|
message(FATAL_ERROR "Luabind does not work with Lua 5.1 at ${LUA_LIBRARY}, no working Luabind found")
|
||||||
|
endif()
|
||||||
|
endif()
|
@ -32,11 +32,13 @@ cache_file = "%s/cached_options.txt" % (scriptpath)
|
|||||||
db = None
|
db = None
|
||||||
if os.access(cache_file, os.R_OK) == 0:
|
if os.access(cache_file, os.R_OK) == 0:
|
||||||
db = load_db(sys.argv[1])
|
db = load_db(sys.argv[1])
|
||||||
with open(cache_file, "wb") as f:
|
f = open(cache_file, "wb")
|
||||||
pickle.dump(db, f)
|
pickle.dump(db, f)
|
||||||
|
f.close()
|
||||||
else:
|
else:
|
||||||
with open(cache_file) as f:
|
f = open(cache_file)
|
||||||
db = pickle.load(f)
|
db = pickle.load(f)
|
||||||
|
f.close()
|
||||||
|
|
||||||
if db and sys.argv[2] in db:
|
if db and sys.argv[2] in db:
|
||||||
for option in db[sys.argv[2]]:
|
for option in db[sys.argv[2]]:
|
||||||
|
@ -1,21 +0,0 @@
|
|||||||
if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
|
|
||||||
message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
|
|
||||||
endif(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
|
|
||||||
|
|
||||||
file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
|
|
||||||
string(REGEX REPLACE "\n" ";" files "${files}")
|
|
||||||
foreach(file ${files})
|
|
||||||
message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
|
|
||||||
if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
|
|
||||||
exec_program(
|
|
||||||
"@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
|
|
||||||
OUTPUT_VARIABLE rm_out
|
|
||||||
RETURN_VALUE rm_retval
|
|
||||||
)
|
|
||||||
if(NOT "${rm_retval}" STREQUAL 0)
|
|
||||||
message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
|
|
||||||
endif(NOT "${rm_retval}" STREQUAL 0)
|
|
||||||
else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
|
|
||||||
message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
|
|
||||||
endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
|
|
||||||
endforeach(file)
|
|
1026
cmake/conan.cmake
1026
cmake/conan.cmake
File diff suppressed because it is too large
Load Diff
@ -1,11 +1,11 @@
|
|||||||
prefix=@CMAKE_INSTALL_PREFIX@
|
prefix=@CMAKE_INSTALL_PREFIX@
|
||||||
includedir=@PKGCONFIG_INCLUDE_DIR@
|
includedir=${prefix}/include/osrm
|
||||||
libdir=@PKGCONFIG_LIBRARY_DIR@
|
libdir=${prefix}/lib
|
||||||
|
|
||||||
Name: libOSRM
|
Name: libOSRM
|
||||||
Description: Project OSRM library
|
Description: Project OSRM library
|
||||||
Version: @OSRM_VERSION@
|
Version: @GIT_DESCRIPTION@
|
||||||
Requires:
|
Requires:
|
||||||
Libs: -L${libdir} -losrm @PKGCONFIG_OSRM_LDFLAGS@
|
Libs: -L${libdir} -lOSRM
|
||||||
Libs.private: @PKGCONFIG_OSRM_DEPENDENT_LIBRARIES@
|
Libs.private: @BOOST_LIBRARY_LISTING@
|
||||||
Cflags: @PKGCONFIG_OSRM_INCLUDE_FLAGS@ @PKGCONFIG_OSRM_CXXFLAGS@
|
Cflags: -I${includedir}
|
||||||
|
2
cmake/postinst.in
Normal file
2
cmake/postinst.in
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
#/usr/bin/env bash
|
||||||
|
ln -s /usr/share/doc/@CMAKE_PROJECT_NAME@/profiles/car.lua @CMAKE_INSTALL_PREFIX@/profile.lua
|
@ -1,88 +0,0 @@
|
|||||||
include (CheckCXXCompilerFlag)
|
|
||||||
include (CheckCCompilerFlag)
|
|
||||||
|
|
||||||
# Try to add -Wflag if compiler supports it
|
|
||||||
macro (add_warning flag)
|
|
||||||
string(REPLACE "-" "_" underscored_flag ${flag})
|
|
||||||
string(REPLACE "+" "x" underscored_flag ${underscored_flag})
|
|
||||||
|
|
||||||
check_cxx_compiler_flag("-W${flag}" SUPPORTS_CXXFLAG_${underscored_flag})
|
|
||||||
check_c_compiler_flag("-W${flag}" SUPPORTS_CFLAG_${underscored_flag})
|
|
||||||
|
|
||||||
if (SUPPORTS_CXXFLAG_${underscored_flag})
|
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W${flag}")
|
|
||||||
else()
|
|
||||||
message (STATUS "Flag -W${flag} is unsupported")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if (SUPPORTS_CFLAG_${underscored_flag})
|
|
||||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -W${flag}")
|
|
||||||
else()
|
|
||||||
message(STATUS "Flag -W${flag} is unsupported")
|
|
||||||
endif()
|
|
||||||
endmacro()
|
|
||||||
|
|
||||||
# Try to add -Wno flag if compiler supports it
|
|
||||||
macro (no_warning flag)
|
|
||||||
add_warning(no-${flag})
|
|
||||||
endmacro ()
|
|
||||||
|
|
||||||
|
|
||||||
# The same but only for specified target.
|
|
||||||
macro (target_add_warning target flag)
|
|
||||||
string (REPLACE "-" "_" underscored_flag ${flag})
|
|
||||||
string (REPLACE "+" "x" underscored_flag ${underscored_flag})
|
|
||||||
|
|
||||||
check_cxx_compiler_flag("-W${flag}" SUPPORTS_CXXFLAG_${underscored_flag})
|
|
||||||
|
|
||||||
if (SUPPORTS_CXXFLAG_${underscored_flag})
|
|
||||||
target_compile_options (${target} PRIVATE "-W${flag}")
|
|
||||||
else ()
|
|
||||||
message (STATUS "Flag -W${flag} is unsupported")
|
|
||||||
endif ()
|
|
||||||
endmacro ()
|
|
||||||
|
|
||||||
macro (target_no_warning target flag)
|
|
||||||
target_add_warning(${target} no-${flag})
|
|
||||||
endmacro ()
|
|
||||||
|
|
||||||
add_warning(all)
|
|
||||||
add_warning(extra)
|
|
||||||
add_warning(pedantic)
|
|
||||||
add_warning(error) # treat all warnings as errors
|
|
||||||
if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
|
|
||||||
add_warning(strict-overflow=1)
|
|
||||||
endif()
|
|
||||||
add_warning(suggest-override)
|
|
||||||
add_warning(suggest-destructor-override)
|
|
||||||
add_warning(unused)
|
|
||||||
add_warning(unreachable-code)
|
|
||||||
add_warning(delete-incomplete)
|
|
||||||
add_warning(duplicated-cond)
|
|
||||||
add_warning(disabled-optimization)
|
|
||||||
add_warning(init-self)
|
|
||||||
add_warning(bool-compare)
|
|
||||||
add_warning(logical-not-parentheses)
|
|
||||||
add_warning(logical-op)
|
|
||||||
add_warning(misleading-indentation)
|
|
||||||
# `no-` prefix is part of warning name(i.e. doesn't mean we are disabling it)
|
|
||||||
add_warning(no-return-local-addr)
|
|
||||||
add_warning(odr)
|
|
||||||
add_warning(pointer-arith)
|
|
||||||
add_warning(redundant-decls)
|
|
||||||
add_warning(reorder)
|
|
||||||
add_warning(shift-negative-value)
|
|
||||||
add_warning(sizeof-array-argument)
|
|
||||||
add_warning(switch-bool)
|
|
||||||
add_warning(tautological-compare)
|
|
||||||
add_warning(trampolines)
|
|
||||||
# these warnings are not enabled by default
|
|
||||||
# no_warning(name-of-warning)
|
|
||||||
no_warning(deprecated-comma-subscript)
|
|
||||||
no_warning(comma-subscript)
|
|
||||||
no_warning(ambiguous-reversed-operator)
|
|
||||||
no_warning(restrict)
|
|
||||||
no_warning(free-nonheap-object)
|
|
||||||
if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
|
|
||||||
no_warning(stringop-overflow)
|
|
||||||
endif()
|
|
@ -1,6 +0,0 @@
|
|||||||
coverage:
|
|
||||||
|
|
||||||
ignore:
|
|
||||||
- third_party/.*
|
|
||||||
|
|
||||||
comment: off
|
|
9
config/cucumber.yml
Normal file
9
config/cucumber.yml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
# config/cucumber.yml
|
||||||
|
##YAML Template
|
||||||
|
---
|
||||||
|
default: --require features --tags ~@todo --tags ~@bug --tag ~@stress
|
||||||
|
verify: --require features --tags ~@todo --tags ~@bug --tags ~@stress -f progress
|
||||||
|
jenkins: --require features --tags ~@todo --tags ~@bug --tags ~@stress --tags ~@options -f progress
|
||||||
|
bugs: --require features --tags @bug
|
||||||
|
todo: --require features --tags @todo
|
||||||
|
all: --require features
|
963
contractor/contractor.hpp
Normal file
963
contractor/contractor.hpp
Normal file
@ -0,0 +1,963 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM, Dennis Luxen, others
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef CONTRACTOR_HPP
|
||||||
|
#define CONTRACTOR_HPP
|
||||||
|
|
||||||
|
#include "../data_structures/binary_heap.hpp"
|
||||||
|
#include "../data_structures/deallocating_vector.hpp"
|
||||||
|
#include "../data_structures/dynamic_graph.hpp"
|
||||||
|
#include "../data_structures/percent.hpp"
|
||||||
|
#include "../data_structures/query_edge.hpp"
|
||||||
|
#include "../data_structures/xor_fast_hash.hpp"
|
||||||
|
#include "../data_structures/xor_fast_hash_storage.hpp"
|
||||||
|
#include "../util/integer_range.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
#include "../util/timing_util.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <stxxl/vector>
|
||||||
|
|
||||||
|
#include <tbb/enumerable_thread_specific.h>
|
||||||
|
#include <tbb/parallel_for.h>
|
||||||
|
#include <tbb/parallel_sort.h>
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <limits>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
class Contractor
|
||||||
|
{
|
||||||
|
|
||||||
|
private:
|
||||||
|
struct ContractorEdgeData
|
||||||
|
{
|
||||||
|
ContractorEdgeData()
|
||||||
|
: distance(0), id(0), originalEdges(0), shortcut(0), forward(0), backward(0),
|
||||||
|
is_original_via_node_ID(false)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
ContractorEdgeData(unsigned distance,
|
||||||
|
unsigned original_edges,
|
||||||
|
unsigned id,
|
||||||
|
bool shortcut,
|
||||||
|
bool forward,
|
||||||
|
bool backward)
|
||||||
|
: distance(distance), id(id),
|
||||||
|
originalEdges(std::min((unsigned)1 << 28, original_edges)), shortcut(shortcut),
|
||||||
|
forward(forward), backward(backward), is_original_via_node_ID(false)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
unsigned distance;
|
||||||
|
unsigned id;
|
||||||
|
unsigned originalEdges : 28;
|
||||||
|
bool shortcut : 1;
|
||||||
|
bool forward : 1;
|
||||||
|
bool backward : 1;
|
||||||
|
bool is_original_via_node_ID : 1;
|
||||||
|
} data;
|
||||||
|
|
||||||
|
struct ContractorHeapData
|
||||||
|
{
|
||||||
|
short hop;
|
||||||
|
bool target;
|
||||||
|
ContractorHeapData() : hop(0), target(false) {}
|
||||||
|
ContractorHeapData(short h, bool t) : hop(h), target(t) {}
|
||||||
|
};
|
||||||
|
|
||||||
|
using ContractorGraph = DynamicGraph<ContractorEdgeData>;
|
||||||
|
// using ContractorHeap = BinaryHeap<NodeID, NodeID, int, ContractorHeapData,
|
||||||
|
// ArrayStorage<NodeID, NodeID>
|
||||||
|
// >;
|
||||||
|
using ContractorHeap =
|
||||||
|
BinaryHeap<NodeID, NodeID, int, ContractorHeapData, XORFastHashStorage<NodeID, NodeID>>;
|
||||||
|
using ContractorEdge = ContractorGraph::InputEdge;
|
||||||
|
|
||||||
|
struct ContractorThreadData
|
||||||
|
{
|
||||||
|
ContractorHeap heap;
|
||||||
|
std::vector<ContractorEdge> inserted_edges;
|
||||||
|
std::vector<NodeID> neighbours;
|
||||||
|
explicit ContractorThreadData(NodeID nodes) : heap(nodes) {}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct NodePriorityData
|
||||||
|
{
|
||||||
|
int depth;
|
||||||
|
NodePriorityData() : depth(0) {}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ContractionStats
|
||||||
|
{
|
||||||
|
int edges_deleted_count;
|
||||||
|
int edges_added_count;
|
||||||
|
int original_edges_deleted_count;
|
||||||
|
int original_edges_added_count;
|
||||||
|
ContractionStats()
|
||||||
|
: edges_deleted_count(0), edges_added_count(0), original_edges_deleted_count(0),
|
||||||
|
original_edges_added_count(0)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct RemainingNodeData
|
||||||
|
{
|
||||||
|
RemainingNodeData() : id(0), is_independent(false) {}
|
||||||
|
NodeID id : 31;
|
||||||
|
bool is_independent : 1;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ThreadDataContainer
|
||||||
|
{
|
||||||
|
explicit ThreadDataContainer(int number_of_nodes) : number_of_nodes(number_of_nodes) {}
|
||||||
|
|
||||||
|
inline ContractorThreadData *getThreadData()
|
||||||
|
{
|
||||||
|
bool exists = false;
|
||||||
|
auto &ref = data.local(exists);
|
||||||
|
if (!exists)
|
||||||
|
{
|
||||||
|
ref = std::make_shared<ContractorThreadData>(number_of_nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
return ref.get();
|
||||||
|
}
|
||||||
|
|
||||||
|
int number_of_nodes;
|
||||||
|
using EnumerableThreadData =
|
||||||
|
tbb::enumerable_thread_specific<std::shared_ptr<ContractorThreadData>>;
|
||||||
|
EnumerableThreadData data;
|
||||||
|
};
|
||||||
|
|
||||||
|
public:
|
||||||
|
template <class ContainerT> Contractor(int nodes, ContainerT &input_edge_list)
|
||||||
|
{
|
||||||
|
std::vector<ContractorEdge> edges;
|
||||||
|
edges.reserve(input_edge_list.size() * 2);
|
||||||
|
|
||||||
|
const auto dend = input_edge_list.dend();
|
||||||
|
for (auto diter = input_edge_list.dbegin(); diter != dend; ++diter)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT_MSG(static_cast<unsigned int>(std::max(diter->weight, 1)) > 0,
|
||||||
|
"edge distance < 1");
|
||||||
|
#ifndef NDEBUG
|
||||||
|
if (static_cast<unsigned int>(std::max(diter->weight, 1)) > 24 * 60 * 60 * 10)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING)
|
||||||
|
<< "Edge weight large -> "
|
||||||
|
<< static_cast<unsigned int>(std::max(diter->weight, 1)) << " : "
|
||||||
|
<< static_cast<unsigned int>(diter->source) << " -> " << static_cast<unsigned int>(diter->target);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
edges.emplace_back(diter->source, diter->target,
|
||||||
|
static_cast<unsigned int>(std::max(diter->weight, 1)), 1,
|
||||||
|
diter->edge_id, false, diter->forward ? true : false,
|
||||||
|
diter->backward ? true : false);
|
||||||
|
|
||||||
|
edges.emplace_back(diter->target, diter->source,
|
||||||
|
static_cast<unsigned int>(std::max(diter->weight, 1)), 1,
|
||||||
|
diter->edge_id, false, diter->backward ? true : false,
|
||||||
|
diter->forward ? true : false);
|
||||||
|
}
|
||||||
|
// clear input vector
|
||||||
|
input_edge_list.clear();
|
||||||
|
// FIXME not sure if we need this
|
||||||
|
edges.shrink_to_fit();
|
||||||
|
|
||||||
|
tbb::parallel_sort(edges.begin(), edges.end());
|
||||||
|
NodeID edge = 0;
|
||||||
|
for (NodeID i = 0; i < edges.size();)
|
||||||
|
{
|
||||||
|
const NodeID source = edges[i].source;
|
||||||
|
const NodeID target = edges[i].target;
|
||||||
|
const NodeID id = edges[i].data.id;
|
||||||
|
// remove eigenloops
|
||||||
|
if (source == target)
|
||||||
|
{
|
||||||
|
++i;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
ContractorEdge forward_edge;
|
||||||
|
ContractorEdge reverse_edge;
|
||||||
|
forward_edge.source = reverse_edge.source = source;
|
||||||
|
forward_edge.target = reverse_edge.target = target;
|
||||||
|
forward_edge.data.forward = reverse_edge.data.backward = true;
|
||||||
|
forward_edge.data.backward = reverse_edge.data.forward = false;
|
||||||
|
forward_edge.data.shortcut = reverse_edge.data.shortcut = false;
|
||||||
|
forward_edge.data.id = reverse_edge.data.id = id;
|
||||||
|
forward_edge.data.originalEdges = reverse_edge.data.originalEdges = 1;
|
||||||
|
forward_edge.data.distance = reverse_edge.data.distance =
|
||||||
|
std::numeric_limits<int>::max();
|
||||||
|
// remove parallel edges
|
||||||
|
while (i < edges.size() && edges[i].source == source && edges[i].target == target)
|
||||||
|
{
|
||||||
|
if (edges[i].data.forward)
|
||||||
|
{
|
||||||
|
forward_edge.data.distance =
|
||||||
|
std::min(edges[i].data.distance, forward_edge.data.distance);
|
||||||
|
}
|
||||||
|
if (edges[i].data.backward)
|
||||||
|
{
|
||||||
|
reverse_edge.data.distance =
|
||||||
|
std::min(edges[i].data.distance, reverse_edge.data.distance);
|
||||||
|
}
|
||||||
|
++i;
|
||||||
|
}
|
||||||
|
// merge edges (s,t) and (t,s) into bidirectional edge
|
||||||
|
if (forward_edge.data.distance == reverse_edge.data.distance)
|
||||||
|
{
|
||||||
|
if ((int)forward_edge.data.distance != std::numeric_limits<int>::max())
|
||||||
|
{
|
||||||
|
forward_edge.data.backward = true;
|
||||||
|
edges[edge++] = forward_edge;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{ // insert seperate edges
|
||||||
|
if (((int)forward_edge.data.distance) != std::numeric_limits<int>::max())
|
||||||
|
{
|
||||||
|
edges[edge++] = forward_edge;
|
||||||
|
}
|
||||||
|
if ((int)reverse_edge.data.distance != std::numeric_limits<int>::max())
|
||||||
|
{
|
||||||
|
edges[edge++] = reverse_edge;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
std::cout << "merged " << edges.size() - edge << " edges out of " << edges.size()
|
||||||
|
<< std::endl;
|
||||||
|
edges.resize(edge);
|
||||||
|
contractor_graph = std::make_shared<ContractorGraph>(nodes, edges);
|
||||||
|
edges.clear();
|
||||||
|
edges.shrink_to_fit();
|
||||||
|
|
||||||
|
BOOST_ASSERT(0 == edges.capacity());
|
||||||
|
// unsigned maxdegree = 0;
|
||||||
|
// NodeID highestNode = 0;
|
||||||
|
//
|
||||||
|
// for(unsigned i = 0; i < contractor_graph->GetNumberOfNodes(); ++i) {
|
||||||
|
// unsigned degree = contractor_graph->EndEdges(i) -
|
||||||
|
// contractor_graph->BeginEdges(i);
|
||||||
|
// if(degree > maxdegree) {
|
||||||
|
// maxdegree = degree;
|
||||||
|
// highestNode = i;
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// SimpleLogger().Write() << "edges at node with id " << highestNode << " has degree
|
||||||
|
// " << maxdegree;
|
||||||
|
// for(unsigned i = contractor_graph->BeginEdges(highestNode); i <
|
||||||
|
// contractor_graph->EndEdges(highestNode); ++i) {
|
||||||
|
// SimpleLogger().Write() << " ->(" << highestNode << "," <<
|
||||||
|
// contractor_graph->GetTarget(i)
|
||||||
|
// << "); via: " << contractor_graph->GetEdgeData(i).via;
|
||||||
|
// }
|
||||||
|
|
||||||
|
std::cout << "contractor finished initalization" << std::endl;
|
||||||
|
}
|
||||||
|
|
||||||
|
~Contractor() {}
|
||||||
|
|
||||||
|
void Run()
|
||||||
|
{
|
||||||
|
// for the preperation we can use a big grain size, which is much faster (probably cache)
|
||||||
|
constexpr size_t InitGrainSize = 100000;
|
||||||
|
constexpr size_t PQGrainSize = 100000;
|
||||||
|
// auto_partitioner will automatically increase the blocksize if we have
|
||||||
|
// a lot of data. It is *important* for the last loop iterations
|
||||||
|
// (which have a very small dataset) that it is devisible.
|
||||||
|
constexpr size_t IndependentGrainSize = 1;
|
||||||
|
constexpr size_t ContractGrainSize = 1;
|
||||||
|
constexpr size_t NeighboursGrainSize = 1;
|
||||||
|
constexpr size_t DeleteGrainSize = 1;
|
||||||
|
|
||||||
|
const NodeID number_of_nodes = contractor_graph->GetNumberOfNodes();
|
||||||
|
Percent p(number_of_nodes);
|
||||||
|
|
||||||
|
ThreadDataContainer thread_data_list(number_of_nodes);
|
||||||
|
|
||||||
|
NodeID number_of_contracted_nodes = 0;
|
||||||
|
std::vector<RemainingNodeData> remaining_nodes(number_of_nodes);
|
||||||
|
std::vector<float> node_priorities(number_of_nodes);
|
||||||
|
std::vector<NodePriorityData> node_data(number_of_nodes);
|
||||||
|
|
||||||
|
// initialize priorities in parallel
|
||||||
|
tbb::parallel_for(tbb::blocked_range<int>(0, number_of_nodes, InitGrainSize),
|
||||||
|
[&remaining_nodes](const tbb::blocked_range<int> &range)
|
||||||
|
{
|
||||||
|
for (int x = range.begin(); x != range.end(); ++x)
|
||||||
|
{
|
||||||
|
remaining_nodes[x].id = x;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
std::cout << "initializing elimination PQ ..." << std::flush;
|
||||||
|
tbb::parallel_for(tbb::blocked_range<int>(0, number_of_nodes, PQGrainSize),
|
||||||
|
[this, &node_priorities, &node_data, &thread_data_list](
|
||||||
|
const tbb::blocked_range<int> &range)
|
||||||
|
{
|
||||||
|
ContractorThreadData *data = thread_data_list.getThreadData();
|
||||||
|
for (int x = range.begin(); x != range.end(); ++x)
|
||||||
|
{
|
||||||
|
node_priorities[x] =
|
||||||
|
this->EvaluateNodePriority(data, &node_data[x], x);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
std::cout << "ok" << std::endl << "preprocessing " << number_of_nodes << " nodes ..."
|
||||||
|
<< std::flush;
|
||||||
|
|
||||||
|
bool flushed_contractor = false;
|
||||||
|
while (number_of_nodes > 2 && number_of_contracted_nodes < number_of_nodes)
|
||||||
|
{
|
||||||
|
if (!flushed_contractor && (number_of_contracted_nodes > (number_of_nodes * 0.65)))
|
||||||
|
{
|
||||||
|
DeallocatingVector<ContractorEdge> new_edge_set; // this one is not explicitely
|
||||||
|
// cleared since it goes out of
|
||||||
|
// scope anywa
|
||||||
|
std::cout << " [flush " << number_of_contracted_nodes << " nodes] " << std::flush;
|
||||||
|
|
||||||
|
// Delete old heap data to free memory that we need for the coming operations
|
||||||
|
thread_data_list.data.clear();
|
||||||
|
|
||||||
|
// Create new priority array
|
||||||
|
std::vector<float> new_node_priority(remaining_nodes.size());
|
||||||
|
// this map gives the old IDs from the new ones, necessary to get a consistent graph
|
||||||
|
// at the end of contraction
|
||||||
|
orig_node_id_to_new_id_map.resize(remaining_nodes.size());
|
||||||
|
// this map gives the new IDs from the old ones, necessary to remap targets from the
|
||||||
|
// remaining graph
|
||||||
|
std::vector<NodeID> new_node_id_from_orig_id_map(number_of_nodes, UINT_MAX);
|
||||||
|
|
||||||
|
// build forward and backward renumbering map and remap ids in remaining_nodes and
|
||||||
|
// Priorities.
|
||||||
|
for (const auto new_node_id : osrm::irange<std::size_t>(0, remaining_nodes.size()))
|
||||||
|
{
|
||||||
|
// create renumbering maps in both directions
|
||||||
|
orig_node_id_to_new_id_map[new_node_id] = remaining_nodes[new_node_id].id;
|
||||||
|
new_node_id_from_orig_id_map[remaining_nodes[new_node_id].id] = new_node_id;
|
||||||
|
new_node_priority[new_node_id] =
|
||||||
|
node_priorities[remaining_nodes[new_node_id].id];
|
||||||
|
remaining_nodes[new_node_id].id = new_node_id;
|
||||||
|
}
|
||||||
|
// walk over all nodes
|
||||||
|
for (const auto i :
|
||||||
|
osrm::irange<std::size_t>(0, contractor_graph->GetNumberOfNodes()))
|
||||||
|
{
|
||||||
|
const NodeID source = i;
|
||||||
|
for (auto current_edge : contractor_graph->GetAdjacentEdgeRange(source))
|
||||||
|
{
|
||||||
|
ContractorGraph::EdgeData &data =
|
||||||
|
contractor_graph->GetEdgeData(current_edge);
|
||||||
|
const NodeID target = contractor_graph->GetTarget(current_edge);
|
||||||
|
if (SPECIAL_NODEID == new_node_id_from_orig_id_map[i])
|
||||||
|
{
|
||||||
|
external_edge_list.push_back({source, target, data});
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// node is not yet contracted.
|
||||||
|
// add (renumbered) outgoing edges to new DynamicGraph.
|
||||||
|
ContractorEdge new_edge = {new_node_id_from_orig_id_map[source],
|
||||||
|
new_node_id_from_orig_id_map[target],
|
||||||
|
data};
|
||||||
|
|
||||||
|
new_edge.data.is_original_via_node_ID = true;
|
||||||
|
BOOST_ASSERT_MSG(UINT_MAX != new_node_id_from_orig_id_map[source],
|
||||||
|
"new source id not resolveable");
|
||||||
|
BOOST_ASSERT_MSG(UINT_MAX != new_node_id_from_orig_id_map[target],
|
||||||
|
"new target id not resolveable");
|
||||||
|
new_edge_set.push_back(new_edge);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete map from old NodeIDs to new ones.
|
||||||
|
new_node_id_from_orig_id_map.clear();
|
||||||
|
new_node_id_from_orig_id_map.shrink_to_fit();
|
||||||
|
|
||||||
|
// Replace old priorities array by new one
|
||||||
|
node_priorities.swap(new_node_priority);
|
||||||
|
// Delete old node_priorities vector
|
||||||
|
new_node_priority.clear();
|
||||||
|
new_node_priority.shrink_to_fit();
|
||||||
|
// old Graph is removed
|
||||||
|
contractor_graph.reset();
|
||||||
|
|
||||||
|
// create new graph
|
||||||
|
std::sort(new_edge_set.begin(), new_edge_set.end());
|
||||||
|
contractor_graph =
|
||||||
|
std::make_shared<ContractorGraph>(remaining_nodes.size(), new_edge_set);
|
||||||
|
|
||||||
|
new_edge_set.clear();
|
||||||
|
flushed_contractor = true;
|
||||||
|
|
||||||
|
// INFO: MAKE SURE THIS IS THE LAST OPERATION OF THE FLUSH!
|
||||||
|
// reinitialize heaps and ThreadData objects with appropriate size
|
||||||
|
thread_data_list.number_of_nodes = contractor_graph->GetNumberOfNodes();
|
||||||
|
}
|
||||||
|
|
||||||
|
const int last = (int)remaining_nodes.size();
|
||||||
|
tbb::parallel_for(tbb::blocked_range<int>(0, last, IndependentGrainSize),
|
||||||
|
[this, &node_priorities, &remaining_nodes, &thread_data_list](
|
||||||
|
const tbb::blocked_range<int> &range)
|
||||||
|
{
|
||||||
|
ContractorThreadData *data = thread_data_list.getThreadData();
|
||||||
|
// determine independent node set
|
||||||
|
for (int i = range.begin(); i != range.end(); ++i)
|
||||||
|
{
|
||||||
|
const NodeID node = remaining_nodes[i].id;
|
||||||
|
remaining_nodes[i].is_independent =
|
||||||
|
this->IsNodeIndependent(node_priorities, data, node);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const auto first = stable_partition(remaining_nodes.begin(), remaining_nodes.end(),
|
||||||
|
[](RemainingNodeData node_data)
|
||||||
|
{
|
||||||
|
return !node_data.is_independent;
|
||||||
|
});
|
||||||
|
const int first_independent_node = static_cast<int>(first - remaining_nodes.begin());
|
||||||
|
|
||||||
|
// contract independent nodes
|
||||||
|
tbb::parallel_for(
|
||||||
|
tbb::blocked_range<int>(first_independent_node, last, ContractGrainSize),
|
||||||
|
[this, &remaining_nodes, &thread_data_list](const tbb::blocked_range<int> &range)
|
||||||
|
{
|
||||||
|
ContractorThreadData *data = thread_data_list.getThreadData();
|
||||||
|
for (int position = range.begin(); position != range.end(); ++position)
|
||||||
|
{
|
||||||
|
const NodeID x = remaining_nodes[position].id;
|
||||||
|
this->ContractNode<false>(data, x);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// make sure we really sort each block
|
||||||
|
tbb::parallel_for(
|
||||||
|
thread_data_list.data.range(),
|
||||||
|
[&](const ThreadDataContainer::EnumerableThreadData::range_type &range)
|
||||||
|
{
|
||||||
|
for (auto &data : range)
|
||||||
|
std::sort(data->inserted_edges.begin(), data->inserted_edges.end());
|
||||||
|
});
|
||||||
|
tbb::parallel_for(
|
||||||
|
tbb::blocked_range<int>(first_independent_node, last, DeleteGrainSize),
|
||||||
|
[this, &remaining_nodes, &thread_data_list](const tbb::blocked_range<int> &range)
|
||||||
|
{
|
||||||
|
ContractorThreadData *data = thread_data_list.getThreadData();
|
||||||
|
for (int position = range.begin(); position != range.end(); ++position)
|
||||||
|
{
|
||||||
|
const NodeID x = remaining_nodes[position].id;
|
||||||
|
this->DeleteIncomingEdges(data, x);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// insert new edges
|
||||||
|
for (auto &data : thread_data_list.data)
|
||||||
|
{
|
||||||
|
for (const ContractorEdge &edge : data->inserted_edges)
|
||||||
|
{
|
||||||
|
const EdgeID current_edge_ID =
|
||||||
|
contractor_graph->FindEdge(edge.source, edge.target);
|
||||||
|
if (current_edge_ID < contractor_graph->EndEdges(edge.source))
|
||||||
|
{
|
||||||
|
ContractorGraph::EdgeData ¤t_data =
|
||||||
|
contractor_graph->GetEdgeData(current_edge_ID);
|
||||||
|
if (current_data.shortcut && edge.data.forward == current_data.forward &&
|
||||||
|
edge.data.backward == current_data.backward &&
|
||||||
|
edge.data.distance < current_data.distance)
|
||||||
|
{
|
||||||
|
// found a duplicate edge with smaller weight, update it.
|
||||||
|
current_data = edge.data;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
contractor_graph->InsertEdge(edge.source, edge.target, edge.data);
|
||||||
|
}
|
||||||
|
data->inserted_edges.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
tbb::parallel_for(
|
||||||
|
tbb::blocked_range<int>(first_independent_node, last, NeighboursGrainSize),
|
||||||
|
[this, &remaining_nodes, &node_priorities, &node_data, &thread_data_list](
|
||||||
|
const tbb::blocked_range<int> &range)
|
||||||
|
{
|
||||||
|
ContractorThreadData *data = thread_data_list.getThreadData();
|
||||||
|
for (int position = range.begin(); position != range.end(); ++position)
|
||||||
|
{
|
||||||
|
NodeID x = remaining_nodes[position].id;
|
||||||
|
this->UpdateNodeNeighbours(node_priorities, node_data, data, x);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// remove contracted nodes from the pool
|
||||||
|
number_of_contracted_nodes += last - first_independent_node;
|
||||||
|
remaining_nodes.resize(first_independent_node);
|
||||||
|
remaining_nodes.shrink_to_fit();
|
||||||
|
// unsigned maxdegree = 0;
|
||||||
|
// unsigned avgdegree = 0;
|
||||||
|
// unsigned mindegree = UINT_MAX;
|
||||||
|
// unsigned quaddegree = 0;
|
||||||
|
//
|
||||||
|
// for(unsigned i = 0; i < remaining_nodes.size(); ++i) {
|
||||||
|
// unsigned degree = contractor_graph->EndEdges(remaining_nodes[i].first)
|
||||||
|
// -
|
||||||
|
// contractor_graph->BeginEdges(remaining_nodes[i].first);
|
||||||
|
// if(degree > maxdegree)
|
||||||
|
// maxdegree = degree;
|
||||||
|
// if(degree < mindegree)
|
||||||
|
// mindegree = degree;
|
||||||
|
//
|
||||||
|
// avgdegree += degree;
|
||||||
|
// quaddegree += (degree*degree);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// avgdegree /= std::max((unsigned)1,(unsigned)remaining_nodes.size() );
|
||||||
|
// quaddegree /= std::max((unsigned)1,(unsigned)remaining_nodes.size() );
|
||||||
|
//
|
||||||
|
// SimpleLogger().Write() << "rest: " << remaining_nodes.size() << ", max: "
|
||||||
|
// << maxdegree << ", min: " << mindegree << ", avg: " << avgdegree << ",
|
||||||
|
// quad: " << quaddegree;
|
||||||
|
|
||||||
|
p.printStatus(number_of_contracted_nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
thread_data_list.data.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
template <class Edge> inline void GetEdges(DeallocatingVector<Edge> &edges)
|
||||||
|
{
|
||||||
|
Percent p(contractor_graph->GetNumberOfNodes());
|
||||||
|
SimpleLogger().Write() << "Getting edges of minimized graph";
|
||||||
|
const NodeID number_of_nodes = contractor_graph->GetNumberOfNodes();
|
||||||
|
if (contractor_graph->GetNumberOfNodes())
|
||||||
|
{
|
||||||
|
Edge new_edge;
|
||||||
|
for (const auto node : osrm::irange(0u, number_of_nodes))
|
||||||
|
{
|
||||||
|
p.printStatus(node);
|
||||||
|
for (auto edge : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const NodeID target = contractor_graph->GetTarget(edge);
|
||||||
|
const ContractorGraph::EdgeData &data = contractor_graph->GetEdgeData(edge);
|
||||||
|
if (!orig_node_id_to_new_id_map.empty())
|
||||||
|
{
|
||||||
|
new_edge.source = orig_node_id_to_new_id_map[node];
|
||||||
|
new_edge.target = orig_node_id_to_new_id_map[target];
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
new_edge.source = node;
|
||||||
|
new_edge.target = target;
|
||||||
|
}
|
||||||
|
BOOST_ASSERT_MSG(UINT_MAX != new_edge.source, "Source id invalid");
|
||||||
|
BOOST_ASSERT_MSG(UINT_MAX != new_edge.target, "Target id invalid");
|
||||||
|
new_edge.data.distance = data.distance;
|
||||||
|
new_edge.data.shortcut = data.shortcut;
|
||||||
|
if (!data.is_original_via_node_ID && !orig_node_id_to_new_id_map.empty())
|
||||||
|
{
|
||||||
|
new_edge.data.id = orig_node_id_to_new_id_map[data.id];
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
new_edge.data.id = data.id;
|
||||||
|
}
|
||||||
|
BOOST_ASSERT_MSG(new_edge.data.id != INT_MAX, // 2^31
|
||||||
|
"edge id invalid");
|
||||||
|
new_edge.data.forward = data.forward;
|
||||||
|
new_edge.data.backward = data.backward;
|
||||||
|
edges.push_back(new_edge);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
contractor_graph.reset();
|
||||||
|
orig_node_id_to_new_id_map.clear();
|
||||||
|
orig_node_id_to_new_id_map.shrink_to_fit();
|
||||||
|
|
||||||
|
BOOST_ASSERT(0 == orig_node_id_to_new_id_map.capacity());
|
||||||
|
|
||||||
|
edges.append(external_edge_list.begin(), external_edge_list.end());
|
||||||
|
external_edge_list.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
inline void Dijkstra(const int max_distance,
|
||||||
|
const unsigned number_of_targets,
|
||||||
|
const int maxNodes,
|
||||||
|
ContractorThreadData *const data,
|
||||||
|
const NodeID middleNode)
|
||||||
|
{
|
||||||
|
|
||||||
|
ContractorHeap &heap = data->heap;
|
||||||
|
|
||||||
|
int nodes = 0;
|
||||||
|
unsigned number_of_targets_found = 0;
|
||||||
|
while (!heap.Empty())
|
||||||
|
{
|
||||||
|
const NodeID node = heap.DeleteMin();
|
||||||
|
const int distance = heap.GetKey(node);
|
||||||
|
const short current_hop = heap.GetData(node).hop + 1;
|
||||||
|
|
||||||
|
if (++nodes > maxNodes)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (distance > max_distance)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destination settled?
|
||||||
|
if (heap.GetData(node).target)
|
||||||
|
{
|
||||||
|
++number_of_targets_found;
|
||||||
|
if (number_of_targets_found >= number_of_targets)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// iterate over all edges of node
|
||||||
|
for (auto edge : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const ContractorEdgeData &data = contractor_graph->GetEdgeData(edge);
|
||||||
|
if (!data.forward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const NodeID to = contractor_graph->GetTarget(edge);
|
||||||
|
if (middleNode == to)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const int to_distance = distance + data.distance;
|
||||||
|
|
||||||
|
// New Node discovered -> Add to Heap + Node Info Storage
|
||||||
|
if (!heap.WasInserted(to))
|
||||||
|
{
|
||||||
|
heap.Insert(to, to_distance, ContractorHeapData(current_hop, false));
|
||||||
|
}
|
||||||
|
// Found a shorter Path -> Update distance
|
||||||
|
else if (to_distance < heap.GetKey(to))
|
||||||
|
{
|
||||||
|
heap.DecreaseKey(to, to_distance);
|
||||||
|
heap.GetData(to).hop = current_hop;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline float EvaluateNodePriority(ContractorThreadData *const data,
|
||||||
|
NodePriorityData *const node_data,
|
||||||
|
const NodeID node)
|
||||||
|
{
|
||||||
|
ContractionStats stats;
|
||||||
|
|
||||||
|
// perform simulated contraction
|
||||||
|
ContractNode<true>(data, node, &stats);
|
||||||
|
|
||||||
|
// Result will contain the priority
|
||||||
|
float result;
|
||||||
|
if (0 == (stats.edges_deleted_count * stats.original_edges_deleted_count))
|
||||||
|
{
|
||||||
|
result = 1.f * node_data->depth;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
result = 2.f * (((float)stats.edges_added_count) / stats.edges_deleted_count) +
|
||||||
|
4.f * (((float)stats.original_edges_added_count) /
|
||||||
|
stats.original_edges_deleted_count) +
|
||||||
|
1.f * node_data->depth;
|
||||||
|
}
|
||||||
|
BOOST_ASSERT(result >= 0);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
template <bool RUNSIMULATION>
|
||||||
|
inline bool
|
||||||
|
ContractNode(ContractorThreadData *data, const NodeID node, ContractionStats *stats = nullptr)
|
||||||
|
{
|
||||||
|
ContractorHeap &heap = data->heap;
|
||||||
|
int inserted_edges_size = data->inserted_edges.size();
|
||||||
|
std::vector<ContractorEdge> &inserted_edges = data->inserted_edges;
|
||||||
|
|
||||||
|
for (auto in_edge : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const ContractorEdgeData &in_data = contractor_graph->GetEdgeData(in_edge);
|
||||||
|
const NodeID source = contractor_graph->GetTarget(in_edge);
|
||||||
|
if (RUNSIMULATION)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(stats != nullptr);
|
||||||
|
++stats->edges_deleted_count;
|
||||||
|
stats->original_edges_deleted_count += in_data.originalEdges;
|
||||||
|
}
|
||||||
|
if (!in_data.backward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
heap.Clear();
|
||||||
|
heap.Insert(source, 0, ContractorHeapData());
|
||||||
|
int max_distance = 0;
|
||||||
|
unsigned number_of_targets = 0;
|
||||||
|
|
||||||
|
for (auto out_edge : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const ContractorEdgeData &out_data = contractor_graph->GetEdgeData(out_edge);
|
||||||
|
if (!out_data.forward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const NodeID target = contractor_graph->GetTarget(out_edge);
|
||||||
|
const int path_distance = in_data.distance + out_data.distance;
|
||||||
|
max_distance = std::max(max_distance, path_distance);
|
||||||
|
if (!heap.WasInserted(target))
|
||||||
|
{
|
||||||
|
heap.Insert(target, INT_MAX, ContractorHeapData(0, true));
|
||||||
|
++number_of_targets;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (RUNSIMULATION)
|
||||||
|
{
|
||||||
|
Dijkstra(max_distance, number_of_targets, 1000, data, node);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Dijkstra(max_distance, number_of_targets, 2000, data, node);
|
||||||
|
}
|
||||||
|
for (auto out_edge : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const ContractorEdgeData &out_data = contractor_graph->GetEdgeData(out_edge);
|
||||||
|
if (!out_data.forward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const NodeID target = contractor_graph->GetTarget(out_edge);
|
||||||
|
const int path_distance = in_data.distance + out_data.distance;
|
||||||
|
const int distance = heap.GetKey(target);
|
||||||
|
if (path_distance < distance)
|
||||||
|
{
|
||||||
|
if (RUNSIMULATION)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(stats != nullptr);
|
||||||
|
stats->edges_added_count += 2;
|
||||||
|
stats->original_edges_added_count +=
|
||||||
|
2 * (out_data.originalEdges + in_data.originalEdges);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
inserted_edges.emplace_back(source, target, path_distance,
|
||||||
|
out_data.originalEdges + in_data.originalEdges,
|
||||||
|
node, true, true, false);
|
||||||
|
|
||||||
|
inserted_edges.emplace_back(target, source, path_distance,
|
||||||
|
out_data.originalEdges + in_data.originalEdges,
|
||||||
|
node, true, false, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!RUNSIMULATION)
|
||||||
|
{
|
||||||
|
int iend = inserted_edges.size();
|
||||||
|
for (int i = inserted_edges_size; i < iend; ++i)
|
||||||
|
{
|
||||||
|
bool found = false;
|
||||||
|
for (int other = i + 1; other < iend; ++other)
|
||||||
|
{
|
||||||
|
if (inserted_edges[other].source != inserted_edges[i].source)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (inserted_edges[other].target != inserted_edges[i].target)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (inserted_edges[other].data.distance != inserted_edges[i].data.distance)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (inserted_edges[other].data.shortcut != inserted_edges[i].data.shortcut)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
inserted_edges[other].data.forward |= inserted_edges[i].data.forward;
|
||||||
|
inserted_edges[other].data.backward |= inserted_edges[i].data.backward;
|
||||||
|
found = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (!found)
|
||||||
|
{
|
||||||
|
inserted_edges[inserted_edges_size++] = inserted_edges[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
inserted_edges.resize(inserted_edges_size);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
inline void DeleteIncomingEdges(ContractorThreadData *data, const NodeID node)
|
||||||
|
{
|
||||||
|
std::vector<NodeID> &neighbours = data->neighbours;
|
||||||
|
neighbours.clear();
|
||||||
|
|
||||||
|
// find all neighbours
|
||||||
|
for (auto e : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const NodeID u = contractor_graph->GetTarget(e);
|
||||||
|
if (u != node)
|
||||||
|
{
|
||||||
|
neighbours.push_back(u);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// eliminate duplicate entries ( forward + backward edges )
|
||||||
|
std::sort(neighbours.begin(), neighbours.end());
|
||||||
|
neighbours.resize(std::unique(neighbours.begin(), neighbours.end()) - neighbours.begin());
|
||||||
|
|
||||||
|
for (const auto i : osrm::irange<std::size_t>(0, neighbours.size()))
|
||||||
|
{
|
||||||
|
contractor_graph->DeleteEdgesTo(neighbours[i], node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline bool UpdateNodeNeighbours(std::vector<float> &priorities,
|
||||||
|
std::vector<NodePriorityData> &node_data,
|
||||||
|
ContractorThreadData *const data,
|
||||||
|
const NodeID node)
|
||||||
|
{
|
||||||
|
std::vector<NodeID> &neighbours = data->neighbours;
|
||||||
|
neighbours.clear();
|
||||||
|
|
||||||
|
// find all neighbours
|
||||||
|
for (auto e : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const NodeID u = contractor_graph->GetTarget(e);
|
||||||
|
if (u == node)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
neighbours.push_back(u);
|
||||||
|
node_data[u].depth = (std::max)(node_data[node].depth + 1, node_data[u].depth);
|
||||||
|
}
|
||||||
|
// eliminate duplicate entries ( forward + backward edges )
|
||||||
|
std::sort(neighbours.begin(), neighbours.end());
|
||||||
|
neighbours.resize(std::unique(neighbours.begin(), neighbours.end()) - neighbours.begin());
|
||||||
|
|
||||||
|
// re-evaluate priorities of neighboring nodes
|
||||||
|
for (const NodeID u : neighbours)
|
||||||
|
{
|
||||||
|
priorities[u] = EvaluateNodePriority(data, &(node_data)[u], u);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
inline bool IsNodeIndependent(const std::vector<float> &priorities,
|
||||||
|
ContractorThreadData *const data,
|
||||||
|
NodeID node) const
|
||||||
|
{
|
||||||
|
const float priority = priorities[node];
|
||||||
|
|
||||||
|
std::vector<NodeID> &neighbours = data->neighbours;
|
||||||
|
neighbours.clear();
|
||||||
|
|
||||||
|
for (auto e : contractor_graph->GetAdjacentEdgeRange(node))
|
||||||
|
{
|
||||||
|
const NodeID target = contractor_graph->GetTarget(e);
|
||||||
|
if (node == target)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const float target_priority = priorities[target];
|
||||||
|
BOOST_ASSERT(target_priority >= 0);
|
||||||
|
// found a neighbour with lower priority?
|
||||||
|
if (priority > target_priority)
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// tie breaking
|
||||||
|
if (std::abs(priority - target_priority) < std::numeric_limits<float>::epsilon() &&
|
||||||
|
bias(node, target))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
neighbours.push_back(target);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::sort(neighbours.begin(), neighbours.end());
|
||||||
|
neighbours.resize(std::unique(neighbours.begin(), neighbours.end()) - neighbours.begin());
|
||||||
|
|
||||||
|
// examine all neighbours that are at most 2 hops away
|
||||||
|
for (const NodeID u : neighbours)
|
||||||
|
{
|
||||||
|
for (auto e : contractor_graph->GetAdjacentEdgeRange(u))
|
||||||
|
{
|
||||||
|
const NodeID target = contractor_graph->GetTarget(e);
|
||||||
|
if (node == target)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const float target_priority = priorities[target];
|
||||||
|
BOOST_ASSERT(target_priority >= 0);
|
||||||
|
// found a neighbour with lower priority?
|
||||||
|
if (priority > target_priority)
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// tie breaking
|
||||||
|
if (std::abs(priority - target_priority) < std::numeric_limits<float>::epsilon() &&
|
||||||
|
bias(node, target))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// This bias function takes up 22 assembly instructions in total on X86
|
||||||
|
inline bool bias(const NodeID a, const NodeID b) const
|
||||||
|
{
|
||||||
|
const unsigned short hasha = fast_hash(a);
|
||||||
|
const unsigned short hashb = fast_hash(b);
|
||||||
|
|
||||||
|
// The compiler optimizes that to conditional register flags but without branching
|
||||||
|
// statements!
|
||||||
|
if (hasha != hashb)
|
||||||
|
{
|
||||||
|
return hasha < hashb;
|
||||||
|
}
|
||||||
|
return a < b;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::shared_ptr<ContractorGraph> contractor_graph;
|
||||||
|
stxxl::vector<QueryEdge> external_edge_list;
|
||||||
|
std::vector<NodeID> orig_node_id_to_new_id_map;
|
||||||
|
XORFastHash fast_hash;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // CONTRACTOR_HPP
|
135
contractor/contractor_options.cpp
Normal file
135
contractor/contractor_options.cpp
Normal file
@ -0,0 +1,135 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "contractor_options.hpp"
|
||||||
|
|
||||||
|
#include "../util/git_sha.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
|
||||||
|
#include <boost/filesystem.hpp>
|
||||||
|
#include <boost/program_options.hpp>
|
||||||
|
|
||||||
|
#include <tbb/task_scheduler_init.h>
|
||||||
|
|
||||||
|
return_code
|
||||||
|
ContractorOptions::ParseArguments(int argc, char *argv[], ContractorConfig &contractor_config)
|
||||||
|
{
|
||||||
|
// declare a group of options that will be allowed only on command line
|
||||||
|
boost::program_options::options_description generic_options("Options");
|
||||||
|
generic_options.add_options()("version,v", "Show version")("help,h", "Show this help message")(
|
||||||
|
"config,c", boost::program_options::value<boost::filesystem::path>(&contractor_config.config_file_path)
|
||||||
|
->default_value("contractor.ini"),
|
||||||
|
"Path to a configuration file.");
|
||||||
|
|
||||||
|
// declare a group of options that will be allowed both on command line and in config file
|
||||||
|
boost::program_options::options_description config_options("Configuration");
|
||||||
|
config_options.add_options()(
|
||||||
|
"restrictions,r",
|
||||||
|
boost::program_options::value<boost::filesystem::path>(&contractor_config.restrictions_path),
|
||||||
|
"Restrictions file in .osrm.restrictions format")(
|
||||||
|
"profile,p", boost::program_options::value<boost::filesystem::path>(&contractor_config.profile_path)
|
||||||
|
->default_value("profile.lua"),
|
||||||
|
"Path to LUA routing profile")(
|
||||||
|
"threads,t", boost::program_options::value<unsigned int>(&contractor_config.requested_num_threads)
|
||||||
|
->default_value(tbb::task_scheduler_init::default_num_threads()),
|
||||||
|
"Number of threads to use");
|
||||||
|
|
||||||
|
// hidden options, will be allowed both on command line and in config file, but will not be
|
||||||
|
// shown to the user
|
||||||
|
boost::program_options::options_description hidden_options("Hidden options");
|
||||||
|
hidden_options.add_options()(
|
||||||
|
"input,i", boost::program_options::value<boost::filesystem::path>(&contractor_config.osrm_input_path),
|
||||||
|
"Input file in .osm, .osm.bz2 or .osm.pbf format");
|
||||||
|
|
||||||
|
// positional option
|
||||||
|
boost::program_options::positional_options_description positional_options;
|
||||||
|
positional_options.add("input", 1);
|
||||||
|
|
||||||
|
// combine above options for parsing
|
||||||
|
boost::program_options::options_description cmdline_options;
|
||||||
|
cmdline_options.add(generic_options).add(config_options).add(hidden_options);
|
||||||
|
|
||||||
|
boost::program_options::options_description config_file_options;
|
||||||
|
config_file_options.add(config_options).add(hidden_options);
|
||||||
|
|
||||||
|
boost::program_options::options_description visible_options(
|
||||||
|
"Usage: " + boost::filesystem::basename(argv[0]) + " <input.osrm> [options]");
|
||||||
|
visible_options.add(generic_options).add(config_options);
|
||||||
|
|
||||||
|
// parse command line options
|
||||||
|
boost::program_options::variables_map option_variables;
|
||||||
|
boost::program_options::store(boost::program_options::command_line_parser(argc, argv)
|
||||||
|
.options(cmdline_options)
|
||||||
|
.positional(positional_options)
|
||||||
|
.run(),
|
||||||
|
option_variables);
|
||||||
|
|
||||||
|
const auto &temp_config_path = option_variables["config"].as<boost::filesystem::path>();
|
||||||
|
if (boost::filesystem::is_regular_file(temp_config_path))
|
||||||
|
{
|
||||||
|
boost::program_options::store(boost::program_options::parse_config_file<char>(
|
||||||
|
temp_config_path.string().c_str(), cmdline_options, true),
|
||||||
|
option_variables);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (option_variables.count("version"))
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << g_GIT_DESCRIPTION;
|
||||||
|
return return_code::exit;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (option_variables.count("help"))
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "\n" << visible_options;
|
||||||
|
return return_code::exit;
|
||||||
|
}
|
||||||
|
|
||||||
|
boost::program_options::notify(option_variables);
|
||||||
|
|
||||||
|
if (!option_variables.count("restrictions"))
|
||||||
|
{
|
||||||
|
contractor_config.restrictions_path = contractor_config.osrm_input_path.string() + ".restrictions";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!option_variables.count("input"))
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "\n" << visible_options;
|
||||||
|
return return_code::fail;
|
||||||
|
}
|
||||||
|
|
||||||
|
return return_code::ok;
|
||||||
|
}
|
||||||
|
|
||||||
|
void ContractorOptions::GenerateOutputFilesNames(ContractorConfig &contractor_config)
|
||||||
|
{
|
||||||
|
contractor_config.node_output_path = contractor_config.osrm_input_path.string() + ".nodes";
|
||||||
|
contractor_config.edge_output_path = contractor_config.osrm_input_path.string() + ".edges";
|
||||||
|
contractor_config.geometry_output_path = contractor_config.osrm_input_path.string() + ".geometry";
|
||||||
|
contractor_config.graph_output_path = contractor_config.osrm_input_path.string() + ".hsgr";
|
||||||
|
contractor_config.rtree_nodes_output_path = contractor_config.osrm_input_path.string() + ".ramIndex";
|
||||||
|
contractor_config.rtree_leafs_output_path = contractor_config.osrm_input_path.string() + ".fileIndex";
|
||||||
|
}
|
68
contractor/contractor_options.hpp
Normal file
68
contractor/contractor_options.hpp
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef CONTRACTOR_OPTIONS_HPP
|
||||||
|
#define CONTRACTOR_OPTIONS_HPP
|
||||||
|
|
||||||
|
#include <boost/filesystem/path.hpp>
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
|
||||||
|
enum class return_code : unsigned
|
||||||
|
{
|
||||||
|
ok,
|
||||||
|
fail,
|
||||||
|
exit
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ContractorConfig
|
||||||
|
{
|
||||||
|
ContractorConfig() noexcept : requested_num_threads(0) {}
|
||||||
|
|
||||||
|
boost::filesystem::path config_file_path;
|
||||||
|
boost::filesystem::path osrm_input_path;
|
||||||
|
boost::filesystem::path restrictions_path;
|
||||||
|
boost::filesystem::path profile_path;
|
||||||
|
|
||||||
|
std::string node_output_path;
|
||||||
|
std::string edge_output_path;
|
||||||
|
std::string geometry_output_path;
|
||||||
|
std::string graph_output_path;
|
||||||
|
std::string rtree_nodes_output_path;
|
||||||
|
std::string rtree_leafs_output_path;
|
||||||
|
|
||||||
|
unsigned requested_num_threads;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ContractorOptions
|
||||||
|
{
|
||||||
|
static return_code ParseArguments(int argc, char *argv[], ContractorConfig &extractor_config);
|
||||||
|
|
||||||
|
static void GenerateOutputFilesNames(ContractorConfig &extractor_config);
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // EXTRACTOR_OPTIONS_HPP
|
796
contractor/edge_based_graph_factory.cpp
Normal file
796
contractor/edge_based_graph_factory.cpp
Normal file
@ -0,0 +1,796 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM, Dennis Luxen, others
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "edge_based_graph_factory.hpp"
|
||||||
|
#include "../algorithms/tiny_components.hpp"
|
||||||
|
#include "../data_structures/percent.hpp"
|
||||||
|
#include "../util/compute_angle.hpp"
|
||||||
|
#include "../util/integer_range.hpp"
|
||||||
|
#include "../util/lua_util.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
#include "../util/timing_util.hpp"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <fstream>
|
||||||
|
#include <iomanip>
|
||||||
|
#include <limits>
|
||||||
|
|
||||||
|
EdgeBasedGraphFactory::EdgeBasedGraphFactory(std::shared_ptr<NodeBasedDynamicGraph> node_based_graph,
|
||||||
|
std::shared_ptr<RestrictionMap> restriction_map,
|
||||||
|
std::unique_ptr<std::vector<NodeID>> barrier_node_list,
|
||||||
|
std::unique_ptr<std::vector<NodeID>> traffic_light_node_list,
|
||||||
|
const std::vector<QueryNode> &node_info_list,
|
||||||
|
const SpeedProfileProperties &speed_profile)
|
||||||
|
: speed_profile(speed_profile),
|
||||||
|
m_number_of_edge_based_nodes(std::numeric_limits<unsigned>::max()),
|
||||||
|
m_node_info_list(node_info_list),
|
||||||
|
m_node_based_graph(std::move(node_based_graph)),
|
||||||
|
m_restriction_map(std::move(restriction_map)), max_id(0), removed_node_count(0)
|
||||||
|
{
|
||||||
|
// insert into unordered sets for fast lookup
|
||||||
|
m_barrier_nodes.insert(barrier_node_list->begin(), barrier_node_list->end());
|
||||||
|
m_traffic_lights.insert(traffic_light_node_list->begin(), traffic_light_node_list->end());
|
||||||
|
}
|
||||||
|
|
||||||
|
void EdgeBasedGraphFactory::GetEdgeBasedEdges(DeallocatingVector<EdgeBasedEdge> &output_edge_list)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT_MSG(0 == output_edge_list.size(), "Vector is not empty");
|
||||||
|
m_edge_based_edge_list.swap(output_edge_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
void EdgeBasedGraphFactory::GetEdgeBasedNodes(std::vector<EdgeBasedNode> &nodes)
|
||||||
|
{
|
||||||
|
#ifndef NDEBUG
|
||||||
|
for (const EdgeBasedNode &node : m_edge_based_node_list)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(m_node_info_list.at(node.u).lat != INT_MAX);
|
||||||
|
BOOST_ASSERT(m_node_info_list.at(node.u).lon != INT_MAX);
|
||||||
|
BOOST_ASSERT(m_node_info_list.at(node.v).lon != INT_MAX);
|
||||||
|
BOOST_ASSERT(m_node_info_list.at(node.v).lat != INT_MAX);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
nodes.swap(m_edge_based_node_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
void EdgeBasedGraphFactory::InsertEdgeBasedNode(const NodeID node_u,
|
||||||
|
const NodeID node_v,
|
||||||
|
const unsigned component_id)
|
||||||
|
{
|
||||||
|
// merge edges together into one EdgeBasedNode
|
||||||
|
BOOST_ASSERT(node_u != SPECIAL_NODEID);
|
||||||
|
BOOST_ASSERT(node_v != SPECIAL_NODEID);
|
||||||
|
|
||||||
|
// find forward edge id and
|
||||||
|
const EdgeID edge_id_1 = m_node_based_graph->FindEdge(node_u, node_v);
|
||||||
|
BOOST_ASSERT(edge_id_1 != SPECIAL_EDGEID);
|
||||||
|
|
||||||
|
const EdgeData &forward_data = m_node_based_graph->GetEdgeData(edge_id_1);
|
||||||
|
|
||||||
|
// find reverse edge id and
|
||||||
|
const EdgeID edge_id_2 = m_node_based_graph->FindEdge(node_v, node_u);
|
||||||
|
BOOST_ASSERT(edge_id_2 != SPECIAL_EDGEID);
|
||||||
|
|
||||||
|
const EdgeData &reverse_data = m_node_based_graph->GetEdgeData(edge_id_2);
|
||||||
|
|
||||||
|
if (forward_data.edgeBasedNodeID == SPECIAL_NODEID &&
|
||||||
|
reverse_data.edgeBasedNodeID == SPECIAL_NODEID)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(m_geometry_compressor.HasEntryForID(edge_id_1) ==
|
||||||
|
m_geometry_compressor.HasEntryForID(edge_id_2));
|
||||||
|
if (m_geometry_compressor.HasEntryForID(edge_id_1))
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(m_geometry_compressor.HasEntryForID(edge_id_2));
|
||||||
|
|
||||||
|
// reconstruct geometry and put in each individual edge with its offset
|
||||||
|
const std::vector<GeometryCompressor::CompressedNode> &forward_geometry =
|
||||||
|
m_geometry_compressor.GetBucketReference(edge_id_1);
|
||||||
|
const std::vector<GeometryCompressor::CompressedNode> &reverse_geometry =
|
||||||
|
m_geometry_compressor.GetBucketReference(edge_id_2);
|
||||||
|
BOOST_ASSERT(forward_geometry.size() == reverse_geometry.size());
|
||||||
|
BOOST_ASSERT(0 != forward_geometry.size());
|
||||||
|
const unsigned geometry_size = static_cast<unsigned>(forward_geometry.size());
|
||||||
|
BOOST_ASSERT(geometry_size > 1);
|
||||||
|
|
||||||
|
// reconstruct bidirectional edge with individual weights and put each into the NN index
|
||||||
|
|
||||||
|
std::vector<int> forward_dist_prefix_sum(forward_geometry.size(), 0);
|
||||||
|
std::vector<int> reverse_dist_prefix_sum(reverse_geometry.size(), 0);
|
||||||
|
|
||||||
|
// quick'n'dirty prefix sum as std::partial_sum needs addtional casts
|
||||||
|
// TODO: move to lambda function with C++11
|
||||||
|
int temp_sum = 0;
|
||||||
|
|
||||||
|
for (const auto i : osrm::irange(0u, geometry_size))
|
||||||
|
{
|
||||||
|
forward_dist_prefix_sum[i] = temp_sum;
|
||||||
|
temp_sum += forward_geometry[i].second;
|
||||||
|
|
||||||
|
BOOST_ASSERT(forward_data.distance >= temp_sum);
|
||||||
|
}
|
||||||
|
|
||||||
|
temp_sum = 0;
|
||||||
|
for (const auto i : osrm::irange(0u, geometry_size))
|
||||||
|
{
|
||||||
|
temp_sum += reverse_geometry[reverse_geometry.size() - 1 - i].second;
|
||||||
|
reverse_dist_prefix_sum[i] = reverse_data.distance - temp_sum;
|
||||||
|
// BOOST_ASSERT(reverse_data.distance >= temp_sum);
|
||||||
|
}
|
||||||
|
|
||||||
|
NodeID current_edge_source_coordinate_id = node_u;
|
||||||
|
|
||||||
|
if (SPECIAL_NODEID != forward_data.edgeBasedNodeID)
|
||||||
|
{
|
||||||
|
max_id = std::max(forward_data.edgeBasedNodeID, max_id);
|
||||||
|
}
|
||||||
|
if (SPECIAL_NODEID != reverse_data.edgeBasedNodeID)
|
||||||
|
{
|
||||||
|
max_id = std::max(reverse_data.edgeBasedNodeID, max_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
// traverse arrays from start and end respectively
|
||||||
|
for (const auto i : osrm::irange(0u, geometry_size))
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(current_edge_source_coordinate_id ==
|
||||||
|
reverse_geometry[geometry_size - 1 - i].first);
|
||||||
|
const NodeID current_edge_target_coordinate_id = forward_geometry[i].first;
|
||||||
|
BOOST_ASSERT(current_edge_target_coordinate_id != current_edge_source_coordinate_id);
|
||||||
|
|
||||||
|
// build edges
|
||||||
|
m_edge_based_node_list.emplace_back(
|
||||||
|
forward_data.edgeBasedNodeID, reverse_data.edgeBasedNodeID,
|
||||||
|
current_edge_source_coordinate_id, current_edge_target_coordinate_id,
|
||||||
|
forward_data.nameID, forward_geometry[i].second,
|
||||||
|
reverse_geometry[geometry_size - 1 - i].second, forward_dist_prefix_sum[i],
|
||||||
|
reverse_dist_prefix_sum[i], m_geometry_compressor.GetPositionForID(edge_id_1),
|
||||||
|
component_id, i, forward_data.travel_mode, reverse_data.travel_mode);
|
||||||
|
current_edge_source_coordinate_id = current_edge_target_coordinate_id;
|
||||||
|
|
||||||
|
BOOST_ASSERT(m_edge_based_node_list.back().IsCompressed());
|
||||||
|
|
||||||
|
BOOST_ASSERT(node_u != m_edge_based_node_list.back().u ||
|
||||||
|
node_v != m_edge_based_node_list.back().v);
|
||||||
|
|
||||||
|
BOOST_ASSERT(node_u != m_edge_based_node_list.back().v ||
|
||||||
|
node_v != m_edge_based_node_list.back().u);
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(current_edge_source_coordinate_id == node_v);
|
||||||
|
BOOST_ASSERT(m_edge_based_node_list.back().IsCompressed());
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(!m_geometry_compressor.HasEntryForID(edge_id_2));
|
||||||
|
|
||||||
|
if (forward_data.edgeBasedNodeID != SPECIAL_NODEID)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(forward_data.forward);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(!forward_data.forward);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (reverse_data.edgeBasedNodeID != SPECIAL_NODEID)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(reverse_data.forward);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(!reverse_data.forward);
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(forward_data.edgeBasedNodeID != SPECIAL_NODEID ||
|
||||||
|
reverse_data.edgeBasedNodeID != SPECIAL_NODEID);
|
||||||
|
|
||||||
|
m_edge_based_node_list.emplace_back(
|
||||||
|
forward_data.edgeBasedNodeID, reverse_data.edgeBasedNodeID, node_u, node_v,
|
||||||
|
forward_data.nameID, forward_data.distance, reverse_data.distance, 0, 0, SPECIAL_EDGEID,
|
||||||
|
component_id, 0, forward_data.travel_mode, reverse_data.travel_mode);
|
||||||
|
BOOST_ASSERT(!m_edge_based_node_list.back().IsCompressed());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void EdgeBasedGraphFactory::FlushVectorToStream(
|
||||||
|
std::ofstream &edge_data_file, std::vector<OriginalEdgeData> &original_edge_data_vector) const
|
||||||
|
{
|
||||||
|
if (original_edge_data_vector.empty())
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
edge_data_file.write((char *)&(original_edge_data_vector[0]),
|
||||||
|
original_edge_data_vector.size() * sizeof(OriginalEdgeData));
|
||||||
|
original_edge_data_vector.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
void EdgeBasedGraphFactory::Run(const std::string &original_edge_data_filename,
|
||||||
|
const std::string &geometry_filename,
|
||||||
|
lua_State *lua_state)
|
||||||
|
{
|
||||||
|
TIMER_START(geometry);
|
||||||
|
CompressGeometry();
|
||||||
|
TIMER_STOP(geometry);
|
||||||
|
|
||||||
|
TIMER_START(renumber);
|
||||||
|
RenumberEdges();
|
||||||
|
TIMER_STOP(renumber);
|
||||||
|
|
||||||
|
TIMER_START(generate_nodes);
|
||||||
|
GenerateEdgeExpandedNodes();
|
||||||
|
TIMER_STOP(generate_nodes);
|
||||||
|
|
||||||
|
TIMER_START(generate_edges);
|
||||||
|
GenerateEdgeExpandedEdges(original_edge_data_filename, lua_state);
|
||||||
|
TIMER_STOP(generate_edges);
|
||||||
|
|
||||||
|
m_geometry_compressor.SerializeInternalVector(geometry_filename);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Timing statistics for edge-expanded graph:";
|
||||||
|
SimpleLogger().Write() << "Geometry compression: " << TIMER_SEC(geometry) << "s";
|
||||||
|
SimpleLogger().Write() << "Renumbering edges: " << TIMER_SEC(renumber) << "s";
|
||||||
|
SimpleLogger().Write() << "Generating nodes: " << TIMER_SEC(generate_nodes) << "s";
|
||||||
|
SimpleLogger().Write() << "Generating edges: " << TIMER_SEC(generate_edges) << "s";
|
||||||
|
}
|
||||||
|
|
||||||
|
void EdgeBasedGraphFactory::CompressGeometry()
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "Removing graph geometry while preserving topology";
|
||||||
|
|
||||||
|
const unsigned original_number_of_nodes = m_node_based_graph->GetNumberOfNodes();
|
||||||
|
const unsigned original_number_of_edges = m_node_based_graph->GetNumberOfEdges();
|
||||||
|
|
||||||
|
Percent progress(original_number_of_nodes);
|
||||||
|
|
||||||
|
for (const NodeID node_v : osrm::irange(0u, original_number_of_nodes))
|
||||||
|
{
|
||||||
|
progress.printStatus(node_v);
|
||||||
|
|
||||||
|
// only contract degree 2 vertices
|
||||||
|
if (2 != m_node_based_graph->GetOutDegree(node_v))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// don't contract barrier node
|
||||||
|
if (m_barrier_nodes.end() != m_barrier_nodes.find(node_v))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if v is a via node for a turn restriction, i.e. a 'directed' barrier node
|
||||||
|
if (m_restriction_map->IsViaNode(node_v))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* reverse_e2 forward_e2
|
||||||
|
* u <---------- v -----------> w
|
||||||
|
* ----------> <-----------
|
||||||
|
* forward_e1 reverse_e1
|
||||||
|
*
|
||||||
|
* Will be compressed to:
|
||||||
|
*
|
||||||
|
* reverse_e1
|
||||||
|
* u <---------- w
|
||||||
|
* ---------->
|
||||||
|
* forward_e1
|
||||||
|
*
|
||||||
|
* If the edges are compatible.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
const bool reverse_edge_order =
|
||||||
|
!(m_node_based_graph->GetEdgeData(m_node_based_graph->BeginEdges(node_v)).forward);
|
||||||
|
const EdgeID forward_e2 = m_node_based_graph->BeginEdges(node_v) + reverse_edge_order;
|
||||||
|
BOOST_ASSERT(SPECIAL_EDGEID != forward_e2);
|
||||||
|
BOOST_ASSERT(forward_e2 >= m_node_based_graph->BeginEdges(node_v) &&
|
||||||
|
forward_e2 < m_node_based_graph->EndEdges(node_v));
|
||||||
|
const EdgeID reverse_e2 = m_node_based_graph->BeginEdges(node_v) + 1 - reverse_edge_order;
|
||||||
|
BOOST_ASSERT(SPECIAL_EDGEID != reverse_e2);
|
||||||
|
BOOST_ASSERT(reverse_e2 >= m_node_based_graph->BeginEdges(node_v) &&
|
||||||
|
reverse_e2 < m_node_based_graph->EndEdges(node_v));
|
||||||
|
|
||||||
|
const EdgeData &fwd_edge_data2 = m_node_based_graph->GetEdgeData(forward_e2);
|
||||||
|
const EdgeData &rev_edge_data2 = m_node_based_graph->GetEdgeData(reverse_e2);
|
||||||
|
|
||||||
|
const NodeID node_w = m_node_based_graph->GetTarget(forward_e2);
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != node_w);
|
||||||
|
BOOST_ASSERT(node_v != node_w);
|
||||||
|
const NodeID node_u = m_node_based_graph->GetTarget(reverse_e2);
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != node_u);
|
||||||
|
BOOST_ASSERT(node_u != node_v);
|
||||||
|
|
||||||
|
const EdgeID forward_e1 = m_node_based_graph->FindEdge(node_u, node_v);
|
||||||
|
BOOST_ASSERT(SPECIAL_EDGEID != forward_e1);
|
||||||
|
BOOST_ASSERT(node_v == m_node_based_graph->GetTarget(forward_e1));
|
||||||
|
const EdgeID reverse_e1 = m_node_based_graph->FindEdge(node_w, node_v);
|
||||||
|
BOOST_ASSERT(SPECIAL_EDGEID != reverse_e1);
|
||||||
|
BOOST_ASSERT(node_v == m_node_based_graph->GetTarget(reverse_e1));
|
||||||
|
|
||||||
|
const EdgeData &fwd_edge_data1 = m_node_based_graph->GetEdgeData(forward_e1);
|
||||||
|
const EdgeData &rev_edge_data1 = m_node_based_graph->GetEdgeData(reverse_e1);
|
||||||
|
|
||||||
|
if (m_node_based_graph->FindEdgeInEitherDirection(node_u, node_w) != SPECIAL_EDGEID)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// this case can happen if two ways with different names overlap
|
||||||
|
if (fwd_edge_data1.nameID != rev_edge_data1.nameID ||
|
||||||
|
fwd_edge_data2.nameID != rev_edge_data2.nameID)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fwd_edge_data1.IsCompatibleTo(fwd_edge_data2) && rev_edge_data1.IsCompatibleTo(rev_edge_data2))
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(m_node_based_graph->GetEdgeData(forward_e1).nameID ==
|
||||||
|
m_node_based_graph->GetEdgeData(reverse_e1).nameID);
|
||||||
|
BOOST_ASSERT(m_node_based_graph->GetEdgeData(forward_e2).nameID ==
|
||||||
|
m_node_based_graph->GetEdgeData(reverse_e2).nameID);
|
||||||
|
|
||||||
|
// Get distances before graph is modified
|
||||||
|
const int forward_weight1 = m_node_based_graph->GetEdgeData(forward_e1).distance;
|
||||||
|
const int forward_weight2 = m_node_based_graph->GetEdgeData(forward_e2).distance;
|
||||||
|
|
||||||
|
BOOST_ASSERT(0 != forward_weight1);
|
||||||
|
BOOST_ASSERT(0 != forward_weight2);
|
||||||
|
|
||||||
|
const int reverse_weight1 = m_node_based_graph->GetEdgeData(reverse_e1).distance;
|
||||||
|
const int reverse_weight2 = m_node_based_graph->GetEdgeData(reverse_e2).distance;
|
||||||
|
|
||||||
|
BOOST_ASSERT(0 != reverse_weight1);
|
||||||
|
BOOST_ASSERT(0 != reverse_weight2);
|
||||||
|
|
||||||
|
const bool has_node_penalty = m_traffic_lights.find(node_v) != m_traffic_lights.end();
|
||||||
|
|
||||||
|
// add weight of e2's to e1
|
||||||
|
m_node_based_graph->GetEdgeData(forward_e1).distance += fwd_edge_data2.distance;
|
||||||
|
m_node_based_graph->GetEdgeData(reverse_e1).distance += rev_edge_data2.distance;
|
||||||
|
if (has_node_penalty)
|
||||||
|
{
|
||||||
|
m_node_based_graph->GetEdgeData(forward_e1).distance +=
|
||||||
|
speed_profile.traffic_signal_penalty;
|
||||||
|
m_node_based_graph->GetEdgeData(reverse_e1).distance +=
|
||||||
|
speed_profile.traffic_signal_penalty;
|
||||||
|
}
|
||||||
|
|
||||||
|
// extend e1's to targets of e2's
|
||||||
|
m_node_based_graph->SetTarget(forward_e1, node_w);
|
||||||
|
m_node_based_graph->SetTarget(reverse_e1, node_u);
|
||||||
|
|
||||||
|
// remove e2's (if bidir, otherwise only one)
|
||||||
|
m_node_based_graph->DeleteEdge(node_v, forward_e2);
|
||||||
|
m_node_based_graph->DeleteEdge(node_v, reverse_e2);
|
||||||
|
|
||||||
|
// update any involved turn restrictions
|
||||||
|
m_restriction_map->FixupStartingTurnRestriction(node_u, node_v, node_w);
|
||||||
|
m_restriction_map->FixupArrivingTurnRestriction(node_u, node_v, node_w,
|
||||||
|
*m_node_based_graph);
|
||||||
|
|
||||||
|
m_restriction_map->FixupStartingTurnRestriction(node_w, node_v, node_u);
|
||||||
|
m_restriction_map->FixupArrivingTurnRestriction(node_w, node_v, node_u,
|
||||||
|
*m_node_based_graph);
|
||||||
|
|
||||||
|
// store compressed geometry in container
|
||||||
|
m_geometry_compressor.CompressEdge(
|
||||||
|
forward_e1, forward_e2, node_v, node_w,
|
||||||
|
forward_weight1 + (has_node_penalty ? speed_profile.traffic_signal_penalty : 0),
|
||||||
|
forward_weight2);
|
||||||
|
m_geometry_compressor.CompressEdge(
|
||||||
|
reverse_e1, reverse_e2, node_v, node_u, reverse_weight1,
|
||||||
|
reverse_weight2 + (has_node_penalty ? speed_profile.traffic_signal_penalty : 0));
|
||||||
|
++removed_node_count;
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SimpleLogger().Write() << "removed " << removed_node_count << " nodes";
|
||||||
|
m_geometry_compressor.PrintStatistics();
|
||||||
|
|
||||||
|
unsigned new_node_count = 0;
|
||||||
|
unsigned new_edge_count = 0;
|
||||||
|
|
||||||
|
for (const auto i : osrm::irange(0u, m_node_based_graph->GetNumberOfNodes()))
|
||||||
|
{
|
||||||
|
if (m_node_based_graph->GetOutDegree(i) > 0)
|
||||||
|
{
|
||||||
|
++new_node_count;
|
||||||
|
new_edge_count += (m_node_based_graph->EndEdges(i) - m_node_based_graph->BeginEdges(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SimpleLogger().Write() << "new nodes: " << new_node_count << ", edges " << new_edge_count;
|
||||||
|
SimpleLogger().Write() << "Node compression ratio: "
|
||||||
|
<< new_node_count / (double)original_number_of_nodes;
|
||||||
|
SimpleLogger().Write() << "Edge compression ratio: "
|
||||||
|
<< new_edge_count / (double)original_number_of_edges;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Renumbers all _forward_ edges and sets the edgeBasedNodeID.
|
||||||
|
/// A specific numbering is not important. Any unique ID will do.
|
||||||
|
void EdgeBasedGraphFactory::RenumberEdges()
|
||||||
|
{
|
||||||
|
// renumber edge based node of outgoing edges
|
||||||
|
unsigned numbered_edges_count = 0;
|
||||||
|
for (const auto current_node : osrm::irange(0u, m_node_based_graph->GetNumberOfNodes()))
|
||||||
|
{
|
||||||
|
for (const auto current_edge : m_node_based_graph->GetAdjacentEdgeRange(current_node))
|
||||||
|
{
|
||||||
|
EdgeData &edge_data = m_node_based_graph->GetEdgeData(current_edge);
|
||||||
|
|
||||||
|
// this edge is an incoming edge
|
||||||
|
if (!edge_data.forward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(numbered_edges_count < m_node_based_graph->GetNumberOfEdges());
|
||||||
|
edge_data.edgeBasedNodeID = numbered_edges_count;
|
||||||
|
++numbered_edges_count;
|
||||||
|
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != edge_data.edgeBasedNodeID);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
m_number_of_edge_based_nodes = numbered_edges_count;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Creates the nodes in the edge expanded graph from edges in the node-based graph.
 *
 * First runs an SCC analysis on the (compressed) node-based graph so that each
 * generated edge-expanded node can be tagged with its component id when the
 * component is small. One edge-expanded node is created per undirected
 * node-based edge (each pair of antiparallel edges is visited once).
 */
void EdgeBasedGraphFactory::GenerateEdgeExpandedNodes()
{
    SimpleLogger().Write() << "Identifying components of the (compressed) road network";

    // Run a BFS on the undirected graph and identify small components
    TarjanSCC<NodeBasedDynamicGraph> component_explorer(m_node_based_graph, *m_restriction_map,
                                                        m_barrier_nodes);

    component_explorer.run();

    SimpleLogger().Write() << "identified: "
                           << component_explorer.get_number_of_components() - removed_node_count
                           << " (compressed) components";
    SimpleLogger().Write() << "identified "
                           << component_explorer.get_size_one_count() - removed_node_count
                           << " (compressed) SCCs of size 1";
    SimpleLogger().Write() << "generating edge-expanded nodes";

    Percent progress(m_node_based_graph->GetNumberOfNodes());

    // loop over all edges and generate new set of nodes
    for (const auto node_u : osrm::irange(0u, m_node_based_graph->GetNumberOfNodes()))
    {
        BOOST_ASSERT(node_u != SPECIAL_NODEID);
        BOOST_ASSERT(node_u < m_node_based_graph->GetNumberOfNodes());
        progress.printStatus(node_u);
        for (EdgeID e1 : m_node_based_graph->GetAdjacentEdgeRange(node_u))
        {
            const EdgeData &edge_data = m_node_based_graph->GetEdgeData(e1);
            BOOST_ASSERT(e1 != SPECIAL_EDGEID);
            const NodeID node_v = m_node_based_graph->GetTarget(e1);

            BOOST_ASSERT(SPECIAL_NODEID != node_v);
            // pick only every other edge, since we have every edge as an outgoing
            // and incoming edge
            if (node_u > node_v)
            {
                continue;
            }

            BOOST_ASSERT(node_u < node_v);

            // Note: edges that end on barrier nodes or on a turn restriction
            // may actually be in two distinct components. We choose the smallest
            const unsigned size_of_component =
                std::min(component_explorer.get_component_size(node_u),
                         component_explorer.get_component_size(node_v));

            // Id of whichever endpoint sits in the smaller component.
            const unsigned id_of_smaller_component = [node_u, node_v, &component_explorer]
            {
                if (component_explorer.get_component_size(node_u) <
                    component_explorer.get_component_size(node_v))
                {
                    return component_explorer.get_component_id(node_u);
                }
                return component_explorer.get_component_id(node_v);
            }();

            const bool component_is_tiny = size_of_component < 1000;

            // we only set edgeBasedNodeID for forward edges; an unset id here
            // means e1 is the reverse direction, so swap the endpoint order.
            // Component id 0 is reserved for "not tiny", hence the +1 offset.
            if (edge_data.edgeBasedNodeID == SPECIAL_NODEID)
            {
                InsertEdgeBasedNode(node_v, node_u,
                                    (component_is_tiny ? id_of_smaller_component + 1 : 0));
            }
            else
            {
                InsertEdgeBasedNode(node_u, node_v,
                                    (component_is_tiny ? id_of_smaller_component + 1 : 0));
            }
        }
    }

    SimpleLogger().Write() << "Generated " << m_edge_based_node_list.size()
                           << " nodes in edge-expanded graph";
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Actually it also generates OriginalEdgeData and serializes them...
|
||||||
|
*/
|
||||||
|
void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
|
||||||
|
const std::string &original_edge_data_filename, lua_State *lua_state)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "generating edge-expanded edges";
|
||||||
|
|
||||||
|
unsigned node_based_edge_counter = 0;
|
||||||
|
unsigned original_edges_counter = 0;
|
||||||
|
|
||||||
|
std::ofstream edge_data_file(original_edge_data_filename.c_str(), std::ios::binary);
|
||||||
|
|
||||||
|
// writes a dummy value that is updated later
|
||||||
|
edge_data_file.write((char *)&original_edges_counter, sizeof(unsigned));
|
||||||
|
|
||||||
|
std::vector<OriginalEdgeData> original_edge_data_vector;
|
||||||
|
original_edge_data_vector.reserve(1024 * 1024);
|
||||||
|
|
||||||
|
// Loop over all turns and generate new set of edges.
|
||||||
|
// Three nested loop look super-linear, but we are dealing with a (kind of)
|
||||||
|
// linear number of turns only.
|
||||||
|
unsigned restricted_turns_counter = 0;
|
||||||
|
unsigned skipped_uturns_counter = 0;
|
||||||
|
unsigned skipped_barrier_turns_counter = 0;
|
||||||
|
unsigned compressed = 0;
|
||||||
|
|
||||||
|
Percent progress(m_node_based_graph->GetNumberOfNodes());
|
||||||
|
|
||||||
|
for (const auto node_u : osrm::irange(0u, m_node_based_graph->GetNumberOfNodes()))
|
||||||
|
{
|
||||||
|
progress.printStatus(node_u);
|
||||||
|
for (const EdgeID e1 : m_node_based_graph->GetAdjacentEdgeRange(node_u))
|
||||||
|
{
|
||||||
|
if (!m_node_based_graph->GetEdgeData(e1).forward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
++node_based_edge_counter;
|
||||||
|
const NodeID node_v = m_node_based_graph->GetTarget(e1);
|
||||||
|
const NodeID only_restriction_to_node =
|
||||||
|
m_restriction_map->CheckForEmanatingIsOnlyTurn(node_u, node_v);
|
||||||
|
const bool is_barrier_node = m_barrier_nodes.find(node_v) != m_barrier_nodes.end();
|
||||||
|
|
||||||
|
for (const EdgeID e2 : m_node_based_graph->GetAdjacentEdgeRange(node_v))
|
||||||
|
{
|
||||||
|
if (!m_node_based_graph->GetEdgeData(e2).forward)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const NodeID node_w = m_node_based_graph->GetTarget(e2);
|
||||||
|
|
||||||
|
if ((only_restriction_to_node != SPECIAL_NODEID) &&
|
||||||
|
(node_w != only_restriction_to_node))
|
||||||
|
{
|
||||||
|
// We are at an only_-restriction but not at the right turn.
|
||||||
|
++restricted_turns_counter;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (is_barrier_node)
|
||||||
|
{
|
||||||
|
if (node_u != node_w)
|
||||||
|
{
|
||||||
|
++skipped_barrier_turns_counter;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if ((node_u == node_w) && (m_node_based_graph->GetOutDegree(node_v) > 1))
|
||||||
|
{
|
||||||
|
++skipped_uturns_counter;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// only add an edge if turn is not a U-turn except when it is
|
||||||
|
// at the end of a dead-end street
|
||||||
|
if (m_restriction_map->CheckIfTurnIsRestricted(node_u, node_v, node_w) &&
|
||||||
|
(only_restriction_to_node == SPECIAL_NODEID) &&
|
||||||
|
(node_w != only_restriction_to_node))
|
||||||
|
{
|
||||||
|
// We are at an only_-restriction but not at the right turn.
|
||||||
|
++restricted_turns_counter;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// only add an edge if turn is not prohibited
|
||||||
|
const EdgeData &edge_data1 = m_node_based_graph->GetEdgeData(e1);
|
||||||
|
const EdgeData &edge_data2 = m_node_based_graph->GetEdgeData(e2);
|
||||||
|
|
||||||
|
BOOST_ASSERT(edge_data1.edgeBasedNodeID != edge_data2.edgeBasedNodeID);
|
||||||
|
BOOST_ASSERT(edge_data1.forward);
|
||||||
|
BOOST_ASSERT(edge_data2.forward);
|
||||||
|
|
||||||
|
// the following is the core of the loop.
|
||||||
|
unsigned distance = edge_data1.distance;
|
||||||
|
if (m_traffic_lights.find(node_v) != m_traffic_lights.end())
|
||||||
|
{
|
||||||
|
distance += speed_profile.traffic_signal_penalty;
|
||||||
|
}
|
||||||
|
|
||||||
|
// unpack last node of first segment if packed
|
||||||
|
const auto first_coordinate =
|
||||||
|
m_node_info_list[(m_geometry_compressor.HasEntryForID(e1)
|
||||||
|
? m_geometry_compressor.GetLastNodeIDOfBucket(e1)
|
||||||
|
: node_u)];
|
||||||
|
|
||||||
|
// unpack first node of second segment if packed
|
||||||
|
const auto third_coordinate =
|
||||||
|
m_node_info_list[(m_geometry_compressor.HasEntryForID(e2)
|
||||||
|
? m_geometry_compressor.GetFirstNodeIDOfBucket(e2)
|
||||||
|
: node_w)];
|
||||||
|
|
||||||
|
const double turn_angle = ComputeAngle::OfThreeFixedPointCoordinates(
|
||||||
|
first_coordinate, m_node_info_list[node_v], third_coordinate);
|
||||||
|
|
||||||
|
const int turn_penalty = GetTurnPenalty(turn_angle, lua_state);
|
||||||
|
TurnInstruction turn_instruction = AnalyzeTurn(node_u, node_v, node_w, turn_angle);
|
||||||
|
if (turn_instruction == TurnInstruction::UTurn)
|
||||||
|
{
|
||||||
|
distance += speed_profile.u_turn_penalty;
|
||||||
|
}
|
||||||
|
distance += turn_penalty;
|
||||||
|
|
||||||
|
const bool edge_is_compressed = m_geometry_compressor.HasEntryForID(e1);
|
||||||
|
|
||||||
|
if (edge_is_compressed)
|
||||||
|
{
|
||||||
|
++compressed;
|
||||||
|
}
|
||||||
|
|
||||||
|
original_edge_data_vector.emplace_back(
|
||||||
|
(edge_is_compressed ? m_geometry_compressor.GetPositionForID(e1) : node_v),
|
||||||
|
edge_data1.nameID, turn_instruction, edge_is_compressed,
|
||||||
|
edge_data2.travel_mode);
|
||||||
|
|
||||||
|
++original_edges_counter;
|
||||||
|
|
||||||
|
if (original_edge_data_vector.size() > 1024 * 1024 * 10)
|
||||||
|
{
|
||||||
|
FlushVectorToStream(edge_data_file, original_edge_data_vector);
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != edge_data1.edgeBasedNodeID);
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != edge_data2.edgeBasedNodeID);
|
||||||
|
|
||||||
|
m_edge_based_edge_list.emplace_back(
|
||||||
|
EdgeBasedEdge(edge_data1.edgeBasedNodeID, edge_data2.edgeBasedNodeID,
|
||||||
|
m_edge_based_edge_list.size(), distance, true, false));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FlushVectorToStream(edge_data_file, original_edge_data_vector);
|
||||||
|
|
||||||
|
edge_data_file.seekp(std::ios::beg);
|
||||||
|
edge_data_file.write((char *)&original_edges_counter, sizeof(unsigned));
|
||||||
|
edge_data_file.close();
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Generated " << m_edge_based_node_list.size() << " edge based nodes";
|
||||||
|
SimpleLogger().Write() << "Node-based graph contains " << node_based_edge_counter << " edges";
|
||||||
|
SimpleLogger().Write() << "Edge-expanded graph ...";
|
||||||
|
SimpleLogger().Write() << " contains " << m_edge_based_edge_list.size() << " edges";
|
||||||
|
SimpleLogger().Write() << " skips " << restricted_turns_counter << " turns, "
|
||||||
|
"defined by "
|
||||||
|
<< m_restriction_map->size() << " restrictions";
|
||||||
|
SimpleLogger().Write() << " skips " << skipped_uturns_counter << " U turns";
|
||||||
|
SimpleLogger().Write() << " skips " << skipped_barrier_turns_counter << " turns over barriers";
|
||||||
|
}
|
||||||
|
|
||||||
|
int EdgeBasedGraphFactory::GetTurnPenalty(double angle, lua_State *lua_state) const
|
||||||
|
{
|
||||||
|
|
||||||
|
if (speed_profile.has_turn_penalty_function)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// call lua profile to compute turn penalty
|
||||||
|
return luabind::call_function<int>(lua_state, "turn_function", 180. - angle);
|
||||||
|
}
|
||||||
|
catch (const luabind::error &er)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << er.what();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Classifies the turn u -> v -> w into a TurnInstruction based on roundabout
// membership of the two edges, street-name continuity, and the turn angle.
TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(const NodeID node_u,
                                                   const NodeID node_v,
                                                   const NodeID node_w,
                                                   const double angle) const
{
    // Returning to the start node is by definition a U-turn.
    if (node_u == node_w)
    {
        return TurnInstruction::UTurn;
    }

    const EdgeID edge1 = m_node_based_graph->FindEdge(node_u, node_v);
    const EdgeID edge2 = m_node_based_graph->FindEdge(node_v, node_w);

    const EdgeData &data1 = m_node_based_graph->GetEdgeData(edge1);
    const EdgeData &data2 = m_node_based_graph->GetEdgeData(edge2);

    const bool from_roundabout = data1.roundabout;
    const bool to_roundabout = data2.roundabout;

    // roundabouts need to be handled explicitly
    if (from_roundabout && to_roundabout)
    {
        // With a single outgoing edge no turn is possible, so no announcement
        // is needed; otherwise we stay on the roundabout.
        return (1 == m_node_based_graph->GetDirectedOutDegree(node_v))
                   ? TurnInstruction::NoTurn
                   : TurnInstruction::StayOnRoundAbout;
    }

    // Entering the roundabout
    if (!from_roundabout && to_roundabout)
    {
        return TurnInstruction::EnterRoundAbout;
    }
    // Leaving the roundabout
    if (from_roundabout && !to_roundabout)
    {
        return TurnInstruction::LeaveRoundAbout;
    }

    // If street names stay the same and we are certain that it is not a
    // segment of a roundabout, we skip the announcement — except for unnamed
    // roads (nameID == 0) at junctions with more than two outgoing edges.
    if (data1.nameID == data2.nameID)
    {
        // TODO: Here we should also do a small graph exploration to check for
        // more complex situations
        if (0 != data1.nameID || m_node_based_graph->GetOutDegree(node_v) <= 2)
        {
            return TurnInstruction::NoTurn;
        }
    }

    return TurnInstructionsClass::GetTurnDirectionOfInstruction(angle);
}
|
||||||
|
|
||||||
|
/// Returns the number of nodes in the edge-expanded graph, i.e. the count of
/// forward edges numbered by RenumberEdges().
unsigned EdgeBasedGraphFactory::GetNumberOfEdgeBasedNodes() const
{
    return m_number_of_edge_based_nodes;
}
|
129
contractor/edge_based_graph_factory.hpp
Normal file
129
contractor/edge_based_graph_factory.hpp
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM, Dennis Luxen, others
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
// This class constructs the edge-expanded routing graph
|
||||||
|
|
||||||
|
#ifndef EDGE_BASED_GRAPH_FACTORY_HPP_
|
||||||
|
#define EDGE_BASED_GRAPH_FACTORY_HPP_
|
||||||
|
|
||||||
|
#include "geometry_compressor.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
#include "../data_structures/deallocating_vector.hpp"
|
||||||
|
#include "../data_structures/edge_based_node.hpp"
|
||||||
|
#include "../data_structures/original_edge_data.hpp"
|
||||||
|
#include "../data_structures/query_node.hpp"
|
||||||
|
#include "../data_structures/turn_instructions.hpp"
|
||||||
|
#include "../data_structures/node_based_graph.hpp"
|
||||||
|
#include "../data_structures/restriction_map.hpp"
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <iosfwd>
|
||||||
|
#include <memory>
|
||||||
|
#include <queue>
|
||||||
|
#include <string>
|
||||||
|
#include <unordered_map>
|
||||||
|
#include <unordered_set>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
struct lua_State;
|
||||||
|
|
||||||
|
class EdgeBasedGraphFactory
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
EdgeBasedGraphFactory() = delete;
|
||||||
|
EdgeBasedGraphFactory(const EdgeBasedGraphFactory &) = delete;
|
||||||
|
|
||||||
|
struct SpeedProfileProperties;
|
||||||
|
|
||||||
|
explicit EdgeBasedGraphFactory(std::shared_ptr<NodeBasedDynamicGraph> node_based_graph,
|
||||||
|
std::shared_ptr<RestrictionMap> restricion_map,
|
||||||
|
std::unique_ptr<std::vector<NodeID>> barrier_node_list,
|
||||||
|
std::unique_ptr<std::vector<NodeID>> traffic_light_node_list,
|
||||||
|
const std::vector<QueryNode> &node_info_list,
|
||||||
|
const SpeedProfileProperties &speed_profile);
|
||||||
|
|
||||||
|
void Run(const std::string &original_edge_data_filename,
|
||||||
|
const std::string &geometry_filename,
|
||||||
|
lua_State *lua_state);
|
||||||
|
|
||||||
|
void GetEdgeBasedEdges(DeallocatingVector<EdgeBasedEdge> &edges);
|
||||||
|
|
||||||
|
void GetEdgeBasedNodes(std::vector<EdgeBasedNode> &nodes);
|
||||||
|
|
||||||
|
TurnInstruction AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w, const double angle) const;
|
||||||
|
|
||||||
|
int GetTurnPenalty(double angle, lua_State *lua_state) const;
|
||||||
|
|
||||||
|
unsigned GetNumberOfEdgeBasedNodes() const;
|
||||||
|
|
||||||
|
struct SpeedProfileProperties
|
||||||
|
{
|
||||||
|
SpeedProfileProperties()
|
||||||
|
: traffic_signal_penalty(0), u_turn_penalty(0), has_turn_penalty_function(false)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
int traffic_signal_penalty;
|
||||||
|
int u_turn_penalty;
|
||||||
|
bool has_turn_penalty_function;
|
||||||
|
} speed_profile;
|
||||||
|
|
||||||
|
private:
|
||||||
|
using EdgeData = NodeBasedDynamicGraph::EdgeData;
|
||||||
|
|
||||||
|
unsigned m_number_of_edge_based_nodes;
|
||||||
|
|
||||||
|
std::vector<EdgeBasedNode> m_edge_based_node_list;
|
||||||
|
DeallocatingVector<EdgeBasedEdge> m_edge_based_edge_list;
|
||||||
|
|
||||||
|
const std::vector<QueryNode>& m_node_info_list;
|
||||||
|
std::shared_ptr<NodeBasedDynamicGraph> m_node_based_graph;
|
||||||
|
std::shared_ptr<RestrictionMap> m_restriction_map;
|
||||||
|
|
||||||
|
std::unordered_set<NodeID> m_barrier_nodes;
|
||||||
|
std::unordered_set<NodeID> m_traffic_lights;
|
||||||
|
|
||||||
|
|
||||||
|
GeometryCompressor m_geometry_compressor;
|
||||||
|
|
||||||
|
void CompressGeometry();
|
||||||
|
void RenumberEdges();
|
||||||
|
void GenerateEdgeExpandedNodes();
|
||||||
|
void GenerateEdgeExpandedEdges(const std::string &original_edge_data_filename,
|
||||||
|
lua_State *lua_state);
|
||||||
|
|
||||||
|
void InsertEdgeBasedNode(const NodeID u, const NodeID v, const unsigned component_id);
|
||||||
|
|
||||||
|
void FlushVectorToStream(std::ofstream &edge_data_file,
|
||||||
|
std::vector<OriginalEdgeData> &original_edge_data_vector) const;
|
||||||
|
|
||||||
|
NodeID max_id;
|
||||||
|
std::size_t removed_node_count;
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* EDGE_BASED_GRAPH_FACTORY_HPP_ */
|
236
contractor/geometry_compressor.cpp
Normal file
236
contractor/geometry_compressor.cpp
Normal file
@ -0,0 +1,236 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "geometry_compressor.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
#include <boost/filesystem.hpp>
|
||||||
|
#include <boost/filesystem/fstream.hpp>
|
||||||
|
|
||||||
|
#include <limits>
|
||||||
|
#include <string>
|
||||||
|
|
||||||
|
// Pre-allocates an initial batch of geometry buckets so the first calls to
// CompressEdge() never start from an empty free list.
GeometryCompressor::GeometryCompressor()
{
    m_free_list.reserve(100);
    IncreaseFreeList();
}
|
||||||
|
|
||||||
|
void GeometryCompressor::IncreaseFreeList()
|
||||||
|
{
|
||||||
|
m_compressed_geometries.resize(m_compressed_geometries.size() + 100);
|
||||||
|
for (unsigned i = 100; i > 0; --i)
|
||||||
|
{
|
||||||
|
m_free_list.emplace_back(free_list_maximum);
|
||||||
|
++free_list_maximum;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bool GeometryCompressor::HasEntryForID(const EdgeID edge_id) const
|
||||||
|
{
|
||||||
|
auto iter = m_edge_id_to_list_index_map.find(edge_id);
|
||||||
|
return iter != m_edge_id_to_list_index_map.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned GeometryCompressor::GetPositionForID(const EdgeID edge_id) const
|
||||||
|
{
|
||||||
|
auto map_iterator = m_edge_id_to_list_index_map.find(edge_id);
|
||||||
|
BOOST_ASSERT(map_iterator != m_edge_id_to_list_index_map.end());
|
||||||
|
BOOST_ASSERT(map_iterator->second < m_compressed_geometries.size());
|
||||||
|
return map_iterator->second;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serializes all compressed geometries to `path` as a binary file with layout:
//   [bucket count + 1]
//   [exclusive prefix sum of bucket sizes, one entry per bucket]
//   [sentinel: total node count]
//   [total node count again, announcing the entries to follow]
//   [NodeID of every compressed node, bucket by bucket]
// NOTE(review): only the NodeID half of each CompressedNode (node, weight)
// pair is written — the weights are not serialized here.
void GeometryCompressor::SerializeInternalVector(const std::string &path) const
{

    boost::filesystem::fstream geometry_out_stream(path, std::ios::binary | std::ios::out);
    // +1 accounts for the sentinel entry appended to the index array below.
    const unsigned compressed_geometries = m_compressed_geometries.size() + 1;
    BOOST_ASSERT(std::numeric_limits<unsigned>::max() != compressed_geometries);
    geometry_out_stream.write((char *)&compressed_geometries, sizeof(unsigned));

    // write indices array: each bucket's exclusive prefix sum of node counts
    unsigned prefix_sum_of_list_indices = 0;
    for (const auto &elem : m_compressed_geometries)
    {
        geometry_out_stream.write((char *)&prefix_sum_of_list_indices, sizeof(unsigned));

        const std::vector<CompressedNode> &current_vector = elem;
        const unsigned unpacked_size = current_vector.size();
        BOOST_ASSERT(std::numeric_limits<unsigned>::max() != unpacked_size);
        prefix_sum_of_list_indices += unpacked_size;
    }
    // sentinel element
    geometry_out_stream.write((char *)&prefix_sum_of_list_indices, sizeof(unsigned));

    // number of geometry entries to follow, it is the (inclusive) prefix sum
    geometry_out_stream.write((char *)&prefix_sum_of_list_indices, sizeof(unsigned));

    unsigned control_sum = 0;
    // write compressed geometries
    for (auto &elem : m_compressed_geometries)
    {
        const std::vector<CompressedNode> &current_vector = elem;
        const unsigned unpacked_size = current_vector.size();
        control_sum += unpacked_size;
        BOOST_ASSERT(std::numeric_limits<unsigned>::max() != unpacked_size);
        for (const CompressedNode current_node : current_vector)
        {
            geometry_out_stream.write((char *)&(current_node.first), sizeof(NodeID));
        }
    }
    // The nodes actually written must match the index array we announced.
    BOOST_ASSERT(control_sum == prefix_sum_of_list_indices);
    // all done, let's close the resource
    geometry_out_stream.close();
}
|
||||||
|
|
||||||
|
void GeometryCompressor::CompressEdge(const EdgeID edge_id_1,
|
||||||
|
const EdgeID edge_id_2,
|
||||||
|
const NodeID via_node_id,
|
||||||
|
const NodeID target_node_id,
|
||||||
|
const EdgeWeight weight1,
|
||||||
|
const EdgeWeight weight2)
|
||||||
|
{
|
||||||
|
// remove super-trivial geometries
|
||||||
|
BOOST_ASSERT(SPECIAL_EDGEID != edge_id_1);
|
||||||
|
BOOST_ASSERT(SPECIAL_EDGEID != edge_id_2);
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != via_node_id);
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != target_node_id);
|
||||||
|
BOOST_ASSERT(INVALID_EDGE_WEIGHT != weight1);
|
||||||
|
BOOST_ASSERT(INVALID_EDGE_WEIGHT != weight2);
|
||||||
|
|
||||||
|
// append list of removed edge_id plus via node to surviving edge id:
|
||||||
|
// <surv_1, .. , surv_n, via_node_id, rem_1, .. rem_n
|
||||||
|
//
|
||||||
|
// General scheme:
|
||||||
|
// 1. append via node id to list of edge_id_1
|
||||||
|
// 2. find list for edge_id_2, if yes add all elements and delete it
|
||||||
|
|
||||||
|
// Add via node id. List is created if it does not exist
|
||||||
|
if (!HasEntryForID(edge_id_1))
|
||||||
|
{
|
||||||
|
// create a new entry in the map
|
||||||
|
if (0 == m_free_list.size())
|
||||||
|
{
|
||||||
|
// make sure there is a place to put the entries
|
||||||
|
IncreaseFreeList();
|
||||||
|
}
|
||||||
|
BOOST_ASSERT(!m_free_list.empty());
|
||||||
|
m_edge_id_to_list_index_map[edge_id_1] = m_free_list.back();
|
||||||
|
m_free_list.pop_back();
|
||||||
|
}
|
||||||
|
|
||||||
|
// find bucket index
|
||||||
|
const auto iter = m_edge_id_to_list_index_map.find(edge_id_1);
|
||||||
|
BOOST_ASSERT(iter != m_edge_id_to_list_index_map.end());
|
||||||
|
const unsigned edge_bucket_id1 = iter->second;
|
||||||
|
BOOST_ASSERT(edge_bucket_id1 == GetPositionForID(edge_id_1));
|
||||||
|
BOOST_ASSERT(edge_bucket_id1 < m_compressed_geometries.size());
|
||||||
|
|
||||||
|
std::vector<CompressedNode> &edge_bucket_list1 = m_compressed_geometries[edge_bucket_id1];
|
||||||
|
|
||||||
|
if (edge_bucket_list1.empty())
|
||||||
|
{
|
||||||
|
edge_bucket_list1.emplace_back(via_node_id, weight1);
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(0 < edge_bucket_list1.size());
|
||||||
|
BOOST_ASSERT(!edge_bucket_list1.empty());
|
||||||
|
|
||||||
|
if (HasEntryForID(edge_id_2))
|
||||||
|
{
|
||||||
|
// second edge is not atomic anymore
|
||||||
|
const unsigned list_to_remove_index = GetPositionForID(edge_id_2);
|
||||||
|
BOOST_ASSERT(list_to_remove_index < m_compressed_geometries.size());
|
||||||
|
|
||||||
|
std::vector<CompressedNode> &edge_bucket_list2 =
|
||||||
|
m_compressed_geometries[list_to_remove_index];
|
||||||
|
|
||||||
|
// found an existing list, append it to the list of edge_id_1
|
||||||
|
edge_bucket_list1.insert(edge_bucket_list1.end(), edge_bucket_list2.begin(),
|
||||||
|
edge_bucket_list2.end());
|
||||||
|
|
||||||
|
// remove the list of edge_id_2
|
||||||
|
m_edge_id_to_list_index_map.erase(edge_id_2);
|
||||||
|
BOOST_ASSERT(m_edge_id_to_list_index_map.end() ==
|
||||||
|
m_edge_id_to_list_index_map.find(edge_id_2));
|
||||||
|
edge_bucket_list2.clear();
|
||||||
|
BOOST_ASSERT(0 == edge_bucket_list2.size());
|
||||||
|
m_free_list.emplace_back(list_to_remove_index);
|
||||||
|
BOOST_ASSERT(list_to_remove_index == m_free_list.back());
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// we are certain that the second edge is atomic.
|
||||||
|
edge_bucket_list1.emplace_back(target_node_id, weight2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void GeometryCompressor::PrintStatistics() const
|
||||||
|
{
|
||||||
|
const uint64_t compressed_edges = m_compressed_geometries.size();
|
||||||
|
BOOST_ASSERT(0 == compressed_edges % 2);
|
||||||
|
BOOST_ASSERT(m_compressed_geometries.size() + m_free_list.size() > 0);
|
||||||
|
|
||||||
|
uint64_t compressed_geometries = 0;
|
||||||
|
uint64_t longest_chain_length = 0;
|
||||||
|
for (const std::vector<CompressedNode> ¤t_vector : m_compressed_geometries)
|
||||||
|
{
|
||||||
|
compressed_geometries += current_vector.size();
|
||||||
|
longest_chain_length = std::max(longest_chain_length, (uint64_t)current_vector.size());
|
||||||
|
}
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Geometry successfully removed:"
|
||||||
|
"\n compressed edges: " << compressed_edges
|
||||||
|
<< "\n compressed geometries: " << compressed_geometries
|
||||||
|
<< "\n longest chain length: " << longest_chain_length
|
||||||
|
<< "\n cmpr ratio: " << ((float)compressed_edges /
|
||||||
|
std::max(compressed_geometries, (uint64_t)1))
|
||||||
|
<< "\n avg chain length: "
|
||||||
|
<< (float)compressed_geometries /
|
||||||
|
std::max((uint64_t)1, compressed_edges);
|
||||||
|
}
|
||||||
|
|
||||||
|
const std::vector<GeometryCompressor::CompressedNode> &
|
||||||
|
GeometryCompressor::GetBucketReference(const EdgeID edge_id) const
|
||||||
|
{
|
||||||
|
const unsigned index = m_edge_id_to_list_index_map.at(edge_id);
|
||||||
|
return m_compressed_geometries.at(index);
|
||||||
|
}
|
||||||
|
|
||||||
|
NodeID GeometryCompressor::GetFirstNodeIDOfBucket(const EdgeID edge_id) const
|
||||||
|
{
|
||||||
|
const auto &bucket = GetBucketReference(edge_id);
|
||||||
|
BOOST_ASSERT(bucket.size() >= 2);
|
||||||
|
return bucket[1].first;
|
||||||
|
}
|
||||||
|
NodeID GeometryCompressor::GetLastNodeIDOfBucket(const EdgeID edge_id) const
|
||||||
|
{
|
||||||
|
const auto &bucket = GetBucketReference(edge_id);
|
||||||
|
BOOST_ASSERT(bucket.size() >= 2);
|
||||||
|
return bucket[bucket.size() - 2].first;
|
||||||
|
}
|
69
contractor/geometry_compressor.hpp
Normal file
69
contractor/geometry_compressor.hpp
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM, Dennis Luxen, others
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef GEOMETRY_COMPRESSOR_HPP_
|
||||||
|
#define GEOMETRY_COMPRESSOR_HPP_
|
||||||
|
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
#include <unordered_map>
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
class GeometryCompressor
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
using CompressedNode = std::pair<NodeID, EdgeWeight>;
|
||||||
|
|
||||||
|
GeometryCompressor();
|
||||||
|
void CompressEdge(const EdgeID surviving_edge_id,
|
||||||
|
const EdgeID removed_edge_id,
|
||||||
|
const NodeID via_node_id,
|
||||||
|
const NodeID target_node,
|
||||||
|
const EdgeWeight weight1,
|
||||||
|
const EdgeWeight weight2);
|
||||||
|
|
||||||
|
bool HasEntryForID(const EdgeID edge_id) const;
|
||||||
|
void PrintStatistics() const;
|
||||||
|
void SerializeInternalVector(const std::string &path) const;
|
||||||
|
unsigned GetPositionForID(const EdgeID edge_id) const;
|
||||||
|
const std::vector<GeometryCompressor::CompressedNode> &
|
||||||
|
GetBucketReference(const EdgeID edge_id) const;
|
||||||
|
NodeID GetFirstNodeIDOfBucket(const EdgeID edge_id) const;
|
||||||
|
NodeID GetLastNodeIDOfBucket(const EdgeID edge_id) const;
|
||||||
|
|
||||||
|
private:
|
||||||
|
int free_list_maximum = 0;
|
||||||
|
|
||||||
|
void IncreaseFreeList();
|
||||||
|
std::vector<std::vector<CompressedNode>> m_compressed_geometries;
|
||||||
|
std::vector<unsigned> m_free_list;
|
||||||
|
std::unordered_map<EdgeID, unsigned> m_edge_id_to_list_index_map;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // GEOMETRY_COMPRESSOR_HPP_
|
430
contractor/processing_chain.cpp
Normal file
430
contractor/processing_chain.cpp
Normal file
@ -0,0 +1,430 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM, Dennis Luxen, others
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "processing_chain.hpp"
|
||||||
|
|
||||||
|
#include "contractor.hpp"
|
||||||
|
|
||||||
|
#include "../algorithms/crc32_processor.hpp"
|
||||||
|
#include "../data_structures/deallocating_vector.hpp"
|
||||||
|
#include "../data_structures/static_rtree.hpp"
|
||||||
|
#include "../data_structures/restriction_map.hpp"
|
||||||
|
|
||||||
|
#include "../util/git_sha.hpp"
|
||||||
|
#include "../util/graph_loader.hpp"
|
||||||
|
#include "../util/integer_range.hpp"
|
||||||
|
#include "../util/lua_util.hpp"
|
||||||
|
#include "../util/make_unique.hpp"
|
||||||
|
#include "../util/osrm_exception.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
#include "../util/string_util.hpp"
|
||||||
|
#include "../util/timing_util.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
#include <boost/filesystem/fstream.hpp>
|
||||||
|
#include <boost/program_options.hpp>
|
||||||
|
|
||||||
|
#include <tbb/parallel_sort.h>
|
||||||
|
|
||||||
|
#include <chrono>
|
||||||
|
#include <memory>
|
||||||
|
#include <string>
|
||||||
|
#include <thread>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
Prepare::~Prepare() {}
|
||||||
|
|
||||||
|
int Prepare::Run()
|
||||||
|
{
|
||||||
|
#ifdef WIN32
|
||||||
|
#pragma message("Memory consumption on Windows can be higher due to different bit packing")
|
||||||
|
#else
|
||||||
|
static_assert(sizeof(NodeBasedEdge) == 20,
|
||||||
|
"changing NodeBasedEdge type has influence on memory consumption!");
|
||||||
|
static_assert(sizeof(EdgeBasedEdge) == 16,
|
||||||
|
"changing EdgeBasedEdge type has influence on memory consumption!");
|
||||||
|
#endif
|
||||||
|
|
||||||
|
TIMER_START(preparing);
|
||||||
|
|
||||||
|
// Create a new lua state
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Generating edge-expanded graph representation";
|
||||||
|
|
||||||
|
TIMER_START(expansion);
|
||||||
|
|
||||||
|
auto node_based_edge_list = osrm::make_unique<std::vector<EdgeBasedNode>>();;
|
||||||
|
DeallocatingVector<EdgeBasedEdge> edge_based_edge_list;
|
||||||
|
auto internal_to_external_node_map = osrm::make_unique<std::vector<QueryNode>>();
|
||||||
|
auto graph_size =
|
||||||
|
BuildEdgeExpandedGraph(*internal_to_external_node_map,
|
||||||
|
*node_based_edge_list, edge_based_edge_list);
|
||||||
|
|
||||||
|
auto number_of_node_based_nodes = graph_size.first;
|
||||||
|
auto number_of_edge_based_nodes = graph_size.second;
|
||||||
|
|
||||||
|
TIMER_STOP(expansion);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "building r-tree ...";
|
||||||
|
TIMER_START(rtree);
|
||||||
|
|
||||||
|
BuildRTree(*node_based_edge_list, *internal_to_external_node_map);
|
||||||
|
|
||||||
|
TIMER_STOP(rtree);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "writing node map ...";
|
||||||
|
WriteNodeMapping(std::move(internal_to_external_node_map));
|
||||||
|
|
||||||
|
// Contracting the edge-expanded graph
|
||||||
|
|
||||||
|
TIMER_START(contraction);
|
||||||
|
auto contracted_edge_list = osrm::make_unique<DeallocatingVector<QueryEdge>>();
|
||||||
|
ContractGraph(number_of_edge_based_nodes, edge_based_edge_list, *contracted_edge_list);
|
||||||
|
TIMER_STOP(contraction);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Contraction took " << TIMER_SEC(contraction) << " sec";
|
||||||
|
|
||||||
|
std::size_t number_of_used_edges = WriteContractedGraph(number_of_edge_based_nodes,
|
||||||
|
std::move(node_based_edge_list),
|
||||||
|
std::move(contracted_edge_list));
|
||||||
|
|
||||||
|
TIMER_STOP(preparing);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Preprocessing : " << TIMER_SEC(preparing) << " seconds";
|
||||||
|
SimpleLogger().Write() << "Expansion : " << (number_of_node_based_nodes / TIMER_SEC(expansion))
|
||||||
|
<< " nodes/sec and "
|
||||||
|
<< (number_of_edge_based_nodes / TIMER_SEC(expansion)) << " edges/sec";
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Contraction: "
|
||||||
|
<< (number_of_edge_based_nodes / TIMER_SEC(contraction))
|
||||||
|
<< " nodes/sec and " << number_of_used_edges / TIMER_SEC(contraction)
|
||||||
|
<< " edges/sec";
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "finished preprocessing";
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::size_t Prepare::WriteContractedGraph(unsigned number_of_edge_based_nodes,
|
||||||
|
std::unique_ptr<std::vector<EdgeBasedNode>> node_based_edge_list,
|
||||||
|
std::unique_ptr<DeallocatingVector<QueryEdge>> contracted_edge_list)
|
||||||
|
{
|
||||||
|
const unsigned crc32_value = CalculateEdgeChecksum(std::move(node_based_edge_list));
|
||||||
|
|
||||||
|
// Sorting contracted edges in a way that the static query graph can read some in in-place.
|
||||||
|
tbb::parallel_sort(contracted_edge_list->begin(), contracted_edge_list->end());
|
||||||
|
const unsigned contracted_edge_count = contracted_edge_list->size();
|
||||||
|
SimpleLogger().Write() << "Serializing compacted graph of " << contracted_edge_count
|
||||||
|
<< " edges";
|
||||||
|
|
||||||
|
const FingerPrint fingerprint = FingerPrint::GetValid();
|
||||||
|
boost::filesystem::ofstream hsgr_output_stream(config.graph_output_path, std::ios::binary);
|
||||||
|
hsgr_output_stream.write((char *)&fingerprint, sizeof(FingerPrint));
|
||||||
|
const unsigned max_used_node_id = 1 + [&contracted_edge_list]
|
||||||
|
{
|
||||||
|
unsigned tmp_max = 0;
|
||||||
|
for (const QueryEdge &edge : *contracted_edge_list)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != edge.source);
|
||||||
|
BOOST_ASSERT(SPECIAL_NODEID != edge.target);
|
||||||
|
tmp_max = std::max(tmp_max, edge.source);
|
||||||
|
tmp_max = std::max(tmp_max, edge.target);
|
||||||
|
}
|
||||||
|
return tmp_max;
|
||||||
|
}();
|
||||||
|
|
||||||
|
SimpleLogger().Write(logDEBUG) << "input graph has " << number_of_edge_based_nodes << " nodes";
|
||||||
|
SimpleLogger().Write(logDEBUG) << "contracted graph has " << max_used_node_id << " nodes";
|
||||||
|
|
||||||
|
std::vector<StaticGraph<EdgeData>::NodeArrayEntry> node_array;
|
||||||
|
node_array.resize(number_of_edge_based_nodes + 1);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Building node array";
|
||||||
|
StaticGraph<EdgeData>::EdgeIterator edge = 0;
|
||||||
|
StaticGraph<EdgeData>::EdgeIterator position = 0;
|
||||||
|
StaticGraph<EdgeData>::EdgeIterator last_edge = edge;
|
||||||
|
|
||||||
|
// initializing 'first_edge'-field of nodes:
|
||||||
|
for (const auto node : osrm::irange(0u, max_used_node_id))
|
||||||
|
{
|
||||||
|
last_edge = edge;
|
||||||
|
while ((edge < contracted_edge_count) && ((*contracted_edge_list)[edge].source == node))
|
||||||
|
{
|
||||||
|
++edge;
|
||||||
|
}
|
||||||
|
node_array[node].first_edge = position; //=edge
|
||||||
|
position += edge - last_edge; // remove
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const auto sentinel_counter : osrm::irange<unsigned>(max_used_node_id, node_array.size()))
|
||||||
|
{
|
||||||
|
// sentinel element, guarded against underflow
|
||||||
|
node_array[sentinel_counter].first_edge = contracted_edge_count;
|
||||||
|
}
|
||||||
|
|
||||||
|
SimpleLogger().Write() << "Serializing node array";
|
||||||
|
|
||||||
|
const unsigned node_array_size = node_array.size();
|
||||||
|
// serialize crc32, aka checksum
|
||||||
|
hsgr_output_stream.write((char *)&crc32_value, sizeof(unsigned));
|
||||||
|
// serialize number of nodes
|
||||||
|
hsgr_output_stream.write((char *)&node_array_size, sizeof(unsigned));
|
||||||
|
// serialize number of edges
|
||||||
|
hsgr_output_stream.write((char *)&contracted_edge_count, sizeof(unsigned));
|
||||||
|
// serialize all nodes
|
||||||
|
if (node_array_size > 0)
|
||||||
|
{
|
||||||
|
hsgr_output_stream.write((char *)&node_array[0],
|
||||||
|
sizeof(StaticGraph<EdgeData>::NodeArrayEntry) * node_array_size);
|
||||||
|
}
|
||||||
|
|
||||||
|
// serialize all edges
|
||||||
|
SimpleLogger().Write() << "Building edge array";
|
||||||
|
edge = 0;
|
||||||
|
int number_of_used_edges = 0;
|
||||||
|
|
||||||
|
StaticGraph<EdgeData>::EdgeArrayEntry current_edge;
|
||||||
|
for (const auto edge : osrm::irange<std::size_t>(0, contracted_edge_list->size()))
|
||||||
|
{
|
||||||
|
// no eigen loops
|
||||||
|
BOOST_ASSERT((*contracted_edge_list)[edge].source != (*contracted_edge_list)[edge].target);
|
||||||
|
current_edge.target = (*contracted_edge_list)[edge].target;
|
||||||
|
current_edge.data = (*contracted_edge_list)[edge].data;
|
||||||
|
|
||||||
|
// every target needs to be valid
|
||||||
|
BOOST_ASSERT(current_edge.target < max_used_node_id);
|
||||||
|
#ifndef NDEBUG
|
||||||
|
if (current_edge.data.distance <= 0)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Edge: " << edge
|
||||||
|
<< ",source: " << (*contracted_edge_list)[edge].source
|
||||||
|
<< ", target: " << (*contracted_edge_list)[edge].target
|
||||||
|
<< ", dist: " << current_edge.data.distance;
|
||||||
|
|
||||||
|
SimpleLogger().Write(logWARNING) << "Failed at adjacency list of node "
|
||||||
|
<< (*contracted_edge_list)[edge].source << "/"
|
||||||
|
<< node_array.size() - 1;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
hsgr_output_stream.write((char *)¤t_edge,
|
||||||
|
sizeof(StaticGraph<EdgeData>::EdgeArrayEntry));
|
||||||
|
|
||||||
|
++number_of_used_edges;
|
||||||
|
}
|
||||||
|
|
||||||
|
return number_of_used_edges;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned Prepare::CalculateEdgeChecksum(std::unique_ptr<std::vector<EdgeBasedNode>> node_based_edge_list)
|
||||||
|
{
|
||||||
|
RangebasedCRC32 crc32;
|
||||||
|
if (crc32.using_hardware())
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "using hardware based CRC32 computation";
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
SimpleLogger().Write() << "using software based CRC32 computation";
|
||||||
|
}
|
||||||
|
|
||||||
|
const unsigned crc32_value = crc32(*node_based_edge_list);
|
||||||
|
SimpleLogger().Write() << "CRC32: " << crc32_value;
|
||||||
|
|
||||||
|
return crc32_value;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Setups scripting environment (lua-scripting)
|
||||||
|
Also initializes speed profile.
|
||||||
|
*/
|
||||||
|
void Prepare::SetupScriptingEnvironment(
|
||||||
|
lua_State *lua_state, EdgeBasedGraphFactory::SpeedProfileProperties &speed_profile)
|
||||||
|
{
|
||||||
|
// open utility libraries string library;
|
||||||
|
luaL_openlibs(lua_state);
|
||||||
|
|
||||||
|
// adjust lua load path
|
||||||
|
luaAddScriptFolderToLoadPath(lua_state, config.profile_path.string().c_str());
|
||||||
|
|
||||||
|
// Now call our function in a lua script
|
||||||
|
if (0 != luaL_dofile(lua_state, config.profile_path.string().c_str()))
|
||||||
|
{
|
||||||
|
std::stringstream msg;
|
||||||
|
msg << lua_tostring(lua_state, -1) << " occured in scripting block";
|
||||||
|
throw osrm::exception(msg.str());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (0 != luaL_dostring(lua_state, "return traffic_signal_penalty\n"))
|
||||||
|
{
|
||||||
|
std::stringstream msg;
|
||||||
|
msg << lua_tostring(lua_state, -1) << " occured in scripting block";
|
||||||
|
throw osrm::exception(msg.str());
|
||||||
|
}
|
||||||
|
speed_profile.traffic_signal_penalty = 10 * lua_tointeger(lua_state, -1);
|
||||||
|
SimpleLogger().Write(logDEBUG)
|
||||||
|
<< "traffic_signal_penalty: " << speed_profile.traffic_signal_penalty;
|
||||||
|
|
||||||
|
if (0 != luaL_dostring(lua_state, "return u_turn_penalty\n"))
|
||||||
|
{
|
||||||
|
std::stringstream msg;
|
||||||
|
msg << lua_tostring(lua_state, -1) << " occured in scripting block";
|
||||||
|
throw osrm::exception(msg.str());
|
||||||
|
}
|
||||||
|
|
||||||
|
speed_profile.u_turn_penalty = 10 * lua_tointeger(lua_state, -1);
|
||||||
|
speed_profile.has_turn_penalty_function = lua_function_exists(lua_state, "turn_function");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Build load restrictions from .restriction file
|
||||||
|
*/
|
||||||
|
std::shared_ptr<RestrictionMap> Prepare::LoadRestrictionMap()
|
||||||
|
{
|
||||||
|
boost::filesystem::ifstream input_stream(config.restrictions_path, std::ios::in | std::ios::binary);
|
||||||
|
|
||||||
|
std::vector<TurnRestriction> restriction_list;
|
||||||
|
loadRestrictionsFromFile(input_stream, restriction_list);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << " - " << restriction_list.size() << " restrictions.";
|
||||||
|
|
||||||
|
return std::make_shared<RestrictionMap>(restriction_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Load node based graph from .osrm file
|
||||||
|
*/
|
||||||
|
std::shared_ptr<NodeBasedDynamicGraph>
|
||||||
|
Prepare::LoadNodeBasedGraph(std::vector<NodeID> &barrier_node_list,
|
||||||
|
std::vector<NodeID> &traffic_light_list,
|
||||||
|
std::vector<QueryNode>& internal_to_external_node_map)
|
||||||
|
{
|
||||||
|
std::vector<NodeBasedEdge> edge_list;
|
||||||
|
|
||||||
|
boost::filesystem::ifstream input_stream(config.osrm_input_path, std::ios::in | std::ios::binary);
|
||||||
|
|
||||||
|
NodeID number_of_node_based_nodes = loadNodesFromFile(input_stream,
|
||||||
|
barrier_node_list, traffic_light_list,
|
||||||
|
internal_to_external_node_map);
|
||||||
|
|
||||||
|
SimpleLogger().Write() << " - " << barrier_node_list.size() << " bollard nodes, "
|
||||||
|
<< traffic_light_list.size() << " traffic lights";
|
||||||
|
|
||||||
|
loadEdgesFromFile(input_stream, edge_list);
|
||||||
|
|
||||||
|
if (edge_list.empty())
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "The input data is empty, exiting.";
|
||||||
|
return std::shared_ptr<NodeBasedDynamicGraph>();
|
||||||
|
}
|
||||||
|
|
||||||
|
return NodeBasedDynamicGraphFromImportEdges(number_of_node_based_nodes, edge_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Building an edge-expanded graph from node-based input and turn restrictions
|
||||||
|
*/
|
||||||
|
std::pair<std::size_t, std::size_t>
|
||||||
|
Prepare::BuildEdgeExpandedGraph(std::vector<QueryNode> &internal_to_external_node_map,
|
||||||
|
std::vector<EdgeBasedNode> &node_based_edge_list,
|
||||||
|
DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list)
|
||||||
|
{
|
||||||
|
lua_State *lua_state = luaL_newstate();
|
||||||
|
luabind::open(lua_state);
|
||||||
|
|
||||||
|
EdgeBasedGraphFactory::SpeedProfileProperties speed_profile;
|
||||||
|
|
||||||
|
SetupScriptingEnvironment(lua_state, speed_profile);
|
||||||
|
|
||||||
|
auto barrier_node_list = osrm::make_unique<std::vector<NodeID>>();
|
||||||
|
auto traffic_light_list = osrm::make_unique<std::vector<NodeID>>();
|
||||||
|
|
||||||
|
auto restriction_map = LoadRestrictionMap();
|
||||||
|
auto node_based_graph = LoadNodeBasedGraph(*barrier_node_list, *traffic_light_list, internal_to_external_node_map);
|
||||||
|
|
||||||
|
const std::size_t number_of_node_based_nodes = node_based_graph->GetNumberOfNodes();
|
||||||
|
|
||||||
|
EdgeBasedGraphFactory edge_based_graph_factory(node_based_graph,
|
||||||
|
restriction_map,
|
||||||
|
std::move(barrier_node_list),
|
||||||
|
std::move(traffic_light_list),
|
||||||
|
internal_to_external_node_map,
|
||||||
|
speed_profile);
|
||||||
|
|
||||||
|
edge_based_graph_factory.Run(config.edge_output_path, config.geometry_output_path, lua_state);
|
||||||
|
lua_close(lua_state);
|
||||||
|
|
||||||
|
const std::size_t number_of_edge_based_nodes =
|
||||||
|
edge_based_graph_factory.GetNumberOfEdgeBasedNodes();
|
||||||
|
|
||||||
|
BOOST_ASSERT(number_of_edge_based_nodes != std::numeric_limits<unsigned>::max());
|
||||||
|
|
||||||
|
edge_based_graph_factory.GetEdgeBasedEdges(edge_based_edge_list);
|
||||||
|
edge_based_graph_factory.GetEdgeBasedNodes(node_based_edge_list);
|
||||||
|
|
||||||
|
return std::make_pair(number_of_node_based_nodes, number_of_edge_based_nodes);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Build contracted graph.
|
||||||
|
*/
|
||||||
|
void Prepare::ContractGraph(const std::size_t number_of_edge_based_nodes,
|
||||||
|
DeallocatingVector<EdgeBasedEdge>& edge_based_edge_list,
|
||||||
|
DeallocatingVector<QueryEdge>& contracted_edge_list)
|
||||||
|
{
|
||||||
|
Contractor contractor(number_of_edge_based_nodes, edge_based_edge_list);
|
||||||
|
contractor.Run();
|
||||||
|
contractor.GetEdges(contracted_edge_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Writing info on original (node-based) nodes
|
||||||
|
*/
|
||||||
|
void Prepare::WriteNodeMapping(std::unique_ptr<std::vector<QueryNode>> internal_to_external_node_map)
|
||||||
|
{
|
||||||
|
boost::filesystem::ofstream node_stream(config.node_output_path, std::ios::binary);
|
||||||
|
const unsigned size_of_mapping = internal_to_external_node_map->size();
|
||||||
|
node_stream.write((char *)&size_of_mapping, sizeof(unsigned));
|
||||||
|
if (size_of_mapping > 0)
|
||||||
|
{
|
||||||
|
node_stream.write((char *) internal_to_external_node_map->data(),
|
||||||
|
size_of_mapping * sizeof(QueryNode));
|
||||||
|
}
|
||||||
|
node_stream.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief Building rtree-based nearest-neighbor data structure
|
||||||
|
|
||||||
|
Saves tree into '.ramIndex' and leaves into '.fileIndex'.
|
||||||
|
*/
|
||||||
|
void Prepare::BuildRTree(const std::vector<EdgeBasedNode> &node_based_edge_list, const std::vector<QueryNode>& internal_to_external_node_map)
|
||||||
|
{
|
||||||
|
StaticRTree<EdgeBasedNode>(node_based_edge_list, config.rtree_nodes_output_path.c_str(),
|
||||||
|
config.rtree_leafs_output_path.c_str(), internal_to_external_node_map);
|
||||||
|
}
|
85
contractor/processing_chain.hpp
Normal file
85
contractor/processing_chain.hpp
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM, Dennis Luxen, others
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef PROCESSING_CHAIN_HPP
|
||||||
|
#define PROCESSING_CHAIN_HPP
|
||||||
|
|
||||||
|
#include "contractor_options.hpp"
|
||||||
|
#include "edge_based_graph_factory.hpp"
|
||||||
|
#include "../data_structures/query_edge.hpp"
|
||||||
|
#include "../data_structures/static_graph.hpp"
|
||||||
|
|
||||||
|
struct EdgeBasedNode;
|
||||||
|
struct lua_State;
|
||||||
|
|
||||||
|
#include <boost/filesystem.hpp>
|
||||||
|
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
/**
|
||||||
|
\brief class of 'prepare' utility.
|
||||||
|
*/
|
||||||
|
class Prepare
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
using EdgeData = QueryEdge::EdgeData;
|
||||||
|
using InputEdge = DynamicGraph<EdgeData>::InputEdge;
|
||||||
|
using StaticEdge = StaticGraph<EdgeData>::InputEdge;
|
||||||
|
|
||||||
|
explicit Prepare(const ContractorConfig& contractor_config)
|
||||||
|
: config(contractor_config) {}
|
||||||
|
Prepare(const Prepare &) = delete;
|
||||||
|
~Prepare();
|
||||||
|
|
||||||
|
int Run();
|
||||||
|
|
||||||
|
protected:
|
||||||
|
void SetupScriptingEnvironment(lua_State *myLuaState,
|
||||||
|
EdgeBasedGraphFactory::SpeedProfileProperties &speed_profile);
|
||||||
|
std::shared_ptr<RestrictionMap> LoadRestrictionMap();
|
||||||
|
unsigned CalculateEdgeChecksum(std::unique_ptr<std::vector<EdgeBasedNode>> node_based_edge_list);
|
||||||
|
void ContractGraph(const std::size_t number_of_edge_based_nodes,
|
||||||
|
DeallocatingVector<EdgeBasedEdge>& edge_based_edge_list,
|
||||||
|
DeallocatingVector<QueryEdge>& contracted_edge_list);
|
||||||
|
std::size_t WriteContractedGraph(unsigned number_of_edge_based_nodes,
|
||||||
|
std::unique_ptr<std::vector<EdgeBasedNode>> node_based_edge_list,
|
||||||
|
std::unique_ptr<DeallocatingVector<QueryEdge>> contracted_edge_list);
|
||||||
|
std::shared_ptr<NodeBasedDynamicGraph> LoadNodeBasedGraph(std::vector<NodeID> &barrier_node_list,
|
||||||
|
std::vector<NodeID> &traffic_light_list,
|
||||||
|
std::vector<QueryNode>& internal_to_external_node_map);
|
||||||
|
std::pair<std::size_t, std::size_t>
|
||||||
|
BuildEdgeExpandedGraph(std::vector<QueryNode> &internal_to_external_node_map,
|
||||||
|
std::vector<EdgeBasedNode> &node_based_edge_list,
|
||||||
|
DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list);
|
||||||
|
void WriteNodeMapping(std::unique_ptr<std::vector<QueryNode>> internal_to_external_node_map);
|
||||||
|
void BuildRTree(const std::vector<EdgeBasedNode> &node_based_edge_list,
|
||||||
|
const std::vector<QueryNode> &internal_to_external_node_map);
|
||||||
|
private:
|
||||||
|
ContractorConfig config;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // PROCESSING_CHAIN_HPP
|
@ -1,7 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
default: '--strict --tags ~@stress --tags ~@todo --tags ~@mld --require features/support --require features/step_definitions',
|
|
||||||
ch: '--strict --tags ~@stress --tags ~@todo --tags ~@mld -f progress --require features/support --require features/step_definitions',
|
|
||||||
todo: '--strict --tags @todo --require features/support --require features/step_definitions',
|
|
||||||
all: '--strict --require features/support --require features/step_definitions',
|
|
||||||
mld: '--strict --tags ~@stress --tags ~@todo --tags ~@ch --require features/support --require features/step_definitions -f progress'
|
|
||||||
};
|
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
308
data_structures/binary_heap.hpp
Normal file
308
data_structures/binary_heap.hpp
Normal file
@ -0,0 +1,308 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef BINARY_HEAP_H
|
||||||
|
#define BINARY_HEAP_H
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <limits>
|
||||||
|
#include <map>
|
||||||
|
#include <type_traits>
|
||||||
|
#include <unordered_map>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
template <typename NodeID, typename Key> class ArrayStorage
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
explicit ArrayStorage(size_t size) : positions(size, 0) {}
|
||||||
|
|
||||||
|
~ArrayStorage() {}
|
||||||
|
|
||||||
|
Key &operator[](NodeID node) { return positions[node]; }
|
||||||
|
|
||||||
|
Key peek_index(const NodeID node) const { return positions[node]; }
|
||||||
|
|
||||||
|
void Clear() {}
|
||||||
|
|
||||||
|
private:
|
||||||
|
std::vector<Key> positions;
|
||||||
|
};
|
||||||
|
|
||||||
|
template <typename NodeID, typename Key> class MapStorage
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
explicit MapStorage(size_t) {}
|
||||||
|
|
||||||
|
Key &operator[](NodeID node) { return nodes[node]; }
|
||||||
|
|
||||||
|
void Clear() { nodes.clear(); }
|
||||||
|
|
||||||
|
Key peek_index(const NodeID node) const
|
||||||
|
{
|
||||||
|
const auto iter = nodes.find(node);
|
||||||
|
if (nodes.end() != iter)
|
||||||
|
{
|
||||||
|
return iter->second;
|
||||||
|
}
|
||||||
|
return std::numeric_limits<Key>::max();
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
std::map<NodeID, Key> nodes;
|
||||||
|
};
|
||||||
|
|
||||||
|
template <typename NodeID, typename Key> class UnorderedMapStorage
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
explicit UnorderedMapStorage(size_t) { nodes.rehash(1000); }
|
||||||
|
|
||||||
|
Key &operator[](const NodeID node) { return nodes[node]; }
|
||||||
|
|
||||||
|
Key peek_index(const NodeID node) const
|
||||||
|
{
|
||||||
|
const auto iter = nodes.find(node);
|
||||||
|
if (std::end(nodes) != iter)
|
||||||
|
{
|
||||||
|
return iter->second;
|
||||||
|
}
|
||||||
|
return std::numeric_limits<Key>::max();
|
||||||
|
}
|
||||||
|
|
||||||
|
Key const &operator[](const NodeID node) const
|
||||||
|
{
|
||||||
|
auto iter = nodes.find(node);
|
||||||
|
return iter->second;
|
||||||
|
}
|
||||||
|
|
||||||
|
void Clear() { nodes.clear(); }
|
||||||
|
|
||||||
|
private:
|
||||||
|
std::unordered_map<NodeID, Key> nodes;
|
||||||
|
};
|
||||||
|
|
||||||
|
template <typename NodeID,
|
||||||
|
typename Key,
|
||||||
|
typename Weight,
|
||||||
|
typename Data,
|
||||||
|
typename IndexStorage = ArrayStorage<NodeID, NodeID>>
|
||||||
|
class BinaryHeap
|
||||||
|
{
|
||||||
|
private:
|
||||||
|
BinaryHeap(const BinaryHeap &right);
|
||||||
|
void operator=(const BinaryHeap &right);
|
||||||
|
|
||||||
|
public:
|
||||||
|
using WeightType = Weight;
|
||||||
|
using DataType = Data;
|
||||||
|
|
||||||
|
explicit BinaryHeap(size_t maxID) : node_index(maxID) { Clear(); }
|
||||||
|
|
||||||
|
void Clear()
|
||||||
|
{
|
||||||
|
heap.resize(1);
|
||||||
|
inserted_nodes.clear();
|
||||||
|
heap[0].weight = std::numeric_limits<Weight>::min();
|
||||||
|
node_index.Clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
std::size_t Size() const { return (heap.size() - 1); }
|
||||||
|
|
||||||
|
bool Empty() const { return 0 == Size(); }
|
||||||
|
|
||||||
|
void Insert(NodeID node, Weight weight, const Data &data)
|
||||||
|
{
|
||||||
|
HeapElement element;
|
||||||
|
element.index = static_cast<NodeID>(inserted_nodes.size());
|
||||||
|
element.weight = weight;
|
||||||
|
const Key key = static_cast<Key>(heap.size());
|
||||||
|
heap.emplace_back(element);
|
||||||
|
inserted_nodes.emplace_back(node, key, weight, data);
|
||||||
|
node_index[node] = element.index;
|
||||||
|
Upheap(key);
|
||||||
|
CheckHeap();
|
||||||
|
}
|
||||||
|
|
||||||
|
Data &GetData(NodeID node)
|
||||||
|
{
|
||||||
|
const Key index = node_index.peek_index(node);
|
||||||
|
return inserted_nodes[index].data;
|
||||||
|
}
|
||||||
|
|
||||||
|
Data const &GetData(NodeID node) const
|
||||||
|
{
|
||||||
|
const Key index = node_index.peek_index(node);
|
||||||
|
return inserted_nodes[index].data;
|
||||||
|
}
|
||||||
|
|
||||||
|
Weight &GetKey(NodeID node)
|
||||||
|
{
|
||||||
|
const Key index = node_index[node];
|
||||||
|
return inserted_nodes[index].weight;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool WasRemoved(const NodeID node) const
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(WasInserted(node));
|
||||||
|
const Key index = node_index.peek_index(node);
|
||||||
|
return inserted_nodes[index].key == 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool WasInserted(const NodeID node) const
|
||||||
|
{
|
||||||
|
const auto index = node_index.peek_index(node);
|
||||||
|
if (index >= static_cast<decltype(index)>(inserted_nodes.size()))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return inserted_nodes[index].node == node;
|
||||||
|
}
|
||||||
|
|
||||||
|
NodeID Min() const
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(heap.size() > 1);
|
||||||
|
return inserted_nodes[heap[1].index].node;
|
||||||
|
}
|
||||||
|
|
||||||
|
NodeID DeleteMin()
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(heap.size() > 1);
|
||||||
|
const Key removedIndex = heap[1].index;
|
||||||
|
heap[1] = heap[heap.size() - 1];
|
||||||
|
heap.pop_back();
|
||||||
|
if (heap.size() > 1)
|
||||||
|
{
|
||||||
|
Downheap(1);
|
||||||
|
}
|
||||||
|
inserted_nodes[removedIndex].key = 0;
|
||||||
|
CheckHeap();
|
||||||
|
return inserted_nodes[removedIndex].node;
|
||||||
|
}
|
||||||
|
|
||||||
|
void DeleteAll()
|
||||||
|
{
|
||||||
|
auto iend = heap.end();
|
||||||
|
for (typename std::vector<HeapElement>::iterator i = heap.begin() + 1; i != iend; ++i)
|
||||||
|
{
|
||||||
|
inserted_nodes[i->index].key = 0;
|
||||||
|
}
|
||||||
|
heap.resize(1);
|
||||||
|
heap[0].weight = (std::numeric_limits<Weight>::min)();
|
||||||
|
}
|
||||||
|
|
||||||
|
void DecreaseKey(NodeID node, Weight weight)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(std::numeric_limits<NodeID>::max() != node);
|
||||||
|
const Key &index = node_index.peek_index(node);
|
||||||
|
Key &key = inserted_nodes[index].key;
|
||||||
|
BOOST_ASSERT(key >= 0);
|
||||||
|
|
||||||
|
inserted_nodes[index].weight = weight;
|
||||||
|
heap[key].weight = weight;
|
||||||
|
Upheap(key);
|
||||||
|
CheckHeap();
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
class HeapNode
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
HeapNode(NodeID n, Key k, Weight w, Data d) : node(n), key(k), weight(w), data(d) {}
|
||||||
|
|
||||||
|
NodeID node;
|
||||||
|
Key key;
|
||||||
|
Weight weight;
|
||||||
|
Data data;
|
||||||
|
};
|
||||||
|
struct HeapElement
|
||||||
|
{
|
||||||
|
Key index;
|
||||||
|
Weight weight;
|
||||||
|
};
|
||||||
|
|
||||||
|
std::vector<HeapNode> inserted_nodes;
|
||||||
|
std::vector<HeapElement> heap;
|
||||||
|
IndexStorage node_index;
|
||||||
|
|
||||||
|
void Downheap(Key key)
|
||||||
|
{
|
||||||
|
const Key droppingIndex = heap[key].index;
|
||||||
|
const Weight weight = heap[key].weight;
|
||||||
|
const Key heap_size = static_cast<Key>(heap.size());
|
||||||
|
Key nextKey = key << 1;
|
||||||
|
while (nextKey < heap_size)
|
||||||
|
{
|
||||||
|
const Key nextKeyOther = nextKey + 1;
|
||||||
|
if ((nextKeyOther < heap_size) && (heap[nextKey].weight > heap[nextKeyOther].weight))
|
||||||
|
{
|
||||||
|
nextKey = nextKeyOther;
|
||||||
|
}
|
||||||
|
if (weight <= heap[nextKey].weight)
|
||||||
|
{
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
heap[key] = heap[nextKey];
|
||||||
|
inserted_nodes[heap[key].index].key = key;
|
||||||
|
key = nextKey;
|
||||||
|
nextKey <<= 1;
|
||||||
|
}
|
||||||
|
heap[key].index = droppingIndex;
|
||||||
|
heap[key].weight = weight;
|
||||||
|
inserted_nodes[droppingIndex].key = key;
|
||||||
|
}
|
||||||
|
|
||||||
|
void Upheap(Key key)
|
||||||
|
{
|
||||||
|
const Key risingIndex = heap[key].index;
|
||||||
|
const Weight weight = heap[key].weight;
|
||||||
|
Key nextKey = key >> 1;
|
||||||
|
while (heap[nextKey].weight > weight)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(nextKey != 0);
|
||||||
|
heap[key] = heap[nextKey];
|
||||||
|
inserted_nodes[heap[key].index].key = key;
|
||||||
|
key = nextKey;
|
||||||
|
nextKey >>= 1;
|
||||||
|
}
|
||||||
|
heap[key].index = risingIndex;
|
||||||
|
heap[key].weight = weight;
|
||||||
|
inserted_nodes[risingIndex].key = key;
|
||||||
|
}
|
||||||
|
|
||||||
|
void CheckHeap()
|
||||||
|
{
|
||||||
|
#ifndef NDEBUG
|
||||||
|
for (std::size_t i = 2; i < heap.size(); ++i)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(heap[i].weight >= heap[i >> 1].weight);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // BINARY_HEAP_H
|
85
data_structures/concurrent_queue.hpp
Normal file
85
data_structures/concurrent_queue.hpp
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef CONCURRENT_QUEUE_HPP
|
||||||
|
#define CONCURRENT_QUEUE_HPP
|
||||||
|
|
||||||
|
#include <boost/circular_buffer.hpp>
|
||||||
|
#include <condition_variable>
|
||||||
|
#include <mutex>
|
||||||
|
|
||||||
|
template <typename Data> class ConcurrentQueue
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
explicit ConcurrentQueue(const size_t max_size) : m_internal_queue(max_size) {}
|
||||||
|
|
||||||
|
inline void push(const Data &data)
|
||||||
|
{
|
||||||
|
std::unique_lock<std::mutex> lock(m_mutex);
|
||||||
|
m_not_full.wait(lock, [this]
|
||||||
|
{
|
||||||
|
return m_internal_queue.size() < m_internal_queue.capacity();
|
||||||
|
});
|
||||||
|
m_internal_queue.push_back(data);
|
||||||
|
m_not_empty.notify_one();
|
||||||
|
}
|
||||||
|
|
||||||
|
inline bool empty() const { return m_internal_queue.empty(); }
|
||||||
|
|
||||||
|
inline void wait_and_pop(Data &popped_value)
|
||||||
|
{
|
||||||
|
std::unique_lock<std::mutex> lock(m_mutex);
|
||||||
|
m_not_empty.wait(lock, [this]
|
||||||
|
{
|
||||||
|
return !m_internal_queue.empty();
|
||||||
|
});
|
||||||
|
popped_value = m_internal_queue.front();
|
||||||
|
m_internal_queue.pop_front();
|
||||||
|
m_not_full.notify_one();
|
||||||
|
}
|
||||||
|
|
||||||
|
inline bool try_pop(Data &popped_value)
|
||||||
|
{
|
||||||
|
std::unique_lock<std::mutex> lock(m_mutex);
|
||||||
|
if (m_internal_queue.empty())
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
popped_value = m_internal_queue.front();
|
||||||
|
m_internal_queue.pop_front();
|
||||||
|
m_not_full.notify_one();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
boost::circular_buffer<Data> m_internal_queue;
|
||||||
|
std::mutex m_mutex;
|
||||||
|
std::condition_variable m_not_empty;
|
||||||
|
std::condition_variable m_not_full;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // CONCURRENT_QUEUE_HPP
|
87
data_structures/coordinate.cpp
Normal file
87
data_structures/coordinate.cpp
Normal file
@ -0,0 +1,87 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "coordinate_calculation.hpp"
|
||||||
|
|
||||||
|
#ifndef NDEBUG
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
#endif
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#ifndef NDEBUG
|
||||||
|
#include <bitset>
|
||||||
|
#endif
|
||||||
|
#include <iostream>
|
||||||
|
#include <limits>
|
||||||
|
|
||||||
|
FixedPointCoordinate::FixedPointCoordinate()
|
||||||
|
: lat(std::numeric_limits<int>::min()), lon(std::numeric_limits<int>::min())
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointCoordinate::FixedPointCoordinate(int lat, int lon) : lat(lat), lon(lon)
|
||||||
|
{
|
||||||
|
#ifndef NDEBUG
|
||||||
|
if (0 != (std::abs(lat) >> 30))
|
||||||
|
{
|
||||||
|
std::bitset<32> y_coordinate_vector(lat);
|
||||||
|
SimpleLogger().Write(logDEBUG) << "broken lat: " << lat
|
||||||
|
<< ", bits: " << y_coordinate_vector;
|
||||||
|
}
|
||||||
|
if (0 != (std::abs(lon) >> 30))
|
||||||
|
{
|
||||||
|
std::bitset<32> x_coordinate_vector(lon);
|
||||||
|
SimpleLogger().Write(logDEBUG) << "broken lon: " << lon
|
||||||
|
<< ", bits: " << x_coordinate_vector;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FixedPointCoordinate::is_valid() const
|
||||||
|
{
|
||||||
|
if (lat > 90 * COORDINATE_PRECISION || lat < -90 * COORDINATE_PRECISION ||
|
||||||
|
lon > 180 * COORDINATE_PRECISION || lon < -180 * COORDINATE_PRECISION)
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FixedPointCoordinate::operator==(const FixedPointCoordinate &other) const
|
||||||
|
{
|
||||||
|
return lat == other.lat && lon == other.lon;
|
||||||
|
}
|
||||||
|
|
||||||
|
void FixedPointCoordinate::output(std::ostream &out) const
|
||||||
|
{
|
||||||
|
out << "(" << lat / COORDINATE_PRECISION << "," << lon / COORDINATE_PRECISION << ")";
|
||||||
|
}
|
||||||
|
|
||||||
|
float FixedPointCoordinate::bearing(const FixedPointCoordinate &other) const
|
||||||
|
{
|
||||||
|
return coordinate_calculation::bearing(other, *this);
|
||||||
|
}
|
268
data_structures/coordinate_calculation.cpp
Normal file
268
data_structures/coordinate_calculation.cpp
Normal file
@ -0,0 +1,268 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "coordinate_calculation.hpp"
|
||||||
|
|
||||||
|
#include "../util/mercator.hpp"
|
||||||
|
#include "../util/string_util.hpp"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <cmath>
|
||||||
|
|
||||||
|
#include <limits>
|
||||||
|
|
||||||
|
namespace
|
||||||
|
{
|
||||||
|
constexpr static const float RAD = 0.017453292519943295769236907684886f;
|
||||||
|
// earth radius varies between 6,356.750-6,378.135 km (3,949.901-3,963.189mi)
|
||||||
|
// The IUGG value for the equatorial radius is 6378.137 km (3963.19 miles)
|
||||||
|
constexpr static const float earth_radius = 6372797.560856f;
|
||||||
|
}
|
||||||
|
|
||||||
|
double coordinate_calculation::great_circle_distance(const int lat1,
|
||||||
|
const int lon1,
|
||||||
|
const int lat2,
|
||||||
|
const int lon2)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(lat1 != std::numeric_limits<int>::min());
|
||||||
|
BOOST_ASSERT(lon1 != std::numeric_limits<int>::min());
|
||||||
|
BOOST_ASSERT(lat2 != std::numeric_limits<int>::min());
|
||||||
|
BOOST_ASSERT(lon2 != std::numeric_limits<int>::min());
|
||||||
|
const double lt1 = lat1 / COORDINATE_PRECISION;
|
||||||
|
const double ln1 = lon1 / COORDINATE_PRECISION;
|
||||||
|
const double lt2 = lat2 / COORDINATE_PRECISION;
|
||||||
|
const double ln2 = lon2 / COORDINATE_PRECISION;
|
||||||
|
const double dlat1 = lt1 * (RAD);
|
||||||
|
|
||||||
|
const double dlong1 = ln1 * (RAD);
|
||||||
|
const double dlat2 = lt2 * (RAD);
|
||||||
|
const double dlong2 = ln2 * (RAD);
|
||||||
|
|
||||||
|
const double dLong = dlong1 - dlong2;
|
||||||
|
const double dLat = dlat1 - dlat2;
|
||||||
|
|
||||||
|
const double aHarv = std::pow(std::sin(dLat / 2.0), 2.0) +
|
||||||
|
std::cos(dlat1) * std::cos(dlat2) * std::pow(std::sin(dLong / 2.), 2);
|
||||||
|
const double cHarv = 2. * std::atan2(std::sqrt(aHarv), std::sqrt(1.0 - aHarv));
|
||||||
|
return earth_radius * cHarv;
|
||||||
|
}
|
||||||
|
|
||||||
|
double coordinate_calculation::great_circle_distance(const FixedPointCoordinate &coordinate_1,
|
||||||
|
const FixedPointCoordinate &coordinate_2)
|
||||||
|
{
|
||||||
|
return great_circle_distance(coordinate_1.lat, coordinate_1.lon, coordinate_2.lat,
|
||||||
|
coordinate_2.lon);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::euclidean_distance(const FixedPointCoordinate &coordinate_1,
|
||||||
|
const FixedPointCoordinate &coordinate_2)
|
||||||
|
{
|
||||||
|
return euclidean_distance(coordinate_1.lat, coordinate_1.lon, coordinate_2.lat,
|
||||||
|
coordinate_2.lon);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::euclidean_distance(const int lat1,
|
||||||
|
const int lon1,
|
||||||
|
const int lat2,
|
||||||
|
const int lon2)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(lat1 != std::numeric_limits<int>::min());
|
||||||
|
BOOST_ASSERT(lon1 != std::numeric_limits<int>::min());
|
||||||
|
BOOST_ASSERT(lat2 != std::numeric_limits<int>::min());
|
||||||
|
BOOST_ASSERT(lon2 != std::numeric_limits<int>::min());
|
||||||
|
|
||||||
|
const float float_lat1 = (lat1 / COORDINATE_PRECISION) * RAD;
|
||||||
|
const float float_lon1 = (lon1 / COORDINATE_PRECISION) * RAD;
|
||||||
|
const float float_lat2 = (lat2 / COORDINATE_PRECISION) * RAD;
|
||||||
|
const float float_lon2 = (lon2 / COORDINATE_PRECISION) * RAD;
|
||||||
|
|
||||||
|
const float x_value = (float_lon2 - float_lon1) * std::cos((float_lat1 + float_lat2) / 2.f);
|
||||||
|
const float y_value = float_lat2 - float_lat1;
|
||||||
|
return std::hypot(x_value, y_value) * earth_radius;
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::perpendicular_distance(const FixedPointCoordinate &source_coordinate,
|
||||||
|
const FixedPointCoordinate &target_coordinate,
|
||||||
|
const FixedPointCoordinate &query_location)
|
||||||
|
{
|
||||||
|
float ratio;
|
||||||
|
FixedPointCoordinate nearest_location;
|
||||||
|
|
||||||
|
return perpendicular_distance(source_coordinate, target_coordinate, query_location,
|
||||||
|
nearest_location, ratio);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::perpendicular_distance(const FixedPointCoordinate &segment_source,
|
||||||
|
const FixedPointCoordinate &segment_target,
|
||||||
|
const FixedPointCoordinate &query_location,
|
||||||
|
FixedPointCoordinate &nearest_location,
|
||||||
|
float &ratio)
|
||||||
|
{
|
||||||
|
return perpendicular_distance_from_projected_coordinate(
|
||||||
|
segment_source, segment_target, query_location,
|
||||||
|
{mercator::lat2y(query_location.lat / COORDINATE_PRECISION),
|
||||||
|
query_location.lon / COORDINATE_PRECISION},
|
||||||
|
nearest_location, ratio);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::perpendicular_distance_from_projected_coordinate(
|
||||||
|
const FixedPointCoordinate &source_coordinate,
|
||||||
|
const FixedPointCoordinate &target_coordinate,
|
||||||
|
const FixedPointCoordinate &query_location,
|
||||||
|
const std::pair<double, double> &projected_coordinate)
|
||||||
|
{
|
||||||
|
float ratio;
|
||||||
|
FixedPointCoordinate nearest_location;
|
||||||
|
|
||||||
|
return perpendicular_distance_from_projected_coordinate(source_coordinate, target_coordinate,
|
||||||
|
query_location, projected_coordinate,
|
||||||
|
nearest_location, ratio);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::perpendicular_distance_from_projected_coordinate(
|
||||||
|
const FixedPointCoordinate &segment_source,
|
||||||
|
const FixedPointCoordinate &segment_target,
|
||||||
|
const FixedPointCoordinate &query_location,
|
||||||
|
const std::pair<double, double> &projected_coordinate,
|
||||||
|
FixedPointCoordinate &nearest_location,
|
||||||
|
float &ratio)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(query_location.is_valid());
|
||||||
|
|
||||||
|
// initialize values
|
||||||
|
const double x = projected_coordinate.first;
|
||||||
|
const double y = projected_coordinate.second;
|
||||||
|
const double a = mercator::lat2y(segment_source.lat / COORDINATE_PRECISION);
|
||||||
|
const double b = segment_source.lon / COORDINATE_PRECISION;
|
||||||
|
const double c = mercator::lat2y(segment_target.lat / COORDINATE_PRECISION);
|
||||||
|
const double d = segment_target.lon / COORDINATE_PRECISION;
|
||||||
|
double p, q /*,mX*/, nY;
|
||||||
|
if (std::abs(a - c) > std::numeric_limits<double>::epsilon())
|
||||||
|
{
|
||||||
|
const double m = (d - b) / (c - a); // slope
|
||||||
|
// Projection of (x,y) on line joining (a,b) and (c,d)
|
||||||
|
p = ((x + (m * y)) + (m * m * a - m * b)) / (1.f + m * m);
|
||||||
|
q = b + m * (p - a);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
p = c;
|
||||||
|
q = y;
|
||||||
|
}
|
||||||
|
nY = (d * p - c * q) / (a * d - b * c);
|
||||||
|
|
||||||
|
// discretize the result to coordinate precision. it's a hack!
|
||||||
|
if (std::abs(nY) < (1.f / COORDINATE_PRECISION))
|
||||||
|
{
|
||||||
|
nY = 0.f;
|
||||||
|
}
|
||||||
|
|
||||||
|
// compute ratio
|
||||||
|
ratio =
|
||||||
|
static_cast<float>((p - nY * a) / c); // These values are actually n/m+n and m/m+n , we need
|
||||||
|
// not calculate the explicit values of m an n as we
|
||||||
|
// are just interested in the ratio
|
||||||
|
if (std::isnan(ratio))
|
||||||
|
{
|
||||||
|
ratio = (segment_target == query_location ? 1.f : 0.f);
|
||||||
|
}
|
||||||
|
else if (std::abs(ratio) <= std::numeric_limits<float>::epsilon())
|
||||||
|
{
|
||||||
|
ratio = 0.f;
|
||||||
|
}
|
||||||
|
else if (std::abs(ratio - 1.f) <= std::numeric_limits<float>::epsilon())
|
||||||
|
{
|
||||||
|
ratio = 1.f;
|
||||||
|
}
|
||||||
|
|
||||||
|
// compute nearest location
|
||||||
|
BOOST_ASSERT(!std::isnan(ratio));
|
||||||
|
if (ratio <= 0.f)
|
||||||
|
{
|
||||||
|
nearest_location = segment_source;
|
||||||
|
}
|
||||||
|
else if (ratio >= 1.f)
|
||||||
|
{
|
||||||
|
nearest_location = segment_target;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// point lies in between
|
||||||
|
nearest_location.lat = static_cast<int>(mercator::y2lat(p) * COORDINATE_PRECISION);
|
||||||
|
nearest_location.lon = static_cast<int>(q * COORDINATE_PRECISION);
|
||||||
|
}
|
||||||
|
BOOST_ASSERT(nearest_location.is_valid());
|
||||||
|
|
||||||
|
const float approximate_distance =
|
||||||
|
coordinate_calculation::euclidean_distance(query_location, nearest_location);
|
||||||
|
BOOST_ASSERT(0.f <= approximate_distance);
|
||||||
|
return approximate_distance;
|
||||||
|
}
|
||||||
|
|
||||||
|
void coordinate_calculation::lat_or_lon_to_string(const int value, std::string &output)
|
||||||
|
{
|
||||||
|
char buffer[12];
|
||||||
|
buffer[11] = 0; // zero termination
|
||||||
|
output = printInt<11, 6>(buffer, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::deg_to_rad(const float degree)
|
||||||
|
{
|
||||||
|
return degree * (static_cast<float>(M_PI) / 180.f);
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::rad_to_deg(const float radian)
|
||||||
|
{
|
||||||
|
return radian * (180.f * static_cast<float>(M_1_PI));
|
||||||
|
}
|
||||||
|
|
||||||
|
float coordinate_calculation::bearing(const FixedPointCoordinate &first_coordinate,
|
||||||
|
const FixedPointCoordinate &second_coordinate)
|
||||||
|
{
|
||||||
|
const float lon_diff =
|
||||||
|
second_coordinate.lon / COORDINATE_PRECISION - first_coordinate.lon / COORDINATE_PRECISION;
|
||||||
|
const float lon_delta = deg_to_rad(lon_diff);
|
||||||
|
const float lat1 = deg_to_rad(first_coordinate.lat / COORDINATE_PRECISION);
|
||||||
|
const float lat2 = deg_to_rad(second_coordinate.lat / COORDINATE_PRECISION);
|
||||||
|
const float y = std::sin(lon_delta) * std::cos(lat2);
|
||||||
|
const float x =
|
||||||
|
std::cos(lat1) * std::sin(lat2) - std::sin(lat1) * std::cos(lat2) * std::cos(lon_delta);
|
||||||
|
float result = rad_to_deg(std::atan2(y, x));
|
||||||
|
while (result < 0.f)
|
||||||
|
{
|
||||||
|
result += 360.f;
|
||||||
|
}
|
||||||
|
|
||||||
|
while (result >= 360.f)
|
||||||
|
{
|
||||||
|
result -= 360.f;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
82
data_structures/coordinate_calculation.hpp
Normal file
82
data_structures/coordinate_calculation.hpp
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef COORDINATE_CALCULATION
|
||||||
|
#define COORDINATE_CALCULATION
|
||||||
|
|
||||||
|
struct FixedPointCoordinate;
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <utility>
|
||||||
|
|
||||||
|
struct coordinate_calculation
|
||||||
|
{
|
||||||
|
static double
|
||||||
|
great_circle_distance(const int lat1, const int lon1, const int lat2, const int lon2);
|
||||||
|
|
||||||
|
static double great_circle_distance(const FixedPointCoordinate &first_coordinate,
|
||||||
|
const FixedPointCoordinate &second_coordinate);
|
||||||
|
|
||||||
|
static float euclidean_distance(const FixedPointCoordinate &first_coordinate,
|
||||||
|
const FixedPointCoordinate &second_coordinate);
|
||||||
|
|
||||||
|
static float euclidean_distance(const int lat1, const int lon1, const int lat2, const int lon2);
|
||||||
|
|
||||||
|
static void lat_or_lon_to_string(const int value, std::string &output);
|
||||||
|
|
||||||
|
static float perpendicular_distance(const FixedPointCoordinate &segment_source,
|
||||||
|
const FixedPointCoordinate &segment_target,
|
||||||
|
const FixedPointCoordinate &query_location);
|
||||||
|
|
||||||
|
static float perpendicular_distance(const FixedPointCoordinate &segment_source,
|
||||||
|
const FixedPointCoordinate &segment_target,
|
||||||
|
const FixedPointCoordinate &query_location,
|
||||||
|
FixedPointCoordinate &nearest_location,
|
||||||
|
float &ratio);
|
||||||
|
|
||||||
|
static float perpendicular_distance_from_projected_coordinate(
|
||||||
|
const FixedPointCoordinate &segment_source,
|
||||||
|
const FixedPointCoordinate &segment_target,
|
||||||
|
const FixedPointCoordinate &query_location,
|
||||||
|
const std::pair<double, double> &projected_coordinate);
|
||||||
|
|
||||||
|
static float perpendicular_distance_from_projected_coordinate(
|
||||||
|
const FixedPointCoordinate &segment_source,
|
||||||
|
const FixedPointCoordinate &segment_target,
|
||||||
|
const FixedPointCoordinate &query_location,
|
||||||
|
const std::pair<double, double> &projected_coordinate,
|
||||||
|
FixedPointCoordinate &nearest_location,
|
||||||
|
float &ratio);
|
||||||
|
|
||||||
|
static float deg_to_rad(const float degree);
|
||||||
|
static float rad_to_deg(const float radian);
|
||||||
|
|
||||||
|
static float bearing(const FixedPointCoordinate &first_coordinate,
|
||||||
|
const FixedPointCoordinate &second_coordinate);
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // COORDINATE_CALCULATION
|
@ -1,45 +1,41 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
#ifndef DEALLOCATING_VECTOR_HPP
|
#ifndef DEALLOCATING_VECTOR_HPP
|
||||||
#define DEALLOCATING_VECTOR_HPP
|
#define DEALLOCATING_VECTOR_HPP
|
||||||
|
|
||||||
#include "storage/io_fwd.hpp"
|
#include "../util/integer_range.hpp"
|
||||||
#include "util/integer_range.hpp"
|
|
||||||
|
|
||||||
#include <boost/iterator/iterator_facade.hpp>
|
#include <boost/iterator/iterator_facade.hpp>
|
||||||
|
|
||||||
#include <algorithm>
|
|
||||||
#include <limits>
|
#include <limits>
|
||||||
#include <utility>
|
#include <utility>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
namespace osrm::util
|
|
||||||
{
|
|
||||||
template <typename ElementT> struct ConstDeallocatingVectorIteratorState
|
|
||||||
{
|
|
||||||
ConstDeallocatingVectorIteratorState()
|
|
||||||
: index(std::numeric_limits<std::size_t>::max()), bucket_list(nullptr)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
explicit ConstDeallocatingVectorIteratorState(const ConstDeallocatingVectorIteratorState &r)
|
|
||||||
: index(r.index), bucket_list(r.bucket_list)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
explicit ConstDeallocatingVectorIteratorState(const std::size_t idx,
|
|
||||||
const std::vector<ElementT *> *input_list)
|
|
||||||
: index(idx), bucket_list(input_list)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
std::size_t index;
|
|
||||||
const std::vector<ElementT *> *bucket_list;
|
|
||||||
|
|
||||||
ConstDeallocatingVectorIteratorState &
|
|
||||||
operator=(const ConstDeallocatingVectorIteratorState &other)
|
|
||||||
{
|
|
||||||
index = other.index;
|
|
||||||
bucket_list = other.bucket_list;
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
template <typename ElementT> struct DeallocatingVectorIteratorState
|
template <typename ElementT> struct DeallocatingVectorIteratorState
|
||||||
{
|
{
|
||||||
DeallocatingVectorIteratorState()
|
DeallocatingVectorIteratorState()
|
||||||
@ -66,55 +62,6 @@ template <typename ElementT> struct DeallocatingVectorIteratorState
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK>
|
|
||||||
class ConstDeallocatingVectorIterator
|
|
||||||
: public boost::iterator_facade<ConstDeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>,
|
|
||||||
ElementT,
|
|
||||||
std::random_access_iterator_tag>
|
|
||||||
{
|
|
||||||
ConstDeallocatingVectorIteratorState<ElementT> current_state;
|
|
||||||
|
|
||||||
public:
|
|
||||||
ConstDeallocatingVectorIterator() {}
|
|
||||||
ConstDeallocatingVectorIterator(std::size_t idx, const std::vector<ElementT *> *input_list)
|
|
||||||
: current_state(idx, input_list)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
friend class boost::iterator_core_access;
|
|
||||||
|
|
||||||
void advance(std::size_t n) { current_state.index += n; }
|
|
||||||
|
|
||||||
void increment() { advance(1); }
|
|
||||||
|
|
||||||
void decrement() { advance(-1); }
|
|
||||||
|
|
||||||
bool equal(ConstDeallocatingVectorIterator const &other) const
|
|
||||||
{
|
|
||||||
return current_state.index == other.current_state.index;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::ptrdiff_t distance_to(ConstDeallocatingVectorIterator const &other) const
|
|
||||||
{
|
|
||||||
// it is important to implement it 'other minus this'. otherwise sorting breaks
|
|
||||||
return other.current_state.index - current_state.index;
|
|
||||||
}
|
|
||||||
|
|
||||||
ElementT &dereference() const
|
|
||||||
{
|
|
||||||
const std::size_t current_bucket = current_state.index / ELEMENTS_PER_BLOCK;
|
|
||||||
const std::size_t current_index = current_state.index % ELEMENTS_PER_BLOCK;
|
|
||||||
return (current_state.bucket_list->at(current_bucket)[current_index]);
|
|
||||||
}
|
|
||||||
|
|
||||||
ElementT &operator[](const std::size_t index) const
|
|
||||||
{
|
|
||||||
const std::size_t current_bucket = (index + current_state.index) / ELEMENTS_PER_BLOCK;
|
|
||||||
const std::size_t current_index = (index + current_state.index) % ELEMENTS_PER_BLOCK;
|
|
||||||
return (current_state.bucket_list->at(current_bucket)[current_index]);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK>
|
template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK>
|
||||||
class DeallocatingVectorIterator
|
class DeallocatingVectorIterator
|
||||||
: public boost::iterator_facade<DeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>,
|
: public boost::iterator_facade<DeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>,
|
||||||
@ -164,67 +111,78 @@ class DeallocatingVectorIterator
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
template <typename ElementT> class DeallocatingVector;
|
template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK>
|
||||||
|
class DeallocatingVectorRemoveIterator
|
||||||
template <typename T> void swap(DeallocatingVector<T> &lhs, DeallocatingVector<T> &rhs) noexcept;
|
: public boost::iterator_facade<DeallocatingVectorRemoveIterator<ElementT, ELEMENTS_PER_BLOCK>,
|
||||||
|
ElementT,
|
||||||
template <typename ElementT> class DeallocatingVector
|
boost::forward_traversal_tag>
|
||||||
|
{
|
||||||
|
DeallocatingVectorIteratorState<ElementT> current_state;
|
||||||
|
|
||||||
|
public:
|
||||||
|
DeallocatingVectorRemoveIterator(std::size_t idx, std::vector<ElementT *> *input_list)
|
||||||
|
: current_state(idx, input_list)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
friend class boost::iterator_core_access;
|
||||||
|
|
||||||
|
void increment()
|
||||||
|
{
|
||||||
|
const std::size_t old_bucket = current_state.index / ELEMENTS_PER_BLOCK;
|
||||||
|
|
||||||
|
++current_state.index;
|
||||||
|
const std::size_t new_bucket = current_state.index / ELEMENTS_PER_BLOCK;
|
||||||
|
if (old_bucket != new_bucket)
|
||||||
|
{
|
||||||
|
// delete old bucket entry
|
||||||
|
if (nullptr != current_state.bucket_list->at(old_bucket))
|
||||||
|
{
|
||||||
|
delete[] current_state.bucket_list->at(old_bucket);
|
||||||
|
current_state.bucket_list->at(old_bucket) = nullptr;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bool equal(DeallocatingVectorRemoveIterator const &other) const
|
||||||
|
{
|
||||||
|
return current_state.index == other.current_state.index;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::ptrdiff_t distance_to(DeallocatingVectorRemoveIterator const &other) const
|
||||||
|
{
|
||||||
|
return other.current_state.index - current_state.index;
|
||||||
|
}
|
||||||
|
|
||||||
|
ElementT &dereference() const
|
||||||
|
{
|
||||||
|
const std::size_t current_bucket = current_state.index / ELEMENTS_PER_BLOCK;
|
||||||
|
const std::size_t current_index = current_state.index % ELEMENTS_PER_BLOCK;
|
||||||
|
return (current_state.bucket_list->at(current_bucket)[current_index]);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK = 8388608 / sizeof(ElementT)>
|
||||||
|
class DeallocatingVector
|
||||||
{
|
{
|
||||||
static constexpr std::size_t ELEMENTS_PER_BLOCK = 8388608 / sizeof(ElementT);
|
|
||||||
std::size_t current_size;
|
std::size_t current_size;
|
||||||
std::vector<ElementT *> bucket_list;
|
std::vector<ElementT *> bucket_list;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
using value_type = ElementT;
|
|
||||||
using iterator = DeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>;
|
using iterator = DeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>;
|
||||||
using const_iterator = ConstDeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>;
|
using const_iterator = DeallocatingVectorIterator<ElementT, ELEMENTS_PER_BLOCK>;
|
||||||
|
|
||||||
|
// this forward-only iterator deallocates all buckets that have been visited
|
||||||
|
using deallocation_iterator = DeallocatingVectorRemoveIterator<ElementT, ELEMENTS_PER_BLOCK>;
|
||||||
|
|
||||||
DeallocatingVector() : current_size(0)
|
DeallocatingVector() : current_size(0)
|
||||||
{
|
{
|
||||||
bucket_list.emplace_back(new ElementT[ELEMENTS_PER_BLOCK]);
|
bucket_list.emplace_back(new ElementT[ELEMENTS_PER_BLOCK]);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Performs a deep copy of the buckets
|
|
||||||
DeallocatingVector(const DeallocatingVector &other)
|
|
||||||
{
|
|
||||||
bucket_list.resize(other.bucket_list.size());
|
|
||||||
for (const auto index : util::irange<std::size_t>(0, bucket_list.size()))
|
|
||||||
{
|
|
||||||
bucket_list[index] = new ElementT[ELEMENTS_PER_BLOCK];
|
|
||||||
std::copy_n(other.bucket_list[index], ELEMENTS_PER_BLOCK, bucket_list[index]);
|
|
||||||
}
|
|
||||||
current_size = other.current_size;
|
|
||||||
}
|
|
||||||
// Note we capture other by value
|
|
||||||
DeallocatingVector &operator=(const DeallocatingVector &other)
|
|
||||||
{
|
|
||||||
auto copy_other = other;
|
|
||||||
swap(copy_other);
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
|
|
||||||
// moving is fine
|
|
||||||
DeallocatingVector(DeallocatingVector &&other) noexcept { swap(other); }
|
|
||||||
DeallocatingVector &operator=(DeallocatingVector &&other) noexcept
|
|
||||||
{
|
|
||||||
swap(other);
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
|
|
||||||
DeallocatingVector(std::initializer_list<ElementT> elements) : DeallocatingVector()
|
|
||||||
{
|
|
||||||
for (auto &&elem : elements)
|
|
||||||
{
|
|
||||||
emplace_back(std::move(elem));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
~DeallocatingVector() { clear(); }
|
~DeallocatingVector() { clear(); }
|
||||||
|
|
||||||
friend void swap<>(DeallocatingVector<ElementT> &lhs,
|
void swap(DeallocatingVector<ElementT, ELEMENTS_PER_BLOCK> &other)
|
||||||
DeallocatingVector<ElementT> &rhs) noexcept;
|
|
||||||
|
|
||||||
void swap(DeallocatingVector<ElementT> &other) noexcept
|
|
||||||
{
|
{
|
||||||
std::swap(current_size, other.current_size);
|
std::swap(current_size, other.current_size);
|
||||||
bucket_list.swap(other.bucket_list);
|
bucket_list.swap(other.bucket_list);
|
||||||
@ -235,7 +193,11 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
// Delete[]'ing ptr's to all Buckets
|
// Delete[]'ing ptr's to all Buckets
|
||||||
for (auto bucket : bucket_list)
|
for (auto bucket : bucket_list)
|
||||||
{
|
{
|
||||||
delete[] bucket;
|
if (nullptr != bucket)
|
||||||
|
{
|
||||||
|
delete[] bucket;
|
||||||
|
bucket = nullptr;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
bucket_list.clear();
|
bucket_list.clear();
|
||||||
bucket_list.shrink_to_fit();
|
bucket_list.shrink_to_fit();
|
||||||
@ -255,7 +217,7 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
++current_size;
|
++current_size;
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename... Ts> void emplace_back(Ts &&...element)
|
template <typename... Ts> void emplace_back(Ts &&... element)
|
||||||
{
|
{
|
||||||
const std::size_t current_capacity = capacity();
|
const std::size_t current_capacity = capacity();
|
||||||
if (current_size == current_capacity)
|
if (current_size == current_capacity)
|
||||||
@ -268,9 +230,7 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
++current_size;
|
++current_size;
|
||||||
}
|
}
|
||||||
|
|
||||||
void reserve(const std::size_t) const
|
void reserve(const std::size_t) const { /* don't do anything */}
|
||||||
{ /* don't do anything */
|
|
||||||
}
|
|
||||||
|
|
||||||
void resize(const std::size_t new_size)
|
void resize(const std::size_t new_size)
|
||||||
{
|
{
|
||||||
@ -284,7 +244,8 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
else
|
else
|
||||||
{ // down-size
|
{ // down-size
|
||||||
const std::size_t number_of_necessary_buckets = 1 + (new_size / ELEMENTS_PER_BLOCK);
|
const std::size_t number_of_necessary_buckets = 1 + (new_size / ELEMENTS_PER_BLOCK);
|
||||||
for (const auto bucket_index : irange(number_of_necessary_buckets, bucket_list.size()))
|
for (const auto bucket_index :
|
||||||
|
osrm::irange(number_of_necessary_buckets, bucket_list.size()))
|
||||||
{
|
{
|
||||||
if (nullptr != bucket_list[bucket_index])
|
if (nullptr != bucket_list[bucket_index])
|
||||||
{
|
{
|
||||||
@ -304,6 +265,13 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
|
|
||||||
iterator end() { return iterator(size(), &bucket_list); }
|
iterator end() { return iterator(size(), &bucket_list); }
|
||||||
|
|
||||||
|
deallocation_iterator dbegin()
|
||||||
|
{
|
||||||
|
return deallocation_iterator(static_cast<std::size_t>(0), &bucket_list);
|
||||||
|
}
|
||||||
|
|
||||||
|
deallocation_iterator dend() { return deallocation_iterator(size(), &bucket_list); }
|
||||||
|
|
||||||
const_iterator begin() const
|
const_iterator begin() const
|
||||||
{
|
{
|
||||||
return const_iterator(static_cast<std::size_t>(0), &bucket_list);
|
return const_iterator(static_cast<std::size_t>(0), &bucket_list);
|
||||||
@ -327,8 +295,8 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
|
|
||||||
ElementT &back() const
|
ElementT &back() const
|
||||||
{
|
{
|
||||||
const std::size_t _bucket = (current_size - 1) / ELEMENTS_PER_BLOCK;
|
const std::size_t _bucket = current_size / ELEMENTS_PER_BLOCK;
|
||||||
const std::size_t _index = (current_size - 1) % ELEMENTS_PER_BLOCK;
|
const std::size_t _index = current_size % ELEMENTS_PER_BLOCK;
|
||||||
return (bucket_list[_bucket][_index]);
|
return (bucket_list[_bucket][_index]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -343,10 +311,4 @@ template <typename ElementT> class DeallocatingVector
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
template <typename T> void swap(DeallocatingVector<T> &lhs, DeallocatingVector<T> &rhs) noexcept
|
|
||||||
{
|
|
||||||
lhs.swap(rhs);
|
|
||||||
}
|
|
||||||
} // namespace osrm::util
|
|
||||||
|
|
||||||
#endif /* DEALLOCATING_VECTOR_HPP */
|
#endif /* DEALLOCATING_VECTOR_HPP */
|
@ -1,12 +1,36 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
#ifndef DYNAMICGRAPH_HPP
|
#ifndef DYNAMICGRAPH_HPP
|
||||||
#define DYNAMICGRAPH_HPP
|
#define DYNAMICGRAPH_HPP
|
||||||
|
|
||||||
#include "util/deallocating_vector.hpp"
|
#include "deallocating_vector.hpp"
|
||||||
#include "util/exception.hpp"
|
#include "../util/integer_range.hpp"
|
||||||
#include "util/exception_utils.hpp"
|
#include "../typedefs.h"
|
||||||
#include "util/integer_range.hpp"
|
|
||||||
#include "util/permutation.hpp"
|
|
||||||
#include "util/typedefs.hpp"
|
|
||||||
|
|
||||||
#include <boost/assert.hpp>
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
@ -18,41 +42,13 @@
|
|||||||
#include <tuple>
|
#include <tuple>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
namespace osrm::util
|
|
||||||
{
|
|
||||||
namespace detail
|
|
||||||
{
|
|
||||||
// These types need to live outside of DynamicGraph
|
|
||||||
// to be not dependable. We need this for transforming graphs
|
|
||||||
// with different data.
|
|
||||||
|
|
||||||
template <typename EdgeIterator> struct DynamicNode
|
|
||||||
{
|
|
||||||
// index of the first edge
|
|
||||||
EdgeIterator first_edge;
|
|
||||||
// amount of edges
|
|
||||||
unsigned edges;
|
|
||||||
};
|
|
||||||
|
|
||||||
template <typename NodeIterator, typename EdgeDataT> struct DynamicEdge
|
|
||||||
{
|
|
||||||
NodeIterator target;
|
|
||||||
EdgeDataT data;
|
|
||||||
};
|
|
||||||
} // namespace detail
|
|
||||||
|
|
||||||
template <typename EdgeDataT> class DynamicGraph
|
template <typename EdgeDataT> class DynamicGraph
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
using EdgeData = EdgeDataT;
|
using EdgeData = EdgeDataT;
|
||||||
using NodeIterator = std::uint32_t;
|
using NodeIterator = unsigned;
|
||||||
using EdgeIterator = std::uint32_t;
|
using EdgeIterator = unsigned;
|
||||||
using EdgeRange = range<EdgeIterator>;
|
using EdgeRange = osrm::range<EdgeIterator>;
|
||||||
|
|
||||||
using Node = detail::DynamicNode<EdgeIterator>;
|
|
||||||
using Edge = detail::DynamicEdge<NodeIterator, EdgeDataT>;
|
|
||||||
|
|
||||||
template <typename E> friend class DynamicGraph;
|
|
||||||
|
|
||||||
class InputEdge
|
class InputEdge
|
||||||
{
|
{
|
||||||
@ -68,7 +64,7 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
}
|
}
|
||||||
|
|
||||||
template <typename... Ts>
|
template <typename... Ts>
|
||||||
InputEdge(NodeIterator source, NodeIterator target, Ts &&...data)
|
InputEdge(NodeIterator source, NodeIterator target, Ts &&... data)
|
||||||
: source(source), target(target), data(std::forward<Ts>(data)...)
|
: source(source), target(target), data(std::forward<Ts>(data)...)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
@ -79,8 +75,6 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
DynamicGraph() : DynamicGraph(0) {}
|
|
||||||
|
|
||||||
// Constructs an empty graph with a given number of nodes.
|
// Constructs an empty graph with a given number of nodes.
|
||||||
explicit DynamicGraph(NodeIterator nodes) : number_of_nodes(nodes), number_of_edges(0)
|
explicit DynamicGraph(NodeIterator nodes) : number_of_nodes(nodes), number_of_edges(0)
|
||||||
{
|
{
|
||||||
@ -96,16 +90,13 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
*/
|
*/
|
||||||
template <class ContainerT> DynamicGraph(const NodeIterator nodes, const ContainerT &graph)
|
template <class ContainerT> DynamicGraph(const NodeIterator nodes, const ContainerT &graph)
|
||||||
{
|
{
|
||||||
// we need to cast here because DeallocatingVector does not have a valid const iterator
|
|
||||||
BOOST_ASSERT(std::is_sorted(const_cast<ContainerT &>(graph).begin(),
|
|
||||||
const_cast<ContainerT &>(graph).end()));
|
|
||||||
|
|
||||||
number_of_nodes = nodes;
|
number_of_nodes = nodes;
|
||||||
number_of_edges = static_cast<EdgeIterator>(graph.size());
|
number_of_edges = static_cast<EdgeIterator>(graph.size());
|
||||||
node_array.resize(number_of_nodes);
|
// node_array.reserve(number_of_nodes + 1);
|
||||||
|
node_array.resize(number_of_nodes + 1);
|
||||||
EdgeIterator edge = 0;
|
EdgeIterator edge = 0;
|
||||||
EdgeIterator position = 0;
|
EdgeIterator position = 0;
|
||||||
for (const auto node : irange(0u, number_of_nodes))
|
for (const auto node : osrm::irange(0u, number_of_nodes))
|
||||||
{
|
{
|
||||||
EdgeIterator last_edge = edge;
|
EdgeIterator last_edge = edge;
|
||||||
while (edge < number_of_edges && graph[edge].source == node)
|
while (edge < number_of_edges && graph[edge].source == node)
|
||||||
@ -116,118 +107,36 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
node_array[node].edges = edge - last_edge;
|
node_array[node].edges = edge - last_edge;
|
||||||
position += node_array[node].edges;
|
position += node_array[node].edges;
|
||||||
}
|
}
|
||||||
|
node_array.back().first_edge = position;
|
||||||
edge_list.reserve(static_cast<std::size_t>(edge_list.size() * 1.1));
|
edge_list.reserve(static_cast<std::size_t>(edge_list.size() * 1.1));
|
||||||
edge_list.resize(position);
|
edge_list.resize(position);
|
||||||
edge = 0;
|
edge = 0;
|
||||||
for (const auto node : irange(0u, number_of_nodes))
|
for (const auto node : osrm::irange(0u, number_of_nodes))
|
||||||
{
|
{
|
||||||
for (const auto i : irange(node_array[node].first_edge,
|
for (const auto i : osrm::irange(node_array[node].first_edge,
|
||||||
node_array[node].first_edge + node_array[node].edges))
|
node_array[node].first_edge + node_array[node].edges))
|
||||||
{
|
{
|
||||||
edge_list[i].target = graph[edge].target;
|
edge_list[i].target = graph[edge].target;
|
||||||
BOOST_ASSERT(edge_list[i].target < number_of_nodes);
|
|
||||||
edge_list[i].data = graph[edge].data;
|
edge_list[i].data = graph[edge].data;
|
||||||
++edge;
|
++edge;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
BOOST_ASSERT(node_array.size() == number_of_nodes);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Copy&move for the same data
|
~DynamicGraph() {}
|
||||||
//
|
|
||||||
|
|
||||||
DynamicGraph(const DynamicGraph &other)
|
|
||||||
{
|
|
||||||
number_of_nodes = other.number_of_nodes;
|
|
||||||
// atomics can't be moved this is why we need an own constructor
|
|
||||||
number_of_edges = static_cast<std::uint32_t>(other.number_of_edges);
|
|
||||||
|
|
||||||
node_array = other.node_array;
|
|
||||||
edge_list = other.edge_list;
|
|
||||||
}
|
|
||||||
|
|
||||||
DynamicGraph &operator=(const DynamicGraph &other)
|
|
||||||
{
|
|
||||||
auto copy_other = other;
|
|
||||||
*this = std::move(other);
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
|
|
||||||
DynamicGraph(DynamicGraph &&other) noexcept
|
|
||||||
{
|
|
||||||
number_of_nodes = other.number_of_nodes;
|
|
||||||
// atomics can't be moved this is why we need an own constructor
|
|
||||||
number_of_edges = static_cast<std::uint32_t>(other.number_of_edges);
|
|
||||||
|
|
||||||
node_array = std::move(other.node_array);
|
|
||||||
edge_list = std::move(other.edge_list);
|
|
||||||
}
|
|
||||||
|
|
||||||
DynamicGraph &operator=(DynamicGraph &&other) noexcept
|
|
||||||
{
|
|
||||||
number_of_nodes = other.number_of_nodes;
|
|
||||||
// atomics can't be moved this is why we need an own constructor
|
|
||||||
number_of_edges = static_cast<std::uint32_t>(other.number_of_edges);
|
|
||||||
|
|
||||||
node_array = std::move(other.node_array);
|
|
||||||
edge_list = std::move(other.edge_list);
|
|
||||||
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Removes all edges to and from nodes for which filter(node_id) returns false
|
|
||||||
template <typename Pred> auto Filter(Pred filter) const &
|
|
||||||
{
|
|
||||||
BOOST_ASSERT(node_array.size() == number_of_nodes);
|
|
||||||
|
|
||||||
DynamicGraph other;
|
|
||||||
|
|
||||||
other.number_of_nodes = number_of_nodes;
|
|
||||||
other.number_of_edges = static_cast<std::uint32_t>(number_of_edges);
|
|
||||||
other.edge_list.reserve(edge_list.size());
|
|
||||||
other.node_array.resize(node_array.size());
|
|
||||||
|
|
||||||
NodeID node_id = 0;
|
|
||||||
std::transform(node_array.begin(),
|
|
||||||
node_array.end(),
|
|
||||||
other.node_array.begin(),
|
|
||||||
[&](const Node &node)
|
|
||||||
{
|
|
||||||
const EdgeIterator first_edge = other.edge_list.size();
|
|
||||||
|
|
||||||
BOOST_ASSERT(node_id < number_of_nodes);
|
|
||||||
if (filter(node_id++))
|
|
||||||
{
|
|
||||||
std::copy_if(edge_list.begin() + node.first_edge,
|
|
||||||
edge_list.begin() + node.first_edge + node.edges,
|
|
||||||
std::back_inserter(other.edge_list),
|
|
||||||
[&](const auto &edge) { return filter(edge.target); });
|
|
||||||
const unsigned num_edges = other.edge_list.size() - first_edge;
|
|
||||||
return Node{first_edge, num_edges};
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
return Node{first_edge, 0};
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return other;
|
|
||||||
}
|
|
||||||
|
|
||||||
unsigned GetNumberOfNodes() const { return number_of_nodes; }
|
unsigned GetNumberOfNodes() const { return number_of_nodes; }
|
||||||
|
|
||||||
unsigned GetNumberOfEdges() const { return number_of_edges; }
|
unsigned GetNumberOfEdges() const { return number_of_edges; }
|
||||||
auto GetEdgeCapacity() const { return edge_list.size(); }
|
|
||||||
|
|
||||||
unsigned GetOutDegree(const NodeIterator n) const { return node_array[n].edges; }
|
unsigned GetOutDegree(const NodeIterator n) const { return node_array[n].edges; }
|
||||||
|
|
||||||
unsigned GetDirectedOutDegree(const NodeIterator n) const
|
unsigned GetDirectedOutDegree(const NodeIterator n) const
|
||||||
{
|
{
|
||||||
unsigned degree = 0;
|
unsigned degree = 0;
|
||||||
for (const auto edge : irange(BeginEdges(n), EndEdges(n)))
|
for (const auto edge : osrm::irange(BeginEdges(n), EndEdges(n)))
|
||||||
{
|
{
|
||||||
if (!GetEdgeData(edge).reversed)
|
if (GetEdgeData(edge).forward)
|
||||||
{
|
{
|
||||||
++degree;
|
++degree;
|
||||||
}
|
}
|
||||||
@ -255,7 +164,7 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
|
|
||||||
EdgeRange GetAdjacentEdgeRange(const NodeIterator node) const
|
EdgeRange GetAdjacentEdgeRange(const NodeIterator node) const
|
||||||
{
|
{
|
||||||
return irange(BeginEdges(node), EndEdges(node));
|
return osrm::irange(BeginEdges(node), EndEdges(node));
|
||||||
}
|
}
|
||||||
|
|
||||||
NodeIterator InsertNode()
|
NodeIterator InsertNode()
|
||||||
@ -270,13 +179,9 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
EdgeIterator InsertEdge(const NodeIterator from, const NodeIterator to, const EdgeDataT &data)
|
EdgeIterator InsertEdge(const NodeIterator from, const NodeIterator to, const EdgeDataT &data)
|
||||||
{
|
{
|
||||||
Node &node = node_array[from];
|
Node &node = node_array[from];
|
||||||
EdgeIterator one_beyond_last_of_node = node.edges + node.first_edge;
|
EdgeIterator newFirstEdge = node.edges + node.first_edge;
|
||||||
// if we can't write at the end of this nodes edges
|
if (newFirstEdge >= edge_list.size() || !isDummy(newFirstEdge))
|
||||||
// that is: the end is the end of the edge_list,
|
|
||||||
// or the beginning of the next nodes edges
|
|
||||||
if (one_beyond_last_of_node == edge_list.size() || !isDummy(one_beyond_last_of_node))
|
|
||||||
{
|
{
|
||||||
// can we write before this nodes edges?
|
|
||||||
if (node.first_edge != 0 && isDummy(node.first_edge - 1))
|
if (node.first_edge != 0 && isDummy(node.first_edge - 1))
|
||||||
{
|
{
|
||||||
node.first_edge--;
|
node.first_edge--;
|
||||||
@ -284,33 +189,27 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
// we have to move this nodes edges to the end of the edge_list
|
|
||||||
EdgeIterator newFirstEdge = (EdgeIterator)edge_list.size();
|
EdgeIterator newFirstEdge = (EdgeIterator)edge_list.size();
|
||||||
unsigned newSize = node.edges * 1.1 + 2;
|
unsigned newSize = node.edges * 1.1 + 2;
|
||||||
EdgeIterator requiredCapacity = newSize + edge_list.size();
|
EdgeIterator requiredCapacity = newSize + edge_list.size();
|
||||||
EdgeIterator oldCapacity = edge_list.capacity();
|
EdgeIterator oldCapacity = edge_list.capacity();
|
||||||
// make sure there is enough space at the end
|
|
||||||
if (requiredCapacity >= oldCapacity)
|
if (requiredCapacity >= oldCapacity)
|
||||||
{
|
{
|
||||||
edge_list.reserve(requiredCapacity * 1.1);
|
edge_list.reserve(requiredCapacity * 1.1);
|
||||||
}
|
}
|
||||||
edge_list.resize(edge_list.size() + newSize);
|
edge_list.resize(edge_list.size() + newSize);
|
||||||
// move the edges over and invalidate the old ones
|
for (const auto i : osrm::irange(0u, node.edges))
|
||||||
for (const auto i : irange(0u, node.edges))
|
|
||||||
{
|
{
|
||||||
edge_list[newFirstEdge + i] = edge_list[node.first_edge + i];
|
edge_list[newFirstEdge + i] = edge_list[node.first_edge + i];
|
||||||
makeDummy(node.first_edge + i);
|
makeDummy(node.first_edge + i);
|
||||||
}
|
}
|
||||||
// invalidate until the end of edge_list
|
for (const auto i : osrm::irange(node.edges + 1, newSize))
|
||||||
for (const auto i : irange(node.edges + 1, newSize))
|
|
||||||
{
|
{
|
||||||
makeDummy(newFirstEdge + i);
|
makeDummy(newFirstEdge + i);
|
||||||
}
|
}
|
||||||
node.first_edge = newFirstEdge;
|
node.first_edge = newFirstEdge;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// get the position for the edge that is to be inserted
|
|
||||||
// and write it
|
|
||||||
Edge &edge = edge_list[node.first_edge + node.edges];
|
Edge &edge = edge_list[node.first_edge + node.edges];
|
||||||
edge.target = to;
|
edge.target = to;
|
||||||
edge.data = data;
|
edge.data = data;
|
||||||
@ -359,7 +258,7 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
// searches for a specific edge
|
// searches for a specific edge
|
||||||
EdgeIterator FindEdge(const NodeIterator from, const NodeIterator to) const
|
EdgeIterator FindEdge(const NodeIterator from, const NodeIterator to) const
|
||||||
{
|
{
|
||||||
for (const auto i : irange(BeginEdges(from), EndEdges(from)))
|
for (const auto i : osrm::irange(BeginEdges(from), EndEdges(from)))
|
||||||
{
|
{
|
||||||
if (to == edge_list[i].target)
|
if (to == edge_list[i].target)
|
||||||
{
|
{
|
||||||
@ -408,50 +307,6 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
return current_iterator;
|
return current_iterator;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Renumber(const std::vector<NodeID> &old_to_new_node)
|
|
||||||
{
|
|
||||||
// permutate everything but the sentinel
|
|
||||||
util::inplacePermutation(node_array.begin(), node_array.end(), old_to_new_node);
|
|
||||||
|
|
||||||
// Build up edge permutation
|
|
||||||
if (edge_list.size() >= std::numeric_limits<EdgeID>::max())
|
|
||||||
{
|
|
||||||
throw util::exception("There are too many edges, OSRM only supports 2^32" + SOURCE_REF);
|
|
||||||
}
|
|
||||||
|
|
||||||
EdgeID new_edge_index = 0;
|
|
||||||
std::vector<EdgeID> old_to_new_edge(edge_list.size(), SPECIAL_EDGEID);
|
|
||||||
for (auto node : util::irange<NodeID>(0, number_of_nodes))
|
|
||||||
{
|
|
||||||
auto new_first_edge = new_edge_index;
|
|
||||||
// move all filled edges
|
|
||||||
for (auto edge : GetAdjacentEdgeRange(node))
|
|
||||||
{
|
|
||||||
edge_list[edge].target = old_to_new_node[edge_list[edge].target];
|
|
||||||
BOOST_ASSERT(edge_list[edge].target != SPECIAL_NODEID);
|
|
||||||
old_to_new_edge[edge] = new_edge_index++;
|
|
||||||
}
|
|
||||||
node_array[node].first_edge = new_first_edge;
|
|
||||||
}
|
|
||||||
auto number_of_valid_edges = new_edge_index;
|
|
||||||
|
|
||||||
// move all dummy edges to the end of the renumbered range
|
|
||||||
for (auto edge : util::irange<NodeID>(0, edge_list.size()))
|
|
||||||
{
|
|
||||||
if (old_to_new_edge[edge] == SPECIAL_EDGEID)
|
|
||||||
{
|
|
||||||
BOOST_ASSERT(isDummy(edge));
|
|
||||||
old_to_new_edge[edge] = new_edge_index++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
BOOST_ASSERT(std::find(old_to_new_edge.begin(), old_to_new_edge.end(), SPECIAL_EDGEID) ==
|
|
||||||
old_to_new_edge.end());
|
|
||||||
util::inplacePermutation(edge_list.begin(), edge_list.end(), old_to_new_edge);
|
|
||||||
// Remove useless dummy nodes at the end
|
|
||||||
edge_list.resize(number_of_valid_edges);
|
|
||||||
number_of_edges = number_of_valid_edges;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
bool isDummy(const EdgeIterator edge) const
|
bool isDummy(const EdgeIterator edge) const
|
||||||
{
|
{
|
||||||
@ -463,12 +318,25 @@ template <typename EdgeDataT> class DynamicGraph
|
|||||||
edge_list[edge].target = (std::numeric_limits<NodeIterator>::max)();
|
edge_list[edge].target = (std::numeric_limits<NodeIterator>::max)();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct Node
|
||||||
|
{
|
||||||
|
// index of the first edge
|
||||||
|
EdgeIterator first_edge;
|
||||||
|
// amount of edges
|
||||||
|
unsigned edges;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Edge
|
||||||
|
{
|
||||||
|
NodeIterator target;
|
||||||
|
EdgeDataT data;
|
||||||
|
};
|
||||||
|
|
||||||
NodeIterator number_of_nodes;
|
NodeIterator number_of_nodes;
|
||||||
std::atomic_uint number_of_edges;
|
std::atomic_uint number_of_edges;
|
||||||
|
|
||||||
std::vector<Node> node_array;
|
std::vector<Node> node_array;
|
||||||
DeallocatingVector<Edge> edge_list;
|
DeallocatingVector<Edge> edge_list;
|
||||||
};
|
};
|
||||||
} // namespace osrm::util
|
|
||||||
|
|
||||||
#endif // DYNAMICGRAPH_HPP
|
#endif // DYNAMICGRAPH_HPP
|
109
data_structures/edge_based_node.hpp
Normal file
109
data_structures/edge_based_node.hpp
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef EDGE_BASED_NODE_HPP
|
||||||
|
#define EDGE_BASED_NODE_HPP
|
||||||
|
|
||||||
|
#include "../data_structures/travel_mode.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <limits>
|
||||||
|
|
||||||
|
struct EdgeBasedNode
|
||||||
|
{
|
||||||
|
EdgeBasedNode()
|
||||||
|
: forward_edge_based_node_id(SPECIAL_NODEID), reverse_edge_based_node_id(SPECIAL_NODEID),
|
||||||
|
u(SPECIAL_NODEID), v(SPECIAL_NODEID), name_id(0),
|
||||||
|
forward_weight(INVALID_EDGE_WEIGHT >> 1), reverse_weight(INVALID_EDGE_WEIGHT >> 1),
|
||||||
|
forward_offset(0), reverse_offset(0), packed_geometry_id(SPECIAL_EDGEID),
|
||||||
|
component_id(-1), fwd_segment_position(std::numeric_limits<unsigned short>::max()),
|
||||||
|
forward_travel_mode(TRAVEL_MODE_INACCESSIBLE),
|
||||||
|
backward_travel_mode(TRAVEL_MODE_INACCESSIBLE)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
explicit EdgeBasedNode(NodeID forward_edge_based_node_id,
|
||||||
|
NodeID reverse_edge_based_node_id,
|
||||||
|
NodeID u,
|
||||||
|
NodeID v,
|
||||||
|
unsigned name_id,
|
||||||
|
int forward_weight,
|
||||||
|
int reverse_weight,
|
||||||
|
int forward_offset,
|
||||||
|
int reverse_offset,
|
||||||
|
unsigned packed_geometry_id,
|
||||||
|
unsigned component_id,
|
||||||
|
unsigned short fwd_segment_position,
|
||||||
|
TravelMode forward_travel_mode,
|
||||||
|
TravelMode backward_travel_mode)
|
||||||
|
: forward_edge_based_node_id(forward_edge_based_node_id),
|
||||||
|
reverse_edge_based_node_id(reverse_edge_based_node_id), u(u), v(v), name_id(name_id),
|
||||||
|
forward_weight(forward_weight), reverse_weight(reverse_weight),
|
||||||
|
forward_offset(forward_offset), reverse_offset(reverse_offset),
|
||||||
|
packed_geometry_id(packed_geometry_id), component_id(component_id),
|
||||||
|
fwd_segment_position(fwd_segment_position), forward_travel_mode(forward_travel_mode),
|
||||||
|
backward_travel_mode(backward_travel_mode)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT((forward_edge_based_node_id != SPECIAL_NODEID) ||
|
||||||
|
(reverse_edge_based_node_id != SPECIAL_NODEID));
|
||||||
|
}
|
||||||
|
|
||||||
|
static inline FixedPointCoordinate Centroid(const FixedPointCoordinate &a,
|
||||||
|
const FixedPointCoordinate &b)
|
||||||
|
{
|
||||||
|
FixedPointCoordinate centroid;
|
||||||
|
// The coordinates of the midpoint are given by:
|
||||||
|
centroid.lat = (a.lat + b.lat) / 2;
|
||||||
|
centroid.lon = (a.lon + b.lon) / 2;
|
||||||
|
return centroid;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool IsCompressed() const { return packed_geometry_id != SPECIAL_EDGEID; }
|
||||||
|
|
||||||
|
bool is_in_tiny_cc() const { return 0 != component_id; }
|
||||||
|
|
||||||
|
NodeID forward_edge_based_node_id; // needed for edge-expanded graph
|
||||||
|
NodeID reverse_edge_based_node_id; // needed for edge-expanded graph
|
||||||
|
NodeID u; // indices into the coordinates array
|
||||||
|
NodeID v; // indices into the coordinates array
|
||||||
|
unsigned name_id; // id of the edge name
|
||||||
|
int forward_weight; // weight of the edge
|
||||||
|
int reverse_weight; // weight in the other direction (may be different)
|
||||||
|
int forward_offset; // prefix sum of the weight up the edge TODO: short must suffice
|
||||||
|
int reverse_offset; // prefix sum of the weight from the edge TODO: short must suffice
|
||||||
|
unsigned packed_geometry_id; // if set, then the edge represents a packed geometry
|
||||||
|
unsigned component_id;
|
||||||
|
unsigned short fwd_segment_position; // segment id in a compressed geometry
|
||||||
|
TravelMode forward_travel_mode : 4;
|
||||||
|
TravelMode backward_travel_mode : 4;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // EDGE_BASED_NODE_HPP
|
66
data_structures/external_memory_node.cpp
Normal file
66
data_structures/external_memory_node.cpp
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "external_memory_node.hpp"
|
||||||
|
#include "query_node.hpp"
|
||||||
|
|
||||||
|
#include <limits>
|
||||||
|
|
||||||
|
ExternalMemoryNode::ExternalMemoryNode(
|
||||||
|
int lat, int lon, unsigned int node_id, bool barrier, bool traffic_lights)
|
||||||
|
: QueryNode(lat, lon, node_id), barrier(barrier), traffic_lights(traffic_lights)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
ExternalMemoryNode::ExternalMemoryNode() : barrier(false), traffic_lights(false) {}
|
||||||
|
|
||||||
|
ExternalMemoryNode ExternalMemoryNode::min_value()
|
||||||
|
{
|
||||||
|
return ExternalMemoryNode(0, 0, 0, false, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
ExternalMemoryNode ExternalMemoryNode::max_value()
|
||||||
|
{
|
||||||
|
return ExternalMemoryNode(std::numeric_limits<int>::max(), std::numeric_limits<int>::max(),
|
||||||
|
std::numeric_limits<unsigned>::max(), false, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool ExternalMemoryNodeSTXXLCompare::operator()(const ExternalMemoryNode &left,
|
||||||
|
const ExternalMemoryNode &right) const
|
||||||
|
{
|
||||||
|
return left.node_id < right.node_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
ExternalMemoryNodeSTXXLCompare::value_type ExternalMemoryNodeSTXXLCompare::max_value()
|
||||||
|
{
|
||||||
|
return ExternalMemoryNode::max_value();
|
||||||
|
}
|
||||||
|
|
||||||
|
ExternalMemoryNodeSTXXLCompare::value_type ExternalMemoryNodeSTXXLCompare::min_value()
|
||||||
|
{
|
||||||
|
return ExternalMemoryNode::min_value();
|
||||||
|
}
|
57
data_structures/external_memory_node.hpp
Normal file
57
data_structures/external_memory_node.hpp
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef EXTERNAL_MEMORY_NODE_HPP_
|
||||||
|
#define EXTERNAL_MEMORY_NODE_HPP_
|
||||||
|
|
||||||
|
#include "query_node.hpp"
|
||||||
|
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
struct ExternalMemoryNode : QueryNode
|
||||||
|
{
|
||||||
|
ExternalMemoryNode(int lat, int lon, NodeID id, bool barrier, bool traffic_light);
|
||||||
|
|
||||||
|
ExternalMemoryNode();
|
||||||
|
|
||||||
|
static ExternalMemoryNode min_value();
|
||||||
|
|
||||||
|
static ExternalMemoryNode max_value();
|
||||||
|
|
||||||
|
bool barrier;
|
||||||
|
bool traffic_lights;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ExternalMemoryNodeSTXXLCompare
|
||||||
|
{
|
||||||
|
using value_type = ExternalMemoryNode;
|
||||||
|
bool operator()(const ExternalMemoryNode &left, const ExternalMemoryNode &right) const;
|
||||||
|
value_type max_value();
|
||||||
|
value_type min_value();
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* EXTERNAL_MEMORY_NODE_HPP_ */
|
216
data_structures/fixed_point_number.hpp
Normal file
216
data_structures/fixed_point_number.hpp
Normal file
@ -0,0 +1,216 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef FIXED_POINT_NUMBER_HPP
|
||||||
|
#define FIXED_POINT_NUMBER_HPP
|
||||||
|
|
||||||
|
#include <cmath>
|
||||||
|
#include <cstdint>
|
||||||
|
|
||||||
|
#include <iostream>
|
||||||
|
#include <limits>
|
||||||
|
#include <type_traits>
|
||||||
|
#include <utility>
|
||||||
|
|
||||||
|
namespace osrm
|
||||||
|
{
|
||||||
|
|
||||||
|
// implements an binary based fixed point number type
|
||||||
|
template <unsigned FractionalBitSize,
|
||||||
|
bool use_64_bits = false,
|
||||||
|
bool is_unsigned = false,
|
||||||
|
bool truncate_results = false>
|
||||||
|
class FixedPointNumber
|
||||||
|
{
|
||||||
|
static_assert(FractionalBitSize > 0, "FractionalBitSize must be greater than 0");
|
||||||
|
static_assert(FractionalBitSize <= 32, "FractionalBitSize must at most 32");
|
||||||
|
|
||||||
|
typename std::conditional<use_64_bits, int64_t, int32_t>::type m_fixed_point_state;
|
||||||
|
constexpr static const decltype(m_fixed_point_state) PRECISION = 1 << FractionalBitSize;
|
||||||
|
|
||||||
|
// state signage encapsulates whether the state should either represent a
|
||||||
|
// signed or an unsigned floating point number
|
||||||
|
using state_signage =
|
||||||
|
typename std::conditional<is_unsigned,
|
||||||
|
typename std::make_unsigned<decltype(m_fixed_point_state)>::type,
|
||||||
|
decltype(m_fixed_point_state)>::type;
|
||||||
|
|
||||||
|
public:
|
||||||
|
FixedPointNumber() : m_fixed_point_state(0) {}
|
||||||
|
|
||||||
|
// the type is either initialized with a floating point value or an
|
||||||
|
// integral state. Anything else will throw at compile-time.
|
||||||
|
template <class T>
|
||||||
|
constexpr FixedPointNumber(const T &&input) noexcept
|
||||||
|
: m_fixed_point_state(static_cast<decltype(m_fixed_point_state)>(
|
||||||
|
std::round(std::forward<const T>(input) * PRECISION)))
|
||||||
|
{
|
||||||
|
static_assert(
|
||||||
|
std::is_floating_point<T>::value || std::is_integral<T>::value,
|
||||||
|
"FixedPointNumber needs to be initialized with floating point or integral value");
|
||||||
|
}
|
||||||
|
|
||||||
|
// get max value
|
||||||
|
template <typename T,
|
||||||
|
typename std::enable_if<std::is_floating_point<T>::value>::type * = nullptr>
|
||||||
|
constexpr static auto max() noexcept -> T
|
||||||
|
{
|
||||||
|
return static_cast<T>(std::numeric_limits<state_signage>::max()) / PRECISION;
|
||||||
|
}
|
||||||
|
|
||||||
|
// get min value
|
||||||
|
template <typename T,
|
||||||
|
typename std::enable_if<std::is_floating_point<T>::value>::type * = nullptr>
|
||||||
|
constexpr static auto min() noexcept -> T
|
||||||
|
{
|
||||||
|
return static_cast<T>(1) / PRECISION;
|
||||||
|
}
|
||||||
|
|
||||||
|
// get lowest value
|
||||||
|
template <typename T,
|
||||||
|
typename std::enable_if<std::is_floating_point<T>::value>::type * = nullptr>
|
||||||
|
constexpr static auto lowest() noexcept -> T
|
||||||
|
{
|
||||||
|
return static_cast<T>(std::numeric_limits<state_signage>::min()) / PRECISION;
|
||||||
|
}
|
||||||
|
|
||||||
|
// cast to floating point type T, return value
|
||||||
|
template <typename T,
|
||||||
|
typename std::enable_if<std::is_floating_point<T>::value>::type * = nullptr>
|
||||||
|
explicit operator const T() const noexcept
|
||||||
|
{
|
||||||
|
// casts to external type (signed or unsigned) and then to float
|
||||||
|
return static_cast<T>(static_cast<state_signage>(m_fixed_point_state)) / PRECISION;
|
||||||
|
}
|
||||||
|
|
||||||
|
// warn about cast to integral type T, its disabled for good reason
|
||||||
|
template <typename T, typename std::enable_if<std::is_integral<T>::value>::type * = nullptr>
|
||||||
|
explicit operator T() const
|
||||||
|
{
|
||||||
|
static_assert(std::is_integral<T>::value,
|
||||||
|
"casts to integral types have been disabled on purpose");
|
||||||
|
}
|
||||||
|
|
||||||
|
// compare, ie. sort fixed-point numbers
|
||||||
|
bool operator<(const FixedPointNumber &other) const noexcept
|
||||||
|
{
|
||||||
|
return m_fixed_point_state < other.m_fixed_point_state;
|
||||||
|
}
|
||||||
|
|
||||||
|
// equality, ie. sort fixed-point numbers
|
||||||
|
bool operator==(const FixedPointNumber &other) const noexcept
|
||||||
|
{
|
||||||
|
return m_fixed_point_state == other.m_fixed_point_state;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator!=(const FixedPointNumber &other) const { return !(*this == other); }
|
||||||
|
bool operator>(const FixedPointNumber &other) const { return other < *this; }
|
||||||
|
bool operator<=(const FixedPointNumber &other) const { return !(other < *this); }
|
||||||
|
bool operator>=(const FixedPointNumber &other) const { return !(*this < other); }
|
||||||
|
|
||||||
|
// arithmetic operators
|
||||||
|
FixedPointNumber operator+(const FixedPointNumber &other) const noexcept
|
||||||
|
{
|
||||||
|
FixedPointNumber tmp = *this;
|
||||||
|
tmp.m_fixed_point_state += other.m_fixed_point_state;
|
||||||
|
return tmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber &operator+=(const FixedPointNumber &other) noexcept
|
||||||
|
{
|
||||||
|
this->m_fixed_point_state += other.m_fixed_point_state;
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber operator-(const FixedPointNumber &other) const noexcept
|
||||||
|
{
|
||||||
|
FixedPointNumber tmp = *this;
|
||||||
|
tmp.m_fixed_point_state -= other.m_fixed_point_state;
|
||||||
|
return tmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber &operator-=(const FixedPointNumber &other) noexcept
|
||||||
|
{
|
||||||
|
this->m_fixed_point_state -= other.m_fixed_point_state;
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber operator*(const FixedPointNumber &other) const noexcept
|
||||||
|
{
|
||||||
|
int64_t temp = this->m_fixed_point_state;
|
||||||
|
temp *= other.m_fixed_point_state;
|
||||||
|
|
||||||
|
// rounding!
|
||||||
|
if (!truncate_results)
|
||||||
|
{
|
||||||
|
temp = temp + ((temp & 1 << (FractionalBitSize - 1)) << 1);
|
||||||
|
}
|
||||||
|
temp >>= FractionalBitSize;
|
||||||
|
FixedPointNumber tmp;
|
||||||
|
tmp.m_fixed_point_state = static_cast<decltype(m_fixed_point_state)>(temp);
|
||||||
|
return tmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber &operator*=(const FixedPointNumber &other) noexcept
|
||||||
|
{
|
||||||
|
int64_t temp = this->m_fixed_point_state;
|
||||||
|
temp *= other.m_fixed_point_state;
|
||||||
|
|
||||||
|
// rounding!
|
||||||
|
if (!truncate_results)
|
||||||
|
{
|
||||||
|
temp = temp + ((temp & 1 << (FractionalBitSize - 1)) << 1);
|
||||||
|
}
|
||||||
|
temp >>= FractionalBitSize;
|
||||||
|
this->m_fixed_point_state = static_cast<decltype(m_fixed_point_state)>(temp);
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber operator/(const FixedPointNumber &other) const noexcept
|
||||||
|
{
|
||||||
|
int64_t temp = this->m_fixed_point_state;
|
||||||
|
temp <<= FractionalBitSize;
|
||||||
|
temp /= static_cast<int64_t>(other.m_fixed_point_state);
|
||||||
|
FixedPointNumber tmp;
|
||||||
|
tmp.m_fixed_point_state = static_cast<decltype(m_fixed_point_state)>(temp);
|
||||||
|
return tmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
FixedPointNumber &operator/=(const FixedPointNumber &other) noexcept
|
||||||
|
{
|
||||||
|
int64_t temp = this->m_fixed_point_state;
|
||||||
|
temp <<= FractionalBitSize;
|
||||||
|
temp /= static_cast<int64_t>(other.m_fixed_point_state);
|
||||||
|
FixedPointNumber tmp;
|
||||||
|
this->m_fixed_point_state = static_cast<decltype(m_fixed_point_state)>(temp);
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
static_assert(4 == sizeof(FixedPointNumber<1>), "FP19 has wrong size != 4");
|
||||||
|
}
|
||||||
|
#endif // FIXED_POINT_NUMBER_HPP
|
158
data_structures/hidden_markov_model.hpp
Normal file
158
data_structures/hidden_markov_model.hpp
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef HIDDEN_MARKOV_MODEL
|
||||||
|
#define HIDDEN_MARKOV_MODEL
|
||||||
|
|
||||||
|
#include "../util/integer_range.hpp"
|
||||||
|
|
||||||
|
#include <boost/assert.hpp>
|
||||||
|
|
||||||
|
#include <cmath>
|
||||||
|
|
||||||
|
#include <limits>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
namespace osrm
|
||||||
|
{
|
||||||
|
namespace matching
|
||||||
|
{
|
||||||
|
static const double log_2_pi = std::log(2. * M_PI);
|
||||||
|
static const double IMPOSSIBLE_LOG_PROB = -std::numeric_limits<double>::infinity();
|
||||||
|
static const double MINIMAL_LOG_PROB = std::numeric_limits<double>::lowest();
|
||||||
|
static const std::size_t INVALID_STATE = std::numeric_limits<std::size_t>::max();
|
||||||
|
} // namespace matching
|
||||||
|
} // namespace osrm
|
||||||
|
|
||||||
|
// closures to precompute log -> only simple floating point operations
|
||||||
|
struct EmissionLogProbability
|
||||||
|
{
|
||||||
|
double sigma_z;
|
||||||
|
double log_sigma_z;
|
||||||
|
|
||||||
|
EmissionLogProbability(const double sigma_z) : sigma_z(sigma_z), log_sigma_z(std::log(sigma_z))
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
double operator()(const double distance) const
|
||||||
|
{
|
||||||
|
return -0.5 * (osrm::matching::log_2_pi + (distance / sigma_z) * (distance / sigma_z)) -
|
||||||
|
log_sigma_z;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct TransitionLogProbability
|
||||||
|
{
|
||||||
|
double beta;
|
||||||
|
double log_beta;
|
||||||
|
TransitionLogProbability(const double beta) : beta(beta), log_beta(std::log(beta)) {}
|
||||||
|
|
||||||
|
double operator()(const double d_t) const { return -log_beta - d_t / beta; }
|
||||||
|
};
|
||||||
|
|
||||||
|
template <class CandidateLists> struct HiddenMarkovModel
|
||||||
|
{
|
||||||
|
std::vector<std::vector<double>> viterbi;
|
||||||
|
std::vector<std::vector<std::pair<unsigned, unsigned>>> parents;
|
||||||
|
std::vector<std::vector<float>> path_lengths;
|
||||||
|
std::vector<std::vector<bool>> pruned;
|
||||||
|
std::vector<std::vector<bool>> suspicious;
|
||||||
|
std::vector<bool> breakage;
|
||||||
|
|
||||||
|
const CandidateLists &candidates_list;
|
||||||
|
const EmissionLogProbability &emission_log_probability;
|
||||||
|
|
||||||
|
HiddenMarkovModel(const CandidateLists &candidates_list,
|
||||||
|
const EmissionLogProbability &emission_log_probability)
|
||||||
|
: breakage(candidates_list.size()), candidates_list(candidates_list),
|
||||||
|
emission_log_probability(emission_log_probability)
|
||||||
|
{
|
||||||
|
for (const auto &l : candidates_list)
|
||||||
|
{
|
||||||
|
viterbi.emplace_back(l.size());
|
||||||
|
parents.emplace_back(l.size());
|
||||||
|
path_lengths.emplace_back(l.size());
|
||||||
|
suspicious.emplace_back(l.size());
|
||||||
|
pruned.emplace_back(l.size());
|
||||||
|
}
|
||||||
|
|
||||||
|
clear(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
void clear(std::size_t initial_timestamp)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(viterbi.size() == parents.size() && parents.size() == path_lengths.size() &&
|
||||||
|
path_lengths.size() == pruned.size() && pruned.size() == breakage.size());
|
||||||
|
|
||||||
|
for (const auto t : osrm::irange(initial_timestamp, viterbi.size()))
|
||||||
|
{
|
||||||
|
std::fill(viterbi[t].begin(), viterbi[t].end(), osrm::matching::IMPOSSIBLE_LOG_PROB);
|
||||||
|
std::fill(parents[t].begin(), parents[t].end(), std::make_pair(0u, 0u));
|
||||||
|
std::fill(path_lengths[t].begin(), path_lengths[t].end(), 0);
|
||||||
|
std::fill(suspicious[t].begin(), suspicious[t].end(), true);
|
||||||
|
std::fill(pruned[t].begin(), pruned[t].end(), true);
|
||||||
|
}
|
||||||
|
std::fill(breakage.begin() + initial_timestamp, breakage.end(), true);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::size_t initialize(std::size_t initial_timestamp)
|
||||||
|
{
|
||||||
|
BOOST_ASSERT(initial_timestamp < candidates_list.size());
|
||||||
|
|
||||||
|
do
|
||||||
|
{
|
||||||
|
for (const auto s : osrm::irange<std::size_t>(0u, viterbi[initial_timestamp].size()))
|
||||||
|
{
|
||||||
|
viterbi[initial_timestamp][s] =
|
||||||
|
emission_log_probability(candidates_list[initial_timestamp][s].second);
|
||||||
|
parents[initial_timestamp][s] = std::make_pair(initial_timestamp, s);
|
||||||
|
pruned[initial_timestamp][s] =
|
||||||
|
viterbi[initial_timestamp][s] < osrm::matching::MINIMAL_LOG_PROB;
|
||||||
|
suspicious[initial_timestamp][s] = false;
|
||||||
|
|
||||||
|
breakage[initial_timestamp] =
|
||||||
|
breakage[initial_timestamp] && pruned[initial_timestamp][s];
|
||||||
|
}
|
||||||
|
|
||||||
|
++initial_timestamp;
|
||||||
|
} while (breakage[initial_timestamp - 1]);
|
||||||
|
|
||||||
|
if (initial_timestamp >= viterbi.size())
|
||||||
|
{
|
||||||
|
return osrm::matching::INVALID_STATE;
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(initial_timestamp > 0);
|
||||||
|
--initial_timestamp;
|
||||||
|
|
||||||
|
BOOST_ASSERT(breakage[initial_timestamp] == false);
|
||||||
|
|
||||||
|
return initial_timestamp;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // HIDDEN_MARKOV_MODEL
|
100
data_structures/hilbert_value.cpp
Normal file
100
data_structures/hilbert_value.cpp
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "hilbert_value.hpp"
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
uint64_t HilbertCode::operator()(const FixedPointCoordinate ¤t_coordinate) const
|
||||||
|
{
|
||||||
|
unsigned location[2];
|
||||||
|
location[0] = current_coordinate.lat + static_cast<int>(90 * COORDINATE_PRECISION);
|
||||||
|
location[1] = current_coordinate.lon + static_cast<int>(180 * COORDINATE_PRECISION);
|
||||||
|
|
||||||
|
TransposeCoordinate(location);
|
||||||
|
return BitInterleaving(location[0], location[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
uint64_t HilbertCode::BitInterleaving(const uint32_t latitude, const uint32_t longitude) const
|
||||||
|
{
|
||||||
|
uint64_t result = 0;
|
||||||
|
for (int8_t index = 31; index >= 0; --index)
|
||||||
|
{
|
||||||
|
result |= (latitude >> index) & 1;
|
||||||
|
result <<= 1;
|
||||||
|
result |= (longitude >> index) & 1;
|
||||||
|
if (0 != index)
|
||||||
|
{
|
||||||
|
result <<= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
void HilbertCode::TransposeCoordinate(uint32_t *X) const
|
||||||
|
{
|
||||||
|
uint32_t M = 1 << (32 - 1), P, Q, t;
|
||||||
|
int i;
|
||||||
|
// Inverse undo
|
||||||
|
for (Q = M; Q > 1; Q >>= 1)
|
||||||
|
{
|
||||||
|
P = Q - 1;
|
||||||
|
for (i = 0; i < 2; ++i)
|
||||||
|
{
|
||||||
|
|
||||||
|
const bool condition = (X[i] & Q);
|
||||||
|
if (condition)
|
||||||
|
{
|
||||||
|
X[0] ^= P; // invert
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
t = (X[0] ^ X[i]) & P;
|
||||||
|
X[0] ^= t;
|
||||||
|
X[i] ^= t;
|
||||||
|
}
|
||||||
|
} // exchange
|
||||||
|
}
|
||||||
|
// Gray encode
|
||||||
|
for (i = 1; i < 2; ++i)
|
||||||
|
{
|
||||||
|
X[i] ^= X[i - 1];
|
||||||
|
}
|
||||||
|
t = 0;
|
||||||
|
for (Q = M; Q > 1; Q >>= 1)
|
||||||
|
{
|
||||||
|
const bool condition = (X[2 - 1] & Q);
|
||||||
|
if (condition)
|
||||||
|
{
|
||||||
|
t ^= Q - 1;
|
||||||
|
}
|
||||||
|
} // check if this for loop is wrong
|
||||||
|
for (i = 0; i < 2; ++i)
|
||||||
|
{
|
||||||
|
X[i] ^= t;
|
||||||
|
}
|
||||||
|
}
|
@ -1,6 +1,6 @@
|
|||||||
/*
|
/*
|
||||||
|
|
||||||
Copyright (c) 2017, Project OSRM contributors
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
All rights reserved.
|
All rights reserved.
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
@ -25,28 +25,25 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||||||
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#ifndef CONTRACTOR_CONTRACTOR_HPP
|
#ifndef HILBERT_VALUE_HPP
|
||||||
#define CONTRACTOR_CONTRACTOR_HPP
|
#define HILBERT_VALUE_HPP
|
||||||
|
|
||||||
#include "contractor/contractor_config.hpp"
|
#include <cstdint>
|
||||||
|
|
||||||
namespace osrm::contractor
|
// computes a 64 bit value that corresponds to the hilbert space filling curve
|
||||||
{
|
|
||||||
|
|
||||||
/// Base class of osrm-contract
|
struct FixedPointCoordinate;
|
||||||
class Contractor
|
|
||||||
|
class HilbertCode
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
explicit Contractor(const ContractorConfig &config_) : config{config_} {}
|
uint64_t operator()(const FixedPointCoordinate ¤t_coordinate) const;
|
||||||
|
HilbertCode() {}
|
||||||
Contractor(const Contractor &) = delete;
|
HilbertCode(const HilbertCode &) = delete;
|
||||||
Contractor &operator=(const Contractor &) = delete;
|
|
||||||
|
|
||||||
int Run();
|
|
||||||
|
|
||||||
private:
|
private:
|
||||||
ContractorConfig config;
|
inline uint64_t BitInterleaving(const uint32_t a, const uint32_t b) const;
|
||||||
|
inline void TransposeCoordinate(uint32_t *X) const;
|
||||||
};
|
};
|
||||||
} // namespace osrm::contractor
|
|
||||||
|
|
||||||
#endif // PROCESSING_CHAIN_HPP
|
#endif /* HILBERT_VALUE_HPP */
|
113
data_structures/import_edge.cpp
Normal file
113
data_structures/import_edge.cpp
Normal file
@ -0,0 +1,113 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "import_edge.hpp"
|
||||||
|
|
||||||
|
#include "travel_mode.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
bool NodeBasedEdge::operator<(const NodeBasedEdge &other) const
|
||||||
|
{
|
||||||
|
if (source == other.source)
|
||||||
|
{
|
||||||
|
if (target == other.target)
|
||||||
|
{
|
||||||
|
if (weight == other.weight)
|
||||||
|
{
|
||||||
|
return forward && backward && ((!other.forward) || (!other.backward));
|
||||||
|
}
|
||||||
|
return weight < other.weight;
|
||||||
|
}
|
||||||
|
return target < other.target;
|
||||||
|
}
|
||||||
|
return source < other.source;
|
||||||
|
}
|
||||||
|
|
||||||
|
NodeBasedEdge::NodeBasedEdge()
|
||||||
|
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), name_id(0), weight(0), forward(false),
|
||||||
|
backward(false), roundabout(false), in_tiny_cc(false),
|
||||||
|
access_restricted(false), is_split(false), travel_mode(false)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
NodeBasedEdge::NodeBasedEdge(NodeID source,
|
||||||
|
NodeID target,
|
||||||
|
NodeID name_id,
|
||||||
|
EdgeWeight weight,
|
||||||
|
bool forward,
|
||||||
|
bool backward,
|
||||||
|
bool roundabout,
|
||||||
|
bool in_tiny_cc,
|
||||||
|
bool access_restricted,
|
||||||
|
TravelMode travel_mode,
|
||||||
|
bool is_split)
|
||||||
|
: source(source), target(target), name_id(name_id), weight(weight), forward(forward),
|
||||||
|
backward(backward), roundabout(roundabout), in_tiny_cc(in_tiny_cc),
|
||||||
|
access_restricted(access_restricted), is_split(is_split), travel_mode(travel_mode)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
bool EdgeBasedEdge::operator<(const EdgeBasedEdge &other) const
|
||||||
|
{
|
||||||
|
if (source == other.source)
|
||||||
|
{
|
||||||
|
if (target == other.target)
|
||||||
|
{
|
||||||
|
if (weight == other.weight)
|
||||||
|
{
|
||||||
|
return forward && backward && ((!other.forward) || (!other.backward));
|
||||||
|
}
|
||||||
|
return weight < other.weight;
|
||||||
|
}
|
||||||
|
return target < other.target;
|
||||||
|
}
|
||||||
|
return source < other.source;
|
||||||
|
}
|
||||||
|
|
||||||
|
template <class EdgeT>
|
||||||
|
EdgeBasedEdge::EdgeBasedEdge(const EdgeT &other)
|
||||||
|
: source(other.source), target(other.target), edge_id(other.data.via),
|
||||||
|
weight(other.data.distance), forward(other.data.forward), backward(other.data.backward)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Default constructor. target and weight are set to 0.*/
|
||||||
|
EdgeBasedEdge::EdgeBasedEdge()
|
||||||
|
: source(0), target(0), edge_id(0), weight(0), forward(false), backward(false)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
EdgeBasedEdge::EdgeBasedEdge(const NodeID source,
|
||||||
|
const NodeID target,
|
||||||
|
const NodeID edge_id,
|
||||||
|
const EdgeWeight weight,
|
||||||
|
const bool forward,
|
||||||
|
const bool backward)
|
||||||
|
: source(source), target(target), edge_id(edge_id), weight(weight), forward(forward),
|
||||||
|
backward(backward)
|
||||||
|
{
|
||||||
|
}
|
88
data_structures/import_edge.hpp
Normal file
88
data_structures/import_edge.hpp
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2013, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef IMPORT_EDGE_HPP
|
||||||
|
#define IMPORT_EDGE_HPP
|
||||||
|
|
||||||
|
#include "../data_structures/travel_mode.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
struct NodeBasedEdge
|
||||||
|
{
|
||||||
|
bool operator<(const NodeBasedEdge &e) const;
|
||||||
|
|
||||||
|
NodeBasedEdge();
|
||||||
|
explicit NodeBasedEdge(NodeID source,
|
||||||
|
NodeID target,
|
||||||
|
NodeID name_id,
|
||||||
|
EdgeWeight weight,
|
||||||
|
bool forward,
|
||||||
|
bool backward,
|
||||||
|
bool roundabout,
|
||||||
|
bool in_tiny_cc,
|
||||||
|
bool access_restricted,
|
||||||
|
TravelMode travel_mode,
|
||||||
|
bool is_split);
|
||||||
|
|
||||||
|
NodeID source;
|
||||||
|
NodeID target;
|
||||||
|
NodeID name_id;
|
||||||
|
EdgeWeight weight;
|
||||||
|
bool forward : 1;
|
||||||
|
bool backward : 1;
|
||||||
|
bool roundabout : 1;
|
||||||
|
bool in_tiny_cc : 1;
|
||||||
|
bool access_restricted : 1;
|
||||||
|
bool is_split : 1;
|
||||||
|
TravelMode travel_mode : 4;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct EdgeBasedEdge
|
||||||
|
{
|
||||||
|
|
||||||
|
public:
|
||||||
|
bool operator<(const EdgeBasedEdge &e) const;
|
||||||
|
|
||||||
|
template <class EdgeT> explicit EdgeBasedEdge(const EdgeT &myEdge);
|
||||||
|
|
||||||
|
EdgeBasedEdge();
|
||||||
|
|
||||||
|
explicit EdgeBasedEdge(const NodeID source,
|
||||||
|
const NodeID target,
|
||||||
|
const NodeID edge_id,
|
||||||
|
const EdgeWeight weight,
|
||||||
|
const bool forward,
|
||||||
|
const bool backward);
|
||||||
|
NodeID source;
|
||||||
|
NodeID target;
|
||||||
|
NodeID edge_id;
|
||||||
|
EdgeWeight weight : 30;
|
||||||
|
bool forward : 1;
|
||||||
|
bool backward : 1;
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif /* IMPORT_EDGE_HPP */
|
87
data_structures/internal_route_result.hpp
Normal file
87
data_structures/internal_route_result.hpp
Normal file
@ -0,0 +1,87 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2013, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef RAW_ROUTE_DATA_H
|
||||||
|
#define RAW_ROUTE_DATA_H
|
||||||
|
|
||||||
|
#include "../data_structures/phantom_node.hpp"
|
||||||
|
#include "../data_structures/travel_mode.hpp"
|
||||||
|
#include "../data_structures/turn_instructions.hpp"
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
#include <osrm/coordinate.hpp>
|
||||||
|
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
struct PathData
|
||||||
|
{
|
||||||
|
PathData()
|
||||||
|
: node(SPECIAL_NODEID), name_id(INVALID_EDGE_WEIGHT), segment_duration(INVALID_EDGE_WEIGHT),
|
||||||
|
turn_instruction(TurnInstruction::NoTurn), travel_mode(TRAVEL_MODE_INACCESSIBLE)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
PathData(NodeID node,
|
||||||
|
unsigned name_id,
|
||||||
|
TurnInstruction turn_instruction,
|
||||||
|
EdgeWeight segment_duration,
|
||||||
|
TravelMode travel_mode)
|
||||||
|
: node(node), name_id(name_id), segment_duration(segment_duration),
|
||||||
|
turn_instruction(turn_instruction), travel_mode(travel_mode)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
NodeID node;
|
||||||
|
unsigned name_id;
|
||||||
|
EdgeWeight segment_duration;
|
||||||
|
TurnInstruction turn_instruction;
|
||||||
|
TravelMode travel_mode : 4;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct InternalRouteResult
|
||||||
|
{
|
||||||
|
std::vector<std::vector<PathData>> unpacked_path_segments;
|
||||||
|
std::vector<PathData> unpacked_alternative;
|
||||||
|
std::vector<PhantomNodes> segment_end_coordinates;
|
||||||
|
std::vector<bool> source_traversed_in_reverse;
|
||||||
|
std::vector<bool> target_traversed_in_reverse;
|
||||||
|
std::vector<bool> alt_source_traversed_in_reverse;
|
||||||
|
std::vector<bool> alt_target_traversed_in_reverse;
|
||||||
|
int shortest_path_length;
|
||||||
|
int alternative_path_length;
|
||||||
|
|
||||||
|
bool is_via_leg(const std::size_t leg) const
|
||||||
|
{
|
||||||
|
return (leg != unpacked_path_segments.size() - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
InternalRouteResult()
|
||||||
|
: shortest_path_length(INVALID_EDGE_WEIGHT), alternative_path_length(INVALID_EDGE_WEIGHT)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // RAW_ROUTE_DATA_H
|
97
data_structures/lru_cache.hpp
Normal file
97
data_structures/lru_cache.hpp
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef LRUCACHE_HPP
|
||||||
|
#define LRUCACHE_HPP
|
||||||
|
|
||||||
|
#include <list>
|
||||||
|
#include <unordered_map>
|
||||||
|
|
||||||
|
template <typename KeyT, typename ValueT> class LRUCache
|
||||||
|
{
|
||||||
|
private:
|
||||||
|
struct CacheEntry
|
||||||
|
{
|
||||||
|
CacheEntry(KeyT k, ValueT v) : key(k), value(v) {}
|
||||||
|
KeyT key;
|
||||||
|
ValueT value;
|
||||||
|
};
|
||||||
|
unsigned capacity;
|
||||||
|
std::list<CacheEntry> itemsInCache;
|
||||||
|
std::unordered_map<KeyT, typename std::list<CacheEntry>::iterator> positionMap;
|
||||||
|
|
||||||
|
public:
|
||||||
|
explicit LRUCache(unsigned c) : capacity(c) {}
|
||||||
|
|
||||||
|
bool Holds(KeyT key)
|
||||||
|
{
|
||||||
|
if (positionMap.find(key) != positionMap.end())
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
void Insert(const KeyT key, ValueT &value)
|
||||||
|
{
|
||||||
|
itemsInCache.push_front(CacheEntry(key, value));
|
||||||
|
positionMap.insert(std::make_pair(key, itemsInCache.begin()));
|
||||||
|
if (itemsInCache.size() > capacity)
|
||||||
|
{
|
||||||
|
positionMap.erase(itemsInCache.back().key);
|
||||||
|
itemsInCache.pop_back();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void Insert(const KeyT key, ValueT value)
|
||||||
|
{
|
||||||
|
itemsInCache.push_front(CacheEntry(key, value));
|
||||||
|
positionMap.insert(std::make_pair(key, itemsInCache.begin()));
|
||||||
|
if (itemsInCache.size() > capacity)
|
||||||
|
{
|
||||||
|
positionMap.erase(itemsInCache.back().key);
|
||||||
|
itemsInCache.pop_back();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bool Fetch(const KeyT key, ValueT &result)
|
||||||
|
{
|
||||||
|
if (Holds(key))
|
||||||
|
{
|
||||||
|
CacheEntry e = *(positionMap.find(key)->second);
|
||||||
|
result = e.value;
|
||||||
|
|
||||||
|
// move to front
|
||||||
|
itemsInCache.splice(itemsInCache.begin(), itemsInCache, positionMap.find(key)->second);
|
||||||
|
positionMap.find(key)->second = itemsInCache.begin();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
unsigned Size() const { return itemsInCache.size(); }
|
||||||
|
};
|
||||||
|
#endif // LRUCACHE_HPP
|
235
data_structures/node_based_graph.hpp
Normal file
235
data_structures/node_based_graph.hpp
Normal file
@ -0,0 +1,235 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2015, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef NODE_BASED_GRAPH_HPP
|
||||||
|
#define NODE_BASED_GRAPH_HPP
|
||||||
|
|
||||||
|
#include "dynamic_graph.hpp"
|
||||||
|
#include "import_edge.hpp"
|
||||||
|
#include "../util/simple_logger.hpp"
|
||||||
|
|
||||||
|
#include <tbb/parallel_sort.h>
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
|
||||||
|
struct NodeBasedEdgeData
|
||||||
|
{
|
||||||
|
NodeBasedEdgeData()
|
||||||
|
: distance(INVALID_EDGE_WEIGHT), edgeBasedNodeID(SPECIAL_NODEID),
|
||||||
|
nameID(std::numeric_limits<unsigned>::max()), isAccessRestricted(false), shortcut(false),
|
||||||
|
forward(false), backward(false), roundabout(false), ignore_in_grid(false),
|
||||||
|
travel_mode(TRAVEL_MODE_INACCESSIBLE)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
int distance;
|
||||||
|
unsigned edgeBasedNodeID;
|
||||||
|
unsigned nameID;
|
||||||
|
bool isAccessRestricted : 1;
|
||||||
|
bool shortcut : 1;
|
||||||
|
bool forward : 1;
|
||||||
|
bool backward : 1;
|
||||||
|
bool roundabout : 1;
|
||||||
|
bool ignore_in_grid : 1;
|
||||||
|
TravelMode travel_mode : 4;
|
||||||
|
|
||||||
|
void SwapDirectionFlags()
|
||||||
|
{
|
||||||
|
bool temp_flag = forward;
|
||||||
|
forward = backward;
|
||||||
|
backward = temp_flag;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool IsCompatibleTo(const NodeBasedEdgeData &other) const
|
||||||
|
{
|
||||||
|
return (forward == other.forward) && (backward == other.backward) &&
|
||||||
|
(nameID == other.nameID) && (ignore_in_grid == other.ignore_in_grid) &&
|
||||||
|
(travel_mode == other.travel_mode);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
using NodeBasedDynamicGraph = DynamicGraph<NodeBasedEdgeData>;
|
||||||
|
|
||||||
|
inline bool validateNeighborHood(const NodeBasedDynamicGraph& graph, const NodeID source)
|
||||||
|
{
|
||||||
|
for (auto edge = graph.BeginEdges(source); edge < graph.EndEdges(source); ++edge)
|
||||||
|
{
|
||||||
|
const auto& data = graph.GetEdgeData(edge);
|
||||||
|
if (!data.forward && !data.backward)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Invalid edge directions";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
auto target = graph.GetTarget(edge);
|
||||||
|
if (target == SPECIAL_NODEID)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Invalid edge target";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool found_reverse = false;
|
||||||
|
for (auto rev_edge = graph.BeginEdges(target); rev_edge < graph.EndEdges(target); ++rev_edge)
|
||||||
|
{
|
||||||
|
auto rev_target = graph.GetTarget(rev_edge);
|
||||||
|
if (rev_target == SPECIAL_NODEID)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Invalid reverse edge target";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (rev_target != source)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (found_reverse)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Found more than one reverse edge";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const auto& rev_data = graph.GetEdgeData(rev_edge);
|
||||||
|
|
||||||
|
// edge is incoming, this must be an outgoing edge
|
||||||
|
if (data.backward && !rev_data.forward)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Found no outgoing edge to an incoming edge!";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// edge is bi-directional, reverse must be as well
|
||||||
|
if (data.forward && data.backward && (!rev_data.forward || !rev_data.backward))
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Found bi-directional edge that is not bi-directional to both ends";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
found_reverse = true;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!found_reverse)
|
||||||
|
{
|
||||||
|
SimpleLogger().Write(logWARNING) << "Could not find reverse edge";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// This function checks if the overal graph is undirected (has an edge in each direction).
|
||||||
|
inline bool validateNodeBasedGraph(const NodeBasedDynamicGraph& graph)
|
||||||
|
{
|
||||||
|
for (auto source = 0u; source < graph.GetNumberOfNodes(); ++source)
|
||||||
|
{
|
||||||
|
if (!validateNeighborHood(graph, source))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Factory method to create NodeBasedDynamicGraph from NodeBasedEdges
|
||||||
|
// The since DynamicGraph expects directed edges, we need to insert
|
||||||
|
// two edges for undirected edges.
|
||||||
|
inline std::shared_ptr<NodeBasedDynamicGraph>
|
||||||
|
NodeBasedDynamicGraphFromImportEdges(int number_of_nodes, std::vector<NodeBasedEdge> &input_edge_list)
|
||||||
|
{
|
||||||
|
static_assert(sizeof(NodeBasedEdgeData) == 16,
|
||||||
|
"changing node based edge data size changes memory consumption");
|
||||||
|
|
||||||
|
DeallocatingVector<NodeBasedDynamicGraph::InputEdge> edges_list;
|
||||||
|
NodeBasedDynamicGraph::InputEdge edge;
|
||||||
|
|
||||||
|
// Since DynamicGraph assumes directed edges we have to make sure we transformed
|
||||||
|
// the compressed edge format into single directed edges. We do this to make sure
|
||||||
|
// every node also knows its incoming edges, not only its outgoing edges and use the backward=true
|
||||||
|
// flag to indicate which is which.
|
||||||
|
//
|
||||||
|
// We do the transformation in the following way:
|
||||||
|
//
|
||||||
|
// if the edge (a, b) is split:
|
||||||
|
// 1. this edge must be in only one direction, so its a --> b
|
||||||
|
// 2. there must be another directed edge b --> a somewhere in the data
|
||||||
|
// if the edge (a, b) is not split:
|
||||||
|
// 1. this edge be on of a --> b od a <-> b
|
||||||
|
// (a <-- b gets reducted to b --> a)
|
||||||
|
// 2. a --> b will be transformed to a --> b and b <-- a
|
||||||
|
// 3. a <-> b will be transformed to a <-> b and b <-> a (I think a --> b and b <-- a would work as well though)
|
||||||
|
for (const NodeBasedEdge &import_edge : input_edge_list)
|
||||||
|
{
|
||||||
|
// edges that are not forward get converted by flipping the end points
|
||||||
|
BOOST_ASSERT(import_edge.forward);
|
||||||
|
|
||||||
|
if (import_edge.forward)
|
||||||
|
{
|
||||||
|
edge.source = import_edge.source;
|
||||||
|
edge.target = import_edge.target;
|
||||||
|
edge.data.forward = import_edge.forward;
|
||||||
|
edge.data.backward = import_edge.backward;
|
||||||
|
}
|
||||||
|
|
||||||
|
BOOST_ASSERT(edge.source != edge.target);
|
||||||
|
|
||||||
|
edge.data.distance = static_cast<int>(import_edge.weight);
|
||||||
|
BOOST_ASSERT(edge.data.distance > 0);
|
||||||
|
edge.data.shortcut = false;
|
||||||
|
edge.data.roundabout = import_edge.roundabout;
|
||||||
|
edge.data.ignore_in_grid = import_edge.in_tiny_cc;
|
||||||
|
edge.data.nameID = import_edge.name_id;
|
||||||
|
edge.data.isAccessRestricted = import_edge.access_restricted;
|
||||||
|
edge.data.travel_mode = import_edge.travel_mode;
|
||||||
|
|
||||||
|
edges_list.push_back(edge);
|
||||||
|
|
||||||
|
if (!import_edge.is_split)
|
||||||
|
{
|
||||||
|
using std::swap; // enable ADL
|
||||||
|
swap(edge.source, edge.target);
|
||||||
|
edge.data.SwapDirectionFlags();
|
||||||
|
edges_list.push_back(edge);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tbb::parallel_sort(edges_list.begin(), edges_list.end());
|
||||||
|
|
||||||
|
auto graph = std::make_shared<NodeBasedDynamicGraph>(
|
||||||
|
static_cast<NodeBasedDynamicGraph::NodeIterator>(number_of_nodes), edges_list);
|
||||||
|
|
||||||
|
|
||||||
|
#ifndef NDEBUG
|
||||||
|
BOOST_ASSERT(validateNodeBasedGraph(*graph));
|
||||||
|
#endif
|
||||||
|
|
||||||
|
return graph;
|
||||||
|
}
|
||||||
|
|
||||||
|
#endif // NODE_BASED_GRAPH_HPP
|
41
data_structures/node_id.hpp
Normal file
41
data_structures/node_id.hpp
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Copyright (c) 2014, Project OSRM contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
of conditions and the following disclaimer.
|
||||||
|
Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||||
|
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||||
|
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef NODE_ID_HPP
|
||||||
|
#define NODE_ID_HPP
|
||||||
|
|
||||||
|
#include "../typedefs.h"
|
||||||
|
|
||||||
|
struct Cmp
|
||||||
|
{
|
||||||
|
using value_type = NodeID;
|
||||||
|
bool operator()(const NodeID left, const NodeID right) const { return left < right; }
|
||||||
|
value_type max_value() { return 0xffffffff; }
|
||||||
|
value_type min_value() { return 0x0; }
|
||||||
|
};
|
||||||
|
|
||||||
|
#endif // NODE_ID_HPP
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user