Merge branch 'master' into boost_optional_merge
commit 56d2d4dacd

.github/ISSUE_TEMPLATE/question.md (5 changed lines)
@@ -1,5 +0,0 @@
----
-name: Question
-about: Ask a question about OSRM
-labels: question
----

.github/workflows/osrm-backend.yml (89 changed lines)
@@ -34,7 +34,7 @@ jobs:
       - run: cmake --version
       - uses: actions/setup-node@v3
         with:
-          node-version: 16
+          node-version: 18
       - run: node --version
       - run: npm --version
       - name: Prepare environment
@@ -71,13 +71,13 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}

   format-taginfo-docs:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v3
       - name: Use Node.js
         uses: actions/setup-node@v3
         with:
-          node-version: 16
+          node-version: 18
       - name: Enable Node.js cache
         uses: actions/cache@v3
         with:
@@ -88,7 +88,7 @@ jobs:
       - name: Prepare environment
         run: |
           npm ci --ignore-scripts
-          clang-format-10 --version
+          clang-format-15 --version
       - name: Run checks
         run: |
           ./scripts/check_taginfo.py taginfo.json profiles/car.lua
@@ -147,7 +147,7 @@ jobs:
         include:
           - name: gcc-9-debug-cov
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Debug
@@ -158,7 +158,7 @@ jobs:

           - name: gcc-9-debug-asan-ubsan
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Debug
@@ -172,7 +172,7 @@ jobs:

           - name: clang-6.0-debug
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Debug
@@ -182,7 +182,7 @@ jobs:

           - name: clang-15.0-debug-clang-tidy
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-22.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Debug
@@ -193,7 +193,7 @@ jobs:

           - name: conan-linux-debug-asan-ubsan
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -204,7 +204,7 @@ jobs:

           - name: conan-linux-release
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -214,7 +214,7 @@ jobs:

           - name: gcc-12-release
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-22.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -225,7 +225,7 @@ jobs:

           - name: gcc-11-release
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -235,7 +235,7 @@ jobs:

           - name: gcc-10-release
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -244,7 +244,7 @@ jobs:

           - name: gcc-9-release
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -254,7 +254,7 @@ jobs:

           - name: gcc-9-conan-release-i686
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -267,7 +267,7 @@ jobs:

           - name: gcc-8-release
             continue-on-error: false
-            node: 16
+            node: 18
             runs-on: ubuntu-20.04
             BUILD_TOOLS: ON
             BUILD_TYPE: Release
@@ -278,7 +278,7 @@ jobs:
           - name: conan-linux-release-node
             build_node_package: true
             continue-on-error: false
-            node: 18
+            node: 20
             runs-on: ubuntu-20.04
             BUILD_TYPE: Release
             CCOMPILER: clang-6.0
@@ -289,7 +289,7 @@ jobs:
           - name: conan-linux-debug-node
             build_node_package: true
             continue-on-error: false
-            node: 18
+            node: 20
             runs-on: ubuntu-20.04
             BUILD_TYPE: Debug
             CCOMPILER: clang-6.0
@@ -300,7 +300,7 @@ jobs:
           - name: conan-macos-x64-release-node
             build_node_package: true
             continue-on-error: true
-            node: 18
+            node: 20
             runs-on: macos-11
             BUILD_TYPE: Release
             CCOMPILER: clang
@@ -312,7 +312,7 @@ jobs:
           - name: conan-macos-arm64-release-node
             build_node_package: true
             continue-on-error: true
-            node: 18
+            node: 20
             runs-on: macos-11
             BUILD_TYPE: Release
             CCOMPILER: clang
@@ -369,9 +369,9 @@ jobs:
         uses: actions/cache@v3
         with:
           path: ~/.conan
-          key: v6-conan-${{ matrix.name }}-${{ github.sha }}
+          key: v7-conan-${{ matrix.name }}-${{ github.sha }}
           restore-keys: |
-            v6-conan-${{ matrix.name }}-
+            v7-conan-${{ matrix.name }}-
       - name: Enable test cache
         uses: actions/cache@v3
         with:
@@ -462,6 +462,12 @@ jobs:
           fi
           echo "CC=${CCOMPILER}" >> $GITHUB_ENV
           echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV
+          if [[ "${RUNNER_OS}" == "macOS" ]]; then
+            # missing from GCC path, needed for conan builds of libiconv, for example.
+            sudo xcode-select --switch /Library/Developer/CommandLineTools
+            echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV
+            echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV
+          fi

       - name: Build and install OSRM
         run: |
@@ -541,16 +547,6 @@ jobs:
           ./src/benchmarks/rtree-bench ../test/data/monaco.osrm.ramIndex ../test/data/monaco.osrm.fileIndex ../test/data/monaco.osrm.nbg_nodes
           popd

-      - name: Use Node 16
-        if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' && matrix.ENABLE_APPLE_SILICON != 'ON' }}
-        uses: actions/setup-node@v3
-        with:
-          node-version: 16
-      - name: Run Node package tests on Node 16
-        if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' && matrix.ENABLE_APPLE_SILICON != 'ON' }}
-        run: |
-          node --version
-          npm run nodejs-tests
       - name: Use Node 18
         if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' && matrix.ENABLE_APPLE_SILICON != 'ON' }}
         uses: actions/setup-node@v3
@@ -561,6 +557,16 @@ jobs:
         run: |
           node --version
           npm run nodejs-tests
+      - name: Use Node 20
+        if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' && matrix.ENABLE_APPLE_SILICON != 'ON' }}
+        uses: actions/setup-node@v3
+        with:
+          node-version: 20
+      - name: Run Node package tests on Node 20
+        if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' && matrix.ENABLE_APPLE_SILICON != 'ON' }}
+        run: |
+          node --version
+          npm run nodejs-tests
       - name: Use Node latest
         if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' && matrix.ENABLE_APPLE_SILICON != 'ON' }}
         uses: actions/setup-node@v3
@@ -585,15 +591,16 @@ jobs:
           lcov --directory . --capture --output-file coverage.info # capture coverage info
           lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system
           lcov --list coverage.info #debug info
-      # Uploading report to CodeCov
-      - name: Upload code coverage
-        if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
-        uses: codecov/codecov-action@v1
-        with:
-          files: coverage.info
-          name: codecov-osrm-backend
-          fail_ci_if_error: true
-          verbose: true
+      # # Uploading report to CodeCov
+      # - name: Upload code coverage
+      #   if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
+      #   uses: codecov/codecov-action@v1
+      #   with:
+      #     files: coverage.info
+      #     name: codecov-osrm-backend
+      #     fail_ci_if_error: true
+      #     verbose: true
       - name: Check Apple Silicon binary
         if: ${{ matrix.ENABLE_APPLE_SILICON == 'ON' }}
         run: |

CHANGELOG.md (22 changed lines)
@@ -1,16 +1,27 @@
 # Unreleased
 - Changes from 5.27.1
   - Features
+    - ADDED: Add support for a keepalive_timeout flag. [#6674](https://github.com/Project-OSRM/osrm-backend/pull/6674)
     - ADDED: Add support for a default_radius flag. [#6575](https://github.com/Project-OSRM/osrm-backend/pull/6575)
+    - ADDED: Add support for disabling feature datasets. [#6666](https://github.com/Project-OSRM/osrm-backend/pull/6666)
+    - ADDED: Add support for opposite approach request parameter. [#6842](https://github.com/Project-OSRM/osrm-backend/pull/6842)
+    - ADDED: Add support for accessing edge flags in `process_segment` [#6658](https://github.com/Project-OSRM/osrm-backend/pull/6658)
   - Build:
     - ADDED: Add CI job which builds OSRM with gcc 12. [#6455](https://github.com/Project-OSRM/osrm-backend/pull/6455)
     - CHANGED: Upgrade to clang-tidy 15. [#6439](https://github.com/Project-OSRM/osrm-backend/pull/6439)
     - CHANGED: Update actions/cache to v3. [#6420](https://github.com/Project-OSRM/osrm-backend/pull/6420)
     - REMOVED: Drop support of Node 12 & 14. [#6431](https://github.com/Project-OSRM/osrm-backend/pull/6431)
+    - ADDED: Add 'load directly' mode to default Cucumber test suite. [#6663](https://github.com/Project-OSRM/osrm-backend/pull/6663)
+    - CHANGED: Drop support for Node 16 [#6855](https://github.com/Project-OSRM/osrm-backend/pull/6855)
+    - REMOVED: Remove unused AppVeyor files [#6860](https://github.com/Project-OSRM/osrm-backend/pull/6860)
+    - CHANGED: Upgrade clang-format to version 15 [#6859](https://github.com/Project-OSRM/osrm-backend/pull/6859)
   - NodeJS:
     - CHANGED: Use node-api instead of NAN. [#6452](https://github.com/Project-OSRM/osrm-backend/pull/6452)
   - Misc:
     - FIXED: Partial fix migration from boost::optional to std::optional [#6551](https://github.com/Project-OSRM/osrm-backend/issues/6551), see also [#6592](https://github.com/Project-OSRM/osrm-backend/issues/6592)
+    - FIXED: Fix an error in a RouteParameters AnnotationsType operator overload. [#6646](https://github.com/Project-OSRM/osrm-backend/pull/6646)
+    - ADDED: Add support for "unlimited" to be passed as a value for the default-radius and max-matching-radius flags. [#6599](https://github.com/Project-OSRM/osrm-backend/pull/6599)
+    - CHANGED: Allow -1.0 as unlimited for default_radius value. [#6599](https://github.com/Project-OSRM/osrm-backend/pull/6599)
     - CHANGED: keep libosrm* in the docker image for downstream linking [#6602](https://github.com/Project-OSRM/osrm-backend/pull/6602)
     - CHANGED: Move vector in CSVFilesParser instead copying it. [#6470](https://github.com/Project-OSRM/osrm-backend/pull/6470)
     - REMOVED: Get rid of unused functions in util/json_util.hpp. [#6446](https://github.com/Project-OSRM/osrm-backend/pull/6446)
@@ -24,8 +35,19 @@
     - CHANGED: Replace boost::string_ref with std::string_view [#6433](https://github.com/Project-OSRM/osrm-backend/pull/6433)
     - ADDED: Print tracebacks for Lua runtime errors [#6564](https://github.com/Project-OSRM/osrm-backend/pull/6564)
     - FIXED: Added a variable to preprocessor guard in file osrm-backend/include/util/range_table.hpp to solve build error. [#6596](https://github.com/Project-OSRM/osrm-backend/pull/6596)
+    - FIXED: Ensure required file check in osrm-routed is correctly enforced. [#6655](https://github.com/Project-OSRM/osrm-backend/pull/6655)
+    - FIXED: Correct HTTP docs to reflect summary output dependency on steps parameter. [#6655](https://github.com/Project-OSRM/osrm-backend/pull/6655)
+    - ADDED: Extract prerelease/build information from package semver [#6839](https://github.com/Project-OSRM/osrm-backend/pull/6839)
+  - Profiles:
+    - FIXED: Bicycle and foot profiles now don't route on proposed ways [#6615](https://github.com/Project-OSRM/osrm-backend/pull/6615)
   - Routing:
     - FIXED: Fix adding traffic signal penalties during compression [#6419](https://github.com/Project-OSRM/osrm-backend/pull/6419)
+    - FIXED: Correctly handle compressed traffic signals. [#6724](https://github.com/Project-OSRM/osrm-backend/pull/6724)
+    - FIXED: Fix bug when searching for maneuver overrides [#6739](https://github.com/Project-OSRM/osrm-backend/pull/6739)
+    - FIXED: Remove force-loop checks for routes with u-turns [#6858](https://github.com/Project-OSRM/osrm-backend/pull/6858)
+  - Debug tiles:
+    - FIXED: Ensure speed layer features have unique ids. [#6726](https://github.com/Project-OSRM/osrm-backend/pull/6726)

 # 5.27.1
 - Changes from 5.27.0
   - Misc:

@@ -49,9 +49,6 @@ endif()
 list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")

 # be compatible with version handling before cmake 3.x
-if (POLICY CMP0048)
-  cmake_policy(SET CMP0048 OLD)
-endif()
 if (POLICY CMP0057)
   cmake_policy(SET CMP0057 NEW)
 endif()
@@ -73,14 +70,17 @@ include(JSONParser)
 file(READ "package.json" packagejsonraw)
 sbeParseJson(packagejson packagejsonraw)

-if (packagejson.version MATCHES "^([0-9]+)\.([0-9]+)\.([0-9]+)")
+# This regex is not strict enough, but the correct one is too complicated for cmake matching.
+# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
+if (packagejson.version MATCHES "^([0-9]+)\.([0-9]+)\.([0-9]+)([-+][0-9a-zA-Z.-]+)?$")
   set(OSRM_VERSION_MAJOR ${CMAKE_MATCH_1})
   set(OSRM_VERSION_MINOR ${CMAKE_MATCH_2})
   set(OSRM_VERSION_PATCH ${CMAKE_MATCH_3})
+  set(OSRM_VERSION_PRERELEASE_BUILD ${CMAKE_MATCH_4})

-  set(OSRM_VERSION "${OSRM_VERSION_MAJOR}.${OSRM_VERSION_MINOR}.${OSRM_VERSION_PATCH}")
+  set(OSRM_VERSION packagejson.version)
 else()
-  message(FATAL_ERROR "Version from package.json cannot be parsed, expected semver compatible X.Y.Z, but found ${packagejson.version}")
+  message(FATAL_ERROR "Version from package.json cannot be parsed, expected semver compatible label, but found ${packagejson.version}")
 endif()

 if (MSVC)
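For illustration only (not part of the commit), the relaxed pattern above can be exercised outside CMake; the same expression behaves like this in JavaScript:

```js
// Illustration of the pattern used in the CMake MATCHES check above.
const semver = /^([0-9]+)\.([0-9]+)\.([0-9]+)([-+][0-9a-zA-Z.-]+)?$/;

console.log(semver.exec('5.27.1'));         // captures 5, 27, 1; no prerelease/build group
console.log(semver.exec('5.28.0-rc.1'));    // fourth group captures '-rc.1'
console.log(semver.exec('5.28.0+build.7')); // fourth group captures '+build.7'
console.log(semver.exec('v5.27.1'));        // null: a leading 'v' is rejected by the anchors
```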
@@ -275,8 +275,11 @@ elseif(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
 endif()

 if(UNIX AND NOT APPLE)
+  find_library(RT_LIB rt)
+  if (RT_LIB)
   set(MAYBE_RT_LIBRARY -lrt)
   endif()
+endif()

 find_package(Threads REQUIRED)

@@ -325,11 +328,13 @@ if(ENABLE_CONAN)

   include(${CMAKE_CURRENT_LIST_DIR}/cmake/conan.cmake)

-  set(CONAN_BOOST_VERSION "1.79.0#96e4902111a2e343a8ba0aa95391bb58")
-  set(CONAN_BZIP2_VERSION "1.0.8#d1b2d5816f25865acf978501dff1f897")
-  set(CONAN_EXPAT_VERSION "2.2.10#916908d4a570ad839edd25322c3268cd")
-  set(CONAN_LUA_VERSION "5.4.4#3ec62efc37cd0a5d80b9e5cb35277360")
-  set(CONAN_TBB_VERSION "2021.3.0#507ec17cbd51a84167e143b20d170eea")
+  conan_check(REQUIRED)
+
+  set(CONAN_BOOST_VERSION "1.79.0@#96e4902111a2e343a8ba0aa95391bb58")
+  set(CONAN_BZIP2_VERSION "1.0.8@#d1b2d5816f25865acf978501dff1f897")
+  set(CONAN_EXPAT_VERSION "2.2.10@#916908d4a570ad839edd25322c3268cd")
+  set(CONAN_LUA_VERSION "5.4.4@#3ec62efc37cd0a5d80b9e5cb35277360")
+  set(CONAN_TBB_VERSION "2021.3.0@#507ec17cbd51a84167e143b20d170eea")

   set(CONAN_SYSTEM_INCLUDES ON)

@@ -358,6 +363,13 @@ if(ENABLE_CONAN)
     boost:without_stacktrace=True # Apple Silicon cross-compilation fails without it
     BUILD missing
   )

+  # Enable revisions in the conan config
+  execute_process(COMMAND ${CONAN_CMD} config set general.revisions_enabled=1 RESULT_VARIABLE RET_CODE)
+  if(NOT "${RET_CODE}" STREQUAL "0")
+    message(FATAL_ERROR "Error setting revisions for Conan: '${RET_CODE}'")
+  endif()
+
   # explicitly say Conan to use x86 dependencies if build for x86 platforms (https://github.com/conan-io/cmake-conan/issues/141)
   if(NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
     conan_cmake_run("${CONAN_ARGS};ARCH;x86")
@@ -459,6 +471,9 @@ add_dependency_defines(-DBOOST_SPIRIT_USE_PHOENIX_V3)
 add_dependency_defines(-DBOOST_RESULT_OF_USE_DECLTYPE)
 add_dependency_defines(-DBOOST_FILESYSTEM_NO_DEPRECATED)

+# Workaround for https://github.com/boostorg/phoenix/issues/111
+add_dependency_defines(-DBOOST_PHOENIX_STL_TUPLE_H_)
+
 add_definitions(${OSRM_DEFINES})
 include_directories(SYSTEM ${DEPENDENCIES_INCLUDE_DIRS})

@@ -636,6 +651,10 @@ install(TARGETS osrm_guidance DESTINATION lib)
 set(DefaultProfilesDir profiles)
 install(DIRECTORY ${DefaultProfilesDir} DESTINATION share/osrm)

+# Install data geojson files to /usr/local/share/osrm/data by default
+set(DefaultProfilesDir data)
+install(DIRECTORY ${DefaultProfilesDir} DESTINATION share/osrm)
+
 # Setup exporting variables for pkgconfig and subproject
 #


@@ -10,7 +10,7 @@ You can add a :+1: emoji reaction to the issue if you want to express interest i

 # Developer

-We use `clang-format` version `3.8` to consistently format the code base. There is a helper script under `scripts/format.sh`.
+We use `clang-format` version `15` to consistently format the code base. There is a helper script under `scripts/format.sh`.
 The format is automatically checked by the `mason-linux-release` job of a Travis CI build.
 To save development time a local hook `.git/hooks/pre-push`
 ```

@@ -20,7 +20,7 @@ For a quick introduction about how the road network is represented in OpenStreet
 Related [Project-OSRM](https://github.com/Project-OSRM) repositories:
 - [osrm-frontend](https://github.com/Project-OSRM/osrm-frontend) - User-facing frontend with map. The demo server runs this on top of the backend
 - [osrm-text-instructions](https://github.com/Project-OSRM/osrm-text-instructions) - Text instructions from OSRM route response
-- [osrm-backend-docker](https://hub.docker.com/r/osrm/osrm-backend/) - Ready to use Docker images
+- [osrm-backend-docker](https://github.com/project-osrm/osrm-backend/pkgs/container/osrm-backend) - Ready to use Docker images

 ## Documentation

@@ -58,12 +58,12 @@ Download OpenStreetMap extracts for example from [Geofabrik](http://download.geo

 Pre-process the extract with the car profile and start a routing engine HTTP server on port 5000

-    docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-extract -p /opt/car.lua /data/berlin-latest.osm.pbf || "osrm-extract failed"
+    docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-extract -p /opt/car.lua /data/berlin-latest.osm.pbf || echo "osrm-extract failed"

 The flag `-v "${PWD}:/data"` creates the directory `/data` inside the docker container and makes the current working directory `"${PWD}"` available there. The file `/data/berlin-latest.osm.pbf` inside the container is referring to `"${PWD}/berlin-latest.osm.pbf"` on the host.

-    docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-partition /data/berlin-latest.osrm || "osrm-partition failed"
-    docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-customize /data/berlin-latest.osrm || "osrm-customize failed"
+    docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-partition /data/berlin-latest.osrm || echo "osrm-partition failed"
+    docker run -t -v "${PWD}:/data" ghcr.io/project-osrm/osrm-backend osrm-customize /data/berlin-latest.osrm || echo "osrm-customize failed"

 Note there is no `berlin-latest.osrm` file, but multiple `berlin-latest.osrm.*` files, i.e. `berlin-latest.osrm` is not file path, but "base" path referring to set of files and there is an option to omit this `.osrm` suffix completely(e.g. `osrm-partition /data/berlin-latest`).


appveyor.yml (15 changed lines)
@@ -1,15 +0,0 @@
-os: Visual Studio 2019
-
-# clone directory
-clone_folder: c:\projects\osrm
-
-platform: x64
-
-# no-op for the time being until someone with access to GitHub checks settings will remove integration with AppVeyor
-# https://github.com/Project-OSRM/osrm-backend/pull/6312#issuecomment-1217237055
-build_script:
-  - EXIT 0
-
-branches:
-  only:
-    - master

@@ -1,32 +0,0 @@
-@ECHO OFF
-SETLOCAL
-SET EL=0
-
-ECHO ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %~f0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-SET PLATFORM=x64
-SET CONFIGURATION=Release
-::SET LOCAL_DEV=1
-
-FOR /F "tokens=*" %%i in ('git rev-parse --abbrev-ref HEAD') do SET APPVEYOR_REPO_BRANCH=%%i
-ECHO APPVEYOR_REPO_BRANCH^: %APPVEYOR_REPO_BRANCH%
-
-SET PATH=C:\Program Files\7-Zip;%PATH%
-
-powershell Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted -Force
-IF %ERRORLEVEL% NEQ 0 GOTO ERROR
-CALL appveyor-build.bat
-IF %ERRORLEVEL% NEQ 0 GOTO ERROR
-
-GOTO DONE
-
-
-:ERROR
-ECHO ~~~~~~~~~~~~~~~~~~~~~~ ERROR %~f0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-ECHO ERRORLEVEL^: %ERRORLEVEL%
-SET EL=%ERRORLEVEL%
-
-:DONE
-ECHO ~~~~~~~~~~~~~~~~~~~~~~ DONE %~f0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-EXIT /b %EL%

@@ -1,6 +1,6 @@
 module.exports = {
-  default: '--strict --tags ~@stress --tags ~@todo --tags ~@mld-only --require features/support --require features/step_definitions',
-  verify: '--strict --tags ~@stress --tags ~@todo --tags ~@mld-only -f progress --require features/support --require features/step_definitions',
+  default: '--strict --tags ~@stress --tags ~@todo --tags ~@mld --require features/support --require features/step_definitions',
+  ch: '--strict --tags ~@stress --tags ~@todo --tags ~@mld -f progress --require features/support --require features/step_definitions',
   todo: '--strict --tags @todo --require features/support --require features/step_definitions',
   all: '--strict --require features/support --require features/step_definitions',
   mld: '--strict --tags ~@stress --tags ~@todo --tags ~@ch --require features/support --require features/step_definitions -f progress'

docs/http.md (11 changed lines)
@@ -35,7 +35,7 @@ To pass parameters to each location some options support an array-like encoding:
 |radiuses |`{radius};{radius}[;{radius} ...]` |Limits the search to given radius in meters. |
 |generate\_hints |`true` (default), `false` |Adds a Hint to the response which can be used in subsequent requests, see `hints` parameter. |
 |hints |`{hint};{hint}[;{hint} ...]` |Hint from previous request to derive position in street network. |
-|approaches |`{approach};{approach}[;{approach} ...]` |Keep waypoints on curbside. |
+|approaches |`{approach};{approach}[;{approach} ...]` |Restrict the direction on the road network at a waypoint, relative to the input coordinate. |
 |exclude |`{class}[,{class}]` |Additive list of classes to avoid, the order does not matter. |
 |snapping |`default` (default), `any` |Default snapping avoids is_startpoint (see profile) edges, `any` will snap to any edge in the graph |
 |skip_waypoints |`true`, `false` (default) |Removes waypoints from the response. Waypoints are still calculated, but not serialized. Could be useful in case you are interested in some other part of the response and do not want to transfer waste data. |
@@ -47,7 +47,7 @@ Where the elements follow the following format:
 |bearing |`{value},{range}` `integer 0 .. 360,integer 0 .. 180` |
 |radius |`double >= 0` or `unlimited` (default) |
 |hint |Base64 `string` |
-|approach |`curb` or `unrestricted` (default) |
+|approach |`curb`, `opposite` or `unrestricted` (default) |
 |class |A class name determined by the profile or `none`. |

 ```
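As a sketch only (host, port and coordinates are invented, assuming a locally running `osrm-routed`), a request using the new `opposite` value could look like this:

```js
// One approach value per input coordinate, separated by ';' as described above.
const url =
  'http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.397634,52.529407' +
  '?approaches=curb;opposite';

fetch(url)
  .then((res) => res.json())
  .then((body) => console.log(body.code)); // 'Ok' on success, 'InvalidValue' for an unknown approach
```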
@@ -92,6 +92,7 @@ Every response object has a `code` property containing one of the strings below
 | `InvalidValue` | The successfully parsed query parameters are invalid. |
 | `NoSegment` | One of the supplied input coordinates could not snap to the street segment. |
 | `TooBig` | The request size violates one of the service-specific request size restrictions. |
+| `DisabledDataset` | The request tried to access a disabled dataset. |

 - `message` is a **optional** human-readable error message. All other status types are service-dependent.
 - In case of an error the HTTP status code will be `400`. Otherwise, the HTTP status code will be `200` and `code` will be `Ok`.
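A client could treat the new code like any other error status; the following is only a sketch (URL and coordinates are invented):

```js
// Sketch: detect the new error code when the server was started with a dataset disabled.
fetch('http://127.0.0.1:5000/route/v1/driving/13.39,52.52;13.40,52.53?steps=true')
  .then((res) => res.json())
  .then((body) => {
    if (body.code === 'DisabledDataset') {
      console.error(body.message); // `message` may be absent, see above
    }
  });
```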
@@ -648,10 +649,10 @@ Represents a route between two waypoints.
 - `distance`: The distance traveled by this route leg, in `float` meters.
 - `duration`: The estimated travel time, in `float` number of seconds.
 - `weight`: The calculated weight of the route leg.
-- `summary`: Summary of the route taken as `string`. Depends on the `summary` parameter:
+- `summary`: Summary of the route taken as `string`. Depends on the `steps` parameter:

-  | summary      |                                                                         |
-  |--------------|-----------------------------------------------------------------------|
+  | steps |                                                                         |
+  |-------|-----------------------------------------------------------------------|
   | true | Names of the two major roads used. Can be empty if the route is too short.|
   | false | empty `string` |

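To see the dependency in practice, a hedged sketch (server URL and coordinates invented):

```js
// With steps=false (the default) the leg summary comes back as an empty string;
// with steps=true it names the major roads used.
const base =
  'http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.397634,52.529407';

fetch(`${base}?steps=true`)
  .then((res) => res.json())
  .then((body) => console.log(body.routes[0].legs[0].summary));
```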

@@ -31,6 +31,7 @@ var osrm = new OSRM('network.osrm');
 Old behaviour: Path to a file on disk to store the memory using mmap. Current behaviour: setting this value is the same as setting `mmap_memory: true`.
 - `options.mmap_memory` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** Map on-disk files to virtual memory addresses (mmap), rather than loading into RAM.
 - `options.path` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** The path to the `.osrm` files. This is mutually exclusive with setting {options.shared_memory} to true.
+- `options.disable_feature_dataset` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Disables a feature dataset from being loaded into memory if not needed. Options: `ROUTE_STEPS`, `ROUTE_GEOMETRY`.
 - `options.max_locations_trip` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Max. locations supported in trip query (default: unlimited).
 - `options.max_locations_viaroute` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Max. locations supported in viaroute query (default: unlimited).
 - `options.max_locations_distance_table` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Max. locations supported in distance table query (default: unlimited).
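A sketch of how the new option might be passed to the constructor (module name and dataset path are illustrative):

```js
const OSRM = require('osrm'); // package name may differ depending on installation

// Skip loading the geometry dataset when only table/nearest style queries are needed.
const osrm = new OSRM({
  path: 'network.osrm',
  disable_feature_dataset: ['ROUTE_GEOMETRY'],
});
```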
@@ -62,7 +63,7 @@ Returns the fastest route between two or more coordinates while visiting the way
 - `options.geometries` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Returned route geometry format (influences overview and per step). Can also be `geojson`. (optional, default `polyline`)
 - `options.overview` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Add overview geometry either `full`, `simplified` according to highest zoom level it could be display on, or not at all (`false`). (optional, default `simplified`)
 - `options.continue_straight` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** Forces the route to keep going straight at waypoints and don't do a uturn even if it would be faster. Default value depends on the profile.
-- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
+- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Restrict the direction on the road network at a waypoint, relative to the input coordinate. Can be `null` (unrestricted, default), `curb` or `opposite`.
 `null`/`true`/`false`
 - `options.waypoints` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Indices to coordinates to treat as waypoints. If not supplied, all coordinates are waypoints. Must include first and last coordinate index.
 - `options.format` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which output format to use, either `json`, or [`flatbuffers`](https://github.com/Project-OSRM/osrm-backend/tree/master/include/engine/api/flatbuffers).
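For the array form there is one entry per coordinate; a hedged sketch reusing the `osrm` instance from the constructor sketch above (coordinates invented):

```js
osrm.route(
  {
    coordinates: [
      [13.38886, 52.517037], // [longitude, latitude]
      [13.397634, 52.529407],
    ],
    approaches: [null, 'opposite'], // leave the first waypoint unrestricted
  },
  (err, result) => {
    if (err) throw err;
    console.log(result.routes[0].duration);
  }
);
```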
@@ -100,7 +101,7 @@ Note: `coordinates` in the general options only supports a single `{longitude},{
 - `options.generate_hints` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Whether or not adds a Hint to the response which can be used in subsequent requests. (optional, default `true`)
 - `options.number` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** Number of nearest segments that should be returned.
 Must be an integer greater than or equal to `1`. (optional, default `1`)
-- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
+- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Restrict the direction on the road network at a waypoint, relative to the input coordinate. Can be `null` (unrestricted, default), `curb` or `opposite`.
 - `options.format` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which output format to use, either `json`, or [`flatbuffers`](https://github.com/Project-OSRM/osrm-backend/tree/master/include/engine/api/flatbuffers).
 - `options.snapping` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which edges can be snapped to, either `default`, or `any`. `default` only snaps to edges marked by the profile as `is_startpoint`, `any` will allow snapping to any edge in the routing graph.
 - `callback` **[Function](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function)**
@@ -140,7 +141,7 @@ Optionally returns distance table.
 - `options.sources` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** An array of `index` elements (`0 <= integer < #coordinates`) to use
 location with given index as source. Default is to use all.
 - `options.destinations` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** An array of `index` elements (`0 <= integer < #coordinates`) to use location with given index as destination. Default is to use all.
-- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
+- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Restrict the direction on the road network at a waypoint, relative to the input coordinate. Can be `null` (unrestricted, default), `curb` or `opposite`.
 - `options.fallback_speed` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Replace `null` responses in result with as-the-crow-flies estimates based on `fallback_speed`. Value is in metres/second.
 - `options.fallback_coordinate` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Either `input` (default) or `snapped`. If using a `fallback_speed`, use either the user-supplied coordinate (`input`), or the snapped coordinate (`snapped`) for calculating the as-the-crow-flies distance between two points.
 - `options.scale_factor` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Multiply the table duration values in the table by this number for more controlled input into a route optimization solver.
@@ -297,7 +298,7 @@ Right now, the following combinations are possible:
 - `options.roundtrip` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Return route is a roundtrip. (optional, default `true`)
 - `options.source` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Return route starts at `any` or `first` coordinate. (optional, default `any`)
 - `options.destination` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** Return route ends at `any` or `last` coordinate. (optional, default `any`)
-- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
+- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Restrict the direction on the road network at a waypoint, relative to the input coordinate. Can be `null` (unrestricted, default), `curb` or `opposite`.
 - `options.snapping` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which edges can be snapped to, either `default`, or `any`. `default` only snaps to edges marked by the profile as `is_startpoint`, `any` will allow snapping to any edge in the routing graph.
 - `callback` **[Function](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function)**


@@ -78,6 +78,15 @@ Feature: Bike - Accessability of different way types
             | construction | yes | | |
             | construction | | yes | |

+    @proposed
+    Scenario: Bike - Don't allow routing on ways still being proposed
+        Then routability should be
+            | highway | foot | bicycle | proposed | bothw |
+            | primary | | | | x |
+            | proposed | | | | |
+            | proposed | yes | | yes | |
+            | proposed | | yes | yes | |
+
     @roundabout
     Scenario: Bike - Don't push bikes against oneway flow on roundabouts
         Then routability should be

@@ -39,7 +39,7 @@ Feature: Car - Handle traffic lights
             | k | n | 20.7s | turn with traffic light |


-    Scenario: Car - Traffic signal direction
+    Scenario: Car - Traffic signal direction straight
        Given the node map
            """
            a-1-b-2-c
@@ -112,14 +112,14 @@ Feature: Car - Handle traffic lights



-    Scenario: Car - Encounters a traffic light
+    Scenario: Car - Encounters a traffic light direction
        Given the node map
            """
-           a f k
-           | | |
-           b-c-d h-g-i l-m-n
-           | | |
-           e j o
+           a f k p
+           | | | |
+           b-c-d h-g-i l-m-n q-r-s
+           | | | |
+           e j o t

            """
@@ -131,53 +131,70 @@ Feature: Car - Handle traffic lights
            | fgj | primary |
            | lmn | primary |
            | kmo | primary |
+           | qrs | primary |
+           | prt | primary |

        And the nodes
            | node | highway | traffic_signals:direction |
-           | g | traffic_signals | forward |
+           | g | traffic_signals | |
-           | m | traffic_signals | backward |
+           | m | traffic_signals | forward |
+           | r | traffic_signals | backward |


        When I route I should get
+           # Base case
            | from | to | time | # |
-           | a | d | 21.9s | no turn with no traffic light |
-           | a | e | 22.2s | no turn with traffic light |
            | a | b | 18.7s | turn with no traffic light |
-           | e | b | 21.9s | no turn with no traffic light |
+           | a | e | 22.2s | no turn with no traffic light |
-           | e | a | 22.2s | no turn with traffic light |
+           | a | d | 21.9s | turn with no traffic light |
+           | e | b | 21.9s | turn with no traffic light |
+           | e | a | 22.2s | no turn with no traffic light |
            | e | d | 18.7s | turn with no traffic light |
-           | d | e | 21.9s | no turn with no traffic light |
+           | d | e | 21.9s | turn with no traffic light |
-           | d | b | 11s | no turn with traffic light |
+           | d | b | 11s | no turn with no traffic light |
            | d | a | 18.7s | turn with no traffic light |
-           | b | a | 21.9s | no turn with no traffic light |
+           | b | a | 21.9s | turn with no traffic light |
-           | b | d | 11s | no turn with traffic light |
+           | b | d | 11s | no turn with no traffic light |
            | b | e | 18.7s | turn with no traffic light |
+           # All have traffic lights - 2s penalty
-           | f | i | 23.9s | no turn with no traffic light |
+           | f | h | 20.7s | turn with traffic light |
            | f | j | 24.2s | no turn with traffic light |
-           | f | h | 20.7s | turn with no traffic light |
+           | f | i | 23.9s | turn with traffic light |
-           | j | h | 21.9s | no turn with no traffic light |
+           | j | h | 23.9s | turn with traffic light |
-           | j | f | 22.2s | no turn with traffic light |
+           | j | f | 24.2s | no turn with traffic light |
-           | j | i | 18.7s | turn with no traffic light |
+           | j | i | 20.7s | turn with traffic light |
-           | i | j | 21.9s | no turn with no traffic light |
+           | i | j | 23.9s | turn with traffic light |
-           | i | h | 11s | no turn with traffic light |
+           | i | h | 13s | no turn with traffic light |
-           | i | f | 18.7s | turn with no traffic light |
+           | i | f | 20.7s | turn with traffic light |
-           | h | f | 23.9s | no turn with no traffic light |
+           | h | f | 23.9s | turn with traffic light |
            | h | i | 13s | no turn with traffic light |
-           | h | j | 20.7s | turn with no traffic light |
+           | h | j | 20.7s | turn with traffic light |
+           # Front direction have traffic lights - 2s penalty
-           | k | n | 21.9s | no turn with no traffic light |
+           | k | l | 20.7s | turn with traffic light |
-           | k | o | 22.2s | no turn with traffic light |
+           | k | o | 24.2s | no turn with traffic light |
-           | k | l | 18.7s | turn with no traffic light |
+           | k | n | 23.9s | turn with traffic light |
-           | o | l | 23.9s | no turn with no traffic light |
+           | o | l | 21.9s | turn with no traffic light |
-           | o | k | 24.2s | no turn with traffic light |
+           | o | k | 22.2s | no turn with no traffic light |
-           | o | n | 20.7s | turn with no traffic light |
+           | o | n | 18.7s | turn with no traffic light |
-           | n | o | 23.9s | no turn with no traffic light |
+           | n | o | 21.9s | turn with no traffic light |
-           | n | l | 13s | no turn with traffic light |
+           | n | l | 11s | no turn with no traffic light |
-           | n | k | 20.7s | turn with no traffic light |
+           | n | k | 18.7s | turn with no traffic light |
-           | l | k | 21.9s | no turn with no traffic light |
+           | l | k | 23.9s | turn with traffic light |
-           | l | n | 11s | no turn with traffic light |
+           | l | n | 13s | no turn with traffic light |
-           | l | o | 18.7s | turn with no traffic light |
+           | l | o | 20.7s | turn with traffic light |
+           # Reverse direction have traffic lights - 2s penalty
+           | p | q | 18.7s | turn with no traffic light |
+           | p | t | 22.2s | no turn with no traffic light |
+           | p | s | 21.9s | turn with no traffic light |
+           | t | q | 23.9s | turn with traffic light |
+           | t | p | 24.2s | no turn with traffic light |
+           | t | s | 20.7s | turn with traffic light |
+           | s | t | 23.9s | turn with traffic light |
+           | s | q | 13s | no turn with traffic light |
+           | s | p | 20.7s | turn with traffic light |
+           | q | p | 21.9s | turn with no traffic light |
+           | q | s | 11s | no turn with no traffic light |
+           | q | t | 18.7s | turn with no traffic light |


    Scenario: Traffic Signal Geometry
@ -343,3 +360,106 @@ Feature: Car - Handle traffic lights
 | from | to | route | speed | weights | time | distances | a:datasources | a:nodes | a:speed | a:duration | a:weight |
 | a | c | abc,abc | 65 km/h | 22.2,0 | 22.2s | 400m,0m | 1:0 | 1:2:3 | 18:18 | 11.1:11.1 | 11.1:11.1 |
 | c | a | abc,abc | 60 km/h | 24.2,0 | 24.2s | 400m,0m | 0:1 | 3:2:1 | 18:18 | 11.1:11.1 | 11.1:11.1 |

+Scenario: Car - Traffic signal straight direction with edge compression
+Given the node map
+"""
+a-1-b - c - d-2-e
+
+"""
+
+And the ways
+| nodes | highway |
+| abcde | primary |
+
+And the nodes
+| node | highway | traffic_signals:direction |
+| c | traffic_signals | forward |
+
+When I route I should get
+| from | to | time | weight | # |
+| 1 | 2 | 35.3s | 35.3 | no turn with traffic light |
+| 2 | 1 | 33.3s | 33.3 | no turn with no traffic light |
+
+Scenario: Car - Traffic signal turn direction with edge compression
+Given the node map
+"""
+d
+|
+2
+|
+a-1-b - c - f
+|
+e
+
+j
+|
+4
+|
+g-3-h - i - k
+|
+l
+
+"""
+
+And the ways
+| nodes | highway |
+| abc | primary |
+| cf | primary |
+| fd | primary |
+| fe | primary |
+| ghi | primary |
+| ik | primary |
+| kj | primary |
+| kl | primary |
+
+And the nodes
+| node | highway | traffic_signals:direction |
+| k | traffic_signals | forward |
+
+When I route I should get
+| from | to | time | weight | # |
+| 1 | 2 | 44.2s | 44.2 | turn with no traffic light |
+| 2 | 1 | 41s | 41 | turn with no traffic light |
+| 3 | 4 | 46.2s | 46.2 | turn with traffic light |
+| 4 | 3 | 41s | 41 | turn with no traffic light |
+
+Scenario: Car - Traffic signal turn direction with turn restriction
+Given the node map
+"""
+d
+|
+2
+|
+a-1-b - c - f
+|
+e
+
+"""
+
+And the ways
+| nodes | highway |
+| abc | primary |
+| cf | primary |
+| fd | primary |
+| fe | primary |
+
+And the nodes
+| node | highway | traffic_signals:direction |
+| f | traffic_signals | forward |
+
+And the relations
+| type | way:from | way:to | way:via | restriction |
+| restriction | abc | fe | cf | no_right_turn |
+
+And the relations
+| type | way:from | way:to | node:via | restriction |
+| restriction | df | fc | f | right_turn_only |
+
+When I route I should get
+| from | to | time | weight | # |
+| 1 | 2 | 46.2s | 46.2 | turn with traffic light |
+| 2 | 1 | 41s | 41 | turn with no traffic light |
@ -36,3 +36,9 @@ Feature: Foot - Accessability of different way types
 | highway | leisure | forw |
 | (nil) | track | x |

+Scenario: Foot - Proposed ways
+Then routability should be
+| highway | foot | proposed | forw |
+| footway | | | x |
+| proposed | | | |
+| proposed | yes | yes | |
@ -68,8 +68,9 @@ class OSRMDirectLoader extends OSRMBaseLoader {
         super(scope);
     }

-    load (inputFile, callback) {
-        this.inputFile = inputFile;
+    load (ctx, callback) {
+        this.inputFile = ctx.inputFile;
+        this.loaderArgs = ctx.loaderArgs;
         this.shutdown(() => {
             this.launch(callback);
         });
@ -78,7 +79,7 @@ class OSRMDirectLoader extends OSRMBaseLoader {
     osrmUp (callback) {
         if (this.osrmIsRunning()) return callback(new Error("osrm-routed already running!"));

-        const command_arguments = util.format('%s -p %d -i %s -a %s', this.inputFile, this.scope.OSRM_PORT, this.scope.OSRM_IP, this.scope.ROUTING_ALGORITHM);
+        const command_arguments = util.format('%s -p %d -i %s -a %s %s', this.inputFile, this.scope.OSRM_PORT, this.scope.OSRM_IP, this.scope.ROUTING_ALGORITHM, this.loaderArgs);
         this.child = this.scope.runBin('osrm-routed', command_arguments, this.scope.environment, (err) => {
             if (err && err.signal !== 'SIGINT') {
                 this.child = null;
@ -101,8 +102,9 @@ class OSRMmmapLoader extends OSRMBaseLoader {
         super(scope);
     }

-    load (inputFile, callback) {
-        this.inputFile = inputFile;
+    load (ctx, callback) {
+        this.inputFile = ctx.inputFile;
+        this.loaderArgs = ctx.loaderArgs;
         this.shutdown(() => {
             this.launch(callback);
         });
@ -111,7 +113,7 @@ class OSRMmmapLoader extends OSRMBaseLoader {
     osrmUp (callback) {
         if (this.osrmIsRunning()) return callback(new Error("osrm-routed already running!"));

-        const command_arguments = util.format('%s -p %d -i %s -a %s --mmap', this.inputFile, this.scope.OSRM_PORT, this.scope.OSRM_IP, this.scope.ROUTING_ALGORITHM);
+        const command_arguments = util.format('%s -p %d -i %s -a %s --mmap %s', this.inputFile, this.scope.OSRM_PORT, this.scope.OSRM_IP, this.scope.ROUTING_ALGORITHM, this.loaderArgs);
         this.child = this.scope.runBin('osrm-routed', command_arguments, this.scope.environment, (err) => {
             if (err && err.signal !== 'SIGINT') {
                 this.child = null;
@ -134,8 +136,9 @@ class OSRMDatastoreLoader extends OSRMBaseLoader {
         super(scope);
     }

-    load (inputFile, callback) {
-        this.inputFile = inputFile;
+    load (ctx, callback) {
+        this.inputFile = ctx.inputFile;
+        this.loaderArgs = ctx.loaderArgs;

         this.loadData((err) => {
             if (err) return callback(err);
@ -148,7 +151,7 @@ class OSRMDatastoreLoader extends OSRMBaseLoader {
     }

     loadData (callback) {
-        const command_arguments = util.format('--dataset-name=%s %s', this.scope.DATASET_NAME, this.inputFile);
+        const command_arguments = util.format('--dataset-name=%s %s %s', this.scope.DATASET_NAME, this.inputFile, this.loaderArgs);
         this.scope.runBin('osrm-datastore', command_arguments, this.scope.environment, (err) => {
             if (err) return callback(new Error('*** osrm-datastore exited with ' + err.code + ': ' + err));
             callback();
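Note (added for this review, not part of the diff): the loaders above now take a context object instead of a bare file path. A minimal, self-contained sketch of that contract follows; `DummyLoader`, the file name, and the example flag are placeholders for illustration, not names taken from the repository:

    // Sketch of the new load(ctx, callback) shape used by the loaders above.
    class DummyLoader {
        load (ctx, callback) {
            this.inputFile = ctx.inputFile;   // path to the prepared .osrm dataset
            this.loaderArgs = ctx.loaderArgs; // extra CLI flags appended to osrm-routed / osrm-datastore
            callback(null);
        }
    }

    const loader = new DummyLoader();
    loader.load({ inputFile: 'data.osrm', loaderArgs: '--disable-feature-dataset ROUTE_GEOMETRY' }, (err) => {
        if (err) throw err;
        console.log('loaded', loader.inputFile, 'with extra args:', loader.loaderArgs);
    });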
141
features/options/data/disabled_dataset.feature
Normal file
@ -0,0 +1,141 @@
+@routing @disable-feature-dataset
+Feature: disable-feature-dataset command line options
+Background:
+Given the profile "testbot"
+And the node map
+"""
+0
+a b c
+"""
+And the ways
+| nodes |
+| ab |
+| bc |
+
+Scenario: disable-feature-dataset - geometry disabled error
+Given the data load extra arguments "--disable-feature-dataset ROUTE_GEOMETRY"
+
+# The default values
+And the query options
+| overview | simplified |
+| annotations | false |
+| steps | false |
+| skip_waypoints | false |
+
+When I route I should get
+| from | to | code |
+| a | c | DisabledDataset |
+
+When I plan a trip I should get
+| waypoints | code |
+| a,b,c | DisabledDataset |
+
+When I match I should get
+| trace | code |
+| abc | DisabledDataset |
+
+Scenario: disable-feature-dataset - geometry disabled error table
+Given the data load extra arguments "--disable-feature-dataset ROUTE_GEOMETRY"
+
+When I request nearest I should get
+| in | code |
+| 0 | DisabledDataset |
+
+When I request a travel time matrix with these waypoints I should get the response code
+| waypoints | code |
+| a,b,c | DisabledDataset |
+
+Scenario: disable-feature-dataset - geometry disabled success
+Given the data load extra arguments "--disable-feature-dataset ROUTE_GEOMETRY"
+
+# No geometry values returned
+And the query options
+| overview | false |
+| annotations | false |
+| steps | false |
+| skip_waypoints | true |
+
+When I route I should get
+| from | to | code |
+| a | c | Ok |
+
+When I plan a trip I should get
+| waypoints | code |
+| a,b,c | Ok |
+
+When I match I should get
+| trace | code |
+| abc | Ok |
+
+Scenario: disable-feature-dataset - geometry disabled error table
+Given the data load extra arguments "--disable-feature-dataset ROUTE_GEOMETRY"
+
+And the query options
+| skip_waypoints | true |
+
+# You would never do this, but just to prove the point.
+When I request nearest I should get
+| in | code |
+| 0 | Ok |
+
+When I request a travel time matrix with these waypoints I should get the response code
+| waypoints | code |
+| a,b,c | Ok |
+
+Scenario: disable-feature-dataset - steps disabled error
+Given the data load extra arguments "--disable-feature-dataset ROUTE_STEPS"
+
+# Default + annotations, steps
+And the query options
+| overview | simplified |
+| annotations | true |
+| steps | true |
+
+When I route I should get
+| from | to | code |
+| a | c | DisabledDataset |
+
+When I plan a trip I should get
+| waypoints | code |
+| a,b,c | DisabledDataset |
+
+When I match I should get
+| trace | code |
+| abc | DisabledDataset |
+
+Scenario: disable-feature-dataset - geometry disabled error table
+Given the data load extra arguments "--disable-feature-dataset ROUTE_STEPS"
+
+When I request nearest I should get
+| in | code |
+| 0 | Ok |
+
+When I request a travel time matrix with these waypoints I should get the response code
+| waypoints | code |
+| a,b,c | Ok |
+
+Scenario: disable-feature-dataset - steps disabled success
+Given the data load extra arguments "--disable-feature-dataset ROUTE_STEPS"
+
+# Default + steps
+And the query options
+| overview | simplified |
+| annotations | true |
+| steps | false |
+
+When I route I should get
+| from | to | code |
+| a | c | Ok |
+
+When I plan a trip I should get
+| waypoints | code |
+| a,b,c | Ok |
+
+When I match I should get
+| trace | code |
+| abc | Ok |
|
|||||||
Then it should exit successfully
|
Then it should exit successfully
|
||||||
And stdout should contain "node 42"
|
And stdout should contain "node 42"
|
||||||
And stdout should contain "way 42"
|
And stdout should contain "way 42"
|
||||||
|
|
||||||
|
Scenario: osrm-extract flags accessible in process_segment function
|
||||||
|
Given the profile file
|
||||||
|
"""
|
||||||
|
functions = require('testbot')
|
||||||
|
|
||||||
|
functions.process_segment = function (profile, segment)
|
||||||
|
print('segment forward ' .. tostring(segment.flags.forward) .. ' backward ' .. tostring(segment.flags.backward))
|
||||||
|
end
|
||||||
|
|
||||||
|
return functions
|
||||||
|
"""
|
||||||
|
|
||||||
|
And the node map
|
||||||
|
"""
|
||||||
|
a b
|
||||||
|
"""
|
||||||
|
And the ways
|
||||||
|
| nodes | oneway |
|
||||||
|
| ab | yes |
|
||||||
|
And the data has been saved to disk
|
||||||
|
When I run "osrm-extract --profile {profile_file} {osm_file}"
|
||||||
|
Then it should exit successfully
|
||||||
|
And stdout should contain "segment forward true backward false"
|
||||||
|
@ -23,6 +23,7 @@ Feature: osrm-routed command line options: help
 And stdout should contain "--max-table-size"
 And stdout should contain "--max-matching-size"
 And stdout should contain "--default-radius"
+And stdout should contain "--keepalive-timeout"
 And it should exit successfully

 Scenario: osrm-routed - Help, short
@ -44,6 +45,7 @@ Feature: osrm-routed command line options: help
 And stdout should contain "--max-table-size"
 And stdout should contain "--max-matching-size"
 And stdout should contain "--default-radius"
+And stdout should contain "--keepalive-timeout"
 And it should exit successfully

 Scenario: osrm-routed - Help, long
@ -65,4 +67,5 @@ Feature: osrm-routed command line options: help
 And stdout should contain "--max-table-size"
 And stdout should contain "--max-matching-size"
 And stdout should contain "--default-radius"
+And stdout should contain "--keepalive-timeout"
 And it should exit successfully
@ -33,6 +33,11 @@ module.exports = function () {
         callback();
     });

+    this.Given(/^the data load extra arguments "(.*?)"$/, (args, callback) => {
+        this.loaderArgs = this.expandOptions(args);
+        callback();
+    });
+
     this.Given(/^a grid size of ([0-9.]+) meters$/, (meters, callback) => {
         this.setGridSize(meters);
         callback();
@ -5,6 +5,7 @@ var FBResult = require('../support/fbresult_generated').osrm.engine.api.fbresult

 module.exports = function () {
 const durationsRegex = new RegExp(/^I request a travel time matrix I should get$/);
+const durationsCodeOnlyRegex = new RegExp(/^I request a travel time matrix with these waypoints I should get the response code$/);
 const distancesRegex = new RegExp(/^I request a travel distance matrix I should get$/);
 const estimatesRegex = new RegExp(/^I request a travel time matrix I should get estimates for$/);
 const durationsRegexFb = new RegExp(/^I request a travel time matrix with flatbuffers I should get$/);
@ -17,6 +18,7 @@ module.exports = function () {
 const FORMAT_FB = 'flatbuffers';

 this.When(durationsRegex, function(table, callback) {tableParse.call(this, table, DURATIONS_NO_ROUTE, 'durations', FORMAT_JSON, callback);}.bind(this));
+this.When(durationsCodeOnlyRegex, function(table, callback) {tableCodeOnlyParse.call(this, table, 'durations', FORMAT_JSON, callback);}.bind(this));
 this.When(distancesRegex, function(table, callback) {tableParse.call(this, table, DISTANCES_NO_ROUTE, 'distances', FORMAT_JSON, callback);}.bind(this));
 this.When(estimatesRegex, function(table, callback) {tableParse.call(this, table, DISTANCES_NO_ROUTE, 'fallback_speed_cells', FORMAT_JSON, callback);}.bind(this));
 this.When(durationsRegexFb, function(table, callback) {tableParse.call(this, table, DURATIONS_NO_ROUTE, 'durations', FORMAT_FB, callback);}.bind(this));
@ -27,6 +29,64 @@ const durationsParse = function(v) { return isNaN(parseInt(v)); };
 const distancesParse = function(v) { return isNaN(parseFloat(v)); };
 const estimatesParse = function(v) { return isNaN(parseFloat(v)); };

+function tableCodeOnlyParse(table, annotation, format, callback) {
+
+    const params = this.queryParams;
+    params.annotations = ['durations','fallback_speed_cells'].indexOf(annotation) !== -1 ? 'duration' : 'distance';
+    params.output = format;
+
+    var got;
+
+    this.reprocessAndLoadData((e) => {
+        if (e) return callback(e);
+        var testRow = (row, ri, cb) => {
+            var afterRequest = (err, res) => {
+                if (err) return cb(err);
+
+                for (var k in row) {
+                    var match = k.match(/param:(.*)/);
+                    if (match) {
+                        if (row[k] === '(nil)') {
+                            params[match[1]] = null;
+                        } else if (row[k]) {
+                            params[match[1]] = [row[k]];
+                        }
+                        got[k] = row[k];
+                    }
+                }
+
+                var json;
+                got.code = 'unknown';
+                if (res.body.length) {
+                    json = JSON.parse(res.body);
+                    got.code = json.code;
+                }
+
+                cb(null, got);
+            };
+
+            var params = this.queryParams,
+                waypoints = [];
+            if (row.waypoints) {
+                row.waypoints.split(',').forEach((n) => {
+                    var node = this.findNodeByName(n);
+                    if (!node) throw new Error(util.format('*** unknown waypoint node "%s"', n.trim()));
+                    waypoints.push({ coord: node, type: 'loc' });
+                });
+                got = { waypoints: row.waypoints };
+                this.requestTable(waypoints, params, afterRequest);
+            } else {
+                throw new Error('*** no waypoints');
+            }
+        };
+
+        this.processRowsAndDiff(table, testRow, callback);
+    });
+}
+
 function tableParse(table, noRoute, annotation, format, callback) {

 const parse = annotation == 'distances' ? distancesParse : (annotation == 'durations' ? durationsParse : estimatesParse);
@ -62,9 +122,6 @@ function tableParse(table, noRoute, annotation, format, callback) {
 });
 }

-var actual = [];
-actual.push(table.headers);

 this.reprocessAndLoadData((e) => {
 if (e) return callback(e);
 // compute matrix
@ -12,25 +12,31 @@ module.exports = function () {
 var inNode = this.findNodeByName(row.in);
 if (!inNode) throw new Error(util.format('*** unknown in-node "%s"', row.in));

-var outNode = this.findNodeByName(row.out);
-if (!outNode) throw new Error(util.format('*** unknown out-node "%s"', row.out));

 this.requestNearest(inNode, this.queryParams, (err, response) => {
 if (err) return cb(err);
 var coord;
 var headers = new Set(table.raw()[0]);

-if (response.statusCode === 200 && response.body.length) {
+var got = { in: row.in};
+
+if (response.body.length) {
 var json = JSON.parse(response.body);
+got.code = json.code;
+
-coord = json.waypoints[0].location;
+if (response.statusCode === 200) {

-var got = { in: row.in, out: row.out };

 if (headers.has('data_version')) {
 got.data_version = json.data_version || '';
 }

+if (json.waypoints && json.waypoints.length && row.out) {
+coord = json.waypoints[0].location;
+
+got.out = row.out;
+
+var outNode = this.findNodeByName(row.out);
+if (!outNode) throw new Error(util.format('*** unknown out-node "%s"', row.out));
+
 Object.keys(row).forEach((key) => {
 if (key === 'out') {
 if (this.FuzzyMatch.matchLocation(coord, outNode)) {
@ -40,7 +46,9 @@ module.exports = function () {
 }
 }
 });
+}
+
+}
 cb(null, got);
 }
 else {
@ -91,7 +91,7 @@ module.exports = function () {

 var encodedResult = '';

-if (json.trips) row.trips.split(',').forEach((sub, si) => {
+if (json.trips && row.trips) row.trips.split(',').forEach((sub, si) => {
 if (si >= subTrips.length) {
 ok = false;
 } else {
@ -134,7 +134,6 @@ module.exports = function () {
 } else {
 var params = this.queryParams,
 waypoints = [];
-params['steps'] = 'true';
 if (row.from && row.to) {
 var fromNode = this.findNodeByName(row.from);
 if (!fromNode) throw new Error(util.format('*** unknown from-node "%s"', row.from));
@ -280,10 +280,11 @@ module.exports = function () {
 };

 this.reprocessAndLoadData = (callback) => {
+let p = {loaderArgs: this.loaderArgs, inputFile: this.processedCacheFile};
 let queue = d3.queue(1);
 queue.defer(this.writeAndLinkOSM.bind(this));
 queue.defer(this.extractContractPartitionAndCustomize.bind(this));
-queue.defer(this.osrmLoader.load.bind(this.osrmLoader), this.processedCacheFile);
+queue.defer(this.osrmLoader.load.bind(this.osrmLoader), p);
 queue.awaitAll(callback);
 };

@ -27,7 +27,8 @@ module.exports = function () {
 this.DEFAULT_ENVIRONMENT = process.env;
 this.DEFAULT_PROFILE = 'bicycle';
 this.DEFAULT_INPUT_FORMAT = 'osm';
-this.DEFAULT_LOAD_METHOD = process.argv[process.argv.indexOf('-m') +1].match('mmap') ? 'mmap' : 'datastore';
+let loadMethod = process.argv[process.argv.indexOf('-m') +1];
+this.DEFAULT_LOAD_METHOD = loadMethod.match('mmap') ? 'mmap' : loadMethod.match('directly') ? 'directly' : 'datastore';
 this.DEFAULT_ORIGIN = [1,1];
 this.OSM_USER = 'osrm';
 this.OSM_UID = 1;
@ -37,6 +37,7 @@ module.exports = function () {
 this.contractArgs = '';
 this.partitionArgs = '';
 this.customizeArgs = '';
+this.loaderArgs = '';
 this.environment = Object.assign(this.DEFAULT_ENVIRONMENT);
 this.resetOSM();

@ -101,7 +101,8 @@ module.exports = function () {

 this.requestTrip = (waypoints, userParams, callback) => {
 var defaults = {
-output: 'json'
+output: 'json',
+steps: 'true'
 },
 params = this.overwriteParams(defaults, userParams);
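Note (added for this review, not part of the diff): the DEFAULT_LOAD_METHOD change above now recognises three load methods from the harness's '-m' argument. A small standalone sketch of that parsing; the function name is made up for illustration:

    // Mirrors the ternary chain introduced for DEFAULT_LOAD_METHOD.
    function pickLoadMethod(argv) {
        const idx = argv.indexOf('-m');
        const loadMethod = idx !== -1 ? (argv[idx + 1] || '') : '';
        if (loadMethod.match('mmap')) return 'mmap';
        if (loadMethod.match('directly')) return 'directly';
        return 'datastore';
    }

    console.log(pickLoadMethod(['-m', 'directly'])); // 'directly'
    console.log(pickLoadMethod([]));                 // 'datastore' (default)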
@ -40,7 +40,7 @@ Feature: Alternative route
 | 7 | 8 | ca,ab,bd,dc,ca,ca | |

-@mld-only
+@mld
 Scenario: Alternative loop paths on a single node with an asymmetric circle
 # The test checks only MLD implementation, alternatives results are unpredictable for CH on windows (#4691, #4693)
 Given a grid size of 10 meters
@ -38,7 +38,41 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted curb | ab,bc,bc |

-Scenario: Start End opposite approach, option unrestricted for Start and End
+Scenario: Start End same approach, option unrestricted for Start and opposite for End
+Given the profile "testbot"
+And the node map
+"""
+s e
+a------b------c
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc |
+
+Scenario: Start End same approach, option opposite for Start and curb for End
+Given the profile "testbot"
+And the node map
+"""
+s e
+a------b------c
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | opposite curb | ab,bc,bc |
+
+Scenario: Start End different approach, option unrestricted for Start and End
 Given the profile "testbot"
 And the node map
 """
@ -56,7 +90,7 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted unrestricted | ab,bc |

-Scenario: Start End opposite approach, option unrestricted for Start and curb for End
+Scenario: Start End different approach, option unrestricted for Start and curb for End
 Given the profile "testbot"
 And the node map
 """
@ -74,6 +108,43 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted curb | ab,bc |

+Scenario: Start End different approach, option unrestricted for Start and opposite for End
+Given the profile "testbot"
+And the node map
+"""
+s
+a------b------c
+e
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc,bc |
+
+Scenario: Start End different approach, option curb for Start and opposite for End
+Given the profile "testbot"
+And the node map
+"""
+e
+a------b------c-----------d
+s
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+| cd |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | curb opposite | cd,cd,ab,ab |
+
+
 ###############
 # Oneway Test #
@ -114,7 +185,41 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted curb | ab,bc |

-Scenario: Test on oneway segment, Start End opposite approach, option unrestricted for Start and End
+Scenario: Test on oneway segment, Start End same approach, option unrestricted for Start and opposite for End
+Given the profile "testbot"
+And the node map
+"""
+s e
+a------b------c
+"""
+
+And the ways
+| nodes | oneway |
+| ab | yes |
+| bc | yes |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc |
+
+Scenario: Test on oneway segment, Start End same approach, option opposite for Start and curb for End
+Given the profile "testbot"
+And the node map
+"""
+s e
+a------b------c
+"""
+
+And the ways
+| nodes | oneway |
+| ab | yes |
+| bc | yes |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | opposite curb | ab,bc |
+
+Scenario: Test on oneway segment, Start End different approach, option unrestricted for Start and End
 Given the profile "testbot"
 And the node map
 """
@ -132,7 +237,7 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted unrestricted | ab,bc |

-Scenario: Test on oneway segment, Start End opposite approach, option unrestricted for Start and curb for End
+Scenario: Test on oneway segment, Start End different approach, option unrestricted for Start and curb for End
 Given the profile "testbot"
 And the node map
 """
@ -150,6 +255,42 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted curb | ab,bc |

+Scenario: Test on oneway segment, Start End different approach, option unrestricted for Start and opposite for End
+Given the profile "testbot"
+And the node map
+"""
+s
+a------b------c
+e
+"""
+
+And the ways
+| nodes | oneway |
+| ab | yes |
+| bc | yes |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc |
+
+Scenario: Test on oneway segment, Start End different approach, option curb for Start and opposite for End
+Given the profile "testbot"
+And the node map
+"""
+s
+a------b------c
+e
+"""
+
+And the ways
+| nodes | oneway |
+| ab | yes |
+| bc | yes |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | curb opposite | ab,bc |
+
 ##############
 # UTurn Test #
 ##############
@ -175,6 +316,27 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted curb | |

+Scenario: UTurn test, router can find a route because uturn authorized to reach opposite side
+Given the profile "testbot"
+And the node map
+"""
+e s
+a------b------c
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+And the relations
+| type | way:from | way:to | node:via | restriction |
+| restriction | bc | bc | c | no_u_turn |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | curb opposite | bc,ab,ab |
+
+
 Scenario: UTurn test, router can find a route because he can use the roundabout
 Given the profile "testbot"
@ -200,6 +362,7 @@ Feature: Approach parameter
 When I route I should get
 | from | to | approaches | route |
 | s | e | unrestricted curb | ab,bc,bc |
+| s | e | opposite curb | ab,bc,bc |


 Scenario: Start End same approach, option unrestricted for Start and curb for End, left-hand driving
@ -228,6 +391,32 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted curb | ab,bc |

+Scenario: Start End same approach, option unrestricted for Start and opposite for End, left-hand driving
+Given the profile file
+"""
+local functions = require('testbot')
+local testbot_process_way = functions.process_way
+functions.process_way = function(profile, way, result)
+testbot_process_way(profile, way, result)
+result.is_left_hand_driving = true
+end
+return functions
+"""
+And the node map
+"""
+s e
+a------b------c
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc,bc |
+
+
 #######################
 # Left-side countries #
@ -260,9 +449,8 @@ Feature: Approach parameter
 """
 And the node map
 """
-s
+s e
 a------b------c
-e
 """

 And the ways
@ -272,9 +460,49 @@ Feature: Approach parameter

 When I route I should get
 | from | to | approaches | route |
-| s | e | unrestricted curb | ab,bc,bc |
+| s | e | unrestricted curb | ab,bc |

-Scenario: [Left-hand-side] Start End opposite approach, option unrestricted for Start and End
+Scenario: [Left-hand-side] Start End same approach, option unrestricted for Start and opposite for End
+Given the profile file "car" initialized with
+"""
+profile.properties.left_hand_driving = true
+"""
+And the node map
+"""
+s e
+a------b------c
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc,bc |
+
+Scenario: [Left-hand-side] Start End same approach, option opposite for Start and curb for End
+Given the profile file "car" initialized with
+"""
+profile.properties.left_hand_driving = true
+"""
+And the node map
+"""
+e s
+a------b------c
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | opposite curb | bc,ab,ab |
+
+Scenario: [Left-hand-side] Start End different approach, option unrestricted for Start and End
 Given the profile file "car" initialized with
 """
 profile.properties.left_hand_driving = true
@ -295,15 +523,16 @@ Feature: Approach parameter
 | from | to | approaches | route |
 | s | e | unrestricted unrestricted | ab,bc |

-Scenario: [Left-hand-side] Start End opposite approach, option unrestricted for Start and curb for End
+Scenario: [Left-hand-side] Start End different approach, option unrestricted for Start and curb for End
 Given the profile file "car" initialized with
 """
 profile.properties.left_hand_driving = true
 """
 And the node map
 """
-s e
+s
 a------b------c
+e
 """

 And the ways
@ -313,4 +542,121 @@ Feature: Approach parameter

 When I route I should get
 | from | to | approaches | route |
-| s | e | unrestricted curb | ab,bc |
+| s | e | unrestricted curb | ab,bc,bc |
+
+Scenario: [Left-hand-side] Start End different approach, option unrestricted for Start and opposite for End
+Given the profile file "car" initialized with
+"""
+profile.properties.left_hand_driving = true
+"""
+And the node map
+"""
+s
+a------b------c
+e
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | unrestricted opposite | ab,bc |
+
+Scenario: [Left-hand-side] Start End different approach, option curb for Start and opposite for End
+Given the profile file "car" initialized with
+"""
+profile.properties.left_hand_driving = true
+"""
+And the node map
+"""
+s
+a------b------c
+e
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+
+When I route I should get
+| from | to | approaches | route |
+| s | e | curb opposite | ab,bc |
+
+
+Scenario: Routes with more than two waypoints - uturns allowed
+Given the profile "testbot"
+And the node map
+"""
+2 1
+a------b------c-----------d
+|
+3 | 4
+e------f------g-----------h
+|
+|
+i
+
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+| cd |
+| bf |
+| ef |
+| fg |
+| gh |
+| ei |
+
+And the query options
+| continue_straight | false |
+
+When I route I should get
+| waypoints | approaches | locations | # |
+| 1,2,3,4 | curb curb curb curb | _,_,_,a,b,f,_,_,i,h,_ | 1,2,2,a,b,f,3,3,i,h,4 (Only u-turn at end of roads) |
+| 1,2,3,4 | curb unrestricted unrestricted curb | _,_,_,b,f,_,_,h,_ | 1,2,2,b,f,3,3,h,4 (Can u-turn at 2 and 3) |
+| 1,2,3,4 | opposite opposite opposite opposite | _,d,a,_,_,b,f,i,_,_,_ | 1,d,a,2,2,b,f,i,3,3,4 (Only u-turn at end of roads) |
+| 1,2,3,4 | opposite unrestricted unrestricted opposite | _,d,_,_,b,f,_,_,_ | 1,d,2,2,b,f,3,3,4 (Can u-turn at 2 and 3) |
+
+
+Scenario: Routes with more than two waypoints - uturns forbidden
+Given the profile "testbot"
+And the node map
+"""
+2 1
+a------b------c-----------d
+|
+3 | 4
+e------f------g-----------h
+|
+|
+i
+
+"""
+
+And the ways
+| nodes |
+| ab |
+| bc |
+| cd |
+| bf |
+| ef |
+| fg |
+| gh |
+| ei |
+
+And the query options
+| continue_straight | true |
+
+When I route I should get
+| waypoints | approaches | locations | # |
+| 1,2,3,4 | curb curb curb curb | _,_,_,a,b,f,_,_,i,h,_ | 1,2,2,a,b,f,3,3,i,h,4 (Only u-turn at end of roads) |
+| 1,2,3,4 | curb opposite opposite curb | _,a,_,_,b,f,i,_,_,h,_ | 1,a,2,2,b,f,i,3,3,h,4 (switches stops with u-turns) |
+| 1,2,3,4 | opposite opposite opposite opposite | _,d,a,_,_,b,f,i,_,_,_ | 1,d,a,2,2,b,f,i,3,3,4 (Only u-turn at end of roads) |
+| 1,2,3,4 | opposite curb curb opposite | _,d,_,_,a,b,f,_,_,i,_ | 1,d,2,2,a,b,f,3,3,i,4 (switches stops with u-turns) |
@ -61,9 +61,11 @@ inline auto contractExcludableGraph(ContractorGraph contractor_graph_,
|
|||||||
// Add all non-core edges to container
|
// Add all non-core edges to container
|
||||||
{
|
{
|
||||||
auto non_core_edges = toEdges<QueryEdge>(contractor_graph);
|
auto non_core_edges = toEdges<QueryEdge>(contractor_graph);
|
||||||
auto new_end =
|
auto new_end = std::remove_if(non_core_edges.begin(),
|
||||||
std::remove_if(non_core_edges.begin(), non_core_edges.end(), [&](const auto &edge) {
|
non_core_edges.end(),
|
||||||
return is_shared_core[edge.source] && is_shared_core[edge.target];
|
[&](const auto &edge) {
|
||||||
|
return is_shared_core[edge.source] &&
|
||||||
|
is_shared_core[edge.target];
|
||||||
});
|
});
|
||||||
non_core_edges.resize(new_end - non_core_edges.begin());
|
non_core_edges.resize(new_end - non_core_edges.begin());
|
||||||
edge_container.Insert(std::move(non_core_edges));
|
edge_container.Insert(std::move(non_core_edges));
|
||||||
@ -75,8 +77,8 @@ inline auto contractExcludableGraph(ContractorGraph contractor_graph_,
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Extract core graph for further contraction
|
// Extract core graph for further contraction
|
||||||
shared_core_graph = contractor_graph.Filter(
|
shared_core_graph = contractor_graph.Filter([&is_shared_core](const NodeID node)
|
||||||
[&is_shared_core](const NodeID node) { return is_shared_core[node]; });
|
{ return is_shared_core[node]; });
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const auto &filter : filters)
|
for (const auto &filter : filters)
|
||||||
|
@ -89,7 +89,10 @@ struct ContractedEdgeContainer
|
|||||||
|
|
||||||
// Remove all edges that are contained in the old set of edges and set the appropriate flag.
|
// Remove all edges that are contained in the old set of edges and set the appropriate flag.
|
||||||
auto new_end =
|
auto new_end =
|
||||||
std::remove_if(new_edges.begin(), new_edges.end(), [&](const QueryEdge &edge) {
|
std::remove_if(new_edges.begin(),
|
||||||
|
new_edges.end(),
|
||||||
|
[&](const QueryEdge &edge)
|
||||||
|
{
|
||||||
// check if the new edge would be sorted before the currend old edge
|
// check if the new edge would be sorted before the currend old edge
|
||||||
// if so it is not contained yet in the set of old edges
|
// if so it is not contained yet in the set of old edges
|
||||||
if (edge_iter == edge_end || mergeCompare(edge, *edge_iter))
|
if (edge_iter == edge_end || mergeCompare(edge, *edge_iter))
|
||||||
@ -132,10 +135,10 @@ struct ContractedEdgeContainer
|
|||||||
// enforce sorting for next merge step
|
// enforce sorting for next merge step
|
||||||
std::vector<unsigned> ordering(edges_size);
|
std::vector<unsigned> ordering(edges_size);
|
||||||
std::iota(ordering.begin(), ordering.end(), 0);
|
std::iota(ordering.begin(), ordering.end(), 0);
|
||||||
tbb::parallel_sort(
|
tbb::parallel_sort(ordering.begin(),
|
||||||
ordering.begin(), ordering.end(), [&](const auto lhs_idx, const auto rhs_idx) {
|
ordering.end(),
|
||||||
return mergeCompare(edges[lhs_idx], edges[rhs_idx]);
|
[&](const auto lhs_idx, const auto rhs_idx)
|
||||||
});
|
{ return mergeCompare(edges[lhs_idx], edges[rhs_idx]); });
|
||||||
auto permutation = util::orderingToPermutation(ordering);
|
auto permutation = util::orderingToPermutation(ordering);
|
||||||
|
|
||||||
util::inplacePermutation(edges.begin(), edges.end(), permutation);
|
util::inplacePermutation(edges.begin(), edges.end(), permutation);
|
||||||
|
@ -122,7 +122,8 @@ class CellCustomizer
|
|||||||
for (std::size_t level = 1; level < partition.GetNumberOfLevels(); ++level)
|
for (std::size_t level = 1; level < partition.GetNumberOfLevels(); ++level)
|
||||||
{
|
{
|
||||||
tbb::parallel_for(tbb::blocked_range<std::size_t>(0, partition.GetNumberOfCells(level)),
|
tbb::parallel_for(tbb::blocked_range<std::size_t>(0, partition.GetNumberOfCells(level)),
|
||||||
[&](const tbb::blocked_range<std::size_t> &range) {
|
[&](const tbb::blocked_range<std::size_t> &range)
|
||||||
|
{
|
||||||
auto &heap = heaps.local();
|
auto &heap = heaps.local();
|
||||||
for (auto id = range.begin(), end = range.end(); id != end; ++id)
|
for (auto id = range.begin(), end = range.end(); id != end; ++id)
|
||||||
{
|
{
|
||||||
|
@ -40,10 +40,10 @@ class BaseAPI
|
|||||||
util::json::Array waypoints;
|
util::json::Array waypoints;
|
||||||
waypoints.values.resize(parameters.coordinates.size());
|
waypoints.values.resize(parameters.coordinates.size());
|
||||||
|
|
||||||
boost::range::transform(
|
boost::range::transform(waypoint_candidates,
|
||||||
waypoint_candidates,
|
|
||||||
waypoints.values.begin(),
|
waypoints.values.begin(),
|
||||||
[this](const PhantomNodeCandidates &candidates) { return MakeWaypoint(candidates); });
|
[this](const PhantomNodeCandidates &candidates)
|
||||||
|
{ return MakeWaypoint(candidates); });
|
||||||
return waypoints;
|
return waypoints;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -104,9 +104,8 @@ class BaseAPI
|
|||||||
std::transform(waypoint_candidates.begin(),
|
std::transform(waypoint_candidates.begin(),
|
||||||
waypoint_candidates.end(),
|
waypoint_candidates.end(),
|
||||||
waypoints.begin(),
|
waypoints.begin(),
|
||||||
[this, builder](const PhantomNodeCandidates &candidates) {
|
[this, builder](const PhantomNodeCandidates &candidates)
|
||||||
return MakeWaypoint(builder, candidates)->Finish();
|
{ return MakeWaypoint(builder, candidates)->Finish(); });
|
||||||
});
|
|
||||||
return builder->CreateVector(waypoints);
|
return builder->CreateVector(waypoints);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -52,7 +52,8 @@ namespace osrm::engine::api
|
|||||||
* optional per coordinate
|
* optional per coordinate
|
||||||
* - bearings: limits the search for segments in the road network to given bearing(s) in degree
|
* - bearings: limits the search for segments in the road network to given bearing(s) in degree
|
||||||
* towards true north in clockwise direction, optional per coordinate
|
* towards true north in clockwise direction, optional per coordinate
|
||||||
* - approaches: force the phantom node to start towards the node with the road country side.
|
* - approaches: force the phantom node to start towards the node with the road country side or
|
||||||
|
* its opposite
|
||||||
*
|
*
|
||||||
* \see OSRM, Coordinate, Hint, Bearing, RouteParameters, TableParameters,
|
* \see OSRM, Coordinate, Hint, Bearing, RouteParameters, TableParameters,
|
||||||
* NearestParameters, TripParameters, MatchParameters and TileParameters
|
* NearestParameters, TripParameters, MatchParameters and TileParameters
|
||||||
@ -111,7 +112,8 @@ struct BaseParameters
|
|||||||
(approaches.empty() || approaches.size() == coordinates.size()) &&
|
(approaches.empty() || approaches.size() == coordinates.size()) &&
|
||||||
std::all_of(bearings.begin(),
|
std::all_of(bearings.begin(),
|
||||||
bearings.end(),
|
bearings.end(),
|
||||||
[](const boost::optional<Bearing> &bearing_and_range) {
|
[](const boost::optional<Bearing> &bearing_and_range)
|
||||||
|
{
|
||||||
if (bearing_and_range)
|
if (bearing_and_range)
|
||||||
{
|
{
|
||||||
return bearing_and_range->IsValid();
|
return bearing_and_range->IsValid();
|
||||||
|
@@ -52,9 +52,10 @@ class MatchAPI final : public RouteAPI
 data_version_string = fb_result.CreateString(data_timestamp);
 }

-auto response = MakeFBResponse(sub_routes, fb_result, [this, &fb_result, &sub_matchings]() {
-return MakeTracepoints(fb_result, sub_matchings);
-});
+auto response = MakeFBResponse(sub_routes,
+fb_result,
+[this, &fb_result, &sub_matchings]()
+{ return MakeTracepoints(fb_result, sub_matchings); });

 if (!data_timestamp.empty())
 {
@@ -57,11 +57,11 @@ class NearestAPI final : public BaseAPI
 {
 std::vector<flatbuffers::Offset<fbresult::Waypoint>> waypoints;
 waypoints.resize(phantom_nodes.front().size());
-std::transform(
-phantom_nodes.front().begin(),
+std::transform(phantom_nodes.front().begin(),
 phantom_nodes.front().end(),
 waypoints.begin(),
-[this, &fb_result](const PhantomNodeWithDistance &phantom_with_distance) {
+[this, &fb_result](const PhantomNodeWithDistance &phantom_with_distance)
+{
 auto &phantom_node = phantom_with_distance.phantom_node;

 auto node_values = MakeNodes(phantom_node);
@@ -94,7 +94,8 @@ class NearestAPI final : public BaseAPI
 std::transform(phantom_nodes.front().begin(),
 phantom_nodes.front().end(),
 waypoints.values.begin(),
-[this](const PhantomNodeWithDistance &phantom_with_distance) {
+[this](const PhantomNodeWithDistance &phantom_with_distance)
+{
 auto &phantom_node = phantom_with_distance.phantom_node;
 auto waypoint = MakeWaypoint({phantom_node});

@@ -77,9 +77,10 @@ class RouteAPI : public BaseAPI
 }

 auto response =
-MakeFBResponse(raw_routes, fb_result, [this, &waypoint_candidates, &fb_result]() {
-return BaseAPI::MakeWaypoints(&fb_result, waypoint_candidates);
-});
+MakeFBResponse(raw_routes,
+fb_result,
+[this, &waypoint_candidates, &fb_result]()
+{ return BaseAPI::MakeWaypoints(&fb_result, waypoint_candidates); });

 if (!data_timestamp.empty())
 {
@@ -171,8 +172,13 @@ class RouteAPI : public BaseAPI
 }
 std::vector<fbresult::Position> coordinates;
 coordinates.resize(std::distance(begin, end));
-std::transform(begin, end, coordinates.begin(), [](const Coordinate &c) {
-return fbresult::Position{static_cast<float>(util::toFloating(c.lon).__value),
+std::transform(begin,
+end,
+coordinates.begin(),
+[](const Coordinate &c)
+{
+return fbresult::Position{
+static_cast<float>(util::toFloating(c.lon).__value),
 static_cast<float>(util::toFloating(c.lat).__value)};
 });
 return builder.CreateVectorOfStructs(coordinates);
@@ -354,9 +360,8 @@ class RouteAPI : public BaseAPI
 std::transform(leg.steps.begin(),
 leg.steps.end(),
 legSteps.begin(),
-[this, &fb_result, &leg_geometry](auto &step) {
-return this->MakeFBStep(fb_result, leg_geometry, step);
-});
+[this, &fb_result, &leg_geometry](auto &step)
+{ return this->MakeFBStep(fb_result, leg_geometry, step); });
 }
 auto steps_vector = fb_result.CreateVector(legSteps);

@@ -441,7 +446,8 @@ class RouteAPI : public BaseAPI
 speed =
 GetAnnotations<float>(fb_result,
 leg_geometry,
-[&prev_speed](const guidance::LegGeometry::Annotation &anno) {
+[&prev_speed](const guidance::LegGeometry::Annotation &anno)
+{
 if (anno.duration < std::numeric_limits<float>::min())
 {
 return prev_speed;
@@ -459,37 +465,37 @@ class RouteAPI : public BaseAPI
 flatbuffers::Offset<flatbuffers::Vector<uint32_t>> duration;
 if (requested_annotations & RouteParameters::AnnotationsType::Duration)
 {
-duration = GetAnnotations<uint32_t>(
-fb_result, leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.duration;
-});
+duration = GetAnnotations<uint32_t>(fb_result,
+leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.duration; });
 }

 flatbuffers::Offset<flatbuffers::Vector<uint32_t>> distance;
 if (requested_annotations & RouteParameters::AnnotationsType::Distance)
 {
-distance = GetAnnotations<uint32_t>(
-fb_result, leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.distance;
-});
+distance = GetAnnotations<uint32_t>(fb_result,
+leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.distance; });
 }

 flatbuffers::Offset<flatbuffers::Vector<uint32_t>> weight;
 if (requested_annotations & RouteParameters::AnnotationsType::Weight)
 {
-weight = GetAnnotations<uint32_t>(
-fb_result, leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.weight;
-});
+weight = GetAnnotations<uint32_t>(fb_result,
+leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.weight; });
 }

 flatbuffers::Offset<flatbuffers::Vector<uint32_t>> datasources;
 if (requested_annotations & RouteParameters::AnnotationsType::Datasources)
 {
-datasources = GetAnnotations<uint32_t>(
-fb_result, leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.datasource;
-});
+datasources = GetAnnotations<uint32_t>(fb_result,
+leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.datasource; });
 }
 std::vector<uint32_t> nodes;
 if (requested_annotations & RouteParameters::AnnotationsType::Nodes)
@@ -653,7 +659,8 @@ class RouteAPI : public BaseAPI
 step.intersections.begin(),
 step.intersections.end(),
 intersections.begin(),
-[&fb_result, this](const guidance::IntermediateIntersection &intersection) {
+[&fb_result, this](const guidance::IntermediateIntersection &intersection)
+{
 std::vector<flatbuffers::Offset<fbresult::Lane>> lanes;
 if (json::detail::hasValidLanes(intersection))
 {
@@ -681,11 +688,11 @@ class RouteAPI : public BaseAPI
 auto bearings_vector = fb_result.CreateVector(intersection.bearings);
 std::vector<flatbuffers::Offset<flatbuffers::String>> classes;
 classes.resize(intersection.classes.size());
-std::transform(
-intersection.classes.begin(),
+std::transform(intersection.classes.begin(),
 intersection.classes.end(),
 classes.begin(),
-[&fb_result](const std::string &cls) { return fb_result.CreateString(cls); });
+[&fb_result](const std::string &cls)
+{ return fb_result.CreateString(cls); });
 auto classes_vector = fb_result.CreateVector(classes);
 auto entry_vector = fb_result.CreateVector(intersection.entry);

@@ -720,9 +727,10 @@ class RouteAPI : public BaseAPI

 std::vector<util::json::Value> step_geometries;
 const auto total_step_count =
-std::accumulate(legs.begin(), legs.end(), 0, [](const auto &v, const auto &leg) {
-return v + leg.steps.size();
-});
+std::accumulate(legs.begin(),
+legs.end(),
+0,
+[](const auto &v, const auto &leg) { return v + leg.steps.size(); });
 step_geometries.reserve(total_step_count);

 for (const auto idx : util::irange<std::size_t>(0UL, legs.size()))
@@ -733,7 +741,8 @@ class RouteAPI : public BaseAPI
 legs[idx].steps.begin(),
 legs[idx].steps.end(),
 std::back_inserter(step_geometries),
-[this, &leg_geometry](const guidance::RouteStep &step) {
+[this, &leg_geometry](const guidance::RouteStep &step)
+{
 if (parameters.geometries == RouteParameters::GeometriesType::Polyline)
 {
 return static_cast<util::json::Value>(json::makePolyline<100000>(
@@ -778,7 +787,9 @@ class RouteAPI : public BaseAPI
 {
 double prev_speed = 0;
 annotation.values["speed"] = GetAnnotations(
-leg_geometry, [&prev_speed](const guidance::LegGeometry::Annotation &anno) {
+leg_geometry,
+[&prev_speed](const guidance::LegGeometry::Annotation &anno)
+{
 if (anno.duration < std::numeric_limits<double>::min())
 {
 return prev_speed;
@@ -794,17 +805,17 @@ class RouteAPI : public BaseAPI

 if (requested_annotations & RouteParameters::AnnotationsType::Duration)
 {
-annotation.values["duration"] = GetAnnotations(
-leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.duration;
-});
+annotation.values["duration"] =
+GetAnnotations(leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.duration; });
 }
 if (requested_annotations & RouteParameters::AnnotationsType::Distance)
 {
-annotation.values["distance"] = GetAnnotations(
-leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.distance;
-});
+annotation.values["distance"] =
+GetAnnotations(leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.distance; });
 }
 if (requested_annotations & RouteParameters::AnnotationsType::Weight)
 {
@@ -814,10 +825,10 @@ class RouteAPI : public BaseAPI
 }
 if (requested_annotations & RouteParameters::AnnotationsType::Datasources)
 {
-annotation.values["datasources"] = GetAnnotations(
-leg_geometry, [](const guidance::LegGeometry::Annotation &anno) {
-return anno.datasource;
-});
+annotation.values["datasources"] =
+GetAnnotations(leg_geometry,
+[](const guidance::LegGeometry::Annotation &anno)
+{ return anno.datasource; });
 }
 if (requested_annotations & RouteParameters::AnnotationsType::Nodes)
 {
@@ -180,9 +180,9 @@ struct RouteParameters : public BaseParameters
 const auto coordinates_ok = coordinates.size() >= 2;
 const auto base_params_ok = BaseParameters::IsValid();
 const auto valid_waypoints =
-std::all_of(waypoints.begin(), waypoints.end(), [this](const auto &w) {
-return w < coordinates.size();
-});
+std::all_of(waypoints.begin(),
+waypoints.end(),
+[this](const auto &w) { return w < coordinates.size(); });
 return coordinates_ok && base_params_ok && valid_waypoints;
 }
 };
@@ -202,7 +202,7 @@ inline RouteParameters::AnnotationsType operator|(RouteParameters::AnnotationsTy
 static_cast<std::underlying_type_t<RouteParameters::AnnotationsType>>(rhs));
 }

-inline RouteParameters::AnnotationsType operator|=(RouteParameters::AnnotationsType lhs,
+inline RouteParameters::AnnotationsType &operator|=(RouteParameters::AnnotationsType &lhs,
 RouteParameters::AnnotationsType rhs)
 {
 return lhs = lhs | rhs;
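The hunk above also fixes a real signature problem: taking and returning the enum by value meant that "lhs |= rhs" only assigned to a local copy. A minimal standalone sketch (with a local enum rather than RouteParameters::AnnotationsType) of why the reference form is the one that works:

#include <cassert>
#include <type_traits>

enum class Annotations : unsigned char { None = 0, Duration = 1, Distance = 2 };

inline Annotations operator|(Annotations lhs, Annotations rhs)
{
    return static_cast<Annotations>(static_cast<std::underlying_type_t<Annotations>>(lhs) |
                                    static_cast<std::underlying_type_t<Annotations>>(rhs));
}

// Taking and returning a reference lets `requested |= ...` update the variable in place,
// mirroring the fixed signature in the hunk above.
inline Annotations &operator|=(Annotations &lhs, Annotations rhs) { return lhs = lhs | rhs; }

int main()
{
    Annotations requested = Annotations::None;
    requested |= Annotations::Duration; // mutates `requested` because lhs is a reference
    requested |= Annotations::Distance;
    assert(static_cast<int>(requested) == 3);
    return 0;
}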
@@ -245,9 +245,8 @@ class TableAPI final : public BaseAPI

 boost::range::transform(candidates,
 std::back_inserter(waypoints),
-[this, &builder](const PhantomNodeCandidates &candidates) {
-return BaseAPI::MakeWaypoint(&builder, candidates)->Finish();
-});
+[this, &builder](const PhantomNodeCandidates &candidates)
+{ return BaseAPI::MakeWaypoint(&builder, candidates)->Finish(); });
 return builder.CreateVector(waypoints);
 }

@@ -261,7 +260,8 @@ class TableAPI final : public BaseAPI
 boost::range::transform(
 indices,
 std::back_inserter(waypoints),
-[this, &builder, &candidates](const std::size_t idx) {
+[this, &builder, &candidates](const std::size_t idx)
+{
 BOOST_ASSERT(idx < candidates.size());
 return BaseAPI::MakeWaypoint(&builder, candidates[idx])->Finish();
 });
@@ -274,8 +274,11 @@ class TableAPI final : public BaseAPI
 {
 std::vector<float> distance_table;
 distance_table.resize(values.size());
-std::transform(
-values.begin(), values.end(), distance_table.begin(), [](const EdgeDuration duration) {
+std::transform(values.begin(),
+values.end(),
+distance_table.begin(),
+[](const EdgeDuration duration)
+{
 if (duration == MAXIMAL_EDGE_DURATION)
 {
 return 0.;
@@ -291,8 +294,11 @@ class TableAPI final : public BaseAPI
 {
 std::vector<float> duration_table;
 duration_table.resize(values.size());
-std::transform(
-values.begin(), values.end(), duration_table.begin(), [](const EdgeDistance distance) {
+std::transform(values.begin(),
+values.end(),
+duration_table.begin(),
+[](const EdgeDistance distance)
+{
 if (distance == INVALID_EDGE_DISTANCE)
 {
 return 0.;
@@ -308,8 +314,10 @@ class TableAPI final : public BaseAPI
 {
 std::vector<uint32_t> fb_table;
 fb_table.reserve(fallback_speed_cells.size());
-std::for_each(
-fallback_speed_cells.begin(), fallback_speed_cells.end(), [&](const auto &cell) {
+std::for_each(fallback_speed_cells.begin(),
+fallback_speed_cells.end(),
+[&](const auto &cell)
+{
 fb_table.push_back(cell.row);
 fb_table.push_back(cell.column);
 });
@@ -325,9 +333,8 @@ class TableAPI final : public BaseAPI

 boost::range::transform(candidates,
 std::back_inserter(json_waypoints.values),
-[this](const PhantomNodeCandidates &candidates) {
-return BaseAPI::MakeWaypoint(candidates);
-});
+[this](const PhantomNodeCandidates &candidates)
+{ return BaseAPI::MakeWaypoint(candidates); });
 return json_waypoints;
 }

@@ -338,7 +345,8 @@ class TableAPI final : public BaseAPI
 json_waypoints.values.reserve(indices.size());
 boost::range::transform(indices,
 std::back_inserter(json_waypoints.values),
-[this, &candidates](const std::size_t idx) {
+[this, &candidates](const std::size_t idx)
+{
 BOOST_ASSERT(idx < candidates.size());
 return BaseAPI::MakeWaypoint(candidates[idx]);
 });
@@ -359,7 +367,8 @@ class TableAPI final : public BaseAPI
 std::transform(row_begin_iterator,
 row_end_iterator,
 json_row.values.begin(),
-[](const EdgeDuration duration) {
+[](const EdgeDuration duration)
+{
 if (duration == MAXIMAL_EDGE_DURATION)
 {
 return util::json::Value(util::json::Null());
@@ -387,7 +396,8 @@ class TableAPI final : public BaseAPI
 std::transform(row_begin_iterator,
 row_end_iterator,
 json_row.values.begin(),
-[](const EdgeDistance distance) {
+[](const EdgeDistance distance)
+{
 if (distance == INVALID_EDGE_DISTANCE)
 {
 return util::json::Value(util::json::Null());
@@ -405,8 +415,10 @@ class TableAPI final : public BaseAPI
 MakeEstimatesTable(const std::vector<TableCellRef> &fallback_speed_cells) const
 {
 util::json::Array json_table;
-std::for_each(
-fallback_speed_cells.begin(), fallback_speed_cells.end(), [&](const auto &cell) {
+std::for_each(fallback_speed_cells.begin(),
+fallback_speed_cells.end(),
+[&](const auto &cell)
+{
 util::json::Array row;
 row.values.push_back(util::json::Number(cell.row));
 row.values.push_back(util::json::Number(cell.column));
@@ -50,10 +50,10 @@ class TripAPI final : public RouteAPI
 data_version_string = fb_result.CreateString(data_timestamp);
 }

-auto response =
-MakeFBResponse(sub_routes, fb_result, [this, &fb_result, &sub_trips, &candidates]() {
-return MakeWaypoints(fb_result, sub_trips, candidates);
-});
+auto response = MakeFBResponse(sub_routes,
+fb_result,
+[this, &fb_result, &sub_trips, &candidates]()
+{ return MakeWaypoints(fb_result, sub_trips, candidates); });

 if (!data_timestamp.empty())
 {
@@ -36,7 +36,8 @@ namespace osrm::engine
 enum class Approach : std::uint8_t
 {
 CURB = 0,
-UNRESTRICTED = 1
+UNRESTRICTED = 1,
+OPPOSITE = 2

 };
 } // namespace osrm::engine
@@ -31,6 +31,26 @@
 namespace osrm::engine::datafacade
 {

+static const std::string DATASET_TURN_DATA = "TurnData";
+static const std::string DATASET_TURN_LANE_DATA = "NameLaneData";
+static const std::string DATASET_NAME_DATA = "NameData";
+static const std::string DATASET_INTERSECTION_BEARINGS = "IntersectionBearings";
+static const std::string DATASET_ENTRY_CLASS = "EntryClass";
+
+/**
+* Macro is not ideal. But without it we either have to:
+* a) Write this boiler-plate for every usage of an optional dataset.
+* b) Convert to a function and add lots of polluting NOLINT(bugprone-unchecked-optional-access)
+* comments. This macro keeps the API code readable.
+*/
+#define CHECK_DATASET_DISABLED(val, dataset) \
+{ \
+if (!(val)) \
+{ \
+throw osrm::util::DisabledDatasetException((dataset)); \
+} \
+}
+
 template <typename AlgorithmT> class ContiguousInternalMemoryAlgorithmDataFacade;

 template <>
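As a rough illustration of the pattern the new macro implements — guard an optional dataset view and throw when it was not loaded — here is a self-contained sketch. DisabledDatasetError stands in for osrm::util::DisabledDatasetException, and FacadeSketch is not the real facade class; both are illustrative only.

#include <cstddef>
#include <optional>
#include <stdexcept>
#include <string>

// Stand-in for osrm::util::DisabledDatasetException.
struct DisabledDatasetError : std::runtime_error
{
    explicit DisabledDatasetError(const std::string &name)
        : std::runtime_error("dataset disabled: " + name)
    {
    }
};

// Same shape as CHECK_DATASET_DISABLED above: if the optional holding a dataset view
// is empty, the accessor throws instead of dereferencing it.
#define CHECK_DATASET_DISABLED_SKETCH(val, dataset)                                              \
    {                                                                                            \
        if (!(val))                                                                              \
        {                                                                                        \
            throw DisabledDatasetError((dataset));                                               \
        }                                                                                        \
    }

struct FacadeSketch
{
    std::optional<std::string> name_table; // empty when the dataset was not loaded

    std::string GetNameForID(std::size_t) const
    {
        CHECK_DATASET_DISABLED_SKETCH(name_table, "NameData");
        return *name_table; // safe: guarded above
    }
};

int main()
{
    FacadeSketch facade;
    try
    {
        facade.GetNameForID(0); // throws: the dataset was never loaded
    }
    catch (const DisabledDatasetError &)
    {
    }
    return 0;
}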
@@ -141,18 +161,15 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
 std::string_view m_data_timestamp;
 util::vector_view<util::Coordinate> m_coordinate_list;
 extractor::PackedOSMIDsView m_osmnodeid_list;
-util::vector_view<std::uint32_t> m_lane_description_offsets;
-util::vector_view<extractor::TurnLaneType::Mask> m_lane_description_masks;
+std::optional<util::vector_view<std::uint32_t>> m_lane_description_offsets;
+std::optional<util::vector_view<extractor::TurnLaneType::Mask>> m_lane_description_masks;
 util::vector_view<TurnPenalty> m_turn_weight_penalties;
 util::vector_view<TurnPenalty> m_turn_duration_penalties;
 extractor::SegmentDataView segment_data;
 extractor::EdgeBasedNodeDataView edge_based_node_data;
-guidance::TurnDataView turn_data;
+std::optional<guidance::TurnDataView> turn_data;

-util::vector_view<char> m_datasource_name_data;
-util::vector_view<std::size_t> m_datasource_name_offsets;
-util::vector_view<std::size_t> m_datasource_name_lengths;
-util::vector_view<util::guidance::LaneTupleIdPair> m_lane_tupel_id_pairs;
+std::optional<util::vector_view<util::guidance::LaneTupleIdPair>> m_lane_tuple_id_pairs;

 util::vector_view<extractor::StorageManeuverOverride> m_maneuver_overrides;
 util::vector_view<NodeID> m_maneuver_override_node_sequences;
@@ -161,16 +178,24 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
 std::unique_ptr<SharedGeospatialQuery> m_geospatial_query;
 boost::filesystem::path file_index_path;

-extractor::IntersectionBearingsView intersection_bearings_view;
+std::optional<extractor::IntersectionBearingsView> intersection_bearings_view;

-extractor::NameTableView m_name_table;
+std::optional<extractor::NameTableView> m_name_table;
 // the look-up table for entry classes. An entry class lists the possibility of entry for all
 // available turns. Such a class id is stored with every edge.
-util::vector_view<util::guidance::EntryClass> m_entry_class_table;
+std::optional<util::vector_view<util::guidance::EntryClass>> m_entry_class_table;

 // allocator that keeps the allocation data
 std::shared_ptr<ContiguousBlockAllocator> allocator;

+bool isIndexed(const storage::SharedDataIndex &index, const std::string &name)
+{
+bool result = false;
+index.List(name,
+boost::make_function_output_iterator([&](const auto &) { result = true; }));
+return result;
+}
+
 void InitializeInternalPointers(const storage::SharedDataIndex &index,
 const std::string &metric_name,
 const std::size_t exclude_index)
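The new isIndexed() helper above answers "does any block with this name exist?" without materialising the listing: index.List() writes matching names into an output iterator, and a Boost function output iterator simply flips a flag each time it is written to. A standalone sketch of the same trick, with a toy ListBlocks() standing in for storage::SharedDataIndex::List (it assumes Boost's function_output_iterator header is available, as it is for this project):

#include <boost/iterator/function_output_iterator.hpp>
#include <string>
#include <vector>

// Toy stand-in for storage::SharedDataIndex::List: copies every block name that starts
// with `prefix` into the output iterator.
template <typename OutIter>
void ListBlocks(const std::vector<std::string> &blocks, const std::string &prefix, OutIter out)
{
    for (const auto &name : blocks)
    {
        if (name.compare(0, prefix.size(), prefix) == 0)
        {
            *out++ = name;
        }
    }
}

// Same idea as isIndexed(): the matches are never stored, we only record that at least
// one write happened.
bool isIndexedSketch(const std::vector<std::string> &blocks, const std::string &prefix)
{
    bool result = false;
    ListBlocks(blocks,
               prefix,
               boost::make_function_output_iterator([&](const std::string &) { result = true; }));
    return result;
}

int main()
{
    const std::vector<std::string> blocks = {"/common/turn_data", "/mld/connectivity_checksum"};
    return isIndexedSketch(blocks, "/common/turn_data") && !isIndexedSketch(blocks, "/common/names")
               ? 0
               : 1;
}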
@@ -183,7 +208,17 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade

 exclude_mask = m_profile_properties->excludable_classes[exclude_index];

-m_check_sum = *index.GetBlockPtr<std::uint32_t>("/common/connectivity_checksum");
+// We no longer use "/common/connectivity_checksum", as osrm.edges is an optional dataset.
+// Instead, we load the value from the MLD or CH graph, whichever is loaded.
+if (isIndexed(index, "/mld/connectivity_checksum"))
+{
+m_check_sum = *index.GetBlockPtr<std::uint32_t>("/mld/connectivity_checksum");
+}
+else
+{
+BOOST_ASSERT(isIndexed(index, "/ch/connectivity_checksum"));
+m_check_sum = *index.GetBlockPtr<std::uint32_t>("/ch/connectivity_checksum");
+}

 m_data_timestamp = make_timestamp_view(index, "/common/timestamp");

@@ -196,13 +231,23 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade

 edge_based_node_data = make_ebn_data_view(index, "/common/ebg_node_data");

+if (isIndexed(index, "/common/turn_data"))
+{
 turn_data = make_turn_data_view(index, "/common/turn_data");
+}

+if (isIndexed(index, "/common/names"))
+{
 m_name_table = make_name_table_view(index, "/common/names");
+}

+if (isIndexed(index, "/common/turn_lanes"))
+{
 std::tie(m_lane_description_offsets, m_lane_description_masks) =
 make_turn_lane_description_views(index, "/common/turn_lanes");
-m_lane_tupel_id_pairs = make_lane_data_view(index, "/common/turn_lanes");
+
+m_lane_tuple_id_pairs = make_lane_data_view(index, "/common/turn_lanes");
+}

 m_turn_weight_penalties = make_turn_weight_view(index, "/common/turn_penalty");
 m_turn_duration_penalties = make_turn_duration_view(index, "/common/turn_penalty");
|
|||||||
|
|
||||||
m_datasources = index.GetBlockPtr<extractor::Datasources>("/common/data_sources_names");
|
m_datasources = index.GetBlockPtr<extractor::Datasources>("/common/data_sources_names");
|
||||||
|
|
||||||
|
if (isIndexed(index, "/common/intersection_bearings"))
|
||||||
|
{
|
||||||
intersection_bearings_view =
|
intersection_bearings_view =
|
||||||
make_intersection_bearings_view(index, "/common/intersection_bearings");
|
make_intersection_bearings_view(index, "/common/intersection_bearings");
|
||||||
|
|
||||||
m_entry_class_table = make_entry_classes_view(index, "/common/entry_classes");
|
m_entry_class_table = make_entry_classes_view(index, "/common/entry_classes");
|
||||||
|
}
|
||||||
|
|
||||||
std::tie(m_maneuver_overrides, m_maneuver_override_node_sequences) =
|
std::tie(m_maneuver_overrides, m_maneuver_override_node_sequences) =
|
||||||
make_maneuver_overrides_views(index, "/common/maneuver_overrides");
|
make_maneuver_overrides_views(index, "/common/maneuver_overrides");
|
||||||
@ -305,7 +352,8 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
|
|||||||
osrm::guidance::TurnInstruction
|
osrm::guidance::TurnInstruction
|
||||||
GetTurnInstructionForEdgeID(const EdgeID edge_based_edge_id) const override final
|
GetTurnInstructionForEdgeID(const EdgeID edge_based_edge_id) const override final
|
||||||
{
|
{
|
||||||
return turn_data.GetTurnInstruction(edge_based_edge_id);
|
CHECK_DATASET_DISABLED(turn_data, DATASET_TURN_DATA);
|
||||||
|
return turn_data->GetTurnInstruction(edge_based_edge_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::vector<RTreeLeaf> GetEdgesInBox(const util::Coordinate south_west,
|
std::vector<RTreeLeaf> GetEdgesInBox(const util::Coordinate south_west,
|
||||||
@ -392,9 +440,11 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
|
|||||||
{
|
{
|
||||||
auto indexes = extractor::getClassIndexes(class_data);
|
auto indexes = extractor::getClassIndexes(class_data);
|
||||||
std::vector<std::string> classes(indexes.size());
|
std::vector<std::string> classes(indexes.size());
|
||||||
std::transform(indexes.begin(), indexes.end(), classes.begin(), [this](const auto index) {
|
std::transform(indexes.begin(),
|
||||||
return m_profile_properties->GetClassName(index);
|
indexes.end(),
|
||||||
});
|
classes.begin(),
|
||||||
|
[this](const auto index)
|
||||||
|
{ return m_profile_properties->GetClassName(index); });
|
||||||
|
|
||||||
return classes;
|
return classes;
|
||||||
}
|
}
|
||||||
@ -406,27 +456,32 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
|
|||||||
|
|
||||||
std::string_view GetNameForID(const NameID id) const override final
|
std::string_view GetNameForID(const NameID id) const override final
|
||||||
{
|
{
|
||||||
return m_name_table.GetNameForID(id);
|
CHECK_DATASET_DISABLED(m_name_table, DATASET_NAME_DATA);
|
||||||
|
return m_name_table->GetNameForID(id);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string_view GetRefForID(const NameID id) const override final
|
std::string_view GetRefForID(const NameID id) const override final
|
||||||
{
|
{
|
||||||
return m_name_table.GetRefForID(id);
|
CHECK_DATASET_DISABLED(m_name_table, DATASET_NAME_DATA);
|
||||||
|
return m_name_table->GetRefForID(id);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string_view GetPronunciationForID(const NameID id) const override final
|
std::string_view GetPronunciationForID(const NameID id) const override final
|
||||||
{
|
{
|
||||||
return m_name_table.GetPronunciationForID(id);
|
CHECK_DATASET_DISABLED(m_name_table, DATASET_NAME_DATA);
|
||||||
|
return m_name_table->GetPronunciationForID(id);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string_view GetDestinationsForID(const NameID id) const override final
|
std::string_view GetDestinationsForID(const NameID id) const override final
|
||||||
{
|
{
|
||||||
return m_name_table.GetDestinationsForID(id);
|
CHECK_DATASET_DISABLED(m_name_table, DATASET_NAME_DATA);
|
||||||
|
return m_name_table->GetDestinationsForID(id);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string_view GetExitsForID(const NameID id) const override final
|
std::string_view GetExitsForID(const NameID id) const override final
|
||||||
{
|
{
|
||||||
return m_name_table.GetExitsForID(id);
|
CHECK_DATASET_DISABLED(m_name_table, DATASET_NAME_DATA);
|
||||||
|
return m_name_table->GetExitsForID(id);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string_view GetDatasourceName(const DatasourceID id) const override final
|
std::string_view GetDatasourceName(const DatasourceID id) const override final
|
||||||
@ -459,46 +514,60 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
|
|||||||
util::guidance::BearingClass
|
util::guidance::BearingClass
|
||||||
GetBearingClass(const NodeID node_based_node_id) const override final
|
GetBearingClass(const NodeID node_based_node_id) const override final
|
||||||
{
|
{
|
||||||
return intersection_bearings_view.GetBearingClass(node_based_node_id);
|
CHECK_DATASET_DISABLED(intersection_bearings_view, DATASET_INTERSECTION_BEARINGS);
|
||||||
|
return intersection_bearings_view->GetBearingClass(node_based_node_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
guidance::TurnBearing PreTurnBearing(const EdgeID edge_based_edge_id) const override final
|
guidance::TurnBearing PreTurnBearing(const EdgeID edge_based_edge_id) const override final
|
||||||
{
|
{
|
||||||
return turn_data.GetPreTurnBearing(edge_based_edge_id);
|
CHECK_DATASET_DISABLED(turn_data, DATASET_TURN_DATA);
|
||||||
|
return turn_data->GetPreTurnBearing(edge_based_edge_id);
|
||||||
}
|
}
|
||||||
guidance::TurnBearing PostTurnBearing(const EdgeID edge_based_edge_id) const override final
|
guidance::TurnBearing PostTurnBearing(const EdgeID edge_based_edge_id) const override final
|
||||||
{
|
{
|
||||||
return turn_data.GetPostTurnBearing(edge_based_edge_id);
|
CHECK_DATASET_DISABLED(turn_data, DATASET_TURN_DATA);
|
||||||
|
return turn_data->GetPostTurnBearing(edge_based_edge_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
util::guidance::EntryClass GetEntryClass(const EdgeID edge_based_edge_id) const override final
|
util::guidance::EntryClass GetEntryClass(const EdgeID edge_based_edge_id) const override final
|
||||||
{
|
{
|
||||||
auto entry_class_id = turn_data.GetEntryClassID(edge_based_edge_id);
|
CHECK_DATASET_DISABLED(m_entry_class_table, DATASET_ENTRY_CLASS);
|
||||||
return m_entry_class_table.at(entry_class_id);
|
CHECK_DATASET_DISABLED(turn_data, DATASET_TURN_DATA);
|
||||||
|
|
||||||
|
auto entry_class_id = turn_data->GetEntryClassID(edge_based_edge_id);
|
||||||
|
return m_entry_class_table->at(entry_class_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool HasLaneData(const EdgeID edge_based_edge_id) const override final
|
bool HasLaneData(const EdgeID edge_based_edge_id) const override final
|
||||||
{
|
{
|
||||||
return turn_data.HasLaneData(edge_based_edge_id);
|
CHECK_DATASET_DISABLED(turn_data, DATASET_TURN_DATA);
|
||||||
|
return turn_data->HasLaneData(edge_based_edge_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
util::guidance::LaneTupleIdPair
|
util::guidance::LaneTupleIdPair
|
||||||
GetLaneData(const EdgeID edge_based_edge_id) const override final
|
GetLaneData(const EdgeID edge_based_edge_id) const override final
|
||||||
{
|
{
|
||||||
|
CHECK_DATASET_DISABLED(turn_data, DATASET_TURN_DATA);
|
||||||
|
CHECK_DATASET_DISABLED(m_lane_tuple_id_pairs, DATASET_TURN_LANE_DATA);
|
||||||
|
|
||||||
BOOST_ASSERT(HasLaneData(edge_based_edge_id));
|
BOOST_ASSERT(HasLaneData(edge_based_edge_id));
|
||||||
return m_lane_tupel_id_pairs.at(turn_data.GetLaneDataID(edge_based_edge_id));
|
return m_lane_tuple_id_pairs->at(turn_data->GetLaneDataID(edge_based_edge_id));
|
||||||
}
|
}
|
||||||
|
|
||||||
extractor::TurnLaneDescription
|
extractor::TurnLaneDescription
|
||||||
GetTurnDescription(const LaneDescriptionID lane_description_id) const override final
|
GetTurnDescription(const LaneDescriptionID lane_description_id) const override final
|
||||||
{
|
{
|
||||||
|
CHECK_DATASET_DISABLED(m_lane_description_offsets, DATASET_TURN_LANE_DATA);
|
||||||
|
CHECK_DATASET_DISABLED(m_lane_description_masks, DATASET_TURN_LANE_DATA);
|
||||||
|
|
||||||
if (lane_description_id == INVALID_LANE_DESCRIPTIONID)
|
if (lane_description_id == INVALID_LANE_DESCRIPTIONID)
|
||||||
return {};
|
return {};
|
||||||
else
|
else
|
||||||
return extractor::TurnLaneDescription(
|
return extractor::TurnLaneDescription(
|
||||||
m_lane_description_masks.begin() + m_lane_description_offsets[lane_description_id],
|
m_lane_description_masks->begin() +
|
||||||
m_lane_description_masks.begin() +
|
m_lane_description_offsets->at(lane_description_id),
|
||||||
m_lane_description_offsets[lane_description_id + 1]);
|
m_lane_description_masks->begin() +
|
||||||
|
m_lane_description_offsets->at(lane_description_id + 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
bool IsLeftHandDriving(const NodeID edge_based_node_id) const override final
|
bool IsLeftHandDriving(const NodeID edge_based_node_id) const override final
|
||||||
@ -533,10 +602,14 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
|
|||||||
auto found_range = std::equal_range(
|
auto found_range = std::equal_range(
|
||||||
m_maneuver_overrides.begin(), m_maneuver_overrides.end(), edge_based_node_id, Comp{});
|
m_maneuver_overrides.begin(), m_maneuver_overrides.end(), edge_based_node_id, Comp{});
|
||||||
|
|
||||||
std::for_each(found_range.first, found_range.second, [&](const auto &override) {
|
std::for_each(found_range.first,
|
||||||
std::vector<NodeID> sequence(
|
found_range.second,
|
||||||
m_maneuver_override_node_sequences.begin() + override.node_sequence_offset_begin,
|
[&](const auto &override)
|
||||||
m_maneuver_override_node_sequences.begin() + override.node_sequence_offset_end);
|
{
|
||||||
|
std::vector<NodeID> sequence(m_maneuver_override_node_sequences.begin() +
|
||||||
|
override.node_sequence_offset_begin,
|
||||||
|
m_maneuver_override_node_sequences.begin() +
|
||||||
|
override.node_sequence_offset_end);
|
||||||
results.push_back(extractor::ManeuverOverride{std::move(sequence),
|
results.push_back(extractor::ManeuverOverride{std::move(sequence),
|
||||||
override.instruction_node,
|
override.instruction_node,
|
||||||
override.override_type,
|
override.override_type,
|
||||||
|
@@ -29,9 +29,11 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #define ENGINE_CONFIG_HPP

 #include "storage/storage_config.hpp"
+#include "osrm/datasets.hpp"

 #include <boost/filesystem/path.hpp>

+#include <set>
 #include <string>

 namespace osrm::engine
@@ -83,12 +85,13 @@ struct EngineConfig final
 int max_locations_map_matching = -1;
 double max_radius_map_matching = -1.0;
 int max_results_nearest = -1;
-boost::optional<double> default_radius;
+boost::optional<double> default_radius = -1.0;
 int max_alternatives = 3; // set an arbitrary upper bound; can be adjusted by user
 bool use_shared_memory = true;
 boost::filesystem::path memory_file;
 bool use_mmap = true;
 Algorithm algorithm = Algorithm::CH;
+std::vector<storage::FeatureDataset> disable_feature_dataset;
 std::string verbosity;
 std::string dataset_name;
 };
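The new disable_feature_dataset field lets a deployment opt out of loading optional datasets at engine start-up, and default_radius now defaults to the -1.0 sentinel that the geospatial-query hunks below treat as "unlimited". A self-contained sketch of filling in such a config; the FeatureDataset enumerator names here are assumptions for illustration, not the real storage::FeatureDataset values, and EngineConfigSketch is not the real EngineConfig.

#include <string>
#include <vector>

// Illustrative stand-ins; the real config uses storage::FeatureDataset (see the new
// "osrm/datasets.hpp" include above) and boost::optional<double> for default_radius.
enum class FeatureDataset { ROUTE_STEPS, ROUTE_GEOMETRY }; // names are assumptions

struct EngineConfigSketch
{
    double default_radius = -1.0; // -1.0 acts as "no limit", matching the new default above
    std::vector<FeatureDataset> disable_feature_dataset;
    std::string dataset_name;
};

int main()
{
    EngineConfigSketch config;
    config.dataset_name = "berlin-latest";
    // Ask the engine to skip datasets that are only needed for turn-by-turn output.
    config.disable_feature_dataset = {FeatureDataset::ROUTE_STEPS};
    return config.disable_feature_dataset.size() == 1 ? 0 : 1;
}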
@@ -60,7 +60,8 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 auto results = rtree.Nearest(
 input_coordinate,
 [this, approach, &input_coordinate, &bearing_with_range, &use_all_edges](
-const CandidateSegment &segment) {
+const CandidateSegment &segment)
+{
 auto valid = CheckSegmentExclude(segment) &&
 CheckApproach(input_coordinate, segment, approach) &&
 (use_all_edges ? HasValidEdge(segment, *use_all_edges)
@@ -70,9 +71,10 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 return valid;
 },
 [this, &max_distance, &max_results, input_coordinate](const std::size_t num_results,
-const CandidateSegment &segment) {
+const CandidateSegment &segment)
+{
 return (max_results && num_results >= *max_results) ||
-(max_distance &&
+(max_distance && max_distance != -1.0 &&
 CheckSegmentDistance(input_coordinate, segment, *max_distance));
 });

@@ -107,7 +109,8 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 &big_component_coord,
 &big_component_distance,
 &use_all_edges,
-&bearing_with_range](const CandidateSegment &segment) {
+&bearing_with_range](const CandidateSegment &segment)
+{
 auto is_big_component = !IsTinyComponent(segment);
 auto not_nearest =
 has_nearest && segment.fixed_projected_coordinate != nearest_coord;
@@ -159,11 +162,13 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 return use_candidate;
 },
 [this, &has_big_component, &max_distance, input_coordinate, &big_component_distance](
-const std::size_t /*num_results*/, const CandidateSegment &segment) {
+const std::size_t /*num_results*/, const CandidateSegment &segment)
+{
 auto distance = GetSegmentDistance(input_coordinate, segment);
 auto further_than_big_component = distance > big_component_distance;
 auto no_more_candidates = has_big_component && further_than_big_component;
-auto too_far_away = max_distance && distance > *max_distance;
+auto too_far_away =
+max_distance && max_distance != -1.0 && distance > *max_distance;

 // Time to terminate the search when:
 // 1. We've found a node from a big component and the next candidate is further away
@@ -189,10 +194,14 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 PhantomNodeCandidates nearest_phantoms;
 PhantomNodeCandidates big_component_phantoms;

-const auto add_to_candidates = [this, &input_coordinate](PhantomNodeCandidates &candidates,
-const EdgeData data) {
+const auto add_to_candidates =
+[this, &input_coordinate](PhantomNodeCandidates &candidates, const EdgeData data)
+{
 auto candidate_it =
-std::find_if(candidates.begin(), candidates.end(), [&](const PhantomNode &node) {
+std::find_if(candidates.begin(),
+candidates.end(),
+[&](const PhantomNode &node)
+{
 return data.forward_segment_id.id == node.forward_segment_id.id &&
 data.reverse_segment_id.id == node.reverse_segment_id.id;
 });
@@ -258,7 +267,10 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 }
 };

-std::for_each(results.begin(), results.end(), [&](const CandidateSegment &segment) {
+std::for_each(results.begin(),
+results.end(),
+[&](const CandidateSegment &segment)
+{
 if (segment.fixed_projected_coordinate == nearest_coord)
 {
 add_to_candidates(nearest_phantoms, segment.data);
@@ -280,9 +292,8 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 std::transform(results.begin(),
 results.end(),
 distance_and_phantoms.begin(),
-[this, &input_coordinate](const CandidateSegment &segment) {
-return MakePhantomNode(input_coordinate, segment.data);
-});
+[this, &input_coordinate](const CandidateSegment &segment)
+{ return MakePhantomNode(input_coordinate, segment.data); });
 return distance_and_phantoms;
 }

@@ -399,9 +410,8 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 }

 // check phantom node segments validity
-auto areSegmentsValid = [](auto first, auto last) -> bool {
-return std::find(first, last, INVALID_SEGMENT_WEIGHT) == last;
-};
+auto areSegmentsValid = [](auto first, auto last) -> bool
+{ return std::find(first, last, INVALID_SEGMENT_WEIGHT) == last; };
 bool is_forward_valid_source =
 areSegmentsValid(forward_weights.begin(), forward_weights.end());
 bool is_forward_valid_target = areSegmentsValid(
@@ -557,7 +567,7 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 {
 bool isOnewaySegment =
 !(segment.data.forward_segment_id.enabled && segment.data.reverse_segment_id.enabled);
-if (!isOnewaySegment && approach == Approach::CURB)
+if (!isOnewaySegment && (approach == Approach::CURB || approach == Approach::OPPOSITE))
 {
 // Check the counter clockwise
 //
@@ -572,6 +582,9 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
 if (datafacade.IsLeftHandDriving(segment.data.forward_segment_id.id))
 input_coordinate_is_at_right = !input_coordinate_is_at_right;
+
+if (approach == Approach::OPPOSITE)
+input_coordinate_is_at_right = !input_coordinate_is_at_right;

 return std::make_pair(input_coordinate_is_at_right, (!input_coordinate_is_at_right));
 }
 return std::make_pair(true, true);
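The two approach-related hunks above implement OPPOSITE by reusing the existing curb-side test and flipping its outcome once more, after the left-hand-driving flip. A tiny standalone sketch of that decision logic; the function below is illustrative only, not the project's CheckApproach API.

#include <cassert>
#include <cstdint>
#include <utility>

enum class Approach : std::uint8_t { CURB = 0, UNRESTRICTED = 1, OPPOSITE = 2 };

// Mirrors the hunk above: start from "is the input coordinate on the driving side?",
// flip once for left-hand traffic, and flip once more for OPPOSITE.
std::pair<bool, bool>
allowedDirections(bool input_is_at_right, bool left_hand_driving, Approach approach)
{
    if (approach == Approach::UNRESTRICTED)
        return {true, true};
    if (left_hand_driving)
        input_is_at_right = !input_is_at_right;
    if (approach == Approach::OPPOSITE)
        input_is_at_right = !input_is_at_right;
    return {input_is_at_right, !input_is_at_right};
}

int main()
{
    // CURB keeps the forward direction when the input is on the driving side...
    assert(allowedDirections(true, false, Approach::CURB).first);
    // ...while OPPOSITE selects the other side of the road for the same input.
    assert(!allowedDirections(true, false, Approach::OPPOSITE).first);
    return 0;
}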
@@ -43,7 +43,8 @@ std::array<std::uint32_t, SegmentNumber> summarizeRoute(const datafacade::BaseDa
 const bool target_traversed_in_reverse)
 {
 // merges segments with same name id
-const auto collapse_segments = [](std::vector<NamedSegment> &segments) {
+const auto collapse_segments = [](std::vector<NamedSegment> &segments)
+{
 auto out = segments.begin();
 auto end = segments.end();

@@ -75,7 +76,8 @@ std::array<std::uint32_t, SegmentNumber> summarizeRoute(const datafacade::BaseDa
 std::transform(route_data.begin(),
 route_data.end(),
 segments.begin(),
-[&index, &facade](const PathData &point) {
+[&index, &facade](const PathData &point)
+{
 return NamedSegment{point.duration_until_turn,
 index++,
 facade.GetNameIndex(point.from_edge_based_node)};
@@ -87,8 +89,10 @@ std::array<std::uint32_t, SegmentNumber> summarizeRoute(const datafacade::BaseDa
 if (target_duration > EdgeDuration{1})
 segments.push_back({target_duration, index++, facade.GetNameIndex(target_node_id)});
 // this makes sure that the segment with the lowest position comes first
-std::sort(
-segments.begin(), segments.end(), [](const NamedSegment &lhs, const NamedSegment &rhs) {
+std::sort(segments.begin(),
+segments.end(),
+[](const NamedSegment &lhs, const NamedSegment &rhs)
+{
 return lhs.name_id < rhs.name_id ||
 (lhs.name_id == rhs.name_id && lhs.position < rhs.position);
 });
@@ -96,24 +100,26 @@ std::array<std::uint32_t, SegmentNumber> summarizeRoute(const datafacade::BaseDa
 segments.resize(new_end - segments.begin());

 // Filter out segments with an empty name (name_id == 0)
-new_end = std::remove_if(segments.begin(), segments.end(), [](const NamedSegment &segment) {
-return segment.name_id == 0;
-});
+new_end = std::remove_if(segments.begin(),
+segments.end(),
+[](const NamedSegment &segment) { return segment.name_id == 0; });
 segments.resize(new_end - segments.begin());

 // sort descending
-std::sort(
-segments.begin(), segments.end(), [](const NamedSegment &lhs, const NamedSegment &rhs) {
+std::sort(segments.begin(),
+segments.end(),
+[](const NamedSegment &lhs, const NamedSegment &rhs)
+{
 return lhs.duration > rhs.duration ||
 (lhs.duration == rhs.duration && lhs.position < rhs.position);
 });

 // make sure the segments are sorted by position
 segments.resize(std::min(segments.size(), SegmentNumber));
-std::sort(
-segments.begin(), segments.end(), [](const NamedSegment &lhs, const NamedSegment &rhs) {
-return lhs.position < rhs.position;
-});
+std::sort(segments.begin(),
+segments.end(),
+[](const NamedSegment &lhs, const NamedSegment &rhs)
+{ return lhs.position < rhs.position; });

 std::array<std::uint32_t, SegmentNumber> summary;
 std::fill(summary.begin(), summary.end(), EMPTY_NAMEID);
@@ -138,7 +144,8 @@ inline std::string assembleSummary(const datafacade::BaseDataFacade &facade,

 // transform a name_id into a string containing either the name, or -if the name is empty-
 // the reference.
-const auto name_id_to_string = [&](const NameID name_id) {
+const auto name_id_to_string = [&](const NameID name_id)
+{
 const auto name = facade.GetNameForID(name_id);
 if (!name.empty())
 return std::string(name);
@@ -178,14 +185,16 @@ inline RouteLeg assembleLeg(const datafacade::BaseDataFacade &facade,
 const auto target_weight =
 (target_traversed_in_reverse ? target_node.reverse_weight : target_node.forward_weight);

-auto duration = std::accumulate(
-route_data.begin(), route_data.end(), 0, [](const double sum, const PathData &data) {
-return sum + from_alias<double>(data.duration_until_turn);
-});
-auto weight = std::accumulate(
-route_data.begin(), route_data.end(), 0, [](const double sum, const PathData &data) {
-return sum + from_alias<double>(data.weight_until_turn);
-});
+auto duration = std::accumulate(route_data.begin(),
+route_data.end(),
+0,
+[](const double sum, const PathData &data)
+{ return sum + from_alias<double>(data.duration_until_turn); });
+auto weight = std::accumulate(route_data.begin(),
+route_data.end(),
+0,
+[](const double sum, const PathData &data)
+{ return sum + from_alias<double>(data.weight_until_turn); });

 // s
 // |
@ -122,9 +122,8 @@ inline bool haveSameMode(const RouteStep &first, const RouteStep &second, const
 // alias for readability
 inline bool haveSameName(const RouteStep &lhs, const RouteStep &rhs)
 {
-    const auto has_name_or_ref = [](auto const &step) {
-        return !step.name.empty() || !step.ref.empty();
-    };
+    const auto has_name_or_ref = [](auto const &step)
+    { return !step.name.empty() || !step.ref.empty(); };

     // make sure empty is not involved
     if (!has_name_or_ref(lhs) || !has_name_or_ref(rhs))
@ -151,12 +150,14 @@ inline bool haveSameName(const RouteStep &lhs, const RouteStep &rhs)
 // alias for readability, both turn right | left
 inline bool areSameSide(const RouteStep &lhs, const RouteStep &rhs)
 {
-    const auto is_left = [](const RouteStep &step) {
+    const auto is_left = [](const RouteStep &step)
+    {
         return hasModifier(step, osrm::guidance::DirectionModifier::Straight) ||
                hasLeftModifier(step.maneuver.instruction);
     };

-    const auto is_right = [](const RouteStep &step) {
+    const auto is_right = [](const RouteStep &step)
+    {
         return hasModifier(step, osrm::guidance::DirectionModifier::Straight) ||
                hasRightModifier(step.maneuver.instruction);
     };
@ -174,7 +175,8 @@ inline std::vector<RouteStep> removeNoTurnInstructions(std::vector<RouteStep> st
     // Two valid NO_TURNs exist in each leg in the form of Depart/Arrive

     // keep valid instructions
-    const auto not_is_valid = [](const RouteStep &step) {
+    const auto not_is_valid = [](const RouteStep &step)
+    {
         return step.maneuver.instruction == osrm::guidance::TurnInstruction::NO_TURN() &&
                step.maneuver.waypoint_type == WaypointType::None;
     };
@ -225,7 +227,8 @@ inline double totalTurnAngle(const RouteStep &entry_step, const RouteStep &exit_
 inline bool bearingsAreReversed(const double bearing_in, const double bearing_out)
 {
     // Nearly perfectly reversed angles have a difference close to 180 degrees (straight)
-    const double left_turn_angle = [&]() {
+    const double left_turn_angle = [&]()
+    {
         if (0 <= bearing_out && bearing_out <= bearing_in)
             return bearing_in - bearing_out;
         return bearing_in + 360 - bearing_out;
@ -20,14 +20,16 @@ template <typename Iter, typename Fn> inline Fn forEachRoundabout(Iter first, It
 {
     while (first != last)
     {
-        const auto enter = std::find_if(first, last, [](const RouteStep &step) {
-            return entersRoundabout(step.maneuver.instruction);
-        });
+        const auto enter = std::find_if(first,
+                                        last,
+                                        [](const RouteStep &step)
+                                        { return entersRoundabout(step.maneuver.instruction); });

         // enter has to come before leave, otherwise: faulty data / partial roundabout, skip those
-        const auto leave = std::find_if(enter, last, [](const RouteStep &step) {
-            return leavesRoundabout(step.maneuver.instruction);
-        });
+        const auto leave = std::find_if(enter,
+                                        last,
+                                        [](const RouteStep &step)
+                                        { return leavesRoundabout(step.maneuver.instruction); });

         // No roundabouts, or partial one (like start / end inside a roundabout)
         if (enter == last || leave == last)
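For reference, a minimal self-contained sketch of the reversed-bearing test reformatted above: the lambda body is taken from the hunk, while the 35-degree tolerance and the main() are illustrative assumptions, not OSRM's values.

#include <cmath>
#include <iostream>

// Compute the left-turn angle between incoming and outgoing bearing and check
// how close it is to 180 degrees (a near-perfect reversal).
bool bearingsAreReversed(const double bearing_in, const double bearing_out)
{
    const double left_turn_angle = [&]()
    {
        if (0 <= bearing_out && bearing_out <= bearing_in)
            return bearing_in - bearing_out;
        return bearing_in + 360 - bearing_out;
    }();
    return std::fabs(left_turn_angle - 180.0) < 35.0; // tolerance is illustrative
}

int main() { std::cout << std::boolalpha << bearingsAreReversed(90.0, 270.0) << "\n"; } // true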
@ -259,10 +259,10 @@ inline util::Coordinate candidatesInputLocation(const PhantomNodeCandidates &can

 inline bool candidatesHaveComponent(const PhantomNodeCandidates &candidates, uint32_t component_id)
 {
-    return std::any_of(
-        candidates.begin(), candidates.end(), [component_id](const PhantomNode &node) {
-            return node.component.id == component_id;
-        });
+    return std::any_of(candidates.begin(),
+                       candidates.end(),
+                       [component_id](const PhantomNode &node)
+                       { return node.component.id == component_id; });
 }

 struct PhantomEndpoints
@ -33,10 +33,10 @@ class BasePlugin

     bool CheckAllCoordinates(const std::vector<util::Coordinate> &coordinates) const
     {
-        return !std::any_of(
-            std::begin(coordinates), std::end(coordinates), [](const util::Coordinate coordinate) {
-                return !coordinate.IsValid();
-            });
+        return !std::any_of(std::begin(coordinates),
+                            std::end(coordinates),
+                            [](const util::Coordinate coordinate)
+                            { return !coordinate.IsValid(); });
     }

     bool CheckAlgorithms(const api::BaseParameters &params,
@ -105,13 +105,15 @@ class BasePlugin
     {
         // are all phantoms from a tiny cc?
         const auto all_in_same_tiny_component =
-            [](const std::vector<PhantomCandidateAlternatives> &alts_list) {
+            [](const std::vector<PhantomCandidateAlternatives> &alts_list)
+        {
             return std::any_of(
                 alts_list.front().first.begin(),
                 alts_list.front().first.end(),
                 // For each of the first possible phantoms, check if all other
                 // positions in the list have a phantom from the same small component.
-                [&](const PhantomNode &phantom) {
+                [&](const PhantomNode &phantom)
+                {
                     if (!phantom.component.is_tiny)
                     {
                         return false;
@ -120,30 +122,28 @@ class BasePlugin
                     return std::all_of(
                         std::next(alts_list.begin()),
                         std::end(alts_list),
-                        [component_id](const PhantomCandidateAlternatives &alternatives) {
-                            return candidatesHaveComponent(alternatives.first, component_id);
-                        });
+                        [component_id](const PhantomCandidateAlternatives &alternatives)
+                        { return candidatesHaveComponent(alternatives.first, component_id); });
                 });
         };

         // Move the alternative into the final list
-        const auto fallback_to_big_component = [](PhantomCandidateAlternatives &alternatives) {
+        const auto fallback_to_big_component = [](PhantomCandidateAlternatives &alternatives)
+        {
             auto no_big_alternative = alternatives.second.empty();
             return no_big_alternative ? std::move(alternatives.first)
                                       : std::move(alternatives.second);
         };

         // Move the alternative into the final list
-        const auto use_closed_phantom = [](PhantomCandidateAlternatives &alternatives) {
-            return std::move(alternatives.first);
-        };
+        const auto use_closed_phantom = [](PhantomCandidateAlternatives &alternatives)
+        { return std::move(alternatives.first); };

         const auto no_alternatives =
             std::all_of(alternatives_list.begin(),
                         alternatives_list.end(),
-                        [](const PhantomCandidateAlternatives &alternatives) {
-                            return alternatives.second.empty();
-                        });
+                        [](const PhantomCandidateAlternatives &alternatives)
+                        { return alternatives.second.empty(); });

         std::vector<PhantomNodeCandidates> snapped_phantoms;
         snapped_phantoms.reserve(alternatives_list.size());
@ -313,12 +313,12 @@ class BasePlugin
             alternatives.end(),
             coordinates.begin(),
             coordinates.end(),
-            [](const auto &candidates_pair, const auto &coordinate) {
+            [](const auto &candidates_pair, const auto &coordinate)
+            {
                 return std::any_of(candidates_pair.first.begin(),
                                    candidates_pair.first.end(),
-                                   [&](const auto &phantom) {
-                                       return phantom.input_location == coordinate;
-                                   });
+                                   [&](const auto &phantom)
+                                   { return phantom.input_location == coordinate; });
             });
         std::size_t missing_index = std::distance(alternatives.begin(), mismatch.first);
         return std::string("Could not find a matching segment for coordinate ") +
@ -39,7 +39,8 @@ std::string encodePolyline(CoordVectorForwardIter begin, CoordVectorForwardIter
         begin,
         end,
         [&delta_numbers, &current_lat, &current_lon, coordinate_to_polyline](
-            const util::Coordinate loc) {
+            const util::Coordinate loc)
+        {
             const int lat_diff =
                 std::round(static_cast<int>(loc.lat) * coordinate_to_polyline) - current_lat;
             const int lon_diff =
@ -190,8 +190,10 @@ void annotatePath(const FacadeT &facade,
     std::vector<SegmentDuration> duration_vector;
     std::vector<DatasourceID> datasource_vector;

-    const auto get_segment_geometry = [&](const auto geometry_index) {
-        const auto copy = [](auto &vector, const auto range) {
+    const auto get_segment_geometry = [&](const auto geometry_index)
+    {
+        const auto copy = [](auto &vector, const auto range)
+        {
             vector.resize(range.size());
             std::copy(range.begin(), range.end(), vector.begin());
         };
@ -294,9 +294,9 @@ EdgeDistance calculateEBGNodeAnnotations(const DataFacade<Algorithm> &facade,

         // Look for an edge on the forward CH graph (.forward)
         EdgeID smaller_edge_id =
-            facade.FindSmallestEdge(std::get<0>(edge), std::get<1>(edge), [](const auto &data) {
-                return data.forward;
-            });
+            facade.FindSmallestEdge(std::get<0>(edge),
+                                    std::get<1>(edge),
+                                    [](const auto &data) { return data.forward; });

         // If we didn't find one there, the we might be looking at a part of the path that
         // was found using the backward search. Here, we flip the node order (.second,
@ -381,7 +381,8 @@ void unpackPath(const FacadeT &facade,
     unpackPath(facade,
                packed_path_begin,
                packed_path_end,
-               [&](std::pair<NodeID, NodeID> &edge, const auto &edge_id) {
+               [&](std::pair<NodeID, NodeID> &edge, const auto &edge_id)
+               {
                    BOOST_ASSERT(edge.first == unpacked_nodes.back());
                    unpacked_nodes.push_back(edge.second);
                    unpacked_edges.push_back(edge_id);
@ -30,7 +30,8 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
                                  const PhantomNode &source,
                                  const PhantomNode &target)
 {
-    auto level = [&partition, node](const SegmentID &source, const SegmentID &target) {
+    auto level = [&partition, node](const SegmentID &source, const SegmentID &target)
+    {
         if (source.enabled && target.enabled)
             return partition.GetQueryLevel(source.id, target.id, node);
         return INVALID_LEVEL_ID;
@ -59,7 +60,8 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
         endpoint_candidates.source_phantoms.begin(),
         endpoint_candidates.source_phantoms.end(),
         INVALID_LEVEL_ID,
-        [&](LevelID current_level, const PhantomNode &source) {
+        [&](LevelID current_level, const PhantomNode &source)
+        {
             return std::min(
                 current_level,
                 getNodeQueryLevel(partition, node, source, endpoint_candidates.target_phantom));
@ -76,7 +78,8 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
         endpoint_candidates.source_phantoms.begin(),
         endpoint_candidates.source_phantoms.end(),
         INVALID_LEVEL_ID,
-        [&](LevelID level_1, const PhantomNode &source) {
+        [&](LevelID level_1, const PhantomNode &source)
+        {
             return std::min(
                 level_1,
                 std::accumulate(endpoint_candidates.target_phantoms.begin(),
@ -119,7 +122,8 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
                                  const NodeID node,
                                  const PhantomNodeCandidates &candidates)
 {
-    auto highest_different_level = [&partition, node](const SegmentID &segment) {
+    auto highest_different_level = [&partition, node](const SegmentID &segment)
+    {
         return segment.enabled ? partition.GetHighestDifferentLevel(segment.id, node)
                                : INVALID_LEVEL_ID;
     };
@ -128,7 +132,8 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
         std::accumulate(candidates.begin(),
                         candidates.end(),
                         INVALID_LEVEL_ID,
-                        [&](LevelID current_level, const PhantomNode &phantom_node) {
+                        [&](LevelID current_level, const PhantomNode &phantom_node)
+                        {
                             auto highest_level =
                                 std::min(highest_different_level(phantom_node.forward_segment_id),
                                          highest_different_level(phantom_node.reverse_segment_id));
@ -151,9 +156,11 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
     // This is equivalent to min_{∀ source, target} partition.GetQueryLevel(source, node, target)
     auto init = getNodeQueryLevel(partition, node, candidates_list[phantom_index]);
     auto result = std::accumulate(
-        phantom_indices.begin(), phantom_indices.end(), init, [&](LevelID level, size_t index) {
-            return std::min(level, getNodeQueryLevel(partition, node, candidates_list[index]));
-        });
+        phantom_indices.begin(),
+        phantom_indices.end(),
+        init,
+        [&](LevelID level, size_t index)
+        { return std::min(level, getNodeQueryLevel(partition, node, candidates_list[index])); });
     return result;
 }
 } // namespace
@ -601,7 +608,8 @@ void unpackPath(const FacadeT &facade,
     util::for_each_pair(
         packed_path_begin,
         packed_path_end,
-        [&facade, &unpacked_nodes, &unpacked_edges](const auto from, const auto to) {
+        [&facade, &unpacked_nodes, &unpacked_edges](const auto from, const auto to)
+        {
             unpacked_nodes.push_back(to);
             unpacked_edges.push_back(facade.FindEdge(from, to));
         });
@ -19,8 +19,7 @@ void searchWithUTurn(SearchEngineData<Algorithm> &engine_working_data,
                      typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
                      typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
                      const PhantomEndpointCandidates &candidates,
-                     const EdgeWeight &total_weight,
-                     EdgeWeight &new_total_weight,
+                     EdgeWeight &leg_weight,
                      std::vector<NodeID> &leg_packed_path)
 {
     forward_heap.Clear();
@ -31,14 +30,14 @@ void searchWithUTurn(SearchEngineData<Algorithm> &engine_working_data,
         if (source.IsValidForwardSource())
         {
             forward_heap.Insert(source.forward_segment_id.id,
-                                total_weight - source.GetForwardWeightPlusOffset(),
+                                EdgeWeight{0} - source.GetForwardWeightPlusOffset(),
                                 source.forward_segment_id.id);
         }

         if (source.IsValidReverseSource())
         {
             forward_heap.Insert(source.reverse_segment_id.id,
-                                total_weight - source.GetReverseWeightPlusOffset(),
+                                EdgeWeight{0} - source.GetReverseWeightPlusOffset(),
                                 source.reverse_segment_id.id);
         }
     }
@ -62,10 +61,10 @@ void searchWithUTurn(SearchEngineData<Algorithm> &engine_working_data,
            facade,
            forward_heap,
            reverse_heap,
-           new_total_weight,
+           leg_weight,
            leg_packed_path,
-           getForwardLoopNodes(candidates),
-           getBackwardLoopNodes(candidates),
+           {},
+           {},
            candidates);
 }

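The signature change above drops the running total and has searchWithUTurn report a per-leg weight instead, with the heap seeded from EdgeWeight{0}. A hedged standalone sketch of the caller-side accumulation this implies (plain int weights and totalWeight() are stand-ins, not OSRM API):

#include <iostream>
#include <limits>
#include <optional>
#include <vector>

using Weight = int;
constexpr Weight kInvalid = std::numeric_limits<Weight>::max();

// Each leg search returns its own weight; the caller sums them instead of
// threading the running total into the search itself.
std::optional<Weight> totalWeight(const std::vector<Weight> &leg_weights)
{
    Weight total = 0;
    for (const Weight leg_weight : leg_weights)
    {
        if (leg_weight == kInvalid)
            return std::nullopt; // one leg failed -> no route
        total += leg_weight;
    }
    return total;
}

int main()
{
    const auto total = totalWeight({3, 5, 4});
    std::cout << (total ? *total : -1) << "\n"; // 12
}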
@ -248,7 +247,8 @@ constructRouteResult(const DataFacade<Algorithm> &facade,
     auto source_it =
         std::find_if(source_candidates.begin(),
                      source_candidates.end(),
-                     [&start_node](const auto &source_phantom) {
+                     [&start_node](const auto &source_phantom)
+                     {
                          return (start_node == source_phantom.forward_segment_id.id ||
                                  start_node == source_phantom.reverse_segment_id.id);
                      });
@ -257,7 +257,8 @@ constructRouteResult(const DataFacade<Algorithm> &facade,
     auto target_it =
         std::find_if(target_candidates.begin(),
                      target_candidates.end(),
-                     [&end_node](const auto &target_phantom) {
+                     [&end_node](const auto &target_phantom)
+                     {
                          return (end_node == target_phantom.forward_segment_id.id ||
                                  end_node == target_phantom.reverse_segment_id.id);
                      });
@ -302,7 +303,7 @@ shortestPathWithWaypointUTurns(SearchEngineData<Algorithm> &engine_working_data,
         PhantomEndpointCandidates search_candidates{waypoint_candidates[i],
                                                     waypoint_candidates[i + 1]};
         std::vector<NodeID> packed_leg;
-        EdgeWeight new_total_weight = INVALID_EDGE_WEIGHT;
+        EdgeWeight leg_weight = INVALID_EDGE_WEIGHT;

         // We have a valid path up to this leg
         BOOST_ASSERT(total_weight != INVALID_EDGE_WEIGHT);
@ -311,16 +312,15 @@ shortestPathWithWaypointUTurns(SearchEngineData<Algorithm> &engine_working_data,
                         forward_heap,
                         reverse_heap,
                         search_candidates,
-                        total_weight,
-                        new_total_weight,
+                        leg_weight,
                         packed_leg);

-        if (new_total_weight == INVALID_EDGE_WEIGHT)
+        if (leg_weight == INVALID_EDGE_WEIGHT)
             return {};

         packed_leg_begin.push_back(total_packed_path.size());
         total_packed_path.insert(total_packed_path.end(), packed_leg.begin(), packed_leg.end());
-        total_weight = new_total_weight;
+        total_weight += leg_weight;
     };

     // Add sentinel
@ -466,16 +466,16 @@ struct route_state
         last.total_weight_to_forward.resize(init_candidates.size(), {0});
         last.total_weight_to_reverse.resize(init_candidates.size(), {0});
         // Initialize routability from source validity.
-        std::transform(
-            init_candidates.begin(),
+        std::transform(init_candidates.begin(),
             init_candidates.end(),
             std::back_inserter(last.reached_forward_node_target),
-            [](const PhantomNode &phantom_node) { return phantom_node.IsValidForwardSource(); });
-        std::transform(
-            init_candidates.begin(),
+            [](const PhantomNode &phantom_node)
+            { return phantom_node.IsValidForwardSource(); });
+        std::transform(init_candidates.begin(),
             init_candidates.end(),
             std::back_inserter(last.reached_reverse_node_target),
-            [](const PhantomNode &phantom_node) { return phantom_node.IsValidReverseSource(); });
+            [](const PhantomNode &phantom_node)
+            { return phantom_node.IsValidReverseSource(); });
     }

     bool completeLeg()
@ -613,15 +613,21 @@ struct route_state
     {
         // Find the segment from final leg with the shortest path
         auto forward_range = util::irange<std::size_t>(0UL, last.total_weight_to_forward.size());
-        auto forward_min =
-            std::min_element(forward_range.begin(), forward_range.end(), [&](size_t a, size_t b) {
+        auto forward_min = std::min_element(
+            forward_range.begin(),
+            forward_range.end(),
+            [&](size_t a, size_t b)
+            {
                 return (last.total_weight_to_forward[a] < last.total_weight_to_forward[b] ||
                         (last.total_weight_to_forward[a] == last.total_weight_to_forward[b] &&
                          last.total_nodes_to_forward[a] < last.total_nodes_to_forward[b]));
             });
         auto reverse_range = util::irange<std::size_t>(0UL, last.total_weight_to_reverse.size());
-        auto reverse_min =
-            std::min_element(reverse_range.begin(), reverse_range.end(), [&](size_t a, size_t b) {
+        auto reverse_min = std::min_element(
+            reverse_range.begin(),
+            reverse_range.end(),
+            [&](size_t a, size_t b)
+            {
                 return (last.total_weight_to_reverse[a] < last.total_weight_to_reverse[b] ||
                         (last.total_weight_to_reverse[a] == last.total_weight_to_reverse[b] &&
                          last.total_nodes_to_reverse[a] < last.total_nodes_to_reverse[b]));
@ -27,9 +27,9 @@ inline auto getClassData(const std::size_t index)

 inline bool isValidClassName(const std::string &name)
 {
-    return std::find_if_not(name.begin(), name.end(), [](const auto c) {
-        return std::isalnum(c);
-    }) == name.end();
+    return std::find_if_not(name.begin(),
+                            name.end(),
+                            [](const auto c) { return std::isalnum(c); }) == name.end();
 }
 } // namespace osrm::extractor

@ -125,9 +125,8 @@ inline std::string canonicalizeStringList(std::string strlist, const std::string

     // collapse spaces; this is needed in case we expand "; X" => "; X" above
     // but also makes sense to do irregardless of the fact - canonicalizing strings.
-    const auto spaces = [](unsigned char lhs, unsigned char rhs) {
-        return ::isspace(lhs) && ::isspace(rhs);
-    };
+    const auto spaces = [](unsigned char lhs, unsigned char rhs)
+    { return ::isspace(lhs) && ::isspace(rhs); };
     auto it = std::unique(begin(strlist), end(strlist), spaces);
     strlist.erase(it, end(strlist));

@ -133,7 +133,8 @@ class ExtractionRelationContainer
         (void)res; // prevent unused warning in release
     }

-    auto MergeRefMap = [&](RelationRefMap &source, RelationRefMap &target) {
+    auto MergeRefMap = [&](RelationRefMap &source, RelationRefMap &target)
+    {
         for (auto it : source)
         {
             auto &v = target[it.first];
@ -151,7 +152,8 @@ class ExtractionRelationContainer
     const RelationIDList &GetRelations(const OsmIDTyped &member_id) const
     {
         auto getFromMap = [this](std::uint64_t id,
-                                 const RelationRefMap &map) -> const RelationIDList & {
+                                 const RelationRefMap &map) -> const RelationIDList &
+        {
             auto it = map.find(id);
             if (it != map.end())
                 return it->second;
@ -1,6 +1,7 @@
 #ifndef OSRM_EXTRACTION_SEGMENT_HPP
 #define OSRM_EXTRACTION_SEGMENT_HPP

+#include <extractor/node_based_edge.hpp>
 #include <util/coordinate.hpp>

 namespace osrm::extractor
@ -12,9 +13,10 @@ struct ExtractionSegment
                       const osrm::util::Coordinate target_,
                       double distance_,
                       double weight_,
-                      double duration_)
+                      double duration_,
+                      const NodeBasedEdgeClassification flags_)
         : source(source_), target(target_), distance(distance_), weight(weight_),
-          duration(duration_)
+          duration(duration_), flags(flags_)
     {
     }

@ -23,6 +25,7 @@ struct ExtractionSegment
     const double distance;
     double weight;
     double duration;
+    const NodeBasedEdgeClassification flags;
 };
 } // namespace osrm::extractor

@ -453,7 +453,8 @@ void readRawNBGraph(const boost::filesystem::path &path,
     coordinates.resize(number_of_nodes);
     osm_node_ids.reserve(number_of_nodes);
     auto index = 0;
-    auto decode = [&](const auto &current_node) {
+    auto decode = [&](const auto &current_node)
+    {
         coordinates[index].lon = current_node.lon;
         coordinates[index].lat = current_node.lat;
         osm_node_ids.push_back(current_node.node_id);
@ -24,7 +24,7 @@ class GraphCompressor

   public:
     void Compress(const std::unordered_set<NodeID> &barrier_nodes,
-                  const TrafficSignals &traffic_signals,
+                  TrafficSignals &traffic_signals,
                   ScriptingEnvironment &scripting_environment,
                   std::vector<TurnRestriction> &turn_restrictions,
                   std::vector<UnresolvedManeuverOverride> &maneuver_overrides,
@ -26,16 +26,14 @@ namespace osrm::extractor::intersection

 inline auto makeCompareAngularDeviation(const double angle)
 {
-    return [angle](const auto &lhs, const auto &rhs) {
-        return util::angularDeviation(lhs.angle, angle) < util::angularDeviation(rhs.angle, angle);
-    };
+    return [angle](const auto &lhs, const auto &rhs)
+    { return util::angularDeviation(lhs.angle, angle) < util::angularDeviation(rhs.angle, angle); };
 }

 inline auto makeExtractLanesForRoad(const util::NodeBasedDynamicGraph &node_based_graph)
 {
-    return [&node_based_graph](const auto &road) {
-        return node_based_graph.GetEdgeData(road.eid).road_classification.GetNumberOfLanes();
-    };
+    return [&node_based_graph](const auto &road)
+    { return node_based_graph.GetEdgeData(road.eid).road_classification.GetNumberOfLanes(); };
 }

 // When viewing an intersection from an incoming edge, we can transform a shape into a view which
@ -63,7 +61,10 @@ template <typename Self> struct EnableShapeOps
     auto FindClosestBearing(double base_bearing) const
     {
         return std::min_element(
-            self()->begin(), self()->end(), [base_bearing](const auto &lhs, const auto &rhs) {
+            self()->begin(),
+            self()->end(),
+            [base_bearing](const auto &lhs, const auto &rhs)
+            {
                 return util::angularDeviation(lhs.perceived_bearing, base_bearing) <
                        util::angularDeviation(rhs.perceived_bearing, base_bearing);
             });
@ -81,7 +82,8 @@ template <typename Self> struct EnableShapeOps
         BOOST_ASSERT(!self()->empty());
         auto initial = converter(self()->front());

-        const auto extract_maximal_value = [&initial, converter](const auto &road) {
+        const auto extract_maximal_value = [&initial, converter](const auto &road)
+        {
             initial = std::max(initial, converter(road));
             return false;
         };
@ -191,8 +193,10 @@ template <typename Self> struct EnableIntersectionOps
     auto findClosestTurn(const double angle, const UnaryPredicate filter) const
     {
         BOOST_ASSERT(!self()->empty());
-        const auto candidate =
-            boost::range::min_element(*self(), [angle, &filter](const auto &lhs, const auto &rhs) {
+        const auto candidate = boost::range::min_element(
+            *self(),
+            [angle, &filter](const auto &lhs, const auto &rhs)
+            {
                 const auto filtered_lhs = filter(lhs), filtered_rhs = filter(rhs);
                 const auto deviation_lhs = util::angularDeviation(lhs.angle, angle),
                            deviation_rhs = util::angularDeviation(rhs.angle, angle);
@ -39,7 +39,7 @@ class NodeBasedGraphFactory
     NodeBasedGraphFactory(ScriptingEnvironment &scripting_environment,
                           std::vector<TurnRestriction> &turn_restrictions,
                           std::vector<UnresolvedManeuverOverride> &maneuver_overrides,
-                          const TrafficSignals &traffic_signals,
+                          TrafficSignals &traffic_signals,
                           std::unordered_set<NodeID> &&barriers,
                           std::vector<util::Coordinate> &&coordinates,
                           extractor::PackedOSMIDs &&osm_node_ids,
@ -71,7 +71,7 @@ class NodeBasedGraphFactory
     void Compress(ScriptingEnvironment &scripting_environment,
                   std::vector<TurnRestriction> &turn_restrictions,
                   std::vector<UnresolvedManeuverOverride> &maneuver_overrides,
-                  const TrafficSignals &traffic_signals);
+                  TrafficSignals &traffic_signals);

     // Most ways are bidirectional, making the geometry in forward and backward direction the same,
     // except for reversal. We make use of this fact by keeping only one representation of the
@ -32,9 +32,8 @@ template <typename RestrictionFilter> class NodeRestrictionMap
     // Find all restrictions applicable to (from,via,to) turns
     auto Restrictions(NodeID from, NodeID via, NodeID to) const
    {
-        const auto turnFilter = [this, to](const auto &restriction) {
-            return index_filter(restriction) && restriction->IsTurnRestricted(to);
-        };
+        const auto turnFilter = [this, to](const auto &restriction)
+        { return index_filter(restriction) && restriction->IsTurnRestricted(to); };
         return getRange(from, via) | boost::adaptors::filtered(turnFilter);
     };

@ -19,6 +19,21 @@ struct TrafficSignals
     {
         return bidirectional_nodes.count(to) > 0 || unidirectional_segments.count({from, to}) > 0;
     }

+    void Compress(NodeID from, NodeID via, NodeID to)
+    {
+        bidirectional_nodes.erase(via);
+        if (unidirectional_segments.count({via, to}))
+        {
+            unidirectional_segments.erase({via, to});
+            unidirectional_segments.insert({from, to});
+        }
+        if (unidirectional_segments.count({via, from}))
+        {
+            unidirectional_segments.erase({via, from});
+            unidirectional_segments.insert({to, from});
+        }
+    }
 };
 } // namespace osrm::extractor

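A self-contained illustration of the new Compress() member added above: when graph compression removes the middle node via of (from, via, to), a unidirectional signal stored against the old segment is re-anchored onto the merged segment. The container types below are simplified stand-ins, not OSRM's.

#include <iostream>
#include <set>
#include <unordered_set>
#include <utility>

using NodeID = int;

struct Signals
{
    std::unordered_set<NodeID> bidirectional_nodes;
    std::set<std::pair<NodeID, NodeID>> unidirectional_segments;

    // Mirrors the Compress() logic in the hunk above.
    void Compress(NodeID from, NodeID via, NodeID to)
    {
        bidirectional_nodes.erase(via);
        if (unidirectional_segments.count({via, to}))
        {
            unidirectional_segments.erase({via, to});
            unidirectional_segments.insert({from, to});
        }
        if (unidirectional_segments.count({via, from}))
        {
            unidirectional_segments.erase({via, from});
            unidirectional_segments.insert({to, from});
        }
    }
};

int main()
{
    Signals s;
    s.unidirectional_segments.insert({2, 3}); // signal when travelling 2 -> 3
    s.Compress(1, 2, 3);                      // node 2 is compressed away
    std::cout << s.unidirectional_segments.count({1, 3}) << "\n"; // 1: now stored as 1 -> 3
}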
@ -186,11 +186,11 @@ IntersectionHandler::IsDistinctNarrowTurn(const EdgeID via_edge,
         node_data_container.GetAnnotation(candidate_data.annotation_data);
     auto const candidate_deviation = util::angularDeviation(candidate->angle, STRAIGHT_ANGLE);

-    auto const num_lanes = [](auto const &data) {
-        return data.flags.road_classification.GetNumberOfLanes();
-    };
+    auto const num_lanes = [](auto const &data)
+    { return data.flags.road_classification.GetNumberOfLanes(); };

-    auto const lanes_number_equal = [&](auto const &compare_data) {
+    auto const lanes_number_equal = [&](auto const &compare_data)
+    {
         // Check if the lanes number is the same going from the inbound edge to the compare road
         return num_lanes(compare_data) > 0 && num_lanes(compare_data) == num_lanes(via_edge_data);
     };
@ -209,7 +209,8 @@ IntersectionHandler::IsDistinctNarrowTurn(const EdgeID via_edge,

     // check if there are other narrow turns are not considered passing a low category or simply
     // a link of the same type as the potentially obvious turn
-    auto const is_similar_turn = [&](auto const &road) {
+    auto const is_similar_turn = [&](auto const &road)
+    {
         // 1. Skip the candidate road
         if (road.eid == candidate->eid)
         {
@ -404,7 +405,8 @@ IntersectionHandler::IsDistinctWideTurn(const EdgeID via_edge,
     // Deviation is larger than NARROW_TURN_ANGLE0 here for the candidate
     // check if there is any turn, that might look just as obvious, even though it might not
     // be allowed. Entry-allowed isn't considered a valid distinction criterion here
-    auto const is_similar_turn = [&](auto const &road) {
+    auto const is_similar_turn = [&](auto const &road)
+    {
         // 1. Skip over our candidate
         if (road.eid == candidate->eid)
             return false;
@ -502,7 +504,8 @@ std::size_t IntersectionHandler::findObviousTurn(const EdgeID via_edge,
         node_data_container.GetAnnotation(via_edge_data.annotation_data);

     // implement a filter, taking out all roads of lower class or different names
-    auto const continues_on_name_with_higher_class = [&](auto const &road) {
+    auto const continues_on_name_with_higher_class = [&](auto const &road)
+    {
         // it needs to be possible to enter the road
         if (!road.entry_allowed)
             return true;
@ -549,7 +552,8 @@ std::size_t IntersectionHandler::findObviousTurn(const EdgeID via_edge,

     // this check is not part of the main conditions, so that if the turn looks obvious from all
     // other perspectives, a mode change will not result in different classification
-    auto const to_index_if_valid = [&](auto const iterator) -> std::size_t {
+    auto const to_index_if_valid = [&](auto const iterator) -> std::size_t
+    {
         auto const &from_data = node_based_graph.GetEdgeData(via_edge);
         auto const &to_data = node_based_graph.GetEdgeData(iterator->eid);

@ -576,7 +580,8 @@ std::size_t IntersectionHandler::findObviousTurn(const EdgeID via_edge,

     // opposed to before, we do not care about name changes, again: this is a filter, so internal
     // false/true will be negated for selection
-    auto const valid_of_higher_or_same_category = [&](auto const &road) {
+    auto const valid_of_higher_or_same_category = [&](auto const &road)
+    {
         if (!road.entry_allowed)
             return true;

@ -639,7 +644,8 @@ std::size_t IntersectionHandler::findObviousTurn(const EdgeID via_edge,
     const auto all_roads_have_same_name =
         std::all_of(intersection.begin(),
                     intersection.end(),
-                    [id = via_edge_annotation.name_id, this](auto const &road) {
+                    [id = via_edge_annotation.name_id, this](auto const &road)
+                    {
                         auto const data_id = node_based_graph.GetEdgeData(road.eid).annotation_data;
                         auto const name_id = node_data_container.GetAnnotation(data_id).name_id;
                         return (name_id != EMPTY_NAMEID) && (name_id == id);
@ -205,9 +205,50 @@ inline engine_config_ptr argumentsToEngineConfig(const Napi::CallbackInfo &args)
         }
     }

+    auto disable_feature_dataset = params.Get("disable_feature_dataset");
+    if (disable_feature_dataset.IsArray())
+    {
+        Napi::Array datasets = disable_feature_dataset.As<Napi::Array>();
+        for (uint32_t i = 0; i < datasets.Length(); ++i)
+        {
+            Napi::Value dataset = datasets.Get(i);
+            if (!dataset.IsString())
+            {
+                ThrowError(args.Env(), "disable_feature_dataset list option must be a string");
+                return engine_config_ptr();
+            }
+            auto dataset_str = dataset.ToString().Utf8Value();
+            if (dataset_str == "ROUTE_GEOMETRY")
+            {
+                engine_config->disable_feature_dataset.push_back(
+                    osrm::storage::FeatureDataset::ROUTE_GEOMETRY);
+            }
+            else if (dataset_str == "ROUTE_STEPS")
+            {
+                engine_config->disable_feature_dataset.push_back(
+                    osrm::storage::FeatureDataset::ROUTE_STEPS);
+            }
+            else
+            {
+                ThrowError(
+                    args.Env(),
+                    "disable_feature_dataset array can include 'ROUTE_GEOMETRY', 'ROUTE_STEPS'.");
+                return engine_config_ptr();
+            }
+        }
+    }
+    else if (!disable_feature_dataset.IsUndefined())
+    {
+        ThrowError(args.Env(),
+                   "disable_feature_dataset option must be an array and can include the string "
+                   "values 'ROUTE_GEOMETRY', 'ROUTE_STEPS'.");
+        return engine_config_ptr();
+    }

     if (!path.IsUndefined())
     {
-        engine_config->storage_config = osrm::StorageConfig(path.ToString().Utf8Value());
+        engine_config->storage_config = osrm::StorageConfig(path.ToString().Utf8Value(),
+                                                            engine_config->disable_feature_dataset);

         engine_config->use_shared_memory = false;
     }
@ -317,9 +358,16 @@ inline engine_config_ptr argumentsToEngineConfig(const Napi::CallbackInfo &args)
         ThrowError(args.Env(), "max_alternatives must be an integral number");
         return engine_config_ptr();
     }
-    if (!default_radius.IsUndefined() && !default_radius.IsNumber())
+    if (!max_radius_map_matching.IsUndefined() && max_radius_map_matching.IsString() &&
+        max_radius_map_matching.ToString().Utf8Value() != "unlimited")
     {
-        ThrowError(args.Env(), "default_radius must be an integral number");
+        ThrowError(args.Env(), "max_radius_map_matching must be unlimited or an integral number");
+        return engine_config_ptr();
+    }
+    if (!default_radius.IsUndefined() && default_radius.IsString() &&
+        default_radius.ToString().Utf8Value() != "unlimited")
+    {
+        ThrowError(args.Env(), "default_radius must be unlimited or an integral number");
         return engine_config_ptr();
     }

@ -337,10 +385,17 @@ inline engine_config_ptr argumentsToEngineConfig(const Napi::CallbackInfo &args)
     engine_config->max_results_nearest = max_results_nearest.ToNumber().Int32Value();
     if (max_alternatives.IsNumber())
         engine_config->max_alternatives = max_alternatives.ToNumber().Int32Value();

     if (max_radius_map_matching.IsNumber())
         engine_config->max_radius_map_matching = max_radius_map_matching.ToNumber().DoubleValue();
+    else if (max_radius_map_matching.IsString() &&
+             max_radius_map_matching.ToString().Utf8Value() == "unlimited")
+        engine_config->max_radius_map_matching = -1.0;

     if (default_radius.IsNumber())
         engine_config->default_radius = default_radius.ToNumber().DoubleValue();
+    else if (default_radius.IsString() && default_radius.ToString().Utf8Value() == "unlimited")
+        engine_config->default_radius = -1.0;

     return engine_config;
 }
@ -505,6 +560,10 @@ inline bool argumentsToParameter(const Napi::CallbackInfo &args,
                 {
                     params->approaches.push_back(osrm::Approach::CURB);
                 }
+                else if (approach_str == "opposite")
+                {
+                    params->approaches.push_back(osrm::Approach::OPPOSITE);
+                }
                 else if (approach_str == "unrestricted")
                 {
                     params->approaches.push_back(osrm::Approach::UNRESTRICTED);
@ -512,13 +571,14 @@ inline bool argumentsToParameter(const Napi::CallbackInfo &args,
                 else
                 {
                     ThrowError(args.Env(),
-                               "'approaches' param must be one of [curb, unrestricted]");
+                               "'approaches' param must be one of [curb, opposite, unrestricted]");
                     return false;
                 }
             }
             else
             {
-                ThrowError(args.Env(), "Approach must be a string: [curb, unrestricted] or null");
+                ThrowError(args.Env(),
+                           "Approach must be a string: [curb, opposite, unrestricted] or null");
                 return false;
             }
         }
include/osrm/datasets.hpp (new file, 15 lines)
@ -0,0 +1,15 @@
+#ifndef DATASETS_HPP
+#define DATASETS_HPP
+
+namespace osrm::storage
+{
+
+enum class FeatureDataset
+{
+    ROUTE_STEPS,
+    ROUTE_GEOMETRY,
+};
+
+} // namespace osrm::storage
+
+#endif
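The new header introduces the FeatureDataset enum consumed by the node bindings earlier in this diff. A hedged sketch of the string-to-enum mapping they perform (parseFeatureDataset is illustrative only, not part of OSRM; the enum mirrors the header above):

#include <iostream>
#include <optional>
#include <string>

enum class FeatureDataset
{
    ROUTE_STEPS,
    ROUTE_GEOMETRY,
};

// Maps the string names accepted by 'disable_feature_dataset' onto the enum;
// anything else is rejected by the caller with an error message.
std::optional<FeatureDataset> parseFeatureDataset(const std::string &name)
{
    if (name == "ROUTE_STEPS")
        return FeatureDataset::ROUTE_STEPS;
    if (name == "ROUTE_GEOMETRY")
        return FeatureDataset::ROUTE_GEOMETRY;
    return std::nullopt;
}

int main() { std::cout << parseFeatureDataset("ROUTE_STEPS").has_value() << "\n"; } // 1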
@ -23,7 +23,8 @@ enum ErrorCode
     FileIOError,
     UnexpectedEndOfFile,
     IncompatibleDataset,
-    UnknownAlgorithm
+    UnknownAlgorithm,
+    UnknownFeatureDataset
 #ifndef NDEBUG
     // Leave this at the end. In debug mode, we assert that the size of
     // this enum matches the number of messages we have documented, and __ENDMARKER__
@ -65,8 +65,8 @@ inline BisectionGraph makeBisectionGraph(const std::vector<util::Coordinate> &co
     result_edges.reserve(edges.size());

     // find the end of edges that belong to node_id
-    const auto advance_edge_itr = [&edges, &result_edges](const std::size_t node_id,
-                                                          auto edge_itr) {
+    const auto advance_edge_itr = [&edges, &result_edges](const std::size_t node_id, auto edge_itr)
+    {
         while (edge_itr != edges.end() && edge_itr->source == node_id)
         {
             result_edges.push_back(edge_itr->Reduce());
@ -76,9 +76,9 @@ inline BisectionGraph makeBisectionGraph(const std::vector<util::Coordinate> &co
     };

     // create a bisection node, requires the ID of the node as well as the lower bound to its edges
-    const auto make_bisection_node = [&edges, &coordinates](const std::size_t node_id,
-                                                            const auto begin_itr,
-                                                            const auto end_itr) {
+    const auto make_bisection_node =
+        [&edges, &coordinates](const std::size_t node_id, const auto begin_itr, const auto end_itr)
+    {
         std::size_t range_begin = std::distance(edges.begin(), begin_itr);
         std::size_t range_end = std::distance(edges.begin(), end_itr);
         return BisectionGraph::NodeT(range_begin, range_end, coordinates[node_id], node_id);
@ -102,7 +102,10 @@ std::vector<BisectionInputEdge> adaptToBisectionEdge(std::vector<InputEdge> edge
     std::vector<BisectionInputEdge> result;
     result.reserve(edges.size());

-    std::transform(begin(edges), end(edges), std::back_inserter(result), [](const auto &edge) {
+    std::transform(begin(edges),
+                   end(edges),
+                   std::back_inserter(result),
+                   [](const auto &edge) {
                        return BisectionInputEdge{edge.source, edge.target};
                    });

@ -298,7 +298,8 @@ template <storage::Ownership Ownership> class CellStorageImpl
                                       auto set_num_nodes_fn,
                                       auto set_boundary_offset_fn,
                                       auto begin,
-                                      auto end) {
+                                      auto end)
+        {
             BOOST_ASSERT(std::distance(begin, end) > 0);

             const auto cell_id = begin->first;
@ -316,7 +317,8 @@ template <storage::Ownership Ownership> class CellStorageImpl
         util::for_each_range(
             level_source_boundary.begin(),
             level_source_boundary.end(),
-            [this, insert_cell_boundary](auto begin, auto end) {
+            [this, insert_cell_boundary](auto begin, auto end)
+            {
                 insert_cell_boundary(
                     source_boundary,
                     [](auto &cell, auto value) { cell.num_source_nodes = value; },
@ -327,7 +329,8 @@ template <storage::Ownership Ownership> class CellStorageImpl
         util::for_each_range(
             level_destination_boundary.begin(),
             level_destination_boundary.end(),
-            [this, insert_cell_boundary](auto begin, auto end) {
+            [this, insert_cell_boundary](auto begin, auto end)
+            {
                 insert_cell_boundary(
                     destination_boundary,
                     [](auto &cell, auto value) { cell.num_destination_nodes = value; },
@@ -64,7 +64,10 @@ std::vector<OutputEdgeT> prepareEdgesForUsageInGraph(std::vector<extractor::Edge
    // sort into blocks of edges with same source + target
    // the we partition by the forward flag to sort all edges with a forward direction first.
    // the we sort by weight to ensure the first forward edge is the smallest forward edge
    std::sort(begin(edges),
              end(edges),
              [](const auto &lhs, const auto &rhs)
              {
                  return std::tie(lhs.source, lhs.target, rhs.data.forward, lhs.data.weight) <
                         std::tie(rhs.source, rhs.target, lhs.data.forward, rhs.data.weight);
              });
@@ -77,10 +80,11 @@ std::vector<OutputEdgeT> prepareEdgesForUsageInGraph(std::vector<extractor::Edge
    const NodeID source = begin_interval->source;
    const NodeID target = begin_interval->target;

    auto end_interval = std::find_if_not(
        begin_interval,
        edges.end(),
        [source, target](const auto &edge)
        { return std::tie(edge.source, edge.target) == std::tie(source, target); });
    BOOST_ASSERT(begin_interval != end_interval);

    // remove eigenloops
@@ -144,7 +148,8 @@ graphToEdges(const DynamicEdgeBasedGraph &edge_based_graph)
    auto max_turn_id = tbb::parallel_reduce(
        range,
        NodeID{0},
        [&edge_based_graph](const auto range, NodeID initial)
        {
            NodeID max_turn_id = initial;
            for (auto node = range.begin(); node < range.end(); ++node)
            {
@@ -159,7 +164,10 @@ graphToEdges(const DynamicEdgeBasedGraph &edge_based_graph)
        [](const NodeID lhs, const NodeID rhs) { return std::max(lhs, rhs); });

    std::vector<extractor::EdgeBasedEdge> edges(max_turn_id + 1);
    tbb::parallel_for(
        range,
        [&](const auto range)
        {
            for (auto node = range.begin(); node < range.end(); ++node)
            {
                for (auto edge : edge_based_graph.GetAdjacentEdgeRange(node))
@@ -159,10 +159,11 @@ class MultiLevelGraph : public util::StaticGraph<EdgeDataT, Ownership>
    auto GetHighestBorderLevel(const MultiLevelPartition &mlp, const ContainerT &edges) const
    {
        std::vector<LevelID> highest_border_level(edges.size());
        std::transform(edges.begin(),
                       edges.end(),
                       highest_border_level.begin(),
                       [&mlp](const auto &edge)
                       { return mlp.GetHighestDifferentLevel(edge.source, edge.target); });
        return highest_border_level;
    }

@@ -175,7 +176,8 @@ class MultiLevelGraph : public util::StaticGraph<EdgeDataT, Ownership>
    tbb::parallel_sort(
        permutation.begin(),
        permutation.end(),
        [&edges, &highest_border_level](const auto &lhs, const auto &rhs)
        {
            // sort by source node and then by level in ascending order
            return std::tie(edges[lhs].source, highest_border_level[lhs], edges[lhs].target) <
                   std::tie(edges[rhs].source, highest_border_level[rhs], edges[rhs].target);
@@ -201,8 +203,9 @@ class MultiLevelGraph : public util::StaticGraph<EdgeDataT, Ownership>
    auto level_begin = iter;
    for (auto level : util::irange<LevelID>(0, mlp.GetNumberOfLevels()))
    {
        iter = std::find_if(iter,
                            edge_and_level_end,
                            [node, level](const auto &edge_and_level) {
                                return boost::get<0>(edge_and_level).source != node ||
                                       boost::get<1>(edge_and_level) != level;
                            });
@@ -207,7 +207,8 @@ template <storage::Ownership Ownership> class MultiLevelPartitionImpl final
    auto lidx = 0UL;
    util::for_each_pair(level_offsets.begin(),
                        level_offsets.begin() + num_level,
                        [&](const auto offset, const auto next_offset)
                        {
                            // create mask that has `bits` ones at its LSBs.
                            // 000011
                            BOOST_ASSERT(offset <= NUM_PARTITION_BITS);
@@ -274,9 +275,8 @@ template <storage::Ownership Ownership> class MultiLevelPartitionImpl final
    {
        std::stable_sort(permutation.begin(),
                         permutation.end(),
                         [&partition](const auto lhs, const auto rhs)
                         { return partition[lhs] < partition[rhs]; });
    }

    // top down assign new cell ids
@@ -59,8 +59,10 @@ std::size_t removeUnconnectedBoundaryNodes(const GraphT &edge_based_graph,

    if (level_index < static_cast<int>(partitions.size() - 1))
    {
        auto new_end =
            std::remove_if(witnesses.begin(),
                           witnesses.end(),
                           [&](const auto &witness) {
                               return partitions[level_index + 1][node] !=
                                      partitions[level_index + 1][witness.id];
                           });
@@ -87,8 +89,10 @@ std::size_t removeUnconnectedBoundaryNodes(const GraphT &edge_based_graph,
        }
    }

    auto best_witness =
        std::min_element(witnesses.begin(),
                         witnesses.end(),
                         [](const auto &lhs, const auto &rhs) {
                             return lhs.induced_border_edges < rhs.induced_border_edges;
                         });
    BOOST_ASSERT(best_witness != witnesses.end());
@@ -30,10 +30,10 @@ void reorderFirstLast(RandomIt first, RandomIt last, std::size_t n, Comparator c
    // requirements.
    std::reverse_iterator<RandomIt> rfirst{last}, rlast{first + n};

    const auto flipped = [](auto fn)
    {
        return [fn](auto &&lhs, auto &&rhs)
        { return fn(std::forward<decltype(lhs)>(rhs), std::forward<decltype(rhs)>(lhs)); };
    };

    std::nth_element(rfirst, rfirst + (n - 1), rlast, flipped(comp));
@@ -74,16 +74,16 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
    : BaseParametersGrammar::base_type(root_rule)
{
    const auto add_hint = [](engine::api::BaseParameters &base_parameters,
                             const std::vector<std::string> &hint_strings)
    {
        if (!hint_strings.empty())
        {
            std::vector<engine::SegmentHint> location_hints(hint_strings.size());
            std::transform(hint_strings.begin(),
                           hint_strings.end(),
                           location_hints.begin(),
                           [](const auto &hint_string)
                           { return engine::SegmentHint::FromBase64(hint_string); });
            base_parameters.hints.push_back(engine::Hint{std::move(location_hints)});
        }
        else
@@ -94,7 +94,8 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>

    const auto add_bearing =
        [](engine::api::BaseParameters &base_parameters,
           boost::optional<boost::fusion::vector2<short, short>> bearing_range)
    {
        boost::optional<engine::Bearing> bearing;
        if (bearing_range)
        {
@@ -118,7 +119,8 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>

    location_rule = (double_ > qi::lit(',') >
                     double_)[qi::_val = ph::bind(
                        [](double lon, double lat)
                        {
                            return util::Coordinate(
                                util::toFixed(util::UnsafeFloatLongitude{lon}),
                                util::toFixed(util::UnsafeFloatLatitude{lat}));
@@ -126,18 +128,16 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
                        qi::_1,
                        qi::_2)];

    polyline_rule =
        qi::as_string[qi::lit("polyline(") > +polyline_chars > ')']
                     [qi::_val = ph::bind([](const std::string &polyline)
                                          { return engine::decodePolyline(polyline); },
                                          qi::_1)];

    polyline6_rule =
        qi::as_string[qi::lit("polyline6(") > +polyline_chars > ')']
                     [qi::_val = ph::bind([](const std::string &polyline)
                                          { return engine::decodePolyline<1000000>(polyline); },
                                          qi::_1)];

    query_rule =
@@ -166,8 +166,9 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
    qi::lit("bearings=") >
        (-(qi::short_ > ',' > qi::short_))[ph::bind(add_bearing, qi::_r1, qi::_1)] % ';';

-   approach_type.add("unrestricted", engine::Approach::UNRESTRICTED)("curb",
-                                                                     engine::Approach::CURB);
+   approach_type.add("unrestricted", engine::Approach::UNRESTRICTED)(
+       "curb", engine::Approach::CURB)("opposite", engine::Approach::OPPOSITE);

    approach_rule = qi::lit("approaches=") >
                    (-approach_type %
                     ';')[ph::bind(&engine::api::BaseParameters::approaches, qi::_r1) = qi::_1];
@@ -54,8 +54,9 @@ struct RouteParametersGrammar : public BaseParametersGrammar<Iterator, Signature
#endif
    using AnnotationsType = engine::api::RouteParameters::AnnotationsType;

    const auto add_annotation =
        [](engine::api::RouteParameters &route_parameters, AnnotationsType route_param)
    {
        route_parameters.annotations_type = route_parameters.annotations_type | route_param;
        route_parameters.annotations =
            route_parameters.annotations_type != AnnotationsType::None;
@@ -35,7 +35,9 @@ class RequestHandler;
class Connection : public std::enable_shared_from_this<Connection>
{
  public:
-   explicit Connection(boost::asio::io_context &io_context, RequestHandler &handler);
+   explicit Connection(boost::asio::io_context &io_context,
+                       RequestHandler &handler,
+                       short keepalive_timeout);
    Connection(const Connection &) = delete;
    Connection &operator=(const Connection &) = delete;
@@ -31,18 +31,24 @@ class Server
{
  public:
    // Note: returns a shared instead of a unique ptr as it is captured in a lambda somewhere else
-   static std::shared_ptr<Server>
-   CreateServer(std::string &ip_address, int ip_port, unsigned requested_num_threads)
+   static std::shared_ptr<Server> CreateServer(std::string &ip_address,
+                                               int ip_port,
+                                               unsigned requested_num_threads,
+                                               short keepalive_timeout)
    {
        util::Log() << "http 1.1 compression handled by zlib version " << zlibVersion();
        const unsigned hardware_threads = std::max(1u, std::thread::hardware_concurrency());
        const unsigned real_num_threads = std::min(hardware_threads, requested_num_threads);
-       return std::make_shared<Server>(ip_address, ip_port, real_num_threads);
+       return std::make_shared<Server>(ip_address, ip_port, real_num_threads, keepalive_timeout);
    }

-   explicit Server(const std::string &address, const int port, const unsigned thread_pool_size)
-       : thread_pool_size(thread_pool_size), acceptor(io_context),
-         new_connection(std::make_shared<Connection>(io_context, request_handler))
+   explicit Server(const std::string &address,
+                   const int port,
+                   const unsigned thread_pool_size,
+                   const short keepalive_timeout)
+       : thread_pool_size(thread_pool_size), keepalive_timeout(keepalive_timeout),
+         acceptor(io_context), new_connection(std::make_shared<Connection>(
+                                   io_context, request_handler, keepalive_timeout))
    {
        const auto port_string = std::to_string(port);

@@ -94,7 +100,8 @@ class Server
    if (!e)
    {
        new_connection->start();
-       new_connection = std::make_shared<Connection>(io_context, request_handler);
+       new_connection =
+           std::make_shared<Connection>(io_context, request_handler, keepalive_timeout);
        acceptor.async_accept(
            new_connection->socket(),
            boost::bind(&Server::HandleAccept, this, boost::asio::placeholders::error));
@@ -107,6 +114,7 @@ class Server

    RequestHandler request_handler;
    unsigned thread_pool_size;
+   short keepalive_timeout;
    boost::asio::io_context io_context;
    boost::asio::ip::tcp::acceptor acceptor;
    std::shared_ptr<Connection> new_connection;
@@ -35,6 +35,11 @@ struct IOConfig
        return {base_path.string() + fileName};
    }

+   bool IsRequiredConfiguredInput(const std::string &fileName) const
+   {
+       return IsConfigured(fileName, required_input_files);
+   }
+
    boost::filesystem::path base_path;

  protected:
@@ -66,7 +66,8 @@ void readBoolVector(tar::FileReader &reader, const std::string &name, VectorT &d
    using BlockType = std::uint64_t;
    constexpr std::uint64_t BLOCK_BITS = CHAR_BIT * sizeof(BlockType);

    const auto decode = [&](const BlockType block)
    {
        auto read_size = std::min<std::size_t>(count - index, BLOCK_BITS);
        unpackBits<VectorT, BlockType>(data, index, read_size, block);
        index += BLOCK_BITS;
@@ -87,7 +88,8 @@ void writeBoolVector(tar::FileWriter &writer, const std::string &name, const Vec

    // FIXME on old boost version the function_input_iterator does not work with lambdas
    // so we need to wrap it in a function here.
    const std::function<BlockType()> encode_function = [&]() -> BlockType
    {
        auto write_size = std::min<std::size_t>(count - index, BLOCK_BITS);
        auto packed = packBits<VectorT, BlockType>(data, index, write_size);
        index += BLOCK_BITS;
@@ -28,10 +28,10 @@ class SharedDataIndex
    // Build mapping from block name to region
    for (auto index : util::irange<std::uint32_t>(0, regions.size()))
    {
        regions[index].layout->List(
            "",
            boost::make_function_output_iterator([&](const auto &name)
                                                  { block_to_region[name] = index; }));
    }
}
@@ -223,7 +223,10 @@ struct SharedRegionRegister
    // Returns the key of the region with the given name
    RegionID Find(const std::string &name) const
    {
        auto iter = std::find_if(
            regions.begin(),
            regions.end(),
            [&](const auto &region) {
                return std::strncmp(region.name, name.c_str(), SharedRegion::MAX_NAME_LENGTH) == 0;
            });
@@ -113,7 +113,8 @@ class SharedMemory
    {
        auto shmid = shm.get_shmid();
        ::shmid_ds xsi_ds;
        const auto errorToMessage = [](int error) -> std::string
        {
            switch (error)
            {
            case EPERM:
@@ -31,10 +31,61 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <boost/filesystem/path.hpp>

#include "storage/io_config.hpp"
+#include "osrm/datasets.hpp"
+
+#include <set>

namespace osrm::storage
{

+std::istream &operator>>(std::istream &in, FeatureDataset &datasets);
+
+static std::vector<boost::filesystem::path>
+GetRequiredFiles(const std::vector<storage::FeatureDataset> &disabled_feature_dataset)
+{
+    std::set<boost::filesystem::path> required{
+        ".osrm.datasource_names",
+        ".osrm.ebg_nodes",
+        ".osrm.edges",
+        ".osrm.fileIndex",
+        ".osrm.geometry",
+        ".osrm.icd",
+        ".osrm.maneuver_overrides",
+        ".osrm.names",
+        ".osrm.nbg_nodes",
+        ".osrm.properties",
+        ".osrm.ramIndex",
+        ".osrm.timestamp",
+        ".osrm.tld",
+        ".osrm.tls",
+        ".osrm.turn_duration_penalties",
+        ".osrm.turn_weight_penalties",
+    };
+
+    for (const auto &to_disable : disabled_feature_dataset)
+    {
+        switch (to_disable)
+        {
+        case FeatureDataset::ROUTE_STEPS:
+            for (const auto &dataset : {".osrm.icd", ".osrm.tld", ".osrm.tls"})
+            {
+                required.erase(dataset);
+            }
+            break;
+        case FeatureDataset::ROUTE_GEOMETRY:
+            for (const auto &dataset :
+                 {".osrm.edges", ".osrm.icd", ".osrm.names", ".osrm.tld", ".osrm.tls"})
+            {
+                required.erase(dataset);
+            }
+            break;
+        }
+    }
+
+    return std::vector<boost::filesystem::path>(required.begin(), required.end());
+}
+
/**
 * Configures OSRM's file storage paths.
 *
@@ -42,33 +93,18 @@ namespace osrm::storage
 */
struct StorageConfig final : IOConfig
{
-   StorageConfig(const boost::filesystem::path &base) : StorageConfig()
+   StorageConfig(const boost::filesystem::path &base,
+                 const std::vector<storage::FeatureDataset> &disabled_feature_datasets_ = {})
+       : StorageConfig(disabled_feature_datasets_)
    {
        IOConfig::UseDefaultOutputNames(base);
    }

-   StorageConfig()
-       : IOConfig({".osrm.ramIndex",
-                   ".osrm.fileIndex",
-                   ".osrm.edges",
-                   ".osrm.geometry",
-                   ".osrm.turn_weight_penalties",
-                   ".osrm.turn_duration_penalties",
-                   ".osrm.datasource_names",
-                   ".osrm.names",
-                   ".osrm.timestamp",
-                   ".osrm.properties",
-                   ".osrm.icd",
-                   ".osrm.maneuver_overrides"},
-                  {".osrm.hsgr",
-                   ".osrm.nbg_nodes",
-                   ".osrm.ebg_nodes",
-                   ".osrm.cells",
-                   ".osrm.cell_metrics",
-                   ".osrm.mldgr",
-                   ".osrm.tld",
-                   ".osrm.tls",
-                   ".osrm.partition"},
+   StorageConfig(const std::vector<storage::FeatureDataset> &disabled_feature_datasets_ = {})
+       : IOConfig(
+             GetRequiredFiles(disabled_feature_datasets_),
+             {".osrm.hsgr", ".osrm.cells", ".osrm.cell_metrics", ".osrm.mldgr", ".osrm.partition"},
              {})
    {
    }
@@ -241,9 +241,9 @@ inline auto make_contracted_metric_view(const SharedDataIndex &index, const std:

    std::vector<util::vector_view<bool>> edge_filter;
    index.List(name + "/exclude",
               boost::make_function_output_iterator(
                   [&](const auto &filter_name)
                   { edge_filter.push_back(make_vector_view<bool>(index, filter_name)); }));

    return contractor::ContractedMetricView{{node_list, edge_list}, std::move(edge_filter)};
}
@@ -46,7 +46,10 @@ template <typename Key, typename Value> struct CSVFilesParser
    {
        tbb::spin_mutex mutex;
        std::vector<std::pair<Key, Value>> lookup;
        tbb::parallel_for(std::size_t{0},
                          csv_filenames.size(),
                          [&](const std::size_t idx)
                          {
                              auto local = ParseCSVFile(csv_filenames[idx], start_index + idx);

                              { // Merge local CSV results into a flat global vector
@@ -61,16 +64,18 @@ template <typename Key, typename Value> struct CSVFilesParser
        // and unique them on key to keep only the value with the largest file index
        // and the largest line number in a file.
        // The operands order is swapped to make descending ordering on (key, source)
        tbb::parallel_sort(begin(lookup),
                           end(lookup),
                           [](const auto &lhs, const auto &rhs) {
                               return std::tie(rhs.first, rhs.second.source) <
                                      std::tie(lhs.first, lhs.second.source);
                           });

        // Unique only on key to take the source precedence into account and remove duplicates.
        const auto it = std::unique(begin(lookup),
                                    end(lookup),
                                    [](const auto &lhs, const auto &rhs)
                                    { return lhs.first == rhs.first; });
        lookup.erase(it, end(lookup));

        util::Log() << "In total loaded " << csv_filenames.size() << " file(s) with a total of "
@@ -14,10 +14,12 @@ template <typename Key, typename Value> struct LookupTable
    std::optional<Value> operator()(const Key &key) const
    {
        using Result = std::optional<Value>;
-       const auto it = std::lower_bound(
-           lookup.begin(), lookup.end(), key, [](const auto &lhs, const auto &rhs) {
-               return rhs < lhs.first;
-           });
+       using Result = boost::optional<Value>;
+       const auto it =
+           std::lower_bound(lookup.begin(),
+                            lookup.end(),
+                            key,
+                            [](const auto &lhs, const auto &rhs) { return rhs < lhs.first; });
        return it != std::end(lookup) && !(it->first < key) ? Result(it->second) : Result();
    }
@@ -33,6 +33,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <boost/numeric/conversion/cast.hpp>

#include <cstddef>
+#include <cstdint>
#include <iosfwd> //for std::ostream
#include <sstream>
#include <string>
@@ -71,12 +72,20 @@ using FloatLongitude = Alias<double, tag::longitude>;
// range checks on these (toFixed/toFloat, etc)
using UnsafeFloatLatitude = Alias<double, tag::unsafelatitude>;
using UnsafeFloatLongitude = Alias<double, tag::unsafelongitude>;
-static_assert(std::is_pod<FixedLatitude>(), "FixedLatitude is not a valid alias");
-static_assert(std::is_pod<FixedLongitude>(), "FixedLongitude is not a valid alias");
-static_assert(std::is_pod<FloatLatitude>(), "FloatLatitude is not a valid alias");
-static_assert(std::is_pod<FloatLongitude>(), "FloatLongitude is not a valid alias");
-static_assert(std::is_pod<UnsafeFloatLatitude>(), "UnsafeFloatLatitude is not a valid alias");
-static_assert(std::is_pod<UnsafeFloatLongitude>(), "UnsafeFloatLongitude is not a valid alias");
+static_assert(std::is_standard_layout<FixedLatitude>() && std::is_trivial<FixedLatitude>(),
+              "FixedLatitude is not a valid alias");
+static_assert(std::is_standard_layout<FixedLongitude>() && std::is_trivial<FixedLongitude>(),
+              "FixedLongitude is not a valid alias");
+static_assert(std::is_standard_layout<FloatLatitude>() && std::is_trivial<FloatLatitude>(),
+              "FloatLatitude is not a valid alias");
+static_assert(std::is_standard_layout<FloatLongitude>() && std::is_trivial<FloatLongitude>(),
+              "FloatLongitude is not a valid alias");
+static_assert(std::is_standard_layout<UnsafeFloatLatitude>() &&
+                  std::is_trivial<UnsafeFloatLatitude>(),
+              "UnsafeFloatLatitude is not a valid alias");
+static_assert(std::is_standard_layout<UnsafeFloatLongitude>() &&
+                  std::is_trivial<UnsafeFloatLongitude>(),
+              "UnsafeFloatLongitude is not a valid alias");

/**
 * Converts a typed latitude from floating to fixed representation.
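The hunk above swaps the deprecated std::is_pod trait for the two traits it is defined in terms of. A minimal, self-contained sketch of the equivalence (the Fixed struct below is a stand-in, not one of the OSRM alias types):

#include <cstdint>
#include <type_traits>

// std::is_pod<T> (deprecated since C++20) holds exactly when T is both trivial
// and standard-layout, so the pair of checks below is the drop-in replacement
// used by the static_asserts in this diff.
struct Fixed
{
    std::int32_t value; // hypothetical payload standing in for the alias types
};

static_assert(std::is_standard_layout<Fixed>() && std::is_trivial<Fixed>(),
              "Fixed is not a valid alias");

int main() { return 0; }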
@@ -179,12 +179,14 @@ template <class BinaryOperation, typename iterator_type>
double getLength(iterator_type begin, const iterator_type end, BinaryOperation op)
{
    double result = 0;
    const auto functor = [&result, op](const Coordinate lhs, const Coordinate rhs)
    {
        result += op(lhs, rhs);
        return false;
    };
    // side-effect find adding up distances
-   std::adjacent_find(begin, end, functor);
+   // Ignore return value, we are only interested in the side-effect
+   [[maybe_unused]] auto _ = std::adjacent_find(begin, end, functor);

    return result;
}
@@ -196,13 +198,15 @@ findClosestDistance(const Coordinate coordinate, const iterator_type begin, cons
    double current_min = std::numeric_limits<double>::max();

    // comparator updating current_min without ever finding an element
    const auto compute_minimum_distance =
        [&current_min, coordinate](const Coordinate lhs, const Coordinate rhs)
    {
        current_min = std::min(current_min, findClosestDistance(coordinate, lhs, rhs));
        return false;
    };

-   std::adjacent_find(begin, end, compute_minimum_distance);
+   // Ignore return value, we are only interested in the side-effect
+   [[maybe_unused]] auto _ = std::adjacent_find(begin, end, compute_minimum_distance);
    return current_min;
}
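Both getLength and findClosestDistance above call std::adjacent_find purely for its side effect and discard the returned iterator; binding the result to a [[maybe_unused]] placeholder documents that intent. A small standalone sketch of the same pattern (the gap-summing comparator is illustrative, not OSRM code):

#include <algorithm>
#include <vector>

int main()
{
    std::vector<int> values{1, 3, 6, 10};
    int total_gap = 0;

    // The comparator never reports a match; it only accumulates the gaps.
    const auto accumulate_gaps = [&total_gap](int lhs, int rhs)
    {
        total_gap += rhs - lhs;
        return false;
    };

    // Ignore the returned iterator, we are only interested in the side effect.
    [[maybe_unused]] auto _ = std::adjacent_find(values.begin(), values.end(), accumulate_gaps);

    return total_gap == 9 ? 0 : 1;
}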
@@ -214,8 +218,9 @@ double findClosestDistance(const iterator_type lhs_begin,
{
    double current_min = std::numeric_limits<double>::max();

    const auto compute_minimum_distance_in_rhs =
        [&current_min, rhs_begin, rhs_end](const Coordinate coordinate)
    {
        current_min = std::min(current_min, findClosestDistance(coordinate, rhs_begin, rhs_end));
        return false;
    };
@@ -231,13 +236,11 @@ std::pair<Coordinate, Coordinate> leastSquareRegression(const iterator_type begi
    // following the formulas of https://faculty.elgin.edu/dkernler/statistics/ch04/4-2.html
    const auto number_of_coordinates = std::distance(begin, end);
    BOOST_ASSERT(number_of_coordinates >= 2);
    const auto extract_lon = [](const Coordinate coordinate)
    { return static_cast<double>(toFloating(coordinate.lon)); };

    const auto extract_lat = [](const Coordinate coordinate)
    { return static_cast<double>(toFloating(coordinate.lat)); };

    double min_lon = extract_lon(*begin);
    double max_lon = extract_lon(*begin);
@@ -260,19 +263,21 @@ std::pair<Coordinate, Coordinate> leastSquareRegression(const iterator_type begi
    {
        std::vector<util::Coordinate> rotated_coordinates(number_of_coordinates);
        // rotate all coordinates to the right
        std::transform(begin,
                       end,
                       rotated_coordinates.begin(),
                       [](const auto coordinate)
                       { return rotateCCWAroundZero(coordinate, detail::degToRad(-90)); });
        const auto rotated_regression =
            leastSquareRegression(rotated_coordinates.begin(), rotated_coordinates.end());
        return {rotateCCWAroundZero(rotated_regression.first, detail::degToRad(90)),
                rotateCCWAroundZero(rotated_regression.second, detail::degToRad(90))};
    }

    const auto make_accumulate = [](const auto extraction_function)
    {
        return [extraction_function](const double sum_so_far, const Coordinate coordinate)
        { return sum_so_far + extraction_function(coordinate); };
    };

    const auto accumulated_lon = std::accumulate(begin, end, 0., make_accumulate(extract_lon));
@@ -281,8 +286,10 @@ std::pair<Coordinate, Coordinate> leastSquareRegression(const iterator_type begi

    const auto mean_lon = accumulated_lon / number_of_coordinates;
    const auto mean_lat = accumulated_lat / number_of_coordinates;
    const auto make_variance = [](const auto mean, const auto extraction_function)
    {
        return [extraction_function, mean](const double sum_so_far, const Coordinate coordinate)
        {
            const auto difference = extraction_function(coordinate) - mean;
            return sum_so_far + difference * difference;
        };
@@ -310,7 +317,8 @@ std::pair<Coordinate, Coordinate> leastSquareRegression(const iterator_type begi
    std::accumulate(begin,
                    end,
                    0.,
                    [&](const auto sum_so_far, const auto current_coordinate)
                    {
                        return sum_so_far + (extract_lon(current_coordinate) - mean_lon) *
                                            (extract_lat(current_coordinate) - mean_lat) /
                                            (sample_variance_lon * sample_variance_lat);
@@ -321,9 +329,8 @@ std::pair<Coordinate, Coordinate> leastSquareRegression(const iterator_type begi
    const auto intercept = mean_lat - slope * mean_lon;

    const auto GetLatAtLon = [intercept,
                              slope](const util::FloatLongitude longitude) -> util::FloatLatitude
    { return {intercept + slope * static_cast<double>((longitude))}; };

    const double offset = 0.00001;
    const Coordinate regression_first = {
@@ -357,7 +364,8 @@ bool areParallel(const iterator_type lhs_begin,
    const auto rotation_angle_radians = detail::degToRad(bearing_lhs - 90);
    const auto rotated_difference_rhs = rotateCCWAroundZero(difference_rhs, rotation_angle_radians);

    const auto get_slope = [](const Coordinate from, const Coordinate to)
    {
        const auto diff_lat = static_cast<int>(from.lat) - static_cast<int>(to.lat);
        const auto diff_lon = static_cast<int>(from.lon) - static_cast<int>(to.lon);
        if (diff_lon == 0)
@@ -189,8 +189,11 @@ template <typename EdgeDataT> class DynamicGraph
    other.node_array.resize(node_array.size());

    NodeID node_id = 0;
    std::transform(node_array.begin(),
                   node_array.end(),
                   other.node_array.begin(),
                   [&](const Node &node)
                   {
                       const EdgeIterator first_edge = other.edge_list.size();

                       BOOST_ASSERT(node_id < number_of_nodes);
@@ -62,7 +62,7 @@ class exception : public std::exception
 * user supplied bad data, etc).
 */

-constexpr const std::array<const char *, 11> ErrorDescriptions = {{
+constexpr const std::array<const char *, 12> ErrorDescriptions = {{
    "", // Dummy - ErrorCode values start at 2
    "", // Dummy - ErrorCode values start at 2
    "Fingerprint did not match the expected value", // InvalidFingerprint
@@ -75,7 +75,8 @@ constexpr const std::array<const char *, 11> ErrorDescriptions = {{
    // NOLINTNEXTLINE(bugprone-suspicious-missing-comma)
    "The dataset you are trying to load is not "              // IncompatibleDataset
    "compatible with the routing algorithm you want to use.", // ...continued...
-   "Incompatible algorithm"  // IncompatibleAlgorithm
+   "Incompatible algorithm", // IncompatibleAlgorithm
+   "Unknown feature dataset" // UnknownFeatureDataset
}};

#ifndef NDEBUG
@@ -84,6 +85,32 @@ static_assert(ErrorDescriptions.size() == ErrorCode::__ENDMARKER__,
              "ErrorCode list and ErrorDescription lists are different sizes");
#endif

+class DisabledDatasetException : public exception
+{
+  public:
+    explicit DisabledDatasetException(const std::string &dataset_)
+        : exception(BuildMessage(dataset_)), dataset(dataset_)
+    {
+    }
+
+    const std::string &Dataset() const { return dataset; }
+
+  private:
+    // This function exists to 'anchor' the class, and stop the compiler from
+    // copying vtable and RTTI info into every object file that includes
+    // this header. (Caught by -Wweak-vtables under Clang.)
+    virtual void anchor() const override;
+    const std::string dataset;
+
+    static std::string BuildMessage(const std::string &dataset)
+    {
+        return "DisabledDatasetException: Your query tried to access the disabled dataset " +
+               dataset +
+               ". Please check your configuration: "
+               "https://github.com/Project-OSRM/osrm-backend/wiki/Disabled-Datasets";
+    }
+};
+
class RuntimeError : public exception
{
    using Base = exception;
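For illustration, a simplified, self-contained analogue of the DisabledDatasetException added above, built only on std::runtime_error (the real class derives from the exception base in this header and carries the anchor() member shown in the hunk; the dataset name used below is just an example):

#include <iostream>
#include <stdexcept>
#include <string>

// Same shape as the new exception: the message is built from the dataset name
// and the name itself stays queryable for callers that want to react to it.
class DisabledDataset : public std::runtime_error
{
  public:
    explicit DisabledDataset(const std::string &dataset)
        : std::runtime_error("query tried to access the disabled dataset " + dataset),
          dataset_(dataset)
    {
    }
    const std::string &Dataset() const { return dataset_; }

  private:
    const std::string dataset_;
};

int main()
{
    try
    {
        throw DisabledDataset(".osrm.tls");
    }
    catch (const DisabledDataset &e)
    {
        std::cout << e.Dataset() << ": " << e.what() << '\n';
    }
    return 0;
}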
@@ -37,9 +37,9 @@ class FilteredGraphImpl<util::StaticGraph<EdgeDataT, Ownership>, Ownership>
    unsigned GetOutDegree(const NodeIterator n) const
    {
        auto range = graph.GetAdjacentEdgeRange(n);
        return std::count_if(range.begin(),
                             range.end(),
                             [this](const EdgeIterator edge) { return edge_filter[edge]; });
    }

    inline NodeIterator GetTarget(const EdgeIterator e) const
@@ -33,7 +33,8 @@ struct FingerPrint

static_assert(sizeof(FingerPrint) == 8, "FingerPrint has unexpected size");
static_assert(std::is_trivial<FingerPrint>::value, "FingerPrint needs to be trivial.");
-static_assert(std::is_pod<FingerPrint>::value, "FingerPrint needs to be a POD.");
+static_assert(std::is_standard_layout<FingerPrint>::value,
+              "FingerPrint needs have a standard layout.");
} // namespace osrm::util

#endif /* FingerPrint_H */
@@ -63,14 +63,16 @@ template <typename StringView> inline auto decompose(const StringView &lhs, cons
    auto const lcs = longest_common_substring(lhs, rhs);

    // trim spaces, transform to lower
    const auto trim = [](StringView view)
    {
        // we compare suffixes based on this value, it might break UTF chars, but as long as we are
        // consistent in handling, we do not create bad results
        std::string str;
        str.reserve(view.size());
        std::transform(view.begin(),
                       view.end(),
                       std::back_inserter(str),
                       [](unsigned char c) { return std::tolower(c); });
        auto front = str.find_first_not_of(' ');

        if (front == std::string::npos)
@@ -131,13 +133,13 @@ inline bool requiresNameAnnounced(const StringView &from_name,

    const auto checkForPrefixOrSuffixChange = [](const std::string_view first,
                                                 const std::string_view second,
                                                 const SuffixTable &suffix_table)
    {
        std::string first_prefix, first_suffix, second_prefix, second_suffix;
        std::tie(first_prefix, first_suffix, second_prefix, second_suffix) =
            decompose(first, second);
        const auto checkTable = [&](const std::string &str)
        { return str.empty() || suffix_table.isSuffix(str); };

        return checkTable(first_prefix) && checkTable(first_suffix) && checkTable(second_prefix) &&
               checkTable(second_suffix);
@@ -315,9 +315,8 @@ template <typename GroupBlockPolicy, storage::Ownership Ownership> struct Indexe
    values_byte_iter = block.WriteBlockPrefix(curr, next, values_byte_iter);
    std::advance(next, std::min<diff_type>(1, std::distance(next, sentinel)));

    auto to_bytes = [&](const auto &data)
    { values_byte_iter = std::copy_n(&data, sizeof(ValueType), values_byte_iter); };
    std::copy(data + *curr,
              data + *next,
              boost::make_function_output_iterator(std::cref(to_bytes)));
@@ -43,8 +43,7 @@ class integer_iterator : public boost::iterator_facade<integer_iterator<Integer>

    difference_type distance_to(const integer_iterator &other) const
    {
        return std::is_signed<value_type>::value ? (other.m_value - m_value)
               : (other.m_value >= m_value)
                   ? static_cast<difference_type>(other.m_value - m_value)
                   : -static_cast<difference_type>(m_value - other.m_value);
@@ -79,7 +79,8 @@ NodeBasedDynamicGraphFromEdges(NodeID number_of_nodes,
    auto edges_list = directedEdgesFromCompressed<NodeBasedDynamicGraph::InputEdge>(
        input_edge_list,
        [](NodeBasedDynamicGraph::InputEdge &output_edge,
           const extractor::NodeBasedEdge &input_edge)
        {
            output_edge.data.weight = input_edge.weight;
            output_edge.data.duration = input_edge.duration;
            output_edge.data.distance = input_edge.distance;
@@ -3,6 +3,7 @@

#include <boost/date_time/gregorian/gregorian.hpp>

+#include <cstdint>
#include <string>
#include <vector>

@@ -192,23 +193,20 @@ struct OpeningHours
    && (times.empty() ||
        std::any_of(times.begin(),
                    times.end(),
                    [&time, &use_curr_day, &use_next_day](const auto &x)
                    { return x.IsInRange(time, use_curr_day, use_next_day); }))
    // .. and if weekdays are not specified or matches weekdays range
    && (weekdays.empty() ||
        std::any_of(weekdays.begin(),
                    weekdays.end(),
                    [&time, use_curr_day, use_next_day](const auto &x)
                    { return x.IsInRange(time, use_curr_day, use_next_day); }))
    // .. and if month-day ranges are not specified or is in any month-day range
    && (monthdays.empty() ||
        std::any_of(monthdays.begin(),
                    monthdays.end(),
                    [&time, use_curr_day, use_next_day](const auto &x)
                    { return x.IsInRange(time, use_curr_day, use_next_day); }));
}

std::vector<TimeSpan> times;
@@ -5,6 +5,7 @@
#include <boost/heap/d_ary_heap.hpp>

#include <algorithm>
+#include <cstdint>
#include <limits>
#include <map>
#include <optional>
@@ -344,9 +345,9 @@ class QueryHeap
    void DeleteAll()
    {
        auto const none_handle = heap.s_handle_from_iterator(heap.end());
        std::for_each(inserted_nodes.begin(),
                      inserted_nodes.end(),
                      [&none_handle](auto &node) { node.handle = none_handle; });
        heap.clear();
    }
@@ -64,7 +64,8 @@ template <unsigned BLOCK_SIZE, storage::Ownership Ownership> class RangeTable
    // construct table from length vector
    template <typename VectorT> explicit RangeTable(const VectorT &lengths)
    {
        const unsigned number_of_blocks = [&lengths]()
        {
            unsigned num = (lengths.size() + 1) / (BLOCK_SIZE + 1);
            if ((lengths.size() + 1) % (BLOCK_SIZE + 1) != 0)
            {

Some files were not shown because too many files have changed in this diff.