Merge branch 'master' into sf-float-traffic-updates
commit e314718284

.github/workflows/osrm-backend.yml (vendored, 83 changed lines)
@@ -138,7 +138,8 @@ jobs:
runs-on: ubuntu-20.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
CUCUMBER_TIMEOUT: 60000

- name: clang-11.0-debug-clang-tidy
@@ -147,7 +148,8 @@ jobs:
runs-on: ubuntu-20.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CLANG_VERSION: 11.0.0
CCOMPILER: clang-11
CXXCOMPILER: clang++-11
CUCUMBER_TIMEOUT: 60000
ENABLE_CLANG_TIDY: ON

@@ -157,7 +159,8 @@ jobs:
runs-on: ubuntu-20.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CLANG_VERSION: 11.0.0
CCOMPILER: clang-11
CXXCOMPILER: clang++-11
ENABLE_CONAN: ON
ENABLE_SANITIZER: ON

@@ -167,7 +170,8 @@ jobs:
runs-on: ubuntu-20.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_CONAN: ON

- name: gcc-11-release
@@ -198,7 +202,7 @@ jobs:
CXXCOMPILER: g++-9
CXXFLAGS: -Wno-cast-function-type

- name: gcc-9-release-i686
- name: gcc-9-conan-release-i686
continue-on-error: false
node: 12
runs-on: ubuntu-20.04
@@ -209,6 +213,7 @@ jobs:
CXXCOMPILER: g++-9
CXXFLAGS: "-m32 -msse2 -mfpmath=sse"
TARGET_ARCH: i686
ENABLE_CONAN: ON

- name: gcc-8-release
continue-on-error: false
@@ -284,7 +289,8 @@ jobs:
node: 12
runs-on: ubuntu-20.04
BUILD_TYPE: Release
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -295,7 +301,8 @@ jobs:
node: 12
runs-on: ubuntu-20.04
BUILD_TYPE: Debug
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -306,7 +313,8 @@ jobs:
node: 14
runs-on: ubuntu-20.04
BUILD_TYPE: Release
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -317,7 +325,8 @@ jobs:
node: 14
runs-on: ubuntu-20.04
BUILD_TYPE: Debug
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -329,7 +338,8 @@ jobs:
node: 16
runs-on: ubuntu-20.04
BUILD_TYPE: Release
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -340,7 +350,8 @@ jobs:
node: 16
runs-on: ubuntu-20.04
BUILD_TYPE: Debug
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -363,7 +374,8 @@ jobs:
node: latest
runs-on: ubuntu-20.04
BUILD_TYPE: Release
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -374,7 +386,8 @@ jobs:
node: latest
runs-on: ubuntu-20.04
BUILD_TYPE: Debug
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -397,7 +410,8 @@ jobs:
node: "lts/*"
runs-on: ubuntu-20.04
BUILD_TYPE: Release
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
@@ -408,12 +422,12 @@ jobs:
node: "lts/*"
runs-on: ubuntu-20.04
BUILD_TYPE: Debug
CLANG_VERSION: 6.0.0
CCOMPILER: clang-6.0
CXXCOMPILER: clang++-6.0
ENABLE_GLIBC_WORKAROUND: ON
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON

name: ${{ matrix.name}}
continue-on-error: ${{ matrix.continue-on-error }}
runs-on: ${{ matrix.runs-on }}
@@ -423,7 +437,6 @@ jobs:
BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }}
CCOMPILER: ${{ matrix.CCOMPILER }}
CFLAGS: ${{ matrix.CFLAGS }}
CLANG_VERSION: ${{ matrix.CLANG_VERSION }}
CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }}
CXXCOMPILER: ${{ matrix.CXXCOMPILER }}
CXXFLAGS: ${{ matrix.CXXFLAGS }}
@@ -462,9 +475,9 @@ jobs:
uses: actions/cache@v2
with:
path: ~/.conan
key: v2-conan-${{ matrix.name }}-${{ github.sha }}
key: v3-conan-${{ matrix.name }}-${{ github.sha }}
restore-keys: |
v2-conan-${{ matrix.name }}-
v3-conan-${{ matrix.name }}-
- name: Enable test cache
uses: actions/cache@v2
with:
@@ -510,35 +523,28 @@ jobs:
wget --quiet -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C ${CMAKE_DIR}
echo "${CMAKE_DIR}/bin" >> $GITHUB_PATH

# TBB
${MASON} install tbb 2017_U7
echo "LD_LIBRARY_PATH=$(${MASON} prefix tbb 2017_U7)/lib/:${LD_LIBRARY_PATH}" >> $GITHUB_ENV

# ccache
${MASON} install ccache ${CCACHE_VERSION}
echo "$(${MASON} prefix ccache ${CCACHE_VERSION})/bin" >> $GITHUB_PATH

# clang
if [[ -n ${CLANG_VERSION} ]]; then
echo "CCOMPILER=clang" >> $GITHUB_ENV
echo "CXXCOMPILER=clang++" >> $GITHUB_ENV
${MASON} install clang++ ${CLANG_VERSION}
echo "$(${MASON} prefix clang++ ${CLANG_VERSION})/bin" >> $GITHUB_PATH
if [[ "${CCOMPILER}" == "clang-6.0" ]]; then
sudo apt-get update -y && sudo apt-get install clang++-6
fi
# we only enable lto for release builds
# and therefore don't need to us ld.gold or llvm tools for linking
# for debug builds
if [[ ${BUILD_TYPE} == 'Release' ]]; then
if [[ "${CCOMPILER}" == clang-* ]] && [[ ${BUILD_TYPE} == 'Release' ]]; then
${MASON} install binutils 2.27
echo "$(${MASON} prefix binutils 2.27)/bin" >> $GITHUB_PATH
fi
fi

# Linux dev packages
if [ "${TARGET_ARCH}" != "i686" ] && [ "${ENABLE_CONAN}" != "ON" ]; then
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt-get update -y
sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev libtbb-dev libboost-all-dev
if [[ -z "${CLANG_VERSION}" ]]; then
sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev libboost-all-dev
if [[ "${CCOMPILER}" != clang-* ]]; then
sudo apt-get install -y ${CXXCOMPILER}
fi
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
@@ -549,6 +555,18 @@ jobs:
echo "PKG_CONFIG_PATH=/usr/lib/i386-linux-gnu/pkgconfig:${PKG_CONFIG_PATH}" >> $GITHUB_ENV
fi

# TBB
TBB_VERSION=2021.3.0
if [[ "${RUNNER_OS}" == "Linux" ]]; then
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz"
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz"
fi
wget --tries 5 ${TBB_URL} -O onetbb.tgz
tar zxvf onetbb.tgz
sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/
sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/

- name: Prepare build
run: |
mkdir ${OSRM_BUILD_DIR}
@@ -564,6 +582,7 @@ jobs:
run: |
echo "Using ${JOBS} jobs"
pushd ${OSRM_BUILD_DIR}

cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DENABLE_CONAN=${ENABLE_CONAN:-OFF} \
-DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \
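A quick way to sanity-check the oneTBB files that the workflow step above copies into /usr/local (illustrative only, not part of the workflow; paths assume the oneapi-tbb tarball layout):

```sh
# Illustrative check: confirm the headers and libraries copied from the
# oneapi-tbb tarball ended up under /usr/local.
grep "TBB_VERSION_MAJOR" /usr/local/include/oneapi/tbb/version.h
find /usr/local/lib -name 'libtbb*'
```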
@@ -1,13 +1,17 @@
# Unreleased
- Changes from 5.26.0
- API:
- ADDED: Add Flatbuffers support to NodeJS bindings. [#6338](https://github.com/Project-OSRM/osrm-backend/pull/6338)
- CHANGED: Add `data_version` field to responses of all services. [#5387](https://github.com/Project-OSRM/osrm-backend/pull/5387)
- FIXED: Use Boost.Beast to parse HTTP request. [#6294](https://github.com/Project-OSRM/osrm-backend/pull/6294)
- FIXED: Fix inefficient osrm-routed connection handling [#6113](https://github.com/Project-OSRM/osrm-backend/pull/6113)
- FIXED: Fix HTTP compression precedence [#6113](https://github.com/Project-OSRM/osrm-backend/pull/6113)
- NodeJS:
- FIXED: Support `skip_waypoints` in Node bindings [#6060](https://github.com/Project-OSRM/osrm-backend/pull/6060)
- Misc:
- ADDED: Support floats for speed value in traffic updates CSV. [#6327](https://github.com/Project-OSRM/osrm-backend/pull/6327)
- CHANGED: Use Lua 5.4 in Docker image. [#6346](https://github.com/Project-OSRM/osrm-backend/pull/6346)
- CHANGED: Remove redundant nullptr check. [#6326](https://github.com/Project-OSRM/osrm-backend/pull/6326)
- CHANGED: missing files list is included in exception message. [#5360](https://github.com/Project-OSRM/osrm-backend/pull/5360)
- CHANGED: Do not use deprecated Callback::Call overload in Node bindings. [#6318](https://github.com/Project-OSRM/osrm-backend/pull/6318)
- FIXED: Fix distance calculation consistency. [#6315](https://github.com/Project-OSRM/osrm-backend/pull/6315)
@@ -17,6 +21,9 @@
- FIXED: Bug in bicycle profile that caused exceptions if there is a highway=bicycle in the data. [#6296](https://github.com/Project-OSRM/osrm-backend/pull/6296)
- FIXED: Internal refactoring of identifier types used in data facade [#6044](https://github.com/Project-OSRM/osrm-backend/pull/6044)
- Build:
- CHANGED: Use apt-get to install Clang on CI. [#6345](https://github.com/Project-OSRM/osrm-backend/pull/6345)
- CHANGED: Fix TBB in case of Conan + NodeJS build. [#6333](https://github.com/Project-OSRM/osrm-backend/pull/6333)
- CHANGED: Migrate to modern TBB version. [#6300](https://github.com/Project-OSRM/osrm-backend/pull/6300)
- CHANGED: Enable performance-move-const-arg clang-tidy check. [#6319](https://github.com/Project-OSRM/osrm-backend/pull/6319)
- CHANGED: Use the latest node on CI. [#6317](https://github.com/Project-OSRM/osrm-backend/pull/6317)
- CHANGED: Migrate Windows CI to GitHub Actions. [#6312](https://github.com/Project-OSRM/osrm-backend/pull/6312)
@@ -46,6 +53,8 @@
- FIXED: Completed support for no_entry and no_exit turn restrictions. [#5988](https://github.com/Project-OSRM/osrm-backend/pull/5988)
- ADDED: Add support for non-round-trips with a single fixed endpoint. [#6050](https://github.com/Project-OSRM/osrm-backend/pull/6050)
- FIXED: Improvements to maneuver override processing [#6125](https://github.com/Project-OSRM/osrm-backend/pull/6125)
- ADDED: Support snapping to multiple ways at an input location. [#5953](https://github.com/Project-OSRM/osrm-backend/pull/5953)
- FIXED: Fix snapping target locations to ways used in turn restrictions. [#6339](https://github.com/Project-OSRM/osrm-backend/pull/6339)

# 5.26.0
- Changes from 5.25.0
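The "Support floats for speed value in traffic updates CSV" entry above is the feature this branch tracks. A hypothetical speed file exercising it might look like the sketch below; node IDs, file names, and the osrm-customize invocation are illustrative placeholders:

```sh
# Hypothetical traffic update file with fractional speeds (km/h); the columns are
# assumed to be from_osm_node_id,to_osm_node_id,speed. IDs and paths are made up.
cat > speeds.csv <<'EOF'
65280,65281,22.5
65281,65282,7.25
EOF
osrm-customize map.osrm --segment-speed-file speeds.csv
```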
@@ -463,7 +463,7 @@ if(ENABLE_CONAN)
set(CONAN_BZIP2_VERSION 1.0.8)
set(CONAN_EXPAT_VERSION 2.2.10)
set(CONAN_LUA_VERSION 5.4.4)
set(CONAN_TBB_VERSION 2020.3)
set(CONAN_TBB_VERSION 2021.3.0)

set(CONAN_SYSTEM_INCLUDES ON)

@@ -476,21 +476,27 @@ if(ENABLE_CONAN)
set(TBB_SHARED True)
endif()

conan_cmake_run(
set(CONAN_ARGS
REQUIRES
boost/${CONAN_BOOST_VERSION}
bzip2/${CONAN_BZIP2_VERSION}
expat/${CONAN_EXPAT_VERSION}
lua/${CONAN_LUA_VERSION}
tbb/${CONAN_TBB_VERSION}
onetbb/${CONAN_TBB_VERSION}
BASIC_SETUP
BUILD missing
GENERATORS cmake_find_package
KEEP_RPATHS
NO_OUTPUT_DIRS
OPTIONS boost:filesystem_version=3 # https://stackoverflow.com/questions/73392648/error-with-boost-filesystem-version-in-cmake
tbb:shared=${TBB_SHARED}
onetbb:shared=${TBB_SHARED}
BUILD missing
)
# explicitly say Conan to use x86 dependencies if build for x86 platforms (https://github.com/conan-io/cmake-conan/issues/141)
if(NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
conan_cmake_run("${CONAN_ARGS};ARCH;x86")
else()
conan_cmake_run("${CONAN_ARGS}")
endif()

add_dependency_includes(${CONAN_INCLUDE_DIRS_BOOST})
add_dependency_includes(${CONAN_INCLUDE_DIRS_BZIP2})
@@ -511,6 +517,7 @@ if(ENABLE_CONAN)
set(Boost_REGEX_LIBRARY "${Boost_regex_LIB_TARGETS}")
set(Boost_UNIT_TEST_FRAMEWORK_LIBRARY "${Boost_unit_test_framework_LIB_TARGETS}")

find_package(BZip2 REQUIRED)
find_package(EXPAT REQUIRED)
find_package(lua REQUIRED)
@@ -533,9 +540,7 @@ else()

find_package(TBB REQUIRED)
add_dependency_includes(${TBB_INCLUDE_DIR})
if(WIN32)
set(TBB_LIBRARIES optimized ${TBB_LIBRARY} optimized ${TBB_MALLOC_LIBRARY} debug ${TBB_LIBRARY_DEBUG} debug ${TBB_MALLOC_LIBRARY_DEBUG})
endif()
set(TBB_LIBRARIES TBB::tbb)

find_package(EXPAT REQUIRED)
add_dependency_includes(${EXPAT_INCLUDE_DIRS})
@@ -809,9 +814,9 @@ JOIN("-I${DEPENDENCIES_INCLUDE_DIRS}" " -I" PKGCONFIG_OSRM_INCLUDE_FLAGS)

# Boost uses imported targets, we need to use a generator expression to extract
# the link libraries to be written to the pkg-config file.
# Conan defines dependencies as CMake targets too, that's why we do the same for them.
# Conan & TBB define dependencies as CMake targets too, that's why we do the same for them.
foreach(engine_lib ${ENGINE_LIBRARIES})
if("${engine_lib}" MATCHES "^Boost.*" OR "${engine_lib}" MATCHES "^CONAN_LIB.*")
if("${engine_lib}" MATCHES "^Boost.*" OR "${engine_lib}" MATCHES "^CONAN_LIB.*" OR "${engine_lib}" MATCHES "^TBB.*")
list(APPEND PKGCONFIG_DEPENDENT_LIBRARIES "$<TARGET_LINKER_FILE:${engine_lib}>")
else()
list(APPEND PKGCONFIG_DEPENDENT_LIBRARIES "${engine_lib}")
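Since `TBB_LIBRARIES` now resolves to the imported `TBB::tbb` target, downstream CMake code links it like any other target. A minimal sketch with placeholder project and source names:

```cmake
# Sketch only: find_package(TBB) resolves either through oneTBB's TBBConfig.cmake
# or the updated FindTBB.cmake, both of which expose the TBB::tbb imported target.
cmake_minimum_required(VERSION 3.16)
project(tbb_smoke CXX)

find_package(TBB REQUIRED)

add_executable(tbb_smoke main.cpp)
target_link_libraries(tbb_smoke PRIVATE TBB::tbb)
```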
@@ -1,8 +1,8 @@
## Open Source Routing Machine

| Linux / macOS | Windows | Code Coverage |
| ------------- | ------- | ------------- |
| [](https://github.com/Project-OSRM/osrm-backend/actions/workflows/osrm-backend.yml) | [](https://ci.appveyor.com/project/DennisOSRM/osrm-backend) | [](https://codecov.io/gh/Project-OSRM/osrm-backend) |
| Linux / macOS / Windows | Code Coverage |
| ----------------------- | ------------- |
| [](https://github.com/Project-OSRM/osrm-backend/actions/workflows/osrm-backend.yml) | [](https://codecov.io/gh/Project-OSRM/osrm-backend) |

High performance routing engine written in C++14 designed to run on OpenStreetMap data.
@ -1,286 +1,456 @@
|
||||
# Locate Intel Threading Building Blocks include paths and libraries
|
||||
# FindTBB.cmake can be found at https://code.google.com/p/findtbb/
|
||||
# Written by Hannes Hofmann <hannes.hofmann _at_ informatik.uni-erlangen.de>
|
||||
# Improvements by Gino van den Bergen <gino _at_ dtecta.com>,
|
||||
# Florian Uhlig <F.Uhlig _at_ gsi.de>,
|
||||
# Jiri Marsik <jiri.marsik89 _at_ gmail.com>
|
||||
|
||||
# The MIT License
|
||||
# - Find ThreadingBuildingBlocks include dirs and libraries
|
||||
# Use this module by invoking find_package with the form:
|
||||
# find_package(TBB
|
||||
# [REQUIRED] # Fail with error if TBB is not found
|
||||
# ) #
|
||||
# Once done, this will define
|
||||
#
|
||||
# Copyright (c) 2011 Hannes Hofmann
|
||||
# TBB_FOUND - system has TBB
|
||||
# TBB_INCLUDE_DIRS - the TBB include directories
|
||||
# TBB_LIBRARIES - TBB libraries to be lined, doesn't include malloc or
|
||||
# malloc proxy
|
||||
# TBB::tbb - imported target for the TBB library
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
# TBB_VERSION_MAJOR - Major Product Version Number
|
||||
# TBB_VERSION_MINOR - Minor Product Version Number
|
||||
# TBB_INTERFACE_VERSION - Engineering Focused Version Number
|
||||
# TBB_COMPATIBLE_INTERFACE_VERSION - The oldest major interface version
|
||||
# still supported. This uses the engineering
|
||||
# focused interface version numbers.
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
# TBB_MALLOC_FOUND - system has TBB malloc library
|
||||
# TBB_MALLOC_INCLUDE_DIRS - the TBB malloc include directories
|
||||
# TBB_MALLOC_LIBRARIES - The TBB malloc libraries to be lined
|
||||
# TBB::malloc - imported target for the TBB malloc library
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# GvdB: This module uses the environment variable TBB_ARCH_PLATFORM which defines architecture and compiler.
|
||||
# e.g. "ia32/vc8" or "em64t/cc4.1.0_libc2.4_kernel2.6.16.21"
|
||||
# TBB_ARCH_PLATFORM is set by the build script tbbvars[.bat|.sh|.csh], which can be found
|
||||
# in the TBB installation directory (TBB_INSTALL_DIR).
|
||||
# TBB_MALLOC_PROXY_FOUND - system has TBB malloc proxy library
|
||||
# TBB_MALLOC_PROXY_INCLUDE_DIRS = the TBB malloc proxy include directories
|
||||
# TBB_MALLOC_PROXY_LIBRARIES - The TBB malloc proxy libraries to be lined
|
||||
# TBB::malloc_proxy - imported target for the TBB malloc proxy library
|
||||
#
|
||||
# GvdB: Mac OS X distribution places libraries directly in lib directory.
|
||||
#
|
||||
# For backwards compatibility, you may explicitely set the CMake variables TBB_ARCHITECTURE and TBB_COMPILER.
|
||||
# TBB_ARCHITECTURE [ ia32 | em64t | itanium ]
|
||||
# which architecture to use
|
||||
# TBB_COMPILER e.g. vc9 or cc3.2.3_libc2.3.2_kernel2.4.21 or cc4.0.1_os10.4.9
|
||||
# which compiler to use (detected automatically on Windows)
|
||||
|
||||
# This module respects
|
||||
# TBB_INSTALL_DIR or $ENV{TBB21_INSTALL_DIR} or $ENV{TBB_INSTALL_DIR}
|
||||
|
||||
# This module defines
|
||||
# TBB_INCLUDE_DIRS, where to find task_scheduler_init.h, etc.
|
||||
# TBB_LIBRARY_DIRS, where to find libtbb, libtbbmalloc
|
||||
# TBB_DEBUG_LIBRARY_DIRS, where to find libtbb_debug, libtbbmalloc_debug
|
||||
# TBB_INSTALL_DIR, the base TBB install directory
|
||||
# TBB_LIBRARIES, the libraries to link against to use TBB.
|
||||
# TBB_DEBUG_LIBRARIES, the libraries to link against to use TBB with debug symbols.
|
||||
# TBB_FOUND, If false, don't try to use TBB.
|
||||
# TBB_INTERFACE_VERSION, as defined in tbb/tbb_stddef.h
|
||||
# This module reads hints about search locations from variables:
|
||||
# ENV TBB_ARCH_PLATFORM - for eg. set it to "mic" for Xeon Phi builds
|
||||
# ENV TBB_ROOT or just TBB_ROOT - root directory of tbb installation
|
||||
# ENV TBB_BUILD_PREFIX - specifies the build prefix for user built tbb
|
||||
# libraries. Should be specified with ENV TBB_ROOT
|
||||
# and optionally...
|
||||
# ENV TBB_BUILD_DIR - if build directory is different than ${TBB_ROOT}/build
|
||||
#
|
||||
#
|
||||
# Modified by Robert Maynard from the original OGRE source
|
||||
#
|
||||
#-------------------------------------------------------------------
|
||||
# This file is part of the CMake build system for OGRE
|
||||
# (Object-oriented Graphics Rendering Engine)
|
||||
# For the latest info, see http://www.ogre3d.org/
|
||||
#
|
||||
# The contents of this file are placed in the public domain. Feel
|
||||
# free to make use of it in any way you like.
|
||||
#-------------------------------------------------------------------
|
||||
#
|
||||
#=============================================================================
|
||||
# Copyright 2010-2012 Kitware, Inc.
|
||||
# Copyright 2012 Rolf Eike Beer <eike@sf-mail.de>
|
||||
#
|
||||
# Distributed under the OSI-approved BSD License (the "License");
|
||||
# see accompanying file Copyright.txt for details.
|
||||
#
|
||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the License for more information.
|
||||
#=============================================================================
|
||||
# (To distribute this file outside of CMake, substitute the full
|
||||
# License text for the above reference.)
|
||||
|
||||
|
||||
if (WIN32)
|
||||
# has em64t/vc8 em64t/vc9
|
||||
# has ia32/vc7.1 ia32/vc8 ia32/vc9
|
||||
set(_TBB_DEFAULT_INSTALL_DIR "C:/Program Files/Intel/TBB")
|
||||
set(_TBB_LIB_NAME "tbb")
|
||||
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||
if (MSVC71)
|
||||
set (_TBB_COMPILER "vc7.1")
|
||||
endif(MSVC71)
|
||||
if (MSVC80)
|
||||
set(_TBB_COMPILER "vc8")
|
||||
endif(MSVC80)
|
||||
if (MSVC90)
|
||||
set(_TBB_COMPILER "vc9")
|
||||
endif(MSVC90)
|
||||
if(MSVC10)
|
||||
set(_TBB_COMPILER "vc10")
|
||||
endif(MSVC10)
|
||||
# Todo: add other Windows compilers such as ICL.
|
||||
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||
endif (WIN32)
|
||||
#=============================================================================
|
||||
# FindTBB helper functions and macros
|
||||
#
|
||||
|
||||
if (UNIX)
|
||||
if (APPLE)
|
||||
# MAC
|
||||
set(_TBB_DEFAULT_INSTALL_DIR "/Library/Frameworks/Intel_TBB.framework/Versions")
|
||||
# libs: libtbb.dylib, libtbbmalloc.dylib, *_debug
|
||||
set(_TBB_LIB_NAME "tbb")
|
||||
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||
# default flavor on apple: ia32/cc4.0.1_os10.4.9
|
||||
# Jiri: There is no reason to presume there is only one flavor and
|
||||
# that user's setting of variables should be ignored.
|
||||
if(NOT TBB_COMPILER)
|
||||
set(_TBB_COMPILER "cc4.0.1_os10.4.9")
|
||||
elseif (NOT TBB_COMPILER)
|
||||
set(_TBB_COMPILER ${TBB_COMPILER})
|
||||
endif(NOT TBB_COMPILER)
|
||||
if(NOT TBB_ARCHITECTURE)
|
||||
set(_TBB_ARCHITECTURE "ia32")
|
||||
elseif(NOT TBB_ARCHITECTURE)
|
||||
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||
endif(NOT TBB_ARCHITECTURE)
|
||||
else (APPLE)
|
||||
# LINUX
|
||||
set(_TBB_DEFAULT_INSTALL_DIR "/usr")
|
||||
set(_TBB_LIB_NAME "tbb")
|
||||
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||
# has em64t/cc3.2.3_libc2.3.2_kernel2.4.21 em64t/cc3.3.3_libc2.3.3_kernel2.6.5 em64t/cc3.4.3_libc2.3.4_kernel2.6.9 em64t/cc4.1.0_libc2.4_kernel2.6.16.21
|
||||
# has ia32/*
|
||||
# has itanium/*
|
||||
set(_TBB_COMPILER ${TBB_COMPILER})
|
||||
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||
endif (APPLE)
|
||||
endif (UNIX)
|
||||
# Use TBBConfig.cmake if possible.
|
||||
|
||||
if (CMAKE_SYSTEM MATCHES "SunOS.*")
|
||||
# SUN
|
||||
# not yet supported
|
||||
# has em64t/cc3.4.3_kernel5.10
|
||||
# has ia32/*
|
||||
endif (CMAKE_SYSTEM MATCHES "SunOS.*")
|
||||
|
||||
|
||||
#-- Clear the public variables
|
||||
set (TBB_FOUND "NO")
|
||||
|
||||
|
||||
#-- Find TBB install dir and set ${_TBB_INSTALL_DIR} and cached ${TBB_INSTALL_DIR}
|
||||
# first: use CMake variable TBB_INSTALL_DIR
|
||||
if (TBB_INSTALL_DIR)
|
||||
set (_TBB_INSTALL_DIR ${TBB_INSTALL_DIR})
|
||||
endif (TBB_INSTALL_DIR)
|
||||
# second: use environment variable
|
||||
if (NOT _TBB_INSTALL_DIR)
|
||||
if (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
|
||||
# Intel recommends setting TBB21_INSTALL_DIR
|
||||
if (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB21_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
|
||||
if (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB22_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
|
||||
if (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB30_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
|
||||
endif (NOT _TBB_INSTALL_DIR)
|
||||
# third: try to find path automatically
|
||||
if (NOT _TBB_INSTALL_DIR)
|
||||
if (_TBB_DEFAULT_INSTALL_DIR)
|
||||
set (_TBB_INSTALL_DIR ${_TBB_DEFAULT_INSTALL_DIR})
|
||||
endif (_TBB_DEFAULT_INSTALL_DIR)
|
||||
endif (NOT _TBB_INSTALL_DIR)
|
||||
# sanity check
|
||||
if (NOT _TBB_INSTALL_DIR)
|
||||
message ("ERROR: Unable to find Intel TBB install directory. ${_TBB_INSTALL_DIR}")
|
||||
else (NOT _TBB_INSTALL_DIR)
|
||||
# finally: set the cached CMake variable TBB_INSTALL_DIR
|
||||
if (NOT TBB_INSTALL_DIR)
|
||||
set (TBB_INSTALL_DIR ${_TBB_INSTALL_DIR} CACHE PATH "Intel TBB install directory")
|
||||
mark_as_advanced(TBB_INSTALL_DIR)
|
||||
endif (NOT TBB_INSTALL_DIR)
|
||||
|
||||
|
||||
#-- A macro to rewrite the paths of the library. This is necessary, because
|
||||
# find_library() always found the em64t/vc9 version of the TBB libs
|
||||
macro(TBB_CORRECT_LIB_DIR var_name)
|
||||
# if (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
|
||||
string(REPLACE em64t "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
|
||||
# endif (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
|
||||
string(REPLACE ia32 "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc7.1 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc8 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc9 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc10 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
endmacro(TBB_CORRECT_LIB_DIR var_content)
|
||||
|
||||
|
||||
#-- Look for include directory and set ${TBB_INCLUDE_DIR}
|
||||
set (TBB_INC_SEARCH_DIR ${_TBB_INSTALL_DIR}/include)
|
||||
# Jiri: tbbvars now sets the CPATH environment variable to the directory
|
||||
# containing the headers.
|
||||
find_path(TBB_INCLUDE_DIR
|
||||
tbb/task_scheduler_init.h
|
||||
HINTS ${TBB_INC_SEARCH_DIR} ENV CPATH
|
||||
)
|
||||
mark_as_advanced(TBB_INCLUDE_DIR)
|
||||
|
||||
#-- Look for libraries
|
||||
# GvdB: $ENV{TBB_ARCH_PLATFORM} is set by the build script tbbvars[.bat|.sh|.csh]
|
||||
if (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
|
||||
set (_TBB_LIBRARY_DIR
|
||||
${_TBB_INSTALL_DIR}/lib/$ENV{TBB_ARCH_PLATFORM}
|
||||
${_TBB_INSTALL_DIR}/$ENV{TBB_ARCH_PLATFORM}/lib
|
||||
)
|
||||
endif (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
|
||||
# Jiri: This block isn't mutually exclusive with the previous one
|
||||
# (hence no else), instead I test if the user really specified
|
||||
# the variables in question.
|
||||
if ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
|
||||
# HH: deprecated
|
||||
message(STATUS "[Warning] FindTBB.cmake: The use of TBB_ARCHITECTURE and TBB_COMPILER is deprecated and may not be supported in future versions. Please set \$ENV{TBB_ARCH_PLATFORM} (using tbbvars.[bat|csh|sh]).")
|
||||
# Jiri: It doesn't hurt to look in more places, so I store the hints from
|
||||
# ENV{TBB_ARCH_PLATFORM} and the TBB_ARCHITECTURE and TBB_COMPILER
|
||||
# variables and search them both.
|
||||
set (_TBB_LIBRARY_DIR "${_TBB_INSTALL_DIR}/${_TBB_ARCHITECTURE}/${_TBB_COMPILER}/lib" ${_TBB_LIBRARY_DIR})
|
||||
endif ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
|
||||
|
||||
# GvdB: Mac OS X distribution places libraries directly in lib directory.
|
||||
list(APPEND _TBB_LIBRARY_DIR ${_TBB_INSTALL_DIR}/lib)
|
||||
|
||||
if(EXISTS ${_TBB_INSTALL_DIR}/build)
|
||||
file(GLOB _TBB_BUILD_DIR_RELEASE ${_TBB_INSTALL_DIR}/build/*_release)
|
||||
file(GLOB _TBB_BUILD_DIR_DEBUG ${_TBB_INSTALL_DIR}/build/*_debug)
|
||||
set(_tbb_find_quiet)
|
||||
if (TBB_FIND_QUIETLY)
|
||||
set(_tbb_find_quiet QUIET)
|
||||
endif ()
|
||||
set(_tbb_find_components)
|
||||
set(_tbb_find_optional_components)
|
||||
foreach (_tbb_find_component IN LISTS TBB_FIND_COMPONENTS)
|
||||
if (TBB_FIND_REQUIRED_${_tbb_find_component})
|
||||
list(APPEND _tbb_find_components "${_tbb_find_component}")
|
||||
else ()
|
||||
list(APPEND _tbb_find_optional_components "${_tbb_find_component}")
|
||||
endif ()
|
||||
endforeach ()
|
||||
unset(_tbb_find_component)
|
||||
find_package(TBB CONFIG ${_tbb_find_quiet}
|
||||
COMPONENTS ${_tbb_find_components}
|
||||
OPTIONAL_COMPONENTS ${_tbb_find_optional_components})
|
||||
unset(_tbb_find_quiet)
|
||||
unset(_tbb_find_components)
|
||||
unset(_tbb_find_optional_components)
|
||||
if (TBB_FOUND)
|
||||
return ()
|
||||
endif ()
|
||||
|
||||
# Jiri: No reason not to check the default paths. From recent versions,
|
||||
# tbbvars has started exporting the LIBRARY_PATH and LD_LIBRARY_PATH
|
||||
# variables, which now point to the directories of the lib files.
|
||||
# It all makes more sense to use the ${_TBB_LIBRARY_DIR} as a HINTS
|
||||
# argument instead of the implicit PATHS as it isn't hard-coded
|
||||
# but computed by system introspection. Searching the LIBRARY_PATH
|
||||
# and LD_LIBRARY_PATH environment variables is now even more important
|
||||
# that tbbvars doesn't export TBB_ARCH_PLATFORM and it facilitates
|
||||
# the use of TBB built from sources.
|
||||
find_library(TBB_LIBRARY ${_TBB_LIB_NAME} HINTS ${_TBB_BUILD_DIR_RELEASE} ${_TBB_LIBRARY_DIR})
|
||||
find_library(TBB_MALLOC_LIBRARY ${_TBB_LIB_MALLOC_NAME} HINTS ${_TBB_BUILD_DIR_RELEASE} ${_TBB_LIBRARY_DIR})
|
||||
#====================================================
|
||||
# Fix the library path in case it is a linker script
|
||||
#====================================================
|
||||
function(tbb_extract_real_library library real_library)
|
||||
if(NOT UNIX OR NOT EXISTS ${library})
|
||||
set(${real_library} "${library}" PARENT_SCOPE)
|
||||
return()
|
||||
endif()
|
||||
|
||||
#Extract path from TBB_LIBRARY name
|
||||
get_filename_component(TBB_LIBRARY_DIR ${TBB_LIBRARY} PATH)
|
||||
#Read in the first 4 bytes and see if they are the ELF magic number
|
||||
set(_elf_magic "7f454c46")
|
||||
file(READ ${library} _hex_data OFFSET 0 LIMIT 4 HEX)
|
||||
if(_hex_data STREQUAL _elf_magic)
|
||||
#we have opened a elf binary so this is what
|
||||
#we should link to
|
||||
set(${real_library} "${library}" PARENT_SCOPE)
|
||||
return()
|
||||
endif()
|
||||
|
||||
#TBB_CORRECT_LIB_DIR(TBB_LIBRARY)
|
||||
#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY)
|
||||
mark_as_advanced(TBB_LIBRARY TBB_MALLOC_LIBRARY)
|
||||
file(READ ${library} _data OFFSET 0 LIMIT 1024)
|
||||
if("${_data}" MATCHES "INPUT \\(([^(]+)\\)")
|
||||
#extract out the .so name from REGEX MATCH command
|
||||
set(_proper_so_name "${CMAKE_MATCH_1}")
|
||||
|
||||
#-- Look for debug libraries
|
||||
# Jiri: Changed the same way as for the release libraries.
|
||||
find_library(TBB_LIBRARY_DEBUG ${_TBB_LIB_DEBUG_NAME} HINTS ${_TBB_BUILD_DIR_DEBUG} ${_TBB_LIBRARY_DIR}
|
||||
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||
find_library(TBB_MALLOC_LIBRARY_DEBUG ${_TBB_LIB_MALLOC_DEBUG_NAME} HINTS ${_TBB_BUILD_DIR_DEBUG} ${_TBB_LIBRARY_DIR}
|
||||
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||
#construct path to the real .so which is presumed to be in the same directory
|
||||
#as the input file
|
||||
get_filename_component(_so_dir "${library}" DIRECTORY)
|
||||
set(${real_library} "${_so_dir}/${_proper_so_name}" PARENT_SCOPE)
|
||||
else()
|
||||
#unable to determine what this library is so just hope everything works
|
||||
#and pass it unmodified.
|
||||
set(${real_library} "${library}" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Jiri: Self-built TBB stores the debug libraries in a separate directory.
|
||||
# Extract path from TBB_LIBRARY_DEBUG name
|
||||
get_filename_component(TBB_LIBRARY_DEBUG_DIR ${TBB_LIBRARY_DEBUG} PATH)
|
||||
#===============================================
|
||||
# Do the final processing for the package find.
|
||||
#===============================================
|
||||
macro(findpkg_finish PREFIX TARGET_NAME)
|
||||
if (${PREFIX}_INCLUDE_DIR AND ${PREFIX}_LIBRARY)
|
||||
set(${PREFIX}_FOUND TRUE)
|
||||
set (${PREFIX}_INCLUDE_DIRS ${${PREFIX}_INCLUDE_DIR})
|
||||
set (${PREFIX}_LIBRARIES ${${PREFIX}_LIBRARY})
|
||||
else ()
|
||||
if (${PREFIX}_FIND_REQUIRED AND NOT ${PREFIX}_FIND_QUIETLY)
|
||||
message(FATAL_ERROR "Required library ${PREFIX} not found.")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
#TBB_CORRECT_LIB_DIR(TBB_LIBRARY_DEBUG)
|
||||
#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY_DEBUG)
|
||||
mark_as_advanced(TBB_LIBRARY_DEBUG TBB_MALLOC_LIBRARY_DEBUG)
|
||||
if (NOT TARGET "TBB::${TARGET_NAME}")
|
||||
if (${PREFIX}_LIBRARY_RELEASE)
|
||||
tbb_extract_real_library(${${PREFIX}_LIBRARY_RELEASE} real_release)
|
||||
endif ()
|
||||
if (${PREFIX}_LIBRARY_DEBUG)
|
||||
tbb_extract_real_library(${${PREFIX}_LIBRARY_DEBUG} real_debug)
|
||||
endif ()
|
||||
add_library(TBB::${TARGET_NAME} UNKNOWN IMPORTED)
|
||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${${PREFIX}_INCLUDE_DIR}")
|
||||
if (${PREFIX}_LIBRARY_DEBUG AND ${PREFIX}_LIBRARY_RELEASE)
|
||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
||||
IMPORTED_LOCATION "${real_release}"
|
||||
IMPORTED_LOCATION_DEBUG "${real_debug}"
|
||||
IMPORTED_LOCATION_RELEASE "${real_release}")
|
||||
elseif (${PREFIX}_LIBRARY_RELEASE)
|
||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
||||
IMPORTED_LOCATION "${real_release}")
|
||||
elseif (${PREFIX}_LIBRARY_DEBUG)
|
||||
set_target_properties(TBB::${TARGET_NAME} PROPERTIES
|
||||
IMPORTED_LOCATION "${real_debug}")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
#mark the following variables as internal variables
|
||||
mark_as_advanced(${PREFIX}_INCLUDE_DIR
|
||||
${PREFIX}_LIBRARY
|
||||
${PREFIX}_LIBRARY_DEBUG
|
||||
${PREFIX}_LIBRARY_RELEASE)
|
||||
endmacro()
|
||||
|
||||
#===============================================
|
||||
# Generate debug names from given release names
|
||||
#===============================================
|
||||
macro(get_debug_names PREFIX)
|
||||
foreach(i ${${PREFIX}})
|
||||
set(${PREFIX}_DEBUG ${${PREFIX}_DEBUG} ${i}d ${i}D ${i}_d ${i}_D ${i}_debug ${i})
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
#===============================================
|
||||
# See if we have env vars to help us find tbb
|
||||
#===============================================
|
||||
macro(getenv_path VAR)
|
||||
set(ENV_${VAR} $ENV{${VAR}})
|
||||
# replace won't work if var is blank
|
||||
if (ENV_${VAR})
|
||||
string( REGEX REPLACE "\\\\" "/" ENV_${VAR} ${ENV_${VAR}} )
|
||||
endif ()
|
||||
endmacro()
|
||||
|
||||
#===============================================
|
||||
# Couple a set of release AND debug libraries
|
||||
#===============================================
|
||||
macro(make_library_set PREFIX)
|
||||
if (${PREFIX}_RELEASE AND ${PREFIX}_DEBUG)
|
||||
set(${PREFIX} optimized ${${PREFIX}_RELEASE} debug ${${PREFIX}_DEBUG})
|
||||
elseif (${PREFIX}_RELEASE)
|
||||
set(${PREFIX} ${${PREFIX}_RELEASE})
|
||||
elseif (${PREFIX}_DEBUG)
|
||||
set(${PREFIX} ${${PREFIX}_DEBUG})
|
||||
endif ()
|
||||
endmacro()
|
||||
|
||||
|
||||
if (TBB_INCLUDE_DIR)
|
||||
if (TBB_LIBRARY)
|
||||
set (TBB_FOUND "YES")
|
||||
set (TBB_LIBRARIES ${TBB_LIBRARY} ${TBB_MALLOC_LIBRARY} ${TBB_LIBRARIES})
|
||||
set (TBB_DEBUG_LIBRARIES ${TBB_LIBRARY_DEBUG} ${TBB_MALLOC_LIBRARY_DEBUG} ${TBB_DEBUG_LIBRARIES})
|
||||
set (TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR} CACHE PATH "TBB include directory" FORCE)
|
||||
set (TBB_LIBRARY_DIRS ${TBB_LIBRARY_DIR} CACHE PATH "TBB library directory" FORCE)
|
||||
# Jiri: Self-built TBB stores the debug libraries in a separate directory.
|
||||
set (TBB_DEBUG_LIBRARY_DIRS ${TBB_LIBRARY_DEBUG_DIR} CACHE PATH "TBB debug library directory" FORCE)
|
||||
mark_as_advanced(TBB_INCLUDE_DIRS TBB_LIBRARY_DIRS TBB_DEBUG_LIBRARY_DIRS TBB_LIBRARIES TBB_DEBUG_LIBRARIES)
|
||||
message(STATUS "Found Intel TBB")
|
||||
endif (TBB_LIBRARY)
|
||||
endif (TBB_INCLUDE_DIR)
|
||||
#=============================================================================
|
||||
# Now to actually find TBB
|
||||
#
|
||||
|
||||
# Get path, convert backslashes as ${ENV_${var}}
|
||||
getenv_path(TBB_ROOT)
|
||||
|
||||
# initialize search paths
|
||||
set(TBB_PREFIX_PATH ${TBB_ROOT} ${ENV_TBB_ROOT})
|
||||
set(TBB_INC_SEARCH_PATH "")
|
||||
set(TBB_LIB_SEARCH_PATH "")
|
||||
|
||||
|
||||
# If user built from sources
|
||||
set(TBB_BUILD_PREFIX $ENV{TBB_BUILD_PREFIX})
|
||||
if (TBB_BUILD_PREFIX AND ENV_TBB_ROOT)
|
||||
getenv_path(TBB_BUILD_DIR)
|
||||
if (NOT ENV_TBB_BUILD_DIR)
|
||||
set(ENV_TBB_BUILD_DIR ${ENV_TBB_ROOT}/build)
|
||||
endif ()
|
||||
|
||||
# include directory under ${ENV_TBB_ROOT}/include
|
||||
list(APPEND TBB_LIB_SEARCH_PATH
|
||||
${ENV_TBB_BUILD_DIR}/${TBB_BUILD_PREFIX}_release
|
||||
${ENV_TBB_BUILD_DIR}/${TBB_BUILD_PREFIX}_debug)
|
||||
endif ()
|
||||
|
||||
|
||||
# For Windows, let's assume that the user might be using the precompiled
|
||||
# TBB packages from the main website. These use a rather awkward directory
|
||||
# structure (at least for automatically finding the right files) depending
|
||||
# on platform and compiler, but we'll do our best to accommodate it.
|
||||
# Not adding the same effort for the precompiled linux builds, though. Those
|
||||
# have different versions for CC compiler versions and linux kernels which
|
||||
# will never adequately match the user's setup, so there is no feasible way
|
||||
# to detect the "best" version to use. The user will have to manually
|
||||
# select the right files. (Chances are the distributions are shipping their
|
||||
# custom version of tbb, anyway, so the problem is probably nonexistent.)
|
||||
if (WIN32 AND MSVC)
|
||||
set(COMPILER_PREFIX "vc7.1")
|
||||
if (MSVC_VERSION EQUAL 1400)
|
||||
set(COMPILER_PREFIX "vc8")
|
||||
elseif(MSVC_VERSION EQUAL 1500)
|
||||
set(COMPILER_PREFIX "vc9")
|
||||
elseif(MSVC_VERSION EQUAL 1600)
|
||||
set(COMPILER_PREFIX "vc10")
|
||||
elseif(MSVC_VERSION EQUAL 1700)
|
||||
set(COMPILER_PREFIX "vc11")
|
||||
elseif(MSVC_VERSION EQUAL 1800)
|
||||
set(COMPILER_PREFIX "vc12")
|
||||
elseif(MSVC_VERSION GREATER_EQUAL 1900)
|
||||
set(COMPILER_PREFIX "vc14")
|
||||
endif ()
|
||||
|
||||
# for each prefix path, add ia32/64\${COMPILER_PREFIX}\lib to the lib search path
|
||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
||||
if (CMAKE_CL_64)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia64/${COMPILER_PREFIX}/lib)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia64/${COMPILER_PREFIX})
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/intel64/${COMPILER_PREFIX}/lib)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/intel64/${COMPILER_PREFIX})
|
||||
else ()
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia32/${COMPILER_PREFIX}/lib)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia32/${COMPILER_PREFIX})
|
||||
endif ()
|
||||
endforeach ()
|
||||
endif ()
|
||||
|
||||
# For OS X binary distribution, choose libc++ based libraries for Mavericks (10.9)
|
||||
# and above and AppleClang
|
||||
if (CMAKE_SYSTEM_NAME STREQUAL "Darwin" AND
|
||||
NOT CMAKE_SYSTEM_VERSION VERSION_LESS 13.0)
|
||||
set (USE_LIBCXX OFF)
|
||||
cmake_policy(GET CMP0025 POLICY_VAR)
|
||||
|
||||
if (POLICY_VAR STREQUAL "NEW")
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
|
||||
set (USE_LIBCXX ON)
|
||||
endif ()
|
||||
else ()
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
||||
set (USE_LIBCXX ON)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (USE_LIBCXX)
|
||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
||||
list (APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/libc++ ${dir}/libc++/lib)
|
||||
endforeach ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
# check compiler ABI
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set(COMPILER_PREFIX)
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.8)
|
||||
list(APPEND COMPILER_PREFIX "gcc4.8")
|
||||
endif()
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.7)
|
||||
list(APPEND COMPILER_PREFIX "gcc4.7")
|
||||
endif()
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.4)
|
||||
list(APPEND COMPILER_PREFIX "gcc4.4")
|
||||
endif()
|
||||
list(APPEND COMPILER_PREFIX "gcc4.1")
|
||||
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
set(COMPILER_PREFIX)
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 4.0) # Complete guess
|
||||
list(APPEND COMPILER_PREFIX "gcc4.8")
|
||||
endif()
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 3.6)
|
||||
list(APPEND COMPILER_PREFIX "gcc4.7")
|
||||
endif()
|
||||
list(APPEND COMPILER_PREFIX "gcc4.4")
|
||||
else() # Assume compatibility with 4.4 for other compilers
|
||||
list(APPEND COMPILER_PREFIX "gcc4.4")
|
||||
endif ()
|
||||
|
||||
# if platform architecture is explicitly specified
|
||||
set(TBB_ARCH_PLATFORM $ENV{TBB_ARCH_PLATFORM})
|
||||
if (TBB_ARCH_PLATFORM)
|
||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/${TBB_ARCH_PLATFORM}/lib)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/${TBB_ARCH_PLATFORM})
|
||||
endforeach ()
|
||||
endif ()
|
||||
|
||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
||||
foreach (prefix IN LISTS COMPILER_PREFIX)
|
||||
if (CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/intel64)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/intel64/${prefix})
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/intel64/lib)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/intel64/${prefix}/lib)
|
||||
else ()
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia32)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib/ia32/${prefix})
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia32/lib)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/ia32/${prefix}/lib)
|
||||
endif ()
|
||||
endforeach()
|
||||
endforeach ()
|
||||
|
||||
# add general search paths
|
||||
foreach (dir IN LISTS TBB_PREFIX_PATH)
|
||||
list(APPEND TBB_LIB_SEARCH_PATH ${dir}/lib ${dir}/Lib ${dir}/lib/tbb
|
||||
${dir}/Libs)
|
||||
list(APPEND TBB_INC_SEARCH_PATH ${dir}/include ${dir}/Include
|
||||
${dir}/include/tbb)
|
||||
endforeach ()
|
||||
|
||||
set(TBB_LIBRARY_NAMES tbb)
|
||||
get_debug_names(TBB_LIBRARY_NAMES)
|
||||
|
||||
|
||||
find_path(TBB_INCLUDE_DIR
|
||||
NAMES tbb/tbb.h
|
||||
PATHS ${TBB_INC_SEARCH_PATH})
|
||||
|
||||
find_library(TBB_LIBRARY_RELEASE
|
||||
NAMES ${TBB_LIBRARY_NAMES}
|
||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
||||
find_library(TBB_LIBRARY_DEBUG
|
||||
NAMES ${TBB_LIBRARY_NAMES_DEBUG}
|
||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
||||
make_library_set(TBB_LIBRARY)
|
||||
|
||||
findpkg_finish(TBB tbb)
|
||||
|
||||
#if we haven't found TBB no point on going any further
|
||||
if (NOT TBB_FOUND)
|
||||
message("ERROR: Intel TBB NOT found!")
|
||||
message(STATUS "Looked for Threading Building Blocks in ${_TBB_INSTALL_DIR}")
|
||||
# do only throw fatal, if this pkg is REQUIRED
|
||||
if (TBB_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "Could NOT find TBB library.")
|
||||
endif (TBB_FIND_REQUIRED)
|
||||
endif (NOT TBB_FOUND)
|
||||
return()
|
||||
endif ()
|
||||
|
||||
endif (NOT _TBB_INSTALL_DIR)
|
||||
#=============================================================================
|
||||
# Look for TBB's malloc package
|
||||
set(TBB_MALLOC_LIBRARY_NAMES tbbmalloc)
|
||||
get_debug_names(TBB_MALLOC_LIBRARY_NAMES)
|
||||
|
||||
if (TBB_FOUND)
|
||||
set(TBB_INTERFACE_VERSION 0)
|
||||
FILE(READ "${TBB_INCLUDE_DIRS}/tbb/tbb_stddef.h" _TBB_VERSION_CONTENTS)
|
||||
STRING(REGEX REPLACE ".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1" TBB_INTERFACE_VERSION "${_TBB_VERSION_CONTENTS}")
|
||||
set(TBB_INTERFACE_VERSION "${TBB_INTERFACE_VERSION}")
|
||||
message(STATUS "TBB interface version: ${TBB_INTERFACE_VERSION}")
|
||||
endif (TBB_FOUND)
|
||||
find_path(TBB_MALLOC_INCLUDE_DIR
|
||||
NAMES tbb/tbb.h
|
||||
PATHS ${TBB_INC_SEARCH_PATH})
|
||||
|
||||
find_library(TBB_MALLOC_LIBRARY_RELEASE
|
||||
NAMES ${TBB_MALLOC_LIBRARY_NAMES}
|
||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
||||
find_library(TBB_MALLOC_LIBRARY_DEBUG
|
||||
NAMES ${TBB_MALLOC_LIBRARY_NAMES_DEBUG}
|
||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
||||
make_library_set(TBB_MALLOC_LIBRARY)
|
||||
|
||||
findpkg_finish(TBB_MALLOC tbbmalloc)
|
||||
|
||||
#=============================================================================
|
||||
# Look for TBB's malloc proxy package
|
||||
set(TBB_MALLOC_PROXY_LIBRARY_NAMES tbbmalloc_proxy)
|
||||
get_debug_names(TBB_MALLOC_PROXY_LIBRARY_NAMES)
|
||||
|
||||
find_path(TBB_MALLOC_PROXY_INCLUDE_DIR
|
||||
NAMES tbb/tbbmalloc_proxy.h
|
||||
PATHS ${TBB_INC_SEARCH_PATH})
|
||||
|
||||
find_library(TBB_MALLOC_PROXY_LIBRARY_RELEASE
|
||||
NAMES ${TBB_MALLOC_PROXY_LIBRARY_NAMES}
|
||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
||||
find_library(TBB_MALLOC_PROXY_LIBRARY_DEBUG
|
||||
NAMES ${TBB_MALLOC_PROXY_LIBRARY_NAMES_DEBUG}
|
||||
PATHS ${TBB_LIB_SEARCH_PATH})
|
||||
make_library_set(TBB_MALLOC_PROXY_LIBRARY)
|
||||
|
||||
findpkg_finish(TBB_MALLOC_PROXY tbbmalloc_proxy)
|
||||
|
||||
|
||||
#=============================================================================
|
||||
#parse all the version numbers from tbb
|
||||
if(NOT TBB_VERSION)
|
||||
if (EXISTS "${TBB_INCLUDE_DIR}/oneapi/tbb/version.h")
|
||||
file(STRINGS
|
||||
"${TBB_INCLUDE_DIR}/oneapi/tbb/version.h"
|
||||
TBB_VERSION_CONTENTS
|
||||
REGEX "VERSION")
|
||||
else()
|
||||
#only read the start of the file
|
||||
file(STRINGS
|
||||
"${TBB_INCLUDE_DIR}/tbb/tbb_stddef.h"
|
||||
TBB_VERSION_CONTENTS
|
||||
REGEX "VERSION")
|
||||
endif()
|
||||
|
||||
string(REGEX REPLACE
|
||||
".*#define TBB_VERSION_MAJOR ([0-9]+).*" "\\1"
|
||||
TBB_VERSION_MAJOR "${TBB_VERSION_CONTENTS}")
|
||||
|
||||
string(REGEX REPLACE
|
||||
".*#define TBB_VERSION_MINOR ([0-9]+).*" "\\1"
|
||||
TBB_VERSION_MINOR "${TBB_VERSION_CONTENTS}")
|
||||
|
||||
string(REGEX REPLACE
|
||||
".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1"
|
||||
TBB_INTERFACE_VERSION "${TBB_VERSION_CONTENTS}")
|
||||
|
||||
string(REGEX REPLACE
|
||||
".*#define TBB_COMPATIBLE_INTERFACE_VERSION ([0-9]+).*" "\\1"
|
||||
TBB_COMPATIBLE_INTERFACE_VERSION "${TBB_VERSION_CONTENTS}")
|
||||
|
||||
endif()
|
||||
@@ -4,13 +4,23 @@ ARG BUILD_CONCURRENCY
RUN mkdir -p /src && mkdir -p /opt

RUN apt-get update && \
apt-get -y --no-install-recommends install cmake make git gcc g++ libbz2-dev libxml2-dev \
libzip-dev libboost1.74-all-dev lua5.2 liblua5.2-dev libtbb-dev -o APT::Install-Suggests=0 -o APT::Install-Recommends=0
apt-get -y --no-install-recommends install ca-certificates cmake make git gcc g++ libbz2-dev libxml2-dev wget \
libzip-dev libboost1.74-all-dev lua5.4 liblua5.4-dev -o APT::Install-Suggests=0 -o APT::Install-Recommends=0

RUN NPROC=${BUILD_CONCURRENCY:-$(nproc)} && \
ldconfig /usr/local/lib && \
git clone --branch v2021.3.0 --single-branch https://github.com/oneapi-src/oneTBB.git && \
cd oneTBB && \
mkdir build && \
cd build && \
cmake -DTBB_TEST=OFF -DCMAKE_BUILD_TYPE=Release .. && \
cmake --build . && \
cmake --install .

COPY . /src
WORKDIR /src

RUN NPROC=${BUILD_CONCURRENCY:-$(grep -c ^processor /proc/cpuinfo 2>/dev/null || 1)} && \
RUN NPROC=${BUILD_CONCURRENCY:-$(nproc)} && \
echo "Building OSRM ${DOCKER_TAG}" && \
git show --format="%H" | head -n1 > /opt/OSRM_GITSHA && \
echo "Building OSRM gitsha $(cat /opt/OSRM_GITSHA)" && \
@@ -29,23 +39,29 @@ RUN NPROC=${BUILD_CONCURRENCY:-$(grep -c ^processor /proc/cpuinfo 2>/dev/null ||
strip /usr/local/bin/* && \
rm -rf /src /usr/local/lib/libosrm*

# Multistage build to reduce image size - https://docs.docker.com/engine/userguide/eng-image/multistage-build/#use-multi-stage-builds
# Only the content below ends up in the image, this helps remove /src from the image (which is large)
FROM debian:bullseye-slim as runstage
RUN mkdir -p /src && mkdir -p /opt

COPY --from=builder /usr/local /usr/local
COPY --from=builder /opt /opt

RUN apt-get update && \
apt-get install -y --no-install-recommends libboost-program-options1.74.0 libboost-regex1.74.0 \
libboost-date-time1.74.0 libboost-chrono1.74.0 libboost-filesystem1.74.0 \
libboost-iostreams1.74.0 libboost-system1.74.0 libboost-thread1.74.0 \
expat liblua5.2-0 libtbb2 &&\
rm -rf /var/lib/apt/lists/*
COPY --from=builder /usr/local /usr/local
COPY --from=builder /opt /opt
expat liblua5.4-0 && \
rm -rf /var/lib/apt/lists/* && \
# add /usr/local/lib to ldconfig to allow loading libraries from there
ldconfig /usr/local/lib

RUN /usr/local/bin/osrm-extract --help && \
/usr/local/bin/osrm-routed --help && \
/usr/local/bin/osrm-contract --help && \
/usr/local/bin/osrm-partition --help && \
/usr/local/bin/osrm-customize --help

WORKDIR /opt

EXPOSE 5000
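Typical usage of the resulting image might look like the sketch below; the tag, mount path, and extract file are placeholders, and the `/opt/car.lua` profile path follows the project's Docker usage docs:

```sh
# Illustrative only: build the image from this Dockerfile and run osrm-extract
# inside it against a locally downloaded OSM extract.
docker build -t osrm-backend:local .
docker run --rm -v "${PWD}:/data" osrm-backend:local \
    osrm-extract -p /opt/car.lua /data/berlin-latest.osm.pbf
```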
docs/http.md (134 changed lines)

@@ -1,7 +1,7 @@
# OSRM HTTP server

Built-in HTTP server is a basic HTTP/1.0 server that supports 'keep-alive' extension. Persistent connections are limited to 512 requests per
connection and allow no more then 5 seconds between requests.
The built-in HTTP server is a basic HTTP/1.0 server that supports a 'keep-alive' extension. Persistent connections are limited to 512 requests per
connection and allow no more than 5 seconds between requests.

## General options

@@ -25,20 +25,20 @@ GET /{service}/{version}/{profile}/{coordinates}[.{format}]?option=value&option=

Passing any `option=value` is optional. `polyline` follows Google's polyline format with precision 5 by default and can be generated using [this package](https://www.npmjs.com/package/polyline).

To pass parameters to each location some options support an array like encoding:
To pass parameters to each location some options support an array-like encoding:

**Request options**

| Option | Values | Description |
|----------------|--------------------------------------------------------|-------------|
|bearings |`{bearing};{bearing}[;{bearing} ...]` |Limits the search to segments with given bearing in degrees towards true north in clockwise direction. |
|bearings |`{bearing};{bearing}[;{bearing} ...]` |Limits the search to segments with given bearing in degrees towards true north in a clockwise direction. |
|radiuses |`{radius};{radius}[;{radius} ...]` |Limits the search to given radius in meters. |
|generate\_hints |`true` (default), `false` |Adds a Hint to the response which can be used in subsequent requests, see `hints` parameter. |
|hints |`{hint};{hint}[;{hint} ...]` |Hint from previous request to derive position in street network. |
|approaches |`{approach};{approach}[;{approach} ...]` |Keep waypoints on curbside. |
|exclude |`{class}[,{class}]` |Additive list of classes to avoid, order does not matter. |
|exclude |`{class}[,{class}]` |Additive list of classes to avoid, the order does not matter. |
|snapping |`default` (default), `any` |Default snapping avoids is_startpoint (see profile) edges, `any` will snap to any edge in the graph |
|skip_waypoints |`true`, `false` (default) |Removes waypoints from the response. Waypoints are still calculated, but not serialized. Could be useful in case you are interested in some other part of response and do not want to transfer waste data. |
|skip_waypoints |`true`, `false` (default) |Removes waypoints from the response. Waypoints are still calculated, but not serialized. Could be useful in case you are interested in some other part of the response and do not want to transfer waste data. |

Where the elements follow the following format:

@@ -56,7 +56,7 @@ Where the elements follow the following format:

The number of elements must match exactly the number of locations (except for `generate_hints` and `exclude`). If you don't want to pass a value but instead use the default you can pass an empty `element`.

Example: 2nd location use the default value for `option`:
Example: 2nd location uses the default value for `option`:

```
{option}={element};;{element}
@@ -88,17 +88,17 @@ Every response object has a `code` property containing one of the strings below
| `InvalidService` | Service name is invalid. |
| `InvalidVersion` | Version is not found. |
| `InvalidOptions` | Options are invalid. |
| `InvalidQuery` | The query string is synctactically malformed. |
| `InvalidQuery` | The query string is syntactically malformed. |
| `InvalidValue` | The successfully parsed query parameters are invalid. |
| `NoSegment` | One of the supplied input coordinates could not snap to street segment. |
| `TooBig` | The request size violates one of the service specific request size restrictions. |
| `NoSegment` | One of the supplied input coordinates could not snap to the street segment. |
| `TooBig` | The request size violates one of the service-specific request size restrictions. |

- `message` is a **optional** human-readable error message. All other status types are service dependent.
- In case of an error the HTTP status code will be `400`. Otherwise the HTTP status code will be `200` and `code` will be `Ok`.
- `message` is a **optional** human-readable error message. All other status types are service-dependent.
- In case of an error the HTTP status code will be `400`. Otherwise, the HTTP status code will be `200` and `code` will be `Ok`.
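A hypothetical error payload consistent with the fields described above; the exact `message` wording is illustrative:

```json
{
  "code": "InvalidQuery",
  "message": "Query string malformed"
}
```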
#### Data version
|
||||
|
||||
Every response object has a `data_version` propetry containing timestamp from the original OpenStreetMap file. This field is optional. It can be ommited if data_version parametr was not set on osrm-extract stage or OSM file has not `osmosis_replication_timestamp` section.
|
||||
Every response object has a `data_version` property containing a timestamp from the original OpenStreetMap file. This field is optional. It can be omitted if the data_version parameter was not set on the osrm-extract stage or the OSM file has not `osmosis_replication_timestamp` section.
|
||||
|
||||
#### Example response
|
||||
|
||||
@ -129,8 +129,8 @@ In addition to the [general options](#general-options) the following options are
|
||||
|------------|------------------------------|----------------------------------------------------|
|
||||
|number |`integer >= 1` (default `1`) |Number of nearest segments that should be returned. |
|
||||
|
||||
As `waypoints` is a single thing, returned byt that service, using it with option `skip_waypoints` set to `true` is quite useless, but still
|
||||
possible. In that case only `code` field will be returned.
|
||||
As `waypoints` is the only thing returned by that service, using it with the option `skip_waypoints` set to `true` is rarely useful, but still
|
||||
possible. In that case, only the `code` field will be returned.
|
||||
|
||||
**Response**
|
||||
|
||||
@ -147,7 +147,7 @@ curl 'http://router.project-osrm.org/nearest/v1/driving/13.388860,52.517037?numb
|
||||
|
||||
#### Example Response
|
||||
|
||||
```json
|
||||
```JSON
|
||||
{
|
||||
"waypoints" : [
|
||||
{
|
||||
@ -239,7 +239,7 @@ curl 'http://router.project-osrm.org/route/v1/driving/13.388860,52.517037;13.397
|
||||
|
||||
### Table service
|
||||
|
||||
Computes the duration of the fastest route between all pairs of supplied coordinates. Returns the durations or distances or both between the coordinate pairs. Note that the distances are not the shortest distance between two coordinates, but rather the distances of the fastest routes. Duration is in seconds and distances is in meters.
|
||||
Computes the duration of the fastest route between all pairs of supplied coordinates. Returns durations or distances or both between the coordinate pairs. Note that the distances are not the shortest distance between two coordinates, but rather the distances of the fastest routes. Durations are in seconds and distances are in meters.
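As a minimal sketch (added for illustration; the public demo server and Node.js 18+ `fetch` are assumptions), the following requests both durations and distances using the endpoint shown below:

```javascript
// Hypothetical sketch: request a 3x3 matrix of both durations and distances.
async function exampleTable() {
  const coords = '13.388860,52.517037;13.397634,52.529407;13.428555,52.523219';
  const url = `http://router.project-osrm.org/table/v1/driving/${coords}` +
              '?annotations=duration,distance';
  const table = await (await fetch(url)).json();
  // durations[i][j] is in seconds, distances[i][j] in meters, both describing
  // the fastest route from coordinate i to coordinate j.
  console.log(table.durations[0][1], table.distances[0][1]);
}
```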
|
||||
|
||||
```endpoint
|
||||
GET /table/v1/{profile}/{coordinates}?{sources}=[{elem}...];&{destinations}=[{elem}...]&annotations={duration|distance|duration,distance}
|
||||
@ -417,7 +417,7 @@ All other properties might be undefined.
|
||||
### Match service
|
||||
|
||||
Map matching matches/snaps given GPS points to the road network in the most plausible way.
|
||||
Please note the request might result multiple sub-traces. Large jumps in the timestamps (> 60s) or improbable transitions lead to trace splits if a complete matching could not be found.
|
||||
Please note the request might result in multiple sub-traces. Large jumps in the timestamps (> 60s) or improbable transitions lead to trace splits if a complete matching could not be found.
|
||||
The algorithm might not be able to match all points. Outliers are removed if they cannot be matched successfully.
|
||||
|
||||
```endpoint
|
||||
@ -452,11 +452,11 @@ The area to search is chosen such that the correct candidate should be considere
|
||||
|
||||
- `code`: `Ok` if the request was successful; otherwise see the service-dependent and general status codes.
|
||||
- `tracepoints`: Array of `Waypoint` objects representing all points of the trace in order.
|
||||
If the trace point was ommited by map matching because it is an outlier, the entry will be `null`.
|
||||
If the tracepoint was omitted by map matching because it is an outlier, the entry will be `null`.
|
||||
Each `Waypoint` object has the following additional properties:
|
||||
- `matchings_index`: Index to the `Route` object in `matchings` the sub-trace was matched to.
|
||||
- `waypoint_index`: Index of the waypoint inside the matched route.
|
||||
- `alternatives_count`: Number of probable alternative matchings for this trace point. A value of zero indicate that this point was matched unambiguously. Split the trace at these points for incremental map matching.
|
||||
- `alternatives_count`: Number of probable alternative matchings for this tracepoint. A value of zero indicates that this point was matched unambiguously. Split the trace at these points for incremental map matching.
|
||||
- `matchings`: An array of `Route` objects that assemble the trace. Each `Route` object has the following additional properties:
|
||||
- `confidence`: Confidence of the matching. `float` value between 0 and 1. 1 is very confident that the matching is correct.
|
||||
|
||||
@ -471,7 +471,7 @@ All other properties might be undefined.
|
||||
### Trip service
|
||||
|
||||
The trip plugin solves the Traveling Salesman Problem using a greedy heuristic (farthest-insertion algorithm) for 10 or more waypoints and uses brute force for fewer than 10 waypoints.
|
||||
The returned path does not have to be the fastest path. As TSP is NP-hard it only returns an approximation.
|
||||
The returned path does not have to be the fastest one. As TSP is NP-hard it only returns an approximation.
|
||||
Note that all input coordinates have to be connected for the trip service to work.
|
||||
|
||||
```endpoint
|
||||
@ -492,12 +492,12 @@ In addition to the [general options](#general-options) the following options are
|
||||
|
||||
**Fixing Start and End Points**
|
||||
|
||||
It is possible to explicitely set the start or end coordinate of the trip.
|
||||
When source is set to `first`, the first coordinate is used as start coordinate of the trip in the output. When destination is set to `last`, the last coordinate will be used as destination of the trip in the returned output. If you specify `any`, any of the coordinates can be used as the first or last coordinate in the output.
|
||||
It is possible to explicitly set the start or end coordinate of the trip.
|
||||
When the source is set to `first`, the first coordinate is used as the start coordinate of the trip in the output. When the destination is set to `last`, the last coordinate will be used as the destination of the trip in the returned output. If you specify `any`, any of the coordinates can be used as the first or last coordinate in the output.
|
||||
|
||||
However, if `source=any&destination=any` the returned round-trip will still start at the first input coordinate by default.
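For illustration, here is a sketch of a non-roundtrip query that fixes both endpoints, one of the combinations listed as supported in the table below (the public demo server and Node.js 18+ `fetch` are assumptions):

```javascript
// Hypothetical sketch: an A-to-B trip that keeps the first coordinate as the
// start and the last coordinate as the end of the computed trip.
async function exampleTrip() {
  const coords = '13.388860,52.517037;13.397634,52.529407;13.428555,52.523219';
  const url = `http://router.project-osrm.org/trip/v1/driving/${coords}` +
              '?roundtrip=false&source=first&destination=last';
  const trip = await (await fetch(url)).json();
  // Each waypoint reports `waypoint_index`, its position within the trip.
  console.log(trip.waypoints.map(w => w.waypoint_index));
}
```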
|
||||
|
||||
Currently, not all combinations of `roundtrip`, `source` and `destination` are supported.
|
||||
Currently, not all combinations of `roundtrip`, `source`, and `destination` are supported.
|
||||
Right now, the following combinations are possible:
|
||||
|
||||
| roundtrip | source | destination | supported |
|
||||
@ -576,7 +576,7 @@ Vector tiles contain two layers:
|
||||
| `duration` | `float` | how long this segment takes to traverse, in seconds. This value is used to calculate the total route ETA. |
|
||||
| `weight ` | `integer` | how long this segment takes to traverse, in units (may differ from `duration` when artificial biasing is applied in the Lua profiles). ACTUAL ROUTING USES THIS VALUE. |
|
||||
| `name` | `string` | the name of the road this segment belongs to |
|
||||
| `rate` | `float` | the value of `length/weight` - analagous to `speed`, but using the `weight` value rather than `duration`, rounded to the nearest integer |
|
||||
| `rate` | `float` | the value of `length/weight` - analogous to `speed`, but using the `weight` value rather than `duration`, rounded to the nearest integer |
|
||||
| `is_startpoint` | `boolean` | whether this segment can be used as a start/endpoint for routes |
|
||||
|
||||
`turns` layer:
|
||||
@ -601,13 +601,13 @@ Represents a route through (potentially multiple) waypoints.
|
||||
|
||||
- `distance`: The distance traveled by the route, in `float` meters.
|
||||
- `duration`: The estimated travel time, in `float` number of seconds.
|
||||
- `geometry`: The whole geometry of the route value depending on `overview` parameter, format depending on the `geometries` parameter. See `RouteStep`'s `geometry` property for a parameter documentation.
|
||||
- `geometry`: The whole geometry of the route, its value depending on the `overview` parameter and its format on the `geometries` parameter. See `RouteStep`'s `geometry` property for the parameter documentation.
|
||||
- `weight`: The calculated weight of the route.
|
||||
- `weight_name`: The name of the weight profile used during extraction phase.
|
||||
- `weight_name`: The name of the weight profile used during the extraction phase.
|
||||
|
||||
| overview | Description |
|
||||
|------------|-----------------------------|
|
||||
| simplified | Geometry is simplified according to the highest zoom level it can still be displayed on full. |
|
||||
| simplified | Geometry is simplified according to the highest zoom level it can still be displayed in full. |
|
||||
| full | Geometry is not simplified. |
|
||||
| false | Geometry is not added. |
|
||||
|
||||
@ -652,7 +652,7 @@ Represents a route between two waypoints.
|
||||
|
||||
| summary | |
|
||||
|--------------|-----------------------------------------------------------------------|
|
||||
| true | Names of the two major roads used. Can be empty if route is too short.|
|
||||
| true | Names of the two major roads used. Can be empty if the route is too short.|
|
||||
| false | empty `string` |
|
||||
|
||||
- `steps`: Depends on the `steps` parameter.
|
||||
@ -662,11 +662,11 @@ Represents a route between two waypoints.
|
||||
| true | array of `RouteStep` objects describing the turn-by-turn instructions |
|
||||
| false | empty array |
|
||||
|
||||
- `annotation`: Additional details about each coordinate along the route geometry:
|
||||
- `annotation`: Additional details about each coordinate along the route geometry:
|
||||
|
||||
| annotations | |
|
||||
|--------------|-------------------------------------------------------------------------------|
|
||||
| true | An `Annotation` object containing node ids, durations, distances and weights. |
|
||||
| true | An `Annotation` object containing node ids, durations, distances, and weights. |
|
||||
| false | `undefined` |
|
||||
|
||||
#### Example
|
||||
@ -696,7 +696,7 @@ Annotation of the whole route leg with fine-grained information about each segme
|
||||
|
||||
**Properties**
|
||||
|
||||
- `distance`: The distance, in metres, between each pair of coordinates
|
||||
- `distance`: The distance, in meters, between each pair of coordinates
|
||||
- `duration`: The duration between each pair of coordinates, in seconds. Does not include the duration of any turns.
|
||||
- `datasources`: The index of the data source for the speed between each pair of coordinates. `0` is the default profile, other values are supplied via `--segment-speed-file` to `osrm-contract` or `osrm-customize`. String-like names are in the `metadata.datasource_names` array.
|
||||
- `nodes`: The OSM node ID for each coordinate along the route, excluding the first/last user-supplied coordinates
|
||||
@ -803,11 +803,11 @@ step.
|
||||
- `bearing_after`: The clockwise angle from true north to the
|
||||
direction of travel immediately after the maneuver. Range 0-359.
|
||||
- `type` A string indicating the type of maneuver. **new identifiers might be introduced without API change**
|
||||
Types unknown to the client should be handled like the `turn` type, the existence of correct `modifier` values is guranteed.
|
||||
Types unknown to the client should be handled like the `turn` type, the existence of correct `modifier` values is guaranteed.
|
||||
|
||||
| `type` | Description |
|
||||
|------------------|--------------------------------------------------------------|
|
||||
| `turn` | a basic turn into direction of the `modifier` |
|
||||
| `turn` | a basic turn into the direction of the `modifier` |
|
||||
| `new name` | no turn is taken/possible, but the road name changes. The road can take a turn itself, following `modifier`. |
|
||||
| `depart` | indicates the departure of the leg |
|
||||
| `arrive` | indicates the destination of the leg |
|
||||
@ -821,9 +821,9 @@ step.
|
||||
| `continue` | Turn in direction of `modifier` to stay on the same road |
|
||||
| `roundabout` | traverse roundabout, if the route leaves the roundabout there will be an additional property `exit` for exit counting. The modifier specifies the direction of entering the roundabout. |
|
||||
| `rotary` | a traffic circle. While very similar to a larger version of a roundabout, it does not necessarily follow roundabout rules for right of way. It can offer `rotary_name` and/or `rotary_pronunciation` parameters (located in the RouteStep object) in addition to the `exit` parameter (located on the StepManeuver object). |
|
||||
| `roundabout turn`| Describes a turn at a small roundabout that should be treated as normal turn. The `modifier` indicates the turn direciton. Example instruction: `At the roundabout turn left`. |
|
||||
| `roundabout turn`| Describes a turn at a small roundabout that should be treated as a normal turn. The `modifier` indicates the turn direction. Example instruction: `At the roundabout turn left`. |
|
||||
| `notification` | not an actual turn but a change in the driving conditions. For example the travel mode or classes. If the road takes a turn itself, the `modifier` describes the direction |
|
||||
| `exit roundabout`| Describes a maneuver exiting a roundabout (usually preceeded by a `roundabout` instruction) |
|
||||
| `exit roundabout`| Describes a maneuver exiting a roundabout (usually preceded by a `roundabout` instruction) |
|
||||
| `exit rotary` | Describes the maneuver exiting a rotary (large named roundabout) |
|
||||
|
||||
Please note that even though there are `new name` and `notification` instructions, the `mode` and `name` can change
|
||||
@ -833,7 +833,7 @@ step.
|
||||
|
||||
| `modifier` | Description |
|
||||
|-------------------|-------------------------------------------|
|
||||
| `uturn` | indicates reversal of direction |
|
||||
| `uturn` | indicates the reversal of direction |
|
||||
| `sharp right` | a sharp right turn |
|
||||
| `right` | a normal turn to the right |
|
||||
| `slight right` | a slight turn to the right |
|
||||
@ -851,8 +851,8 @@ step.
|
||||
| `turn` | `modifier` indicates the change in direction accomplished through the turn |
|
||||
| `depart`/`arrive` | `modifier` indicates the position of departure point and arrival point in relation to the current direction of travel |
|
||||
|
||||
- `exit` An optional `integer` indicating number of the exit to take. The property exists for the `roundabout` / `rotary` property:
|
||||
Number of the roundabout exit to take. If exit is `undefined` the destination is on the roundabout.
|
||||
- `exit` An optional `integer` indicating the number of the exit to take. The property exists for the `roundabout` / `rotary` property:
|
||||
Number of the roundabout exit to take. If an exit is `undefined` the destination is on the roundabout.
|
||||
|
||||
|
||||
New properties (potentially depending on `type`) may be introduced in the future without an API version change.
|
||||
@ -863,7 +863,7 @@ A `Lane` represents a turn lane at the corresponding turn location.
|
||||
|
||||
**Properties**
|
||||
|
||||
- `indications`: a indication (e.g. marking on the road) specifying the turn lane. A road can have multiple indications (e.g. an arrow pointing straight and left). The indications are given in an array, each containing one of the following types. Further indications might be added on without an API version change.
|
||||
- `indications`: an indication (e.g. a marking on the road) specifying the turn lane. A road can have multiple indications (e.g. an arrow pointing straight and left). The indications are given in an array, each containing one of the following types. Further indications might be added without an API version change.
|
||||
|
||||
| `value` | Description |
|
||||
|------------------------|---------------------------------------------------------------------------------------------------------------------------|
|
||||
@ -890,7 +890,7 @@ A `Lane` represents a turn lane at the corresponding turn location.
|
||||
|
||||
### Intersection object
|
||||
|
||||
An intersection gives a full representation of any cross-way the path passes bay. For every step, the very first intersection (`intersections[0]`) corresponds to the
|
||||
An intersection gives a full representation of any cross-way the path passes by. For every step, the very first intersection (`intersections[0]`) corresponds to the
|
||||
location of the StepManeuver. Further intersections are listed for every cross-way until the next turn instruction.
|
||||
|
||||
**Properties**
|
||||
@ -926,15 +926,15 @@ location of the StepManeuver. Further intersections are listed for every cross-w
|
||||
|
||||
### Waypoint object
|
||||
|
||||
Object used to describe waypoint on a route.
|
||||
The object is used to describe a waypoint on a route.
|
||||
|
||||
**Properties**
|
||||
|
||||
- `name` Name of the street the coordinate snapped to
|
||||
- `location` Array that contains the `[longitude, latitude]` pair of the snapped coordinate
|
||||
- `distance` The distance, in metres, from the input coordinate to the snapped coordinate
|
||||
- `distance` The distance, in meters, from the input coordinate to the snapped coordinate
|
||||
- `hint` Unique internal identifier of the segment (ephemeral, not constant over data updates)
|
||||
This can be used on subsequent request to significantly speed up the query and to connect multiple services.
|
||||
This can be used on subsequent requests to significantly speed up the query and to connect multiple services.
|
||||
E.g. you can use the `hint` value obtained by the `nearest` query as `hint` values for `route` inputs.
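A small sketch of that workflow (added for illustration; the public demo server, Node.js 18+ `fetch`, and the `hints` query parameter from the general options are the assumptions):

```javascript
// Hypothetical sketch: reuse the `hint` of a nearest result as a route hint.
async function routeWithHint() {
  const a = '13.388860,52.517037';
  const b = '13.397634,52.529407';
  const nearest = await (await fetch(
    `http://router.project-osrm.org/nearest/v1/driving/${a}`)).json();
  const hint = nearest.waypoints[0].hint; // ephemeral segment identifier
  // One hint per coordinate; the second element is left empty (default).
  const route = await (await fetch(
    `http://router.project-osrm.org/route/v1/driving/${a};${b}` +
    `?hints=${encodeURIComponent(hint)};`)).json();
  console.log(route.code);
}
```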
|
||||
|
||||
#### Example
|
||||
@ -953,14 +953,14 @@ Object used to describe waypoint on a route.
|
||||
|
||||
## Flatbuffers format
|
||||
|
||||
Default response format is `json`, but OSRM supports binary [`flatbuffers`](https://google.github.io/flatbuffers/) format, which
|
||||
The default response format is `json`, but OSRM also supports the binary [`flatbuffers`](https://google.github.io/flatbuffers/) format, which
|
||||
is much faster to serialize and deserialize than `json`.
|
||||
|
||||
The format itself is described in message descriptors, located at `include/engine/api/flatbuffers directory`. Those descriptors could
|
||||
The format itself is described in message descriptors located in the `include/engine/api/flatbuffers` directory. Those descriptors can
|
||||
be compiled into protocol parsers for Go/Javascript/Typescript/Java/Dart/C#/Python/Lobster/Lua/Rust/PHP/Kotlin. A precompiled
|
||||
protocol parser for C++ is supplied with OSRM.
|
||||
|
||||
`Flatbuffers` format provides exactly same data, as `json` format with a slightly different layout, which was optimized to minimize
|
||||
The `Flatbuffers` format provides exactly the same data as the `json` format, with a slightly different layout that was optimized to minimize
|
||||
in-transfer size.
|
||||
|
||||
### Root object
|
||||
@ -971,7 +971,7 @@ Root object is the only object, available from a 'raw' `flatbuffers` buffer. It
|
||||
|
||||
**Properties**
|
||||
|
||||
- `error`: `bool` Marks response as erroneous. Erroneus response should include `code` field set, all the other field may not present.
|
||||
- `error`: `bool` Marks the response as erroneous. An erroneous response should have the `code` field set; all other fields may be absent.
|
||||
- `code`: `Error` Error description object, only present when `error` is `true`
|
||||
- `waypoints`: `[Waypoint]` Array of `Waypoint` objects. Should be present for every service call, unless `skip_waypoints` is set to `true`. The Table service puts its `sources` array here.
|
||||
- `routes`: `[RouteObject]` Array of `RouteObject` objects. May be empty or absent. Should be present for Route/Trip/Match service calls.
|
||||
@ -988,14 +988,14 @@ Contains error information.
|
||||
|
||||
### Waypoint object
|
||||
|
||||
Almost same as `json` Waypoint object. The following properties differ:
|
||||
Almost the same as `json` Waypoint object. The following properties differ:
|
||||
|
||||
- `location`: `Position` Same as the `json` location field, but in a different format.
|
||||
- `nodes`: `Uint64Pair` Same as the `json` nodes field, but in a different format.
|
||||
|
||||
### RouteObject object
|
||||
|
||||
Almost same as `json` Route object. The following properties differ:
|
||||
Almost the same as `json` Route object. The following properties differ:
|
||||
|
||||
- `polyline`: `string` Same as `json` geometry.polyline or geometry.polyline6 fields. One field for both formats.
|
||||
- `coordinates`: `[Position]` Same as the `json` geometry.coordinates field, but in a different format.
|
||||
@ -1003,14 +1003,14 @@ Almost same as `json` Route object. The following properties differ:
|
||||
|
||||
### Leg object
|
||||
|
||||
Almost same as `json` Leg object. The following properties differ:
|
||||
Almost the same as `json` Leg object. The following properties differ:
|
||||
|
||||
- `annotations`: `Annotation` Same as the `json` annotation field, but in a different format.
|
||||
- `steps`: `[Step]` Same as the `json` steps field, but in a different format.
|
||||
|
||||
### Step object
|
||||
|
||||
Almost same as `json` Step object. The following properties differ:
|
||||
Almost the same as `json` Step object. The following properties differ:
|
||||
|
||||
- `polyline`: `string` Same as `json` geometry.polyline or geometry.polyline6 fields. One field for both formats.
|
||||
- `coordinates`: `[Position]` Same as the `json` geometry.coordinates field, but in a different format.
|
||||
@ -1018,7 +1018,7 @@ Almost same as `json` Step object. The following properties differ:
|
||||
|
||||
| `type` | Description |
|
||||
|------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `Turn` | a basic turn into direction of the `modifier` |
|
||||
| `Turn` | a basic turn into the direction of the `modifier` |
|
||||
| `NewName` | no turn is taken/possible, but the road name changes. The road can take a turn itself, following `modifier`. |
|
||||
| `Depart` | indicates the departure of the leg |
|
||||
| `Arrive` | indicates the destination of the leg |
|
||||
@ -1030,9 +1030,9 @@ Almost same as `json` Step object. The following properties differ:
|
||||
| `Continue` | Turn in direction of `modifier` to stay on the same road |
|
||||
| `Roundabout` | traverse roundabout, if the route leaves the roundabout there will be an additional property `exit` for exit counting. The modifier specifies the direction of entering the roundabout. |
|
||||
| `Rotary` | a traffic circle. While very similar to a larger version of a roundabout, it does not necessarily follow roundabout rules for right of way. It can offer `rotary_name` and/or `rotary_pronunciation` parameters (located in the RouteStep object) in addition to the `exit` parameter (located on the StepManeuver object). |
|
||||
| `RoundaboutTurn` | Describes a turn at a small roundabout that should be treated as normal turn. The `modifier` indicates the turn direciton. Example instruction: `At the roundabout turn left`. |
|
||||
| `RoundaboutTurn` | Describes a turn at a small roundabout that should be treated as a normal turn. The `modifier` indicates the turn direction. Example instruction: `At the roundabout turn left`. |
|
||||
| `Notification` | not an actual turn but a change in the driving conditions. For example the travel mode or classes. If the road takes a turn itself, the `modifier` describes the direction |
|
||||
| `ExitRoundabout` | Describes a maneuver exiting a roundabout (usually preceeded by a `roundabout` instruction) |
|
||||
| `ExitRoundabout` | Describes a maneuver exiting a roundabout (usually preceded by a `roundabout` instruction) |
|
||||
| `ExitRotary` | Describes the maneuver exiting a rotary (large named roundabout) |
|
||||
|
||||
- `driving_side`: `bool` `true` stands for left-side driving.
|
||||
@ -1040,14 +1040,14 @@ Almost same as `json` Step object. The following properties differ:
|
||||
|
||||
### Intersection object
|
||||
|
||||
Almost same as `json` Intersection object. The following properties differ:
|
||||
Almost the same as `json` Intersection object. The following properties differ:
|
||||
|
||||
- `location`: `Position` Same as `json` location property, but in different format.
|
||||
- `location`: `Position` Same as `json` location property, but in a different format.
|
||||
- `lanes`: `[Lane]` Array of `Lane` objects.
|
||||
|
||||
### Lane object
|
||||
|
||||
Almost same as `json` Lane object. The following properties differ:
|
||||
Almost the same as `json` Lane object. The following properties differ:
|
||||
|
||||
- `indications`: `Turn` Array of `Turn` enum values.
|
||||
|
||||
@ -1065,14 +1065,14 @@ Almost same as `json` Lane object. The following properties differ:
|
||||
|
||||
### StepManeuver object
|
||||
|
||||
Almost same as `json` StepManeuver object. The following properties differ:
|
||||
Almost the same as `json` StepManeuver object. The following properties differ:
|
||||
|
||||
- `location`: `Position` Same as `json` location property, but in different format.
|
||||
- `location`: `Position` Same as `json` location property, but in a different format.
|
||||
- `type`: `ManeuverType` Type of a maneuver (enum)
|
||||
|
||||
| `type` | Description |
|
||||
|------------------|--------------------------------------------------------------|
|
||||
| `Turn` | a basic turn into direction of the `modifier` |
|
||||
| `Turn` | a basic turn into the direction of the `modifier` |
|
||||
| `NewName` | no turn is taken/possible, but the road name changes. The road can take a turn itself, following `modifier`. |
|
||||
| `Depart` | indicates the departure of the leg |
|
||||
| `Arrive` | indicates the destination of the leg |
|
||||
@ -1084,16 +1084,16 @@ Almost same as `json` StepManeuver object. The following properties differ:
|
||||
| `Continue` | Turn in direction of `modifier` to stay on the same road |
|
||||
| `Roundabout` | traverse roundabout, if the route leaves the roundabout there will be an additional property `exit` for exit counting. The modifier specifies the direction of entering the roundabout. |
|
||||
| `Rotary` | a traffic circle. While very similar to a larger version of a roundabout, it does not necessarily follow roundabout rules for right of way. It can offer `rotary_name` and/or `rotary_pronunciation` parameters (located in the RouteStep object) in addition to the `exit` parameter (located on the StepManeuver object). |
|
||||
| `RoundaboutTurn` | Describes a turn at a small roundabout that should be treated as normal turn. The `modifier` indicates the turn direciton. Example instruction: `At the roundabout turn left`. |
|
||||
| `RoundaboutTurn` | Describes a turn at a small roundabout that should be treated as a normal turn. The `modifier` indicates the turn direction. Example instruction: `At the roundabout turn left`. |
|
||||
| `Notification` | not an actual turn but a change in the driving conditions. For example the travel mode or classes. If the road takes a turn itself, the `modifier` describes the direction |
|
||||
| `ExitRoundabout` | Describes a maneuver exiting a roundabout (usually preceeded by a `roundabout` instruction) |
|
||||
| `ExitRoundabout` | Describes a maneuver exiting a roundabout (usually preceded by a `roundabout` instruction) |
|
||||
| `ExitRotary` | Describes the maneuver exiting a rotary (large named roundabout) |
|
||||
|
||||
- `modifier`: `Turn` Maneuver turn (enum)
|
||||
|
||||
### Annotation object
|
||||
|
||||
Exactly same as `json` annotation object.
|
||||
Exactly the same as `json` annotation object.
|
||||
|
||||
|
||||
### Position object
|
||||
@ -1114,11 +1114,11 @@ A pair of long long integers. Used only by `Waypoint` object.
|
||||
|
||||
### Table object
|
||||
|
||||
Almost same as `json` Table object. The main difference is that 'sources' field is absent and root's object 'waypoints' field is
|
||||
Almost the same as the `json` Table object. The main difference is that the 'sources' field is absent and the root object's 'waypoints' field is
|
||||
used instead. All the other differences follow:
|
||||
|
||||
- `durations`: `[float]` Flat representation of a durations matrix. Element at row;col can be adressed as [row * cols + col]
|
||||
- `distances`: `[float]` Flat representation of a destinations matrix. Element at row;col can be adressed as [row * cols + col]
|
||||
- `durations`: `[float]` Flat representation of a durations matrix. Element at row;col can be addressed as [row * cols + col]
|
||||
- `distances`: `[float]` Flat representation of a distances matrix. Element at row;col can be addressed as [row * cols + col] (see the sketch after this list)
|
||||
- `destinations`: `[Waypoint]` Array of `Waypoint` objects. Will be `null` if `skip_waypoints` is set to `true`
|
||||
- `rows`: `ushort` Number of rows in durations/destinations matrices.
|
||||
- `cols`: `ushort` Number of cols in durations/destinations matrices.
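As noted above, the matrices are flat arrays; a tiny sketch of the index arithmetic (my illustration, not part of the original text):

```javascript
// Hypothetical helper: read one cell of the flat durations/distances arrays
// of the flatbuffers Table object, using the documented [row * cols + col] layout.
function tableCell(flat, cols, row, col) {
  return flat[row * cols + col];
}

// e.g. the duration from source 1 to destination 2:
// tableCell(table.durations, table.cols, 1, 2);
```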
|
||||
|
||||
@ -51,6 +51,7 @@ Returns the fastest route between two or more coordinates while visiting the way
|
||||
Can be `null` or an array of `[{value},{range}]` with `integer 0 .. 360,integer 0 .. 180`.
|
||||
- `options.radiuses` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Limits the coordinate snapping to streets in the given radius in meters. Can be `null` (unlimited, default) or `double >= 0`.
|
||||
- `options.hints` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Hints for the coordinate snapping. Array of base64 encoded strings.
|
||||
- `options.exclude` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** List of classes to avoid, order does not matter.
|
||||
- `options.generate_hints` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Whether or not to add a Hint to the response, which can be used in subsequent requests. (optional, default `true`)
|
||||
- `options.alternatives` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Search for alternative routes. (optional, default `false`)
|
||||
- `options.alternatives` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** Search for up to this many alternative routes.
|
||||
@ -63,7 +64,9 @@ Returns the fastest route between two or more coordinates while visiting the way
|
||||
- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
|
||||
`null`/`true`/`false`
|
||||
- `options.waypoints` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Indices to coordinates to treat as waypoints. If not supplied, all coordinates are waypoints. Must include first and last coordinate index.
|
||||
- `options.format` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which output format to use, either `json`, or [`flatbuffers`](https://github.com/Project-OSRM/osrm-backend/tree/master/include/engine/api/flatbuffers).
|
||||
- `options.snapping` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which edges can be snapped to, either `default`, or `any`. `default` only snaps to edges marked by the profile as `is_startpoint`, `any` will allow snapping to any edge in the routing graph.
|
||||
- `options.skip_waypoints` **[Boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)** Removes waypoints from the response. Waypoints are still calculated, but not serialized. Could be useful in case you are interested in some other part of the response and do not want to transfer unneeded data. (optional, default `false`)
|
||||
- `callback` **[Function](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function)**
|
||||
|
||||
**Examples**
|
||||
@ -97,6 +100,7 @@ Note: `coordinates` in the general options only supports a single `{longitude},{
|
||||
- `options.number` **[Number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** Number of nearest segments that should be returned.
|
||||
Must be an integer greater than or equal to `1`. (optional, default `1`)
|
||||
- `options.approaches` **[Array](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
|
||||
- `options.format` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which output format to use, either `json`, or [`flatbuffers`](https://github.com/Project-OSRM/osrm-backend/tree/master/include/engine/api/flatbuffers).
|
||||
- `options.snapping` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** Which edges can be snapped to, either `default`, or `any`. `default` only snaps to edges marked by the profile as `is_startpoint`, `any` will allow snapping to any edge in the routing graph.
|
||||
- `callback` **[Function](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function)**
|
||||
|
||||
@ -332,12 +336,15 @@ specific behaviours.
|
||||
|
||||
- `plugin_config` **[Object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** Object literal containing parameters for the trip query.
|
||||
- `plugin_config.format` **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** The format of the result object returned by the various API calls.
|
||||
Valid options are `object` (default), which returns a
|
||||
standard Javascript object, as described above, and `json_buffer`, which will return a NodeJS
|
||||
**[Buffer](https://nodejs.org/api/buffer.html)** object, containing a JSON string. The latter has
|
||||
the advantage that it can be immediately serialized to disk/sent over the network, and the
|
||||
generation of the string is performed outside the main NodeJS event loop. This option is ignored
|
||||
by the `tile` plugin.
|
||||
Valid options are `object` (default if `options.format` is
|
||||
`json`), which returns a standard Javascript object, as described above, and `buffer` (default if
|
||||
`options.format` is `flatbuffers`), which will return a NodeJS
|
||||
**[Buffer](https://nodejs.org/api/buffer.html)** object, containing a JSON string or Flatbuffers
|
||||
object. The latter has the advantage that it can be immediately serialized to disk/sent over the
|
||||
network, and the generation of the string is performed outside the main NodeJS event loop. This
|
||||
option is ignored by the `tile` plugin. Also note that `options.format` set to `flatbuffers`
|
||||
cannot be used with `plugin_config.format` set to `object`. `json_buffer` is a deprecated alias for
|
||||
`buffer`.
|
||||
|
||||
**Examples**
|
||||
|
||||
@ -349,7 +356,7 @@ var options = {
|
||||
[13.374481201171875, 52.506191342034576]
|
||||
]
|
||||
};
|
||||
osrm.route(options, { format: "json_buffer" }, function(err, response) {
|
||||
osrm.route(options, { format: "buffer" }, function(err, response) {
|
||||
if (err) throw err;
|
||||
console.log(response.toString("utf-8"));
|
||||
});
|
||||
|
||||
@ -1,283 +0,0 @@
|
||||
# Locate Intel Threading Building Blocks include paths and libraries
|
||||
# FindTBB.cmake can be found at https://code.google.com/p/findtbb/
|
||||
# Written by Hannes Hofmann <hannes.hofmann _at_ informatik.uni-erlangen.de>
|
||||
# Improvements by Gino van den Bergen <gino _at_ dtecta.com>,
|
||||
# Florian Uhlig <F.Uhlig _at_ gsi.de>,
|
||||
# Jiri Marsik <jiri.marsik89 _at_ gmail.com>
|
||||
|
||||
# The MIT License
|
||||
#
|
||||
# Copyright (c) 2011 Hannes Hofmann
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# GvdB: This module uses the environment variable TBB_ARCH_PLATFORM which defines architecture and compiler.
|
||||
# e.g. "ia32/vc8" or "em64t/cc4.1.0_libc2.4_kernel2.6.16.21"
|
||||
# TBB_ARCH_PLATFORM is set by the build script tbbvars[.bat|.sh|.csh], which can be found
|
||||
# in the TBB installation directory (TBB_INSTALL_DIR).
|
||||
#
|
||||
# GvdB: Mac OS X distribution places libraries directly in lib directory.
|
||||
#
|
||||
# For backwards compatibility, you may explicitely set the CMake variables TBB_ARCHITECTURE and TBB_COMPILER.
|
||||
# TBB_ARCHITECTURE [ ia32 | em64t | itanium ]
|
||||
# which architecture to use
|
||||
# TBB_COMPILER e.g. vc9 or cc3.2.3_libc2.3.2_kernel2.4.21 or cc4.0.1_os10.4.9
|
||||
# which compiler to use (detected automatically on Windows)
|
||||
|
||||
# This module respects
|
||||
# TBB_INSTALL_DIR or $ENV{TBB21_INSTALL_DIR} or $ENV{TBB_INSTALL_DIR}
|
||||
|
||||
# This module defines
|
||||
# TBB_INCLUDE_DIRS, where to find task_scheduler_init.h, etc.
|
||||
# TBB_LIBRARY_DIRS, where to find libtbb, libtbbmalloc
|
||||
# TBB_DEBUG_LIBRARY_DIRS, where to find libtbb_debug, libtbbmalloc_debug
|
||||
# TBB_INSTALL_DIR, the base TBB install directory
|
||||
# TBB_LIBRARIES, the libraries to link against to use TBB.
|
||||
# TBB_DEBUG_LIBRARIES, the libraries to link against to use TBB with debug symbols.
|
||||
# TBB_FOUND, If false, don't try to use TBB.
|
||||
# TBB_INTERFACE_VERSION, as defined in tbb/tbb_stddef.h
|
||||
|
||||
|
||||
if (WIN32)
|
||||
# has em64t/vc8 em64t/vc9
|
||||
# has ia32/vc7.1 ia32/vc8 ia32/vc9
|
||||
set(_TBB_DEFAULT_INSTALL_DIR "C:/Program Files/Intel/TBB" "C:/Program Files (x86)/Intel/TBB")
|
||||
set(_TBB_LIB_NAME "tbb")
|
||||
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||
if (MSVC71)
|
||||
set (_TBB_COMPILER "vc7.1")
|
||||
endif(MSVC71)
|
||||
if (MSVC80)
|
||||
set(_TBB_COMPILER "vc8")
|
||||
endif(MSVC80)
|
||||
if (MSVC90)
|
||||
set(_TBB_COMPILER "vc9")
|
||||
endif(MSVC90)
|
||||
if(MSVC10)
|
||||
set(_TBB_COMPILER "vc10")
|
||||
endif(MSVC10)
|
||||
# Todo: add other Windows compilers such as ICL.
|
||||
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||
endif (WIN32)
|
||||
|
||||
if (UNIX)
|
||||
if (APPLE)
|
||||
# MAC
|
||||
set(_TBB_DEFAULT_INSTALL_DIR "/Library/Frameworks/Intel_TBB.framework/Versions")
|
||||
# libs: libtbb.dylib, libtbbmalloc.dylib, *_debug
|
||||
set(_TBB_LIB_NAME "tbb")
|
||||
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||
# default flavor on apple: ia32/cc4.0.1_os10.4.9
|
||||
# Jiri: There is no reason to presume there is only one flavor and
|
||||
# that user's setting of variables should be ignored.
|
||||
if(NOT TBB_COMPILER)
|
||||
set(_TBB_COMPILER "cc4.0.1_os10.4.9")
|
||||
elseif (NOT TBB_COMPILER)
|
||||
set(_TBB_COMPILER ${TBB_COMPILER})
|
||||
endif(NOT TBB_COMPILER)
|
||||
if(NOT TBB_ARCHITECTURE)
|
||||
set(_TBB_ARCHITECTURE "ia32")
|
||||
elseif(NOT TBB_ARCHITECTURE)
|
||||
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||
endif(NOT TBB_ARCHITECTURE)
|
||||
else (APPLE)
|
||||
# LINUX
|
||||
set(_TBB_DEFAULT_INSTALL_DIR "/opt/intel/tbb" "/usr/local/include" "/usr/include")
|
||||
set(_TBB_LIB_NAME "tbb")
|
||||
set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
|
||||
set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
|
||||
set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
|
||||
# has em64t/cc3.2.3_libc2.3.2_kernel2.4.21 em64t/cc3.3.3_libc2.3.3_kernel2.6.5 em64t/cc3.4.3_libc2.3.4_kernel2.6.9 em64t/cc4.1.0_libc2.4_kernel2.6.16.21
|
||||
# has ia32/*
|
||||
# has itanium/*
|
||||
set(_TBB_COMPILER ${TBB_COMPILER})
|
||||
set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
|
||||
endif (APPLE)
|
||||
endif (UNIX)
|
||||
|
||||
if (CMAKE_SYSTEM MATCHES "SunOS.*")
|
||||
# SUN
|
||||
# not yet supported
|
||||
# has em64t/cc3.4.3_kernel5.10
|
||||
# has ia32/*
|
||||
endif (CMAKE_SYSTEM MATCHES "SunOS.*")
|
||||
|
||||
|
||||
#-- Clear the public variables
|
||||
set (TBB_FOUND "NO")
|
||||
|
||||
|
||||
#-- Find TBB install dir and set ${_TBB_INSTALL_DIR} and cached ${TBB_INSTALL_DIR}
|
||||
# first: use CMake variable TBB_INSTALL_DIR
|
||||
if (TBB_INSTALL_DIR)
|
||||
set (_TBB_INSTALL_DIR ${TBB_INSTALL_DIR})
|
||||
endif (TBB_INSTALL_DIR)
|
||||
# second: use environment variable
|
||||
if (NOT _TBB_INSTALL_DIR)
|
||||
if (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
|
||||
# Intel recommends setting TBB21_INSTALL_DIR
|
||||
if (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB21_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
|
||||
if (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB22_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
|
||||
if (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
|
||||
set (_TBB_INSTALL_DIR $ENV{TBB30_INSTALL_DIR})
|
||||
endif (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
|
||||
endif (NOT _TBB_INSTALL_DIR)
|
||||
# third: try to find path automatically
|
||||
if (NOT _TBB_INSTALL_DIR)
|
||||
if (_TBB_DEFAULT_INSTALL_DIR)
|
||||
set (_TBB_INSTALL_DIR ${_TBB_DEFAULT_INSTALL_DIR})
|
||||
endif (_TBB_DEFAULT_INSTALL_DIR)
|
||||
endif (NOT _TBB_INSTALL_DIR)
|
||||
# sanity check
|
||||
if (NOT _TBB_INSTALL_DIR)
|
||||
message ("ERROR: Unable to find Intel TBB install directory. ${_TBB_INSTALL_DIR}")
|
||||
else (NOT _TBB_INSTALL_DIR)
|
||||
# finally: set the cached CMake variable TBB_INSTALL_DIR
|
||||
if (NOT TBB_INSTALL_DIR)
|
||||
set (TBB_INSTALL_DIR ${_TBB_INSTALL_DIR} CACHE PATH "Intel TBB install directory")
|
||||
mark_as_advanced(TBB_INSTALL_DIR)
|
||||
endif (NOT TBB_INSTALL_DIR)
|
||||
|
||||
|
||||
#-- A macro to rewrite the paths of the library. This is necessary, because
|
||||
# find_library() always found the em64t/vc9 version of the TBB libs
|
||||
macro(TBB_CORRECT_LIB_DIR var_name)
|
||||
# if (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
|
||||
string(REPLACE em64t "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
|
||||
# endif (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
|
||||
string(REPLACE ia32 "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc7.1 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc8 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc9 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
string(REPLACE vc10 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
|
||||
endmacro(TBB_CORRECT_LIB_DIR var_content)
|
||||
|
||||
|
||||
#-- Look for include directory and set ${TBB_INCLUDE_DIR}
|
||||
set (TBB_INC_SEARCH_DIR ${_TBB_INSTALL_DIR}/include)
|
||||
# Jiri: tbbvars now sets the CPATH environment variable to the directory
|
||||
# containing the headers.
|
||||
find_path(TBB_INCLUDE_DIR
|
||||
tbb/task_scheduler_init.h
|
||||
PATHS ${TBB_INC_SEARCH_DIR} ENV CPATH
|
||||
)
|
||||
mark_as_advanced(TBB_INCLUDE_DIR)
|
||||
|
||||
|
||||
#-- Look for libraries
|
||||
# GvdB: $ENV{TBB_ARCH_PLATFORM} is set by the build script tbbvars[.bat|.sh|.csh]
|
||||
if (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
|
||||
set (_TBB_LIBRARY_DIR
|
||||
${_TBB_INSTALL_DIR}/lib/$ENV{TBB_ARCH_PLATFORM}
|
||||
${_TBB_INSTALL_DIR}/$ENV{TBB_ARCH_PLATFORM}/lib
|
||||
)
|
||||
endif (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
|
||||
# Jiri: This block isn't mutually exclusive with the previous one
|
||||
# (hence no else), instead I test if the user really specified
|
||||
# the variables in question.
|
||||
if ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
|
||||
# HH: deprecated
|
||||
message(STATUS "[Warning] FindTBB.cmake: The use of TBB_ARCHITECTURE and TBB_COMPILER is deprecated and may not be supported in future versions. Please set \$ENV{TBB_ARCH_PLATFORM} (using tbbvars.[bat|csh|sh]).")
|
||||
# Jiri: It doesn't hurt to look in more places, so I store the hints from
|
||||
# ENV{TBB_ARCH_PLATFORM} and the TBB_ARCHITECTURE and TBB_COMPILER
|
||||
# variables and search them both.
|
||||
set (_TBB_LIBRARY_DIR "${_TBB_INSTALL_DIR}/${_TBB_ARCHITECTURE}/${_TBB_COMPILER}/lib" ${_TBB_LIBRARY_DIR})
|
||||
endif ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
|
||||
|
||||
# GvdB: Mac OS X distribution places libraries directly in lib directory.
|
||||
list(APPEND _TBB_LIBRARY_DIR ${_TBB_INSTALL_DIR}/lib)
|
||||
|
||||
# Jiri: No reason not to check the default paths. From recent versions,
|
||||
# tbbvars has started exporting the LIBRARY_PATH and LD_LIBRARY_PATH
|
||||
# variables, which now point to the directories of the lib files.
|
||||
# It all makes more sense to use the ${_TBB_LIBRARY_DIR} as a HINTS
|
||||
# argument instead of the implicit PATHS as it isn't hard-coded
|
||||
# but computed by system introspection. Searching the LIBRARY_PATH
|
||||
# and LD_LIBRARY_PATH environment variables is now even more important
|
||||
# that tbbvars doesn't export TBB_ARCH_PLATFORM and it facilitates
|
||||
# the use of TBB built from sources.
|
||||
find_library(TBB_LIBRARY ${_TBB_LIB_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||
find_library(TBB_MALLOC_LIBRARY ${_TBB_LIB_MALLOC_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||
|
||||
#Extract path from TBB_LIBRARY name
|
||||
get_filename_component(TBB_LIBRARY_DIR ${TBB_LIBRARY} PATH)
|
||||
|
||||
#TBB_CORRECT_LIB_DIR(TBB_LIBRARY)
|
||||
#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY)
|
||||
mark_as_advanced(TBB_LIBRARY TBB_MALLOC_LIBRARY)
|
||||
|
||||
#-- Look for debug libraries
|
||||
# Jiri: Changed the same way as for the release libraries.
|
||||
find_library(TBB_LIBRARY_DEBUG ${_TBB_LIB_DEBUG_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||
find_library(TBB_MALLOC_LIBRARY_DEBUG ${_TBB_LIB_MALLOC_DEBUG_NAME} HINTS ${_TBB_LIBRARY_DIR}
|
||||
PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
|
||||
|
||||
# Jiri: Self-built TBB stores the debug libraries in a separate directory.
|
||||
# Extract path from TBB_LIBRARY_DEBUG name
|
||||
get_filename_component(TBB_LIBRARY_DEBUG_DIR ${TBB_LIBRARY_DEBUG} PATH)
|
||||
|
||||
#TBB_CORRECT_LIB_DIR(TBB_LIBRARY_DEBUG)
|
||||
#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY_DEBUG)
|
||||
mark_as_advanced(TBB_LIBRARY_DEBUG TBB_MALLOC_LIBRARY_DEBUG)
|
||||
|
||||
|
||||
if (TBB_INCLUDE_DIR)
|
||||
if (TBB_LIBRARY)
|
||||
set (TBB_FOUND "YES")
|
||||
set (TBB_LIBRARIES ${TBB_LIBRARY} ${TBB_MALLOC_LIBRARY} ${TBB_LIBRARIES})
|
||||
set (TBB_DEBUG_LIBRARIES ${TBB_LIBRARY_DEBUG} ${TBB_MALLOC_LIBRARY_DEBUG} ${TBB_DEBUG_LIBRARIES})
|
||||
set (TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR} CACHE PATH "TBB include directory" FORCE)
|
||||
set (TBB_LIBRARY_DIRS ${TBB_LIBRARY_DIR} CACHE PATH "TBB library directory" FORCE)
|
||||
# Jiri: Self-built TBB stores the debug libraries in a separate directory.
|
||||
set (TBB_DEBUG_LIBRARY_DIRS ${TBB_LIBRARY_DEBUG_DIR} CACHE PATH "TBB debug library directory" FORCE)
|
||||
mark_as_advanced(TBB_INCLUDE_DIRS TBB_LIBRARY_DIRS TBB_DEBUG_LIBRARY_DIRS TBB_LIBRARIES TBB_DEBUG_LIBRARIES)
|
||||
message(STATUS "Found Intel TBB")
|
||||
endif (TBB_LIBRARY)
|
||||
endif (TBB_INCLUDE_DIR)
|
||||
|
||||
if (NOT TBB_FOUND)
|
||||
message("ERROR: Intel TBB NOT found!")
|
||||
message(STATUS "Looked for Threading Building Blocks in ${_TBB_INSTALL_DIR}")
|
||||
# do only throw fatal, if this pkg is REQUIRED
|
||||
if (TBB_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "Could NOT find TBB library.")
|
||||
endif (TBB_FIND_REQUIRED)
|
||||
endif (NOT TBB_FOUND)
|
||||
|
||||
endif (NOT _TBB_INSTALL_DIR)
|
||||
|
||||
if (TBB_FOUND)
|
||||
set(TBB_INTERFACE_VERSION 0)
|
||||
FILE(READ "${TBB_INCLUDE_DIRS}/tbb/tbb_stddef.h" _TBB_VERSION_CONTENTS)
|
||||
STRING(REGEX REPLACE ".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1" TBB_INTERFACE_VERSION "${_TBB_VERSION_CONTENTS}")
|
||||
set(TBB_INTERFACE_VERSION "${TBB_INTERFACE_VERSION}")
|
||||
endif (TBB_FOUND)
|
||||
1
example/cmake/FindTBB.cmake
Symbolic link
@ -0,0 +1 @@
|
||||
../../cmake/FindTBB.cmake
|
||||
@ -28,7 +28,7 @@ Feature: Bicycle - Route around alleys
|
||||
|
||||
When I route I should get
|
||||
| from | to | a:nodes | weight | # |
|
||||
| a | f | 1:2:3:6 | 200.4 | Avoids d,e,f |
|
||||
| a | e | 1:2:5 | 176.4 | Take the alley b,e if neccessary |
|
||||
| d | f | 4:1:2:3:6 | 252.6 | Avoids the alley d,e,f |
|
||||
| a | f | 1:2:3:6 | 196.2 | Avoids d,e,f |
|
||||
| a | e | 1:2:5 | 172.2 | Take the alley b,e if necessary |
|
||||
| d | f | 4:1:2:3:6 | 248.4 | Avoids the alley d,e,f |
|
||||
|
||||
|
||||
@ -8,7 +8,7 @@ Feature: Car - Destination only, no passing through
|
||||
Given the node map
|
||||
"""
|
||||
a e
|
||||
b c d
|
||||
b1 c 2d
|
||||
|
||||
x y
|
||||
"""
|
||||
@ -23,19 +23,19 @@ Feature: Car - Destination only, no passing through
|
||||
When I route I should get
|
||||
| from | to | route |
|
||||
| a | b | ab,ab |
|
||||
| a | c | ab,bcd |
|
||||
| a | d | ab,bcd,bcd |
|
||||
| a | c | ab,bcd,bcd |
|
||||
| a | 2 | ab,bcd,bcd |
|
||||
| a | e | axye,axye |
|
||||
| e | d | de,de |
|
||||
| e | c | de,bcd |
|
||||
| e | b | de,bcd,bcd |
|
||||
| e | c | de,bcd,bcd |
|
||||
| e | 1 | de,bcd,bcd |
|
||||
| e | a | axye,axye |
|
||||
|
||||
Scenario: Car - Destination only street
|
||||
Given the node map
|
||||
"""
|
||||
a e
|
||||
b c d
|
||||
b1 c 2d
|
||||
|
||||
x y
|
||||
"""
|
||||
@ -51,12 +51,12 @@ Feature: Car - Destination only, no passing through
|
||||
When I route I should get
|
||||
| from | to | route |
|
||||
| a | b | ab,ab |
|
||||
| a | c | ab,bc |
|
||||
| a | d | ab,cd |
|
||||
| a | c | ab,bc,bc |
|
||||
| a | 2 | ab,bc,cd |
|
||||
| a | e | axye,axye |
|
||||
| e | d | de,de |
|
||||
| e | c | de,cd |
|
||||
| e | b | de,bc |
|
||||
| e | c | de,cd,cd |
|
||||
| e | 1 | de,cd,bc |
|
||||
| e | a | axye,axye |
|
||||
|
||||
Scenario: Car - Routing inside a destination only area
|
||||
@ -117,6 +117,7 @@ Feature: Car - Destination only, no passing through
|
||||
+ \
|
||||
+ |
|
||||
d |
|
||||
1 |
|
||||
\___e
|
||||
"""
|
||||
|
||||
@ -129,7 +130,7 @@ Feature: Car - Destination only, no passing through
|
||||
When I route I should get
|
||||
| from | to | route |
|
||||
| e | a | acbe,acbe |
|
||||
| d | a | de,acbe,acbe |
|
||||
| 1 | a | de,acbe,acbe |
|
||||
| c | d | cd,cd |
|
||||
|
||||
Scenario: Car - Routing through a parking lot tagged access=destination,service
|
||||
|
||||
@ -1031,3 +1031,61 @@ Feature: Car - Multiple Via Turn restrictions
|
||||
| from | to | route | locations |
|
||||
| a | f | ab,bc,cd,de,ef,ef | a,b,c,d,e,f |
|
||||
|
||||
|
||||
@restriction-way
|
||||
Scenario: Snap source/target to via restriction way
|
||||
Given the node map
|
||||
"""
|
||||
a-1-b-2-c-3-d
|
||||
"""
|
||||
|
||||
And the ways
|
||||
| nodes |
|
||||
| ab |
|
||||
| bc |
|
||||
| cd |
|
||||
|
||||
And the relations
|
||||
| type | way:from | way:via | way:to | restriction |
|
||||
| restriction | ab | bc | cd | no_straight_on |
|
||||
|
||||
When I route I should get
|
||||
| from | to | route |
|
||||
| 1 | 2 | ab,bc,bc |
|
||||
| 2 | 3 | bc,cd,cd |
|
||||
|
||||
|
||||
@restriction-way
|
||||
Scenario: Car - Snap source/target to multi-via restriction way
|
||||
# Example: https://www.openstreetmap.org/relation/11787041
|
||||
Given the node map
|
||||
"""
|
||||
|--g---f---e
|
||||
a | 1
|
||||
|--b---c---d
|
||||
|
||||
"""
|
||||
|
||||
And the nodes
|
||||
| node | highway |
|
||||
| b | traffic_signals |
|
||||
|
||||
And the ways
|
||||
| nodes | oneway | name |
|
||||
| ab | yes | enter |
|
||||
| bc | yes | enter |
|
||||
| cd | yes | right |
|
||||
| de | yes | up |
|
||||
| ef | yes | left |
|
||||
| fc | yes | down |
|
||||
| fg | yes | exit |
|
||||
| ga | yes | exit |
|
||||
|
||||
And the relations
|
||||
| type | way:from | way:via | way:to | restriction |
|
||||
| restriction | bc | cd,de,ef | fg | no_u_turn |
|
||||
|
||||
When I route I should get
|
||||
| from | to | route | locations |
|
||||
| a | 1 | enter,right,up,up | a,c,d,_ |
|
||||
| 1 | a | up,left,exit,exit | _,e,f,a |
|
||||
|
||||
@ -411,7 +411,7 @@ Feature: Car - Turn restrictions
|
||||
y
|
||||
i j f b x a e g h
|
||||
|
||||
c d
|
||||
c1 d
|
||||
"""
|
||||
|
||||
And the ways
|
||||
@ -438,7 +438,7 @@ Feature: Car - Turn restrictions
|
||||
When I route I should get
|
||||
| from | to | route |
|
||||
| e | f | ae,xa,bx,fb,fb |
|
||||
| c | f | dc,da,ae,ge,hg,hg,ge,ae,xa,bx,fb,fb |
|
||||
| 1 | f | dc,da,ae,ge,hg,hg,ge,ae,xa,bx,fb,fb |
|
||||
| d | f | da,ae,ge,hg,hg,ge,ae,xa,bx,fb,fb |
|
||||
|
||||
@except
|
||||
|
||||
@ -332,10 +332,11 @@ Feature: Merge Segregated Roads
|
||||
|
|
||||
.b.
|
||||
c h
|
||||
1 |
|
||||
| 4
|
||||
| |
|
||||
| |
|
||||
1 2
|
||||
| |
|
||||
2 |
|
||||
| 3
|
||||
d g
|
||||
'e'
|
||||
|
|
||||
@ -356,11 +357,11 @@ Feature: Merge Segregated Roads
|
||||
When I route I should get
|
||||
| waypoints | turns | route | intersections |
|
||||
| a,f | depart,arrive | road,road | true:180,false:0 true:180,false:0 true:180;true:0 |
|
||||
| c,f | depart,arrive | bridge,road | true:180,false:0 true:180;true:0 |
|
||||
| 1,f | depart,arrive | bridge,road | true:180,false:0 true:180;true:0 |
|
||||
| 2,f | depart,arrive | bridge,road | true:180,false:0 true:180;true:0 |
|
||||
| f,a | depart,arrive | road,road | true:0,true:0 false:180,true:0 false:180;true:180 |
|
||||
| g,a | depart,arrive | bridge,road | true:0,true:0 false:180;true:180 |
|
||||
| 2,a | depart,arrive | bridge,road | true:0,true:0 false:180;true:180 |
|
||||
| 3,a | depart,arrive | bridge,road | true:0,true:0 false:180;true:180 |
|
||||
| 4,a | depart,arrive | bridge,road | true:0,true:0 false:180;true:180 |
|
||||
|
||||
@negative
|
||||
Scenario: Traffic Circle
|
||||
|
||||
@ -67,10 +67,10 @@ Feature: Compass bearing
|
||||
Scenario: Bearing in a roundabout
|
||||
Given the node map
|
||||
"""
|
||||
k d c j
|
||||
k d 1c j
|
||||
e b
|
||||
f a
|
||||
l g h i
|
||||
l g2 h i
|
||||
"""
|
||||
|
||||
And the ways
|
||||
@ -94,8 +94,8 @@ Feature: Compass bearing
|
||||
|
||||
When I route I should get
|
||||
| from | to | route | bearing |
|
||||
| c | b | cd,de,ef,fg,gh,ha,ab,ab | 0->270,270->225,225->180,180->135,135->90,90->45,45->0,0->0 |
|
||||
| g | f | gh,ha,ab,bc,cd,de,ef,ef | 0->90,90->45,45->0,0->315,315->270,270->225,225->180,180->0 |
|
||||
| 1 | b | cd,de,ef,fg,gh,ha,ab,ab | 0->270,270->225,225->180,180->135,135->90,90->45,45->0,0->0 |
|
||||
| 2 | f | gh,ha,ab,bc,cd,de,ef,ef | 0->90,90->45,45->0,0->315,315->270,270->225,225->180,180->0 |
|
||||
|
||||
Scenario: Bearing should stay constant when zig-zagging
|
||||
Given the node map
|
||||
|
||||
@ -589,12 +589,12 @@ Feature: Basic Distance Matrix

When I request a travel distance matrix I should get
| | a | b | c | d | e | f |
| a | 0 | 100 | 300 | 650 | 1934.5 | 1534.6 |
| b | 760.6 | 0 | 200 | 550.1 | 1834.6 | 1434.6 |
| c | 560.6 | 660.5 | 0 | 350 | 1634.6 | 1234.6 |
| d | 1484.6 | 1584.5 | 1784.5 | 0 | 1284.5 | 884.6 |
| e | 200 | 300 | 500 | 710.6 | 0 | 1595.2 |
| f | 600 | 699.9 | 899.9 | 1110.5 | 399.9 | 0 |
| a | 0 | 100 | 300 | 650 | 660.5 | 1534.6 |
| b | 760.6 | 0 | 200 | 550.1 | 560.6 | 1434.6 |
| c | 560.6 | 660.5 | 0 | 350 | 360.5 | 1234.6 |
| d | 1484.6 | 1584.5 | 1645.1 | 0 | 1284.5 | 884.6 |
| e | 200 | 300 | 360.5 | 710.6 | 0 | 1595.2 |
| f | 600 | 699.9 | 760.5 | 884.6 | 399.9 | 0 |


Scenario: Testbot - Filling in noroutes with estimates (defaults to input coordinate location)
@ -727,4 +727,3 @@ Feature: Basic Distance Matrix
| 1 | 0 | 1000.1 | 1400.1 |
| 2 | 1000.1 | 0 | 400 |
| 3 | 1400.1 | 400 | 0 |


@ -491,18 +491,18 @@ Feature: Basic Duration Matrix

When I route I should get
| from | to | route | distance | time | weight |
| a | c | ac,ac | 200m | 5s | 5 |
| a | c | ac,ac | 200m | 1s | 1 |

When I request a travel time matrix I should get
| | a | b | c | d |
| a | 0 | 1 | 5 | 10 |
| a | 0 | 1 | 1 | 6 |

When I request a travel time matrix I should get
| | a |
| a | 0 |
| b | 1 |
| c | 15 |
| d | 10 |
| c | 1 |
| d | 6 |

Scenario: Testbot - OneToMany vs ManyToOne
Given the node map

629
features/testbot/snap_intersection.feature
Normal file
@ -0,0 +1,629 @@
Feature: Snapping at intersections

Background:
# Use turnbot so that we can validate when we are
# snapping to one of many potential candidate ways
Given the profile "turnbot"

# https://github.com/Project-OSRM/osrm-backend/issues/4465
Scenario: Snapping source to intersection with one-way roads
Given the node map
"""
a e c
\ | /
d

1
"""

And the ways
| nodes | oneway |
| da | yes |
| dc | yes |
| de | yes |


When I route I should get
| from | to | route | time |
| 1 | e | de,de | 20s |
| 1 | a | da,da | 28.3s |
| 1 | c | dc,dc | 28.3s |

When I request a travel time matrix I should get
| | a | c | e |
| 1 | 28.3 | 28.3 | 20 |


Scenario: Snapping destination to intersection with one-way roads
Given the node map
"""
a e c
\ | /
d

1
"""

And the ways
| nodes | oneway |
| da | -1 |
| dc | -1 |
| de | -1 |


When I route I should get
| from | to | route | time |
| e | 1 | de,de | 20s |
| a | 1 | da,da | 28.3s |
| c | 1 | dc,dc | 28.3s |

When I request a travel time matrix I should get
| | 1 |
| a | 28.3 |
| c | 28.3 |
| e | 20 |


Scenario: Snapping to intersection with bi-directional roads
Given the node map
"""
a e
| /
d---c

1
"""

And the ways
| nodes |
| ad |
| ed |
| dc |

When I route I should get
| from | to | route | time | weight |
| 1 | c | dc,dc | 20s | 20 |
| 1 | a | ad,ad | 20s | 20 |
| 1 | e | ed,ed | 28.3s | 28.3 |
| c | 1 | dc,dc | 20s | 20 |
| a | 1 | ad,ad | 20s | 20 |
| e | 1 | ed,ed | 28.3s | 28.3 |

When I request a travel time matrix I should get
| | a | c | e |
| 1 | 20 | 20 | 28.3 |

When I request a travel time matrix I should get
| | 1 |
| a | 20 |
| c | 20 |
| e | 28.3 |


Scenario: Snapping at compressible node
Given the node map
"""
a---b---c
"""

And the ways
| nodes |
| abc |

When I route I should get
| from | to | route | time | weight |
| b | c | abc,abc | 20s | 20 |
| b | a | abc,abc | 20s | 20 |
| a | b | abc,abc | 20s | 20 |
| c | b | abc,abc | 20s | 20 |


Scenario: Snapping at compressible node with traffic lights
Given the node map
"""
a---b---c
"""

And the ways
| nodes |
| abc |

# Turnbot will use the turn penalty instead of traffic penalty.
# We do this to induce a penalty between two edges of the same
# segment.
And the nodes
| node | highway |
| b | traffic_signals |

# Snaps to first edge in forward direction
When I route I should get
| from | to | route | time | weight |
| b | c | abc,abc | 40s | 40 |
| b | a | abc,abc | 20s | 20 |
| a | b | abc,abc | 20s | 20 |
| c | b | abc,abc | 40s | 40 |


Scenario: Snapping at compressible node traffic lights, one-way
Given the node map
"""
a-->b-->c
"""

And the ways
| nodes | oneway |
| abc | yes |

# Turnbot will use the turn penalty instead of traffic penalty.
# We do this to induce a penalty between two edges of the same
# segment.
And the nodes
| node | highway |
| b | traffic_signals |


# Snaps to first edge in forward direction
When I route I should get
| from | to | route | time | weight |
| b | c | abc,abc | 40s | 40 |
| a | b | abc,abc | 20s | 20 |

When I route I should get
| from | to | code |
| b | a | NoRoute |
| c | b | NoRoute |


Scenario: Snapping at compressible node traffic lights, reverse one-way
Given the node map
"""
a<--b<--c
"""

And the ways
| nodes | oneway |
| abc | -1 |

# Turnbot will use the turn penalty instead of traffic penalty.
# We do this to induce a penalty between two edges of the same
# segment.
And the nodes
| node | highway |
| b | traffic_signals |


# Snaps to first edge in forward direction - as this is one-way,
# the forward direction has changed.
When I route I should get
| from | to | route | time | weight |
| b | a | abc,abc | 40s | 40 |
| c | b | abc,abc | 20s | 20 |

When I route I should get
| from | to | code |
| b | c | NoRoute |
| a | b | NoRoute |


Scenario: Snapping at traffic lights, reverse disabled
Given the node map
"""
a-->b-->c
"""

And the ways
| nodes |
| abc |

And the contract extra arguments "--segment-speed-file {speeds_file}"
And the customize extra arguments "--segment-speed-file {speeds_file}"
And the speed file
"""
2,1,0
3,2,0
"""

# Turnbot will use the turn penalty instead of traffic penalty.
# We do this to induce a penalty between two edges of the same
# segment.
And the nodes
| node | highway |
| b | traffic_signals |

# Snaps to first edge in forward direction.
When I route I should get
| from | to | route | time | weight |
| b | c | abc,abc | 40s | 40 |
| a | b | abc,abc | 20s | 20 |

When I route I should get
| from | to | code |
| b | a | NoRoute |
| c | b | NoRoute |


Scenario: Snapping at traffic lights, forward disabled
Given the node map
"""
a<--b<--c
"""

And the ways
| nodes |
| abc |

And the contract extra arguments "--segment-speed-file {speeds_file}"
And the customize extra arguments "--segment-speed-file {speeds_file}"
And the speed file
"""
1,2,0
2,3,0
"""

# Turnbot will use the turn penalty instead of traffic penalty.
# We do this to induce a penalty between two edges of the same
# segment.
And the nodes
| node | highway |
| b | traffic_signals |

# Forward direction is disabled, still snaps to first edge in forward direction
When I route I should get
| from | to | route | time | weight |
| b | a | abc,abc | 20s | 20 |
| c | b | abc,abc | 40s | 40 |

When I route I should get
| from | to | code |
| b | c | NoRoute |
| a | b | NoRoute |


Scenario: Snap to target node with next section of segment blocked
Given the node map
"""
a-->b---c---d<--e
"""

And the ways
| nodes |
| abc |
| cde |

And the contract extra arguments "--segment-speed-file {speeds_file}"
And the customize extra arguments "--segment-speed-file {speeds_file}"
And the speed file
"""
2,1,0
4,5,0
"""

When I route I should get
| from | to | route | time | weight |
| a | d | abc,cde,cde | 60s | 60 |
| e | b | cde,abc,abc | 60s | 60 |


When I route I should get
| from | to | code |
| a | e | NoRoute |
| e | a | NoRoute |


Scenario: Snapping to source node with previous section of segment blocked
Given the node map
"""
a<--b---c---d-->e
"""

And the ways
| nodes |
| abc |
| cde |

And the contract extra arguments "--segment-speed-file {speeds_file}"
And the customize extra arguments "--segment-speed-file {speeds_file}"
And the speed file
"""
1,2,0
5,4,0
"""

When I route I should get
| from | to | code |
| a | e | NoRoute |
| b | e | NoRoute |
| e | a | NoRoute |
| d | a | NoRoute |


Scenario: Only snaps to one of many equidistant nearest locations
Given the node map
"""
b-------c
| |
| |
a 1 d
"""

And the ways
| nodes |
| ab |
| bc |
| cd |

When I route I should get
| from | to | route | time | weight |
| 1 | b | ab,ab | 30s | 30 |
| 1 | c | ab,bc,bc | 80s +-1 | 80 +-1 |


Scenario: Snaps to alternative big SCC candidate if nearest candidates are not strongly connected
Given the node map
"""
1
g---h---i
a-----b-----c
|
f-----e-----d
j---k---l
2
"""

Given the extract extra arguments "--small-component-size=4"

And the ways
| nodes |
| abc |
| cd |
| fed |
| ghi |
| jkl |

# As forward direction is disabled...
When I route I should get
| from | to | route | time | weight | locations |
| 1 | 2 | abc,cd,fed,fed | 100s +-1 | 100 +-1 | b,c,d,e |


Scenario: Can use big or small SCC nearest candidates if at same location
Given the node map
"""
1
a-----b-----c
| |
g |
|
f-----e-----d

"""

Given the extract extra arguments "--small-component-size=4"

And the ways
| nodes | oneway |
| ab | no |
| bc | no |
| cd | no |
| fed | no |
| bg | yes | # small SCC

And the relations
| type | way:from | way:to | node:via | restriction |
| restriction | ab | bg | b | no_right_turn |
| restriction | bc | bg | b | no_left_turn |

When I route I should get
| from | to | route | time | weight | locations |
| 1 | g | bg,bg | 20s | 20 | b,g |
| 1 | e | bc,cd,fed,fed | 120s +-1 | 120 +-1 | b,c,d,e |


Scenario: Using small SCC candidates when at same location as big SCC alternatives is not supported
Given the node map
"""
1
g---h---i
a-----b-----c
| |
| |
m |
f-----e-----d
j---k---l
2

"""

Given the extract extra arguments "--small-component-size=4"

And the ways
| nodes | oneway |
| ab | no |
| bc | no |
| cd | no |
| fed | no |
| ghi | no |
| jkl | no |
| bm | yes | # small SCC

And the relations
| type | way:from | way:to | node:via | restriction |
| restriction | ab | bm | b | no_right_turn |
| restriction | bc | bm | b | no_left_turn |

When I route I should get
| from | to | route | time | weight | locations |
| 1 | 2 | bc,cd,fed,fed | 120s +-1 | 120 +-1 | b,c,d,e |
| 1 | m | bc,cd,fed,fed | 120s +-1 | 120 +-1 | b,c,d,e |


Scenario: Shortest via path with continuation, simple loop
Given the node map
"""
a---b
"""

And the ways
| nodes |
| ab |

When I route I should get
| waypoints | route | time | weight |
| a,b,a | ab,ab,ab,ab | 60s | 60 |


Scenario: Shortest via path with uturns, simple loop
Given the node map
"""
a---b
"""

Given the query options
| continue_straight | false |

And the ways
| nodes |
| ab |

# Does not pay the cost of the turn
When I route I should get
| waypoints | route | time | weight |
| a,b,a | ab,ab,ab,ab | 40s | 40 |


Scenario: Shortest path with multiple endpoint snapping candidates
Given the node map
"""
b

c

a d f

e
"""

And the ways
| nodes | oneway |
| ab | no |
| ac | no |
| ad | no |
| ae | no |
| bf | no |
| cf | yes |
| df | yes |
| ef | no |


When I route I should get
| from | to | route | time | weight |
| a | f | ad,df,df | 40s | 40 |
| f | a | ef,ae,ae | 66.6s | 66.6 |

When I request a travel time matrix I should get
| | a | f |
| a | 0 | 40 |
| f | 66.6 | 0 |


Scenario: Shortest via path with continuation, multiple waypoint snapping candidates
Given the node map
"""
b g

c h

a d f i
k
e j
"""

And the ways
| nodes | oneway |
| ab | no |
| ac | no |
| ad | no |
| ae | no |
| bf | no |
| cf | yes |
| df | yes |
| ef | no |
| fg | no |
| fh | -1 |
| fi | -1 |
| fj | no |
| gk | no |
| hk | no |
| ik | no |
| kj | no |

And the relations
| type | way:from | way:to | node:via | restriction |
| restriction | df | fg | f | only_left_turn |
| restriction | fi | bf | f | only_right_turn |

# Longer routes can take different paths from sub-routes
When I route I should get
| waypoints | route | time | weight |
| a,f | ad,df,df | 40s | 40 |
| f,k | fj,kj,kj | 65.6s | 65.6 |
| a,f,k | ac,cf,cf,fj,kj,kj | 132.8s | 132.8 |
| k,f | ik,fi,fi | 54.3s | 54.3 |
| f,a | ef,ae,ae | 66.6s | 66.6 |
| k,f,a | kj,fj,fj,ef,ae,ae | 141.4s | 141.4 |

When I request a travel time matrix I should get
| | a | f | k |
| a | 0 | 40 | 132.8 |
| f | 66.6 | 0 | 65.6 |
| k | 141.4 | 54.3 | 0 |


Scenario: Shortest via path with uturns, multiple waypoint snapping candidates
Given the node map
"""
b g

c h

a d f i
k
e j
"""

Given the query options
| continue_straight | false |

And the ways
| nodes | oneway |
| ab | no |
| ac | no |
| ad | no |
| ae | no |
| bf | no |
| cf | yes |
| df | yes |
| ef | no |
| fg | no |
| fh | -1 |
| fi | -1 |
| fj | no |
| gk | no |
| hk | no |
| ik | no |
| kj | no |

And the relations
| type | way:from | way:to | node:via | restriction |
| restriction | df | fg | f | only_left_turn |
| restriction | fi | bf | f | only_right_turn |

# Longer routes use same path as sub-routes
When I route I should get
| waypoints | route | time | weight |
| a,f | ad,df,df | 40s | 40 |
| f,k | fj,kj,kj | 65.6s | 65.6 |
| a,f,k | ad,df,df,fj,kj,kj | 105.6s | 105.6 |
| k,f | ik,fi,fi | 54.3s | 54.3 |
| f,a | ef,ae,ae | 66.6s | 66.6 |
| k,f,a | ik,fi,fi,ef,ae,ae | 120.9s | 120.9 |
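
Several scenarios in the new feature file above feed "--segment-speed-file {speeds_file}" rows such as 2,1,0, where a speed of 0 closes one direction of a segment. The snippet below is only a sketch of reading that kind of input; the assumed row layout from_node,to_node,speed is taken from the scenarios themselves, and the SpeedUpdate type and file name are invented for illustration rather than taken from OSRM's sources.

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical record for one row of a segment speed file.
struct SpeedUpdate
{
    unsigned long long from;
    unsigned long long to;
    double speed_kph; // 0 means the edge from -> to is treated as unroutable
};

// Minimal reader for "from,to,speed" rows; illustration only, not OSRM's parser.
std::vector<SpeedUpdate> ReadSpeedFile(const std::string &path)
{
    std::vector<SpeedUpdate> updates;
    std::ifstream input(path);
    std::string line;
    while (std::getline(input, line))
    {
        std::istringstream row(line);
        SpeedUpdate update{};
        char comma = 0;
        if (row >> update.from >> comma >> update.to >> comma >> update.speed_kph)
            updates.push_back(update);
    }
    return updates;
}

int main()
{
    for (const auto &update : ReadSpeedFile("speeds.csv"))
    {
        std::cout << update.from << " -> " << update.to
                  << (update.speed_kph == 0 ? " blocked" : " updated") << '\n';
    }
}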
@ -47,7 +47,7 @@ Feature: Traffic - turn penalties applied to turn onto which a phantom node snap
| 1 | e | ab,be,be | 36 km/h | 30s +-1 |
| b | f | bc,cf,cf | 36 km/h | 40s +-1 |
| 2 | f | bc,cf,cf | 36 km/h | 30s +-1 |
| c | g | cd,dg,dg | 144 km/h | 10s +-1 |
| c | g | cd,dg,dg | 72 km/h | 20s +-1 |
| 3 | g | cd,dg,dg | 54 km/h | 20s +-1 |

Scenario: Weighting based on turn penalty file with weights
@ -65,5 +65,5 @@ Feature: Traffic - turn penalties applied to turn onto which a phantom node snap
| 1 | e | ab,be,be | 36 km/h | 30s +-1 | 6.8,20,0 |
| b | f | bc,cf,cf | 36 km/h | 40s +-1 | 20,20,0 |
| 2 | f | bc,cf,cf | 36 km/h | 30s +-1 | 10.1,20,0 |
| c | g | cd,dg,dg | 144 km/h | 10s +-1 | 120.8,20,0 |
| c | g | cd,dg,dg | 72 km/h | 20s +-1 | 120.8,20,0 |
| 3 | g | cd,dg,dg | 54 km/h | 20s +-1 | 110.9,20,0 |

@ -281,7 +281,7 @@ Feature: Weight tests

When I route I should get
| waypoints | route | distance | weights | times |
| a,c | , | 40m +-.1 | 5.12,0 | 290s,0s |
| a,c | , | 40m +-.1 | 2.22,0 | 200s,0s |
| a,e | ,, | 60m +-.1 | 5.12,1.11,0 | 290s,100s,0s |
| e,a | ,, | 60m +-.1 | 2.21,2.22,0 | 10s,200s,0s |
| e,d | ,, | 40m +-.1 | 4.01,1.11,0 | 190s,100s,0s |

@ -9,9 +9,12 @@
#include "engine/hint.hpp"
#include "util/coordinate_calculation.hpp"

#include <boost/algorithm/string/join.hpp>
#include <boost/assert.hpp>
#include <boost/range/adaptor/transformed.hpp>
#include <boost/range/algorithm/transform.hpp>

#include <boost/range/adaptor/filtered.hpp>
#include <memory>
#include <vector>

@ -22,6 +25,8 @@ namespace engine
namespace api
{

static const constexpr char *INTERSECTION_DELIMITER = " / ";

class BaseAPI
{
public:
@ -30,92 +35,129 @@ class BaseAPI
{
}

util::json::Array MakeWaypoints(const std::vector<PhantomNodes> &segment_end_coordinates) const
util::json::Array
MakeWaypoints(const std::vector<PhantomNodeCandidates> &waypoint_candidates) const
{
BOOST_ASSERT(parameters.coordinates.size() > 0);
BOOST_ASSERT(parameters.coordinates.size() == segment_end_coordinates.size() + 1);
BOOST_ASSERT(parameters.coordinates.size() == waypoint_candidates.size());

util::json::Array waypoints;
waypoints.values.resize(parameters.coordinates.size());
waypoints.values[0] = MakeWaypoint(segment_end_coordinates.front().source_phantom);

auto out_iter = std::next(waypoints.values.begin());
boost::range::transform(
segment_end_coordinates, out_iter, [this](const PhantomNodes &phantom_pair) {
return MakeWaypoint(phantom_pair.target_phantom);
});
waypoint_candidates,
waypoints.values.begin(),
[this](const PhantomNodeCandidates &candidates) { return MakeWaypoint(candidates); });
return waypoints;
}

// FIXME: gcc 4.9 does not like MakeWaypoints to be protected
// protected:
util::json::Object MakeWaypoint(const PhantomNode &phantom) const
{
if (parameters.generate_hints)
util::json::Object MakeWaypoint(const PhantomNodeCandidates &candidates) const
{
// TODO: check forward/reverse
const auto toName = [this](const auto &phantom) {
return facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id))
.to_string();
};
const auto noEmpty = [](const auto &name) { return !name.empty(); };

// At an intersection we may have multiple phantom node candidates.
// Combine them to represent the waypoint name.
std::string waypoint_name = boost::algorithm::join(
candidates | boost::adaptors::transformed(toName) | boost::adaptors::filtered(noEmpty),
INTERSECTION_DELIMITER);

const auto &snapped_location = candidatesSnappedLocation(candidates);
const auto &input_location = candidatesInputLocation(candidates);
if (parameters.generate_hints)
{
std::vector<SegmentHint> seg_hints(candidates.size());
std::transform(candidates.begin(),
candidates.end(),
seg_hints.begin(),
[this](const auto &phantom) {
return SegmentHint{phantom, facade.GetCheckSum()};
});

return json::makeWaypoint(
phantom.location,
util::coordinate_calculation::greatCircleDistance(phantom.location,
phantom.input_location),
facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id)).to_string(),
Hint{phantom, facade.GetCheckSum()});
snapped_location,
util::coordinate_calculation::greatCircleDistance(snapped_location, input_location),
waypoint_name,
{std::move(seg_hints)});
}
else
{
// TODO: check forward/reverse
return json::makeWaypoint(
phantom.location,
util::coordinate_calculation::greatCircleDistance(phantom.location,
phantom.input_location),
facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id))
.to_string());
snapped_location,
util::coordinate_calculation::greatCircleDistance(snapped_location, input_location),
waypoint_name);
}
}

flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<fbresult::Waypoint>>>
MakeWaypoints(flatbuffers::FlatBufferBuilder *builder,
const std::vector<PhantomNodes> &segment_end_coordinates) const
const std::vector<PhantomNodeCandidates> &waypoint_candidates) const
{
BOOST_ASSERT(parameters.coordinates.size() > 0);
BOOST_ASSERT(parameters.coordinates.size() == segment_end_coordinates.size() + 1);
BOOST_ASSERT(parameters.coordinates.size() == waypoint_candidates.size());

std::vector<flatbuffers::Offset<fbresult::Waypoint>> waypoints;
waypoints.resize(parameters.coordinates.size());
waypoints[0] =
MakeWaypoint(builder, segment_end_coordinates.front().source_phantom)->Finish();

std::transform(segment_end_coordinates.begin(),
segment_end_coordinates.end(),
std::next(waypoints.begin()),
[this, builder](const PhantomNodes &phantom_pair) {
return MakeWaypoint(builder, phantom_pair.target_phantom)->Finish();
std::transform(waypoint_candidates.begin(),
waypoint_candidates.end(),
waypoints.begin(),
[this, builder](const PhantomNodeCandidates &candidates) {
return MakeWaypoint(builder, candidates)->Finish();
});
return builder->CreateVector(waypoints);
}

// FIXME: gcc 4.9 does not like MakeWaypoints to be protected
// protected:
std::unique_ptr<fbresult::WaypointBuilder> MakeWaypoint(flatbuffers::FlatBufferBuilder *builder,
const PhantomNode &phantom) const
std::unique_ptr<fbresult::WaypointBuilder>
MakeWaypoint(flatbuffers::FlatBufferBuilder *builder,
const PhantomNodeCandidates &candidates) const
{

const auto &snapped_location = candidatesSnappedLocation(candidates);
const auto &input_location = candidatesInputLocation(candidates);
auto location =
fbresult::Position(static_cast<double>(util::toFloating(phantom.location.lon)),
static_cast<double>(util::toFloating(phantom.location.lat)));
auto name_string = builder->CreateString(
facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id)).to_string());
fbresult::Position(static_cast<double>(util::toFloating(snapped_location.lon)),
static_cast<double>(util::toFloating(snapped_location.lat)));

const auto toName = [this](const auto &phantom) {
return facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id))
.to_string();
};
const auto noEmpty = [](const auto &name) { return !name.empty(); };

// At an intersection we may have multiple phantom node candidates.
// Combine them to represent the waypoint name.
std::string waypoint_name = boost::algorithm::join(
candidates | boost::adaptors::transformed(toName) | boost::adaptors::filtered(noEmpty),
INTERSECTION_DELIMITER);
auto name_string = builder->CreateString(waypoint_name);

flatbuffers::Offset<flatbuffers::String> hint_string;
if (parameters.generate_hints)
{
hint_string = builder->CreateString(Hint{phantom, facade.GetCheckSum()}.ToBase64());
std::vector<SegmentHint> seg_hints(candidates.size());
std::transform(candidates.begin(),
candidates.end(),
seg_hints.begin(),
[this](const auto &phantom) {
return SegmentHint{phantom, facade.GetCheckSum()};
});
Hint hint{std::move(seg_hints)};
hint_string = builder->CreateString(hint.ToBase64());
}

auto waypoint = std::make_unique<fbresult::WaypointBuilder>(*builder);
waypoint->add_location(&location);
waypoint->add_distance(util::coordinate_calculation::greatCircleDistance(
phantom.location, phantom.input_location));
waypoint->add_distance(
util::coordinate_calculation::greatCircleDistance(snapped_location, input_location));
waypoint->add_name(name_string);
if (parameters.generate_hints)
{

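For orientation: the reworked MakeWaypoint overloads above collapse the street names of all phantom node candidates at a snapped intersection into a single waypoint name, joined with INTERSECTION_DELIMITER (" / ") and skipping unnamed ways. The standalone sketch below shows the same joining idea using only the standard library; the sample names are invented placeholders, not output produced by OSRM.

#include <iostream>
#include <string>
#include <vector>

// Join non-empty candidate street names with a delimiter, mirroring the
// intersection waypoint naming in BaseAPI::MakeWaypoint (illustration only).
std::string JoinCandidateNames(const std::vector<std::string> &names,
                               const std::string &delimiter = " / ")
{
    std::string result;
    for (const auto &name : names)
    {
        if (name.empty())
            continue; // unnamed candidate ways contribute nothing
        if (!result.empty())
            result += delimiter;
        result += name;
    }
    return result;
}

int main()
{
    // e.g. snapping onto an intersection of two named ways and one unnamed way
    std::cout << JoinCandidateNames({"Market Street", "", "Fremont Street"}) << '\n';
    // prints: Market Street / Fremont Street
}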
@ -51,14 +51,14 @@ namespace api
* Holds member attributes:
* - coordinates: for specifying location(s) to services
* - hints: hint for the service to derive the position(s) in the road network more efficiently,
* optional per coordinate
* optional per coordinate. Multiple hints can be provided for a coordinate.
* - radiuses: limits the search for segments in the road network to given radius(es) in meter,
* optional per coordinate
* - bearings: limits the search for segments in the road network to given bearing(s) in degree
* towards true north in clockwise direction, optional per coordinate
* - approaches: force the phantom node to start towards the node with the road country side.
*
* \see OSRM, Coordinate, Hint, Bearing, RouteParame, RouteParameters, TableParameters,
* \see OSRM, Coordinate, Hint, Bearing, RouteParameters, TableParameters,
* NearestParameters, TripParameters, MatchParameters and TileParameters
*/
struct BaseParameters

@ -76,7 +76,7 @@ class MatchAPI final : public RouteAPI
routes.values.reserve(number_of_routes);
for (auto index : util::irange<std::size_t>(0UL, sub_matchings.size()))
{
auto route = MakeRoute(sub_routes[index].segment_end_coordinates,
auto route = MakeRoute(sub_routes[index].leg_endpoints,
sub_routes[index].unpacked_path_segments,
sub_routes[index].source_traversed_in_reverse,
sub_routes[index].target_traversed_in_reverse);
@ -146,7 +146,7 @@ class MatchAPI final : public RouteAPI
}
const auto &phantom =
sub_matchings[matching_index.sub_matching_index].nodes[matching_index.point_index];
auto waypoint = BaseAPI::MakeWaypoint(&fb_result, phantom);
auto waypoint = BaseAPI::MakeWaypoint(&fb_result, {phantom});
waypoint->add_matchings_index(matching_index.sub_matching_index);
waypoint->add_alternatives_count(sub_matchings[matching_index.sub_matching_index]
.alternatives_count[matching_index.point_index]);
@ -200,7 +200,7 @@ class MatchAPI final : public RouteAPI
}
const auto &phantom =
sub_matchings[matching_index.sub_matching_index].nodes[matching_index.point_index];
auto waypoint = BaseAPI::MakeWaypoint(phantom);
auto waypoint = BaseAPI::MakeWaypoint({phantom});
waypoint.values["matchings_index"] = matching_index.sub_matching_index;
waypoint.values["waypoint_index"] = matching_index.point_index;
waypoint.values["alternatives_count"] =

@ -71,7 +71,7 @@ class NearestAPI final : public BaseAPI
auto node_values = MakeNodes(phantom_node);
fbresult::Uint64Pair nodes{node_values.first, node_values.second};

auto waypoint = MakeWaypoint(&fb_result, phantom_node);
auto waypoint = MakeWaypoint(&fb_result, {phantom_node});
waypoint->add_nodes(&nodes);
return waypoint->Finish();
});
@ -100,7 +100,7 @@ class NearestAPI final : public BaseAPI
waypoints.values.begin(),
[this](const PhantomNodeWithDistance &phantom_with_distance) {
auto &phantom_node = phantom_with_distance.phantom_node;
auto waypoint = MakeWaypoint(phantom_node);
auto waypoint = MakeWaypoint({phantom_node});

util::json::Array nodes;


@ -47,8 +47,8 @@ class RouteAPI : public BaseAPI
|
||||
|
||||
void
|
||||
MakeResponse(const InternalManyRoutesResult &raw_routes,
|
||||
const std::vector<PhantomNodes>
|
||||
&all_start_end_points, // all used coordinates, ignoring waypoints= parameter
|
||||
const std::vector<PhantomNodeCandidates>
|
||||
&waypoint_candidates, // all used coordinates, ignoring waypoints= parameter
|
||||
osrm::engine::api::ResultT &response) const
|
||||
{
|
||||
BOOST_ASSERT(!raw_routes.routes.empty());
|
||||
@ -56,19 +56,19 @@ class RouteAPI : public BaseAPI
|
||||
if (response.is<flatbuffers::FlatBufferBuilder>())
|
||||
{
|
||||
auto &fb_result = response.get<flatbuffers::FlatBufferBuilder>();
|
||||
MakeResponse(raw_routes, all_start_end_points, fb_result);
|
||||
MakeResponse(raw_routes, waypoint_candidates, fb_result);
|
||||
}
|
||||
else
|
||||
{
|
||||
auto &json_result = response.get<util::json::Object>();
|
||||
MakeResponse(raw_routes, all_start_end_points, json_result);
|
||||
MakeResponse(raw_routes, waypoint_candidates, json_result);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MakeResponse(const InternalManyRoutesResult &raw_routes,
|
||||
const std::vector<PhantomNodes>
|
||||
&all_start_end_points, // all used coordinates, ignoring waypoints= parameter
|
||||
const std::vector<PhantomNodeCandidates>
|
||||
&waypoint_candidates, // all used coordinates, ignoring waypoints= parameter
|
||||
flatbuffers::FlatBufferBuilder &fb_result) const
|
||||
{
|
||||
|
||||
@ -80,8 +80,8 @@ class RouteAPI : public BaseAPI
|
||||
}
|
||||
|
||||
auto response =
|
||||
MakeFBResponse(raw_routes, fb_result, [this, &all_start_end_points, &fb_result]() {
|
||||
return BaseAPI::MakeWaypoints(&fb_result, all_start_end_points);
|
||||
MakeFBResponse(raw_routes, fb_result, [this, &waypoint_candidates, &fb_result]() {
|
||||
return BaseAPI::MakeWaypoints(&fb_result, waypoint_candidates);
|
||||
});
|
||||
|
||||
if (!data_timestamp.empty())
|
||||
@ -93,8 +93,8 @@ class RouteAPI : public BaseAPI
|
||||
|
||||
void
|
||||
MakeResponse(const InternalManyRoutesResult &raw_routes,
|
||||
const std::vector<PhantomNodes>
|
||||
&all_start_end_points, // all used coordinates, ignoring waypoints= parameter
|
||||
const std::vector<PhantomNodeCandidates>
|
||||
&waypoint_candidates, // all used coordinates, ignoring waypoints= parameter
|
||||
util::json::Object &response) const
|
||||
{
|
||||
util::json::Array jsRoutes;
|
||||
@ -104,7 +104,7 @@ class RouteAPI : public BaseAPI
|
||||
if (!route.is_valid())
|
||||
continue;
|
||||
|
||||
jsRoutes.values.push_back(MakeRoute(route.segment_end_coordinates,
|
||||
jsRoutes.values.push_back(MakeRoute(route.leg_endpoints,
|
||||
route.unpacked_path_segments,
|
||||
route.source_traversed_in_reverse,
|
||||
route.target_traversed_in_reverse));
|
||||
@ -112,7 +112,7 @@ class RouteAPI : public BaseAPI
|
||||
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
response.values["waypoints"] = BaseAPI::MakeWaypoints(all_start_end_points);
|
||||
response.values["waypoints"] = BaseAPI::MakeWaypoints(waypoint_candidates);
|
||||
}
|
||||
response.values["routes"] = std::move(jsRoutes);
|
||||
response.values["code"] = "Ok";
|
||||
@ -138,7 +138,7 @@ class RouteAPI : public BaseAPI
|
||||
continue;
|
||||
|
||||
routes.push_back(MakeRoute(fb_result,
|
||||
raw_route.segment_end_coordinates,
|
||||
raw_route.leg_endpoints,
|
||||
raw_route.unpacked_path_segments,
|
||||
raw_route.source_traversed_in_reverse,
|
||||
raw_route.target_traversed_in_reverse));
|
||||
@ -328,12 +328,12 @@ class RouteAPI : public BaseAPI
|
||||
|
||||
flatbuffers::Offset<fbresult::RouteObject>
|
||||
MakeRoute(flatbuffers::FlatBufferBuilder &fb_result,
|
||||
const std::vector<PhantomNodes> &segment_end_coordinates,
|
||||
const std::vector<PhantomEndpoints> &leg_endpoints,
|
||||
const std::vector<std::vector<PathData>> &unpacked_path_segments,
|
||||
const std::vector<bool> &source_traversed_in_reverse,
|
||||
const std::vector<bool> &target_traversed_in_reverse) const
|
||||
{
|
||||
auto legs_info = MakeLegs(segment_end_coordinates,
|
||||
auto legs_info = MakeLegs(leg_endpoints,
|
||||
unpacked_path_segments,
|
||||
source_traversed_in_reverse,
|
||||
target_traversed_in_reverse);
|
||||
@ -705,12 +705,12 @@ class RouteAPI : public BaseAPI
|
||||
return fb_result.CreateVector(intersections);
|
||||
}
|
||||
|
||||
util::json::Object MakeRoute(const std::vector<PhantomNodes> &segment_end_coordinates,
|
||||
util::json::Object MakeRoute(const std::vector<PhantomEndpoints> &leg_endpoints,
|
||||
const std::vector<std::vector<PathData>> &unpacked_path_segments,
|
||||
const std::vector<bool> &source_traversed_in_reverse,
|
||||
const std::vector<bool> &target_traversed_in_reverse) const
|
||||
{
|
||||
auto legs_info = MakeLegs(segment_end_coordinates,
|
||||
auto legs_info = MakeLegs(leg_endpoints,
|
||||
unpacked_path_segments,
|
||||
source_traversed_in_reverse,
|
||||
target_traversed_in_reverse);
|
||||
@ -868,7 +868,7 @@ class RouteAPI : public BaseAPI
|
||||
const RouteParameters ¶meters;
|
||||
|
||||
std::pair<std::vector<guidance::RouteLeg>, std::vector<guidance::LegGeometry>>
|
||||
MakeLegs(const std::vector<PhantomNodes> &segment_end_coordinates,
|
||||
MakeLegs(const std::vector<PhantomEndpoints> &leg_endpoints,
|
||||
const std::vector<std::vector<PathData>> &unpacked_path_segments,
|
||||
const std::vector<bool> &source_traversed_in_reverse,
|
||||
const std::vector<bool> &target_traversed_in_reverse) const
|
||||
@ -877,13 +877,13 @@ class RouteAPI : public BaseAPI
|
||||
std::make_pair(std::vector<guidance::RouteLeg>(), std::vector<guidance::LegGeometry>());
|
||||
auto &legs = result.first;
|
||||
auto &leg_geometries = result.second;
|
||||
auto number_of_legs = segment_end_coordinates.size();
|
||||
auto number_of_legs = leg_endpoints.size();
|
||||
legs.reserve(number_of_legs);
|
||||
leg_geometries.reserve(number_of_legs);
|
||||
|
||||
for (auto idx : util::irange<std::size_t>(0UL, number_of_legs))
|
||||
{
|
||||
const auto &phantoms = segment_end_coordinates[idx];
|
||||
const auto &phantoms = leg_endpoints[idx];
|
||||
const auto &path_data = unpacked_path_segments[idx];
|
||||
|
||||
const bool reversed_source = source_traversed_in_reverse[idx];
|
||||
|
||||
@ -48,25 +48,25 @@ class TableAPI final : public BaseAPI
|
||||
|
||||
virtual void
|
||||
MakeResponse(const std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>> &tables,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
const std::vector<TableCellRef> &fallback_speed_cells,
|
||||
osrm::engine::api::ResultT &response) const
|
||||
{
|
||||
if (response.is<flatbuffers::FlatBufferBuilder>())
|
||||
{
|
||||
auto &fb_result = response.get<flatbuffers::FlatBufferBuilder>();
|
||||
MakeResponse(tables, phantoms, fallback_speed_cells, fb_result);
|
||||
MakeResponse(tables, candidates, fallback_speed_cells, fb_result);
|
||||
}
|
||||
else
|
||||
{
|
||||
auto &json_result = response.get<util::json::Object>();
|
||||
MakeResponse(tables, phantoms, fallback_speed_cells, json_result);
|
||||
MakeResponse(tables, candidates, fallback_speed_cells, json_result);
|
||||
}
|
||||
}
|
||||
|
||||
virtual void
|
||||
MakeResponse(const std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>> &tables,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
const std::vector<TableCellRef> &fallback_speed_cells,
|
||||
flatbuffers::FlatBufferBuilder &fb_result) const
|
||||
{
|
||||
@ -86,15 +86,15 @@ class TableAPI final : public BaseAPI
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
sources = MakeWaypoints(fb_result, phantoms);
|
||||
sources = MakeWaypoints(fb_result, candidates);
|
||||
}
|
||||
number_of_sources = phantoms.size();
|
||||
number_of_sources = candidates.size();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
sources = MakeWaypoints(fb_result, phantoms, parameters.sources);
|
||||
sources = MakeWaypoints(fb_result, candidates, parameters.sources);
|
||||
}
|
||||
}
|
||||
|
||||
@ -104,15 +104,15 @@ class TableAPI final : public BaseAPI
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
destinations = MakeWaypoints(fb_result, phantoms);
|
||||
destinations = MakeWaypoints(fb_result, candidates);
|
||||
}
|
||||
number_of_destinations = phantoms.size();
|
||||
number_of_destinations = candidates.size();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
destinations = MakeWaypoints(fb_result, phantoms, parameters.destinations);
|
||||
destinations = MakeWaypoints(fb_result, candidates, parameters.destinations);
|
||||
}
|
||||
}
|
||||
|
||||
@ -168,7 +168,7 @@ class TableAPI final : public BaseAPI
|
||||
|
||||
virtual void
|
||||
MakeResponse(const std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>> &tables,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
const std::vector<TableCellRef> &fallback_speed_cells,
|
||||
util::json::Object &response) const
|
||||
{
|
||||
@ -180,15 +180,15 @@ class TableAPI final : public BaseAPI
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
response.values["sources"] = MakeWaypoints(phantoms);
|
||||
response.values["sources"] = MakeWaypoints(candidates);
|
||||
}
|
||||
number_of_sources = phantoms.size();
|
||||
number_of_sources = candidates.size();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
response.values["sources"] = MakeWaypoints(phantoms, parameters.sources);
|
||||
response.values["sources"] = MakeWaypoints(candidates, parameters.sources);
|
||||
}
|
||||
}
|
||||
|
||||
@ -196,15 +196,16 @@ class TableAPI final : public BaseAPI
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
response.values["destinations"] = MakeWaypoints(phantoms);
|
||||
response.values["destinations"] = MakeWaypoints(candidates);
|
||||
}
|
||||
number_of_destinations = phantoms.size();
|
||||
number_of_destinations = candidates.size();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
response.values["destinations"] = MakeWaypoints(phantoms, parameters.destinations);
|
||||
response.values["destinations"] =
|
||||
MakeWaypoints(candidates, parameters.destinations);
|
||||
}
|
||||
}
|
||||
|
||||
@ -236,31 +237,33 @@ class TableAPI final : public BaseAPI
|
||||
protected:
|
||||
virtual flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<fbresult::Waypoint>>>
|
||||
MakeWaypoints(flatbuffers::FlatBufferBuilder &builder,
|
||||
const std::vector<PhantomNode> &phantoms) const
|
||||
const std::vector<PhantomNodeCandidates> &candidates) const
|
||||
{
|
||||
std::vector<flatbuffers::Offset<fbresult::Waypoint>> waypoints;
|
||||
waypoints.reserve(phantoms.size());
|
||||
BOOST_ASSERT(phantoms.size() == parameters.coordinates.size());
|
||||
waypoints.reserve(candidates.size());
|
||||
BOOST_ASSERT(candidates.size() == parameters.coordinates.size());
|
||||
|
||||
boost::range::transform(
|
||||
phantoms, std::back_inserter(waypoints), [this, &builder](const PhantomNode &phantom) {
|
||||
return BaseAPI::MakeWaypoint(&builder, phantom)->Finish();
|
||||
boost::range::transform(candidates,
|
||||
std::back_inserter(waypoints),
|
||||
[this, &builder](const PhantomNodeCandidates &candidates) {
|
||||
return BaseAPI::MakeWaypoint(&builder, candidates)->Finish();
|
||||
});
|
||||
return builder.CreateVector(waypoints);
|
||||
}
|
||||
|
||||
virtual flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<fbresult::Waypoint>>>
|
||||
MakeWaypoints(flatbuffers::FlatBufferBuilder &builder,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
const std::vector<std::size_t> &indices) const
|
||||
{
|
||||
std::vector<flatbuffers::Offset<fbresult::Waypoint>> waypoints;
|
||||
waypoints.reserve(indices.size());
|
||||
boost::range::transform(indices,
|
||||
boost::range::transform(
|
||||
indices,
|
||||
std::back_inserter(waypoints),
|
||||
[this, &builder, phantoms](const std::size_t idx) {
|
||||
BOOST_ASSERT(idx < phantoms.size());
|
||||
return BaseAPI::MakeWaypoint(&builder, phantoms[idx])->Finish();
|
||||
[this, &builder, &candidates](const std::size_t idx) {
|
||||
BOOST_ASSERT(idx < candidates.size());
|
||||
return BaseAPI::MakeWaypoint(&builder, candidates[idx])->Finish();
|
||||
});
|
||||
return builder.CreateVector(waypoints);
|
||||
}
|
||||
@ -313,29 +316,31 @@ class TableAPI final : public BaseAPI
|
||||
return builder.CreateVector(fb_table);
|
||||
}
|
||||
|
||||
virtual util::json::Array MakeWaypoints(const std::vector<PhantomNode> &phantoms) const
|
||||
virtual util::json::Array
|
||||
MakeWaypoints(const std::vector<PhantomNodeCandidates> &candidates) const
|
||||
{
|
||||
util::json::Array json_waypoints;
|
||||
json_waypoints.values.reserve(phantoms.size());
|
||||
BOOST_ASSERT(phantoms.size() == parameters.coordinates.size());
|
||||
json_waypoints.values.reserve(candidates.size());
|
||||
BOOST_ASSERT(candidates.size() == parameters.coordinates.size());
|
||||
|
||||
boost::range::transform(
|
||||
phantoms,
|
||||
boost::range::transform(candidates,
|
||||
std::back_inserter(json_waypoints.values),
|
||||
[this](const PhantomNode &phantom) { return BaseAPI::MakeWaypoint(phantom); });
|
||||
[this](const PhantomNodeCandidates &candidates) {
|
||||
return BaseAPI::MakeWaypoint(candidates);
|
||||
});
|
||||
return json_waypoints;
|
||||
}
|
||||
|
||||
virtual util::json::Array MakeWaypoints(const std::vector<PhantomNode> &phantoms,
|
||||
virtual util::json::Array MakeWaypoints(const std::vector<PhantomNodeCandidates> &candidates,
|
||||
const std::vector<std::size_t> &indices) const
|
||||
{
|
||||
util::json::Array json_waypoints;
|
||||
json_waypoints.values.reserve(indices.size());
|
||||
boost::range::transform(indices,
|
||||
std::back_inserter(json_waypoints.values),
|
||||
[this, phantoms](const std::size_t idx) {
|
||||
BOOST_ASSERT(idx < phantoms.size());
|
||||
return BaseAPI::MakeWaypoint(phantoms[idx]);
|
||||
[this, &candidates](const std::size_t idx) {
|
||||
BOOST_ASSERT(idx < candidates.size());
|
||||
return BaseAPI::MakeWaypoint(candidates[idx]);
|
||||
});
|
||||
return json_waypoints;
|
||||
}
|
||||
|
||||
@ -26,7 +26,7 @@ class TripAPI final : public RouteAPI
|
||||
}
|
||||
void MakeResponse(const std::vector<std::vector<NodeID>> &sub_trips,
|
||||
const std::vector<InternalRouteResult> &sub_routes,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
osrm::engine::api::ResultT &response) const
|
||||
{
|
||||
BOOST_ASSERT(sub_trips.size() == sub_routes.size());
|
||||
@ -34,17 +34,17 @@ class TripAPI final : public RouteAPI
|
||||
if (response.is<flatbuffers::FlatBufferBuilder>())
|
||||
{
|
||||
auto &fb_result = response.get<flatbuffers::FlatBufferBuilder>();
|
||||
MakeResponse(sub_trips, sub_routes, phantoms, fb_result);
|
||||
MakeResponse(sub_trips, sub_routes, candidates, fb_result);
|
||||
}
|
||||
else
|
||||
{
|
||||
auto &json_result = response.get<util::json::Object>();
|
||||
MakeResponse(sub_trips, sub_routes, phantoms, json_result);
|
||||
MakeResponse(sub_trips, sub_routes, candidates, json_result);
|
||||
}
|
||||
}
|
||||
void MakeResponse(const std::vector<std::vector<NodeID>> &sub_trips,
|
||||
const std::vector<InternalRouteResult> &sub_routes,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
flatbuffers::FlatBufferBuilder &fb_result) const
|
||||
{
|
||||
auto data_timestamp = facade.GetTimestamp();
|
||||
@ -55,8 +55,8 @@ class TripAPI final : public RouteAPI
|
||||
}
|
||||
|
||||
auto response =
|
||||
MakeFBResponse(sub_routes, fb_result, [this, &fb_result, &sub_trips, &phantoms]() {
|
||||
return MakeWaypoints(fb_result, sub_trips, phantoms);
|
||||
MakeFBResponse(sub_routes, fb_result, [this, &fb_result, &sub_trips, &candidates]() {
|
||||
return MakeWaypoints(fb_result, sub_trips, candidates);
|
||||
});
|
||||
|
||||
if (!data_timestamp.empty())
|
||||
@ -67,7 +67,7 @@ class TripAPI final : public RouteAPI
|
||||
}
|
||||
void MakeResponse(const std::vector<std::vector<NodeID>> &sub_trips,
|
||||
const std::vector<InternalRouteResult> &sub_routes,
|
||||
const std::vector<PhantomNode> &phantoms,
|
||||
const std::vector<PhantomNodeCandidates> &candidates,
|
||||
util::json::Object &response) const
|
||||
{
|
||||
auto number_of_routes = sub_trips.size();
|
||||
@ -75,7 +75,7 @@ class TripAPI final : public RouteAPI
|
||||
routes.values.reserve(number_of_routes);
|
||||
for (auto index : util::irange<std::size_t>(0UL, sub_trips.size()))
|
||||
{
|
||||
auto route = MakeRoute(sub_routes[index].segment_end_coordinates,
|
||||
auto route = MakeRoute(sub_routes[index].leg_endpoints,
|
||||
sub_routes[index].unpacked_path_segments,
|
||||
sub_routes[index].source_traversed_in_reverse,
|
||||
sub_routes[index].target_traversed_in_reverse);
|
||||
@ -83,7 +83,7 @@ class TripAPI final : public RouteAPI
|
||||
}
|
||||
if (!parameters.skip_waypoints)
|
||||
{
|
||||
response.values["waypoints"] = MakeWaypoints(sub_trips, phantoms);
|
||||
response.values["waypoints"] = MakeWaypoints(sub_trips, candidates);
|
||||
}
|
||||
response.values["trips"] = std::move(routes);
|
||||
response.values["code"] = "Ok";
|
||||
@ -120,7 +120,7 @@ class TripAPI final : public RouteAPI
|
||||
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<fbresult::Waypoint>>>
|
||||
MakeWaypoints(flatbuffers::FlatBufferBuilder &fb_result,
|
||||
const std::vector<std::vector<NodeID>> &sub_trips,
|
||||
const std::vector<PhantomNode> &phantoms) const
|
||||
const std::vector<PhantomNodeCandidates> &candidates) const
|
||||
{
|
||||
std::vector<flatbuffers::Offset<fbresult::Waypoint>> waypoints;
|
||||
waypoints.reserve(parameters.coordinates.size());
|
||||
@ -132,7 +132,7 @@ class TripAPI final : public RouteAPI
|
||||
auto trip_index = input_idx_to_trip_idx[input_index];
|
||||
BOOST_ASSERT(!trip_index.NotUsed());
|
||||
|
||||
auto waypoint = BaseAPI::MakeWaypoint(&fb_result, phantoms[input_index]);
|
||||
auto waypoint = BaseAPI::MakeWaypoint(&fb_result, candidates[input_index]);
|
||||
waypoint->add_waypoint_index(trip_index.point_index);
|
||||
waypoint->add_trips_index(trip_index.sub_trip_index);
|
||||
waypoints.push_back(waypoint->Finish());
|
||||
@ -142,7 +142,7 @@ class TripAPI final : public RouteAPI
|
||||
}
|
||||
|
||||
util::json::Array MakeWaypoints(const std::vector<std::vector<NodeID>> &sub_trips,
|
||||
const std::vector<PhantomNode> &phantoms) const
|
||||
const std::vector<PhantomNodeCandidates> &candidates) const
|
||||
{
|
||||
util::json::Array waypoints;
|
||||
waypoints.values.reserve(parameters.coordinates.size());
|
||||
@ -154,7 +154,7 @@ class TripAPI final : public RouteAPI
|
||||
auto trip_index = input_idx_to_trip_idx[input_index];
|
||||
BOOST_ASSERT(!trip_index.NotUsed());
|
||||
|
||||
auto waypoint = BaseAPI::MakeWaypoint(phantoms[input_index]);
|
||||
auto waypoint = BaseAPI::MakeWaypoint(candidates[input_index]);
|
||||
waypoint.values["trips_index"] = trip_index.sub_trip_index;
|
||||
waypoint.values["waypoint_index"] = trip_index.point_index;
|
||||
waypoints.values.push_back(std::move(waypoint));
|
||||
|
||||
@ -323,127 +323,41 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
|
||||
const float max_distance,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodesInRange(
|
||||
input_coordinate, max_distance, approach, use_all_edges);
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
|
||||
const float max_distance,
|
||||
const int bearing,
|
||||
const int bearing_range,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodesInRange(
|
||||
input_coordinate, max_distance, bearing, bearing_range, approach, use_all_edges);
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate input_coordinate,
|
||||
const unsigned max_results,
|
||||
const Approach approach) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodes(input_coordinate, max_results, approach);
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate input_coordinate,
|
||||
const unsigned max_results,
|
||||
const double max_distance,
|
||||
const boost::optional<Bearing> bearing,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodes(
|
||||
input_coordinate, approach, boost::none, max_distance, bearing, use_all_edges);
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate input_coordinate,
|
||||
const size_t max_results,
|
||||
const boost::optional<double> max_distance,
|
||||
const boost::optional<Bearing> bearing,
|
||||
const Approach approach) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodes(
|
||||
input_coordinate, max_results, max_distance, approach);
|
||||
input_coordinate, approach, max_results, max_distance, bearing, boost::none);
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate input_coordinate,
|
||||
const unsigned max_results,
|
||||
const int bearing,
|
||||
const int bearing_range,
|
||||
const Approach approach) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodes(
|
||||
input_coordinate, max_results, bearing, bearing_range, approach);
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate input_coordinate,
|
||||
const unsigned max_results,
|
||||
const double max_distance,
|
||||
const int bearing,
|
||||
const int bearing_range,
|
||||
const Approach approach) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodes(
|
||||
input_coordinate, max_results, max_distance, bearing, bearing_range, approach);
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
|
||||
PhantomCandidateAlternatives
|
||||
NearestCandidatesWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
|
||||
const boost::optional<double> max_distance,
|
||||
const boost::optional<Bearing> bearing,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodeWithAlternativeFromBigComponent(
|
||||
input_coordinate, approach, use_all_edges);
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
|
||||
const double max_distance,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodeWithAlternativeFromBigComponent(
|
||||
input_coordinate, max_distance, approach, use_all_edges);
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
|
||||
const double max_distance,
|
||||
const int bearing,
|
||||
const int bearing_range,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodeWithAlternativeFromBigComponent(
|
||||
input_coordinate, max_distance, bearing, bearing_range, approach, use_all_edges);
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
|
||||
const int bearing,
|
||||
const int bearing_range,
|
||||
const Approach approach,
|
||||
const bool use_all_edges) const override final
|
||||
{
|
||||
BOOST_ASSERT(m_geospatial_query.get());
|
||||
|
||||
return m_geospatial_query->NearestPhantomNodeWithAlternativeFromBigComponent(
|
||||
input_coordinate, bearing, bearing_range, approach, use_all_edges);
|
||||
return m_geospatial_query->NearestCandidatesWithAlternativeFromBigComponent(
|
||||
input_coordinate, approach, max_distance, bearing, use_all_edges);
|
||||
}
|
||||
|
||||
std::uint32_t GetCheckSum() const override final { return m_check_sum; }
|
||||
|
||||
@ -35,6 +35,7 @@
#include <boost/range/any_range.hpp>
#include <cstddef>

#include <engine/bearing.hpp>
#include <string>
#include <utility>
#include <vector>
@ -130,62 +131,24 @@ class BaseDataFacade

virtual std::vector<PhantomNodeWithDistance>
NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
const float max_distance,
const int bearing,
const int bearing_range,
const Approach approach,
const bool use_all_edges) const = 0;
virtual std::vector<PhantomNodeWithDistance>
NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
const float max_distance,
const double max_distance,
const boost::optional<Bearing> bearing,
const Approach approach,
const bool use_all_edges) const = 0;

virtual std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const double max_distance,
const int bearing,
const int bearing_range,
const Approach approach) const = 0;
virtual std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const int bearing,
const int bearing_range,
const Approach approach) const = 0;
virtual std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const Approach approach) const = 0;
virtual std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const double max_distance,
const size_t max_results,
const boost::optional<double> max_distance,
const boost::optional<Bearing> bearing,
const Approach approach) const = 0;

virtual std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
virtual PhantomCandidateAlternatives
NearestCandidatesWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const boost::optional<double> max_distance,
const boost::optional<Bearing> bearing,
const Approach approach,
const bool use_all_edges) const = 0;
virtual std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const double max_distance,
const Approach approach,
const bool use_all_edges) const = 0;
virtual std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const double max_distance,
const int bearing,
const int bearing_range,
const Approach approach,
const bool use_all_edges) const = 0;
virtual std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const int bearing,
const int bearing_range,
const Approach approach,
const bool use_all_edges = false) const = 0;

virtual bool HasLaneData(const EdgeID edge_based_edge_id) const = 0;
virtual util::guidance::LaneTupleIdPair GetLaneData(const EdgeID edge_based_edge_id) const = 0;

@ -2,6 +2,7 @@
|
||||
#define GEOSPATIAL_QUERY_HPP
|
||||
|
||||
#include "engine/approach.hpp"
|
||||
#include "engine/bearing.hpp"
|
||||
#include "engine/phantom_node.hpp"
|
||||
#include "util/bearing.hpp"
|
||||
#include "util/coordinate_calculation.hpp"
|
||||
@ -22,10 +23,10 @@ namespace osrm
|
||||
namespace engine
|
||||
{
|
||||
|
||||
inline std::pair<bool, bool> boolPairAnd(const std::pair<bool, bool> &A,
|
||||
const std::pair<bool, bool> &B)
|
||||
inline std::pair<bool, bool> operator&&(const std::pair<bool, bool> &a,
|
||||
const std::pair<bool, bool> &b)
|
||||
{
|
||||
return std::make_pair(A.first && B.first, A.second && B.second);
|
||||
return {a.first && b.first, a.second && b.second};
|
||||
}
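Segment validity moves through these filters as a (forward, reverse) pair, and the operator introduced here simply ANDs the two directions element-wise. A self-contained illustration with plain std::pair, mirroring (but not reusing) the operator above:

```cpp
#include <cassert>
#include <utility>

// Element-wise AND of two (forward, reverse) validity pairs.
static std::pair<bool, bool> and_pairs(const std::pair<bool, bool> &a,
                                       const std::pair<bool, bool> &b)
{
    return {a.first && b.first, a.second && b.second};
}

int main()
{
    // Forward direction fails the bearing check, reverse passes everything:
    // only the reverse direction survives the combined filter.
    const auto exclude_ok = std::make_pair(true, true);
    const auto bearing_ok = std::make_pair(false, true);
    assert(and_pairs(exclude_ok, bearing_ok) == std::make_pair(false, true));
    return 0;
}
```

Keeping the two directions separate is what later allows a candidate to be accepted for only one travel direction, for example when a bearing filter rejects the forward edge but not the reverse one.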

// Implements complex queries on top of an RTree and builds PhantomNodes from it.
@ -48,390 +49,241 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
return rtree.SearchInBox(bbox);
}

// Returns nearest PhantomNodes in the given bearing range within max_distance.
// Returns max_results nearest PhantomNodes that are valid within the provided parameters.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
const double max_distance,
NearestPhantomNodes(const util::Coordinate input_coordinate,
const Approach approach,
const bool use_all_edges) const
const boost::optional<size_t> max_results,
const boost::optional<double> max_distance,
const boost::optional<Bearing> bearing_with_range,
const boost::optional<bool> use_all_edges) const
{
auto results = rtree.Nearest(
input_coordinate,
[this, approach, &input_coordinate, use_all_edges](const CandidateSegment &segment) {
return boolPairAnd(
boolPairAnd(HasValidEdge(segment, use_all_edges), CheckSegmentExclude(segment)),
CheckApproach(input_coordinate, segment, approach));
},
[this, max_distance, input_coordinate](const std::size_t,
[this, approach, &input_coordinate, &bearing_with_range, &use_all_edges](
const CandidateSegment &segment) {
return CheckSegmentDistance(input_coordinate, segment, max_distance);
auto valid = CheckSegmentExclude(segment) &&
CheckApproach(input_coordinate, segment, approach) &&
(use_all_edges ? HasValidEdge(segment, *use_all_edges)
: HasValidEdge(segment)) &&
(bearing_with_range ? CheckSegmentBearing(segment, *bearing_with_range)
: std::make_pair(true, true));
return valid;
},
[this, &max_distance, &max_results, input_coordinate](const std::size_t num_results,
const CandidateSegment &segment) {
return (max_results && num_results >= *max_results) ||
(max_distance &&
CheckSegmentDistance(input_coordinate, segment, *max_distance));
});

return MakePhantomNodes(input_coordinate, results);
}
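The rtree.Nearest call is driven by two callbacks: a filter returning per-direction validity for each candidate segment, and a terminator that ends the incremental search. The stand-in below (hypothetical Segment type, not the OSRM CandidateSegment or RTree) only illustrates the shape of that protocol:

```cpp
#include <cstddef>
#include <functional>
#include <utility>
#include <vector>

struct Segment { double distance; };

// Emulates an incremental nearest-neighbour loop: candidates arrive in
// distance order, the terminator may stop the search, and the filter keeps
// a candidate if it is usable in at least one direction.
std::vector<Segment>
nearest(const std::vector<Segment> &by_distance,
        const std::function<std::pair<bool, bool>(const Segment &)> &filter,
        const std::function<bool(std::size_t, const Segment &)> &terminate)
{
    std::vector<Segment> out;
    for (const auto &candidate : by_distance)
    {
        if (terminate(out.size(), candidate))
            break;
        const auto use = filter(candidate);
        if (use.first || use.second)
            out.push_back(candidate);
    }
    return out;
}
```

With optional limits, the terminator reads as `(max_results && n >= *max_results) || (max_distance && too_far)`, which is exactly the structure of the lambda in NearestPhantomNodes above.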

// Returns nearest PhantomNodes in the given bearing range within max_distance.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
const double max_distance,
const int bearing,
const int bearing_range,
// Returns a list of phantom node candidates from the nearest location that are valid
// within the provided parameters. If there is a tie between equidistant locations,
// we only pick candidates from one location.
// If candidates do not include a node from a big component, an alternative list of candidates
// from the nearest location which has nodes from a big component is returned.
PhantomCandidateAlternatives NearestCandidatesWithAlternativeFromBigComponent(
const util::Coordinate input_coordinate,
const Approach approach,
const bool use_all_edges) const
const boost::optional<double> max_distance,
const boost::optional<Bearing> bearing_with_range,
const boost::optional<bool> use_all_edges) const
{
auto results = rtree.Nearest(
input_coordinate,
[this, approach, &input_coordinate, bearing, bearing_range, use_all_edges](
const CandidateSegment &segment) {
auto use_direction =
boolPairAnd(CheckSegmentBearing(segment, bearing, bearing_range),
boolPairAnd(HasValidEdge(segment, use_all_edges),
CheckSegmentExclude(segment)));
use_direction =
boolPairAnd(use_direction, CheckApproach(input_coordinate, segment, approach));
return use_direction;
},
[this, max_distance, input_coordinate](const std::size_t,
const CandidateSegment &segment) {
return CheckSegmentDistance(input_coordinate, segment, max_distance);
});

return MakePhantomNodes(input_coordinate, results);
}

// Returns max_results nearest PhantomNodes in the given bearing range.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const int bearing,
const int bearing_range,
const Approach approach) const
{
auto results = rtree.Nearest(
input_coordinate,
[this, approach, &input_coordinate, bearing, bearing_range](
const CandidateSegment &segment) {
auto use_direction =
boolPairAnd(CheckSegmentBearing(segment, bearing, bearing_range),
boolPairAnd(HasValidEdge(segment), CheckSegmentExclude(segment)));
return boolPairAnd(use_direction,
CheckApproach(input_coordinate, segment, approach));
},
[max_results](const std::size_t num_results, const CandidateSegment &) {
return num_results >= max_results;
});

return MakePhantomNodes(input_coordinate, results);
}

// Returns max_results nearest PhantomNodes in the given bearing range within the maximum
// distance.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const double max_distance,
const int bearing,
const int bearing_range,
const Approach approach) const
{
auto results = rtree.Nearest(
input_coordinate,
[this, approach, &input_coordinate, bearing, bearing_range](
const CandidateSegment &segment) {
auto use_direction =
boolPairAnd(CheckSegmentBearing(segment, bearing, bearing_range),
boolPairAnd(HasValidEdge(segment), CheckSegmentExclude(segment)));
return boolPairAnd(use_direction,
CheckApproach(input_coordinate, segment, approach));
},
[this, max_distance, max_results, input_coordinate](const std::size_t num_results,
const CandidateSegment &segment) {
return num_results >= max_results ||
CheckSegmentDistance(input_coordinate, segment, max_distance);
});

return MakePhantomNodes(input_coordinate, results);
}

// Returns max_results nearest PhantomNodes.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const Approach approach) const
{
auto results = rtree.Nearest(
input_coordinate,
[this, approach, &input_coordinate](const CandidateSegment &segment) {
return boolPairAnd(boolPairAnd(HasValidEdge(segment), CheckSegmentExclude(segment)),
CheckApproach(input_coordinate, segment, approach));
},
[max_results](const std::size_t num_results, const CandidateSegment &) {
return num_results >= max_results;
});

return MakePhantomNodes(input_coordinate, results);
}

// Returns max_results nearest PhantomNodes in the given max distance.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const unsigned max_results,
const double max_distance,
const Approach approach) const
{
auto results = rtree.Nearest(
input_coordinate,
[this, approach, &input_coordinate](const CandidateSegment &segment) {
return boolPairAnd(boolPairAnd(HasValidEdge(segment), CheckSegmentExclude(segment)),
CheckApproach(input_coordinate, segment, approach));
},
[this, max_distance, max_results, input_coordinate](const std::size_t num_results,
const CandidateSegment &segment) {
return num_results >= max_results ||
CheckSegmentDistance(input_coordinate, segment, max_distance);
});

return MakePhantomNodes(input_coordinate, results);
}

// Returns the nearest phantom node. If this phantom node is not from a big component
// a second phantom node is return that is the nearest coordinate in a big component.
std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const double max_distance,
const Approach approach,
const bool use_all_edges) const
{
bool has_small_component = false;
bool has_nearest = false;
bool has_big_component = false;
Coordinate big_component_coord;
double big_component_distance = std::numeric_limits<double>::max();
Coordinate nearest_coord;
auto results = rtree.Nearest(
input_coordinate,
[this,
approach,
&input_coordinate,
&has_nearest,
&has_big_component,
&has_small_component,
&use_all_edges](const CandidateSegment &segment) {
auto use_segment =
(!has_small_component || (!has_big_component && !IsTinyComponent(segment)));
auto use_directions = std::make_pair(use_segment, use_segment);
const auto valid_edges = HasValidEdge(segment, use_all_edges);
const auto admissible_segments = CheckSegmentExclude(segment);
use_directions = boolPairAnd(use_directions, admissible_segments);
use_directions = boolPairAnd(use_directions, valid_edges);
use_directions =
boolPairAnd(use_directions, CheckApproach(input_coordinate, segment, approach));
&nearest_coord,
&big_component_coord,
&big_component_distance,
&use_all_edges,
&bearing_with_range](const CandidateSegment &segment) {
auto is_big_component = !IsTinyComponent(segment);
auto not_nearest =
has_nearest && segment.fixed_projected_coordinate != nearest_coord;
auto not_big =
has_big_component && segment.fixed_projected_coordinate != big_component_coord;

if (use_directions.first || use_directions.second)
/**
*
* Two reasons why we don't want this candidate:
* 1. A non-big component candidate that is not at the nearest location
* 2. A big component candidate that is not at the big location.
*
* It's possible that 1. could end up having the same location as the nearest big
* component node if we have yet to see one. However, we don't know this and it
* could lead to buffering large numbers of candidates before finding the big
* component location.
* By filtering out 1. nodes, this does mean that the alternative list of
* candidates will not have non-big component candidates. Given the alternative
* list of big component candidates is meant as a backup choice, this seems
* reasonable.
*/
if ((!is_big_component && not_nearest) || (is_big_component && not_big))
{
has_big_component = has_big_component || !IsTinyComponent(segment);
has_small_component = has_small_component || IsTinyComponent(segment);
return std::make_pair(false, false);
}
auto use_candidate =
CheckSegmentExclude(segment) &&
CheckApproach(input_coordinate, segment, approach) &&
(use_all_edges ? HasValidEdge(segment, *use_all_edges)
: HasValidEdge(segment)) &&
(bearing_with_range ? CheckSegmentBearing(segment, *bearing_with_range)
: std::make_pair(true, true));

if (use_candidate.first || use_candidate.second)
{
if (!has_nearest)
{
has_nearest = true;
nearest_coord = segment.fixed_projected_coordinate;
}
if (is_big_component && !has_big_component)
{
has_big_component = true;
big_component_coord = segment.fixed_projected_coordinate;
big_component_distance = GetSegmentDistance(input_coordinate, segment);
}
}

return use_directions;
return use_candidate;
},
[this, &has_big_component, max_distance, input_coordinate](
const std::size_t num_results, const CandidateSegment &segment) {
return (num_results > 0 && has_big_component) ||
CheckSegmentDistance(input_coordinate, segment, max_distance);
[this, &has_big_component, &max_distance, input_coordinate, &big_component_distance](
const std::size_t /*num_results*/, const CandidateSegment &segment) {
auto distance = GetSegmentDistance(input_coordinate, segment);
auto further_than_big_component = distance > big_component_distance;
auto no_more_candidates = has_big_component && further_than_big_component;
auto too_far_away = max_distance && distance > *max_distance;

// Time to terminate the search when:
// 1. We've found a node from a big component and the next candidate is further away
// than that node.
// 2. We're further away from the input than our max allowed distance.
return no_more_candidates || too_far_away;
});

if (results.size() == 0)
{
return std::make_pair(PhantomNode{}, PhantomNode{});
}

BOOST_ASSERT(results.size() == 1 || results.size() == 2);
return std::make_pair(MakePhantomNode(input_coordinate, results.front()).phantom_node,
MakePhantomNode(input_coordinate, results.back()).phantom_node);
}
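The rewritten search keeps two running locations: the first snapped coordinate it accepted and the first big-component coordinate together with its distance. The terminator can then stop as soon as candidates get further away than the recorded big-component hit or exceed the optional radius. A condensed restatement of that stopping rule, with illustrative names only:

```cpp
#include <boost/optional.hpp>
#include <limits>

struct SearchState
{
    bool has_big_component = false;
    double big_component_distance = std::numeric_limits<double>::max();
};

// Mirrors the terminate lambda above: stop when a big-component location is
// known and the next candidate is further away than it, or when the optional
// max_distance is exceeded.
inline bool stop_search(const SearchState &state,
                        const double candidate_distance,
                        const boost::optional<double> &max_distance)
{
    const bool no_more_candidates =
        state.has_big_component && candidate_distance > state.big_component_distance;
    const bool too_far_away = max_distance && candidate_distance > *max_distance;
    return no_more_candidates || too_far_away;
}
```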

// Returns the nearest phantom node. If this phantom node is not from a big component
// a second phantom node is return that is the nearest coordinate in a big component.
std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const Approach approach,
const bool use_all_edges) const
{
bool has_small_component = false;
bool has_big_component = false;
auto results = rtree.Nearest(
input_coordinate,
[this,
approach,
&input_coordinate,
&has_big_component,
&has_small_component,
&use_all_edges](const CandidateSegment &segment) {
auto use_segment =
(!has_small_component || (!has_big_component && !IsTinyComponent(segment)));
auto use_directions = std::make_pair(use_segment, use_segment);

const auto valid_edges = HasValidEdge(segment, use_all_edges);
const auto admissible_segments = CheckSegmentExclude(segment);
use_directions = boolPairAnd(use_directions, admissible_segments);
use_directions = boolPairAnd(use_directions, valid_edges);
use_directions =
boolPairAnd(use_directions, CheckApproach(input_coordinate, segment, approach));

if (use_directions.first || use_directions.second)
{
has_big_component = has_big_component || !IsTinyComponent(segment);
has_small_component = has_small_component || IsTinyComponent(segment);
}

return use_directions;
},
[&has_big_component](const std::size_t num_results, const CandidateSegment &) {
return num_results > 0 && has_big_component;
});

if (results.size() == 0)
{
return std::make_pair(PhantomNode{}, PhantomNode{});
}

BOOST_ASSERT(results.size() == 1 || results.size() == 2);
return std::make_pair(MakePhantomNode(input_coordinate, results.front()).phantom_node,
MakePhantomNode(input_coordinate, results.back()).phantom_node);
}

// Returns the nearest phantom node. If this phantom node is not from a big component
// a second phantom node is return that is the nearest coordinate in a big component.
std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const int bearing,
const int bearing_range,
const Approach approach,
const bool use_all_edges) const
{
bool has_small_component = false;
bool has_big_component = false;
auto results = rtree.Nearest(
input_coordinate,
[this,
approach,
&input_coordinate,
bearing,
bearing_range,
&has_big_component,
&has_small_component,
&use_all_edges](const CandidateSegment &segment) {
auto use_segment =
(!has_small_component || (!has_big_component && !IsTinyComponent(segment)));
auto use_directions = std::make_pair(use_segment, use_segment);
const auto admissible_segments = CheckSegmentExclude(segment);

if (use_segment)
{
use_directions =
boolPairAnd(CheckSegmentBearing(segment, bearing, bearing_range),
HasValidEdge(segment, use_all_edges));
use_directions = boolPairAnd(use_directions, admissible_segments);
use_directions = boolPairAnd(
use_directions, CheckApproach(input_coordinate, segment, approach));

if (use_directions.first || use_directions.second)
{
has_big_component = has_big_component || !IsTinyComponent(segment);
has_small_component = has_small_component || IsTinyComponent(segment);
}
}

return use_directions;
},
[&has_big_component](const std::size_t num_results, const CandidateSegment &) {
return num_results > 0 && has_big_component;
});

if (results.size() == 0)
{
return std::make_pair(PhantomNode{}, PhantomNode{});
}

BOOST_ASSERT(results.size() > 0);
return std::make_pair(MakePhantomNode(input_coordinate, results.front()).phantom_node,
MakePhantomNode(input_coordinate, results.back()).phantom_node);
}

// Returns the nearest phantom node. If this phantom node is not from a big component
// a second phantom node is return that is the nearest coordinate in a big component.
std::pair<PhantomNode, PhantomNode>
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate input_coordinate,
const double max_distance,
const int bearing,
const int bearing_range,
const Approach approach,
const bool use_all_edges) const
{
bool has_small_component = false;
bool has_big_component = false;
auto results = rtree.Nearest(
input_coordinate,
[this,
approach,
&input_coordinate,
bearing,
bearing_range,
&has_big_component,
&has_small_component,
&use_all_edges](const CandidateSegment &segment) {
auto use_segment =
(!has_small_component || (!has_big_component && !IsTinyComponent(segment)));
auto use_directions = std::make_pair(use_segment, use_segment);
const auto admissible_segments = CheckSegmentExclude(segment);

if (use_segment)
{
use_directions =
boolPairAnd(CheckSegmentBearing(segment, bearing, bearing_range),
HasValidEdge(segment, use_all_edges));
use_directions = boolPairAnd(use_directions, admissible_segments);
use_directions = boolPairAnd(
use_directions, CheckApproach(input_coordinate, segment, approach));

if (use_directions.first || use_directions.second)
{
has_big_component = has_big_component || !IsTinyComponent(segment);
has_small_component = has_small_component || IsTinyComponent(segment);
}
}

return use_directions;
},
[this, &has_big_component, max_distance, input_coordinate](
const std::size_t num_results, const CandidateSegment &segment) {
return (num_results > 0 && has_big_component) ||
CheckSegmentDistance(input_coordinate, segment, max_distance);
});

if (results.size() == 0)
{
return std::make_pair(PhantomNode{}, PhantomNode{});
}

BOOST_ASSERT(results.size() > 0);
return std::make_pair(MakePhantomNode(input_coordinate, results.front()).phantom_node,
MakePhantomNode(input_coordinate, results.back()).phantom_node);
return MakeAlternativeBigCandidates(input_coordinate, nearest_coord, results);
}

private:
PhantomCandidateAlternatives
MakeAlternativeBigCandidates(const util::Coordinate input_coordinate,
const Coordinate nearest_coord,
const std::vector<CandidateSegment> &results) const
{
if (results.size() == 0)
{
return std::make_pair(PhantomNodeCandidates{}, PhantomNodeCandidates{});
}

PhantomNodeCandidates nearest_phantoms;
PhantomNodeCandidates big_component_phantoms;

const auto add_to_candidates = [this, &input_coordinate](PhantomNodeCandidates &candidates,
const EdgeData data) {
auto candidate_it =
std::find_if(candidates.begin(), candidates.end(), [&](const PhantomNode &node) {
return data.forward_segment_id.id == node.forward_segment_id.id &&
data.reverse_segment_id.id == node.reverse_segment_id.id;
});
if (candidate_it == candidates.end())
{
// First candidate from this segment
candidates.push_back(MakePhantomNode(input_coordinate, data).phantom_node);
}
else
{
/**
* Second candidate from this segment (there can be at most two).
* We're snapping at the connection between two edges e1,e2 of the segment.
*
* | e1 | e2 |
* | --- f1 --> | --- f2 --> |
* | <-- r1 --- | <-- r2 --- |
*
* Most of the routing algorithms only support one candidate from each segment.
* Therefore, we have to choose between e1 and e2.
*
* It makes sense to pick one edge over another if that edge offers more
* opportunities to act as a source or target for a route.
*
* For consistency, we use the following logic:
* "Pick e1 unless it makes sense to choose e2"
*
* Representing edge enabled as a truth table:
* f1 | r1 | f2 | r2 | selected
* ____________________________
* t | t | t | t | e1
* t | t | t | f | e1
* t | t | f | t | e1
* t | f | t | t | e2
* t | f | t | f | e1
* t | f | f | t | e1
* f | t | t | t | e2
* f | t | t | f | e1
* f | t | f | t | e1
*
* The other rows in the truth table don't appear as we discard an edge if both
* forward and reverse are disabled.
*
**/
if (candidate_it->fwd_segment_position < data.fwd_segment_position)
{
if (data.forward_segment_id.enabled && data.reverse_segment_id.enabled &&
!(candidate_it->forward_segment_id.enabled &&
candidate_it->reverse_segment_id.enabled))
{
*candidate_it = MakePhantomNode(input_coordinate, data).phantom_node;
}
}
else
{
if (!candidate_it->forward_segment_id.enabled ||
!candidate_it->reverse_segment_id.enabled ||
(data.forward_segment_id.enabled && data.reverse_segment_id.enabled))
{
*candidate_it = MakePhantomNode(input_coordinate, data).phantom_node;
}
}
}
};

std::for_each(results.begin(), results.end(), [&](const CandidateSegment &segment) {
if (segment.fixed_projected_coordinate == nearest_coord)
{
add_to_candidates(nearest_phantoms, segment.data);
}
else
{
// Can only be from a big component for the alternative candidates
add_to_candidates(big_component_phantoms, segment.data);
}
});
return std::make_pair(std::move(nearest_phantoms), std::move(big_component_phantoms));
}
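The truth table reduces to a single predicate: the second edge wins only when it is enabled in both directions and the already-stored candidate is not. A compact, self-contained restatement with a hypothetical Edge struct (not the real EdgeData or PhantomNode fields):

```cpp
#include <cassert>

struct Edge { bool forward_enabled; bool reverse_enabled; };

// "Pick e1 unless it makes sense to choose e2": e2 is preferred only when it
// is bidirectional and e1 is not, matching the truth table above.
inline bool prefer_second_edge(const Edge &e1, const Edge &e2)
{
    const bool e1_bidirectional = e1.forward_enabled && e1.reverse_enabled;
    const bool e2_bidirectional = e2.forward_enabled && e2.reverse_enabled;
    return e2_bidirectional && !e1_bidirectional;
}

int main()
{
    assert(!prefer_second_edge({true, true}, {true, true}));   // t t t t -> e1
    assert(prefer_second_edge({true, false}, {true, true}));   // t f t t -> e2
    assert(!prefer_second_edge({true, false}, {true, false})); // t f t f -> e1
    return 0;
}
```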

std::vector<PhantomNodeWithDistance>
MakePhantomNodes(const util::Coordinate input_coordinate,
const std::vector<EdgeData> &results) const
const std::vector<CandidateSegment> &results) const
{
std::vector<PhantomNodeWithDistance> distance_and_phantoms(results.size());
std::transform(results.begin(),
results.end(),
distance_and_phantoms.begin(),
[this, &input_coordinate](const EdgeData &data) {
return MakePhantomNode(input_coordinate, data);
[this, &input_coordinate](const CandidateSegment &segment) {
return MakePhantomNode(input_coordinate, segment.data);
});
return distance_and_phantoms;
}
@ -580,9 +432,8 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
return transformed;
}

bool CheckSegmentDistance(const Coordinate input_coordinate,
const CandidateSegment &segment,
const double max_distance) const
double GetSegmentDistance(const Coordinate input_coordinate,
const CandidateSegment &segment) const
{
BOOST_ASSERT(segment.data.forward_segment_id.id != SPECIAL_SEGMENTID ||
!segment.data.forward_segment_id.enabled);
@ -593,7 +444,14 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
util::web_mercator::toWGS84(segment.fixed_projected_coordinate);

return util::coordinate_calculation::greatCircleDistance(input_coordinate,
wsg84_coordinate) > max_distance;
wsg84_coordinate);
}

bool CheckSegmentDistance(const Coordinate input_coordinate,
const CandidateSegment &segment,
const double max_distance) const
{
return GetSegmentDistance(input_coordinate, segment) > max_distance;
}

std::pair<bool, bool> CheckSegmentExclude(const CandidateSegment &segment) const
@ -616,8 +474,7 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
}

std::pair<bool, bool> CheckSegmentBearing(const CandidateSegment &segment,
const int filter_bearing,
const int filter_bearing_range) const
const Bearing filter_bearing) const
{
BOOST_ASSERT(segment.data.forward_segment_id.id != SPECIAL_SEGMENTID ||
!segment.data.forward_segment_id.enabled);
@ -633,11 +490,11 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery

const bool forward_bearing_valid =
util::bearing::CheckInBounds(
std::round(forward_edge_bearing), filter_bearing, filter_bearing_range) &&
std::round(forward_edge_bearing), filter_bearing.bearing, filter_bearing.range) &&
segment.data.forward_segment_id.enabled;
const bool backward_bearing_valid =
util::bearing::CheckInBounds(
std::round(backward_edge_bearing), filter_bearing, filter_bearing_range) &&
std::round(backward_edge_bearing), filter_bearing.bearing, filter_bearing.range) &&
segment.data.reverse_segment_id.enabled;
return std::make_pair(forward_bearing_valid, backward_bearing_valid);
}
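Passing a single Bearing struct keeps the value and its tolerance together; CheckInBounds then asks whether an edge heading falls inside the window of bearing plus or minus range degrees, wrapping around north. The helper below is an independent sketch of that wrap-around test, not the util::bearing implementation:

```cpp
#include <cassert>
#include <cmath>

// Is `value` within `range` degrees of `bearing`, treating headings as
// points on a 360-degree circle?
inline bool in_bearing_bounds(const double value, const double bearing, const double range)
{
    double diff = std::fmod(std::fabs(value - bearing), 360.0);
    if (diff > 180.0)
        diff = 360.0 - diff; // take the short way around the circle
    return diff <= range;
}

int main()
{
    assert(in_bearing_bounds(350.0, 10.0, 30.0));  // window wraps across north
    assert(!in_bearing_bounds(180.0, 10.0, 30.0)); // opposite heading rejected
    return 0;
}
```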
@ -645,7 +502,7 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
/**
* Checks to see if the edge weights are valid. We might have an edge,
* but a traffic update might set the speed to 0 (weight == INVALID_SEGMENT_WEIGHT).
* which means that this edge is not currently traversible. If this is the case,
* which means that this edge is not currently traversable. If this is the case,
* then we shouldn't snap to this edge.
*/
std::pair<bool, bool> HasValidEdge(const CandidateSegment &segment,
@ -682,7 +539,7 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
bool IsTinyComponent(const CandidateSegment &segment) const
{
const auto &data = segment.data;
BOOST_ASSERT(data.forward_segment_id.enabled);
BOOST_ASSERT(data.forward_segment_id.enabled || data.reverse_segment_id.enabled);
BOOST_ASSERT(data.forward_segment_id.id != SPECIAL_NODEID);
return datafacade.GetComponentID(data.forward_segment_id.id).is_tiny;
}

@ -47,8 +47,9 @@ namespace datafacade
class BaseDataFacade;
}

// Is returned as a temporary identifier for snapped coodinates
struct Hint
// SegmentHint represents an individual segment position that could be used
// as the waypoint for a given input location
struct SegmentHint
{
PhantomNode phantom;
std::uint32_t data_checksum;
@ -57,16 +58,31 @@ struct Hint
const datafacade::BaseDataFacade &facade) const;

std::string ToBase64() const;
static Hint FromBase64(const std::string &base64Hint);
static SegmentHint FromBase64(const std::string &base64Hint);

friend bool operator==(const Hint &, const Hint &);
friend std::ostream &operator<<(std::ostream &, const Hint &);
friend bool operator==(const SegmentHint &, const SegmentHint &);
friend bool operator!=(const SegmentHint &, const SegmentHint &);
friend std::ostream &operator<<(std::ostream &, const SegmentHint &);
};

static_assert(sizeof(Hint) == 80 + 4, "Hint is bigger than expected");
constexpr std::size_t ENCODED_HINT_SIZE = 112;
static_assert(ENCODED_HINT_SIZE / 4 * 3 >= sizeof(Hint),
"ENCODED_HINT_SIZE does not match size of Hint");
// Hint represents the suggested segment positions that could be used
// as the waypoint for a given input location
struct Hint
{
std::vector<SegmentHint> segment_hints;

bool IsValid(const util::Coordinate new_input_coordinates,
const datafacade::BaseDataFacade &facade) const;

std::string ToBase64() const;
static Hint FromBase64(const std::string &base64Hint);
};

static_assert(sizeof(SegmentHint) == 80 + 4, "Hint is bigger than expected");
constexpr std::size_t ENCODED_SEGMENT_HINT_SIZE = 112;
static_assert(ENCODED_SEGMENT_HINT_SIZE / 4 * 3 >= sizeof(SegmentHint),
"ENCODED_SEGMENT_HINT_SIZE does not match size of SegmentHint");
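The 112-character figure is base64 arithmetic: encoding n bytes takes 4 * ceil(n / 3) characters, and a SegmentHint is 84 bytes (an 80-byte PhantomNode plus the 4-byte checksum), so 4 * ceil(84 / 3) = 112. The static_assert above states the same relationship in reverse (112 / 4 * 3 >= 84). A quick standalone check:

```cpp
#include <cstddef>

// Base64 length for n input bytes: 4 output characters per 3-byte group, rounded up.
constexpr std::size_t base64_length(const std::size_t n) { return 4 * ((n + 2) / 3); }

static_assert(base64_length(84) == 112, "an 84-byte SegmentHint encodes to 112 characters");

int main() { return 0; }
```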
} // namespace engine
} // namespace osrm

@ -50,7 +50,7 @@ struct PathData
struct InternalRouteResult
{
std::vector<std::vector<PathData>> unpacked_path_segments;
std::vector<PhantomNodes> segment_end_coordinates;
std::vector<PhantomEndpoints> leg_endpoints;
std::vector<bool> source_traversed_in_reverse;
std::vector<bool> target_traversed_in_reverse;
EdgeWeight shortest_path_weight = INVALID_EDGE_WEIGHT;
@ -96,7 +96,7 @@ inline InternalRouteResult CollapseInternalRouteResult(const InternalRouteResult
if (leggy_result.unpacked_path_segments.size() == 1)
return leggy_result;

BOOST_ASSERT(leggy_result.segment_end_coordinates.size() > 1);
BOOST_ASSERT(leggy_result.leg_endpoints.size() > 1);

InternalRouteResult collapsed;
collapsed.shortest_path_weight = leggy_result.shortest_path_weight;
@ -107,7 +107,7 @@ inline InternalRouteResult CollapseInternalRouteResult(const InternalRouteResult
// start another leg vector
collapsed.unpacked_path_segments.push_back(leggy_result.unpacked_path_segments[i]);
// save new phantom node pair
collapsed.segment_end_coordinates.push_back(leggy_result.segment_end_coordinates[i]);
collapsed.leg_endpoints.push_back(leggy_result.leg_endpoints[i]);
// save data about phantom nodes
collapsed.source_traversed_in_reverse.push_back(
leggy_result.source_traversed_in_reverse[i]);
@ -119,9 +119,9 @@ inline InternalRouteResult CollapseInternalRouteResult(const InternalRouteResult
{
BOOST_ASSERT(!collapsed.unpacked_path_segments.empty());
auto &last_segment = collapsed.unpacked_path_segments.back();
BOOST_ASSERT(!collapsed.segment_end_coordinates.empty());
collapsed.segment_end_coordinates.back().target_phantom =
leggy_result.segment_end_coordinates[i].target_phantom;
BOOST_ASSERT(!collapsed.leg_endpoints.empty());
collapsed.leg_endpoints.back().target_phantom =
leggy_result.leg_endpoints[i].target_phantom;
collapsed.target_traversed_in_reverse.back() =
leggy_result.target_traversed_in_reverse[i];
// copy path segments into current leg
@ -138,19 +138,18 @@ inline InternalRouteResult CollapseInternalRouteResult(const InternalRouteResult
last_segment[old_size].weight_until_turn +=

leggy_result.source_traversed_in_reverse[i]
? leggy_result.segment_end_coordinates[i].source_phantom.reverse_weight
: leggy_result.segment_end_coordinates[i].source_phantom.forward_weight;
? leggy_result.leg_endpoints[i].source_phantom.reverse_weight
: leggy_result.leg_endpoints[i].source_phantom.forward_weight;

last_segment[old_size].duration_until_turn +=
leggy_result.source_traversed_in_reverse[i]
? leggy_result.segment_end_coordinates[i].source_phantom.reverse_duration
: leggy_result.segment_end_coordinates[i].source_phantom.forward_duration;
? leggy_result.leg_endpoints[i].source_phantom.reverse_duration
: leggy_result.leg_endpoints[i].source_phantom.forward_duration;
}
}
}

BOOST_ASSERT(collapsed.segment_end_coordinates.size() ==
collapsed.unpacked_path_segments.size());
BOOST_ASSERT(collapsed.leg_endpoints.size() == collapsed.unpacked_path_segments.size());
return collapsed;
}
} // namespace engine

@ -28,6 +28,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef OSRM_ENGINE_PHANTOM_NODES_H
#define OSRM_ENGINE_PHANTOM_NODES_H

#include <vector>

#include "extractor/travel_mode.hpp"

#include "util/bearing.hpp"
@ -223,7 +225,8 @@ struct PhantomNode

static_assert(sizeof(PhantomNode) == 80, "PhantomNode has more padding then expected");

using PhantomNodePair = std::pair<PhantomNode, PhantomNode>;
using PhantomNodeCandidates = std::vector<PhantomNode>;
using PhantomCandidateAlternatives = std::pair<PhantomNodeCandidates, PhantomNodeCandidates>;

struct PhantomNodeWithDistance
{
@ -231,11 +234,44 @@ struct PhantomNodeWithDistance
double distance;
};

struct PhantomNodes
struct PhantomEndpointCandidates
{
const PhantomNodeCandidates &source_phantoms;
const PhantomNodeCandidates &target_phantoms;
};

struct PhantomCandidatesToTarget
{
const PhantomNodeCandidates &source_phantoms;
const PhantomNode &target_phantom;
};

inline util::Coordinate candidatesSnappedLocation(const PhantomNodeCandidates &candidates)
{
BOOST_ASSERT(!candidates.empty());
return candidates.front().location;
}

inline util::Coordinate candidatesInputLocation(const PhantomNodeCandidates &candidates)
{
BOOST_ASSERT(!candidates.empty());
return candidates.front().input_location;
}

inline bool candidatesHaveComponent(const PhantomNodeCandidates &candidates, uint32_t component_id)
{
return std::any_of(
candidates.begin(), candidates.end(), [component_id](const PhantomNode &node) {
return node.component.id == component_id;
});
}

struct PhantomEndpoints
{
PhantomNode source_phantom;
PhantomNode target_phantom;
};
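candidatesHaveComponent is what lets the plugin layer ask "does this waypoint have a candidate in component X?" without caring which segment provides it. A standalone illustration with a stripped-down node type (the real PhantomNode carries far more state):

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

struct MiniNode
{
    struct { std::uint32_t id; bool is_tiny; } component;
};
using MiniCandidates = std::vector<MiniNode>;

// Same shape as candidatesHaveComponent above, applied to the mini type.
bool have_component(const MiniCandidates &candidates, const std::uint32_t component_id)
{
    return std::any_of(candidates.begin(), candidates.end(), [component_id](const MiniNode &node) {
        return node.component.id == component_id;
    });
}

int main()
{
    const MiniCandidates waypoint = {{{3, true}}, {{7, false}}};
    assert(have_component(waypoint, 7));
    assert(!have_component(waypoint, 42));
    return 0;
}
```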
} // namespace engine
} // namespace osrm

@ -99,64 +99,67 @@ class BasePlugin
return Status::Error;
}

// Decides whether to use the phantom node from a big or small component if both are found.
// Returns true if all phantom nodes are in the same component after snapping.
std::vector<PhantomNode>
SnapPhantomNodes(const std::vector<PhantomNodePair> &phantom_node_pair_list) const
// Decides whether to use the phantom candidates from big or small components if both are found.
std::vector<PhantomNodeCandidates>
SnapPhantomNodes(std::vector<PhantomCandidateAlternatives> alternatives_list) const
{
const auto check_component_id_is_tiny =
[](const std::pair<PhantomNode, PhantomNode> &phantom_pair) {
return phantom_pair.first.component.is_tiny;
};

// are all phantoms from a tiny cc?
const auto check_all_in_same_component =
[](const std::vector<std::pair<PhantomNode, PhantomNode>> &nodes) {
const auto component_id = nodes.front().first.component.id;

return std::all_of(std::begin(nodes),
std::end(nodes),
[component_id](const PhantomNodePair &phantom_pair) {
return component_id == phantom_pair.first.component.id;
const auto all_in_same_tiny_component =
[](const std::vector<PhantomCandidateAlternatives> &alts_list) {
return std::any_of(
alts_list.front().first.begin(),
alts_list.front().first.end(),
// For each of the first possible phantoms, check if all other
// positions in the list have a phantom from the same small component.
[&](const PhantomNode &phantom) {
if (!phantom.component.is_tiny)
{
return false;
}
const auto component_id = phantom.component.id;
return std::all_of(
std::next(alts_list.begin()),
std::end(alts_list),
[component_id](const PhantomCandidateAlternatives &alternatives) {
return candidatesHaveComponent(alternatives.first, component_id);
});
});
};

const auto fallback_to_big_component =
[](const std::pair<PhantomNode, PhantomNode> &phantom_pair) {
if (phantom_pair.first.component.is_tiny && phantom_pair.second.IsValid() &&
!phantom_pair.second.component.is_tiny)
{
return phantom_pair.second;
}
return phantom_pair.first;
// Move the alternative into the final list
const auto fallback_to_big_component = [](PhantomCandidateAlternatives &alternatives) {
auto no_big_alternative = alternatives.second.empty();
return no_big_alternative ? std::move(alternatives.first)
: std::move(alternatives.second);
};

const auto use_closed_phantom =
[](const std::pair<PhantomNode, PhantomNode> &phantom_pair) {
return phantom_pair.first;
// Move the alternative into the final list
const auto use_closed_phantom = [](PhantomCandidateAlternatives &alternatives) {
return std::move(alternatives.first);
};

const bool every_phantom_is_in_tiny_cc = std::all_of(std::begin(phantom_node_pair_list),
std::end(phantom_node_pair_list),
check_component_id_is_tiny);
auto all_in_same_component = check_all_in_same_component(phantom_node_pair_list);

std::vector<PhantomNode> snapped_phantoms;
snapped_phantoms.reserve(phantom_node_pair_list.size());
const auto no_alternatives =
std::all_of(alternatives_list.begin(),
alternatives_list.end(),
[](const PhantomCandidateAlternatives &alternatives) {
return alternatives.second.empty();
});

std::vector<PhantomNodeCandidates> snapped_phantoms;
snapped_phantoms.reserve(alternatives_list.size());
// The only case we don't snap to the big component if all phantoms are in the same small
// component
if (every_phantom_is_in_tiny_cc && all_in_same_component)
if (no_alternatives || all_in_same_tiny_component(alternatives_list))
{
std::transform(phantom_node_pair_list.begin(),
phantom_node_pair_list.end(),
std::transform(alternatives_list.begin(),
alternatives_list.end(),
std::back_inserter(snapped_phantoms),
use_closed_phantom);
}
else
{
std::transform(phantom_node_pair_list.begin(),
phantom_node_pair_list.end(),
std::transform(alternatives_list.begin(),
alternatives_list.end(),
std::back_inserter(snapped_phantoms),
fallback_to_big_component);
}
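The snapping decision is now made per waypoint list rather than per node: when no waypoint produced a big-component alternative, or all waypoints can be served from the same tiny component, the nearest candidates are kept; otherwise each waypoint falls back to its big-component list when it has one. A condensed sketch of that rule using stand-in types (not the real PhantomNodeCandidates):

```cpp
#include <algorithm>
#include <string>
#include <utility>
#include <vector>

// Stand-in for PhantomCandidateAlternatives: (nearest candidates, big-component candidates).
using Candidates = std::vector<std::string>;
using Alternatives = std::pair<Candidates, Candidates>;

std::vector<Candidates> snap(std::vector<Alternatives> alternatives,
                             const bool all_in_same_tiny_component)
{
    const bool no_alternatives =
        std::all_of(alternatives.begin(), alternatives.end(), [](const Alternatives &a) {
            return a.second.empty();
        });

    std::vector<Candidates> snapped;
    snapped.reserve(alternatives.size());
    for (auto &alt : alternatives)
    {
        // Keep the closest candidates unless a big-component fallback is both
        // available and actually needed.
        const bool use_nearest =
            no_alternatives || all_in_same_tiny_component || alt.second.empty();
        snapped.push_back(use_nearest ? std::move(alt.first) : std::move(alt.second));
    }
    return snapped;
}
```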
@ -181,35 +184,26 @@ class BasePlugin

for (const auto i : util::irange<std::size_t>(0UL, parameters.coordinates.size()))
{
Approach approach = engine::Approach::UNRESTRICTED;
if (use_approaches && parameters.approaches[i])
approach = parameters.approaches[i].get();

if (use_hints && parameters.hints[i] &&
if (use_hints && parameters.hints[i] && !parameters.hints[i]->segment_hints.empty() &&
parameters.hints[i]->IsValid(parameters.coordinates[i], facade))
{
for (const auto &seg_hint : parameters.hints[i]->segment_hints)
{
phantom_nodes[i].push_back(PhantomNodeWithDistance{
parameters.hints[i]->phantom,
seg_hint.phantom,
util::coordinate_calculation::greatCircleDistance(
parameters.coordinates[i], parameters.hints[i]->phantom.location),
});
parameters.coordinates[i], seg_hint.phantom.location)});
}
continue;
}
if (use_bearings && parameters.bearings[i])
{
phantom_nodes[i] =
facade.NearestPhantomNodesInRange(parameters.coordinates[i],
radiuses[i],
parameters.bearings[i]->bearing,
parameters.bearings[i]->range,
approach,
use_all_edges);
}
else
{

phantom_nodes[i] = facade.NearestPhantomNodesInRange(
parameters.coordinates[i], radiuses[i], approach, use_all_edges);
}
parameters.coordinates[i],
radiuses[i],
use_bearings ? parameters.bearings[i] : boost::none,
use_approaches && parameters.approaches[i] ? parameters.approaches[i].get()
: engine::Approach::UNRESTRICTED,
use_all_edges);
}

return phantom_nodes;
@ -218,7 +212,7 @@ class BasePlugin
std::vector<std::vector<PhantomNodeWithDistance>>
GetPhantomNodes(const datafacade::BaseDataFacade &facade,
const api::BaseParameters &parameters,
unsigned number_of_results) const
size_t number_of_results) const
{
std::vector<std::vector<PhantomNodeWithDistance>> phantom_nodes(
parameters.coordinates.size());
@ -231,56 +225,26 @@ class BasePlugin
BOOST_ASSERT(parameters.IsValid());
for (const auto i : util::irange<std::size_t>(0UL, parameters.coordinates.size()))
{
Approach approach = engine::Approach::UNRESTRICTED;
if (use_approaches && parameters.approaches[i])
approach = parameters.approaches[i].get();

if (use_hints && parameters.hints[i] &&
if (use_hints && parameters.hints[i] && !parameters.hints[i]->segment_hints.empty() &&
parameters.hints[i]->IsValid(parameters.coordinates[i], facade))
{
for (const auto &seg_hint : parameters.hints[i]->segment_hints)
{
phantom_nodes[i].push_back(PhantomNodeWithDistance{
parameters.hints[i]->phantom,
seg_hint.phantom,
util::coordinate_calculation::greatCircleDistance(
parameters.coordinates[i], parameters.hints[i]->phantom.location),
});
parameters.coordinates[i], seg_hint.phantom.location)});
}
continue;
}

if (use_bearings && parameters.bearings[i])
{
if (use_radiuses && parameters.radiuses[i])
{
phantom_nodes[i] = facade.NearestPhantomNodes(parameters.coordinates[i],
number_of_results,
*parameters.radiuses[i],
parameters.bearings[i]->bearing,
parameters.bearings[i]->range,
approach);
}
else
{
phantom_nodes[i] = facade.NearestPhantomNodes(parameters.coordinates[i],
number_of_results,
parameters.bearings[i]->bearing,
parameters.bearings[i]->range,
approach);
}
}
else
{
if (use_radiuses && parameters.radiuses[i])
{
phantom_nodes[i] = facade.NearestPhantomNodes(parameters.coordinates[i],
number_of_results,
*parameters.radiuses[i],
approach);
}
else
{
phantom_nodes[i] = facade.NearestPhantomNodes(
parameters.coordinates[i], number_of_results, approach);
}
}
parameters.coordinates[i],
number_of_results,
use_radiuses ? parameters.radiuses[i] : boost::none,
use_bearings ? parameters.bearings[i] : boost::none,
use_approaches && parameters.approaches[i] ? parameters.approaches[i].get()
: engine::Approach::UNRESTRICTED);

// we didn't find a fitting node, return error
if (phantom_nodes[i].empty())
@ -291,10 +255,11 @@ class BasePlugin
return phantom_nodes;
}

std::vector<PhantomNodePair> GetPhantomNodes(const datafacade::BaseDataFacade &facade,
std::vector<PhantomCandidateAlternatives>
GetPhantomNodes(const datafacade::BaseDataFacade &facade,
const api::BaseParameters &parameters) const
{
std::vector<PhantomNodePair> phantom_node_pairs(parameters.coordinates.size());
std::vector<PhantomCandidateAlternatives> alternatives(parameters.coordinates.size());

const bool use_hints = !parameters.hints.empty();
const bool use_bearings = !parameters.bearings.empty();
@ -305,87 +270,57 @@ class BasePlugin

BOOST_ASSERT(parameters.IsValid());
for (const auto i : util::irange<std::size_t>(0UL, parameters.coordinates.size()))
{
Approach approach = engine::Approach::UNRESTRICTED;
if (use_approaches && parameters.approaches[i])
approach = parameters.approaches[i].get();

if (use_hints && parameters.hints[i] &&
if (use_hints && parameters.hints[i] && !parameters.hints[i]->segment_hints.empty() &&
parameters.hints[i]->IsValid(parameters.coordinates[i], facade))
{
phantom_node_pairs[i].first = parameters.hints[i]->phantom;
std::transform(parameters.hints[i]->segment_hints.begin(),
parameters.hints[i]->segment_hints.end(),
std::back_inserter(alternatives[i].first),
[](const auto &seg_hint) { return seg_hint.phantom; });
// we don't set the second one - it will be marked as invalid
continue;
}

if (use_bearings && parameters.bearings[i])
{
if (use_radiuses && parameters.radiuses[i])
{
phantom_node_pairs[i] =
facade.NearestPhantomNodeWithAlternativeFromBigComponent(
alternatives[i] = facade.NearestCandidatesWithAlternativeFromBigComponent(
parameters.coordinates[i],
*parameters.radiuses[i],
parameters.bearings[i]->bearing,
parameters.bearings[i]->range,
approach,
use_radiuses ? parameters.radiuses[i] : boost::none,
use_bearings ? parameters.bearings[i] : boost::none,
use_approaches && parameters.approaches[i] ? parameters.approaches[i].get()
: engine::Approach::UNRESTRICTED,
use_all_edges);
}
else
{
phantom_node_pairs[i] =
facade.NearestPhantomNodeWithAlternativeFromBigComponent(
parameters.coordinates[i],
parameters.bearings[i]->bearing,
parameters.bearings[i]->range,
approach,
use_all_edges);
}
}
else
{
if (use_radiuses && parameters.radiuses[i])
{
phantom_node_pairs[i] =
facade.NearestPhantomNodeWithAlternativeFromBigComponent(
parameters.coordinates[i],
*parameters.radiuses[i],
approach,
use_all_edges);
}
else
{
phantom_node_pairs[i] =
facade.NearestPhantomNodeWithAlternativeFromBigComponent(
parameters.coordinates[i], approach, use_all_edges);
}
}

// we didn't find a fitting node, return error
if (!phantom_node_pairs[i].first.IsValid())
if (alternatives[i].first.empty())
{
// This ensures the list of phantom nodes only consists of valid nodes.
// We can use this on the call-site to detect an error.
phantom_node_pairs.pop_back();
alternatives.pop_back();
break;
}
BOOST_ASSERT(phantom_node_pairs[i].first.IsValid());
BOOST_ASSERT(phantom_node_pairs[i].second.IsValid());

BOOST_ASSERT(!alternatives[i].first.empty());
}
return phantom_node_pairs;
return alternatives;
}

std::string MissingPhantomErrorMessage(const std::vector<PhantomNodePair> &phantom_nodes,
std::string
MissingPhantomErrorMessage(const std::vector<PhantomCandidateAlternatives> &alternatives,
const std::vector<util::Coordinate> &coordinates) const
{
BOOST_ASSERT(phantom_nodes.size() < coordinates.size());
auto mismatch = std::mismatch(phantom_nodes.begin(),
phantom_nodes.end(),
BOOST_ASSERT(alternatives.size() < coordinates.size());
auto mismatch =
std::mismatch(alternatives.begin(),
alternatives.end(),
coordinates.begin(),
coordinates.end(),
[](const auto &phantom_node, const auto &coordinate) {
return phantom_node.first.input_location == coordinate;
[](const auto &candidates_pair, const auto &coordinate) {
return std::any_of(candidates_pair.first.begin(),
candidates_pair.first.end(),
[&](const auto &phantom) {
return phantom.input_location == coordinate;
});
std::size_t missing_index = std::distance(phantom_nodes.begin(), mismatch.first);
});
std::size_t missing_index = std::distance(alternatives.begin(), mismatch.first);
return std::string("Could not find a matching segment for coordinate ") +
std::to_string(missing_index);
}
@ -31,7 +31,7 @@ class TripPlugin final : public BasePlugin
const int max_locations_trip;

InternalRouteResult ComputeRoute(const RoutingAlgorithmsInterface &algorithms,
const std::vector<PhantomNode> &phantom_node_list,
const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<NodeID> &trip,
const bool roundtrip) const;


@ -20,18 +20,18 @@ class RoutingAlgorithmsInterface
{
public:
virtual InternalManyRoutesResult
AlternativePathSearch(const PhantomNodes &phantom_node_pair,
AlternativePathSearch(const PhantomEndpointCandidates &endpoint_candidates,
unsigned number_of_alternatives) const = 0;

virtual InternalRouteResult
ShortestPathSearch(const std::vector<PhantomNodes> &phantom_node_pair,
ShortestPathSearch(const std::vector<PhantomNodeCandidates> &waypoint_candidates,
const boost::optional<bool> continue_straight_at_waypoint) const = 0;

virtual InternalRouteResult
DirectShortestPathSearch(const PhantomNodes &phantom_node_pair) const = 0;
DirectShortestPathSearch(const PhantomEndpointCandidates &endpoint_candidates) const = 0;

virtual std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
ManyToManySearch(const std::vector<PhantomNode> &phantom_nodes,
ManyToManySearch(const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<std::size_t> &source_indices,
const std::vector<std::size_t> &target_indices,
const bool calculate_distance) const = 0;
@ -73,18 +73,18 @@ template <typename Algorithm> class RoutingAlgorithms final : public RoutingAlgo
virtual ~RoutingAlgorithms() = default;

InternalManyRoutesResult
AlternativePathSearch(const PhantomNodes &phantom_node_pair,
AlternativePathSearch(const PhantomEndpointCandidates &endpoint_candidates,
unsigned number_of_alternatives) const final override;

InternalRouteResult ShortestPathSearch(
const std::vector<PhantomNodes> &phantom_node_pair,
const std::vector<PhantomNodeCandidates> &waypoint_candidates,
const boost::optional<bool> continue_straight_at_waypoint) const final override;

InternalRouteResult
DirectShortestPathSearch(const PhantomNodes &phantom_nodes) const final override;
InternalRouteResult DirectShortestPathSearch(
const PhantomEndpointCandidates &endpoint_candidates) const final override;

virtual std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
ManyToManySearch(const std::vector<PhantomNode> &phantom_nodes,
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
ManyToManySearch(const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<std::size_t> &source_indices,
const std::vector<std::size_t> &target_indices,
const bool calculate_distance) const final override;
@ -150,28 +150,27 @@ template <typename Algorithm> class RoutingAlgorithms final : public RoutingAlgo
};

template <typename Algorithm>
InternalManyRoutesResult
RoutingAlgorithms<Algorithm>::AlternativePathSearch(const PhantomNodes &phantom_node_pair,
unsigned number_of_alternatives) const
InternalManyRoutesResult RoutingAlgorithms<Algorithm>::AlternativePathSearch(
const PhantomEndpointCandidates &endpoint_candidates, unsigned number_of_alternatives) const
{
return routing_algorithms::alternativePathSearch(
heaps, *facade, phantom_node_pair, number_of_alternatives);
heaps, *facade, endpoint_candidates, number_of_alternatives);
}

template <typename Algorithm>
InternalRouteResult RoutingAlgorithms<Algorithm>::ShortestPathSearch(
const std::vector<PhantomNodes> &phantom_node_pair,
const std::vector<PhantomNodeCandidates> &waypoint_candidates,
const boost::optional<bool> continue_straight_at_waypoint) const
{
return routing_algorithms::shortestPathSearch(
heaps, *facade, phantom_node_pair, continue_straight_at_waypoint);
heaps, *facade, waypoint_candidates, continue_straight_at_waypoint);
}

template <typename Algorithm>
InternalRouteResult
RoutingAlgorithms<Algorithm>::DirectShortestPathSearch(const PhantomNodes &phantom_nodes) const
InternalRouteResult RoutingAlgorithms<Algorithm>::DirectShortestPathSearch(
const PhantomEndpointCandidates &endpoint_candidates) const
{
return routing_algorithms::directShortestPathSearch(heaps, *facade, phantom_nodes);
return routing_algorithms::directShortestPathSearch(heaps, *facade, endpoint_candidates);
}

template <typename Algorithm>
@ -193,30 +192,31 @@ inline routing_algorithms::SubMatchingList RoutingAlgorithms<Algorithm>::MapMatc

template <typename Algorithm>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
RoutingAlgorithms<Algorithm>::ManyToManySearch(const std::vector<PhantomNode> &phantom_nodes,
RoutingAlgorithms<Algorithm>::ManyToManySearch(
const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<std::size_t> &_source_indices,
const std::vector<std::size_t> &_target_indices,
const bool calculate_distance) const
{
BOOST_ASSERT(!phantom_nodes.empty());
BOOST_ASSERT(!candidates_list.empty());

auto source_indices = _source_indices;
auto target_indices = _target_indices;

if (source_indices.empty())
{
source_indices.resize(phantom_nodes.size());
source_indices.resize(candidates_list.size());
std::iota(source_indices.begin(), source_indices.end(), 0);
}
if (target_indices.empty())
{
target_indices.resize(phantom_nodes.size());
target_indices.resize(candidates_list.size());
std::iota(target_indices.begin(), target_indices.end(), 0);
}

return routing_algorithms::manyToManySearch(heaps,
*facade,
phantom_nodes,
candidates_list,
std::move(source_indices),
std::move(target_indices),
calculate_distance);
|
||||
|
||||
@ -18,12 +18,12 @@ namespace routing_algorithms
|
||||
|
||||
InternalManyRoutesResult alternativePathSearch(SearchEngineData<ch::Algorithm> &search_engine_data,
|
||||
const DataFacade<ch::Algorithm> &facade,
|
||||
const PhantomNodes &phantom_node_pair,
|
||||
const PhantomEndpointCandidates &endpoint_candidates,
|
||||
unsigned number_of_alternatives);
|
||||
|
||||
InternalManyRoutesResult alternativePathSearch(SearchEngineData<mld::Algorithm> &search_engine_data,
|
||||
const DataFacade<mld::Algorithm> &facade,
|
||||
const PhantomNodes &phantom_node_pair,
|
||||
const PhantomEndpointCandidates &endpoint_candidates,
|
||||
unsigned number_of_alternatives);
|
||||
|
||||
} // namespace routing_algorithms
|
||||
|
||||
@ -24,7 +24,7 @@ namespace routing_algorithms
|
||||
template <typename Algorithm>
|
||||
InternalRouteResult directShortestPathSearch(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
const PhantomNodes &phantom_nodes);
|
||||
const PhantomEndpointCandidates &endpoint_candidates);
|
||||
|
||||
} // namespace routing_algorithms
|
||||
} // namespace engine
|
||||
|
||||
@ -94,7 +94,7 @@ template <typename Algorithm>
|
||||
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
|
||||
manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
const std::vector<PhantomNode> &phantom_nodes,
|
||||
const std::vector<PhantomNodeCandidates> &candidates_list,
|
||||
const std::vector<std::size_t> &source_indices,
|
||||
const std::vector<std::size_t> &target_indices,
|
||||
const bool calculate_distance);
|
||||
|
||||
@ -34,20 +34,12 @@ namespace engine

namespace routing_algorithms
{
static constexpr bool FORWARD_DIRECTION = true;
static constexpr bool REVERSE_DIRECTION = false;
static constexpr bool DO_NOT_FORCE_LOOPS = false;

bool needsLoopForward(const PhantomNode &source_phantom, const PhantomNode &target_phantom);
bool needsLoopBackwards(const PhantomNode &source_phantom, const PhantomNode &target_phantom);

bool needsLoopForward(const PhantomNodes &phantoms);
bool needsLoopBackwards(const PhantomNodes &phantoms);

template <typename Heap>
void insertNodesInHeaps(Heap &forward_heap, Heap &reverse_heap, const PhantomNodes &nodes)
namespace details
{
template <typename Heap>
void insertSourceInForwardHeap(Heap &forward_heap, const PhantomNode &source)
{
const auto &source = nodes.source_phantom;
if (source.IsValidForwardSource())
{
forward_heap.Insert(source.forward_segment_id.id,
@ -61,8 +53,11 @@ void insertNodesInHeaps(Heap &forward_heap, Heap &reverse_heap, const PhantomNod
-source.GetReverseWeightPlusOffset(),
source.reverse_segment_id.id);
}
}

const auto &target = nodes.target_phantom;
template <typename Heap>
void insertTargetInReverseHeap(Heap &reverse_heap, const PhantomNode &target)
{
if (target.IsValidForwardTarget())
{
reverse_heap.Insert(target.forward_segment_id.id,
@ -77,9 +72,57 @@ void insertNodesInHeaps(Heap &forward_heap, Heap &reverse_heap, const PhantomNod
target.reverse_segment_id.id);
}
}
} // namespace details
static constexpr bool FORWARD_DIRECTION = true;
static constexpr bool REVERSE_DIRECTION = false;

// Identify nodes in the forward(reverse) search direction that will require loop forcing
// e.g. if source and destination nodes are on the same segment.
std::vector<NodeID> getForwardLoopNodes(const PhantomEndpointCandidates &candidates);
std::vector<NodeID> getForwardLoopNodes(const PhantomCandidatesToTarget &candidates);
std::vector<NodeID> getBackwardLoopNodes(const PhantomEndpointCandidates &candidates);
std::vector<NodeID> getBackwardLoopNodes(const PhantomCandidatesToTarget &candidates);

// Find the specific phantom node endpoints for a given path from a list of candidates.
PhantomEndpoints endpointsFromCandidates(const PhantomEndpointCandidates &candidates,
const std::vector<NodeID> &path);

template <typename HeapNodeT>
inline bool force_loop(const std::vector<NodeID> &force_nodes, const HeapNodeT &heap_node)
{
// if loops are forced, they are so at the source
return !force_nodes.empty() &&
std::find(force_nodes.begin(), force_nodes.end(), heap_node.node) != force_nodes.end() &&
heap_node.data.parent == heap_node.node;
}
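
The per-node lists replace the old boolean force_loop_forward/force_loop_reverse flags, so a candidate set can force loops for some of its segments only. A minimal, self-contained sketch of the force_loop check, assuming a toy heap node type (ToyHeapNode and the NodeID alias are illustrative stand-ins, not OSRM's real query-heap node): a loop is only forced when the settled node is one of the designated force nodes and is still its own parent, i.e. the searches meet directly on a source segment.

    // Illustrative sketch only: ToyHeapNode and the NodeID alias are stand-ins
    // for OSRM's real query-heap node type; only the force_loop logic is as above.
    #include <algorithm>
    #include <cassert>
    #include <cstdint>
    #include <vector>

    using NodeID = std::uint32_t;

    struct ToyHeapNode
    {
        NodeID node;
        struct
        {
            NodeID parent;
        } data;
    };

    template <typename HeapNodeT>
    bool force_loop(const std::vector<NodeID> &force_nodes, const HeapNodeT &heap_node)
    {
        // A loop is only forced at a source node: the node must be in the force list
        // and must still be its own parent (the search has not left it yet).
        return !force_nodes.empty() &&
               std::find(force_nodes.begin(), force_nodes.end(), heap_node.node) !=
                   force_nodes.end() &&
               heap_node.data.parent == heap_node.node;
    }

    int main()
    {
        const std::vector<NodeID> force_nodes{42};
        assert(force_loop(force_nodes, ToyHeapNode{42, {42}}));  // source node, loop forced
        assert(!force_loop(force_nodes, ToyHeapNode{42, {7}}));  // already expanded via node 7
        assert(!force_loop(force_nodes, ToyHeapNode{13, {13}})); // not a force node
        return 0;
    }
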

template <typename Heap>
void insertNodesInHeaps(Heap &forward_heap, Heap &reverse_heap, const PhantomEndpoints &endpoints)
{
details::insertSourceInForwardHeap(forward_heap, endpoints.source_phantom);
details::insertTargetInReverseHeap(reverse_heap, endpoints.target_phantom);
}

template <typename Heap>
void insertNodesInHeaps(Heap &forward_heap,
Heap &reverse_heap,
const PhantomEndpointCandidates &endpoint_candidates)
{
for (const auto &source : endpoint_candidates.source_phantoms)
{
details::insertSourceInForwardHeap(forward_heap, source);
}

for (const auto &target : endpoint_candidates.target_phantoms)
{
details::insertTargetInReverseHeap(reverse_heap, target);
}
}
|
||||
|
||||
template <typename ManyToManyQueryHeap>
|
||||
void insertSourceInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_node)
|
||||
void insertSourceInHeap(ManyToManyQueryHeap &heap, const PhantomNodeCandidates &source_candidates)
|
||||
{
|
||||
for (const auto &phantom_node : source_candidates)
|
||||
{
|
||||
if (phantom_node.IsValidForwardSource())
|
||||
{
|
||||
@ -98,9 +141,12 @@ void insertSourceInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_no
|
||||
-phantom_node.GetReverseDistance()});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template <typename ManyToManyQueryHeap>
|
||||
void insertTargetInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_node)
|
||||
void insertTargetInHeap(ManyToManyQueryHeap &heap, const PhantomNodeCandidates &target_candidates)
|
||||
{
|
||||
for (const auto &phantom_node : target_candidates)
|
||||
{
|
||||
if (phantom_node.IsValidForwardTarget())
|
||||
{
|
||||
@ -119,10 +165,11 @@ void insertTargetInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_no
|
||||
phantom_node.GetReverseDistance()});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template <typename FacadeT>
|
||||
void annotatePath(const FacadeT &facade,
|
||||
const PhantomNodes &phantom_node_pair,
|
||||
const PhantomEndpoints &endpoints,
|
||||
const std::vector<NodeID> &unpacked_nodes,
|
||||
const std::vector<EdgeID> &unpacked_edges,
|
||||
std::vector<PathData> &unpacked_path)
|
||||
@ -133,14 +180,14 @@ void annotatePath(const FacadeT &facade,
|
||||
const auto source_node_id = unpacked_nodes.front();
|
||||
const auto target_node_id = unpacked_nodes.back();
|
||||
const bool start_traversed_in_reverse =
|
||||
phantom_node_pair.source_phantom.forward_segment_id.id != source_node_id;
|
||||
endpoints.source_phantom.forward_segment_id.id != source_node_id;
|
||||
const bool target_traversed_in_reverse =
|
||||
phantom_node_pair.target_phantom.forward_segment_id.id != target_node_id;
|
||||
endpoints.target_phantom.forward_segment_id.id != target_node_id;
|
||||
|
||||
BOOST_ASSERT(phantom_node_pair.source_phantom.forward_segment_id.id == source_node_id ||
|
||||
phantom_node_pair.source_phantom.reverse_segment_id.id == source_node_id);
|
||||
BOOST_ASSERT(phantom_node_pair.target_phantom.forward_segment_id.id == target_node_id ||
|
||||
phantom_node_pair.target_phantom.reverse_segment_id.id == target_node_id);
|
||||
BOOST_ASSERT(endpoints.source_phantom.forward_segment_id.id == source_node_id ||
|
||||
endpoints.source_phantom.reverse_segment_id.id == source_node_id);
|
||||
BOOST_ASSERT(endpoints.target_phantom.forward_segment_id.id == target_node_id ||
|
||||
endpoints.target_phantom.reverse_segment_id.id == target_node_id);
|
||||
|
||||
// datastructures to hold extracted data from geometry
|
||||
std::vector<NodeID> id_vector;
|
||||
@ -180,8 +227,8 @@ void annotatePath(const FacadeT &facade,
|
||||
const auto geometry_index = facade.GetGeometryIndex(node_id);
|
||||
get_segment_geometry(geometry_index);
|
||||
|
||||
BOOST_ASSERT(id_vector.size() > 0);
|
||||
BOOST_ASSERT(datasource_vector.size() > 0);
|
||||
BOOST_ASSERT(!id_vector.empty());
|
||||
BOOST_ASSERT(!datasource_vector.empty());
|
||||
BOOST_ASSERT(weight_vector.size() + 1 == id_vector.size());
|
||||
BOOST_ASSERT(duration_vector.size() + 1 == id_vector.size());
|
||||
|
||||
@ -190,11 +237,11 @@ void annotatePath(const FacadeT &facade,
|
||||
std::size_t start_index = 0;
|
||||
if (is_first_segment)
|
||||
{
|
||||
unsigned short segment_position = phantom_node_pair.source_phantom.fwd_segment_position;
|
||||
unsigned short segment_position = endpoints.source_phantom.fwd_segment_position;
|
||||
if (start_traversed_in_reverse)
|
||||
{
|
||||
segment_position = weight_vector.size() -
|
||||
phantom_node_pair.source_phantom.fwd_segment_position - 1;
|
||||
segment_position =
|
||||
weight_vector.size() - endpoints.source_phantom.fwd_segment_position - 1;
|
||||
}
|
||||
BOOST_ASSERT(segment_position >= 0);
|
||||
start_index = static_cast<std::size_t>(segment_position);
|
||||
@ -214,7 +261,7 @@ void annotatePath(const FacadeT &facade,
|
||||
datasource_vector[segment_idx],
|
||||
boost::none});
|
||||
}
|
||||
BOOST_ASSERT(unpacked_path.size() > 0);
|
||||
BOOST_ASSERT(!unpacked_path.empty());
|
||||
|
||||
const auto turn_duration = facade.GetDurationPenaltyForEdgeID(turn_id);
|
||||
const auto turn_weight = facade.GetWeightPenaltyForEdgeID(turn_id);
|
||||
@ -237,19 +284,17 @@ void annotatePath(const FacadeT &facade,
|
||||
{
|
||||
if (is_local_path)
|
||||
{
|
||||
start_index =
|
||||
weight_vector.size() - phantom_node_pair.source_phantom.fwd_segment_position - 1;
|
||||
start_index = weight_vector.size() - endpoints.source_phantom.fwd_segment_position - 1;
|
||||
}
|
||||
end_index =
|
||||
weight_vector.size() - phantom_node_pair.target_phantom.fwd_segment_position - 1;
|
||||
end_index = weight_vector.size() - endpoints.target_phantom.fwd_segment_position - 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (is_local_path)
|
||||
{
|
||||
start_index = phantom_node_pair.source_phantom.fwd_segment_position;
|
||||
start_index = endpoints.source_phantom.fwd_segment_position;
|
||||
}
|
||||
end_index = phantom_node_pair.target_phantom.fwd_segment_position;
|
||||
end_index = endpoints.target_phantom.fwd_segment_position;
|
||||
}
|
||||
|
||||
// Given the following compressed geometry:
|
||||
@ -277,11 +322,11 @@ void annotatePath(const FacadeT &facade,
|
||||
if (!unpacked_path.empty())
|
||||
{
|
||||
const auto source_weight = start_traversed_in_reverse
|
||||
? phantom_node_pair.source_phantom.reverse_weight
|
||||
: phantom_node_pair.source_phantom.forward_weight;
|
||||
? endpoints.source_phantom.reverse_weight
|
||||
: endpoints.source_phantom.forward_weight;
|
||||
const auto source_duration = start_traversed_in_reverse
|
||||
? phantom_node_pair.source_phantom.reverse_duration
|
||||
: phantom_node_pair.source_phantom.forward_duration;
|
||||
? endpoints.source_phantom.reverse_duration
|
||||
: endpoints.source_phantom.forward_duration;
|
||||
// The above code will create segments for (v, w), (w,x), (x, y) and (y, Z).
|
||||
// However the first segment duration needs to be adjusted to the fact that the source
|
||||
// phantom is in the middle of the segment. We do this by subtracting v--s from the
|
||||
@ -358,12 +403,11 @@ double getPathDistance(const DataFacade<Algorithm> &facade,
|
||||
template <typename AlgorithmT>
|
||||
InternalRouteResult extractRoute(const DataFacade<AlgorithmT> &facade,
|
||||
const EdgeWeight weight,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const PhantomEndpointCandidates &endpoint_candidates,
|
||||
const std::vector<NodeID> &unpacked_nodes,
|
||||
const std::vector<EdgeID> &unpacked_edges)
|
||||
{
|
||||
InternalRouteResult raw_route_data;
|
||||
raw_route_data.segment_end_coordinates = {phantom_nodes};
|
||||
|
||||
// No path found for both target nodes?
|
||||
if (INVALID_EDGE_WEIGHT == weight)
|
||||
@ -371,15 +415,18 @@ InternalRouteResult extractRoute(const DataFacade<AlgorithmT> &facade,
|
||||
return raw_route_data;
|
||||
}
|
||||
|
||||
auto phantom_endpoints = endpointsFromCandidates(endpoint_candidates, unpacked_nodes);
|
||||
raw_route_data.leg_endpoints = {phantom_endpoints};
|
||||
|
||||
raw_route_data.shortest_path_weight = weight;
|
||||
raw_route_data.unpacked_path_segments.resize(1);
|
||||
raw_route_data.source_traversed_in_reverse.push_back(
|
||||
(unpacked_nodes.front() != phantom_nodes.source_phantom.forward_segment_id.id));
|
||||
(unpacked_nodes.front() != phantom_endpoints.source_phantom.forward_segment_id.id));
|
||||
raw_route_data.target_traversed_in_reverse.push_back(
|
||||
(unpacked_nodes.back() != phantom_nodes.target_phantom.forward_segment_id.id));
|
||||
(unpacked_nodes.back() != phantom_endpoints.target_phantom.forward_segment_id.id));
|
||||
|
||||
annotatePath(facade,
|
||||
phantom_nodes,
|
||||
phantom_endpoints,
|
||||
unpacked_nodes,
|
||||
unpacked_edges,
|
||||
raw_route_data.unpacked_path_segments.front());
|
||||
|
||||
@ -120,8 +120,8 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
NodeID &middle_node_id,
|
||||
EdgeWeight &upper_bound,
|
||||
EdgeWeight min_edge_offset,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse)
|
||||
const std::vector<NodeID> &force_loop_forward_nodes,
|
||||
const std::vector<NodeID> &force_loop_reverse_nodes)
|
||||
{
|
||||
auto heapNode = forward_heap.DeleteMinGetHeapNode();
|
||||
const auto reverseHeapNode = reverse_heap.GetHeapNodeIfWasInserted(heapNode.node);
|
||||
@ -131,9 +131,8 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
const EdgeWeight new_weight = reverseHeapNode->weight + heapNode.weight;
|
||||
if (new_weight < upper_bound)
|
||||
{
|
||||
// if loops are forced, they are so at the source
|
||||
if ((force_loop_forward && heapNode.data.parent == heapNode.node) ||
|
||||
(force_loop_reverse && reverseHeapNode->data.parent == heapNode.node) ||
|
||||
if (force_loop(force_loop_forward_nodes, heapNode) ||
|
||||
force_loop(force_loop_reverse_nodes, heapNode) ||
|
||||
// in this case we are looking at a bi-directional way where the source
|
||||
// and target phantom are on the same edge based node
|
||||
new_weight < 0)
|
||||
@ -398,7 +397,7 @@ template <typename RandomIter, typename FacadeT>
|
||||
void unpackPath(const FacadeT &facade,
|
||||
RandomIter packed_path_begin,
|
||||
RandomIter packed_path_end,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const PhantomEndpoints &route_endpoints,
|
||||
std::vector<PathData> &unpacked_path)
|
||||
{
|
||||
const auto nodes_number = std::distance(packed_path_begin, packed_path_end);
|
||||
@ -422,7 +421,7 @@ void unpackPath(const FacadeT &facade,
|
||||
});
|
||||
}
|
||||
|
||||
annotatePath(facade, phantom_nodes, unpacked_nodes, unpacked_edges, unpacked_path);
|
||||
annotatePath(facade, route_endpoints, unpacked_nodes, unpacked_edges, unpacked_path);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -467,12 +466,35 @@ void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
std::int32_t &weight,
|
||||
EdgeWeight &weight,
|
||||
std::vector<NodeID> &packed_leg,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const int duration_upper_bound = INVALID_EDGE_WEIGHT);
|
||||
const std::vector<NodeID> &force_loop_forward_node,
|
||||
const std::vector<NodeID> &force_loop_reverse_node,
|
||||
const EdgeWeight duration_upper_bound = INVALID_EDGE_WEIGHT);
|
||||
|
||||
template <typename PhantomEndpointT>
|
||||
void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
EdgeWeight &weight,
|
||||
std::vector<NodeID> &packed_leg,
|
||||
const std::vector<NodeID> &force_loop_forward_node,
|
||||
const std::vector<NodeID> &force_loop_reverse_node,
|
||||
const PhantomEndpointT & /*endpoints*/,
|
||||
const EdgeWeight duration_upper_bound = INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
// Avoid templating the CH search implementations.
|
||||
return search(engine_working_data,
|
||||
facade,
|
||||
forward_heap,
|
||||
reverse_heap,
|
||||
weight,
|
||||
packed_leg,
|
||||
force_loop_forward_node,
|
||||
force_loop_reverse_node,
|
||||
duration_upper_bound);
|
||||
}
|
||||
|
||||
// Requires the heaps for be empty
|
||||
// If heaps should be adjusted to be initialized outside of this function,
|
||||
|
||||
@ -33,24 +33,75 @@ namespace
|
||||
template <typename MultiLevelPartition>
|
||||
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
NodeID node,
|
||||
const PhantomNodes &phantom_nodes)
|
||||
const PhantomNode &source,
|
||||
const PhantomNode &target)
|
||||
{
|
||||
auto level = [&partition, node](const SegmentID &source, const SegmentID &target) {
|
||||
if (source.enabled && target.enabled)
|
||||
return partition.GetQueryLevel(source.id, target.id, node);
|
||||
return INVALID_LEVEL_ID;
|
||||
};
|
||||
return std::min(std::min(level(phantom_nodes.source_phantom.forward_segment_id,
|
||||
phantom_nodes.target_phantom.forward_segment_id),
|
||||
level(phantom_nodes.source_phantom.forward_segment_id,
|
||||
phantom_nodes.target_phantom.reverse_segment_id)),
|
||||
std::min(level(phantom_nodes.source_phantom.reverse_segment_id,
|
||||
phantom_nodes.target_phantom.forward_segment_id),
|
||||
level(phantom_nodes.source_phantom.reverse_segment_id,
|
||||
phantom_nodes.target_phantom.reverse_segment_id)));
|
||||
|
||||
return std::min(std::min(level(source.forward_segment_id, target.forward_segment_id),
|
||||
level(source.forward_segment_id, target.reverse_segment_id)),
|
||||
std::min(level(source.reverse_segment_id, target.forward_segment_id),
|
||||
level(source.reverse_segment_id, target.reverse_segment_id)));
|
||||
}
|
||||
|
||||
inline bool checkParentCellRestriction(CellID, const PhantomNodes &) { return true; }
|
||||
template <typename MultiLevelPartition>
|
||||
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
NodeID node,
|
||||
const PhantomEndpoints &endpoints)
|
||||
{
|
||||
return getNodeQueryLevel(partition, node, endpoints.source_phantom, endpoints.target_phantom);
|
||||
}
|
||||
|
||||
template <typename MultiLevelPartition>
|
||||
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
NodeID node,
|
||||
const PhantomCandidatesToTarget &endpoint_candidates)
|
||||
{
|
||||
auto min_level = std::accumulate(
|
||||
endpoint_candidates.source_phantoms.begin(),
|
||||
endpoint_candidates.source_phantoms.end(),
|
||||
INVALID_LEVEL_ID,
|
||||
[&](LevelID current_level, const PhantomNode &source) {
|
||||
return std::min(
|
||||
current_level,
|
||||
getNodeQueryLevel(partition, node, source, endpoint_candidates.target_phantom));
|
||||
});
|
||||
return min_level;
|
||||
}
|
||||
|
||||
template <typename MultiLevelPartition>
|
||||
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
NodeID node,
|
||||
const PhantomEndpointCandidates &endpoint_candidates)
|
||||
{
|
||||
auto min_level = std::accumulate(
|
||||
endpoint_candidates.source_phantoms.begin(),
|
||||
endpoint_candidates.source_phantoms.end(),
|
||||
INVALID_LEVEL_ID,
|
||||
[&](LevelID level_1, const PhantomNode &source) {
|
||||
return std::min(
|
||||
level_1,
|
||||
std::accumulate(endpoint_candidates.target_phantoms.begin(),
|
||||
endpoint_candidates.target_phantoms.end(),
|
||||
level_1,
|
||||
[&](LevelID level_2, const PhantomNode &target) {
|
||||
return std::min(
|
||||
level_2,
|
||||
getNodeQueryLevel(partition, node, source, target));
|
||||
}));
|
||||
});
|
||||
return min_level;
|
||||
}
|
||||
|
||||
template <typename PhantomCandidateT>
|
||||
inline bool checkParentCellRestriction(CellID, const PhantomCandidateT &)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Restricted search (Args is LevelID, CellID):
|
||||
// * use the fixed level for queries
|
||||
@ -72,17 +123,23 @@ inline bool checkParentCellRestriction(CellID cell, LevelID, CellID parent)
|
||||
template <typename MultiLevelPartition>
|
||||
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
const NodeID node,
|
||||
const PhantomNode &phantom_node)
|
||||
const PhantomNodeCandidates &candidates)
|
||||
{
|
||||
auto highest_diffrent_level = [&partition, node](const SegmentID &phantom_node) {
|
||||
if (phantom_node.enabled)
|
||||
return partition.GetHighestDifferentLevel(phantom_node.id, node);
|
||||
return INVALID_LEVEL_ID;
|
||||
auto highest_different_level = [&partition, node](const SegmentID &segment) {
|
||||
return segment.enabled ? partition.GetHighestDifferentLevel(segment.id, node)
|
||||
: INVALID_LEVEL_ID;
|
||||
};
|
||||
|
||||
const auto node_level = std::min(highest_diffrent_level(phantom_node.forward_segment_id),
|
||||
highest_diffrent_level(phantom_node.reverse_segment_id));
|
||||
|
||||
auto node_level =
|
||||
std::accumulate(candidates.begin(),
|
||||
candidates.end(),
|
||||
INVALID_LEVEL_ID,
|
||||
[&](LevelID current_level, const PhantomNode &phantom_node) {
|
||||
auto highest_level =
|
||||
std::min(highest_different_level(phantom_node.forward_segment_id),
|
||||
highest_different_level(phantom_node.reverse_segment_id));
|
||||
return std::min(current_level, highest_level);
|
||||
});
|
||||
return node_level;
|
||||
}
|
||||
|
||||
@ -92,31 +149,17 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
template <typename MultiLevelPartition>
|
||||
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
|
||||
NodeID node,
|
||||
const std::vector<PhantomNode> &phantom_nodes,
|
||||
const std::vector<PhantomNodeCandidates> &candidates_list,
|
||||
const std::size_t phantom_index,
|
||||
const std::vector<std::size_t> &phantom_indices)
|
||||
{
|
||||
auto min_level = [&partition, node](const PhantomNode &phantom_node) {
|
||||
const auto &forward_segment = phantom_node.forward_segment_id;
|
||||
const auto forward_level =
|
||||
forward_segment.enabled ? partition.GetHighestDifferentLevel(node, forward_segment.id)
|
||||
: INVALID_LEVEL_ID;
|
||||
|
||||
const auto &reverse_segment = phantom_node.reverse_segment_id;
|
||||
const auto reverse_level =
|
||||
reverse_segment.enabled ? partition.GetHighestDifferentLevel(node, reverse_segment.id)
|
||||
: INVALID_LEVEL_ID;
|
||||
|
||||
return std::min(forward_level, reverse_level);
|
||||
};
|
||||
|
||||
// Get minimum level over all phantoms of the highest different level with respect to node
// This is equivalent to min_{∀ source, target} partition.GetQueryLevel(source, node, target)
auto result = min_level(phantom_nodes[phantom_index]);
for (const auto &index : phantom_indices)
{
result = std::min(result, min_level(phantom_nodes[index]));
}
auto init = getNodeQueryLevel(partition, node, candidates_list[phantom_index]);
auto result = std::accumulate(
phantom_indices.begin(), phantom_indices.end(), init, [&](LevelID level, size_t index) {
return std::min(level, getNodeQueryLevel(partition, node, candidates_list[index]));
});
return result;
}
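
A small, self-contained sketch of the accumulate-based fold used above, assuming a made-up levelOf() helper in place of the real getNodeQueryLevel(partition, node, candidates_list[index]) call: the result is simply the minimum level over the candidate list at phantom_index and all candidate lists selected by phantom_indices.

    // Illustrative sketch only: levelOf() is a made-up stand-in for the real
    // getNodeQueryLevel(partition, node, candidates_list[index]) call.
    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <numeric>
    #include <vector>

    using LevelID = std::uint8_t;

    // Pretend each candidate list already has a per-node query level.
    LevelID levelOf(std::size_t index, const std::vector<LevelID> &levels) { return levels[index]; }

    int main()
    {
        const std::vector<LevelID> levels{3, 1, 4, 2};
        const std::size_t phantom_index = 0;
        const std::vector<std::size_t> phantom_indices{2, 3};

        // Start from the level of the candidate list at phantom_index and fold
        // std::min over the candidate lists selected by phantom_indices.
        const LevelID init = levelOf(phantom_index, levels);
        const LevelID result = std::accumulate(
            phantom_indices.begin(), phantom_indices.end(), init,
            [&](LevelID level, std::size_t index) { return std::min(level, levelOf(index, levels)); });

        std::cout << static_cast<int>(result) << '\n'; // prints 2 == min(levels[0], levels[2], levels[3])
        return 0;
    }
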
|
||||
} // namespace
|
||||
@ -229,7 +272,7 @@ template <bool DIRECTION, typename Algorithm, typename... Args>
|
||||
void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
const typename SearchEngineData<Algorithm>::QueryHeap::HeapNode &heapNode,
|
||||
Args... args)
|
||||
const Args &... args)
|
||||
{
|
||||
const auto &partition = facade.GetMultiLevelPartition();
|
||||
const auto &cells = facade.GetCellStorage();
|
||||
@ -344,9 +387,9 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
NodeID &middle_node,
|
||||
EdgeWeight &path_upper_bound,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse,
|
||||
Args... args)
|
||||
const std::vector<NodeID> &force_loop_forward_nodes,
|
||||
const std::vector<NodeID> &force_loop_reverse_nodes,
|
||||
const Args &... args)
|
||||
{
|
||||
const auto heapNode = forward_heap.DeleteMinGetHeapNode();
|
||||
const auto weight = heapNode.weight;
|
||||
@ -366,9 +409,9 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
|
||||
// MLD uses loops forcing only to prune single node paths in forward and/or
|
||||
// backward direction (there is no need to force loops in MLD but in CH)
|
||||
if (!(force_loop_forward && heapNode.data.parent == heapNode.node) &&
|
||||
!(force_loop_reverse && reverseHeapNode->data.parent == heapNode.node) &&
|
||||
(path_weight >= 0) && (path_weight < path_upper_bound))
|
||||
if (!force_loop(force_loop_forward_nodes, heapNode) &&
|
||||
!force_loop(force_loop_reverse_nodes, heapNode) && (path_weight >= 0) &&
|
||||
(path_weight < path_upper_bound))
|
||||
{
|
||||
middle_node = heapNode.node;
|
||||
path_upper_bound = path_weight;
|
||||
@ -393,10 +436,10 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse,
|
||||
const std::vector<NodeID> &force_loop_forward_nodes,
|
||||
const std::vector<NodeID> &force_loop_reverse_nodes,
|
||||
EdgeWeight weight_upper_bound,
|
||||
Args... args)
|
||||
const Args &... args)
|
||||
{
|
||||
if (forward_heap.Empty() || reverse_heap.Empty())
|
||||
{
|
||||
@ -423,8 +466,8 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
reverse_heap,
|
||||
middle,
|
||||
weight,
|
||||
force_loop_forward,
|
||||
force_loop_reverse,
|
||||
force_loop_forward_nodes,
|
||||
force_loop_reverse_nodes,
|
||||
args...);
|
||||
if (!forward_heap.Empty())
|
||||
forward_heap_min = forward_heap.MinKey();
|
||||
@ -436,8 +479,8 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
forward_heap,
|
||||
middle,
|
||||
weight,
|
||||
force_loop_reverse,
|
||||
force_loop_forward,
|
||||
force_loop_reverse_nodes,
|
||||
force_loop_forward_nodes,
|
||||
args...);
|
||||
if (!reverse_heap.Empty())
|
||||
reverse_heap_min = reverse_heap.MinKey();
|
||||
@ -494,12 +537,13 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
EdgeWeight subpath_weight;
|
||||
std::vector<NodeID> subpath_nodes;
|
||||
std::vector<EdgeID> subpath_edges;
|
||||
std::tie(subpath_weight, subpath_nodes, subpath_edges) = search(engine_working_data,
|
||||
std::tie(subpath_weight, subpath_nodes, subpath_edges) =
|
||||
search(engine_working_data,
|
||||
facade,
|
||||
forward_heap,
|
||||
reverse_heap,
|
||||
force_loop_forward,
|
||||
force_loop_reverse,
|
||||
force_loop_forward_nodes,
|
||||
force_loop_reverse_nodes,
|
||||
INVALID_EDGE_WEIGHT,
|
||||
sublevel,
|
||||
parent_cell_id);
|
||||
@ -517,16 +561,16 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
}
|
||||
|
||||
// Alias to be compatible with the CH-based search
|
||||
template <typename Algorithm>
|
||||
template <typename Algorithm, typename PhantomEndpointT>
|
||||
inline void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
EdgeWeight &weight,
|
||||
std::vector<NodeID> &unpacked_nodes,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const std::vector<NodeID> &force_loop_forward_node,
|
||||
const std::vector<NodeID> &force_loop_reverse_node,
|
||||
const PhantomEndpointT &endpoints,
|
||||
const EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
// TODO: change search calling interface to use unpacked_edges result
|
||||
@ -534,10 +578,10 @@ inline void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
facade,
|
||||
forward_heap,
|
||||
reverse_heap,
|
||||
force_loop_forward,
|
||||
force_loop_reverse,
|
||||
force_loop_forward_node,
|
||||
force_loop_reverse_node,
|
||||
weight_upper_bound,
|
||||
phantom_nodes);
|
||||
endpoints);
|
||||
}
|
||||
|
||||
// TODO: refactor CH-related stub to use unpacked_edges
|
||||
@ -545,7 +589,7 @@ template <typename RandomIter, typename FacadeT>
|
||||
void unpackPath(const FacadeT &facade,
|
||||
RandomIter packed_path_begin,
|
||||
RandomIter packed_path_end,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const PhantomEndpoints &route_endpoints,
|
||||
std::vector<PathData> &unpacked_path)
|
||||
{
|
||||
const auto nodes_number = std::distance(packed_path_begin, packed_path_end);
|
||||
@ -568,7 +612,7 @@ void unpackPath(const FacadeT &facade,
|
||||
});
|
||||
}
|
||||
|
||||
annotatePath(facade, phantom_nodes, unpacked_nodes, unpacked_edges, unpacked_path);
|
||||
annotatePath(facade, route_endpoints, unpacked_nodes, unpacked_edges, unpacked_path);
|
||||
}
|
||||
|
||||
template <typename Algorithm>
|
||||
@ -583,8 +627,8 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
|
||||
forward_heap.Clear();
|
||||
reverse_heap.Clear();
|
||||
|
||||
const PhantomNodes phantom_nodes{source_phantom, target_phantom};
|
||||
insertNodesInHeaps(forward_heap, reverse_heap, phantom_nodes);
|
||||
const PhantomEndpoints endpoints{source_phantom, target_phantom};
|
||||
insertNodesInHeaps(forward_heap, reverse_heap, endpoints);
|
||||
|
||||
EdgeWeight weight = INVALID_EDGE_WEIGHT;
|
||||
std::vector<NodeID> unpacked_nodes;
|
||||
@ -593,10 +637,10 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
|
||||
facade,
|
||||
forward_heap,
|
||||
reverse_heap,
|
||||
DO_NOT_FORCE_LOOPS,
|
||||
DO_NOT_FORCE_LOOPS,
|
||||
{},
|
||||
{},
|
||||
weight_upper_bound,
|
||||
phantom_nodes);
|
||||
endpoints);
|
||||
|
||||
if (weight == INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
@ -605,7 +649,7 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
|
||||
|
||||
std::vector<PathData> unpacked_path;
|
||||
|
||||
annotatePath(facade, phantom_nodes, unpacked_nodes, unpacked_edges, unpacked_path);
|
||||
annotatePath(facade, endpoints, unpacked_nodes, unpacked_edges, unpacked_path);
|
||||
|
||||
return getPathDistance(facade, unpacked_path, source_phantom, target_phantom);
|
||||
}
|
||||
|
||||
@ -14,9 +14,10 @@ namespace routing_algorithms
|
||||
{
|
||||
|
||||
template <typename Algorithm>
|
||||
InternalRouteResult shortestPathSearch(SearchEngineData<Algorithm> &engine_working_data,
|
||||
InternalRouteResult
|
||||
shortestPathSearch(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
const std::vector<PhantomNodes> &phantom_nodes_vector,
|
||||
const std::vector<PhantomNodeCandidates> &waypoint_candidates,
|
||||
const boost::optional<bool> continue_straight_at_waypoint);
|
||||
|
||||
} // namespace routing_algorithms
|
||||
|
||||
File diff suppressed because it is too large
@ -2,8 +2,7 @@
|
||||
#define OSRM_BINDINGS_NODE_SUPPORT_HPP
|
||||
|
||||
#include "nodejs/json_v8_renderer.hpp"
|
||||
#include "util/json_renderer.hpp"
|
||||
|
||||
#include "engine/api/flatbuffers/fbresult_generated.h"
|
||||
#include "osrm/approach.hpp"
|
||||
#include "osrm/bearing.hpp"
|
||||
#include "osrm/coordinate.hpp"
|
||||
@ -18,6 +17,7 @@
|
||||
#include "osrm/table_parameters.hpp"
|
||||
#include "osrm/tile_parameters.hpp"
|
||||
#include "osrm/trip_parameters.hpp"
|
||||
#include "util/json_renderer.hpp"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/optional.hpp>
|
||||
@ -26,6 +26,7 @@
|
||||
#include <iostream>
|
||||
#include <iterator>
|
||||
#include <sstream>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
@ -46,7 +47,7 @@ using table_parameters_ptr = std::unique_ptr<osrm::TableParameters>;
|
||||
|
||||
struct PluginParameters
|
||||
{
|
||||
bool renderJSONToBuffer = false;
|
||||
bool renderToBuffer = false;
|
||||
};
|
||||
|
||||
using ObjectOrString = typename mapbox::util::variant<osrm::json::Object, std::string>;
|
||||
@ -96,6 +97,17 @@ inline void ParseResult(const osrm::Status &result_status, osrm::json::Object &r
|
||||
}
|
||||
|
||||
inline void ParseResult(const osrm::Status & /*result_status*/, const std::string & /*unused*/) {}
|
||||
inline void ParseResult(const osrm::Status &result_status,
|
||||
const flatbuffers::FlatBufferBuilder &fbs_builder)
|
||||
{
|
||||
auto fbs_result = osrm::engine::api::fbresult::GetFBResult(fbs_builder.GetBufferPointer());
|
||||
|
||||
if (result_status == osrm::Status::Error)
|
||||
{
|
||||
BOOST_ASSERT(fbs_result->code());
|
||||
throw std::logic_error(fbs_result->code()->message()->c_str());
|
||||
}
|
||||
}
|
||||
|
||||
inline engine_config_ptr argumentsToEngineConfig(const Nan::FunctionCallbackInfo<v8::Value> &args)
|
||||
{
|
||||
@ -725,6 +737,36 @@ inline bool argumentsToParameter(const Nan::FunctionCallbackInfo<v8::Value> &arg
|
||||
}
|
||||
}
|
||||
|
||||
if (Nan::Has(obj, Nan::New("format").ToLocalChecked()).FromJust())
|
||||
{
|
||||
v8::Local<v8::Value> format =
|
||||
Nan::Get(obj, Nan::New("format").ToLocalChecked()).ToLocalChecked();
|
||||
if (format.IsEmpty())
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!format->IsString())
|
||||
{
|
||||
Nan::ThrowError("format must be a string: \"json\" or \"flatbuffers\"");
|
||||
return false;
|
||||
}
|
||||
|
||||
std::string format_str = *Nan::Utf8String(format);
|
||||
if (format_str == "json")
|
||||
{
|
||||
params->format = osrm::engine::api::BaseParameters::OutputFormatType::JSON;
|
||||
}
|
||||
else if (format_str == "flatbuffers")
|
||||
{
|
||||
params->format = osrm::engine::api::BaseParameters::OutputFormatType::FLATBUFFERS;
|
||||
}
|
||||
else
|
||||
{
|
||||
Nan::ThrowError("format must be a string: \"json\" or \"flatbuffers\"");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -885,17 +927,18 @@ inline bool parseCommonParameters(const v8::Local<v8::Object> &obj, ParamType &p
|
||||
return true;
|
||||
}
|
||||
|
||||
inline PluginParameters
|
||||
argumentsToPluginParameters(const Nan::FunctionCallbackInfo<v8::Value> &args)
|
||||
inline PluginParameters argumentsToPluginParameters(
|
||||
const Nan::FunctionCallbackInfo<v8::Value> &args,
|
||||
const boost::optional<osrm::engine::api::BaseParameters::OutputFormatType> &output_format = {})
|
||||
{
|
||||
if (args.Length() < 3 || !args[1]->IsObject())
|
||||
{
|
||||
return {};
|
||||
// output to buffer by default for Flatbuffers
|
||||
return {output_format == osrm::engine::api::BaseParameters::OutputFormatType::FLATBUFFERS};
|
||||
}
|
||||
v8::Local<v8::Object> obj = Nan::To<v8::Object>(args[1]).ToLocalChecked();
|
||||
if (Nan::Has(obj, Nan::New("format").ToLocalChecked()).FromJust())
|
||||
{
|
||||
|
||||
v8::Local<v8::Value> format =
|
||||
Nan::Get(obj, Nan::New("format").ToLocalChecked()).ToLocalChecked();
|
||||
if (format.IsEmpty())
|
||||
@ -905,7 +948,7 @@ argumentsToPluginParameters(const Nan::FunctionCallbackInfo<v8::Value> &args)
|
||||
|
||||
if (!format->IsString())
|
||||
{
|
||||
Nan::ThrowError("format must be a string: \"object\" or \"json_buffer\"");
|
||||
Nan::ThrowError("format must be a string: \"object\" or \"buffer\"");
|
||||
return {};
|
||||
}
|
||||
|
||||
@ -914,20 +957,35 @@ argumentsToPluginParameters(const Nan::FunctionCallbackInfo<v8::Value> &args)
|
||||
|
||||
if (format_str == "object")
|
||||
{
|
||||
if (output_format == osrm::engine::api::BaseParameters::OutputFormatType::FLATBUFFERS)
|
||||
{
|
||||
Nan::ThrowError("Flatbuffers result can only output to buffer.");
|
||||
return {true};
|
||||
}
|
||||
return {false};
|
||||
}
|
||||
else if (format_str == "buffer")
|
||||
{
|
||||
return {true};
|
||||
}
|
||||
else if (format_str == "json_buffer")
|
||||
{
|
||||
if (output_format &&
|
||||
output_format != osrm::engine::api::BaseParameters::OutputFormatType::JSON)
|
||||
{
|
||||
Nan::ThrowError("Deprecated `json_buffer` can only be used with JSON format");
|
||||
}
|
||||
return {true};
|
||||
}
|
||||
else
|
||||
{
|
||||
Nan::ThrowError("format must be a string: \"object\" or \"json_buffer\"");
|
||||
Nan::ThrowError("format must be a string: \"object\" or \"buffer\"");
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
return {};
|
||||
// output to buffer by default for Flatbuffers
|
||||
return {output_format == osrm::engine::api::BaseParameters::OutputFormatType::FLATBUFFERS};
|
||||
}
|
||||
|
||||
inline route_parameters_ptr
|
||||
|
||||
@ -78,10 +78,17 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
|
||||
: BaseParametersGrammar::base_type(root_rule)
|
||||
{
|
||||
const auto add_hint = [](engine::api::BaseParameters &base_parameters,
|
||||
const boost::optional<std::string> &hint_string) {
|
||||
if (hint_string)
|
||||
const std::vector<std::string> &hint_strings) {
|
||||
if (!hint_strings.empty())
|
||||
{
|
||||
base_parameters.hints.emplace_back(engine::Hint::FromBase64(hint_string.get()));
|
||||
std::vector<engine::SegmentHint> location_hints(hint_strings.size());
|
||||
std::transform(hint_strings.begin(),
|
||||
hint_strings.end(),
|
||||
location_hints.begin(),
|
||||
[](const auto &hint_string) {
|
||||
return engine::SegmentHint::FromBase64(hint_string);
|
||||
});
|
||||
base_parameters.hints.push_back(engine::Hint{std::move(location_hints)});
|
||||
}
|
||||
else
|
||||
{
|
||||
@ -145,8 +152,9 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
|
||||
(-(qi::double_ | unlimited_rule) %
|
||||
';')[ph::bind(&engine::api::BaseParameters::radiuses, qi::_r1) = qi::_1];
|
||||
|
||||
hints_rule = qi::lit("hints=") >
|
||||
(-qi::as_string[qi::repeat(engine::ENCODED_HINT_SIZE)[base64_char]])[ph::bind(
|
||||
hints_rule =
|
||||
qi::lit("hints=") >
|
||||
(*qi::as_string[qi::repeat(engine::ENCODED_SEGMENT_HINT_SIZE)[base64_char]])[ph::bind(
|
||||
add_hint, qi::_r1, qi::_1)] %
|
||||
';';
|
||||
|
||||
|
||||
@ -235,12 +235,8 @@ template <typename ElementT> class DeallocatingVector
|
||||
{
|
||||
// Delete[]'ing ptr's to all Buckets
|
||||
for (auto bucket : bucket_list)
|
||||
{
|
||||
if (nullptr != bucket)
|
||||
{
|
||||
delete[] bucket;
|
||||
bucket = nullptr;
|
||||
}
|
||||
}
|
||||
bucket_list.clear();
|
||||
bucket_list.shrink_to_fit();
|
||||
|
||||
@ -11,12 +11,15 @@
|
||||
#include <boost/iterator/iterator_facade.hpp>
|
||||
#include <boost/iterator/reverse_iterator.hpp>
|
||||
|
||||
#include <tbb/atomic.h>
|
||||
|
||||
#include <array>
|
||||
#include <cmath>
|
||||
#include <vector>
|
||||
|
||||
#if defined(_MSC_VER)
|
||||
// for `InterlockedCompareExchange64`
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
namespace osrm
|
||||
{
|
||||
namespace util
|
||||
@ -89,6 +92,19 @@ inline WordT set_upper_value(WordT word, WordT mask, std::uint8_t offset, T valu
|
||||
return (word & ~mask) | ((static_cast<WordT>(value) >> offset) & mask);
|
||||
}
|
||||
|
||||
inline bool compare_and_swap(uint64_t *ptr, uint64_t old_value, uint64_t new_value)
{
#if defined(_MSC_VER)
return InterlockedCompareExchange64(reinterpret_cast<LONG64 *>(ptr),
static_cast<LONG64>(new_value),
static_cast<LONG64>(old_value)) == old_value;
#elif defined(__GNUC__)
return __sync_bool_compare_and_swap(ptr, old_value, new_value);
#else
#error "Unsupported compiler";
#endif
}

template <typename T, std::size_t Bits, storage::Ownership Ownership> class PackedVector
{
using WordT = std::uint64_t;
@ -527,8 +543,7 @@ template <typename T, std::size_t Bits, storage::Ownership Ownership> class Pack
lower_mask[internal_index.element],
lower_offset[internal_index.element],
value);
} while (tbb::internal::as_atomic(lower_word)
.compare_and_swap(new_lower_word, local_lower_word) != local_lower_word);
} while (!compare_and_swap(&lower_word, local_lower_word, new_lower_word));

// Lock-free update of the upper word
WordT local_upper_word, new_upper_word;
@ -539,8 +554,7 @@ template <typename T, std::size_t Bits, storage::Ownership Ownership> class Pack
upper_mask[internal_index.element],
upper_offset[internal_index.element],
value);
} while (tbb::internal::as_atomic(upper_word)
.compare_and_swap(new_upper_word, local_upper_word) != local_upper_word);
} while (!compare_and_swap(&upper_word, local_upper_word, new_upper_word));
}
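
Both loops above follow the usual lock-free read/modify/compare-and-swap retry pattern: re-read the word, splice the new field into a local copy, and retry if another thread changed the word in between. A self-contained sketch of that pattern, using std::atomic purely for illustration (the diff deliberately works on plain uint64_t words with the hand-rolled compare_and_swap above, since the packed words live in an ordinary vector):

    // Illustrative sketch only: std::atomic is used here just to show the same
    // read / modify / compare-and-swap retry pattern as the loops above.
    #include <atomic>
    #include <cstdint>
    #include <iostream>

    // Replace bits [8, 16) of a 64-bit word with `value`, leaving the rest intact.
    std::uint64_t set_byte1(std::uint64_t word, std::uint8_t value)
    {
        const std::uint64_t mask = 0xFFull << 8;
        return (word & ~mask) | (static_cast<std::uint64_t>(value) << 8);
    }

    int main()
    {
        std::atomic<std::uint64_t> word{0x1122334455667788ull};

        std::uint64_t local, desired;
        do
        {
            local = word.load();              // re-read the current word
            desired = set_byte1(local, 0xAB); // splice the new field into a copy
            // retry if another thread changed the word in between
        } while (!word.compare_exchange_weak(local, desired));

        std::cout << std::hex << word.load() << '\n'; // prints 112233445566ab88
        return 0;
    }
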
|
||||
|
||||
util::ViewOrVector<WordT, Ownership> vec;
|
||||
|
||||
@ -68,7 +68,7 @@ write(storage::tar::FileWriter &writer,
|
||||
|
||||
/***
|
||||
* Static RTree for serving nearest neighbour queries
|
||||
* // All coordinates are pojected first to Web Mercator before the bounding boxes
|
||||
* // All coordinates are projected first to Web Mercator before the bounding boxes
|
||||
* // are computed, this means the internal distance metric doesn not represent meters!
|
||||
*/
|
||||
|
||||
@ -556,7 +556,7 @@ class StaticRTree
|
||||
}
|
||||
|
||||
// Override filter and terminator for the desired behaviour.
|
||||
std::vector<EdgeDataT> Nearest(const Coordinate input_coordinate,
|
||||
std::vector<CandidateSegment> Nearest(const Coordinate input_coordinate,
|
||||
const std::size_t max_results) const
|
||||
{
|
||||
return Nearest(
|
||||
@ -567,13 +567,13 @@ class StaticRTree
|
||||
});
|
||||
}
|
||||
|
||||
// Override filter and terminator for the desired behaviour.
|
||||
// Return edges in distance order with the coordinate of the closest point on the edge.
|
||||
template <typename FilterT, typename TerminationT>
|
||||
std::vector<EdgeDataT> Nearest(const Coordinate input_coordinate,
|
||||
std::vector<CandidateSegment> Nearest(const Coordinate input_coordinate,
|
||||
const FilterT filter,
|
||||
const TerminationT terminate) const
|
||||
{
|
||||
std::vector<EdgeDataT> results;
|
||||
std::vector<CandidateSegment> results;
|
||||
auto projected_coordinate = web_mercator::fromWGS84(input_coordinate);
|
||||
Coordinate fixed_projected_coordinate{projected_coordinate};
|
||||
// initialize queue with root element
|
||||
@ -603,10 +603,10 @@ class StaticRTree
|
||||
}
|
||||
else
|
||||
{ // current candidate is an actual road segment
|
||||
// We deliberatly make a copy here, we mutate the value below
|
||||
auto edge_data = m_objects[current_query_node.segment_index];
|
||||
const auto ¤t_candidate =
|
||||
CandidateSegment{current_query_node.fixed_projected_coordinate, edge_data};
|
||||
const auto &edge_data = m_objects[current_query_node.segment_index];
|
||||
// We deliberately make an edge data copy here, we mutate the value below
|
||||
CandidateSegment current_candidate{current_query_node.fixed_projected_coordinate,
|
||||
edge_data};
|
||||
|
||||
// to allow returns of no-results if too restrictive filtering, this needs to be
|
||||
// done here even though performance would indicate that we want to stop after
|
||||
@ -621,11 +621,11 @@ class StaticRTree
|
||||
{
|
||||
continue;
|
||||
}
|
||||
edge_data.forward_segment_id.enabled &= use_segment.first;
|
||||
edge_data.reverse_segment_id.enabled &= use_segment.second;
|
||||
current_candidate.data.forward_segment_id.enabled &= use_segment.first;
|
||||
current_candidate.data.reverse_segment_id.enabled &= use_segment.second;
|
||||
|
||||
// store phantom node in result vector
|
||||
results.push_back(std::move(edge_data));
|
||||
results.push_back(std::move(current_candidate));
|
||||
}
|
||||
}
|
||||
|
||||
@ -676,7 +676,7 @@ class StaticRTree
|
||||
* Iterates over all the children of a TreeNode and inserts them into the search
|
||||
* priority queue using their distance from the search coordinate as the
|
||||
* priority metric.
|
||||
* The closests distance to a box from our point is also the closest distance
|
||||
* The closest distance to a box from our point is also the closest distance
|
||||
* to the closest line in that box (assuming the boxes hug their contents).
|
||||
*/
|
||||
template <class QueueT>
|
||||
|
||||
@ -35,12 +35,7 @@
|
||||
#include <vector>
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
#include <tbb/global_control.h>
|
||||
#else
|
||||
#include <tbb/task_scheduler_init.h>
|
||||
#endif
|
||||
|
||||
namespace osrm
|
||||
{
|
||||
@ -49,13 +44,8 @@ namespace contractor
|
||||
|
||||
int Contractor::Run()
|
||||
{
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
tbb::global_control gc(tbb::global_control::max_allowed_parallelism,
|
||||
config.requested_num_threads);
|
||||
#else
|
||||
tbb::task_scheduler_init init(config.requested_num_threads);
|
||||
BOOST_ASSERT(init.is_active());
|
||||
#endif
|
||||
|
||||
if (config.core_factor != 1.0)
|
||||
{
|
||||
|
||||
@ -21,11 +21,7 @@
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
#include <tbb/global_control.h>
|
||||
#else
|
||||
#include <tbb/task_scheduler_init.h>
|
||||
#endif
|
||||
|
||||
namespace osrm
|
||||
{
|
||||
@ -122,13 +118,8 @@ std::vector<CellMetric> customizeFilteredMetrics(const partitioner::MultiLevelEd
|
||||
|
||||
int Customizer::Run(const CustomizationConfig &config)
|
||||
{
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
tbb::global_control gc(tbb::global_control::max_allowed_parallelism,
|
||||
config.requested_num_threads);
|
||||
#else
|
||||
tbb::task_scheduler_init init(config.requested_num_threads);
|
||||
BOOST_ASSERT(init.is_active());
|
||||
#endif
|
||||
|
||||
TIMER_START(loading_data);
|
||||
|
||||
|
||||
@ -239,10 +239,10 @@ makeWaypoint(const util::Coordinate &location, const double &distance, std::stri
|
||||
util::json::Object makeWaypoint(const util::Coordinate &location,
|
||||
const double &distance,
|
||||
std::string name,
|
||||
const Hint &hint)
|
||||
const Hint &location_hints)
|
||||
{
|
||||
auto waypoint = makeWaypoint(location, distance, std::move(name));
|
||||
waypoint.values["hint"] = hint.ToBase64();
|
||||
waypoint.values["hint"] = location_hints.ToBase64();
|
||||
return waypoint;
|
||||
}
|
||||
|
||||
|
||||
@ -3,10 +3,10 @@
|
||||
#include "engine/datafacade/datafacade_base.hpp"
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/unordered_set.hpp>
|
||||
|
||||
#include <algorithm>
|
||||
#include <iterator>
|
||||
#include <ostream>
|
||||
#include <tuple>
|
||||
|
||||
namespace osrm
|
||||
@ -14,7 +14,7 @@ namespace osrm
|
||||
namespace engine
|
||||
{
|
||||
|
||||
bool Hint::IsValid(const util::Coordinate new_input_coordinates,
|
||||
bool SegmentHint::IsValid(const util::Coordinate new_input_coordinates,
|
||||
const datafacade::BaseDataFacade &facade) const
|
||||
{
|
||||
auto is_same_input_coordinate = new_input_coordinates.lon == phantom.input_location.lon &&
|
||||
@ -25,7 +25,7 @@ bool Hint::IsValid(const util::Coordinate new_input_coordinates,
|
||||
return is_same_input_coordinate && phantom.IsValid() && facade.GetCheckSum() == data_checksum;
|
||||
}
|
||||
|
||||
std::string Hint::ToBase64() const
|
||||
std::string SegmentHint::ToBase64() const
|
||||
{
|
||||
auto base64 = encodeBase64Bytewise(*this);
|
||||
|
||||
@ -36,9 +36,9 @@ std::string Hint::ToBase64() const
|
||||
return base64;
|
||||
}
|
||||
|
||||
Hint Hint::FromBase64(const std::string &base64Hint)
|
||||
SegmentHint SegmentHint::FromBase64(const std::string &base64Hint)
|
||||
{
|
||||
BOOST_ASSERT_MSG(base64Hint.size() == ENCODED_HINT_SIZE, "Hint has invalid size");
|
||||
BOOST_ASSERT_MSG(base64Hint.size() == ENCODED_SEGMENT_HINT_SIZE, "Hint has invalid size");
|
||||
|
||||
// We need mutability but don't want to change the API
|
||||
auto encoded = base64Hint;
|
||||
@ -47,15 +47,82 @@ Hint Hint::FromBase64(const std::string &base64Hint)
|
||||
std::replace(begin(encoded), end(encoded), '-', '+');
|
||||
std::replace(begin(encoded), end(encoded), '_', '/');
|
||||
|
||||
return decodeBase64Bytewise<Hint>(encoded);
|
||||
return decodeBase64Bytewise<SegmentHint>(encoded);
|
||||
}
|
||||
|
||||
bool operator==(const Hint &lhs, const Hint &rhs)
|
||||
bool operator==(const SegmentHint &lhs, const SegmentHint &rhs)
|
||||
{
|
||||
return std::tie(lhs.phantom, lhs.data_checksum) == std::tie(rhs.phantom, rhs.data_checksum);
|
||||
}
|
||||
|
||||
std::ostream &operator<<(std::ostream &out, const Hint &hint) { return out << hint.ToBase64(); }
|
||||
bool operator!=(const SegmentHint &lhs, const SegmentHint &rhs) { return !(lhs == rhs); }
|
||||
|
||||
std::ostream &operator<<(std::ostream &out, const SegmentHint &hint)
|
||||
{
|
||||
return out << hint.ToBase64();
|
||||
}
|
||||
|
||||
std::string Hint::ToBase64() const
{
std::string res;
for (const auto &hint : segment_hints)
{
res += hint.ToBase64();
}
return res;
}

Hint Hint::FromBase64(const std::string &base64Hint)
{

BOOST_ASSERT_MSG(base64Hint.size() % ENCODED_SEGMENT_HINT_SIZE == 0,
"SegmentHint has invalid size");

auto num_hints = base64Hint.size() / ENCODED_SEGMENT_HINT_SIZE;
std::vector<SegmentHint> res(num_hints);

for (const auto i : util::irange<std::size_t>(0UL, num_hints))
{
auto start_offset = i * ENCODED_SEGMENT_HINT_SIZE;
auto end_offset = start_offset + ENCODED_SEGMENT_HINT_SIZE;
res[i] = SegmentHint::FromBase64(
std::string(base64Hint.begin() + start_offset, base64Hint.begin() + end_offset));
}

return {std::move(res)};
}
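
A multi-segment Hint is therefore just the fixed-size base64 segment hints concatenated back to back, and FromBase64 recovers them by slicing the string at multiples of ENCODED_SEGMENT_HINT_SIZE. A self-contained sketch of that slicing, with a made-up kChunkSize standing in for ENCODED_SEGMENT_HINT_SIZE and plain substrings standing in for the decoded SegmentHints:

    // Illustrative sketch only: kChunkSize is a made-up stand-in for
    // ENCODED_SEGMENT_HINT_SIZE, and "decoding" a chunk is just copying it.
    #include <cassert>
    #include <cstddef>
    #include <string>
    #include <vector>

    std::vector<std::string> splitFixedSizeChunks(const std::string &input, std::size_t chunk_size)
    {
        assert(input.size() % chunk_size == 0 && "concatenated hint has invalid size");

        const std::size_t num_chunks = input.size() / chunk_size;
        std::vector<std::string> chunks(num_chunks);
        for (std::size_t i = 0; i < num_chunks; ++i)
        {
            // Each per-segment hint occupies a fixed-size window of the string.
            const std::size_t start_offset = i * chunk_size;
            chunks[i] = input.substr(start_offset, chunk_size);
        }
        return chunks;
    }

    int main()
    {
        const std::size_t kChunkSize = 4; // stand-in for ENCODED_SEGMENT_HINT_SIZE
        const auto chunks = splitFixedSizeChunks("AAAABBBBCCCC", kChunkSize);
        assert(chunks.size() == 3 && chunks[1] == "BBBB");
        return 0;
    }
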

bool Hint::IsValid(const util::Coordinate new_input_coordinates,
const datafacade::BaseDataFacade &facade) const
{
const auto all_valid =
std::all_of(segment_hints.begin(), segment_hints.end(), [&](const auto &seg_hint) {
return seg_hint.IsValid(new_input_coordinates, facade);
});
if (!all_valid)
{
return false;
}

// Check hints do not contain duplicate segment pairs
// We can't allow duplicates as search heaps do not support it.
boost::unordered_set<NodeID> forward_segments;
boost::unordered_set<NodeID> reverse_segments;
for (const auto &seg_hint : segment_hints)
{
const auto forward_res = forward_segments.insert(seg_hint.phantom.forward_segment_id.id);
if (!forward_res.second)
{
return false;
}
const auto backward_res = reverse_segments.insert(seg_hint.phantom.reverse_segment_id.id);
if (!backward_res.second)
{
return false;
}
}
return true;
}
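
The duplicate check above relies on set insertion reporting whether the key was already present. A trivial, self-contained sketch of that idiom, with std::unordered_set and uint32_t ids standing in for boost::unordered_set<NodeID> and the phantom segment ids:

    // Illustrative sketch only: std::unordered_set and uint32_t ids stand in for
    // boost::unordered_set<NodeID> and the phantom segment ids used above.
    #include <cassert>
    #include <cstdint>
    #include <unordered_set>
    #include <vector>

    // Returns false as soon as a segment id repeats, mirroring the duplicate check
    // that protects the search heaps from being fed the same node twice.
    bool allSegmentsUnique(const std::vector<std::uint32_t> &segment_ids)
    {
        std::unordered_set<std::uint32_t> seen;
        for (const auto id : segment_ids)
        {
            // insert().second is false when the id was already present
            if (!seen.insert(id).second)
            {
                return false;
            }
        }
        return true;
    }

    int main()
    {
        assert(allSegmentsUnique({1, 2, 3}));
        assert(!allSegmentsUnique({1, 2, 1}));
        return 0;
    }
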
|
||||
|
||||
} // namespace engine
|
||||
} // namespace osrm
|
||||
|
||||
@ -4,21 +4,16 @@
|
||||
#include "engine/api/match_api.hpp"
|
||||
#include "engine/api/match_parameters.hpp"
|
||||
#include "engine/api/match_parameters_tidy.hpp"
|
||||
#include "engine/map_matching/bayes_classifier.hpp"
|
||||
#include "engine/map_matching/sub_matching.hpp"
|
||||
#include "util/coordinate_calculation.hpp"
|
||||
#include "util/integer_range.hpp"
|
||||
#include "util/json_util.hpp"
|
||||
#include "util/string_util.hpp"
|
||||
|
||||
#include <cstdlib>
|
||||
|
||||
#include <algorithm>
|
||||
#include <functional>
|
||||
#include <iterator>
|
||||
#include <memory>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
namespace osrm
|
||||
@ -28,7 +23,7 @@ namespace engine
|
||||
namespace plugins
|
||||
{
|
||||
|
||||
// Filters PhantomNodes to obtain a set of viable candiates
|
||||
// Filters PhantomNodes to obtain a set of viable candidates
|
||||
void filterCandidates(const std::vector<util::Coordinate> &coordinates,
|
||||
MatchPlugin::CandidateLists &candidates_lists)
|
||||
{
|
||||
@ -272,20 +267,26 @@ Status MatchPlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,

// FIXME we only run this to obtain the geometry
// The clean way would be to get this directly from the map matching plugin
PhantomNodes current_phantom_node_pair;
for (unsigned i = 0; i < sub_matchings[index].nodes.size() - 1; ++i)
{
current_phantom_node_pair.source_phantom = sub_matchings[index].nodes[i];
current_phantom_node_pair.target_phantom = sub_matchings[index].nodes[i + 1];
BOOST_ASSERT(current_phantom_node_pair.source_phantom.IsValid());
BOOST_ASSERT(current_phantom_node_pair.target_phantom.IsValid());
sub_routes[index].segment_end_coordinates.emplace_back(current_phantom_node_pair);
PhantomEndpoints current_endpoints{sub_matchings[index].nodes[i],
sub_matchings[index].nodes[i + 1]};
BOOST_ASSERT(current_endpoints.source_phantom.IsValid());
BOOST_ASSERT(current_endpoints.target_phantom.IsValid());
sub_routes[index].leg_endpoints.push_back(current_endpoints);
}

std::vector<PhantomNodeCandidates> waypoint_candidates;
waypoint_candidates.reserve(sub_matchings[index].nodes.size());
std::transform(sub_matchings[index].nodes.begin(),
sub_matchings[index].nodes.end(),
std::back_inserter(waypoint_candidates),
[](const auto &phantom) { return PhantomNodeCandidates{phantom}; });

// force uturns to be on
// we split the phantom nodes anyway and only have bi-directional phantom nodes for
// possible uturns
sub_routes[index] =
algorithms.ShortestPathSearch(sub_routes[index].segment_end_coordinates, {false});
sub_routes[index] = algorithms.ShortestPathSearch(waypoint_candidates, {false});
BOOST_ASSERT(sub_routes[index].shortest_path_weight != INVALID_EDGE_WEIGHT);
if (collapse_legs)
{

@ -1,10 +1,7 @@
#include "engine/plugins/nearest.hpp"
#include "engine/api/nearest_api.hpp"
#include "engine/api/nearest_parameters.hpp"
#include "engine/phantom_node.hpp"
#include "util/integer_range.hpp"

#include <cstddef>
#include <string>

#include <boost/assert.hpp>

@ -2,17 +2,11 @@

#include "engine/api/table_api.hpp"
#include "engine/api/table_parameters.hpp"
#include "engine/routing_algorithms/many_to_many.hpp"
#include "engine/search_engine_data.hpp"
#include "util/coordinate_calculation.hpp"
#include "util/json_container.hpp"
#include "util/string_util.hpp"

#include <cstdlib>

#include <algorithm>
#include <memory>
#include <string>
#include <vector>

#include <boost/assert.hpp>
@ -47,7 +41,7 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
return Error("InvalidOptions", "Coordinates are invalid", result);
}

if (params.bearings.size() > 0 && params.coordinates.size() != params.bearings.size())
if (!params.bearings.empty() && params.coordinates.size() != params.bearings.size())
{
return Error(
"InvalidOptions", "Number of bearings does not match number of coordinates", result);
@ -79,7 +73,7 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
"NoSegment", MissingPhantomErrorMessage(phantom_nodes, params.coordinates), result);
}

auto snapped_phantoms = SnapPhantomNodes(phantom_nodes);
auto snapped_phantoms = SnapPhantomNodes(std::move(phantom_nodes));

bool request_distance = params.annotations & api::TableParameters::AnnotationsType::Distance;
bool request_duration = params.annotations & api::TableParameters::AnnotationsType::Duration;
@ -117,9 +111,11 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
params.fallback_coordinate_type ==
api::TableParameters::FallbackCoordinateType::Input
? util::coordinate_calculation::greatCircleDistance(
source.input_location, destination.input_location)
candidatesInputLocation(source),
candidatesInputLocation(destination))
: util::coordinate_calculation::greatCircleDistance(
source.location, destination.location);
candidatesSnappedLocation(source),
candidatesSnappedLocation(destination));

result_tables_pair.first[table_index] =
distance_estimate / (double)params.fallback_speed;

@ -20,7 +20,6 @@
#include <algorithm>
#include <numeric>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>


@ -4,18 +4,12 @@
#include "engine/api/trip_parameters.hpp"
#include "engine/trip/trip_brute_force.hpp"
#include "engine/trip/trip_farthest_insertion.hpp"
#include "engine/trip/trip_nearest_neighbour.hpp"
#include "util/dist_table_wrapper.hpp" // to access the dist table more easily
#include "util/json_container.hpp"

#include <boost/assert.hpp>

#include <algorithm>
#include <cstdlib>
#include <iterator>
#include <limits>
#include <memory>
#include <string>
#include <utility>
#include <vector>

@ -41,40 +35,33 @@ bool IsSupportedParameterCombination(const bool fixed_start,

// given the node order in which to visit, compute the actual route (with geometry, travel time and
// so on) and return the result
InternalRouteResult TripPlugin::ComputeRoute(const RoutingAlgorithmsInterface &algorithms,
const std::vector<PhantomNode> &snapped_phantoms,
InternalRouteResult
TripPlugin::ComputeRoute(const RoutingAlgorithmsInterface &algorithms,
const std::vector<PhantomNodeCandidates> &waypoint_candidates,
const std::vector<NodeID> &trip,
const bool roundtrip) const
{
InternalRouteResult min_route;
// given the final trip, compute total duration and return the route and location permutation
PhantomNodes viapoint;

// computes a roundtrip from the nodes in trip
for (auto node = trip.begin(); node < trip.end() - 1; ++node)
{
const auto from_node = *node;
const auto to_node = *std::next(node);

viapoint = PhantomNodes{snapped_phantoms[from_node], snapped_phantoms[to_node]};
min_route.segment_end_coordinates.emplace_back(viapoint);
}

// TODO make a more efficient solution that doesn't require copying all the waypoints vectors.
std::vector<PhantomNodeCandidates> trip_candidates;
std::transform(trip.begin(),
trip.end(),
std::back_inserter(trip_candidates),
[&](const auto &node) { return waypoint_candidates[node]; });
// return back to the first node if it is a round trip
if (roundtrip)
{
viapoint = PhantomNodes{snapped_phantoms[trip.back()], snapped_phantoms[trip.front()]};
min_route.segment_end_coordinates.emplace_back(viapoint);
trip_candidates.push_back(waypoint_candidates[trip.front()]);
// trip comes out to be something like 0 1 4 3 2 0
BOOST_ASSERT(min_route.segment_end_coordinates.size() == trip.size());
BOOST_ASSERT(trip_candidates.size() == trip.size() + 1);
}
else
{
// trip comes out to be something like 0 1 4 3 2, so the sizes don't match
BOOST_ASSERT(min_route.segment_end_coordinates.size() == trip.size() - 1);
// trip comes out to be something like 0 1 4 3 2
BOOST_ASSERT(trip_candidates.size() == trip.size());
}

min_route = algorithms.ShortestPathSearch(min_route.segment_end_coordinates, {false});
auto min_route = algorithms.ShortestPathSearch(trip_candidates, {false});
BOOST_ASSERT_MSG(min_route.shortest_path_weight < INVALID_EDGE_WEIGHT, "unroutable route");
return min_route;
}
@ -226,7 +213,7 @@ Status TripPlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
return Error("InvalidValue", "Invalid source or destination value.", result);
}

auto snapped_phantoms = SnapPhantomNodes(phantom_node_pairs);
auto snapped_phantoms = SnapPhantomNodes(std::move(phantom_node_pairs));

BOOST_ASSERT(snapped_phantoms.size() == number_of_locations);


@ -5,12 +5,10 @@

#include "util/for_each_pair.hpp"
#include "util/integer_range.hpp"
#include "util/json_container.hpp"

#include <cstdlib>

#include <algorithm>
#include <memory>
#include <string>
#include <vector>

@ -95,19 +93,10 @@ Status ViaRoutePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithm
}
BOOST_ASSERT(phantom_node_pairs.size() == route_parameters.coordinates.size());

auto snapped_phantoms = SnapPhantomNodes(phantom_node_pairs);

std::vector<PhantomNodes> start_end_nodes;
auto build_phantom_pairs = [&start_end_nodes](const PhantomNode &first_node,
const PhantomNode &second_node) {
start_end_nodes.push_back(PhantomNodes{first_node, second_node});
};
util::for_each_pair(snapped_phantoms, build_phantom_pairs);
auto snapped_phantoms = SnapPhantomNodes(std::move(phantom_node_pairs));

api::RouteAPI route_api{facade, route_parameters};

InternalManyRoutesResult routes;

// TODO: in v6 we should remove the boolean and only keep the number parameter.
// For now just force them to be in sync. and keep backwards compatibility.
const auto wants_alternatives =
@ -115,20 +104,23 @@ Status ViaRoutePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithm
(route_parameters.alternatives || route_parameters.number_of_alternatives > 0);
const auto number_of_alternatives = std::max(1u, route_parameters.number_of_alternatives);

InternalManyRoutesResult routes;
// Alternatives do not support vias, only direct s,t queries supported
// See the implementation notes and high-level outline.
// https://github.com/Project-OSRM/osrm-backend/issues/3905
if (1 == start_end_nodes.size() && algorithms.HasAlternativePathSearch() && wants_alternatives)
if (2 == snapped_phantoms.size() && algorithms.HasAlternativePathSearch() && wants_alternatives)
{
routes = algorithms.AlternativePathSearch(start_end_nodes.front(), number_of_alternatives);
routes = algorithms.AlternativePathSearch({snapped_phantoms[0], snapped_phantoms[1]},
number_of_alternatives);
}
else if (1 == start_end_nodes.size() && algorithms.HasDirectShortestPathSearch())
else if (2 == snapped_phantoms.size() && algorithms.HasDirectShortestPathSearch())
{
routes = algorithms.DirectShortestPathSearch(start_end_nodes.front());
routes = algorithms.DirectShortestPathSearch({snapped_phantoms[0], snapped_phantoms[1]});
}
else
{
routes = algorithms.ShortestPathSearch(start_end_nodes, route_parameters.continue_straight);
routes =
algorithms.ShortestPathSearch(snapped_phantoms, route_parameters.continue_straight);
}

// The post condition for all path searches is we have at least one route in our result.
@ -160,18 +152,29 @@ Status ViaRoutePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithm
}
}

route_api.MakeResponse(routes, start_end_nodes, result);
route_api.MakeResponse(routes, snapped_phantoms, result);
}
else
{
auto first_component_id = snapped_phantoms.front().component.id;
auto not_in_same_component = std::any_of(snapped_phantoms.begin(),
snapped_phantoms.end(),
[first_component_id](const PhantomNode &node) {
return node.component.id != first_component_id;
const auto all_in_same_component =
[](const std::vector<PhantomNodeCandidates> &waypoint_candidates) {
return std::any_of(waypoint_candidates.front().begin(),
waypoint_candidates.front().end(),
// For each of the first possible phantoms, check if all other
// positions in the list have a phantom from the same component.
[&](const PhantomNode &phantom) {
const auto component_id = phantom.component.id;
return std::all_of(
std::next(waypoint_candidates.begin()),
std::end(waypoint_candidates),
[component_id](const PhantomNodeCandidates &candidates) {
return candidatesHaveComponent(candidates,
component_id);
});
});
};

if (not_in_same_component)
if (!all_in_same_component(snapped_phantoms))
{
return Error("NoRoute", "Impossible route between points", result);
}

@ -190,8 +190,8 @@ void computeWeightAndSharingOfViaPath(SearchEngineData<Algorithm> &engine_workin
s_v_middle,
upper_bound_s_v_path_weight,
min_edge_offset,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS);
{},
{});
}
// compute path <v,..,t> by reusing backward search from node t
NodeID v_t_middle = SPECIAL_NODEID;
@ -205,8 +205,8 @@ void computeWeightAndSharingOfViaPath(SearchEngineData<Algorithm> &engine_workin
v_t_middle,
upper_bound_of_v_t_path_weight,
min_edge_offset,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS);
{},
{});
}
*real_weight_of_via_path = upper_bound_s_v_path_weight + upper_bound_of_v_t_path_weight;

@ -351,8 +351,8 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
*s_v_middle,
upper_bound_s_v_path_weight,
min_edge_offset,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS);
{},
{});
}

if (INVALID_EDGE_WEIGHT == upper_bound_s_v_path_weight)
@ -372,8 +372,8 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
*v_t_middle,
upper_bound_of_v_t_path_weight,
min_edge_offset,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS);
{},
{});
}

if (INVALID_EDGE_WEIGHT == upper_bound_of_v_t_path_weight)
@ -539,25 +539,13 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
{
if (!forward_heap3.Empty())
{
routingStep<FORWARD_DIRECTION>(facade,
forward_heap3,
reverse_heap3,
middle,
upper_bound,
min_edge_offset,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS);
routingStep<FORWARD_DIRECTION>(
facade, forward_heap3, reverse_heap3, middle, upper_bound, min_edge_offset, {}, {});
}
if (!reverse_heap3.Empty())
{
routingStep<REVERSE_DIRECTION>(facade,
reverse_heap3,
forward_heap3,
middle,
upper_bound,
min_edge_offset,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS);
routingStep<REVERSE_DIRECTION>(
facade, reverse_heap3, forward_heap3, middle, upper_bound, min_edge_offset, {}, {});
}
}
return (upper_bound <= t_test_path_weight);
@ -566,15 +554,12 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat

InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
const PhantomNodes &phantom_node_pair,
const PhantomEndpointCandidates &endpoint_candidates,
unsigned /*number_of_alternatives*/)
{
InternalRouteResult primary_route;
InternalRouteResult secondary_route;

primary_route.segment_end_coordinates = {phantom_node_pair};
secondary_route.segment_end_coordinates = {phantom_node_pair};

std::vector<NodeID> alternative_path;
std::vector<NodeID> via_node_candidate_list;
std::vector<SearchSpaceEdge> forward_search_space;
@ -592,15 +577,13 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi

EdgeWeight upper_bound_to_shortest_path_weight = INVALID_EDGE_WEIGHT;
NodeID middle_node = SPECIAL_NODEID;
const EdgeWeight min_edge_offset =
std::min(phantom_node_pair.source_phantom.forward_segment_id.enabled
? -phantom_node_pair.source_phantom.GetForwardWeightPlusOffset()
: 0,
phantom_node_pair.source_phantom.reverse_segment_id.enabled
? -phantom_node_pair.source_phantom.GetReverseWeightPlusOffset()
: 0);

insertNodesInHeaps(forward_heap1, reverse_heap1, phantom_node_pair);
insertNodesInHeaps(forward_heap1, reverse_heap1, endpoint_candidates);
// get offset to account for offsets on phantom nodes on compressed edges
EdgeWeight min_edge_offset = forward_heap1.Empty() ? 0 : std::min(0, forward_heap1.MinKey());
BOOST_ASSERT(min_edge_offset <= 0);
// we only every insert negative offsets for nodes in the forward heap
BOOST_ASSERT(reverse_heap1.Empty() || reverse_heap1.MinKey() >= 0);

// search from s and t till new_min/(1+epsilon) > weight_of_shortest_path
while (0 < (forward_heap1.Size() + reverse_heap1.Size()))
@ -790,7 +773,7 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi
&v_t_middle,
min_edge_offset))
{
// select first admissable
// select first admissible
selected_via_node = candidate.node;
break;
}
@ -799,20 +782,23 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi
// Unpack shortest path and alternative, if they exist
if (INVALID_EDGE_WEIGHT != upper_bound_to_shortest_path_weight)
{
auto phantom_endpoints = endpointsFromCandidates(endpoint_candidates, packed_shortest_path);
primary_route.leg_endpoints = {phantom_endpoints};

BOOST_ASSERT(!packed_shortest_path.empty());
primary_route.unpacked_path_segments.resize(1);
primary_route.source_traversed_in_reverse.push_back(
(packed_shortest_path.front() !=
phantom_node_pair.source_phantom.forward_segment_id.id));
phantom_endpoints.source_phantom.forward_segment_id.id));
primary_route.target_traversed_in_reverse.push_back((
packed_shortest_path.back() != phantom_node_pair.target_phantom.forward_segment_id.id));
packed_shortest_path.back() != phantom_endpoints.target_phantom.forward_segment_id.id));

unpackPath(facade,
// -- packed input
packed_shortest_path.begin(),
packed_shortest_path.end(),
// -- start of route
phantom_node_pair,
phantom_endpoints,
// -- unpacked output
primary_route.unpacked_path_segments.front());
primary_route.shortest_path_weight = upper_bound_to_shortest_path_weight;
@ -830,19 +816,23 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi
v_t_middle,
packed_alternate_path);

auto phantom_endpoints =
endpointsFromCandidates(endpoint_candidates, packed_alternate_path);
secondary_route.leg_endpoints = {phantom_endpoints};

secondary_route.unpacked_path_segments.resize(1);
secondary_route.source_traversed_in_reverse.push_back(
(packed_alternate_path.front() !=
phantom_node_pair.source_phantom.forward_segment_id.id));
phantom_endpoints.source_phantom.forward_segment_id.id));
secondary_route.target_traversed_in_reverse.push_back(
(packed_alternate_path.back() !=
phantom_node_pair.target_phantom.forward_segment_id.id));
phantom_endpoints.target_phantom.forward_segment_id.id));

// unpack the alternate path
unpackPath(facade,
packed_alternate_path.begin(),
packed_alternate_path.end(),
phantom_node_pair,
phantom_endpoints,
secondary_route.unpacked_path_segments.front());

secondary_route.shortest_path_weight = weight_of_via_path;

@ -133,12 +133,13 @@ double getLongerByFactorBasedOnDuration(const EdgeWeight duration)
return a + b / (duration - d) + c / std::pow(duration - d, 3);
}

Parameters parametersFromRequest(const PhantomNodes &phantom_node_pair)
Parameters parametersFromRequest(const PhantomEndpointCandidates &endpoint_candidates)
{
Parameters parameters;

const auto distance = util::coordinate_calculation::greatCircleDistance(
phantom_node_pair.source_phantom.location, phantom_node_pair.target_phantom.location);
candidatesSnappedLocation(endpoint_candidates.source_phantoms),
candidatesSnappedLocation(endpoint_candidates.target_phantoms));

// 10km
if (distance < 10000.)
@ -547,7 +548,7 @@ void unpackPackedPaths(InputIt first,
OutIt out,
SearchEngineData<Algorithm> &search_engine_data,
const Facade &facade,
const PhantomNodes &phantom_node_pair)
const PhantomEndpointCandidates &endpoint_candidates)
{
util::static_assert_iter_category<InputIt, std::input_iterator_tag>();
util::static_assert_iter_category<OutIt, std::output_iterator_tag>();
@ -600,7 +601,7 @@ void unpackPackedPaths(InputIt first,
}
else
{ // an overlay graph edge
LevelID level = getNodeQueryLevel(partition, source, phantom_node_pair); // XXX
LevelID level = getNodeQueryLevel(partition, source, endpoint_candidates); // XXX
CellID parent_cell_id = partition.GetCell(level, source);
BOOST_ASSERT(parent_cell_id == partition.GetCell(level, target));

@ -624,8 +625,8 @@ void unpackPackedPaths(InputIt first,
facade,
forward_heap,
reverse_heap,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
{},
{},
INVALID_EDGE_WEIGHT,
sublevel,
parent_cell_id);
@ -656,13 +657,13 @@ void unpackPackedPaths(InputIt first,
inline std::vector<WeightedViaNode>
makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
const Facade &facade,
const PhantomNodes &phantom_node_pair,
const PhantomEndpointCandidates &endpoint_candidates,
const Parameters &parameters)
{
Heap &forward_heap = *search_engine_data.forward_heap_1;
Heap &reverse_heap = *search_engine_data.reverse_heap_1;

insertNodesInHeaps(forward_heap, reverse_heap, phantom_node_pair);
insertNodesInHeaps(forward_heap, reverse_heap, endpoint_candidates);
if (forward_heap.Empty() || reverse_heap.Empty())
{
return {};
@ -712,9 +713,9 @@ makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
reverse_heap,
overlap_via,
overlap_weight,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
phantom_node_pair);
{},
{},
endpoint_candidates);

if (!forward_heap.Empty())
forward_heap_min = forward_heap.MinKey();
@ -738,9 +739,9 @@ makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
forward_heap,
overlap_via,
overlap_weight,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
phantom_node_pair);
{},
{},
endpoint_candidates);

if (!reverse_heap.Empty())
reverse_heap_min = reverse_heap.MinKey();
@ -776,10 +777,10 @@ makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
// https://github.com/Project-OSRM/osrm-backend/issues/3905
InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &search_engine_data,
const Facade &facade,
const PhantomNodes &phantom_node_pair,
const PhantomEndpointCandidates &endpoint_candidates,
unsigned number_of_alternatives)
{
Parameters parameters = parametersFromRequest(phantom_node_pair);
Parameters parameters = parametersFromRequest(endpoint_candidates);

const auto max_number_of_alternatives = number_of_alternatives;
const auto max_number_of_alternatives_to_unpack =
@ -798,7 +799,7 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &sear

// Do forward and backward search, save search space overlap as via candidates.
auto candidate_vias =
makeCandidateVias(search_engine_data, facade, phantom_node_pair, parameters);
makeCandidateVias(search_engine_data, facade, endpoint_candidates, parameters);

const auto by_weight = [](const auto &lhs, const auto &rhs) { return lhs.weight < rhs.weight; };
auto shortest_path_via_it =
@ -813,8 +814,6 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &sear
if (!has_shortest_path)
{
InternalRouteResult invalid;
invalid.shortest_path_weight = INVALID_EDGE_WEIGHT;
invalid.segment_end_coordinates = {phantom_node_pair};
return invalid;
}

@ -900,7 +899,7 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &sear
std::back_inserter(unpacked_paths),
search_engine_data,
facade,
phantom_node_pair);
endpoint_candidates);

//
// Filter and rank a second time. This time instead of being fast and doing
@ -927,7 +926,7 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &sear
routes.reserve(number_of_unpacked_paths);

const auto unpacked_path_to_route = [&](const WeightedViaNodeUnpackedPath &path) {
return extractRoute(facade, path.via.weight, phantom_node_pair, path.nodes, path.edges);
return extractRoute(facade, path.via.weight, endpoint_candidates, path.nodes, path.edges);
};

std::transform(unpacked_paths_first,

@ -19,7 +19,7 @@ namespace routing_algorithms
template <>
InternalRouteResult directShortestPathSearch(SearchEngineData<ch::Algorithm> &engine_working_data,
const DataFacade<ch::Algorithm> &facade,
const PhantomNodes &phantom_nodes)
const PhantomEndpointCandidates &endpoint_candidates)
{
engine_working_data.InitializeOrClearFirstThreadLocalStorage(facade.GetNumberOfNodes());
auto &forward_heap = *engine_working_data.forward_heap_1;
@ -29,7 +29,7 @@ InternalRouteResult directShortestPathSearch(SearchEngineData<ch::Algorithm> &en

EdgeWeight weight = INVALID_EDGE_WEIGHT;
std::vector<NodeID> packed_leg;
insertNodesInHeaps(forward_heap, reverse_heap, phantom_nodes);
insertNodesInHeaps(forward_heap, reverse_heap, endpoint_candidates);

search(engine_working_data,
facade,
@ -37,9 +37,9 @@ InternalRouteResult directShortestPathSearch(SearchEngineData<ch::Algorithm> &en
reverse_heap,
weight,
packed_leg,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
phantom_nodes);
{},
{},
endpoint_candidates);

std::vector<NodeID> unpacked_nodes;
std::vector<EdgeID> unpacked_edges;
@ -60,19 +60,19 @@ InternalRouteResult directShortestPathSearch(SearchEngineData<ch::Algorithm> &en
});
}

return extractRoute(facade, weight, phantom_nodes, unpacked_nodes, unpacked_edges);
return extractRoute(facade, weight, endpoint_candidates, unpacked_nodes, unpacked_edges);
}

template <>
InternalRouteResult directShortestPathSearch(SearchEngineData<mld::Algorithm> &engine_working_data,
const DataFacade<mld::Algorithm> &facade,
const PhantomNodes &phantom_nodes)
const PhantomEndpointCandidates &endpoint_candidates)
{
engine_working_data.InitializeOrClearFirstThreadLocalStorage(facade.GetNumberOfNodes(),
facade.GetMaxBorderNodeID() + 1);
auto &forward_heap = *engine_working_data.forward_heap_1;
auto &reverse_heap = *engine_working_data.reverse_heap_1;
insertNodesInHeaps(forward_heap, reverse_heap, phantom_nodes);
insertNodesInHeaps(forward_heap, reverse_heap, endpoint_candidates);

// TODO: when structured bindings will be allowed change to
// auto [weight, source_node, target_node, unpacked_edges] = ...
@ -83,12 +83,12 @@ InternalRouteResult directShortestPathSearch(SearchEngineData<mld::Algorithm> &e
facade,
forward_heap,
reverse_heap,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
{},
{},
INVALID_EDGE_WEIGHT,
phantom_nodes);
endpoint_candidates);

return extractRoute(facade, weight, phantom_nodes, unpacked_nodes, unpacked_edges);
return extractRoute(facade, weight, endpoint_candidates, unpacked_nodes, unpacked_edges);
}

} // namespace routing_algorithms

@ -49,7 +49,7 @@ void relaxOutgoingEdges(
const DataFacade<Algorithm> &facade,
const typename SearchEngineData<Algorithm>::ManyToManyQueryHeap::HeapNode &heapNode,
typename SearchEngineData<Algorithm>::ManyToManyQueryHeap &query_heap,
const PhantomNode &)
const PhantomNodeCandidates &)
{
if (stallAtNode<DIRECTION>(facade, heapNode, query_heap))
{
@ -99,7 +99,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
std::vector<EdgeDuration> &durations_table,
std::vector<EdgeDistance> &distances_table,
std::vector<NodeID> &middle_nodes_table,
const PhantomNode &phantom_node)
const PhantomNodeCandidates &candidates)
{
// Take a copy of the extracted node because otherwise could be modified later if toHeapNode is
// the same
@ -151,14 +151,14 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
}
}

relaxOutgoingEdges<FORWARD_DIRECTION>(facade, heapNode, query_heap, phantom_node);
relaxOutgoingEdges<FORWARD_DIRECTION>(facade, heapNode, query_heap, candidates);
}

void backwardRoutingStep(const DataFacade<Algorithm> &facade,
const unsigned column_index,
typename SearchEngineData<Algorithm>::ManyToManyQueryHeap &query_heap,
std::vector<NodeBucket> &search_space_with_buckets,
const PhantomNode &phantom_node)
const PhantomNodeCandidates &candidates)
{
// Take a copy (no ref &) of the extracted node because otherwise could be modified later if
// toHeapNode is the same
@ -172,7 +172,7 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
heapNode.data.duration,
heapNode.data.distance);

relaxOutgoingEdges<REVERSE_DIRECTION>(facade, heapNode, query_heap, phantom_node);
relaxOutgoingEdges<REVERSE_DIRECTION>(facade, heapNode, query_heap, candidates);
}

} // namespace ch
@ -181,7 +181,7 @@ template <>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
const DataFacade<ch::Algorithm> &facade,
const std::vector<PhantomNode> &phantom_nodes,
const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<std::size_t> &source_indices,
const std::vector<std::size_t> &target_indices,
const bool calculate_distance)
@ -202,18 +202,18 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
for (std::uint32_t column_index = 0; column_index < target_indices.size(); ++column_index)
{
const auto index = target_indices[column_index];
const auto &phantom = phantom_nodes[index];
const auto &target_candidates = candidates_list[index];

engine_working_data.InitializeOrClearManyToManyThreadLocalStorage(
facade.GetNumberOfNodes());
auto &query_heap = *(engine_working_data.many_to_many_heap);
insertTargetInHeap(query_heap, phantom);
insertTargetInHeap(query_heap, target_candidates);

// Explore search space
while (!query_heap.Empty())
{
backwardRoutingStep(
facade, column_index, query_heap, search_space_with_buckets, phantom);
facade, column_index, query_heap, search_space_with_buckets, target_candidates);
}
}

@ -224,13 +224,13 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
for (std::uint32_t row_index = 0; row_index < source_indices.size(); ++row_index)
{
const auto source_index = source_indices[row_index];
const auto &source_phantom = phantom_nodes[source_index];
const auto &source_candidates = candidates_list[source_index];

// Clear heap and insert source nodes
engine_working_data.InitializeOrClearManyToManyThreadLocalStorage(
facade.GetNumberOfNodes());
auto &query_heap = *(engine_working_data.many_to_many_heap);
insertSourceInHeap(query_heap, source_phantom);
insertSourceInHeap(query_heap, source_candidates);

// Explore search space
while (!query_heap.Empty())
@ -244,7 +244,7 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
durations_table,
distances_table,
middle_nodes_table,
source_phantom);
source_candidates);
}
}


@ -25,7 +25,7 @@ using PackedPath = std::vector<PackedEdge>;
template <typename MultiLevelPartition>
inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition,
const NodeID node,
const PhantomNode &phantom_node,
const PhantomNodeCandidates &phantom_node,
const LevelID maximal_level)
{
const auto node_level = getNodeQueryLevel(partition, node, phantom_node);
@ -96,7 +96,7 @@ void relaxOutgoingEdges(
const DataFacade<mld::Algorithm> &facade,
const typename SearchEngineData<mld::Algorithm>::ManyToManyQueryHeap::HeapNode &heapNode,
typename SearchEngineData<mld::Algorithm>::ManyToManyQueryHeap &query_heap,
Args... args)
const Args &... args)
{
BOOST_ASSERT(!facade.ExcludeNode(heapNode.node));

@ -214,26 +214,27 @@ template <bool DIRECTION>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
const std::vector<PhantomNode> &phantom_nodes,
std::size_t phantom_index,
const std::vector<std::size_t> &phantom_indices,
const std::vector<PhantomNodeCandidates> &candidates_list,
std::size_t source_index,
const std::vector<std::size_t> &target_indices,
const bool calculate_distance)
{
std::vector<EdgeWeight> weights_table(phantom_indices.size(), INVALID_EDGE_WEIGHT);
std::vector<EdgeDuration> durations_table(phantom_indices.size(), MAXIMAL_EDGE_DURATION);
std::vector<EdgeDistance> distances_table(calculate_distance ? phantom_indices.size() : 0,
std::vector<EdgeWeight> weights_table(target_indices.size(), INVALID_EDGE_WEIGHT);
std::vector<EdgeDuration> durations_table(target_indices.size(), MAXIMAL_EDGE_DURATION);
std::vector<EdgeDistance> distances_table(calculate_distance ? target_indices.size() : 0,
MAXIMAL_EDGE_DISTANCE);
std::vector<NodeID> middle_nodes_table(phantom_indices.size(), SPECIAL_NODEID);
std::vector<NodeID> middle_nodes_table(target_indices.size(), SPECIAL_NODEID);

// Collect destination (source) nodes into a map
std::unordered_multimap<NodeID, std::tuple<std::size_t, EdgeWeight, EdgeDuration, EdgeDistance>>
target_nodes_index;
target_nodes_index.reserve(phantom_indices.size());
for (std::size_t index = 0; index < phantom_indices.size(); ++index)
target_nodes_index.reserve(target_indices.size());
for (std::size_t index = 0; index < target_indices.size(); ++index)
{
const auto &phantom_index = phantom_indices[index];
const auto &phantom_node = phantom_nodes[phantom_index];
const auto &target_candidates = candidates_list[target_indices[index]];

for (const auto &phantom_node : target_candidates)
{
if (DIRECTION == FORWARD_DIRECTION)
{
if (phantom_node.IsValidForwardTarget())
@ -243,6 +244,7 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
phantom_node.GetForwardWeightPlusOffset(),
phantom_node.GetForwardDuration(),
phantom_node.GetForwardDistance())});

if (phantom_node.IsValidReverseTarget())
target_nodes_index.insert(
{phantom_node.reverse_segment_id.id,
@ -260,6 +262,7 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
-phantom_node.GetForwardWeightPlusOffset(),
-phantom_node.GetForwardDuration(),
-phantom_node.GetForwardDistance())});

if (phantom_node.IsValidReverseSource())
target_nodes_index.insert(
{phantom_node.reverse_segment_id.id,
@ -269,6 +272,7 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
-phantom_node.GetReverseDistance())});
}
}
}

// Initialize query heap
engine_working_data.InitializeOrClearManyToManyThreadLocalStorage(
@ -337,8 +341,10 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
};

{ // Place source (destination) adjacent nodes into the heap
const auto &phantom_node = phantom_nodes[phantom_index];
const auto &source_candidates = candidates_list[source_index];

for (const auto &phantom_node : source_candidates)
{
if (DIRECTION == FORWARD_DIRECTION)
{
if (phantom_node.IsValidForwardSource())
@ -376,6 +382,7 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
}
}
}
}

while (!query_heap.Empty() && !target_nodes_index.empty())
{
@ -389,7 +396,7 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,

// Relax outgoing edges
relaxOutgoingEdges<DIRECTION>(
facade, heapNode, query_heap, phantom_nodes, phantom_index, phantom_indices);
facade, heapNode, query_heap, candidates_list, source_index, target_indices);
}

return std::make_pair(std::move(durations_table), std::move(distances_table));
@ -409,7 +416,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
std::vector<EdgeDuration> &durations_table,
std::vector<EdgeDistance> &distances_table,
std::vector<NodeID> &middle_nodes_table,
const PhantomNode &phantom_node)
const PhantomNodeCandidates &candidates)
{
// Take a copy of the extracted node because otherwise could be modified later if toHeapNode is
// the same
@ -455,7 +462,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
}
}

relaxOutgoingEdges<DIRECTION>(facade, heapNode, query_heap, phantom_node);
relaxOutgoingEdges<DIRECTION>(facade, heapNode, query_heap, candidates);
}

template <bool DIRECTION>
@ -463,7 +470,7 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
const unsigned column_idx,
typename SearchEngineData<Algorithm>::ManyToManyQueryHeap &query_heap,
std::vector<NodeBucket> &search_space_with_buckets,
const PhantomNode &phantom_node)
const PhantomNodeCandidates &candidates)
{
// Take a copy of the extracted node because otherwise could be modified later if toHeapNode is
// the same
@ -481,7 +488,7 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
const auto &partition = facade.GetMultiLevelPartition();
const auto maximal_level = partition.GetNumberOfLevels() - 1;

relaxOutgoingEdges<!DIRECTION>(facade, heapNode, query_heap, phantom_node, maximal_level);
relaxOutgoingEdges<!DIRECTION>(facade, heapNode, query_heap, candidates, maximal_level);
}

template <bool DIRECTION>
@ -524,7 +531,7 @@ template <bool DIRECTION>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
const std::vector<PhantomNode> &phantom_nodes,
const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<std::size_t> &source_indices,
const std::vector<std::size_t> &target_indices,
const bool calculate_distance)
@ -545,22 +552,22 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
for (std::uint32_t column_idx = 0; column_idx < target_indices.size(); ++column_idx)
{
const auto index = target_indices[column_idx];
const auto &target_phantom = phantom_nodes[index];
const auto &target_candidates = candidates_list[index];

engine_working_data.InitializeOrClearManyToManyThreadLocalStorage(
facade.GetNumberOfNodes(), facade.GetMaxBorderNodeID() + 1);
auto &query_heap = *(engine_working_data.many_to_many_heap);

if (DIRECTION == FORWARD_DIRECTION)
insertTargetInHeap(query_heap, target_phantom);
insertTargetInHeap(query_heap, target_candidates);
else
insertSourceInHeap(query_heap, target_phantom);
insertSourceInHeap(query_heap, target_candidates);

// explore search space
while (!query_heap.Empty())
{
backwardRoutingStep<DIRECTION>(
facade, column_idx, query_heap, search_space_with_buckets, target_phantom);
facade, column_idx, query_heap, search_space_with_buckets, target_candidates);
}
}

@ -571,7 +578,7 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
for (std::uint32_t row_idx = 0; row_idx < source_indices.size(); ++row_idx)
{
const auto source_index = source_indices[row_idx];
const auto &source_phantom = phantom_nodes[source_index];
const auto &source_candidates = candidates_list[source_index];

// Clear heap and insert source nodes
engine_working_data.InitializeOrClearManyToManyThreadLocalStorage(
@ -580,9 +587,9 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
auto &query_heap = *(engine_working_data.many_to_many_heap);

if (DIRECTION == FORWARD_DIRECTION)
insertSourceInHeap(query_heap, source_phantom);
insertSourceInHeap(query_heap, source_candidates);
else
insertTargetInHeap(query_heap, source_phantom);
insertTargetInHeap(query_heap, source_candidates);

// Explore search space
while (!query_heap.Empty())
@ -597,7 +604,7 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
durations_table,
distances_table,
middle_nodes_table,
source_phantom);
source_candidates);
}
}

@ -622,7 +629,7 @@ template <>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
manyToManySearch(SearchEngineData<mld::Algorithm> &engine_working_data,
const DataFacade<mld::Algorithm> &facade,
const std::vector<PhantomNode> &phantom_nodes,
const std::vector<PhantomNodeCandidates> &candidates_list,
const std::vector<std::size_t> &source_indices,
const std::vector<std::size_t> &target_indices,
const bool calculate_distance)
@ -631,7 +638,7 @@ manyToManySearch(SearchEngineData<mld::Algorithm> &engine_working_data,
{ // TODO: check if target_indices.size() == 1 and do a bi-directional search
return mld::oneToManySearch<FORWARD_DIRECTION>(engine_working_data,
facade,
phantom_nodes,
candidates_list,
source_indices.front(),
target_indices,
calculate_distance);
@ -641,7 +648,7 @@ manyToManySearch(SearchEngineData<mld::Algorithm> &engine_working_data,
{
return mld::oneToManySearch<REVERSE_DIRECTION>(engine_working_data,
facade,
phantom_nodes,
candidates_list,
target_indices.front(),
source_indices,
calculate_distance);
@ -651,7 +658,7 @@ manyToManySearch(SearchEngineData<mld::Algorithm> &engine_working_data,
{
return mld::manyToManySearch<REVERSE_DIRECTION>(engine_working_data,
facade,
phantom_nodes,
candidates_list,
target_indices,
source_indices,
calculate_distance);
@ -659,7 +666,7 @@ manyToManySearch(SearchEngineData<mld::Algorithm> &engine_working_data,

return mld::manyToManySearch<FORWARD_DIRECTION>(engine_working_data,
facade,
phantom_nodes,
candidates_list,
source_indices,
target_indices,
calculate_distance);

@ -7,30 +7,104 @@ namespace engine
namespace routing_algorithms
{

bool needsLoopForward(const PhantomNode &source_phantom, const PhantomNode &target_phantom)
bool requiresForwardLoop(const PhantomNode &source, const PhantomNode &target)
{
return source_phantom.IsValidForwardSource() && target_phantom.IsValidForwardTarget() &&
source_phantom.forward_segment_id.id == target_phantom.forward_segment_id.id &&
source_phantom.GetForwardWeightPlusOffset() >
target_phantom.GetForwardWeightPlusOffset();
return source.IsValidForwardSource() && target.IsValidForwardTarget() &&
source.forward_segment_id.id == target.forward_segment_id.id &&
source.GetForwardWeightPlusOffset() > target.GetForwardWeightPlusOffset();
}

bool needsLoopBackwards(const PhantomNode &source_phantom, const PhantomNode &target_phantom)
bool requiresBackwardLoop(const PhantomNode &source, const PhantomNode &target)
{
return source_phantom.IsValidReverseSource() && target_phantom.IsValidReverseTarget() &&
source_phantom.reverse_segment_id.id == target_phantom.reverse_segment_id.id &&
source_phantom.GetReverseWeightPlusOffset() >
target_phantom.GetReverseWeightPlusOffset();
return source.IsValidReverseSource() && target.IsValidReverseTarget() &&
source.reverse_segment_id.id == target.reverse_segment_id.id &&
source.GetReverseWeightPlusOffset() > target.GetReverseWeightPlusOffset();
}

bool needsLoopForward(const PhantomNodes &phantoms)
std::vector<NodeID> getForwardLoopNodes(const PhantomEndpointCandidates &endpoint_candidates)
{
return needsLoopForward(phantoms.source_phantom, phantoms.target_phantom);
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
{
auto requires_loop =
std::any_of(endpoint_candidates.target_phantoms.begin(),
endpoint_candidates.target_phantoms.end(),
[&](const auto &target_phantom) {
return requiresForwardLoop(source_phantom, target_phantom);
});
if (requires_loop)
{
res.push_back(source_phantom.forward_segment_id.id);
}
}
return res;
}

bool needsLoopBackwards(const PhantomNodes &phantoms)
std::vector<NodeID> getForwardLoopNodes(const PhantomCandidatesToTarget &endpoint_candidates)
{
return needsLoopBackwards(phantoms.source_phantom, phantoms.target_phantom);
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
{
if (requiresForwardLoop(source_phantom, endpoint_candidates.target_phantom))
{
res.push_back(source_phantom.forward_segment_id.id);
}
}
return res;
}

std::vector<NodeID> getBackwardLoopNodes(const PhantomEndpointCandidates &endpoint_candidates)
{
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
{
auto requires_loop =
std::any_of(endpoint_candidates.target_phantoms.begin(),
endpoint_candidates.target_phantoms.end(),
[&](const auto &target_phantom) {
return requiresBackwardLoop(source_phantom, target_phantom);
});
if (requires_loop)
{
res.push_back(source_phantom.reverse_segment_id.id);
}
}
return res;
}

std::vector<NodeID> getBackwardLoopNodes(const PhantomCandidatesToTarget &endpoint_candidates)
{
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
{
if (requiresBackwardLoop(source_phantom, endpoint_candidates.target_phantom))
{
res.push_back(source_phantom.reverse_segment_id.id);
}
}
return res;
}

PhantomEndpoints endpointsFromCandidates(const PhantomEndpointCandidates &candidates,
const std::vector<NodeID> &path)
{
auto source_it = std::find_if(candidates.source_phantoms.begin(),
candidates.source_phantoms.end(),
[&path](const auto &source_phantom) {
return path.front() == source_phantom.forward_segment_id.id ||
path.front() == source_phantom.reverse_segment_id.id;
});
BOOST_ASSERT(source_it != candidates.source_phantoms.end());

auto target_it = std::find_if(candidates.target_phantoms.begin(),
candidates.target_phantoms.end(),
[&path](const auto &target_phantom) {
return path.back() == target_phantom.forward_segment_id.id ||
path.back() == target_phantom.reverse_segment_id.id;
});
BOOST_ASSERT(target_it != candidates.target_phantoms.end());

return PhantomEndpoints{*source_it, *target_it};
}

} // namespace routing_algorithms

@ -95,9 +95,8 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
EdgeWeight &weight,
std::vector<NodeID> &packed_leg,
const bool force_loop_forward,
const bool force_loop_reverse,
const PhantomNodes & /*phantom_nodes*/,
const std::vector<NodeID> &force_loop_forward_nodes,
const std::vector<NodeID> &force_loop_reverse_nodes,
const EdgeWeight weight_upper_bound)
{
if (forward_heap.Empty() || reverse_heap.Empty())
@ -126,8 +125,8 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
middle,
weight,
min_edge_offset,
force_loop_forward,
force_loop_reverse);
force_loop_forward_nodes,
force_loop_reverse_nodes);
}
if (!reverse_heap.Empty())
{
@ -137,8 +136,8 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
middle,
weight,
min_edge_offset,
force_loop_reverse,
force_loop_forward);
force_loop_reverse_nodes,
force_loop_forward_nodes);
}
}

@ -179,7 +178,8 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
forward_heap.Clear();
reverse_heap.Clear();

insertNodesInHeaps(forward_heap, reverse_heap, {source_phantom, target_phantom});
PhantomEndpoints endpoints{source_phantom, target_phantom};
insertNodesInHeaps(forward_heap, reverse_heap, endpoints);

EdgeWeight weight = INVALID_EDGE_WEIGHT;
std::vector<NodeID> packed_path;
@ -189,9 +189,9 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
reverse_heap,
weight,
packed_path,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
{source_phantom, target_phantom},
{},
{},
endpoints,
weight_upper_bound);

if (weight == INVALID_EDGE_WEIGHT)

@ -12,13 +12,13 @@ namespace routing_algorithms
template InternalRouteResult
shortestPathSearch(SearchEngineData<ch::Algorithm> &engine_working_data,
const DataFacade<ch::Algorithm> &facade,
const std::vector<PhantomNodes> &phantom_nodes_vector,
const std::vector<PhantomNodeCandidates> &waypoint_candidates,
const boost::optional<bool> continue_straight_at_waypoint);

template InternalRouteResult
shortestPathSearch(SearchEngineData<mld::Algorithm> &engine_working_data,
const DataFacade<mld::Algorithm> &facade,
const std::vector<PhantomNodes> &phantom_nodes_vector,
const std::vector<PhantomNodeCandidates> &waypoint_candidates,
const boost::optional<bool> continue_straight_at_waypoint);

} // namespace routing_algorithms

@ -32,7 +32,7 @@

#include <tbb/blocked_range.h>
#include <tbb/parallel_for.h>
#include <tbb/pipeline.h>
#include <tbb/parallel_pipeline.h>

namespace std
{
@ -414,6 +414,48 @@ EdgeBasedGraphFactory::GenerateEdgeExpandedNodes(const WayRestrictionMap &way_re
// the only consumer of this mapping).
mapping.push_back(NBGToEBG{node_u, node_v, edge_based_node_id, SPECIAL_NODEID});

// We also want to include duplicate via edges in the list of segments that
// an input location can snap to. Without this, it would be possible to not find
// certain routes that end on a via-way, because they are only routable via the
// duplicated edge.
const auto &forward_geometry = m_compressed_edge_container.GetBucketReference(eid);
const auto segment_count = forward_geometry.size();

NodeID current_edge_source_coordinate_id = node_u;
const EdgeData &forward_data = m_node_based_graph.GetEdgeData(eid);

const auto edge_id_to_segment_id = [](const NodeID edge_based_node_id) {
if (edge_based_node_id == SPECIAL_NODEID)
{
return SegmentID{SPECIAL_SEGMENTID, false};
}

return SegmentID{edge_based_node_id, true};
};

// Add segments of edge-based nodes
for (const auto i : util::irange(std::size_t{0}, segment_count))
{
const NodeID current_edge_target_coordinate_id = forward_geometry[i].node_id;

// don't add node-segments for penalties
if (current_edge_target_coordinate_id == current_edge_source_coordinate_id)
continue;

BOOST_ASSERT(current_edge_target_coordinate_id !=
current_edge_source_coordinate_id);

// build edges
m_edge_based_node_segments.emplace_back(edge_id_to_segment_id(edge_based_node_id),
SegmentID{SPECIAL_SEGMENTID, false},
current_edge_source_coordinate_id,
current_edge_target_coordinate_id,
i,
forward_data.flags.startpoint);

current_edge_source_coordinate_id = current_edge_target_coordinate_id;
}

edge_based_node_id++;
progress.PrintStatus(progress_counter++);
}
@ -489,8 +531,8 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
// Sets of intersection IDs are batched in groups of GRAINSIZE (100) `generator_stage`, then
// those groups are processed in parallel `processor_stage`. Finally, results are appended to
// the various buffer vectors by the `output_stage` in the same order that the `generator_stage`
// created them in (tbb::filter::serial_in_order creates this guarantee). The order needs to be
// maintained because we depend on it later in the processing pipeline.
// created them in (tbb::filter_mode::serial_in_order creates this guarantee). The order needs
// to be maintained because we depend on it later in the processing pipeline.
{
const NodeID node_count = m_node_based_graph.GetNumberOfNodes();

@ -540,8 +582,8 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
const constexpr unsigned GRAINSIZE = 100;

// First part of the pipeline generates iterator ranges of IDs in sets of GRAINSIZE
tbb::filter_t<void, tbb::blocked_range<NodeID>> generator_stage(
tbb::filter::serial_in_order, [&](tbb::flow_control &fc) {
tbb::filter<void, tbb::blocked_range<NodeID>> generator_stage(
tbb::filter_mode::serial_in_order, [&](tbb::flow_control &fc) {
if (current_node < node_count)
{
auto next_node = std::min(current_node + GRAINSIZE, node_count);
@ -675,8 +717,8 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
//
// Edge-based-graph stage
//
tbb::filter_t<tbb::blocked_range<NodeID>, EdgesPipelineBufferPtr> processor_stage(
tbb::filter::parallel, [&](const tbb::blocked_range<NodeID> &intersection_node_range) {
tbb::filter<tbb::blocked_range<NodeID>, EdgesPipelineBufferPtr> processor_stage(
tbb::filter_mode::parallel,
[&](const tbb::blocked_range<NodeID> &intersection_node_range) {
auto buffer = std::make_shared<EdgesPipelineBuffer>();
buffer->nodes_processed = intersection_node_range.size();

@ -1085,8 +1128,8 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
util::UnbufferedLog log;
util::Percent routing_progress(log, node_count);
std::vector<EdgeWithData> delayed_data;
tbb::filter_t<EdgesPipelineBufferPtr, void> output_stage(
tbb::filter::serial_in_order, [&](auto buffer) {
tbb::filter<EdgesPipelineBufferPtr, void> output_stage(
tbb::filter_mode::serial_in_order, [&](auto buffer) {
routing_progress.PrintAddition(buffer->nodes_processed);

m_connectivity_checksum = buffer->checksum.update_checksum(m_connectivity_checksum);

@ -43,13 +43,8 @@
|
||||
#include <osmium/io/any_input.hpp>
|
||||
#include <osmium/thread/pool.hpp>
|
||||
#include <osmium/visitor.hpp>
|
||||
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
#include <tbb/global_control.h>
|
||||
#else
|
||||
#include <tbb/task_scheduler_init.h>
|
||||
#endif
|
||||
#include <tbb/pipeline.h>
|
||||
#include <tbb/parallel_pipeline.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <atomic>
|
||||
@ -206,13 +201,8 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)
const unsigned recommended_num_threads = std::thread::hardware_concurrency();
const auto number_of_threads = std::min(recommended_num_threads, config.requested_num_threads);

#if TBB_VERSION_MAJOR == 2020
tbb::global_control gc(tbb::global_control::max_allowed_parallelism,
config.requested_num_threads);
#else
tbb::task_scheduler_init init(config.requested_num_threads);
BOOST_ASSERT(init.is_active());
#endif
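`tbb::task_scheduler_init` no longer exists in oneTBB, so the thread cap is expressed with `tbb::global_control`; the `#if TBB_VERSION_MAJOR == 2020` guard above appears to be the transitional form this change drops. A hedged sketch of capping worker threads, with an illustrative function and workload rather than OSRM's Extractor:

```cpp
// Hedged sketch: capping TBB parallelism with tbb::global_control, the
// replacement for the removed tbb::task_scheduler_init.
#include <tbb/global_control.h>
#include <tbb/parallel_for.h>

#include <cstddef>

void run_with_thread_cap(std::size_t requested_num_threads)
{
    // Caps the number of worker threads for the lifetime of `gc` (RAII scope).
    tbb::global_control gc(tbb::global_control::max_allowed_parallelism,
                           requested_num_threads);

    tbb::parallel_for(std::size_t{0}, std::size_t{1000}, [](std::size_t /*i*/) {
        // at most requested_num_threads work items run here concurrently
    });
}

int main() { run_with_thread_cap(2); }
```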
|
||||
|
||||
LaneDescriptionMap turn_lane_map;
|
||||
std::vector<TurnRestriction> turn_restrictions;
|
||||
@ -456,8 +446,8 @@ std::
|
||||
ExtractionRelationContainer relations;
|
||||
|
||||
const auto buffer_reader = [](osmium::io::Reader &reader) {
|
||||
return tbb::filter_t<void, SharedBuffer>(
|
||||
tbb::filter::serial_in_order, [&reader](tbb::flow_control &fc) {
|
||||
return tbb::filter<void, SharedBuffer>(
|
||||
tbb::filter_mode::serial_in_order, [&reader](tbb::flow_control &fc) {
|
||||
if (auto buffer = reader.read())
|
||||
{
|
||||
return std::make_shared<osmium::memory::Buffer>(std::move(buffer));
|
||||
@ -478,15 +468,15 @@ std::
osmium_index_type location_cache;
osmium_location_handler_type location_handler(location_cache);

tbb::filter_t<SharedBuffer, SharedBuffer> location_cacher(
tbb::filter::serial_in_order, [&location_handler](SharedBuffer buffer) {
tbb::filter<SharedBuffer, SharedBuffer> location_cacher(
tbb::filter_mode::serial_in_order, [&location_handler](SharedBuffer buffer) {
osmium::apply(buffer->begin(), buffer->end(), location_handler);
return buffer;
});
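In the extractor, a serial `buffer_reader` stage pulls raw buffers out of `osmium::io::Reader`, and the `location_cacher` stage applies a node-location handler to every buffer before the Lua transform runs. A hedged sketch of that read-and-cache pattern using plain libosmium, outside of any TBB pipeline; the file name and index type are assumptions:

```cpp
// Hedged sketch of the read -> cache-node-locations pattern above.
#include <osmium/handler/node_locations_for_ways.hpp>
#include <osmium/index/map/flex_mem.hpp>
#include <osmium/io/any_input.hpp>
#include <osmium/memory/buffer.hpp>
#include <osmium/osm/types.hpp>
#include <osmium/visitor.hpp>

int main()
{
    using index_type =
        osmium::index::map::FlexMem<osmium::unsigned_object_id_type, osmium::Location>;
    using location_handler_type = osmium::handler::NodeLocationsForWays<index_type>;

    index_type location_cache;
    location_handler_type location_handler{location_cache};

    osmium::io::Reader reader{"map.osm.pbf"}; // illustrative path

    // reader.read() returns an invalid (falsy) buffer at end of file,
    // mirroring the `if (auto buffer = reader.read())` check in the diff.
    while (osmium::memory::Buffer buffer = reader.read())
    {
        osmium::apply(buffer.begin(), buffer.end(), location_handler);
    }
    reader.close();
}
```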
|
||||
|
||||
// OSM elements Lua parser
|
||||
tbb::filter_t<SharedBuffer, ParsedBuffer> buffer_transformer(
|
||||
tbb::filter::parallel,
|
||||
tbb::filter<SharedBuffer, ParsedBuffer> buffer_transformer(
|
||||
tbb::filter_mode::parallel,
|
||||
// NOLINTNEXTLINE(performance-unnecessary-value-param)
|
||||
[&](const SharedBuffer buffer) {
|
||||
ParsedBuffer parsed_buffer;
|
||||
@ -507,8 +497,8 @@ std::
|
||||
unsigned number_of_ways = 0;
|
||||
unsigned number_of_restrictions = 0;
|
||||
unsigned number_of_maneuver_overrides = 0;
|
||||
tbb::filter_t<ParsedBuffer, void> buffer_storage(
|
||||
tbb::filter::serial_in_order, [&](const ParsedBuffer &parsed_buffer) {
|
||||
tbb::filter<ParsedBuffer, void> buffer_storage(
|
||||
tbb::filter_mode::serial_in_order, [&](const ParsedBuffer &parsed_buffer) {
|
||||
number_of_nodes += parsed_buffer.resulting_nodes.size();
|
||||
// put parsed objects thru extractor callbacks
|
||||
for (const auto &result : parsed_buffer.resulting_nodes)
|
||||
@ -534,8 +524,8 @@ std::
|
||||
}
|
||||
});
|
||||
|
||||
tbb::filter_t<SharedBuffer, std::shared_ptr<ExtractionRelationContainer>> buffer_relation_cache(
|
||||
tbb::filter::parallel,
|
||||
tbb::filter<SharedBuffer, std::shared_ptr<ExtractionRelationContainer>> buffer_relation_cache(
|
||||
tbb::filter_mode::parallel,
|
||||
// NOLINTNEXTLINE(performance-unnecessary-value-param)
|
||||
[&](const SharedBuffer buffer) {
|
||||
if (!buffer)
|
||||
@ -572,8 +562,8 @@ std::
|
||||
});
|
||||
|
||||
unsigned number_of_relations = 0;
|
||||
tbb::filter_t<std::shared_ptr<ExtractionRelationContainer>, void> buffer_storage_relation(
|
||||
tbb::filter::serial_in_order,
|
||||
tbb::filter<std::shared_ptr<ExtractionRelationContainer>, void> buffer_storage_relation(
|
||||
tbb::filter_mode::serial_in_order,
|
||||
// NOLINTNEXTLINE(performance-unnecessary-value-param)
|
||||
[&](const std::shared_ptr<ExtractionRelationContainer> parsed_relations) {
|
||||
number_of_relations += parsed_relations->GetRelationsNum();
|
||||
|
||||
@ -9,7 +9,7 @@
|
||||
#include "util/percent.hpp"
|
||||
|
||||
#include <tbb/blocked_range.h>
|
||||
#include <tbb/pipeline.h>
|
||||
#include <tbb/parallel_pipeline.h>
|
||||
|
||||
#include <thread>
|
||||
|
||||
@ -97,8 +97,8 @@ void annotateTurns(const util::NodeBasedDynamicGraph &node_based_graph,
|
||||
const constexpr unsigned GRAINSIZE = 100;
|
||||
|
||||
// First part of the pipeline generates iterator ranges of IDs in sets of GRAINSIZE
|
||||
tbb::filter_t<void, tbb::blocked_range<NodeID>> generator_stage(
|
||||
tbb::filter::serial_in_order, [&](tbb::flow_control &fc) {
|
||||
tbb::filter<void, tbb::blocked_range<NodeID>> generator_stage(
|
||||
tbb::filter_mode::serial_in_order, [&](tbb::flow_control &fc) {
|
||||
if (current_node < node_count)
|
||||
{
|
||||
auto next_node = std::min(current_node + GRAINSIZE, node_count);
|
||||
@ -116,8 +116,9 @@ void annotateTurns(const util::NodeBasedDynamicGraph &node_based_graph,
|
||||
//
|
||||
// Guidance stage
|
||||
//
|
||||
tbb::filter_t<tbb::blocked_range<NodeID>, TurnsPipelineBufferPtr> guidance_stage(
|
||||
tbb::filter::parallel, [&](const tbb::blocked_range<NodeID> &intersection_node_range) {
|
||||
tbb::filter<tbb::blocked_range<NodeID>, TurnsPipelineBufferPtr> guidance_stage(
|
||||
tbb::filter_mode::parallel,
|
||||
[&](const tbb::blocked_range<NodeID> &intersection_node_range) {
|
||||
auto buffer = std::make_shared<TurnsPipelineBuffer>();
|
||||
buffer->nodes_processed = intersection_node_range.size();
|
||||
|
||||
@ -307,8 +308,8 @@ void annotateTurns(const util::NodeBasedDynamicGraph &node_based_graph,
|
||||
util::Percent guidance_progress(log, node_count);
|
||||
std::vector<guidance::TurnData> delayed_turn_data;
|
||||
|
||||
tbb::filter_t<TurnsPipelineBufferPtr, void> guidance_output_stage(
|
||||
tbb::filter::serial_in_order, [&](auto buffer) {
|
||||
tbb::filter<TurnsPipelineBufferPtr, void> guidance_output_stage(
|
||||
tbb::filter_mode::serial_in_order, [&](auto buffer) {
|
||||
guidance_progress.PrintAddition(buffer->nodes_processed);
|
||||
|
||||
connectivity_checksum = buffer->checksum.update_checksum(connectivity_checksum);
|
||||
|
||||
@ -33,8 +33,8 @@ endforeach(binary)
|
||||
|
||||
# For mason-enabled builds we copy over tbb's shared objects for packaging.
|
||||
# TODO: consider using statically linked tbb library (for node_osrm only!)
|
||||
if (ENABLE_MASON)
|
||||
foreach(libpath ${MASON_PACKAGE_tbb_LIBRARY_DIRS})
|
||||
if (ENABLE_CONAN)
|
||||
foreach(libpath ${CONAN_LIB_DIRS_ONETBB})
|
||||
file(GLOB TBBGlob ${libpath}/*.*)
|
||||
foreach(filepath ${TBBGlob})
|
||||
get_filename_component(filename ${filepath} NAME)
|
||||
|
||||
@ -10,6 +10,7 @@
|
||||
|
||||
#include <exception>
|
||||
#include <sstream>
|
||||
#include <stdexcept>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
@ -128,8 +129,7 @@ inline void async(const Nan::FunctionCallbackInfo<v8::Value> &info,
|
||||
auto params = argsToParams(info, requires_multiple_coordinates);
|
||||
if (!params)
|
||||
return;
|
||||
|
||||
auto pluginParams = argumentsToPluginParameters(info);
|
||||
auto pluginParams = argumentsToPluginParameters(info, params->format);
|
||||
|
||||
BOOST_ASSERT(params->IsValid());
|
||||
|
||||
@ -155,13 +155,18 @@ inline void async(const Nan::FunctionCallbackInfo<v8::Value> &info,
|
||||
|
||||
void Execute() override
|
||||
try
|
||||
{
|
||||
switch (
|
||||
params->format.value_or(osrm::engine::api::BaseParameters::OutputFormatType::JSON))
|
||||
{
|
||||
case osrm::engine::api::BaseParameters::OutputFormatType::JSON:
|
||||
{
|
||||
osrm::engine::api::ResultT r;
|
||||
r = osrm::util::json::Object();
|
||||
const auto status = ((*osrm).*(service))(*params, r);
|
||||
auto json_result = r.get<osrm::json::Object>();
|
||||
auto &json_result = r.get<osrm::json::Object>();
|
||||
ParseResult(status, json_result);
|
||||
if (pluginParams.renderJSONToBuffer)
|
||||
if (pluginParams.renderToBuffer)
|
||||
{
|
||||
std::ostringstream buf;
|
||||
osrm::util::json::render(buf, json_result);
|
||||
@ -172,6 +177,22 @@ inline void async(const Nan::FunctionCallbackInfo<v8::Value> &info,
|
||||
result = json_result;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case osrm::engine::api::BaseParameters::OutputFormatType::FLATBUFFERS:
{
osrm::engine::api::ResultT r = flatbuffers::FlatBufferBuilder();
const auto status = ((*osrm).*(service))(*params, r);
const auto &fbs_result = r.get<flatbuffers::FlatBufferBuilder>();
ParseResult(status, fbs_result);
BOOST_ASSERT(pluginParams.renderToBuffer);
std::string result_str(
reinterpret_cast<const char *>(fbs_result.GetBufferPointer()),
fbs_result.GetSize());
result = std::move(result_str);
}
break;
}
}
catch (const std::exception &e)
{
SetErrorMessage(e.what());
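The binding now dispatches on the requested output format: JSON results travel in the `ResultT` variant as an `osrm::json::Object`, FlatBuffers results as a `flatbuffers::FlatBufferBuilder` whose raw buffer is copied into a string. A sketch of the same dispatch through the public libosrm C++ API as I understand it; the prepared `monaco.osrm` dataset and the coordinates are assumptions:

```cpp
// Hedged sketch: requesting a route and extracting either the JSON object or
// the FlatBuffers payload from the ResultT variant.
#include <osrm/coordinate.hpp>
#include <osrm/engine_config.hpp>
#include <osrm/json_container.hpp>
#include <osrm/osrm.hpp>
#include <osrm/route_parameters.hpp>
#include <osrm/status.hpp>

#include <flatbuffers/flatbuffers.h>

#include <iostream>
#include <string>

int main()
{
    osrm::EngineConfig config;
    config.storage_config = {"monaco.osrm"}; // assumed pre-built dataset
    config.use_shared_memory = false;
    const osrm::OSRM osrm{config};

    osrm::RouteParameters params;
    params.coordinates.push_back({osrm::util::FloatLongitude{7.419758},
                                  osrm::util::FloatLatitude{43.731142}});
    params.coordinates.push_back({osrm::util::FloatLongitude{7.419505},
                                  osrm::util::FloatLatitude{43.736825}});

    // JSON result: the variant holds an osrm::json::Object
    osrm::engine::api::ResultT json_result = osrm::util::json::Object();
    const auto json_status = osrm.Route(params, json_result);
    auto &json_object = json_result.get<osrm::util::json::Object>();
    (void)json_object;
    std::cout << "json ok: " << (json_status == osrm::Status::Ok) << '\n';

    // FlatBuffers result: the variant holds a FlatBufferBuilder whose raw
    // buffer can be copied into a std::string, as in the binding above
    osrm::engine::api::ResultT fbs_result = flatbuffers::FlatBufferBuilder();
    const auto fbs_status = osrm.Route(params, fbs_result);
    const auto &fbs = fbs_result.get<flatbuffers::FlatBufferBuilder>();
    std::string payload(reinterpret_cast<const char *>(fbs.GetBufferPointer()),
                        fbs.GetSize());
    std::cout << "flatbuffers ok: " << (fbs_status == osrm::Status::Ok)
              << ", bytes: " << payload.size() << '\n';
}
```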
|
||||
@ -286,6 +307,7 @@ inline void asyncForTiles(const Nan::FunctionCallbackInfo<v8::Value> &info,
|
||||
* Can be `null` or an array of `[{value},{range}]` with `integer 0 .. 360,integer 0 .. 180`.
|
||||
* @param {Array} [options.radiuses] Limits the coordinate snapping to streets in the given radius in meters. Can be `null` (unlimited, default) or `double >= 0`.
|
||||
* @param {Array} [options.hints] Hints for the coordinate snapping. Array of base64 encoded strings.
|
||||
* @param {Array} [options.exclude] List of classes to avoid, order does not matter.
|
||||
* @param {Boolean} [options.generate_hints=true] Whether to add a Hint to the response, which can be used in subsequent requests.
|
||||
* @param {Boolean} [options.alternatives=false] Search for alternative routes.
|
||||
* @param {Number} [options.alternatives=0] Search for up to this many alternative routes.
|
||||
@ -298,7 +320,9 @@ inline void asyncForTiles(const Nan::FunctionCallbackInfo<v8::Value> &info,
|
||||
* @param {Array} [options.approaches] Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
|
||||
* `null`/`true`/`false`
|
||||
* @param {Array} [options.waypoints] Indices to coordinates to treat as waypoints. If not supplied, all coordinates are waypoints. Must include first and last coordinate index.
|
||||
* @param {String} [options.format] Which output format to use, either `json`, or [`flatbuffers`](https://github.com/Project-OSRM/osrm-backend/tree/master/include/engine/api/flatbuffers).
|
||||
* @param {String} [options.snapping] Which edges can be snapped to, either `default`, or `any`. `default` only snaps to edges marked by the profile as `is_startpoint`, `any` will allow snapping to any edge in the routing graph.
|
||||
* @param {Boolean} [options.skip_waypoints=false] Removes waypoints from the response. Waypoints are still calculated, but not serialized. Useful when you are only interested in other parts of the response and do not want to transfer redundant data.
|
||||
* @param {Function} callback
|
||||
*
|
||||
* @returns {Object} An array of [Waypoint](#waypoint) objects representing all waypoints in order AND an array of [`Route`](#route) objects ordered by descending recommendation rank.
|
||||
@ -338,6 +362,7 @@ NAN_METHOD(Engine::route) //
|
||||
* @param {Number} [options.number=1] Number of nearest segments that should be returned.
|
||||
* Must be an integer greater than or equal to `1`.
|
||||
* @param {Array} [options.approaches] Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
|
||||
* @param {String} [options.format] Which output format to use, either `json`, or [`flatbuffers`](https://github.com/Project-OSRM/osrm-backend/tree/master/include/engine/api/flatbuffers).
|
||||
* @param {String} [options.snapping] Which edges can be snapped to, either `default`, or `any`. `default` only snaps to edges marked by the profile as `is_startpoint`, `any` will allow snapping to any edge in the routing graph.
|
||||
* @param {Function} callback
|
||||
*
|
||||
@ -604,12 +629,15 @@ NAN_METHOD(Engine::trip) //
|
||||
* @name Configuration
|
||||
* @param {Object} [plugin_config] - Object literal containing parameters for the trip query.
|
||||
* @param {String} [plugin_config.format] The format of the result object to various API calls.
|
||||
* Valid options are `object` (default), which returns a
|
||||
* standard Javascript object, as described above, and `json_buffer`, which will return a NodeJS
|
||||
* **[Buffer](https://nodejs.org/api/buffer.html)** object, containing a JSON string. The latter has
|
||||
* the advantage that it can be immediately serialized to disk/sent over the network, and the
|
||||
* generation of the string is performed outside the main NodeJS event loop. This option is ignored
|
||||
* by the `tile` plugin.
|
||||
* Valid options are `object` (default if `options.format` is
|
||||
* `json`), which returns a standard Javascript object, as described above, and `buffer`(default if
|
||||
* `options.format` is `flatbuffers`), which will return a NodeJS
|
||||
* **[Buffer](https://nodejs.org/api/buffer.html)** object, containing a JSON string or Flatbuffers
|
||||
* object. The latter has the advantage that it can be immediately serialized to disk/sent over the
|
||||
* network, and the generation of the string is performed outside the main NodeJS event loop. This
|
||||
* option is ignored by the `tile` plugin. Also note that `options.format` set to `flatbuffers`
|
||||
* cannot be used with `plugin_config.format` set to `object`. `json_buffer` is a deprecated alias for
|
||||
* `buffer`.
|
||||
*
|
||||
* @example
|
||||
* var osrm = new OSRM('network.osrm');
|
||||
@ -619,7 +647,7 @@ NAN_METHOD(Engine::trip) //
|
||||
* [13.374481201171875, 52.506191342034576]
|
||||
* ]
|
||||
* };
|
||||
* osrm.route(options, { format: "json_buffer" }, function(err, response) {
|
||||
* osrm.route(options, { format: "buffer" }, function(err, response) {
|
||||
* if (err) throw err;
|
||||
* console.log(response.toString("utf-8"));
|
||||
* });
|
||||
|
||||
@ -27,12 +27,7 @@
|
||||
|
||||
#include <boost/assert.hpp>
|
||||
#include <boost/filesystem/operations.hpp>
|
||||
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
#include <tbb/global_control.h>
|
||||
#else
|
||||
#include <tbb/task_scheduler_init.h>
|
||||
#endif
|
||||
|
||||
#include "util/geojson_debug_logger.hpp"
|
||||
#include "util/geojson_debug_policies.hpp"
|
||||
@ -74,13 +69,8 @@ auto getGraphBisection(const PartitionerConfig &config)
|
||||
|
||||
int Partitioner::Run(const PartitionerConfig &config)
|
||||
{
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
tbb::global_control gc(tbb::global_control::max_allowed_parallelism,
|
||||
config.requested_num_threads);
|
||||
#else
|
||||
tbb::task_scheduler_init init(config.requested_num_threads);
|
||||
BOOST_ASSERT(init.is_active());
|
||||
#endif
|
||||
|
||||
const std::vector<BisectionID> &node_based_partition_ids = getGraphBisection(config);
|
||||
|
||||
|
||||
@ -7,7 +7,7 @@
|
||||
#include "util/log.hpp"
|
||||
#include "util/timing_util.hpp"
|
||||
|
||||
#include <tbb/parallel_do.h>
|
||||
#include <tbb/parallel_for_each.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <climits> // for CHAR_BIT
|
||||
@ -64,12 +64,12 @@ RecursiveBisection::RecursiveBisection(BisectionGraph &bisection_graph_,
return TreeNode{std::move(graph), internal_state.SCCDepth()};
});

using Feeder = tbb::parallel_do_feeder<TreeNode>;
using Feeder = tbb::feeder<TreeNode>;

TIMER_START(bisection);

// Bisect graph into two parts. Get partition point and recurse left and right in parallel.
tbb::parallel_do(begin(forest), end(forest), [&](const TreeNode &node, Feeder &feeder) {
tbb::parallel_for_each(begin(forest), end(forest), [&](const TreeNode &node, Feeder &feeder) {
const auto cut =
computeInertialFlowCut(node.graph, num_optimizing_cuts, balance, boundary_factor);
const auto center = internal_state.ApplyBisection(
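`tbb::parallel_do` and `tbb::parallel_do_feeder` are gone in oneTBB; the replacement is `tbb::parallel_for_each`, whose body may take a `tbb::feeder<T> &` to enqueue newly discovered work, which is how the bisection recurses into both halves here. A minimal sketch with an illustrative split-until-small task standing in for the graph bisection:

```cpp
// Sketch of recursive work generation with tbb::parallel_for_each and
// tbb::feeder, the oneTBB replacement for tbb::parallel_do(_feeder).
#include <tbb/parallel_for_each.h>

#include <atomic>
#include <iostream>
#include <vector>

struct Range
{
    int first;
    int last;
};

int main()
{
    std::vector<Range> forest = {{0, 1024}};
    std::atomic<int> leaves{0};

    tbb::parallel_for_each(forest.begin(),
                           forest.end(),
                           [&](const Range &node, tbb::feeder<Range> &feeder)
                           {
                               const int size = node.last - node.first;
                               if (size <= 64)
                               {
                                   ++leaves; // small enough: stop recursing
                                   return;
                               }
                               const int center = node.first + size / 2;
                               // enqueue both halves, analogous to recursing left/right
                               feeder.add(Range{node.first, center});
                               feeder.add(Range{center, node.last});
                           });

    std::cout << "leaves: " << leaves << '\n'; // 16 for 1024 split down to 64
}
```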
|
||||
|
||||
@ -27,14 +27,14 @@ http::compression_type select_compression(const boost::beast::http::fields &fiel
{
const auto header_value = fields[boost::beast::http::field::accept_encoding];
/* giving gzip precedence over deflate */
if (boost::icontains(header_value, "deflate"))
{
return http::deflate_rfc1951;
}
if (boost::icontains(header_value, "gzip"))
{
return http::gzip_rfc1952;
}
if (boost::icontains(header_value, "deflate"))
{
return http::deflate_rfc1951;
}
return http::no_compression;
}
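The old ordering checked `deflate` first, so a client sending `Accept-Encoding: gzip, deflate` was served deflate despite the comment promising gzip precedence; reordering the checks makes gzip win. A self-contained sketch of the corrected precedence; the enum and helper are illustrative, only the ordering mirrors the fix:

```cpp
// Self-contained sketch of the corrected precedence: gzip is checked before
// deflate, so "gzip, deflate" selects gzip.
#include <boost/algorithm/string/predicate.hpp>

#include <cassert>
#include <string>

enum class compression_type { none, gzip_rfc1952, deflate_rfc1951 };

compression_type select_compression(const std::string &accept_encoding)
{
    // giving gzip precedence over deflate
    if (boost::icontains(accept_encoding, "gzip"))
    {
        return compression_type::gzip_rfc1952;
    }
    if (boost::icontains(accept_encoding, "deflate"))
    {
        return compression_type::deflate_rfc1951;
    }
    return compression_type::none;
}

int main()
{
    assert(select_compression("gzip, deflate, br") == compression_type::gzip_rfc1952);
    assert(select_compression("deflate") == compression_type::deflate_rfc1951);
    assert(select_compression("identity") == compression_type::none);
}
```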
|
||||
|
||||
|
||||
@ -290,29 +290,29 @@ int Storage::Run(int max_wait, const std::string &dataset_name, bool only_metric
|
||||
|
||||
std::vector<std::pair<bool, boost::filesystem::path>> Storage::GetStaticFiles()
|
||||
{
|
||||
constexpr bool REQUIRED = true;
|
||||
constexpr bool OPTIONAL = false;
|
||||
constexpr bool IS_REQUIRED = true;
|
||||
constexpr bool IS_OPTIONAL = false;
|
||||
|
||||
std::vector<std::pair<bool, boost::filesystem::path>> files = {
|
||||
{OPTIONAL, config.GetPath(".osrm.cells")},
|
||||
{OPTIONAL, config.GetPath(".osrm.partition")},
|
||||
{REQUIRED, config.GetPath(".osrm.icd")},
|
||||
{REQUIRED, config.GetPath(".osrm.properties")},
|
||||
{REQUIRED, config.GetPath(".osrm.nbg_nodes")},
|
||||
{REQUIRED, config.GetPath(".osrm.ebg_nodes")},
|
||||
{REQUIRED, config.GetPath(".osrm.tls")},
|
||||
{REQUIRED, config.GetPath(".osrm.tld")},
|
||||
{REQUIRED, config.GetPath(".osrm.timestamp")},
|
||||
{REQUIRED, config.GetPath(".osrm.maneuver_overrides")},
|
||||
{REQUIRED, config.GetPath(".osrm.edges")},
|
||||
{REQUIRED, config.GetPath(".osrm.names")},
|
||||
{REQUIRED, config.GetPath(".osrm.ramIndex")}};
|
||||
{IS_OPTIONAL, config.GetPath(".osrm.cells")},
|
||||
{IS_OPTIONAL, config.GetPath(".osrm.partition")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.icd")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.properties")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.nbg_nodes")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.ebg_nodes")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.tls")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.tld")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.timestamp")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.maneuver_overrides")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.edges")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.names")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.ramIndex")}};
|
||||
|
||||
for (const auto &file : files)
{
if (file.first == REQUIRED && !boost::filesystem::exists(file.second))
if (file.first == IS_REQUIRED && !boost::filesystem::exists(file.second))
{
throw util::exception("Could not find required filed: " + std::get<1>(file).string());
throw util::exception("Could not find required file(s): " + std::get<1>(file).string());
}
}
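Both `GetStaticFiles` and `GetUpdatableFiles` return `(required, path)` pairs and fail fast when a required file is missing; the rename to `IS_REQUIRED`/`IS_OPTIONAL` presumably sidesteps clashes with the widely used `REQUIRED`/`OPTIONAL` macros. A hedged sketch of the same existence check using `std::filesystem` instead of Boost; the paths are illustrative:

```cpp
// Hedged sketch of the required/optional file check pattern above.
#include <filesystem>
#include <iostream>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>

void verify_files_exist(const std::vector<std::pair<bool, std::filesystem::path>> &files)
{
    constexpr bool IS_REQUIRED = true;

    for (const auto &file : files)
    {
        if (file.first == IS_REQUIRED && !std::filesystem::exists(file.second))
        {
            throw std::runtime_error("Could not find required file(s): " + file.second.string());
        }
    }
}

int main()
{
    const std::vector<std::pair<bool, std::filesystem::path>> files = {
        {true, "data.osrm.properties"}, // required
        {false, "data.osrm.cells"}};    // optional, may be absent

    try
    {
        verify_files_exist(files);
    }
    catch (const std::runtime_error &e)
    {
        std::cerr << e.what() << '\n'; // thrown here if the required file is missing
    }
}
```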
|
||||
|
||||
@ -321,23 +321,23 @@ std::vector<std::pair<bool, boost::filesystem::path>> Storage::GetStaticFiles()
|
||||
|
||||
std::vector<std::pair<bool, boost::filesystem::path>> Storage::GetUpdatableFiles()
|
||||
{
|
||||
constexpr bool REQUIRED = true;
|
||||
constexpr bool OPTIONAL = false;
|
||||
constexpr bool IS_REQUIRED = true;
|
||||
constexpr bool IS_OPTIONAL = false;
|
||||
|
||||
std::vector<std::pair<bool, boost::filesystem::path>> files = {
|
||||
{OPTIONAL, config.GetPath(".osrm.mldgr")},
|
||||
{OPTIONAL, config.GetPath(".osrm.cell_metrics")},
|
||||
{OPTIONAL, config.GetPath(".osrm.hsgr")},
|
||||
{REQUIRED, config.GetPath(".osrm.datasource_names")},
|
||||
{REQUIRED, config.GetPath(".osrm.geometry")},
|
||||
{REQUIRED, config.GetPath(".osrm.turn_weight_penalties")},
|
||||
{REQUIRED, config.GetPath(".osrm.turn_duration_penalties")}};
|
||||
{IS_OPTIONAL, config.GetPath(".osrm.mldgr")},
|
||||
{IS_OPTIONAL, config.GetPath(".osrm.cell_metrics")},
|
||||
{IS_OPTIONAL, config.GetPath(".osrm.hsgr")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.datasource_names")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.geometry")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.turn_weight_penalties")},
|
||||
{IS_REQUIRED, config.GetPath(".osrm.turn_duration_penalties")}};
|
||||
|
||||
for (const auto &file : files)
|
||||
{
|
||||
if (file.first == REQUIRED && !boost::filesystem::exists(file.second))
|
||||
if (file.first == IS_REQUIRED && !boost::filesystem::exists(file.second))
|
||||
{
|
||||
throw util::exception("Could not find required filed: " + std::get<1>(file).string());
|
||||
throw util::exception("Could not find required file(s): " + std::get<1>(file).string());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -4,6 +4,24 @@ var data_path = require('./constants').data_path;
|
||||
var mld_data_path = require('./constants').mld_data_path;
|
||||
var three_test_coordinates = require('./constants').three_test_coordinates;
|
||||
var two_test_coordinates = require('./constants').two_test_coordinates;
|
||||
const flatbuffers = require('../../features/support/flatbuffers').flatbuffers;
|
||||
const FBResult = require('../../features/support/fbresult_generated').osrm.engine.api.fbresult.FBResult;
|
||||
|
||||
|
||||
test('match: match in Monaco with flatbuffers format', function(assert) {
|
||||
assert.plan(2);
|
||||
var osrm = new OSRM(data_path);
|
||||
var options = {
|
||||
coordinates: three_test_coordinates,
|
||||
timestamps: [1424684612, 1424684616, 1424684620],
|
||||
format: 'flatbuffers'
|
||||
};
|
||||
osrm.match(options, function(err, response) {
|
||||
assert.ifError(err);
|
||||
const fb = FBResult.getRootAsFBResult(new flatbuffers.ByteBuffer(response));
|
||||
assert.equal(fb.routesLength(), 1);
|
||||
});
|
||||
});
|
||||
|
||||
test('match: match in Monaco', function(assert) {
|
||||
assert.plan(5);
|
||||
|
||||
@ -4,8 +4,26 @@ var data_path = require('./constants').data_path;
|
||||
var mld_data_path = require('./constants').mld_data_path;
|
||||
var three_test_coordinates = require('./constants').three_test_coordinates;
|
||||
var two_test_coordinates = require('./constants').two_test_coordinates;
|
||||
const flatbuffers = require('../../features/support/flatbuffers').flatbuffers;
|
||||
const FBResult = require('../../features/support/fbresult_generated').osrm.engine.api.fbresult.FBResult;
|
||||
|
||||
|
||||
test('nearest with flatbuffers format', function(assert) {
|
||||
assert.plan(5);
|
||||
var osrm = new OSRM(data_path);
|
||||
osrm.nearest({
|
||||
coordinates: [three_test_coordinates[0]],
|
||||
format: 'flatbuffers'
|
||||
}, function(err, result) {
|
||||
assert.ifError(err);
|
||||
assert.ok(result instanceof Buffer);
|
||||
const fb = FBResult.getRootAsFBResult(new flatbuffers.ByteBuffer(result));
|
||||
assert.equals(fb.waypointsLength(), 1);
|
||||
assert.ok(fb.waypoints(0).location());
|
||||
assert.ok(fb.waypoints(0).name());
|
||||
});
|
||||
});
|
||||
|
||||
test('nearest', function(assert) {
|
||||
assert.plan(4);
|
||||
var osrm = new OSRM(data_path);
|
||||
|
||||
@ -5,6 +5,51 @@ var monaco_mld_path = require('./constants').mld_data_path;
|
||||
var monaco_corech_path = require('./constants').corech_data_path;
|
||||
var three_test_coordinates = require('./constants').three_test_coordinates;
|
||||
var two_test_coordinates = require('./constants').two_test_coordinates;
|
||||
const flatbuffers = require('../../features/support/flatbuffers').flatbuffers;
|
||||
const FBResult = require('../../features/support/fbresult_generated').osrm.engine.api.fbresult.FBResult;
|
||||
|
||||
test('route: routes Monaco and can return result in flatbuffers', function(assert) {
|
||||
assert.plan(5);
|
||||
var osrm = new OSRM(monaco_path);
|
||||
osrm.route({coordinates: two_test_coordinates, format: 'flatbuffers'}, function(err, result) {
|
||||
assert.ifError(err);
|
||||
assert.ok(result instanceof Buffer);
|
||||
const fb = FBResult.getRootAsFBResult(new flatbuffers.ByteBuffer(result));
|
||||
assert.equals(fb.waypointsLength(), 2);
|
||||
assert.equals(fb.routesLength(), 1);
|
||||
assert.ok(fb.routes(0).polyline);
|
||||
});
|
||||
});
|
||||
|
||||
test('route: routes Monaco and can return result in flatbuffers if output format is passed explicitly', function(assert) {
|
||||
assert.plan(5);
|
||||
var osrm = new OSRM(monaco_path);
|
||||
osrm.route({coordinates: two_test_coordinates, format: 'flatbuffers'}, {output: 'buffer'}, function(err, result) {
|
||||
assert.ifError(err);
|
||||
assert.ok(result instanceof Buffer);
|
||||
var buf = new flatbuffers.ByteBuffer(result);
|
||||
const fb = FBResult.getRootAsFBResult(buf);
|
||||
assert.equals(fb.waypointsLength(), 2);
|
||||
assert.equals(fb.routesLength(), 1);
|
||||
assert.ok(fb.routes(0).polyline);
|
||||
});
|
||||
});
|
||||
|
||||
test('route: throws error if required output is object in flatbuffers format', function(assert) {
|
||||
assert.plan(1);
|
||||
var osrm = new OSRM(monaco_path);
|
||||
assert.throws(function() {
|
||||
osrm.route({coordinates: two_test_coordinates, format: 'flatbuffers'}, {format: 'object'}, function(err, result) {});
|
||||
});
|
||||
});
|
||||
|
||||
test('route: throws error if required output is json_buffer in flatbuffers format', function(assert) {
|
||||
assert.plan(1);
|
||||
var osrm = new OSRM(monaco_path);
|
||||
assert.throws(function() {
|
||||
osrm.route({coordinates: two_test_coordinates, format: 'flatbuffers'}, {format: 'json_buffer'}, function(err, result) {});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
test('route: routes Monaco', function(assert) {
|
||||
|
||||
@ -4,6 +4,24 @@ var data_path = require('./constants').data_path;
|
||||
var mld_data_path = require('./constants').mld_data_path;
|
||||
var three_test_coordinates = require('./constants').three_test_coordinates;
|
||||
var two_test_coordinates = require('./constants').two_test_coordinates;
|
||||
const flatbuffers = require('../../features/support/flatbuffers').flatbuffers;
|
||||
const FBResult = require('../../features/support/fbresult_generated').osrm.engine.api.fbresult.FBResult;
|
||||
|
||||
test('table: flatbuffer format', function(assert) {
|
||||
assert.plan(3);
|
||||
var osrm = new OSRM(data_path);
|
||||
var options = {
|
||||
coordinates: [three_test_coordinates[0], three_test_coordinates[1]],
|
||||
format: 'flatbuffers'
|
||||
};
|
||||
osrm.table(options, function(err, table) {
|
||||
assert.ifError(err);
|
||||
assert.ok(table instanceof Buffer);
|
||||
const fb = FBResult.getRootAsFBResult(new flatbuffers.ByteBuffer(table));
|
||||
assert.ok(fb.table());
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
test('table: test annotations paramater combination', function(assert) {
|
||||
assert.plan(12);
|
||||
|
||||
@ -4,7 +4,18 @@ var data_path = require('./constants').data_path;
|
||||
var mld_data_path = require('./constants').mld_data_path;
|
||||
var three_test_coordinates = require('./constants').three_test_coordinates;
|
||||
var two_test_coordinates = require('./constants').two_test_coordinates;
|
||||
const flatbuffers = require('../../features/support/flatbuffers').flatbuffers;
|
||||
const FBResult = require('../../features/support/fbresult_generated').osrm.engine.api.fbresult.FBResult;
|
||||
|
||||
test('trip: trip in Monaco with flatbuffers format', function(assert) {
|
||||
assert.plan(2);
|
||||
var osrm = new OSRM(data_path);
|
||||
osrm.trip({coordinates: two_test_coordinates, format: 'flatbuffers'}, function(err, trip) {
|
||||
assert.ifError(err);
|
||||
const fb = FBResult.getRootAsFBResult(new flatbuffers.ByteBuffer(trip));
|
||||
assert.equal(fb.routesLength(), 1);
|
||||
});
|
||||
});
|
||||
|
||||
test('trip: trip in Monaco', function(assert) {
|
||||
assert.plan(2);
|
||||
|
||||
@ -4,13 +4,7 @@
|
||||
#include "helper.hpp"
|
||||
|
||||
#include <boost/test/unit_test.hpp>
|
||||
|
||||
#include <tbb/tbb_stddef.h> // For version lookup
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
#include <tbb/global_control.h>
|
||||
#else
|
||||
#include <tbb/task_scheduler_init.h>
|
||||
#endif
|
||||
|
||||
using namespace osrm;
|
||||
using namespace osrm::contractor;
|
||||
@ -20,11 +14,7 @@ BOOST_AUTO_TEST_SUITE(graph_contractor)
|
||||
|
||||
BOOST_AUTO_TEST_CASE(contract_graph)
|
||||
{
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
tbb::global_control scheduler(tbb::global_control::max_allowed_parallelism, 1);
|
||||
#else
|
||||
tbb::task_scheduler_init scheduler(1);
|
||||
#endif
|
||||
/*
|
||||
* <--1--<
|
||||
* (0) >--3--> (1) >--3--> (3)
|
||||
|
||||
@ -44,16 +44,16 @@ BOOST_AUTO_TEST_CASE(hint_encoding_decoding_roundtrip)
const PhantomNode phantom;
const osrm::test::MockDataFacade<osrm::engine::routing_algorithms::ch::Algorithm> facade{};

const Hint hint{phantom, facade.GetCheckSum()};
const SegmentHint seg_hint{phantom, facade.GetCheckSum()};

const auto base64 = hint.ToBase64();
const auto base64 = seg_hint.ToBase64();

BOOST_CHECK(0 == std::count(begin(base64), end(base64), '+'));
BOOST_CHECK(0 == std::count(begin(base64), end(base64), '/'));

const auto decoded = Hint::FromBase64(base64);
const auto decoded = SegmentHint::FromBase64(base64);

BOOST_CHECK_EQUAL(hint, decoded);
BOOST_CHECK_EQUAL(seg_hint, decoded);
}

BOOST_AUTO_TEST_CASE(hint_encoding_decoding_roundtrip_bytewise)
@ -65,12 +65,12 @@ BOOST_AUTO_TEST_CASE(hint_encoding_decoding_roundtrip_bytewise)
const PhantomNode phantom;
const osrm::test::MockDataFacade<osrm::engine::routing_algorithms::ch::Algorithm> facade{};

const Hint hint{phantom, facade.GetCheckSum()};
const SegmentHint seg_hint{phantom, facade.GetCheckSum()};

const auto decoded = Hint::FromBase64(hint.ToBase64());
const auto decoded = SegmentHint::FromBase64(seg_hint.ToBase64());

BOOST_CHECK(std::equal(reinterpret_cast<const unsigned char *>(&hint),
reinterpret_cast<const unsigned char *>(&hint) + sizeof(Hint),
BOOST_CHECK(std::equal(reinterpret_cast<const unsigned char *>(&seg_hint),
reinterpret_cast<const unsigned char *>(&seg_hint) + sizeof(Hint),
reinterpret_cast<const unsigned char *>(&decoded)));
}
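The per-segment snapping data has moved from `Hint` into `SegmentHint`, which still round-trips through URL-safe Base64. A sketch built only from the calls exercised by these tests; the include paths are assumptions about the OSRM source tree, not an installed public API:

```cpp
// Hedged sketch of the SegmentHint Base64 round trip, using only the
// constructor, ToBase64, FromBase64 and operator== exercised above.
#include "engine/hint.hpp"          // assumed location of SegmentHint
#include "engine/phantom_node.hpp"  // assumed location of PhantomNode

#include <cassert>
#include <cstdint>
#include <iostream>

int main()
{
    using osrm::engine::PhantomNode;
    using osrm::engine::SegmentHint;

    const PhantomNode phantom;              // default-constructed, as in the tests
    const std::uint32_t checksum = 0x1337;  // stands in for the facade checksum

    const SegmentHint original{phantom, checksum};
    const auto base64 = original.ToBase64();            // URL-safe: no '+' or '/'
    const auto decoded = SegmentHint::FromBase64(base64);

    assert(decoded == original);
    std::cout << "hint round-tripped through " << base64.size() << " Base64 chars\n";
}
```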
|
||||
|
||||
|
||||
@ -23,7 +23,7 @@ BOOST_AUTO_TEST_CASE(unchanged_collapse_route_result)
|
||||
PathData kathy{0, 1, 1, 2, 3, 4, 1, boost::none};
|
||||
InternalRouteResult one_leg_result;
|
||||
one_leg_result.unpacked_path_segments = {{pathy, kathy}};
|
||||
one_leg_result.segment_end_coordinates = {PhantomNodes{source, target}};
|
||||
one_leg_result.leg_endpoints = {PhantomEndpoints{source, target}};
|
||||
one_leg_result.source_traversed_in_reverse = {true};
|
||||
one_leg_result.target_traversed_in_reverse = {true};
|
||||
one_leg_result.shortest_path_weight = 50;
|
||||
@ -50,18 +50,17 @@ BOOST_AUTO_TEST_CASE(two_legs_to_one_leg)
|
||||
node_3.forward_segment_id = {12, true};
|
||||
InternalRouteResult two_leg_result;
|
||||
two_leg_result.unpacked_path_segments = {{pathy, kathy}, {kathy, cathy}};
|
||||
two_leg_result.segment_end_coordinates = {PhantomNodes{node_1, node_2},
|
||||
PhantomNodes{node_2, node_3}};
|
||||
two_leg_result.leg_endpoints = {PhantomEndpoints{node_1, node_2},
|
||||
PhantomEndpoints{node_2, node_3}};
|
||||
two_leg_result.source_traversed_in_reverse = {true, false};
|
||||
two_leg_result.target_traversed_in_reverse = {true, false};
|
||||
two_leg_result.shortest_path_weight = 80;
|
||||
|
||||
auto collapsed = CollapseInternalRouteResult(two_leg_result, {true, false, true, true});
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments.size(), 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates.size(), 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[0].target_phantom.forward_segment_id.id,
|
||||
12);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[0].source_phantom.forward_segment_id.id, 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints.size(), 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[0].target_phantom.forward_segment_id.id, 12);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[0].source_phantom.forward_segment_id.id, 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0].size(), 4);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0][0].turn_via_node, 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0][1].turn_via_node, 1);
|
||||
@ -88,20 +87,20 @@ BOOST_AUTO_TEST_CASE(three_legs_to_two_legs)
|
||||
three_leg_result.unpacked_path_segments = {std::vector<PathData>{pathy, kathy},
|
||||
std::vector<PathData>{kathy, qathy, cathy},
|
||||
std::vector<PathData>{cathy, mathy}};
|
||||
three_leg_result.segment_end_coordinates = {
|
||||
PhantomNodes{node_1, node_2}, PhantomNodes{node_2, node_3}, PhantomNodes{node_3, node_4}};
|
||||
three_leg_result.leg_endpoints = {PhantomEndpoints{node_1, node_2},
|
||||
PhantomEndpoints{node_2, node_3},
|
||||
PhantomEndpoints{node_3, node_4}};
|
||||
three_leg_result.source_traversed_in_reverse = {true, false, true},
|
||||
three_leg_result.target_traversed_in_reverse = {true, false, true},
|
||||
three_leg_result.shortest_path_weight = 140;
|
||||
|
||||
auto collapsed = CollapseInternalRouteResult(three_leg_result, {true, true, false, true});
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments.size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates.size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[0].source_phantom.forward_segment_id.id, 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[0].target_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[1].source_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[1].target_phantom.forward_segment_id.id,
|
||||
18);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints.size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[0].source_phantom.forward_segment_id.id, 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[0].target_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[1].source_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[1].target_phantom.forward_segment_id.id, 18);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0].size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[1].size(), 5);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0][0].turn_via_node, 2);
|
||||
@ -126,20 +125,19 @@ BOOST_AUTO_TEST_CASE(two_legs_to_two_legs)
|
||||
node_3.forward_segment_id = {12, true};
|
||||
InternalRouteResult two_leg_result;
|
||||
two_leg_result.unpacked_path_segments = {{pathy, kathy}, {kathy, cathy}};
|
||||
two_leg_result.segment_end_coordinates = {PhantomNodes{node_1, node_2},
|
||||
PhantomNodes{node_2, node_3}};
|
||||
two_leg_result.leg_endpoints = {PhantomEndpoints{node_1, node_2},
|
||||
PhantomEndpoints{node_2, node_3}};
|
||||
two_leg_result.source_traversed_in_reverse = {true, false};
|
||||
two_leg_result.target_traversed_in_reverse = {true, false};
|
||||
two_leg_result.shortest_path_weight = 80;
|
||||
|
||||
auto collapsed = CollapseInternalRouteResult(two_leg_result, {true, true, true});
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments.size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates.size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[0].source_phantom.forward_segment_id.id, 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[0].target_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[1].source_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.segment_end_coordinates[1].target_phantom.forward_segment_id.id,
|
||||
12);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints.size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[0].source_phantom.forward_segment_id.id, 1);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[0].target_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[1].source_phantom.forward_segment_id.id, 6);
|
||||
BOOST_CHECK_EQUAL(collapsed.leg_endpoints[1].target_phantom.forward_segment_id.id, 12);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0].size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[1].size(), 2);
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments[0][0].turn_via_node, 2);
|
||||
|
||||
@ -224,101 +224,35 @@ class ContiguousInternalMemoryDataFacade<routing_algorithms::offline::Algorithm>
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodesInRange(const util::Coordinate /*input_coordinate*/,
|
||||
const float /*max_distance*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodesInRange(const util::Coordinate /*input_coordinate*/,
|
||||
const float /*max_distance*/,
|
||||
const Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const double /*max_distance*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const Approach /*approach*/) const override
|
||||
const boost::optional<engine::Bearing> /*bearing*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const Approach /*approach*/) const override
|
||||
const size_t /*max_results*/,
|
||||
const boost::optional<double> /*max_distance*/,
|
||||
const boost::optional<engine::Bearing> /*bearing*/,
|
||||
const engine::Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const double /*max_distance*/,
|
||||
const Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const Approach /*approach*/,
|
||||
engine::PhantomCandidateAlternatives NearestCandidatesWithAlternativeFromBigComponent(
|
||||
const util::Coordinate /*input_coordinate*/,
|
||||
const boost::optional<double> /*max_distance*/,
|
||||
const boost::optional<engine::Bearing> /*bearing*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const double /*max_distance*/,
|
||||
const Approach /*approach*/,
|
||||
const bool /* use_all_edges */) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const double /*max_distance*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const Approach /*approach*/,
|
||||
const bool /* use_all_edges */) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<PhantomNode, PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const Approach /*approach*/,
|
||||
const bool /* use_all_edges */) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
util::guidance::LaneTupleIdPair GetLaneData(const EdgeID /*id*/) const override
|
||||
{
|
||||
@ -394,15 +328,16 @@ namespace routing_algorithms
|
||||
namespace offline
|
||||
{
|
||||
|
||||
template <typename PhantomT>
|
||||
inline void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const datafacade::ContiguousInternalMemoryDataFacade<Algorithm> &facade,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
EdgeWeight &weight,
|
||||
std::vector<NodeID> &packed_leg,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const std::vector<NodeID> &forward_loop_nodes,
|
||||
const std::vector<NodeID> &reverse_loop_nodes,
|
||||
const PhantomT &endpoints,
|
||||
const EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
mld::search(engine_working_data,
|
||||
@ -411,9 +346,9 @@ inline void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
reverse_heap,
|
||||
weight,
|
||||
packed_leg,
|
||||
force_loop_forward,
|
||||
force_loop_reverse,
|
||||
phantom_nodes,
|
||||
forward_loop_nodes,
|
||||
reverse_loop_nodes,
|
||||
endpoints,
|
||||
weight_upper_bound);
|
||||
}
|
||||
|
||||
@ -421,10 +356,10 @@ template <typename RandomIter, typename FacadeT>
|
||||
void unpackPath(const FacadeT &facade,
|
||||
RandomIter packed_path_begin,
|
||||
RandomIter packed_path_end,
|
||||
const PhantomNodes &phantom_nodes,
|
||||
const PhantomEndpoints &endpoints,
|
||||
std::vector<PathData> &unpacked_path)
|
||||
{
|
||||
mld::unpackPath(facade, packed_path_begin, packed_path_end, phantom_nodes, unpacked_path);
|
||||
mld::unpackPath(facade, packed_path_begin, packed_path_end, endpoints, unpacked_path);
|
||||
}
|
||||
|
||||
} // namespace offline
|
||||
@ -442,11 +377,12 @@ BOOST_AUTO_TEST_CASE(shortest_path)
|
||||
osrm::engine::SearchEngineData<Algorithm> heaps;
|
||||
osrm::engine::datafacade::ContiguousInternalMemoryDataFacade<Algorithm> facade;
|
||||
|
||||
std::vector<osrm::engine::PhantomNodes> phantom_nodes;
|
||||
phantom_nodes.push_back({osrm::engine::PhantomNode{}, osrm::engine::PhantomNode{}});
|
||||
std::vector<osrm::engine::PhantomNodeCandidates> waypoints;
|
||||
waypoints.push_back({osrm::engine::PhantomNode{}});
|
||||
waypoints.push_back({osrm::engine::PhantomNode{}});
|
||||
|
||||
auto route =
|
||||
osrm::engine::routing_algorithms::shortestPathSearch(heaps, facade, phantom_nodes, false);
|
||||
osrm::engine::routing_algorithms::shortestPathSearch(heaps, facade, waypoints, false);
|
||||
|
||||
BOOST_CHECK_EQUAL(route.shortest_path_weight, INVALID_EDGE_WEIGHT);
|
||||
}
|
||||
|
||||
@ -110,99 +110,33 @@ class MockBaseDataFacade : public engine::datafacade::BaseDataFacade
|
||||
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodesInRange(const util::Coordinate /*input_coordinate*/,
|
||||
const float /*max_distance*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const double /*max_distance*/,
|
||||
const boost::optional<engine::Bearing> /*bearing*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodesInRange(const util::Coordinate /*input_coordinate*/,
|
||||
const float /*max_distance*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const double /*max_distance*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const size_t /*max_results*/,
|
||||
const boost::optional<double> /*max_distance*/,
|
||||
const boost::optional<engine::Bearing> /*bearing*/,
|
||||
const engine::Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const engine::Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const engine::Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::vector<engine::PhantomNodeWithDistance>
|
||||
NearestPhantomNodes(const util::Coordinate /*input_coordinate*/,
|
||||
const unsigned /*max_results*/,
|
||||
const double /*max_distance*/,
|
||||
const engine::Approach /*approach*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<engine::PhantomNode, engine::PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
engine::PhantomCandidateAlternatives NearestCandidatesWithAlternativeFromBigComponent(
|
||||
const util::Coordinate /*input_coordinate*/,
|
||||
const boost::optional<double> /*max_distance*/,
|
||||
const boost::optional<engine::Bearing> /*bearing*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /*use_all_edges*/) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<engine::PhantomNode, engine::PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const double /*max_distance*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /* use_all_edges */) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<engine::PhantomNode, engine::PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const double /*max_distance*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /* use_all_edges */) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
std::pair<engine::PhantomNode, engine::PhantomNode>
|
||||
NearestPhantomNodeWithAlternativeFromBigComponent(const util::Coordinate /*input_coordinate*/,
|
||||
const int /*bearing*/,
|
||||
const int /*bearing_range*/,
|
||||
const engine::Approach /*approach*/,
|
||||
const bool /* use_all_edges */) const override
|
||||
{
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
std::uint32_t GetCheckSum() const override { return 0; }
|
||||
|
||||
|
||||
@ -7,15 +7,6 @@
|
||||
|
||||
#include <boost/test/unit_test.hpp>
|
||||
|
||||
// make sure not to leak in recursive bisection
|
||||
#if TBB_VERSION_MAJOR == 2020
|
||||
#include <tbb/global_control.h>
|
||||
tbb::global_control scheduler(tbb::global_control::max_allowed_parallelism, 2);
|
||||
#else
|
||||
#include <tbb/task_scheduler_init.h>
|
||||
tbb::task_scheduler_init init(2);
|
||||
#endif
|
||||
|
||||
using namespace osrm::partitioner;
|
||||
using namespace osrm::util;
|
||||
|
||||
|
||||
@ -19,6 +19,20 @@
#define CHECK_EQUAL_RANGE(R1, R2) \
BOOST_CHECK_EQUAL_COLLECTIONS((R1).begin(), (R1).end(), (R2).begin(), (R2).end());

#define CHECK_EQUAL_RANGE_OF_HINTS(R1, R2) \
BOOST_REQUIRE_EQUAL((R1).size(), (R2).size()); \
for (const auto i : util::irange<std::size_t>(0UL, (R1).size())) \
{ \
BOOST_REQUIRE(((R1)[i] && (R2)[i]) || !((R1)[i] || (R2)[i])); \
if ((R1)[i]) \
{ \
BOOST_CHECK_EQUAL_COLLECTIONS((R1)[i]->segment_hints.begin(), \
(R1)[i]->segment_hints.end(), \
(R2)[i]->segment_hints.begin(), \
(R2)[i]->segment_hints.end()); \
} \
}
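Since a route `Hint` now wraps a list of `SegmentHint`s, two optional hints can no longer be compared with a single `BOOST_CHECK_EQUAL`; the macro first checks that both optionals agree on presence and then compares their `segment_hints` element-wise. A hedged usage sketch inside a Boost.Test case, mirroring the `hints_4` construction later in this file (relies on this test file's existing includes):

```cpp
// Hedged usage sketch of CHECK_EQUAL_RANGE_OF_HINTS; the Hint construction
// mirrors the hints_4 initializer further down in this test file.
BOOST_AUTO_TEST_CASE(compare_optional_hints_sketch)
{
    engine::PhantomNode phantom;
    std::vector<boost::optional<engine::Hint>> expected = {
        engine::Hint{{engine::SegmentHint{phantom, 0x1337}}}, boost::none};
    std::vector<boost::optional<engine::Hint>> actual = expected;

    // passes: same size, same presence pattern, equal segment_hints per entry
    CHECK_EQUAL_RANGE_OF_HINTS(expected, actual);
}
```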
|
||||
|
||||
BOOST_AUTO_TEST_SUITE(api_parameters_parser)
|
||||
|
||||
using namespace osrm;
|
||||
@ -117,15 +131,16 @@ BOOST_AUTO_TEST_CASE(invalid_table_urls)
|
||||
// BOOST_CHECK_EQUAL(testInvalidOptions<TableParameters>("1,2;3,4?destinations=2"), 7UL);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(valid_route_hint)
|
||||
BOOST_AUTO_TEST_CASE(valid_route_segment_hint)
|
||||
{
|
||||
engine::PhantomNode reference_node;
|
||||
reference_node.input_location =
|
||||
util::Coordinate(util::FloatLongitude{7.432251}, util::FloatLatitude{43.745995});
|
||||
engine::Hint reference_hint{reference_node, 0x1337};
|
||||
auto encoded_hint = reference_hint.ToBase64();
|
||||
auto hint = engine::Hint::FromBase64(encoded_hint);
|
||||
BOOST_CHECK_EQUAL(hint.phantom.input_location, reference_hint.phantom.input_location);
|
||||
engine::SegmentHint reference_segment_hint{reference_node, 0x1337};
|
||||
auto encoded_hint = reference_segment_hint.ToBase64();
|
||||
auto seg_hint = engine::SegmentHint::FromBase64(encoded_hint);
|
||||
BOOST_CHECK_EQUAL(seg_hint.phantom.input_location,
|
||||
reference_segment_hint.phantom.input_location);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
@ -147,7 +162,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_1.radiuses, result_1->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_1.approaches, result_1->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_1.coordinates, result_1->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_1.hints, result_1->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_1.hints, result_1->hints);
|
||||
|
||||
RouteParameters reference_2{};
|
||||
reference_2.alternatives = true;
|
||||
@ -170,7 +185,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_2.radiuses, result_2->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_2.approaches, result_2->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_2.coordinates, result_2->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_2.hints, result_2->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_2.hints, result_2->hints);
|
||||
BOOST_CHECK_EQUAL(result_2->annotations_type == RouteParameters::AnnotationsType::All, true);
|
||||
|
||||
RouteParameters reference_3{false,
|
||||
@ -195,14 +210,15 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_3.radiuses, result_3->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_3.approaches, result_3->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_3.coordinates, result_3->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_3.hints, result_3->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_3.hints, result_3->hints);
|
||||
|
||||
engine::PhantomNode phantom_1;
|
||||
phantom_1.input_location = coords_1[0];
|
||||
engine::PhantomNode phantom_2;
|
||||
phantom_2.input_location = coords_1[1];
|
||||
std::vector<boost::optional<engine::Hint>> hints_4 = {engine::Hint{phantom_1, 0x1337},
|
||||
engine::Hint{phantom_2, 0x1337}};
|
||||
std::vector<boost::optional<engine::Hint>> hints_4 = {
|
||||
engine::Hint{{engine::SegmentHint{phantom_1, 0x1337}}},
|
||||
engine::Hint{{engine::SegmentHint{phantom_2, 0x1337}}}};
|
||||
RouteParameters reference_4{false,
|
||||
false,
|
||||
false,
|
||||
@ -226,7 +242,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_4.radiuses, result_4->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_4.approaches, result_4->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_4.coordinates, result_4->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_4.hints, result_4->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_4.hints, result_4->hints);
|
||||
|
||||
std::vector<boost::optional<engine::Bearing>> bearings_4 = {
|
||||
boost::none,
|
||||
@ -255,7 +271,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_5.radiuses, result_5->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_5.approaches, result_5->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_5.coordinates, result_5->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_5.hints, result_5->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_5.hints, result_5->hints);
|
||||
|
||||
std::vector<util::Coordinate> coords_2 = {{util::FloatLongitude{0}, util::FloatLatitude{1}},
|
||||
{util::FloatLongitude{2}, util::FloatLatitude{3}},
|
||||
@ -275,7 +291,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_6.radiuses, result_6->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_6.approaches, result_6->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_6.coordinates, result_6->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_6.hints, result_6->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_6.hints, result_6->hints);
|
||||
|
||||
auto result_7 = parseParameters<RouteParameters>("1,2;3,4?radiuses=;unlimited");
|
||||
RouteParameters reference_7{};
|
||||
@ -293,7 +309,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_7.radiuses, result_7->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_7.approaches, result_7->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_7.coordinates, result_7->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_7.hints, result_7->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_7.hints, result_7->hints);
|
||||
|
||||
auto result_8 = parseParameters<RouteParameters>("1,2;3,4?radiuses=;");
|
||||
RouteParameters reference_8{};
|
||||
@ -320,7 +336,10 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
engine::PhantomNode phantom_4;
|
||||
phantom_4.input_location = coords_3[2];
|
||||
std::vector<boost::optional<engine::Hint>> hints_10 = {
|
||||
engine::Hint{phantom_3, 0x1337}, boost::none, engine::Hint{phantom_4, 0x1337}, boost::none};
|
||||
engine::Hint{{engine::SegmentHint{phantom_3, 0x1337}}},
|
||||
{},
|
||||
engine::Hint{{engine::SegmentHint{phantom_4, 0x1337}}},
|
||||
{}};
|
||||
|
||||
RouteParameters reference_10{false,
|
||||
false,
|
||||
@ -346,7 +365,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
|
||||
CHECK_EQUAL_RANGE(reference_10.radiuses, result_10->radiuses);
|
||||
CHECK_EQUAL_RANGE(reference_10.approaches, result_10->approaches);
|
||||
CHECK_EQUAL_RANGE(reference_10.coordinates, result_10->coordinates);
|
||||
CHECK_EQUAL_RANGE(reference_10.hints, result_10->hints);
|
||||
CHECK_EQUAL_RANGE_OF_HINTS(reference_10.hints, result_10->hints);
|
||||
|
||||
// Do not generate Hints when they are explicitly disabled
|
||||
auto result_11 = parseParameters<RouteParameters>("1,2;3,4?generate_hints=false");
@ -459,7 +478,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
CHECK_EQUAL_RANGE(reference_18.radiuses, result_18->radiuses);
CHECK_EQUAL_RANGE(reference_18.approaches, result_18->approaches);
CHECK_EQUAL_RANGE(reference_18.coordinates, result_18->coordinates);
CHECK_EQUAL_RANGE(reference_18.hints, result_18->hints);
CHECK_EQUAL_RANGE_OF_HINTS(reference_18.hints, result_18->hints);

RouteParameters reference_19{};
reference_19.alternatives = true;
@ -478,7 +497,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
CHECK_EQUAL_RANGE(reference_19.radiuses, result_19->radiuses);
CHECK_EQUAL_RANGE(reference_19.approaches, result_19->approaches);
CHECK_EQUAL_RANGE(reference_19.coordinates, result_19->coordinates);
CHECK_EQUAL_RANGE(reference_19.hints, result_19->hints);
CHECK_EQUAL_RANGE_OF_HINTS(reference_19.hints, result_19->hints);

RouteParameters reference_20{};
reference_20.alternatives = false;
@ -497,7 +516,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
CHECK_EQUAL_RANGE(reference_20.radiuses, result_20->radiuses);
CHECK_EQUAL_RANGE(reference_20.approaches, result_20->approaches);
CHECK_EQUAL_RANGE(reference_20.coordinates, result_20->coordinates);
CHECK_EQUAL_RANGE(reference_20.hints, result_20->hints);
CHECK_EQUAL_RANGE_OF_HINTS(reference_20.hints, result_20->hints);

// exclude flags
RouteParameters reference_21{};
@ -516,7 +535,7 @@ BOOST_AUTO_TEST_CASE(valid_route_urls)
CHECK_EQUAL_RANGE(reference_21.radiuses, result_21->radiuses);
CHECK_EQUAL_RANGE(reference_21.approaches, result_21->approaches);
CHECK_EQUAL_RANGE(reference_21.coordinates, result_21->coordinates);
CHECK_EQUAL_RANGE(reference_21.hints, result_21->hints);
CHECK_EQUAL_RANGE_OF_HINTS(reference_21.hints, result_21->hints);
CHECK_EQUAL_RANGE(reference_21.exclude, result_21->exclude);
}

@ -24,15 +24,6 @@
#include <utility>
#include <vector>

// explicit TBB scheduler init to register resources cleanup at exit
#if TBB_VERSION_MAJOR == 2020
#include <tbb/global_control.h>
tbb::global_control scheduler(tbb::global_control::max_allowed_parallelism, 2);
#else
#include <tbb/task_scheduler_init.h>
tbb::task_scheduler_init init(2);
#endif
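The hunk above removes the explicit TBB scheduler setup from the test translation unit. For reference, the idiom the removed block used on TBB 2020+ is the RAII thread cap shown below; this is a minimal standalone sketch assuming a TBB ≥ 2019 / oneTBB toolchain, not code from this commit.

    #include <tbb/global_control.h>
    #include <tbb/parallel_for.h>

    int main()
    {
        // Cap TBB at two worker threads for as long as 'limit' is alive,
        // mirroring what the removed global 'scheduler' object did.
        tbb::global_control limit(tbb::global_control::max_allowed_parallelism, 2);

        // ... run the parallel test workload here, e.g.:
        tbb::parallel_for(0, 8, [](int) { /* work */ });
        return 0;
    }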

BOOST_AUTO_TEST_SUITE(static_rtree)

using namespace osrm;
@ -209,8 +200,8 @@ void simple_verify_rtree(RTreeT &rtree,
auto result_u = rtree.Nearest(pu, 1);
auto result_v = rtree.Nearest(pv, 1);
BOOST_CHECK(result_u.size() == 1 && result_v.size() == 1);
BOOST_CHECK(result_u.front().u == e.u || result_u.front().v == e.u);
BOOST_CHECK(result_v.front().u == e.v || result_v.front().v == e.v);
BOOST_CHECK(result_u.front().data.u == e.u || result_u.front().data.v == e.u);
BOOST_CHECK(result_v.front().data.u == e.v || result_v.front().data.v == e.v);
}
}
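In this r-tree hunk and the two that follow, the assertions stop reading .u/.v off the query result directly and go through a .data member instead, which suggests Nearest() now returns candidate objects that wrap the stored edge rather than the edge itself. A toy sketch of that access pattern, using made-up types rather than the real r-tree result type:

    #include <vector>

    struct ToyEdge
    {
        unsigned u, v; // endpoints of the stored segment
    };

    struct ToyCandidate
    {
        ToyEdge data; // the stored edge; a real candidate could also carry snapped geometry
    };

    int main()
    {
        std::vector<ToyCandidate> result = {ToyCandidate{ToyEdge{1, 2}}};
        // old style read result.front().u; the new checks dereference .data:
        return result.front().data.u == 1 ? 0 : 1;
    }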

@ -235,8 +226,8 @@ void sampling_verify_rtree(RTreeT &rtree,
auto result_lsnn = lsnn.Nearest(q, 1);
BOOST_CHECK(result_rtree.size() == 1);
BOOST_CHECK(result_lsnn.size() == 1);
auto rtree_u = result_rtree.back().u;
auto rtree_v = result_rtree.back().v;
auto rtree_u = result_rtree.back().data.u;
auto rtree_v = result_rtree.back().data.v;
auto lsnn_u = result_lsnn.back().u;
auto lsnn_v = result_lsnn.back().v;

@ -331,8 +322,8 @@ BOOST_AUTO_TEST_CASE(regression_test)
BOOST_CHECK(result_rtree.size() == 1);
BOOST_CHECK(result_ls.size() == 1);

BOOST_CHECK_EQUAL(result_ls.front().u, result_rtree.front().u);
BOOST_CHECK_EQUAL(result_ls.front().v, result_rtree.front().v);
BOOST_CHECK_EQUAL(result_ls.front().u, result_rtree.front().data.u);
BOOST_CHECK_EQUAL(result_ls.front().v, result_rtree.front().data.v);
}

// Bug: If you query a point with a narrow radius, no result should be returned
@ -356,8 +347,8 @@ BOOST_AUTO_TEST_CASE(radius_regression_test)
Coordinate input(FloatLongitude{5.2}, FloatLatitude{5.0});

{
auto results = query.NearestPhantomNodesInRange(
input, 0.01, osrm::engine::Approach::UNRESTRICTED, true);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 0.01, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 0);
}
}
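The query calls in this and the remaining hunks converge on a single NearestPhantomNodes entry point whose optional filters (result cap, search radius, bearing window) are passed as boost::optional arguments, replacing the old NearestPhantomNodesInRange and bearing-specific overloads. Below is a self-contained sketch of that consolidation pattern; the toy signature only mirrors the argument order visible in the diff and is not the real GeospatialQuery API.

    #include <boost/optional.hpp>
    #include <cstddef>
    #include <iostream>
    #include <vector>

    struct ToyBearing
    {
        short bearing, range;
    };

    // One entry point: unused filters are simply boost::none.
    std::vector<int> nearestToy(double lon,
                                double lat,
                                boost::optional<std::size_t> max_results,
                                boost::optional<double> max_distance,
                                boost::optional<ToyBearing> bearing,
                                bool use_all_edges)
    {
        (void)lon;
        (void)lat;
        (void)max_results;
        (void)max_distance;
        (void)bearing;
        (void)use_all_edges;
        return {}; // a real implementation would filter r-tree candidates here
    }

    int main()
    {
        // Radius-limited query, like the old NearestPhantomNodesInRange(input, 0.01, ..., true).
        auto in_range = nearestToy(5.2, 5.0, boost::none, 0.01, boost::none, true);
        // k-nearest with a bearing window, like the old overload taking (5, 270, 10).
        auto with_bearing = nearestToy(5.1, 5.0, std::size_t{5}, boost::none, ToyBearing{270, 10}, false);
        std::cout << in_range.size() + with_bearing.size() << "\n";
        return 0;
    }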
@ -382,14 +373,14 @@ BOOST_AUTO_TEST_CASE(permissive_edge_snapping)
Coordinate input(FloatLongitude{0.0005}, FloatLatitude{0.0005});

{
auto results = query.NearestPhantomNodesInRange(
input, 1000, osrm::engine::Approach::UNRESTRICTED, false);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 1000, boost::none, false);
BOOST_CHECK_EQUAL(results.size(), 1);
}

{
auto results = query.NearestPhantomNodesInRange(
input, 1000, osrm::engine::Approach::UNRESTRICTED, true);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 1000, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 2);
}
}
@ -414,21 +405,30 @@ BOOST_AUTO_TEST_CASE(bearing_tests)
Coordinate input(FloatLongitude{5.1}, FloatLatitude{5.0});

{
auto results = query.NearestPhantomNodes(input, 5, osrm::engine::Approach::UNRESTRICTED);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 5, boost::none, boost::none, false);
BOOST_CHECK_EQUAL(results.size(), 2);
BOOST_CHECK_EQUAL(results.back().phantom_node.forward_segment_id.id, 0);
BOOST_CHECK_EQUAL(results.back().phantom_node.reverse_segment_id.id, 1);
}

{
auto results =
query.NearestPhantomNodes(input, 5, 270, 10, osrm::engine::Approach::UNRESTRICTED);
auto results = query.NearestPhantomNodes(input,
osrm::engine::Approach::UNRESTRICTED,
5,
boost::none,
engine::Bearing{270, 10},
false);
BOOST_CHECK_EQUAL(results.size(), 0);
}

{
auto results =
query.NearestPhantomNodes(input, 5, 45, 10, osrm::engine::Approach::UNRESTRICTED);
auto results = query.NearestPhantomNodes(input,
osrm::engine::Approach::UNRESTRICTED,
5,
boost::none,
engine::Bearing{45, 10},
false);
BOOST_CHECK_EQUAL(results.size(), 2);

BOOST_CHECK(results[0].phantom_node.forward_segment_id.enabled);
@ -441,20 +441,28 @@ BOOST_AUTO_TEST_CASE(bearing_tests)
}

{
auto results = query.NearestPhantomNodesInRange(
input, 11000, osrm::engine::Approach::UNRESTRICTED, true);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 11000, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 2);
}

{
auto results = query.NearestPhantomNodesInRange(
input, 11000, 270, 10, osrm::engine::Approach::UNRESTRICTED, true);
auto results = query.NearestPhantomNodes(input,
osrm::engine::Approach::UNRESTRICTED,
boost::none,
11000,
engine::Bearing{270, 10},
true);
BOOST_CHECK_EQUAL(results.size(), 0);
}

{
auto results = query.NearestPhantomNodesInRange(
input, 11000, 45, 10, osrm::engine::Approach::UNRESTRICTED, true);
auto results = query.NearestPhantomNodes(input,
osrm::engine::Approach::UNRESTRICTED,
boost::none,
11000,
engine::Bearing{45, 10},
true);
BOOST_CHECK_EQUAL(results.size(), 2);

BOOST_CHECK(results[0].phantom_node.forward_segment_id.enabled);