Merge branch 'develop'
commit a2e114e852

 .clang-tidy (new file) | 4
							| @ -0,0 +1,4 @@ | ||||
| --- | ||||
| Checks:          '-clang-analyzer-*,google-*,llvm-*,misc-*,readability-*,-google-build-explicit-make-pair,-google-explicit-constructor,-google-readability-braces-around-statements,-google-readability-casting,-google-readability-namespace-comments,-google-readability-function,-google-readability-todo,-google-runtime-int,-llvm-namespace-comment,-llvm-header-guard,-llvm-twine-local,-misc-argument-comment,-readability-braces-around-statements,-readability-identifier-naming' | ||||
| ... | ||||
| 

 .gitignore (vendored) | 2
							| @ -40,8 +40,6 @@ Thumbs.db | ||||
| # build related files # | ||||
| ####################### | ||||
| /build/ | ||||
| /util/fingerprint_impl.hpp | ||||
| /util/git_sha.cpp | ||||
| /cmake/postinst | ||||
| 
 | ||||
| # Eclipse related files # | ||||

 .travis.yml | 197
							| @ -1,63 +1,146 @@ | ||||
| language: cpp | ||||
| compiler: | ||||
|   - gcc | ||||
| #  - clang | ||||
| # Make sure CMake is installed | ||||
| install: | ||||
|  - sudo apt-add-repository -y ppa:ubuntu-toolchain-r/test | ||||
|  - sudo add-apt-repository -y ppa:boost-latest/ppa | ||||
|  - sudo apt-get update >/dev/null | ||||
|  - sudo apt-get -q install libbz2-dev libstxxl-dev libstxxl1 libxml2-dev libzip-dev lua5.1 liblua5.1-0-dev rubygems libtbb-dev | ||||
|  - sudo apt-get -q install g++-4.8 | ||||
|  - sudo apt-get install libboost1.54-all-dev | ||||
|  - sudo apt-get install libgdal-dev | ||||
|  # luabind | ||||
|  - curl https://gist.githubusercontent.com/DennisOSRM/f2eb7b948e6fe1ae319e/raw/install-luabind.sh | sudo bash | ||||
|  # osmosis | ||||
|  - curl -s https://gist.githubusercontent.com/DennisOSRM/803a64a9178ec375069f/raw/ | sudo bash | ||||
|  # cmake | ||||
|  - curl -s https://gist.githubusercontent.com/DennisOSRM/5fad9bee5c7f09fd7fc9/raw/ | sudo bash | ||||
| before_script: | ||||
|  - rvm use 1.9.3 | ||||
|  - gem install bundler | ||||
|  - bundle install | ||||
|  - mkdir build | ||||
|  - cd build | ||||
|  - cmake .. $CMAKEOPTIONS -DBUILD_TOOLS=1 | ||||
| script: | ||||
|  - make | ||||
|  - make tests | ||||
|  - make benchmarks | ||||
|  - ./algorithm-tests | ||||
|  - ./datastructure-tests | ||||
|  - cd .. | ||||
|  - cucumber -p verify | ||||
| after_script: | ||||
| # - cd .. | ||||
| # - cucumber -p verify | ||||
| sudo: required | ||||
| dist: trusty | ||||
| 
 | ||||
| notifications: | ||||
|   email: false | ||||
| 
 | ||||
| branches: | ||||
|   only: | ||||
|     - master | ||||
|     - develop | ||||
| cache: | ||||
| - bundler | ||||
| - apt | ||||
| env: | ||||
|  - CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=g++-4.8" OSRM_PORT=5000 OSRM_TIMEOUT=60 | ||||
|  - CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_COMPILER=g++-4.8" OSRM_PORT=5010 OSRM_TIMEOUT=60 | ||||
|  - CMAKEOPTIONS="-DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON -DCMAKE_CXX_COMPILER=g++-4.8" OSRM_PORT=5020 OSRM_TIMEOUT=60 | ||||
| notifications: | ||||
|  slack: mapbox:4A6euphDwfxAQnhLurXbu6A1 | ||||
|  irc: | ||||
|   channels: | ||||
|     - irc.oftc.net#osrm | ||||
|   on_success: change | ||||
|   on_failure: always | ||||
|   use_notice: true | ||||
|   skip_join: false | ||||
| 
 | ||||
|   recipients: | ||||
|     - patrick@mapbox.com | ||||
|   email: | ||||
|     on_success: change | ||||
|     on_failure: always | ||||
| matrix: | ||||
|   include: | ||||
| 
 | ||||
|     # 1/ Linux Clang Builds | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: clang | ||||
|       addons: &clang38 | ||||
|         apt: | ||||
|           sources: ['llvm-toolchain-precise', 'ubuntu-toolchain-r-test'] | ||||
|           packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev'] | ||||
|       env: COMPILER='clang++-3.8' BUILD_TYPE='Release' | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: clang | ||||
|       addons: &clang38 | ||||
|         apt: | ||||
|           sources: ['llvm-toolchain-precise', 'ubuntu-toolchain-r-test'] | ||||
|           packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev'] | ||||
|       env: COMPILER='clang++-3.8' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: clang | ||||
|       addons: *clang38 | ||||
|       env: COMPILER='clang++-3.8' BUILD_TYPE='Debug' | ||||
| 
 | ||||
| 
 | ||||
|     # 2/ Linux GCC Builds | ||||
|     - os: linux | ||||
|       compiler: gcc | ||||
|       addons: &gcc48 | ||||
|         apt: | ||||
|           sources: ['ubuntu-toolchain-r-test'] | ||||
|           packages: ['g++-4.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev'] | ||||
|       env: COMPILER='g++-4.8' BUILD_TYPE='Release' | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: gcc | ||||
|       addons: *gcc48 | ||||
|       env: COMPILER='g++-4.8' BUILD_TYPE='Debug' | ||||
| 
 | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: gcc | ||||
|       addons: &gcc5 | ||||
|         apt: | ||||
|           sources: ['ubuntu-toolchain-r-test'] | ||||
|           packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev'] | ||||
|       env: COMPILER='g++-5' BUILD_TYPE='Release' | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: gcc | ||||
|       addons: &gcc5 | ||||
|         apt: | ||||
|           sources: ['ubuntu-toolchain-r-test'] | ||||
|           packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev'] | ||||
|       env: COMPILER='g++-5' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON | ||||
| 
 | ||||
|     - os: linux | ||||
|       compiler: gcc | ||||
|       addons: *gcc5 | ||||
|       env: COMPILER='g++-5' BUILD_TYPE='Debug' | ||||
| 
 | ||||
| 
 | ||||
|     # Disabled until tests all pass on OSX: | ||||
|     # | ||||
|     # 3/ OSX Clang Builds | ||||
|     #- os: osx | ||||
|     #  osx_image: xcode6.4 | ||||
|     #  compiler: clang | ||||
|     #  env: COMPILER='clang++' BUILD_TYPE='Debug' | ||||
| 
 | ||||
|     #- os: osx | ||||
|     #  osx_image: xcode6.4 | ||||
|     #  compiler: clang | ||||
|     #  env: COMPILER='clang++' BUILD_TYPE='Release' | ||||
| 
 | ||||
|     #- os: osx | ||||
|     #  osx_image: xcode6.4 | ||||
|     #  compiler: clang | ||||
|     #  env: COMPILER='clang++' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON | ||||
| 
 | ||||
|     #- os: osx | ||||
|     #  osx_image: xcode7 | ||||
|     #  compiler: clang | ||||
|     #  env: COMPILER='clang++' BUILD_TYPE='Debug' | ||||
| 
 | ||||
|     #- os: osx | ||||
|     #  osx_image: xcode7 | ||||
|     #  compiler: clang | ||||
|     #  env: COMPILER='clang++' BUILD_TYPE='Release' | ||||
| 
 | ||||
|     #- os: osx | ||||
|     #  osx_image: xcode7 | ||||
|     #  compiler: clang | ||||
|     #  env: COMPILER='clang++' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON | ||||
| 
 | ||||
| 
 | ||||
| install: | ||||
|   - DEPS_DIR="${TRAVIS_BUILD_DIR}/deps" | ||||
|   - mkdir -p ${DEPS_DIR} && cd ${DEPS_DIR} | ||||
|   - | | ||||
|     if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then | ||||
|       CMAKE_URL="http://www.cmake.org/files/v3.3/cmake-3.3.2-Linux-x86_64.tar.gz" | ||||
|       mkdir cmake && travis_retry wget --quiet -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake | ||||
|       export PATH=${DEPS_DIR}/cmake/bin:${PATH} | ||||
| 
 | ||||
|       OSMOSIS_URL="http://bretth.dev.openstreetmap.org/osmosis-build/osmosis-latest.tgz" | ||||
|       mkdir osmosis && travis_retry wget --quiet -O - ${OSMOSIS_URL} | tar -xz -C osmosis | ||||
|       export PATH=${DEPS_DIR}/osmosis/bin:${PATH} | ||||
| 
 | ||||
|     elif [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then | ||||
|       brew install cmake boost libzip libstxxl libxml2 lua51 luabind tbb GDAL osmosis | ||||
|     fi | ||||
| 
 | ||||
| before_script: | ||||
|   - cd ${TRAVIS_BUILD_DIR} | ||||
|   - rvm use 1.9.3 | ||||
|   - gem install bundler | ||||
|   - bundle install | ||||
|   - mkdir build && cd build | ||||
|   - export CXX=${COMPILER} | ||||
|   - export OSRM_PORT=5000 OSRM_TIMEOUT=60 | ||||
|   - cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} -DBUILD_TOOLS=1 | ||||
| 
 | ||||
| script: | ||||
|   - make --jobs=2 | ||||
|   - make tests --jobs=2 | ||||
|   - make benchmarks | ||||
|   - ./algorithm-tests | ||||
|   - ./datastructure-tests | ||||
|   - ./util-tests | ||||
|   - cd .. | ||||
|   - cucumber -p verify | ||||

 CMakeLists.txt | 164
							| @ -7,13 +7,15 @@ This process created the file `CMakeCache.txt' and the directory `CMakeFiles'. P | ||||
| endif() | ||||
| 
 | ||||
| project(OSRM C CXX) | ||||
| set(OSRM_VERSION_MAJOR 4) | ||||
| set(OSRM_VERSION_MINOR 9) | ||||
| set(OSRM_VERSION_PATCH 0) | ||||
| 
 | ||||
| set(CMAKE_EXPORT_COMPILE_COMMANDS ON) | ||||
| include(CheckCXXCompilerFlag) | ||||
| include(FindPackageHandleStandardArgs) | ||||
| 
 | ||||
| list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") | ||||
| include(GetGitRevisionDescription) | ||||
| git_describe(GIT_DESCRIPTION) | ||||
| 
 | ||||
| set(bitness 32) | ||||
| if(CMAKE_SIZEOF_VOID_P EQUAL 8) | ||||
| @ -28,29 +30,32 @@ if(WIN32 AND MSVC_VERSION LESS 1800) | ||||
| endif() | ||||
| 
 | ||||
| option(ENABLE_JSON_LOGGING "Adds additional JSON debug logging to the response" OFF) | ||||
| option(WITH_TOOLS "Build OSRM tools" OFF) | ||||
| option(DEBUG_GEOMETRY "Enables an option to dump GeoJSON of the final routing graph" OFF) | ||||
| option(BUILD_TOOLS "Build OSRM tools" OFF) | ||||
| 
 | ||||
| include_directories(${CMAKE_CURRENT_SOURCE_DIR}) | ||||
| include_directories(${CMAKE_CURRENT_BINARY_DIR}) | ||||
| include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include/) | ||||
| include_directories(${CMAKE_CURRENT_SOURCE_DIR}/third_party/) | ||||
| include_directories(${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include/) | ||||
| include_directories(SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/third_party/) | ||||
| include_directories(SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include/) | ||||
| 
 | ||||
| add_custom_target(FingerPrintConfigure ALL | ||||
|   ${CMAKE_COMMAND} -DSOURCE_DIR=${CMAKE_SOURCE_DIR} | ||||
|     -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/FingerPrint-Config.cmake | ||||
| add_custom_target(FingerPrintConfigure ALL ${CMAKE_COMMAND} | ||||
|   "-DOUTPUT_DIR=${CMAKE_CURRENT_BINARY_DIR}" | ||||
|   "-DSOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR}" | ||||
|   -P "${CMAKE_CURRENT_SOURCE_DIR}/cmake/FingerPrint-Config.cmake" | ||||
|   COMMENT "Configuring revision fingerprint" | ||||
|   VERBATIM) | ||||
| 
 | ||||
| add_custom_target(tests DEPENDS datastructure-tests algorithm-tests) | ||||
| add_custom_target(tests DEPENDS datastructure-tests algorithm-tests util-tests) | ||||
| add_custom_target(benchmarks DEPENDS rtree-bench) | ||||
| 
 | ||||
| set(BOOST_COMPONENTS date_time filesystem iostreams program_options regex system thread unit_test_framework) | ||||
| 
 | ||||
| configure_file( | ||||
|   ${CMAKE_CURRENT_SOURCE_DIR}/util/git_sha.cpp.in | ||||
|   ${CMAKE_CURRENT_SOURCE_DIR}/util/git_sha.cpp | ||||
|   ${CMAKE_CURRENT_SOURCE_DIR}/util/version.hpp.in | ||||
|   ${CMAKE_CURRENT_BINARY_DIR}/util/version.hpp | ||||
| ) | ||||
| file(GLOB ExtractorGlob extractor/*.cpp) | ||||
| file(GLOB ExtractorGlob extractor/*.cpp data_structures/hilbert_value.cpp) | ||||
| file(GLOB ImporterGlob data_structures/import_edge.cpp data_structures/external_memory_node.cpp data_structures/raster_source.cpp) | ||||
| add_library(IMPORT OBJECT ${ImporterGlob}) | ||||
| add_library(LOGGER OBJECT util/simple_logger.cpp) | ||||
| @ -61,7 +66,7 @@ add_library(MERCATOR OBJECT util/mercator.cpp) | ||||
| add_library(ANGLE OBJECT util/compute_angle.cpp) | ||||
| 
 | ||||
| set(ExtractorSources extract.cpp ${ExtractorGlob}) | ||||
| add_executable(osrm-extract ${ExtractorSources} $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:IMPORT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR>) | ||||
| add_executable(osrm-extract ${ExtractorSources} $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:IMPORT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR> $<TARGET_OBJECTS:COMPRESSEDEDGE> $<TARGET_OBJECTS:GRAPHCOMPRESSOR> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:ANGLE>) | ||||
| 
 | ||||
| add_library(RESTRICTION OBJECT data_structures/restriction_map.cpp) | ||||
| add_library(COMPRESSEDEDGE OBJECT data_structures/compressed_edge_container.cpp) | ||||
| @ -69,7 +74,7 @@ add_library(GRAPHCOMPRESSOR OBJECT algorithms/graph_compressor.cpp) | ||||
| 
 | ||||
| file(GLOB PrepareGlob contractor/*.cpp data_structures/hilbert_value.cpp {RestrictionMapGlob}) | ||||
| set(PrepareSources prepare.cpp ${PrepareGlob}) | ||||
| add_executable(osrm-prepare ${PrepareSources} $<TARGET_OBJECTS:ANGLE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:IMPORT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR> $<TARGET_OBJECTS:COMPRESSEDEDGE> $<TARGET_OBJECTS:GRAPHCOMPRESSOR>) | ||||
| add_executable(osrm-prepare ${PrepareSources} $<TARGET_OBJECTS:ANGLE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:IMPORT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR> $<TARGET_OBJECTS:COMPRESSEDEDGE> $<TARGET_OBJECTS:GRAPHCOMPRESSOR>) | ||||
| 
 | ||||
| file(GLOB ServerGlob server/*.cpp) | ||||
| file(GLOB DescriptorGlob descriptors/*.cpp) | ||||
| @ -80,6 +85,7 @@ file(GLOB HttpGlob server/http/*.cpp) | ||||
| file(GLOB LibOSRMGlob library/*.cpp) | ||||
| file(GLOB DataStructureTestsGlob unit_tests/data_structures/*.cpp data_structures/hilbert_value.cpp) | ||||
| file(GLOB AlgorithmTestsGlob unit_tests/algorithms/*.cpp algorithms/graph_compressor.cpp) | ||||
| file(GLOB UtilTestsGlob unit_tests/util/*.cpp) | ||||
| 
 | ||||
| set( | ||||
|   OSRMSources | ||||
| @ -91,8 +97,7 @@ set( | ||||
| ) | ||||
| 
 | ||||
| add_library(COORDINATE OBJECT ${CoordinateGlob}) | ||||
| add_library(GITDESCRIPTION OBJECT util/git_sha.cpp) | ||||
| add_library(OSRM ${OSRMSources} $<TARGET_OBJECTS:ANGLE> $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:PHANTOMNODE> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR> $<TARGET_OBJECTS:IMPORT>) | ||||
| add_library(OSRM ${OSRMSources} $<TARGET_OBJECTS:ANGLE> $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:PHANTOMNODE> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR> $<TARGET_OBJECTS:IMPORT>) | ||||
| 
 | ||||
| add_library(FINGERPRINT OBJECT util/fingerprint.cpp) | ||||
| add_dependencies(FINGERPRINT FingerPrintConfigure) | ||||
| @ -100,11 +105,12 @@ add_dependencies(OSRM FingerPrintConfigure) | ||||
| set_target_properties(FINGERPRINT PROPERTIES LINKER_LANGUAGE CXX) | ||||
| 
 | ||||
| add_executable(osrm-routed routed.cpp ${ServerGlob} $<TARGET_OBJECTS:EXCEPTION>) | ||||
| add_executable(osrm-datastore datastore.cpp $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR>) | ||||
| add_executable(osrm-datastore datastore.cpp $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR>) | ||||
| 
 | ||||
| # Unit tests | ||||
| add_executable(datastructure-tests EXCLUDE_FROM_ALL unit_tests/datastructure_tests.cpp ${DataStructureTestsGlob} $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:PHANTOMNODE> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR> $<TARGET_OBJECTS:COMPRESSEDEDGE> $<TARGET_OBJECTS:GRAPHCOMPRESSOR> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:RASTERSOURCE>) | ||||
| add_executable(algorithm-tests EXCLUDE_FROM_ALL unit_tests/algorithm_tests.cpp ${AlgorithmTestsGlob} $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:PHANTOMNODE> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:COMPRESSEDEDGE>) | ||||
| add_executable(util-tests EXCLUDE_FROM_ALL unit_tests/util_tests.cpp ${UtilTestsGlob}) | ||||
| 
 | ||||
| # Benchmarks | ||||
| add_executable(rtree-bench EXCLUDE_FROM_ALL benchmarks/static_rtree.cpp $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:PHANTOMNODE> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR>) | ||||
| @ -116,18 +122,29 @@ endif() | ||||
| if(CMAKE_BUILD_TYPE MATCHES Debug) | ||||
|   message(STATUS "Configuring OSRM in debug mode") | ||||
|   if(NOT ${CMAKE_CXX_COMPILER_ID} STREQUAL "MSVC") | ||||
|     message(STATUS "adding profiling flags") | ||||
|     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage -fno-inline") | ||||
|     set(CMAKE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage -fno-inline") | ||||
| 
 | ||||
|   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-inline -fno-omit-frame-pointer") | ||||
| 
 | ||||
|   if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") | ||||
|     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Og -ggdb") | ||||
|   endif() | ||||
| 
 | ||||
|   endif() | ||||
| endif() | ||||
| if(CMAKE_BUILD_TYPE MATCHES Release) | ||||
|   message(STATUS "Configuring OSRM in release mode") | ||||
|   # Check if LTO is available | ||||
|   set(LTO_FLAGS "") | ||||
|   check_cxx_compiler_flag("-flto" LTO_AVAILABLE) | ||||
|   if(LTO_AVAILABLE) | ||||
|     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -flto") | ||||
|     set(OLD_CXX_FLAGS ${CMAKE_CXX_FLAGS}) | ||||
|     # GCC in addition allows parallelizing LTO | ||||
|     if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") | ||||
|       include(ProcessorCount) | ||||
|       ProcessorCount(NPROC) | ||||
|       set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -flto=${NPROC}") | ||||
|     else() | ||||
|       set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -flto") | ||||
|     endif() | ||||
|     set(CHECK_LTO_SRC "int main(){return 0;}") | ||||
|     check_cxx_source_compiles("${CHECK_LTO_SRC}" LTO_WORKS) | ||||
|     if(LTO_WORKS) | ||||
| @ -144,6 +161,11 @@ if(CMAKE_BUILD_TYPE MATCHES Release) | ||||
|       set(CMAKE_AR     "/usr/bin/gcc-ar") | ||||
|       set(CMAKE_RANLIB "/usr/bin/gcc-ranlib") | ||||
|     endif() | ||||
| 
 | ||||
|     if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" AND "${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS "4.9.0") | ||||
|       message(STATUS "Disabling LTO on GCC < 4.9.0 since it is broken, see: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=57038") | ||||
|       set(CMAKE_CXX_FLAGS "${OLD_CXX_FLAGS}") | ||||
|     endif() | ||||
|   endif() | ||||
| endif() | ||||
| 
 | ||||
| @ -153,9 +175,7 @@ endif() | ||||
| 
 | ||||
| # Configuring compilers | ||||
| if(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang") | ||||
|   # using Clang | ||||
|   #  -Weverything -Wno-c++98-compat -Wno-shadow -Wno-exit-time-destructors | ||||
|   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wunreachable-code -pedantic -fPIC") | ||||
|   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -pedantic -Wuninitialized -Wunreachable-code -Wstrict-overflow=2 -D_FORTIFY_SOURCE=2 -fPIC") | ||||
| elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") | ||||
|   set(COLOR_FLAG "-fdiagnostics-color=auto") | ||||
|   check_cxx_compiler_flag("-fdiagnostics-color=auto" HAS_COLOR_FLAG) | ||||
| @ -163,7 +183,7 @@ elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") | ||||
|     set(COLOR_FLAG "") | ||||
|   endif() | ||||
|   # using GCC | ||||
|   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -pedantic -fPIC ${COLOR_FLAG}") | ||||
|   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -pedantic -Wuninitialized -Wunreachable-code -Wstrict-overflow=1 -D_FORTIFY_SOURCE=2 ${COLOR_FLAG} -fPIC") | ||||
|   if(WIN32) # using mingw | ||||
|     add_definitions(-D_USE_MATH_DEFINES) # define M_PI, M_1_PI etc. | ||||
|     add_definitions(-DWIN32) | ||||
| @ -184,6 +204,26 @@ elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "MSVC") | ||||
|   target_link_libraries(osrm-extract wsock32 ws2_32) | ||||
| endif() | ||||
| 
 | ||||
| # Configuring linker | ||||
| execute_process(COMMAND ${CMAKE_CXX_COMPILER} "-Wl,--version" ERROR_QUIET OUTPUT_VARIABLE LINKER_VERSION) | ||||
| # For ld.gold and ld.bfd (the GNU linkers) we optimize hard | ||||
| if("${LINKER_VERSION}" MATCHES "GNU gold" OR "${LINKER_VERSION}" MATCHES "GNU ld") | ||||
|   message(STATUS "Setting linker optimizations") | ||||
|   if(NOT ${CMAKE_CXX_COMPILER_ID} STREQUAL "MSVC") | ||||
|     # Tell compiler to put every function in separate section, linker can then match sections and functions | ||||
|     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ffunction-sections -fdata-sections") | ||||
|     # Tell linker to do dead code and data elimination at link time by discarding unused sections | ||||
|     set(LINKER_FLAGS "${LINKER_FLAGS} -Wl,--gc-sections") | ||||
|   endif() | ||||
|   # Default linker optimization flags | ||||
|   set(LINKER_FLAGS "${LINKER_FLAGS} -Wl,-O1 -Wl,--hash-style=gnu -Wl,--sort-common") | ||||
| else() | ||||
|   message(STATUS "Using unknown linker, not setting linker optimizations") | ||||
| endif () | ||||
| set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${LINKER_FLAGS}") | ||||
| set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${LINKER_FLAGS}") | ||||
| set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${LINKER_FLAGS}") | ||||
| 
 | ||||
| # Activate C++11 | ||||
| if(NOT ${CMAKE_CXX_COMPILER_ID} STREQUAL "MSVC") | ||||
|   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ") | ||||
| @ -211,12 +251,11 @@ if(UNIX AND NOT APPLE) | ||||
| endif() | ||||
| 
 | ||||
| #Check Boost | ||||
| set(BOOST_MIN_VERSION "1.49.0") | ||||
| find_package(Boost ${BOOST_MIN_VERSION} COMPONENTS ${BOOST_COMPONENTS} REQUIRED) | ||||
| find_package(Boost 1.49.0 COMPONENTS ${BOOST_COMPONENTS} REQUIRED) | ||||
| if(NOT Boost_FOUND) | ||||
|   message(FATAL_ERROR "Fatal error: Boost (version >= 1.49.0) required.\n") | ||||
| endif() | ||||
| include_directories(${Boost_INCLUDE_DIRS}) | ||||
| include_directories(SYSTEM ${Boost_INCLUDE_DIRS}) | ||||
| 
 | ||||
| target_link_libraries(OSRM ${Boost_LIBRARIES}) | ||||
| target_link_libraries(osrm-extract ${Boost_LIBRARIES}) | ||||
| @ -225,6 +264,7 @@ target_link_libraries(osrm-routed ${Boost_LIBRARIES} ${OPTIONAL_SOCKET_LIBS} OSR | ||||
| target_link_libraries(osrm-datastore ${Boost_LIBRARIES}) | ||||
| target_link_libraries(datastructure-tests ${Boost_LIBRARIES}) | ||||
| target_link_libraries(algorithm-tests ${Boost_LIBRARIES} ${OPTIONAL_SOCKET_LIBS} OSRM) | ||||
| target_link_libraries(util-tests ${Boost_LIBRARIES}) | ||||
| target_link_libraries(rtree-bench ${Boost_LIBRARIES}) | ||||
| 
 | ||||
| find_package(Threads REQUIRED) | ||||
| @ -247,12 +287,12 @@ target_link_libraries(osrm-routed ${TBB_LIBRARIES}) | ||||
| target_link_libraries(datastructure-tests ${TBB_LIBRARIES}) | ||||
| target_link_libraries(algorithm-tests ${TBB_LIBRARIES}) | ||||
| target_link_libraries(rtree-bench ${TBB_LIBRARIES}) | ||||
| include_directories(${TBB_INCLUDE_DIR}) | ||||
| include_directories(SYSTEM ${TBB_INCLUDE_DIR}) | ||||
| 
 | ||||
| find_package( Luabind REQUIRED ) | ||||
| include(check_luabind) | ||||
| 
 | ||||
| include_directories(${LUABIND_INCLUDE_DIR}) | ||||
| include_directories(SYSTEM ${LUABIND_INCLUDE_DIR}) | ||||
| target_link_libraries(osrm-extract ${LUABIND_LIBRARY}) | ||||
| target_link_libraries(osrm-prepare ${LUABIND_LIBRARY}) | ||||
| 
 | ||||
| @ -263,17 +303,18 @@ else() | ||||
|   target_link_libraries(osrm-extract ${LUA_LIBRARY}) | ||||
|   target_link_libraries(osrm-prepare ${LUA_LIBRARY}) | ||||
| endif() | ||||
| include_directories(${LUA_INCLUDE_DIR}) | ||||
| include_directories(SYSTEM ${LUA_INCLUDE_DIR}) | ||||
| 
 | ||||
| find_package(EXPAT REQUIRED) | ||||
| include_directories(${EXPAT_INCLUDE_DIRS}) | ||||
| include_directories(SYSTEM ${EXPAT_INCLUDE_DIRS}) | ||||
| target_link_libraries(osrm-extract ${EXPAT_LIBRARIES}) | ||||
| 
 | ||||
| find_package(STXXL REQUIRED) | ||||
| include_directories(${STXXL_INCLUDE_DIR}) | ||||
| include_directories(SYSTEM ${STXXL_INCLUDE_DIR}) | ||||
| target_link_libraries(OSRM ${STXXL_LIBRARY}) | ||||
| target_link_libraries(osrm-extract ${STXXL_LIBRARY}) | ||||
| target_link_libraries(osrm-prepare ${STXXL_LIBRARY}) | ||||
| target_link_libraries(datastructure-tests ${STXXL_LIBRARY}) | ||||
| 
 | ||||
| set(OpenMP_FIND_QUIETLY ON) | ||||
| find_package(OpenMP) | ||||
| @ -283,11 +324,11 @@ if(OPENMP_FOUND) | ||||
| endif() | ||||
| 
 | ||||
| find_package(BZip2 REQUIRED) | ||||
| include_directories(${BZIP_INCLUDE_DIRS}) | ||||
| include_directories(SYSTEM ${BZIP_INCLUDE_DIRS}) | ||||
| target_link_libraries(osrm-extract ${BZIP2_LIBRARIES}) | ||||
| 
 | ||||
| find_package(ZLIB REQUIRED) | ||||
| include_directories(${ZLIB_INCLUDE_DIRS}) | ||||
| include_directories(SYSTEM ${ZLIB_INCLUDE_DIRS}) | ||||
| target_link_libraries(osrm-extract ${ZLIB_LIBRARY}) | ||||
| target_link_libraries(osrm-routed ${ZLIB_LIBRARY}) | ||||
| 
 | ||||
| @ -296,16 +337,19 @@ if (ENABLE_JSON_LOGGING) | ||||
|   add_definitions(-DENABLE_JSON_LOGGING) | ||||
| endif() | ||||
| 
 | ||||
| if(WITH_TOOLS OR BUILD_TOOLS) | ||||
| if (DEBUG_GEOMETRY) | ||||
|   message(STATUS "Enabling final edge weight GeoJSON output option") | ||||
|   add_definitions(-DDEBUG_GEOMETRY) | ||||
| endif() | ||||
| 
 | ||||
| if(BUILD_TOOLS) | ||||
|   message(STATUS "Activating OSRM internal tools") | ||||
|   find_package(GDAL) | ||||
|   if(GDAL_FOUND) | ||||
|     add_executable(osrm-components tools/components.cpp $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:IMPORT> $<TARGET_OBJECTS:COORDINATE> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:RESTRICTION> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:MERCATOR>) | ||||
|     target_link_libraries(osrm-components ${TBB_LIBRARIES}) | ||||
|     include_directories(${GDAL_INCLUDE_DIR}) | ||||
|     target_link_libraries( | ||||
|       osrm-components | ||||
|       ${GDAL_LIBRARIES} ${Boost_LIBRARIES}) | ||||
|     include_directories(SYSTEM ${GDAL_INCLUDE_DIR}) | ||||
|     target_link_libraries(osrm-components ${GDAL_LIBRARIES} ${Boost_LIBRARIES}) | ||||
|     install(TARGETS osrm-components DESTINATION bin) | ||||
|   else() | ||||
|     message(FATAL_ERROR "libgdal and/or development headers not found") | ||||
| @ -313,16 +357,16 @@ if(WITH_TOOLS OR BUILD_TOOLS) | ||||
|   add_executable(osrm-cli tools/simpleclient.cpp $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:COORDINATE>) | ||||
|   target_link_libraries(osrm-cli ${Boost_LIBRARIES} ${OPTIONAL_SOCKET_LIBS} OSRM) | ||||
|   target_link_libraries(osrm-cli ${TBB_LIBRARIES}) | ||||
|   add_executable(osrm-io-benchmark tools/io-benchmark.cpp $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:LOGGER>) | ||||
|   add_executable(osrm-io-benchmark tools/io-benchmark.cpp $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:LOGGER>) | ||||
|   target_link_libraries(osrm-io-benchmark ${Boost_LIBRARIES}) | ||||
|   add_executable(osrm-unlock-all tools/unlock_all_mutexes.cpp $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION>) | ||||
|   add_executable(osrm-unlock-all tools/unlock_all_mutexes.cpp $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION>) | ||||
|   target_link_libraries(osrm-unlock-all ${Boost_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}) | ||||
|   if(UNIX AND NOT APPLE) | ||||
|     target_link_libraries(osrm-unlock-all rt) | ||||
|   endif() | ||||
|   add_executable(osrm-check-hsgr tools/check-hsgr.cpp $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:EXCEPTION> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:IMPORT>) | ||||
|   target_link_libraries(osrm-check-hsgr ${Boost_LIBRARIES} ${TBB_LIBRARIES}) | ||||
|   add_executable(osrm-springclean tools/springclean.cpp $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:GITDESCRIPTION> $<TARGET_OBJECTS:EXCEPTION>) | ||||
|   add_executable(osrm-springclean tools/springclean.cpp $<TARGET_OBJECTS:FINGERPRINT> $<TARGET_OBJECTS:LOGGER> $<TARGET_OBJECTS:EXCEPTION>) | ||||
|   target_link_libraries(osrm-springclean ${Boost_LIBRARIES}) | ||||
| 
 | ||||
|   install(TARGETS osrm-cli DESTINATION bin) | ||||
| @ -332,7 +376,7 @@ if(WITH_TOOLS OR BUILD_TOOLS) | ||||
|   install(TARGETS osrm-springclean DESTINATION bin) | ||||
| endif() | ||||
| 
 | ||||
| file(GLOB InstallGlob include/osrm/*.hpp library/osrm.hpp) | ||||
| file(GLOB InstallGlob include/osrm/*.hpp) | ||||
| file(GLOB VariantGlob third_party/variant/*.hpp) | ||||
| 
 | ||||
| # Add RPATH info to executables so that when they are run after being installed | ||||
| @ -350,6 +394,7 @@ install(TARGETS osrm-prepare DESTINATION bin) | ||||
| install(TARGETS osrm-datastore DESTINATION bin) | ||||
| install(TARGETS osrm-routed DESTINATION bin) | ||||
| install(TARGETS OSRM DESTINATION lib) | ||||
| 
 | ||||
| list(GET Boost_LIBRARIES 1 BOOST_LIBRARY_FIRST) | ||||
| get_filename_component(BOOST_LIBRARY_LISTING "${BOOST_LIBRARY_FIRST}" PATH) | ||||
| set(BOOST_LIBRARY_LISTING "-L${BOOST_LIBRARY_LISTING}") | ||||
| @ -358,6 +403,14 @@ foreach(lib ${Boost_LIBRARIES}) | ||||
|   string(REPLACE "lib" "" BOOST_LIBRARY_NAME ${BOOST_LIBRARY_NAME}) | ||||
|   set(BOOST_LIBRARY_LISTING "${BOOST_LIBRARY_LISTING} -l${BOOST_LIBRARY_NAME}") | ||||
| endforeach() | ||||
| list(GET TBB_LIBRARIES 1 TBB_LIBRARY_FIRST) | ||||
| get_filename_component(TBB_LIBRARY_LISTING "${TBB_LIBRARY_FIRST}" PATH) | ||||
| set(TBB_LIBRARY_LISTING "-L${TBB_LIBRARY_LISTING}") | ||||
| foreach(lib ${TBB_LIBRARIES}) | ||||
|   get_filename_component(TBB_LIBRARY_NAME "${lib}" NAME_WE) | ||||
|   string(REPLACE "lib" "" TBB_LIBRARY_NAME ${TBB_LIBRARY_NAME}) | ||||
|   set(TBB_LIBRARY_LISTING "${TBB_LIBRARY_LISTING} -l${TBB_LIBRARY_NAME}") | ||||
| endforeach() | ||||
| 
 | ||||
| configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/pkgconfig.in libosrm.pc @ONLY) | ||||
| install(FILES ${PROJECT_BINARY_DIR}/libosrm.pc DESTINATION lib/pkgconfig) | ||||
| @ -366,3 +419,24 @@ if(BUILD_DEBIAN_PACKAGE) | ||||
|   include(CPackDebianConfig) | ||||
|   include(CPack) | ||||
| endif() | ||||
| 
 | ||||
| # add a target to generate API documentation with Doxygen | ||||
| find_package(Doxygen) | ||||
| if(DOXYGEN_FOUND) | ||||
| configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY) | ||||
| add_custom_target(doc | ||||
| ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile | ||||
| WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} | ||||
| COMMENT "Generating API documentation with Doxygen" VERBATIM | ||||
| ) | ||||
| endif() | ||||
| 
 | ||||
| # prefix compilation with ccache by default if available and on clang or gcc | ||||
| if(${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") | ||||
|   find_program(CCACHE_FOUND ccache) | ||||
|   if(CCACHE_FOUND) | ||||
|     set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache) | ||||
|     set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache) | ||||
|     set(ENV{CCACHE_CPP2} "true") | ||||
|   endif() | ||||
| endif() | ||||

 Doxyfile.in (new file) | 43
							| @ -0,0 +1,43 @@ | ||||
| PROJECT_NAME           = "Project OSRM" | ||||
| PROJECT_BRIEF          = "Open Source Routing Machine" | ||||
| BUILTIN_STL_SUPPORT    = YES | ||||
| 
 | ||||
| EXTRACT_ALL            = YES | ||||
| EXTRACT_PRIVATE        = YES | ||||
| EXTRACT_PACKAGE        = YES | ||||
| EXTRACT_STATIC         = YES | ||||
| EXTRACT_LOCAL_CLASSES  = YES | ||||
| EXTRACT_ANON_NSPACES   = YES | ||||
| 
 | ||||
| QUIET                  = YES | ||||
| 
 | ||||
| INPUT                  = @CMAKE_CURRENT_SOURCE_DIR@ | ||||
| USE_MDFILE_AS_MAINPAGE = @CMAKE_CURRENT_SOURCE_DIR@/README.md | ||||
| FILE_PATTERNS          = *.h *.hpp *.c *.cc *.cpp *.md | ||||
| RECURSIVE              = YES | ||||
| 
 | ||||
| EXCLUDE                = @CMAKE_CURRENT_SOURCE_DIR@/third_party \ | ||||
|                          @CMAKE_CURRENT_SOURCE_DIR@/build \ | ||||
|                          @CMAKE_CURRENT_SOURCE_DIR@/unit_tests \ | ||||
| 			 @CMAKE_CURRENT_SOURCE_DIR@/benchmarks \ | ||||
|                          @CMAKE_CURRENT_SOURCE_DIR@/features | ||||
| 
 | ||||
| SOURCE_BROWSER         = YES | ||||
| 
 | ||||
| CLANG_ASSISTED_PARSING = NO | ||||
| 
 | ||||
| HTML_COLORSTYLE_HUE    = 217 | ||||
| HTML_COLORSTYLE_SAT    = 71 | ||||
| HTML_COLORSTYLE_GAMMA  = 50 | ||||
| 
 | ||||
| GENERATE_TREEVIEW      = YES | ||||
| 
 | ||||
| HAVE_DOT               = @DOXYGEN_DOT_FOUND@ | ||||
| CALL_GRAPH             = YES | ||||
| CALLER_GRAPH           = YES | ||||
| 
 | ||||
| DOT_IMAGE_FORMAT       = svg | ||||
| INTERACTIVE_SVG        = YES | ||||
| DOT_GRAPH_MAX_NODES    = 500 | ||||
| DOT_TRANSPARENT        = YES | ||||
| DOT_MULTI_TARGETS      = YES | ||||
| @ -49,7 +49,7 @@ constexpr static const float earth_radius = 6372797.560856f; | ||||
| namespace coordinate_calculation | ||||
| { | ||||
| 
 | ||||
| double great_circle_distance(const int lat1, | ||||
| double haversine_distance(const int lat1, | ||||
|                                                      const int lon1, | ||||
|                                                      const int lat2, | ||||
|                                                      const int lon2) | ||||
| @ -77,21 +77,21 @@ double great_circle_distance(const int lat1, | ||||
|     return earth_radius * cHarv; | ||||
| } | ||||
| 
 | ||||
| double great_circle_distance(const FixedPointCoordinate &coordinate_1, | ||||
| double haversine_distance(const FixedPointCoordinate &coordinate_1, | ||||
|                                                      const FixedPointCoordinate &coordinate_2) | ||||
| { | ||||
|     return great_circle_distance(coordinate_1.lat, coordinate_1.lon, coordinate_2.lat, | ||||
|     return haversine_distance(coordinate_1.lat, coordinate_1.lon, coordinate_2.lat, | ||||
|                                  coordinate_2.lon); | ||||
| } | ||||
| 
 | ||||
| float euclidean_distance(const FixedPointCoordinate &coordinate_1, | ||||
| float great_circle_distance(const FixedPointCoordinate &coordinate_1, | ||||
|                                                  const FixedPointCoordinate &coordinate_2) | ||||
| { | ||||
|     return euclidean_distance(coordinate_1.lat, coordinate_1.lon, coordinate_2.lat, | ||||
|     return great_circle_distance(coordinate_1.lat, coordinate_1.lon, coordinate_2.lat, | ||||
|                               coordinate_2.lon); | ||||
| } | ||||
| 
 | ||||
| float euclidean_distance(const int lat1, | ||||
| float great_circle_distance(const int lat1, | ||||
|                                                  const int lon1, | ||||
|                                                  const int lat2, | ||||
|                                                  const int lon2) | ||||
| @ -224,7 +224,7 @@ float perpendicular_distance_from_projected_coordinate( | ||||
|     BOOST_ASSERT(nearest_location.is_valid()); | ||||
| 
 | ||||
|     const float approximate_distance = | ||||
|         euclidean_distance(query_location, nearest_location); | ||||
|         great_circle_distance(query_location, nearest_location); | ||||
|     BOOST_ASSERT(0.f <= approximate_distance); | ||||
|     return approximate_distance; | ||||
| } | ||||
|  | ||||
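
For context on the renames in the surrounding hunks: the integer-coordinate great_circle_distance becomes haversine_distance, and the cheaper approximation formerly named euclidean_distance takes over the great_circle_distance name. A minimal standalone sketch of the haversine formula on plain decimal degrees (OSRM's fixed-point lat/lon and COORDINATE_PRECISION scaling are deliberately left out, so this is illustrative rather than the project's exact code):

#include <cmath>

// Haversine great-circle distance in meters between two points given in
// decimal degrees; the radius matches the earth_radius constant used above.
double haversine_meters(double lat1, double lon1, double lat2, double lon2)
{
    constexpr double earth_radius = 6372797.560856;
    constexpr double deg_to_rad = 3.14159265358979323846 / 180.0;
    const double dlat = (lat2 - lat1) * deg_to_rad;
    const double dlon = (lon2 - lon1) * deg_to_rad;
    const double a = std::sin(dlat / 2) * std::sin(dlat / 2) +
                     std::cos(lat1 * deg_to_rad) * std::cos(lat2 * deg_to_rad) *
                         std::sin(dlon / 2) * std::sin(dlon / 2);
    return earth_radius * 2.0 * std::atan2(std::sqrt(a), std::sqrt(1.0 - a));
}
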
| @ -36,15 +36,15 @@ struct FixedPointCoordinate; | ||||
| namespace coordinate_calculation | ||||
| { | ||||
|     double | ||||
|     great_circle_distance(const int lat1, const int lon1, const int lat2, const int lon2); | ||||
|     haversine_distance(const int lat1, const int lon1, const int lat2, const int lon2); | ||||
| 
 | ||||
|     double great_circle_distance(const FixedPointCoordinate &first_coordinate, | ||||
|     double haversine_distance(const FixedPointCoordinate &first_coordinate, | ||||
|                                  const FixedPointCoordinate &second_coordinate); | ||||
| 
 | ||||
|     float euclidean_distance(const FixedPointCoordinate &first_coordinate, | ||||
|     float great_circle_distance(const FixedPointCoordinate &first_coordinate, | ||||
|                              const FixedPointCoordinate &second_coordinate); | ||||
| 
 | ||||
|     float euclidean_distance(const int lat1, const int lon1, const int lat2, const int lon2); | ||||
|     float great_circle_distance(const int lat1, const int lon1, const int lat2, const int lon2); | ||||
| 
 | ||||
|     void lat_or_lon_to_string(const int value, std::string &output); | ||||
| 
 | ||||

 algorithms/geospatial_query.hpp (new file) | 180
							| @ -0,0 +1,180 @@ | ||||
| #ifndef GEOSPATIAL_QUERY_HPP | ||||
| #define GEOSPATIAL_QUERY_HPP | ||||
| 
 | ||||
| #include "coordinate_calculation.hpp" | ||||
| #include "../typedefs.h" | ||||
| #include "../data_structures/phantom_node.hpp" | ||||
| #include "../util/bearing.hpp" | ||||
| 
 | ||||
| #include <osrm/coordinate.hpp> | ||||
| 
 | ||||
| #include <vector> | ||||
| #include <memory> | ||||
| #include <algorithm> | ||||
| 
 | ||||
| // Implements complex queries on top of an RTree and builds PhantomNodes from it.
 | ||||
| //
 | ||||
| // Only holds a weak reference on the RTree!
 | ||||
| template <typename RTreeT> class GeospatialQuery | ||||
| { | ||||
|     using EdgeData = typename RTreeT::EdgeData; | ||||
|     using CoordinateList = typename RTreeT::CoordinateList; | ||||
| 
 | ||||
|   public: | ||||
|     GeospatialQuery(RTreeT &rtree_, std::shared_ptr<CoordinateList> coordinates_) | ||||
|         : rtree(rtree_), coordinates(coordinates_) | ||||
|     { | ||||
|     } | ||||
| 
 | ||||
|     // Returns nearest PhantomNodes in the given bearing range within max_distance.
 | ||||
|     // Does not filter by small/big component!
 | ||||
|     std::vector<PhantomNodeWithDistance> | ||||
|     NearestPhantomNodesInRange(const FixedPointCoordinate &input_coordinate, | ||||
|                                const float max_distance, | ||||
|                                const int bearing = 0, | ||||
|                                const int bearing_range = 180) | ||||
|     { | ||||
|         auto results = | ||||
|             rtree.Nearest(input_coordinate, | ||||
|                           [this, bearing, bearing_range, max_distance](const EdgeData &data) | ||||
|                           { | ||||
|                               return checkSegmentBearing(data, bearing, bearing_range); | ||||
|                           }, | ||||
|                           [max_distance](const std::size_t, const float min_dist) | ||||
|                           { | ||||
|                               return min_dist > max_distance; | ||||
|                           }); | ||||
| 
 | ||||
|         return MakePhantomNodes(input_coordinate, results); | ||||
|     } | ||||
| 
 | ||||
|     // Returns max_results nearest PhantomNodes in the given bearing range.
 | ||||
|     // Does not filter by small/big component!
 | ||||
|     std::vector<PhantomNodeWithDistance> | ||||
|     NearestPhantomNodes(const FixedPointCoordinate &input_coordinate, | ||||
|                         const unsigned max_results, | ||||
|                         const int bearing = 0, | ||||
|                         const int bearing_range = 180) | ||||
|     { | ||||
|         auto results = rtree.Nearest(input_coordinate, | ||||
|                                      [this, bearing, bearing_range](const EdgeData &data) | ||||
|                                      { | ||||
|                                          return checkSegmentBearing(data, bearing, bearing_range); | ||||
|                                      }, | ||||
|                                      [max_results](const std::size_t num_results, const float) | ||||
|                                      { | ||||
|                                          return num_results >= max_results; | ||||
|                                      }); | ||||
| 
 | ||||
|         return MakePhantomNodes(input_coordinate, results); | ||||
|     } | ||||
| 
 | ||||
|     // Returns the nearest phantom node. If this phantom node is not from a big component
 | ||||
|     // a second phantom node is returned that is the nearest coordinate in a big component.
 | ||||
|     std::pair<PhantomNode, PhantomNode> | ||||
|     NearestPhantomNodeWithAlternativeFromBigComponent(const FixedPointCoordinate &input_coordinate, | ||||
|                                                       const int bearing = 0, | ||||
|                                                       const int bearing_range = 180) | ||||
|     { | ||||
|         bool has_small_component = false; | ||||
|         bool has_big_component = false; | ||||
|         auto results = rtree.Nearest( | ||||
|             input_coordinate, | ||||
|             [this, bearing, bearing_range, &has_big_component, | ||||
|              &has_small_component](const EdgeData &data) | ||||
|             { | ||||
|                 auto use_segment = | ||||
|                     (!has_small_component || (!has_big_component && !data.component.is_tiny)); | ||||
|                 auto use_directions = std::make_pair(use_segment, use_segment); | ||||
| 
 | ||||
|                 if (use_segment) | ||||
|                 { | ||||
|                     use_directions = checkSegmentBearing(data, bearing, bearing_range); | ||||
|                     if (use_directions.first || use_directions.second) | ||||
|                     { | ||||
|                         has_big_component = has_big_component || !data.component.is_tiny; | ||||
|                         has_small_component = has_small_component || data.component.is_tiny; | ||||
|                     } | ||||
|                 } | ||||
| 
 | ||||
|                 return use_directions; | ||||
|             }, | ||||
|             [&has_big_component](const std::size_t num_results, const float) | ||||
|             { | ||||
|                 return num_results > 0 && has_big_component; | ||||
|             }); | ||||
| 
 | ||||
|         if (results.size() == 0) | ||||
|         { | ||||
|             return std::make_pair(PhantomNode{}, PhantomNode{}); | ||||
|         } | ||||
| 
 | ||||
|         BOOST_ASSERT(results.size() > 0); | ||||
|         return std::make_pair(MakePhantomNode(input_coordinate, results.front()).phantom_node, | ||||
|                               MakePhantomNode(input_coordinate, results.back()).phantom_node); | ||||
|     } | ||||
| 
 | ||||
|   private: | ||||
|     std::vector<PhantomNodeWithDistance> | ||||
|     MakePhantomNodes(const FixedPointCoordinate &input_coordinate, | ||||
|                      const std::vector<EdgeData> &results) const | ||||
|     { | ||||
|         std::vector<PhantomNodeWithDistance> distance_and_phantoms(results.size()); | ||||
|         std::transform(results.begin(), results.end(), distance_and_phantoms.begin(), | ||||
|                        [this, &input_coordinate](const EdgeData &data) | ||||
|                        { | ||||
|                            return MakePhantomNode(input_coordinate, data); | ||||
|                        }); | ||||
|         return distance_and_phantoms; | ||||
|     } | ||||
| 
 | ||||
|     PhantomNodeWithDistance MakePhantomNode(const FixedPointCoordinate &input_coordinate, | ||||
|                                                    const EdgeData &data) const | ||||
|     { | ||||
|         FixedPointCoordinate point_on_segment; | ||||
|         float ratio; | ||||
|         const auto current_perpendicular_distance = coordinate_calculation::perpendicular_distance( | ||||
|             coordinates->at(data.u), coordinates->at(data.v), input_coordinate, point_on_segment, | ||||
|             ratio); | ||||
| 
 | ||||
|         auto transformed = | ||||
|             PhantomNodeWithDistance { PhantomNode{data, point_on_segment}, current_perpendicular_distance }; | ||||
| 
 | ||||
|         ratio = std::min(1.f, std::max(0.f, ratio)); | ||||
| 
 | ||||
|         if (SPECIAL_NODEID != transformed.phantom_node.forward_node_id) | ||||
|         { | ||||
|             transformed.phantom_node.forward_weight *= ratio; | ||||
|         } | ||||
|         if (SPECIAL_NODEID != transformed.phantom_node.reverse_node_id) | ||||
|         { | ||||
|             transformed.phantom_node.reverse_weight *= 1.f - ratio; | ||||
|         } | ||||
|         return transformed; | ||||
|     } | ||||
| 
 | ||||
|     std::pair<bool, bool> checkSegmentBearing(const EdgeData &segment, | ||||
|                                               const float filter_bearing, | ||||
|                                               const float filter_bearing_range) | ||||
|     { | ||||
|         const float forward_edge_bearing = | ||||
|             coordinate_calculation::bearing(coordinates->at(segment.u), coordinates->at(segment.v)); | ||||
| 
 | ||||
|         const float backward_edge_bearing = (forward_edge_bearing + 180) > 360 | ||||
|                                                 ? (forward_edge_bearing - 180) | ||||
|                                                 : (forward_edge_bearing + 180); | ||||
| 
 | ||||
|         const bool forward_bearing_valid = | ||||
|             bearing::CheckInBounds(forward_edge_bearing, filter_bearing, filter_bearing_range) && | ||||
|             segment.forward_edge_based_node_id != SPECIAL_NODEID; | ||||
|         const bool backward_bearing_valid = | ||||
|             bearing::CheckInBounds(backward_edge_bearing, filter_bearing, filter_bearing_range) && | ||||
|             segment.reverse_edge_based_node_id != SPECIAL_NODEID; | ||||
|         return std::make_pair(forward_bearing_valid, backward_bearing_valid); | ||||
|     } | ||||
| 
 | ||||
|     RTreeT &rtree; | ||||
|     const std::shared_ptr<CoordinateList> coordinates; | ||||
| }; | ||||
| 
 | ||||
| #endif | ||||
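
The bearing filters above delegate to bearing::CheckInBounds, whose implementation is not part of this diff. As a rough sketch of what such a range check does (an assumed illustration, not the project's actual code), a segment bearing passes when it lies within the requested range of the filter bearing, with wrap-around on the compass circle:

#include <cmath>

// Hypothetical stand-in for bearing::CheckInBounds: true when `value` lies
// within `range` degrees of `filter`, treating both as compass bearings on a
// 360-degree circle.
bool bearing_in_bounds(double value, double filter, double range)
{
    if (range >= 180.0)
        return true; // a half-circle range admits every bearing
    double diff = std::fmod(std::fabs(value - filter), 360.0);
    if (diff > 180.0)
        diff = 360.0 - diff;
    return diff <= range;
}

GeospatialQuery then marks the forward direction usable when the forward edge bearing passes this check and the forward edge-based node exists, and likewise for the reverse direction with the bearing flipped by 180 degrees, as in checkSegmentBearing above.
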
| @ -29,7 +29,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include "../typedefs.h" | ||||
| 
 | ||||
| #include "../contractor/speed_profile.hpp" | ||||
| #include "../extractor/speed_profile.hpp" | ||||
| #include "../data_structures/node_based_graph.hpp" | ||||
| 
 | ||||
| #include <memory> | ||||
|  | ||||
| @ -28,14 +28,13 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #ifndef OBJECT_ENCODER_HPP | ||||
| #define OBJECT_ENCODER_HPP | ||||
| 
 | ||||
| #include "../util/string_util.hpp" | ||||
| 
 | ||||
| #include <boost/assert.hpp> | ||||
| #include <boost/archive/iterators/base64_from_binary.hpp> | ||||
| #include <boost/archive/iterators/binary_from_base64.hpp> | ||||
| #include <boost/archive/iterators/transform_width.hpp> | ||||
| 
 | ||||
| #include <algorithm> | ||||
| #include <iterator> | ||||
| #include <string> | ||||
| #include <vector> | ||||
| 
 | ||||
| @ -66,8 +65,8 @@ struct ObjectEncoder | ||||
|         encoded.resize(sizeof(ObjectT)); | ||||
|         encoded.assign(base64_t(&data[0]), | ||||
|                        base64_t(&data[0] + (data.size() - number_of_padded_chars))); | ||||
|         replaceAll(encoded, "+", "-"); | ||||
|         replaceAll(encoded, "/", "_"); | ||||
|         std::replace(begin(encoded), end(encoded), '+', '-'); | ||||
|         std::replace(begin(encoded), end(encoded), '/', '_'); | ||||
|     } | ||||
| 
 | ||||
|     template <class ObjectT> static void DecodeFromBase64(const std::string &input, ObjectT &object) | ||||
| @ -75,9 +74,8 @@ struct ObjectEncoder | ||||
|         try | ||||
|         { | ||||
|             std::string encoded(input); | ||||
|             // replace "-" with "+" and "_" with "/"
 | ||||
|             replaceAll(encoded, "-", "+"); | ||||
|             replaceAll(encoded, "_", "/"); | ||||
|             std::replace(begin(encoded), end(encoded), '-', '+'); | ||||
|             std::replace(begin(encoded), end(encoded), '_', '/'); | ||||
| 
 | ||||
|             std::copy(binary_t(encoded.begin()), binary_t(encoded.begin() + encoded.length()), | ||||
|                       reinterpret_cast<char *>(&object)); | ||||
|  | ||||
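
The object_encoder change above swaps the project-local replaceAll string helper for std::replace from <algorithm> when applying the URL-safe base64 mapping ('+' <-> '-', '/' <-> '_'). The same idiom in isolation:

#include <algorithm>
#include <cassert>
#include <string>

int main()
{
    // Make a standard base64 string URL-safe by swapping two characters in place.
    std::string encoded = "a+b/c==";
    std::replace(encoded.begin(), encoded.end(), '+', '-');
    std::replace(encoded.begin(), encoded.end(), '/', '_');
    assert(encoded == "a-b_c==");

    // Decoding reverses the mapping before the string reaches the base64 decoder.
    std::replace(encoded.begin(), encoded.end(), '-', '+');
    std::replace(encoded.begin(), encoded.end(), '_', '/');
    assert(encoded == "a+b/c==");
    return 0;
}
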
| @ -73,6 +73,8 @@ std::vector<NodeID> BruteForceTrip(const NodeIDIterator start, | ||||
|                                    const std::size_t number_of_locations, | ||||
|                                    const DistTableWrapper<EdgeWeight> &dist_table) | ||||
| { | ||||
|     (void)number_of_locations; // unused
 | ||||
| 
 | ||||
|     const auto component_size = std::distance(start, end); | ||||
| 
 | ||||
|     std::vector<NodeID> perm(start, end); | ||||
| @ -103,4 +105,4 @@ std::vector<NodeID> BruteForceTrip(const NodeIDIterator start, | ||||
| 
 | ||||
| } // end namespace trip
 | ||||
| } // end namespace osrm
 | ||||
| #endif // TRIP_BRUTE_FORCE_HPP
 | ||||
| #endif // TRIP_BRUTE_FORCE_HPP
 | ||||
|  | ||||
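
BruteForceTrip above scores every permutation of a small component's node IDs against the distance table; the added (void)number_of_locations merely silences an unused-parameter warning. A self-contained sketch of that exhaustive strategy, with a plain cost matrix standing in for OSRM's DistTableWrapper (the names here are illustrative, not the project's):

#include <algorithm>
#include <cstddef>
#include <limits>
#include <vector>

// Exhaustive round-trip search over all permutations of `nodes`, scored with a
// dense cost matrix. Only feasible for very small inputs, which is how a
// brute-force path is typically restricted to tiny components.
std::vector<unsigned> brute_force_trip(std::vector<unsigned> nodes,
                                       const std::vector<std::vector<int>> &cost)
{
    std::sort(nodes.begin(), nodes.end());
    std::vector<unsigned> best = nodes;
    int best_cost = std::numeric_limits<int>::max();
    do
    {
        int trip_cost = 0;
        for (std::size_t i = 0; i < nodes.size(); ++i)
            trip_cost += cost[nodes[i]][nodes[(i + 1) % nodes.size()]];
        if (trip_cost < best_cost)
        {
            best_cost = trip_cost;
            best = nodes;
        }
    } while (std::next_permutation(nodes.begin(), nodes.end()));
    return best;
}
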
| @ -54,6 +54,7 @@ GetShortestRoundTrip(const NodeID new_loc, | ||||
|                      const std::size_t number_of_locations, | ||||
|                      std::vector<NodeID> &route) | ||||
| { | ||||
|     (void)number_of_locations; // unused
 | ||||
| 
 | ||||
|     auto min_trip_distance = INVALID_EDGE_WEIGHT; | ||||
|     NodeIDIter next_insert_point_candidate; | ||||
| @ -76,7 +77,13 @@ GetShortestRoundTrip(const NodeID new_loc, | ||||
| 
 | ||||
|         BOOST_ASSERT_MSG(dist_from != INVALID_EDGE_WEIGHT, "distance has invalid edge weight"); | ||||
|         BOOST_ASSERT_MSG(dist_to != INVALID_EDGE_WEIGHT, "distance has invalid edge weight"); | ||||
|         BOOST_ASSERT_MSG(trip_dist >= 0, "previous trip was not minimal. something's wrong"); | ||||
|         // This is not necessarily true:
 | ||||
|         // Lets say you have an edge (u, v) with duration 100. If you place a coordinate exactly in
 | ||||
|         // the middle of the segment yielding (u, v'), the adjusted duration will be 100 * 0.5 = 50.
 | ||||
|         // Now imagine two coordinates. One placed at 0.99 and one at 0.999. This means (u, v') now
 | ||||
|         // has a duration of 100 * 0.99 = 99, but (u, v'') also has a duration of 100 * 0.995 = 99.
 | ||||
|         // In which case (v', v'') has a duration of 0.
 | ||||
|         // BOOST_ASSERT_MSG(trip_dist >= 0, "previous trip was not minimal. something's wrong");
 | ||||
| 
 | ||||
|         // from all possible insertions to the current trip, choose the shortest of all insertions
 | ||||
|         if (trip_dist < min_trip_distance) | ||||
| @ -118,7 +125,7 @@ std::vector<NodeID> FindRoute(const std::size_t &number_of_locations, | ||||
|     for (std::size_t j = 2; j < component_size; ++j) | ||||
|     { | ||||
| 
 | ||||
|         auto farthest_distance = 0; | ||||
|         auto farthest_distance = std::numeric_limits<int>::min(); | ||||
|         auto next_node = -1; | ||||
|         NodeIDIter next_insert_point; | ||||
| 
 | ||||
|  | ||||
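
The comment block and the farthest_distance change above touch two pitfalls in the insertion heuristic: a phantom node splits an edge's integer duration by its position ratio, so two nearby snaps can round to the same value and leave a zero-length leg between them (which undermines the premise of the disabled trip_dist >= 0 assertion), and the farthest-distance search is now seeded with the smallest int rather than 0, presumably so a candidate is always selected. A short numeric illustration of the rounding effect, using the figures from the comment itself:

#include <iostream>

int main()
{
    // An edge (u, v) with duration 100, split by two phantom nodes placed at
    // ratios 0.99 and 0.995 along the segment (values as in the comment above).
    const int edge_duration = 100;
    const int to_first = static_cast<int>(edge_duration * 0.99);   // 99
    const int to_second = static_cast<int>(edge_duration * 0.995); // also 99 after truncation

    // The leg between the two phantom nodes collapses to zero, so reasoning that
    // assumes every sub-segment has a positive duration no longer holds.
    std::cout << to_second - to_first << '\n'; // prints 0
    return 0;
}
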
| @ -6,6 +6,8 @@ ECHO ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %~f0 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||||
| 
 | ||||
| SET PROJECT_DIR=%CD% | ||||
| ECHO PROJECT_DIR^: %PROJECT_DIR% | ||||
| ECHO NUMBER_OF_PROCESSORS^: %NUMBER_OF_PROCESSORS% | ||||
| ECHO cmake^: && cmake --version | ||||
| 
 | ||||
| ECHO activating VS command prompt ... | ||||
| SET PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH% | ||||
| @ -50,7 +52,7 @@ set TBB_ARCH_PLATFORM=intel64/vc14 | ||||
| 
 | ||||
| ECHO calling cmake .... | ||||
| cmake .. ^ | ||||
| -G "Visual Studio 14 Win64" ^ | ||||
| -G "Visual Studio 14 2015 Win64" ^ | ||||
| -DBOOST_ROOT=%BOOST_ROOT% ^ | ||||
| -DBoost_ADDITIONAL_VERSIONS=1.58 ^ | ||||
| -DBoost_USE_MULTITHREADED=ON ^ | ||||
|  | ||||
| @ -25,16 +25,16 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| */ | ||||
| 
 | ||||
| #include "../data_structures/original_edge_data.hpp" | ||||
| #include "../data_structures/query_node.hpp" | ||||
| #include "../data_structures/shared_memory_vector_wrapper.hpp" | ||||
| #include "../data_structures/static_rtree.hpp" | ||||
| #include "../util/boost_filesystem_2_fix.hpp" | ||||
| #include "../data_structures/edge_based_node.hpp" | ||||
| #include "../algorithms/geospatial_query.hpp" | ||||
| #include "../util/timing_util.hpp" | ||||
| 
 | ||||
| #include <osrm/coordinate.hpp> | ||||
| 
 | ||||
| #include <random> | ||||
| #include <iostream> | ||||
| 
 | ||||
| // Chosen by a fair W20 dice roll (this value is completely arbitrary)
 | ||||
| constexpr unsigned RANDOM_SEED = 13; | ||||
| @ -46,6 +46,7 @@ constexpr int32_t WORLD_MAX_LON = 180 * COORDINATE_PRECISION; | ||||
| using RTreeLeaf = EdgeBasedNode; | ||||
| using FixedPointCoordinateListPtr = std::shared_ptr<std::vector<FixedPointCoordinate>>; | ||||
| using BenchStaticRTree = StaticRTree<RTreeLeaf, ShM<FixedPointCoordinate, false>::vector, false>; | ||||
| using BenchQuery = GeospatialQuery<BenchStaticRTree>; | ||||
| 
 | ||||
| FixedPointCoordinateListPtr LoadCoordinates(const boost::filesystem::path &nodes_file) | ||||
| { | ||||
| @ -66,7 +67,28 @@ FixedPointCoordinateListPtr LoadCoordinates(const boost::filesystem::path &nodes | ||||
|     return coords; | ||||
| } | ||||
| 
 | ||||
| void Benchmark(BenchStaticRTree &rtree, unsigned num_queries) | ||||
| template <typename QueryT> | ||||
| void BenchmarkQuery(const std::vector<FixedPointCoordinate> &queries, | ||||
|                     const std::string& name, | ||||
|                     QueryT query) | ||||
| { | ||||
|     std::cout << "Running " << name << " with " << queries.size() << " coordinates: " << std::flush; | ||||
| 
 | ||||
|     TIMER_START(query); | ||||
|     for (const auto &q : queries) | ||||
|     { | ||||
|         auto result = query(q); | ||||
|     } | ||||
|     TIMER_STOP(query); | ||||
| 
 | ||||
|     std::cout << "Took " << TIMER_SEC(query) << " seconds " | ||||
|               << "(" << TIMER_MSEC(query) << "ms" | ||||
|               << ")  ->  " << TIMER_MSEC(query) / queries.size() << " ms/query " | ||||
|               << "(" << TIMER_MSEC(query) << "ms" | ||||
|               << ")" << std::endl; | ||||
| } | ||||
| 
 | ||||
| void Benchmark(BenchStaticRTree &rtree, BenchQuery &geo_query, unsigned num_queries) | ||||
| { | ||||
|     std::mt19937 mt_rand(RANDOM_SEED); | ||||
|     std::uniform_int_distribution<> lat_udist(WORLD_MIN_LAT, WORLD_MAX_LAT); | ||||
| @ -74,91 +96,36 @@ void Benchmark(BenchStaticRTree &rtree, unsigned num_queries) | ||||
|     std::vector<FixedPointCoordinate> queries; | ||||
|     for (unsigned i = 0; i < num_queries; i++) | ||||
|     { | ||||
|         queries.emplace_back(FixedPointCoordinate(lat_udist(mt_rand), lon_udist(mt_rand))); | ||||
|         queries.emplace_back(lat_udist(mt_rand), lon_udist(mt_rand)); | ||||
|     } | ||||
| 
 | ||||
|     { | ||||
|         const unsigned num_results = 5; | ||||
|         std::cout << "#### IncrementalFindPhantomNodeForCoordinate : " << num_results | ||||
|                   << " phantom nodes" | ||||
|                   << "\n"; | ||||
|     BenchmarkQuery(queries, "raw RTree queries (1 result)", [&rtree](const FixedPointCoordinate &q) | ||||
|                    { | ||||
|                        return rtree.Nearest(q, 1); | ||||
|                    }); | ||||
|     BenchmarkQuery(queries, "raw RTree queries (10 results)", | ||||
|                    [&rtree](const FixedPointCoordinate &q) | ||||
|                    { | ||||
|                        return rtree.Nearest(q, 10); | ||||
|                    }); | ||||
| 
 | ||||
|         TIMER_START(query_phantom); | ||||
|         std::vector<PhantomNode> phantom_node_vector; | ||||
|         for (const auto &q : queries) | ||||
|         { | ||||
|             phantom_node_vector.clear(); | ||||
|             rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 3, num_results); | ||||
|             phantom_node_vector.clear(); | ||||
|             rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 17, num_results); | ||||
|         } | ||||
|         TIMER_STOP(query_phantom); | ||||
| 
 | ||||
|         std::cout << "Took " << TIMER_MSEC(query_phantom) << " msec for " << num_queries | ||||
|                   << " queries." | ||||
|                   << "\n"; | ||||
|         std::cout << TIMER_MSEC(query_phantom) / ((double)num_queries) << " msec/query." | ||||
|                   << "\n"; | ||||
| 
 | ||||
|         std::cout << "#### LocateClosestEndPointForCoordinate" | ||||
|                   << "\n"; | ||||
|     } | ||||
| 
 | ||||
|     TIMER_START(query_endpoint); | ||||
|     FixedPointCoordinate result; | ||||
|     for (const auto &q : queries) | ||||
|     { | ||||
|         rtree.LocateClosestEndPointForCoordinate(q, result, 3); | ||||
|     } | ||||
|     TIMER_STOP(query_endpoint); | ||||
| 
 | ||||
|     std::cout << "Took " << TIMER_MSEC(query_endpoint) << " msec for " << num_queries << " queries." | ||||
|               << "\n"; | ||||
|     std::cout << TIMER_MSEC(query_endpoint) / ((double)num_queries) << " msec/query." | ||||
|               << "\n"; | ||||
| 
 | ||||
|     std::cout << "#### FindPhantomNodeForCoordinate" | ||||
|               << "\n"; | ||||
| 
 | ||||
|     TIMER_START(query_node); | ||||
|     for (const auto &q : queries) | ||||
|     { | ||||
|         PhantomNode phantom; | ||||
|         rtree.FindPhantomNodeForCoordinate(q, phantom, 3); | ||||
|     } | ||||
|     TIMER_STOP(query_node); | ||||
| 
 | ||||
|     std::cout << "Took " << TIMER_MSEC(query_node) << " msec for " << num_queries << " queries." | ||||
|               << "\n"; | ||||
|     std::cout << TIMER_MSEC(query_node) / ((double)num_queries) << " msec/query." | ||||
|               << "\n"; | ||||
| 
 | ||||
|     { | ||||
|         const unsigned num_results = 1; | ||||
|         std::cout << "#### IncrementalFindPhantomNodeForCoordinate : " << num_results | ||||
|                   << " phantom nodes" | ||||
|                   << "\n"; | ||||
| 
 | ||||
|         TIMER_START(query_phantom); | ||||
|         std::vector<PhantomNode> phantom_node_vector; | ||||
|         for (const auto &q : queries) | ||||
|         { | ||||
|             phantom_node_vector.clear(); | ||||
|             rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 3, num_results); | ||||
|             phantom_node_vector.clear(); | ||||
|             rtree.IncrementalFindPhantomNodeForCoordinate(q, phantom_node_vector, 17, num_results); | ||||
|         } | ||||
|         TIMER_STOP(query_phantom); | ||||
| 
 | ||||
|         std::cout << "Took " << TIMER_MSEC(query_phantom) << " msec for " << num_queries | ||||
|                   << " queries." | ||||
|                   << "\n"; | ||||
|         std::cout << TIMER_MSEC(query_phantom) / ((double)num_queries) << " msec/query." | ||||
|                   << "\n"; | ||||
| 
 | ||||
|         std::cout << "#### LocateClosestEndPointForCoordinate" | ||||
|                   << "\n"; | ||||
|     } | ||||
|     BenchmarkQuery(queries, "big component alternative queries", | ||||
|                    [&geo_query](const FixedPointCoordinate &q) | ||||
|                    { | ||||
|                        return geo_query.NearestPhantomNodeWithAlternativeFromBigComponent(q); | ||||
|                    }); | ||||
|     BenchmarkQuery(queries, "max distance 1000", [&geo_query](const FixedPointCoordinate &q) | ||||
|                    { | ||||
|                        return geo_query.NearestPhantomNodesInRange(q, 1000); | ||||
|                    }); | ||||
|     BenchmarkQuery(queries, "PhantomNode query (1 result)", [&geo_query](const FixedPointCoordinate &q) | ||||
|                    { | ||||
|                        return geo_query.NearestPhantomNodes(q, 1); | ||||
|                    }); | ||||
|     BenchmarkQuery(queries, "PhantomNode query (10 result)", [&geo_query](const FixedPointCoordinate &q) | ||||
|                    { | ||||
|                        return geo_query.NearestPhantomNodes(q, 10); | ||||
|                    }); | ||||
| } | ||||
| 
 | ||||
| int main(int argc, char **argv) | ||||
| @ -177,8 +144,9 @@ int main(int argc, char **argv) | ||||
|     auto coords = LoadCoordinates(nodesPath); | ||||
| 
 | ||||
|     BenchStaticRTree rtree(ramPath, filePath, coords); | ||||
|     BenchQuery query(rtree, coords); | ||||
| 
 | ||||
|     Benchmark(rtree, 10000); | ||||
|     Benchmark(rtree, query, 10000); | ||||
| 
 | ||||
|     return 0; | ||||
| } | ||||
|  | ||||
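The benchmark rewrite above collapses several copy-pasted timing blocks into a single BenchmarkQuery template that takes the query as a lambda. Below is a self-contained sketch of the same pattern using <chrono>; the TIMER_* macros and the OSRM query types are not reproduced here, so every name in it is illustrative.

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <vector>

    // Run `query` once per input and report total and per-call time.
    template <typename Input, typename QueryT>
    void BenchmarkQuery(const std::vector<Input> &inputs, const std::string &name, QueryT query)
    {
        const auto start = std::chrono::steady_clock::now();
        for (const auto &input : inputs)
        {
            auto result = query(input);
            (void)result; // the result itself is intentionally unused
        }
        const auto stop = std::chrono::steady_clock::now();
        const double ms = std::chrono::duration<double, std::milli>(stop - start).count();
        std::cout << name << ": " << ms << " ms total, " << ms / inputs.size() << " ms/query\n";
    }

A call site then mirrors the lambdas in the diff, e.g. BenchmarkQuery(queries, "nearest (1 result)", [&rtree](const Coordinate &q) { return rtree.Nearest(q, 1); });.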
| @ -11,7 +11,7 @@ SET CONFIGURATION=Release | ||||
| FOR /F "tokens=*" %%i in ('git rev-parse --abbrev-ref HEAD') do SET APPVEYOR_REPO_BRANCH=%%i | ||||
| ECHO APPVEYOR_REPO_BRANCH^: %APPVEYOR_REPO_BRANCH% | ||||
| 
 | ||||
| SET PATH=C:\mb\windows-builds-64\tmp-bin\cmake-3.1.0-win32-x86\bin;%PATH% | ||||
| SET PATH=C:\mb\windows-builds-64\tmp-bin\cmake-3.4.0-win32-x86\bin;%PATH% | ||||
| SET PATH=C:\Program Files\7-Zip;%PATH% | ||||
| 
 | ||||
| powershell Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted -Force | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| set(OLDFILE ${SOURCE_DIR}/util/fingerprint_impl.hpp) | ||||
| set(OLDFILE ${OUTPUT_DIR}/util/fingerprint_impl.hpp) | ||||
| set(NEWFILE ${OLDFILE}.tmp) | ||||
| set(INFILE ${OLDFILE}.in) | ||||
| set(INFILE ${SOURCE_DIR}/util/fingerprint_impl.hpp.in) | ||||
| file(MD5 ${SOURCE_DIR}/prepare.cpp MD5PREPARE) | ||||
| file(MD5 ${SOURCE_DIR}/data_structures/static_rtree.hpp MD5RTREE) | ||||
| file(MD5 ${SOURCE_DIR}/util/graph_loader.hpp MD5GRAPH) | ||||
| @ -13,7 +13,7 @@ file(MD5 ${NEWFILE} MD5NEW) | ||||
| if (EXISTS ${OLDFILE}) | ||||
|     file(MD5 ${OLDFILE} MD5OLD) | ||||
|     if(NOT ${MD5NEW} STREQUAL ${MD5OLD}) | ||||
| 	    file(REMOVE_RECURSE ${OLDFILE}) | ||||
|         file(REMOVE_RECURSE ${OLDFILE}) | ||||
|         file(RENAME ${NEWFILE} ${OLDFILE}) | ||||
|     else() | ||||
|         file(REMOVE_RECURSE ${NEWFILE}) | ||||
|  | ||||
| @ -1,123 +0,0 @@ | ||||
| # - Returns a version string from Git | ||||
| # | ||||
| # These functions force a re-configure on each git commit so that you can | ||||
| # trust the values of the variables in your build system. | ||||
| # | ||||
| #  get_git_head_revision(<refspecvar> <hashvar> [<additional arguments to git describe> ...]) | ||||
| # | ||||
| # Returns the refspec and sha hash of the current head revision | ||||
| # | ||||
| #  git_describe(<var> [<additional arguments to git describe> ...]) | ||||
| # | ||||
| # Returns the results of git describe on the source tree, adjusting | ||||
| # the output so that it tests false if an error occurs. | ||||
| # | ||||
| #  git_get_exact_tag(<var> [<additional arguments to git describe> ...]) | ||||
| # | ||||
| # Returns the results of git describe --exact-match on the source tree, | ||||
| # adjusting the output so that it tests false if there was no exact | ||||
| # matching tag. | ||||
| # | ||||
| # Requires CMake 2.6 or newer (uses the 'function' command) | ||||
| # | ||||
| # Original Author: | ||||
| # 2009-2010 Ryan Pavlik <rpavlik@iastate.edu> <abiryan@ryand.net> | ||||
| # http://academic.cleardefinition.com | ||||
| # Iowa State University HCI Graduate Program/VRAC | ||||
| # | ||||
| # Copyright Iowa State University 2009-2010. | ||||
| # Distributed under the Boost Software License, Version 1.0. | ||||
| # (See accompanying file LICENSE_1_0.txt or copy at | ||||
| # http://www.boost.org/LICENSE_1_0.txt) | ||||
| 
 | ||||
| if(__get_git_revision_description) | ||||
| 	return() | ||||
| endif() | ||||
| set(__get_git_revision_description YES) | ||||
| 
 | ||||
| # We must run the following at "include" time, not at function call time, | ||||
| # to find the path to this module rather than the path to a calling list file | ||||
| get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH) | ||||
| 
 | ||||
| function(get_git_head_revision _refspecvar _hashvar) | ||||
| 	set(GIT_PARENT_DIR "${CMAKE_SOURCE_DIR}") | ||||
| 	set(GIT_DIR "${GIT_PARENT_DIR}/.git") | ||||
| 	while(NOT EXISTS "${GIT_DIR}")	# .git dir not found, search parent directories | ||||
| 		set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}") | ||||
| 		get_filename_component(GIT_PARENT_DIR ${GIT_PARENT_DIR} PATH) | ||||
| 		if(GIT_PARENT_DIR STREQUAL GIT_PREVIOUS_PARENT) | ||||
| 			# We have reached the root directory, we are not in git | ||||
| 			set(${_refspecvar} "GITDIR-NOTFOUND" PARENT_SCOPE) | ||||
| 			set(${_hashvar} "GITDIR-NOTFOUND" PARENT_SCOPE) | ||||
| 			return() | ||||
| 		endif() | ||||
| 		set(GIT_DIR "${GIT_PARENT_DIR}/.git") | ||||
| 	endwhile() | ||||
| 	set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data") | ||||
| 	if(NOT EXISTS "${GIT_DATA}") | ||||
| 		file(MAKE_DIRECTORY "${GIT_DATA}") | ||||
| 	endif() | ||||
| 
 | ||||
| 	if(NOT EXISTS "${GIT_DIR}/HEAD") | ||||
| 		return() | ||||
| 	endif() | ||||
| 	set(HEAD_FILE "${GIT_DATA}/HEAD") | ||||
| 	configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY) | ||||
| 
 | ||||
| 	configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in" | ||||
| 		"${GIT_DATA}/grabRef.cmake" | ||||
| 		@ONLY) | ||||
| 	include("${GIT_DATA}/grabRef.cmake") | ||||
| 
 | ||||
| 	set(${_refspecvar} "${HEAD_REF}" PARENT_SCOPE) | ||||
| 	set(${_hashvar} "${HEAD_HASH}" PARENT_SCOPE) | ||||
| endfunction() | ||||
| 
 | ||||
| function(git_describe _var) | ||||
| 	if(NOT GIT_FOUND) | ||||
| 		find_package(Git QUIET) | ||||
| 	endif() | ||||
| 	get_git_head_revision(refspec hash) | ||||
| 	if(NOT GIT_FOUND) | ||||
| 		set(${_var} "GIT-NOTFOUND" PARENT_SCOPE) | ||||
| 		return() | ||||
| 	endif() | ||||
| 	if(NOT hash) | ||||
| 		set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE) | ||||
| 		return() | ||||
| 	endif() | ||||
| 
 | ||||
| 	# TODO sanitize | ||||
| 	#if((${ARGN}" MATCHES "&&") OR | ||||
| 	#	(ARGN MATCHES "||") OR | ||||
| 	#	(ARGN MATCHES "\\;")) | ||||
| 	#	message("Please report the following error to the project!") | ||||
| 	#	message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}") | ||||
| 	#endif() | ||||
| 
 | ||||
| 	#message(STATUS "Arguments to execute_process: ${ARGN}") | ||||
| 
 | ||||
| 	execute_process(COMMAND | ||||
| 		"${GIT_EXECUTABLE}" | ||||
| 		describe | ||||
| 		${hash} | ||||
| 		${ARGN} | ||||
| 		WORKING_DIRECTORY | ||||
| 		"${CMAKE_SOURCE_DIR}" | ||||
| 		RESULT_VARIABLE | ||||
| 		res | ||||
| 		OUTPUT_VARIABLE | ||||
| 		out | ||||
| 		ERROR_QUIET | ||||
| 		OUTPUT_STRIP_TRAILING_WHITESPACE) | ||||
| 	if(NOT res EQUAL 0) | ||||
| 		set(out "${out}-${res}-NOTFOUND") | ||||
| 	endif() | ||||
| 
 | ||||
| 	set(${_var} "${out}" PARENT_SCOPE) | ||||
| endfunction() | ||||
| 
 | ||||
| function(git_get_exact_tag _var) | ||||
| 	git_describe(out --exact-match ${ARGN}) | ||||
| 	set(${_var} "${out}" PARENT_SCOPE) | ||||
| endfunction() | ||||
| @ -1,38 +0,0 @@ | ||||
| #  | ||||
| # Internal file for GetGitRevisionDescription.cmake | ||||
| # | ||||
| # Requires CMake 2.6 or newer (uses the 'function' command) | ||||
| # | ||||
| # Original Author: | ||||
| # 2009-2010 Ryan Pavlik <rpavlik@iastate.edu> <abiryan@ryand.net> | ||||
| # http://academic.cleardefinition.com | ||||
| # Iowa State University HCI Graduate Program/VRAC | ||||
| # | ||||
| # Copyright Iowa State University 2009-2010. | ||||
| # Distributed under the Boost Software License, Version 1.0. | ||||
| # (See accompanying file LICENSE_1_0.txt or copy at | ||||
| # http://www.boost.org/LICENSE_1_0.txt) | ||||
| 
 | ||||
| set(HEAD_HASH) | ||||
| 
 | ||||
| file(READ "@HEAD_FILE@" HEAD_CONTENTS LIMIT 1024) | ||||
| 
 | ||||
| string(STRIP "${HEAD_CONTENTS}" HEAD_CONTENTS) | ||||
| if(HEAD_CONTENTS MATCHES "ref") | ||||
| 	# named branch | ||||
| 	string(REPLACE "ref: " "" HEAD_REF "${HEAD_CONTENTS}") | ||||
| 	if(EXISTS "@GIT_DIR@/${HEAD_REF}") | ||||
| 		configure_file("@GIT_DIR@/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY) | ||||
| 	elseif(EXISTS "@GIT_DIR@/logs/${HEAD_REF}") | ||||
| 		configure_file("@GIT_DIR@/logs/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY) | ||||
| 		set(HEAD_HASH "${HEAD_REF}") | ||||
| 	endif() | ||||
| else() | ||||
| 	# detached HEAD | ||||
| 	configure_file("@GIT_DIR@/HEAD" "@GIT_DATA@/head-ref" COPYONLY) | ||||
| endif() | ||||
| 
 | ||||
| if(NOT HEAD_HASH) | ||||
| 	file(READ "@GIT_DATA@/head-ref" HEAD_HASH LIMIT 1024) | ||||
| 	string(STRIP "${HEAD_HASH}" HEAD_HASH) | ||||
| endif() | ||||
| @ -1,7 +1,7 @@ | ||||
| INCLUDE (CheckCXXSourceCompiles) | ||||
| unset(LUABIND_WORKS CACHE) | ||||
| unset(LUABIND51_WORKS CACHE) | ||||
| set (LUABIND_CHECK_SRC "#include  \"lua.h\"\n#include <luabind/luabind.hpp>\n int main() { lua_State *myLuaState = luaL_newstate(); luabind::open(myLuaState);  return 0;}") | ||||
| set (LUABIND_CHECK_SRC "extern \"C\" {\n#include \"lua.h\"\n#include \"lauxlib.h\"\n}\n#include <luabind/open.hpp>\nint main() { lua_State *x = luaL_newstate(); luabind::open(x); }") | ||||
| set (CMAKE_TRY_COMPILE_CONFIGURATION ${CMAKE_BUILD_TYPE}) | ||||
| set (CMAKE_REQUIRED_INCLUDES "${Boost_INCLUDE_DIR};${LUABIND_INCLUDE_DIR};${LUA_INCLUDE_DIR}") | ||||
| set (CMAKE_REQUIRED_LIBRARIES "${LUABIND_LIBRARY};${LUA_LIBRARY}") | ||||
|  | ||||
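The new LUABIND_CHECK_SRC wraps Lua's C headers in extern "C" before pulling in luabind/open.hpp. A standalone illustration of that include pattern follows; the program itself is a sketch, while lua.h, lauxlib.h, luaL_newstate, lua_close and luabind::open are the real APIs the check exercises.

    // Lua is a C library; without the extern "C" block a C++ compiler would
    // apply C++ name mangling to the declarations in lua.h and linking would fail.
    extern "C" {
    #include "lua.h"
    #include "lauxlib.h"
    }
    #include <luabind/open.hpp>

    int main()
    {
        lua_State *state = luaL_newstate(); // fresh Lua state
        luabind::open(state);               // register luabind's internals in it
        lua_close(state);
        return 0;
    }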
| @ -1,11 +1,11 @@ | ||||
| prefix=@CMAKE_INSTALL_PREFIX@ | ||||
| includedir=${prefix}/include/osrm | ||||
| includedir=${prefix}/include | ||||
| libdir=${prefix}/lib | ||||
| 
 | ||||
| Name: libOSRM | ||||
| Description: Project OSRM library | ||||
| Version: @GIT_DESCRIPTION@ | ||||
| Version: v@OSRM_VERSION_MAJOR@.@OSRM_VERSION_MINOR@.@OSRM_VERSION_PATCH@ | ||||
| Requires: | ||||
| Libs: -L${libdir} -lOSRM | ||||
| Libs.private: @BOOST_LIBRARY_LISTING@ | ||||
| Libs.private: @BOOST_LIBRARY_LISTING@ @TBB_LIBRARY_LISTING@ | ||||
| Cflags: -I${includedir} | ||||
|  | ||||
| @ -50,6 +50,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include <algorithm> | ||||
| #include <limits> | ||||
| #include <memory> | ||||
| #include <vector> | ||||
| 
 | ||||
| class Contractor | ||||
| @ -157,6 +158,15 @@ class Contractor | ||||
| 
 | ||||
|   public: | ||||
|     template <class ContainerT> Contractor(int nodes, ContainerT &input_edge_list) | ||||
|       : Contractor(nodes, input_edge_list, {}, {}) | ||||
|     { | ||||
|     } | ||||
| 
 | ||||
|     template <class ContainerT> | ||||
|     Contractor(int nodes, | ||||
|                ContainerT &input_edge_list, | ||||
|                std::vector<float> &&node_levels_) | ||||
|         : node_levels(std::move(node_levels_)) | ||||
|     { | ||||
|         std::vector<ContractorEdge> edges; | ||||
|         edges.reserve(input_edge_list.size() * 2); | ||||
| @ -172,7 +182,8 @@ class Contractor | ||||
|                 SimpleLogger().Write(logWARNING) | ||||
|                     << "Edge weight large -> " | ||||
|                     << static_cast<unsigned int>(std::max(diter->weight, 1)) << " : " | ||||
|                     << static_cast<unsigned int>(diter->source) << " -> " << static_cast<unsigned int>(diter->target); | ||||
|                     << static_cast<unsigned int>(diter->source) << " -> " | ||||
|                     << static_cast<unsigned int>(diter->target); | ||||
|             } | ||||
| #endif | ||||
|             edges.emplace_back(diter->source, diter->target, | ||||
| @ -284,7 +295,7 @@ class Contractor | ||||
| 
 | ||||
|     ~Contractor() {} | ||||
| 
 | ||||
|     void Run( double core_factor = 1.0 ) | ||||
|     void Run(double core_factor = 1.0) | ||||
|     { | ||||
|         // for the preparation we can use a big grain size, which is much faster (probably due to caching)
 | ||||
|         constexpr size_t InitGrainSize = 100000; | ||||
| @ -303,14 +314,14 @@ class Contractor | ||||
|         ThreadDataContainer thread_data_list(number_of_nodes); | ||||
| 
 | ||||
|         NodeID number_of_contracted_nodes = 0; | ||||
|         std::vector<RemainingNodeData> remaining_nodes(number_of_nodes); | ||||
|         std::vector<float> node_priorities(number_of_nodes); | ||||
|         std::vector<NodePriorityData> node_data(number_of_nodes); | ||||
|         std::vector<NodePriorityData> node_data; | ||||
|         std::vector<float> node_priorities; | ||||
|         is_core_node.resize(number_of_nodes, false); | ||||
| 
 | ||||
|         std::vector<RemainingNodeData> remaining_nodes(number_of_nodes); | ||||
|         // initialize priorities in parallel
 | ||||
|         tbb::parallel_for(tbb::blocked_range<int>(0, number_of_nodes, InitGrainSize), | ||||
|                           [&remaining_nodes](const tbb::blocked_range<int> &range) | ||||
|                           [this, &remaining_nodes](const tbb::blocked_range<int> &range) | ||||
|                           { | ||||
|                               for (int x = range.begin(), end = range.end(); x != end; ++x) | ||||
|                               { | ||||
| @ -318,25 +329,44 @@ class Contractor | ||||
|                               } | ||||
|                           }); | ||||
| 
 | ||||
|         std::cout << "initializing elimination PQ ..." << std::flush; | ||||
|         tbb::parallel_for(tbb::blocked_range<int>(0, number_of_nodes, PQGrainSize), | ||||
|                           [this, &node_priorities, &node_data, &thread_data_list]( | ||||
|                               const tbb::blocked_range<int> &range) | ||||
|                           { | ||||
|                               ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
|                               for (int x = range.begin(), end = range.end(); x != end; ++x) | ||||
|                               { | ||||
|                                   node_priorities[x] = | ||||
|                                       this->EvaluateNodePriority(data, &node_data[x], x); | ||||
|                               } | ||||
|                           }); | ||||
|         std::cout << "ok" << std::endl << "preprocessing " << number_of_nodes << " nodes ..." | ||||
|                   << std::flush; | ||||
| 
 | ||||
|         bool flushed_contractor = false; | ||||
|         while (number_of_nodes > 2 && number_of_contracted_nodes < static_cast<NodeID>(number_of_nodes * core_factor) ) | ||||
|         bool use_cached_node_priorities = !node_levels.empty(); | ||||
|         if (use_cached_node_priorities) | ||||
|         { | ||||
|             if (!flushed_contractor && (number_of_contracted_nodes > static_cast<NodeID>(number_of_nodes * 0.65 * core_factor))) | ||||
|             std::cout << "using cached node priorities ..." << std::flush; | ||||
|             node_priorities.swap(node_levels); | ||||
|             std::cout << "ok" << std::endl; | ||||
|         } | ||||
|         else | ||||
|         { | ||||
|             node_data.resize(number_of_nodes); | ||||
|             node_priorities.resize(number_of_nodes); | ||||
|             node_levels.resize(number_of_nodes); | ||||
| 
 | ||||
|             std::cout << "initializing elimination PQ ..." << std::flush; | ||||
|             tbb::parallel_for(tbb::blocked_range<int>(0, number_of_nodes, PQGrainSize), | ||||
|                               [this, &node_priorities, &node_data, &thread_data_list]( | ||||
|                                   const tbb::blocked_range<int> &range) | ||||
|                               { | ||||
|                                   ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
|                                   for (int x = range.begin(), end = range.end(); x != end; ++x) | ||||
|                                   { | ||||
|                                       node_priorities[x] = | ||||
|                                           this->EvaluateNodePriority(data, &node_data[x], x); | ||||
|                                   } | ||||
|                               }); | ||||
|             std::cout << "ok" << std::endl; | ||||
|         } | ||||
|         BOOST_ASSERT(node_priorities.size() == number_of_nodes); | ||||
| 
 | ||||
|         std::cout << "preprocessing " << number_of_nodes << " nodes ..." << std::flush; | ||||
| 
 | ||||
|         unsigned current_level = 0; | ||||
|         bool flushed_contractor = false; | ||||
|         while (number_of_nodes > 2 && | ||||
|                number_of_contracted_nodes < static_cast<NodeID>(number_of_nodes * core_factor)) | ||||
|         { | ||||
|             if (!flushed_contractor && (number_of_contracted_nodes > | ||||
|                                         static_cast<NodeID>(number_of_nodes * 0.65 * core_factor))) | ||||
|             { | ||||
|                 DeallocatingVector<ContractorEdge> new_edge_set; // this one is not explicitly
 | ||||
|                                                                  // cleared since it goes out of
 | ||||
| @ -355,28 +385,32 @@ class Contractor | ||||
|                 // remaining graph
 | ||||
|                 std::vector<NodeID> new_node_id_from_orig_id_map(number_of_nodes, UINT_MAX); | ||||
| 
 | ||||
|                 // build forward and backward renumbering map and remap ids in remaining_nodes and
 | ||||
|                 // Priorities.
 | ||||
|                 for (const auto new_node_id : osrm::irange<std::size_t>(0, remaining_nodes.size())) | ||||
|                 { | ||||
|                     auto& node = remaining_nodes[new_node_id]; | ||||
|                     BOOST_ASSERT(node_priorities.size() > node.id); | ||||
|                     new_node_priority[new_node_id] = node_priorities[node.id]; | ||||
|                 } | ||||
| 
 | ||||
|                 // build forward and backward renumbering map and remap ids in remaining_nodes
 | ||||
|                 for (const auto new_node_id : osrm::irange<std::size_t>(0, remaining_nodes.size())) | ||||
|                 { | ||||
|                     auto& node = remaining_nodes[new_node_id]; | ||||
|                     // create renumbering maps in both directions
 | ||||
|                     orig_node_id_from_new_node_id_map[new_node_id] = remaining_nodes[new_node_id].id; | ||||
|                     new_node_id_from_orig_id_map[remaining_nodes[new_node_id].id] = new_node_id; | ||||
|                     new_node_priority[new_node_id] = | ||||
|                         node_priorities[remaining_nodes[new_node_id].id]; | ||||
|                     remaining_nodes[new_node_id].id = new_node_id; | ||||
|                     orig_node_id_from_new_node_id_map[new_node_id] = node.id; | ||||
|                     new_node_id_from_orig_id_map[node.id] = new_node_id; | ||||
|                     node.id = new_node_id; | ||||
|                 } | ||||
|                 // walk over all nodes
 | ||||
|                 for (const auto i : | ||||
|                      osrm::irange<std::size_t>(0, contractor_graph->GetNumberOfNodes())) | ||||
|                 for (const auto source : | ||||
|                      osrm::irange<NodeID>(0, contractor_graph->GetNumberOfNodes())) | ||||
|                 { | ||||
|                     const NodeID source = i; | ||||
|                     for (auto current_edge : contractor_graph->GetAdjacentEdgeRange(source)) | ||||
|                     { | ||||
|                         ContractorGraph::EdgeData &data = | ||||
|                             contractor_graph->GetEdgeData(current_edge); | ||||
|                         const NodeID target = contractor_graph->GetTarget(current_edge); | ||||
|                         if (SPECIAL_NODEID == new_node_id_from_orig_id_map[i]) | ||||
|                         if (SPECIAL_NODEID == new_node_id_from_orig_id_map[source]) | ||||
|                         { | ||||
|                             external_edge_list.push_back({source, target, data}); | ||||
|                         } | ||||
| @ -411,7 +445,7 @@ class Contractor | ||||
|                 contractor_graph.reset(); | ||||
| 
 | ||||
|                 // create new graph
 | ||||
|                 std::sort(new_edge_set.begin(), new_edge_set.end()); | ||||
|                 tbb::parallel_sort(new_edge_set.begin(), new_edge_set.end()); | ||||
|                 contractor_graph = | ||||
|                     std::make_shared<ContractorGraph>(remaining_nodes.size(), new_edge_set); | ||||
| 
 | ||||
| @ -423,14 +457,13 @@ class Contractor | ||||
|                 thread_data_list.number_of_nodes = contractor_graph->GetNumberOfNodes(); | ||||
|             } | ||||
| 
 | ||||
|             const int last = (int)remaining_nodes.size(); | ||||
|             tbb::parallel_for(tbb::blocked_range<int>(0, last, IndependentGrainSize), | ||||
|             tbb::parallel_for(tbb::blocked_range<std::size_t>(0, remaining_nodes.size(), IndependentGrainSize), | ||||
|                               [this, &node_priorities, &remaining_nodes, &thread_data_list]( | ||||
|                                   const tbb::blocked_range<int> &range) | ||||
|                                   const tbb::blocked_range<std::size_t> &range) | ||||
|                               { | ||||
|                                   ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
|                                   // determine independent node set
 | ||||
|                                   for (int i = range.begin(), end = range.end(); i != end; ++i) | ||||
|                                   for (auto i = range.begin(), end = range.end(); i != end; ++i) | ||||
|                                   { | ||||
|                                       const NodeID node = remaining_nodes[i].id; | ||||
|                                       remaining_nodes[i].is_independent = | ||||
| @ -438,17 +471,45 @@ class Contractor | ||||
|                                   } | ||||
|                               }); | ||||
| 
 | ||||
|             const auto first = stable_partition(remaining_nodes.begin(), remaining_nodes.end(), | ||||
|             // partition the still-dependent nodes to the front of the sequence; the independent nodes end up at the back
 | ||||
|             const auto begin_independent_nodes = stable_partition(remaining_nodes.begin(), remaining_nodes.end(), | ||||
|                                                 [](RemainingNodeData node_data) | ||||
|                                                 { | ||||
|                                                     return !node_data.is_independent; | ||||
|                                                 }); | ||||
|             const int first_independent_node = static_cast<int>(first - remaining_nodes.begin()); | ||||
|             auto begin_independent_nodes_idx = std::distance(remaining_nodes.begin(), begin_independent_nodes); | ||||
|             auto end_independent_nodes_idx = remaining_nodes.size(); | ||||
| 
 | ||||
|             if (!use_cached_node_priorities) | ||||
|             { | ||||
|                 // write out contraction level
 | ||||
|                 tbb::parallel_for( | ||||
|                     tbb::blocked_range<std::size_t>(begin_independent_nodes_idx, end_independent_nodes_idx, ContractGrainSize), | ||||
|                     [this, remaining_nodes, flushed_contractor, current_level](const tbb::blocked_range<std::size_t> &range) | ||||
|                     { | ||||
|                         if (flushed_contractor) | ||||
|                         { | ||||
|                             for (auto position = range.begin(), end = range.end(); position != end; ++position) | ||||
|                             { | ||||
|                                 const NodeID x = remaining_nodes[position].id; | ||||
|                                 node_levels[orig_node_id_from_new_node_id_map[x]] = current_level; | ||||
|                             } | ||||
|                         } | ||||
|                         else | ||||
|                         { | ||||
|                             for (auto position = range.begin(), end = range.end(); position != end; ++position) | ||||
|                             { | ||||
|                                 const NodeID x = remaining_nodes[position].id; | ||||
|                                 node_levels[x] = current_level; | ||||
|                             } | ||||
|                         } | ||||
|                     }); | ||||
|             } | ||||
| 
 | ||||
|             // contract independent nodes
 | ||||
|             tbb::parallel_for( | ||||
|                 tbb::blocked_range<int>(first_independent_node, last, ContractGrainSize), | ||||
|                 [this, &remaining_nodes, &thread_data_list](const tbb::blocked_range<int> &range) | ||||
|                 tbb::blocked_range<std::size_t>(begin_independent_nodes_idx, end_independent_nodes_idx, ContractGrainSize), | ||||
|                 [this, &remaining_nodes, &thread_data_list](const tbb::blocked_range<std::size_t> &range) | ||||
|                 { | ||||
|                     ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
|                     for (auto position = range.begin(), end = range.end(); position != end; ++position) | ||||
| @ -457,16 +518,9 @@ class Contractor | ||||
|                         this->ContractNode<false>(data, x); | ||||
|                     } | ||||
|                 }); | ||||
|             // make sure we really sort each block
 | ||||
| 
 | ||||
|             tbb::parallel_for( | ||||
|                 thread_data_list.data.range(), | ||||
|                 [&](const ThreadDataContainer::EnumerableThreadData::range_type &range) | ||||
|                 { | ||||
|                     for (auto &data : range) | ||||
|                         std::sort(data->inserted_edges.begin(), data->inserted_edges.end()); | ||||
|                 }); | ||||
|             tbb::parallel_for( | ||||
|                 tbb::blocked_range<int>(first_independent_node, last, DeleteGrainSize), | ||||
|                 tbb::blocked_range<int>(begin_independent_nodes_idx, end_independent_nodes_idx, DeleteGrainSize), | ||||
|                 [this, &remaining_nodes, &thread_data_list](const tbb::blocked_range<int> &range) | ||||
|                 { | ||||
|                     ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
| @ -477,6 +531,16 @@ class Contractor | ||||
|                     } | ||||
|                 }); | ||||
| 
 | ||||
|             // make sure we really sort each block
 | ||||
|             tbb::parallel_for( | ||||
|                 thread_data_list.data.range(), | ||||
|                 [&](const ThreadDataContainer::EnumerableThreadData::range_type &range) | ||||
|                 { | ||||
|                     for (auto &data : range) | ||||
|                         tbb::parallel_sort(data->inserted_edges.begin(), | ||||
|                                            data->inserted_edges.end()); | ||||
|                 }); | ||||
| 
 | ||||
|             // insert new edges
 | ||||
|             for (auto &data : thread_data_list.data) | ||||
|             { | ||||
| @ -502,23 +566,25 @@ class Contractor | ||||
|                 data->inserted_edges.clear(); | ||||
|             } | ||||
| 
 | ||||
|             tbb::parallel_for( | ||||
|                 tbb::blocked_range<int>(first_independent_node, last, NeighboursGrainSize), | ||||
|                 [this, &remaining_nodes, &node_priorities, &node_data, &thread_data_list]( | ||||
|                     const tbb::blocked_range<int> &range) | ||||
|                 { | ||||
|                     ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
|                     for (int position = range.begin(), end = range.end(); position != end; ++position) | ||||
|             if (!use_cached_node_priorities) | ||||
|             { | ||||
|                 tbb::parallel_for( | ||||
|                     tbb::blocked_range<int>(begin_independent_nodes_idx, end_independent_nodes_idx, NeighboursGrainSize), | ||||
|                     [this, &node_priorities, &remaining_nodes, &node_data, &thread_data_list]( | ||||
|                         const tbb::blocked_range<int> &range) | ||||
|                     { | ||||
|                         NodeID x = remaining_nodes[position].id; | ||||
|                         this->UpdateNodeNeighbours(node_priorities, node_data, data, x); | ||||
|                     } | ||||
|                 }); | ||||
|                         ContractorThreadData *data = thread_data_list.getThreadData(); | ||||
|                         for (int position = range.begin(), end = range.end(); position != end; ++position) | ||||
|                         { | ||||
|                             NodeID x = remaining_nodes[position].id; | ||||
|                             this->UpdateNodeNeighbours(node_priorities, node_data, data, x); | ||||
|                         } | ||||
|                     }); | ||||
|             } | ||||
| 
 | ||||
|             // remove contracted nodes from the pool
 | ||||
|             number_of_contracted_nodes += last - first_independent_node; | ||||
|             remaining_nodes.resize(first_independent_node); | ||||
|             remaining_nodes.shrink_to_fit(); | ||||
|             number_of_contracted_nodes += end_independent_nodes_idx - begin_independent_nodes_idx; | ||||
|             remaining_nodes.resize(begin_independent_nodes_idx); | ||||
|             //            unsigned maxdegree = 0;
 | ||||
|             //            unsigned avgdegree = 0;
 | ||||
|             //            unsigned mindegree = UINT_MAX;
 | ||||
| @ -545,16 +611,37 @@ class Contractor | ||||
|             //            quad: " << quaddegree;
 | ||||
| 
 | ||||
|             p.printStatus(number_of_contracted_nodes); | ||||
|             ++current_level; | ||||
|         } | ||||
| 
 | ||||
|         if (remaining_nodes.size() > 2) | ||||
|         { | ||||
|             // TODO: for small cores a sorted array of core ids might also work well
 | ||||
|             for (const auto& node : remaining_nodes) | ||||
|             { | ||||
|                 auto orig_id = orig_node_id_from_new_node_id_map[node.id]; | ||||
|                 is_core_node[orig_id] = true; | ||||
|             } | ||||
|               if (orig_node_id_from_new_node_id_map.size() > 0) | ||||
|               { | ||||
|                   tbb::parallel_for( | ||||
|                       tbb::blocked_range<int>(0, remaining_nodes.size(), InitGrainSize), | ||||
|                       [this, &remaining_nodes](const tbb::blocked_range<int> &range) | ||||
|                       { | ||||
|                           for (int x = range.begin(), end = range.end(); x != end; ++x) | ||||
|                           { | ||||
|                               const auto orig_id = remaining_nodes[x].id; | ||||
|                               is_core_node[orig_node_id_from_new_node_id_map[orig_id]] = true; | ||||
|                           } | ||||
|                       }); | ||||
|               } | ||||
|               else | ||||
|               { | ||||
|                   tbb::parallel_for( | ||||
|                       tbb::blocked_range<int>(0, remaining_nodes.size(), InitGrainSize), | ||||
|                       [this, &remaining_nodes](const tbb::blocked_range<int> &range) | ||||
|                       { | ||||
|                           for (int x = range.begin(), end = range.end(); x != end; ++x) | ||||
|                           { | ||||
|                               const auto orig_id = remaining_nodes[x].id; | ||||
|                               is_core_node[orig_id] = true; | ||||
|                           } | ||||
|                       }); | ||||
|               } | ||||
|         } | ||||
|         else | ||||
|         { | ||||
| @ -563,7 +650,8 @@ class Contractor | ||||
|             is_core_node.clear(); | ||||
|         } | ||||
| 
 | ||||
|         SimpleLogger().Write() << "[core] " << remaining_nodes.size() << " nodes " << contractor_graph->GetNumberOfEdges() << " edges." << std::endl; | ||||
|         SimpleLogger().Write() << "[core] " << remaining_nodes.size() << " nodes " | ||||
|                                << contractor_graph->GetNumberOfEdges() << " edges." << std::endl; | ||||
| 
 | ||||
|         thread_data_list.data.clear(); | ||||
|     } | ||||
| @ -573,6 +661,11 @@ class Contractor | ||||
|         out_is_core_node.swap(is_core_node); | ||||
|     } | ||||
| 
 | ||||
|     inline void GetNodeLevels(std::vector<float> &out_node_levels) | ||||
|     { | ||||
|         out_node_levels.swap(node_levels); | ||||
|     } | ||||
| 
 | ||||
|     template <class Edge> inline void GetEdges(DeallocatingVector<Edge> &edges) | ||||
|     { | ||||
|         Percent p(contractor_graph->GetNumberOfNodes()); | ||||
| @ -982,6 +1075,7 @@ class Contractor | ||||
|     std::shared_ptr<ContractorGraph> contractor_graph; | ||||
|     stxxl::vector<QueryEdge> external_edge_list; | ||||
|     std::vector<NodeID> orig_node_id_from_new_node_id_map; | ||||
|     std::vector<float> node_levels; | ||||
|     std::vector<bool> is_core_node; | ||||
|     XORFastHash fast_hash; | ||||
| }; | ||||
|  | ||||
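To summarize the control flow the contractor changes introduce: when cached .level data is available, its values are swapped in as the node priorities and the EvaluateNodePriority pass is skipped entirely. A reduced sketch with simplified types; the real class routes this through node_levels, node_priorities and use_cached_node_priorities, so the helper below is purely illustrative.

    #include <cstddef>
    #include <utility>
    #include <vector>

    // Sketch: pick the initial priority array for the contraction loop.
    std::vector<float> InitialPriorities(std::vector<float> &&cached_levels,
                                         std::size_t number_of_nodes)
    {
        if (!cached_levels.empty())
        {
            // Reuse the contraction order recorded by the previous run verbatim.
            return std::move(cached_levels);
        }
        std::vector<float> priorities(number_of_nodes, 0.f);
        // ...an EvaluateNodePriority-style pass would fill this in parallel...
        return priorities;
    }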
| @ -27,7 +27,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include "contractor_options.hpp" | ||||
| 
 | ||||
| #include "../util/git_sha.hpp" | ||||
| #include "util/version.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| 
 | ||||
| #include <boost/filesystem.hpp> | ||||
| @ -48,19 +48,27 @@ ContractorOptions::ParseArguments(int argc, char *argv[], ContractorConfig &cont | ||||
|     // declare a group of options that will be allowed both on command line and in config file
 | ||||
|     boost::program_options::options_description config_options("Configuration"); | ||||
|     config_options.add_options()( | ||||
|         "restrictions,r", | ||||
|         boost::program_options::value<boost::filesystem::path>(&contractor_config.restrictions_path), | ||||
|         "Restrictions file in .osrm.restrictions format")( | ||||
|         "profile,p", boost::program_options::value<boost::filesystem::path>(&contractor_config.profile_path) | ||||
|                          ->default_value("profile.lua"), | ||||
|         "profile,p", | ||||
|         boost::program_options::value<boost::filesystem::path>(&contractor_config.profile_path) | ||||
|             ->default_value("profile.lua"), | ||||
|         "Path to LUA routing profile")( | ||||
|         "threads,t", boost::program_options::value<unsigned int>(&contractor_config.requested_num_threads) | ||||
|                          ->default_value(tbb::task_scheduler_init::default_num_threads()), | ||||
|         "threads,t", | ||||
|         boost::program_options::value<unsigned int>(&contractor_config.requested_num_threads) | ||||
|             ->default_value(tbb::task_scheduler_init::default_num_threads()), | ||||
|         "Number of threads to use")( | ||||
| 		"core,k", boost::program_options::value<double>(&contractor_config.core_factor) | ||||
| 						 ->default_value(1.0),"Percentage of the graph (in vertices) to contract [0.1]"); | ||||
| 
 | ||||
| 						 ->default_value(1.0),"Percentage of the graph (in vertices) to contract [0..1]")( | ||||
| 		"segment-speed-file", boost::program_options::value<std::string>(&contractor_config.segment_speed_lookup_path), | ||||
| 						 "Lookup file containing nodeA,nodeB,speed data to adjust edge weights")( | ||||
|         "level-cache,o", | ||||
|         boost::program_options::value<bool>(&contractor_config.use_cached_priority)->default_value(false), | ||||
|         "Use .level file to retain the contaction level for each node from the last run."); | ||||
| 
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     config_options.add_options()( | ||||
| 		"debug-geometry", boost::program_options::value<std::string>(&contractor_config.debug_geometry_path) | ||||
| 						 ,"Write out edge-weight debugging geometry data in GeoJSON format to this file"); | ||||
| #endif | ||||
| 
 | ||||
|     // hidden options, will be allowed both on command line and in config file, but will not be
 | ||||
|     // shown to the user
 | ||||
| @ -102,7 +110,7 @@ ContractorOptions::ParseArguments(int argc, char *argv[], ContractorConfig &cont | ||||
| 
 | ||||
|     if (option_variables.count("version")) | ||||
|     { | ||||
|         SimpleLogger().Write() << g_GIT_DESCRIPTION; | ||||
|         SimpleLogger().Write() << OSRM_VERSION; | ||||
|         return return_code::exit; | ||||
|     } | ||||
| 
 | ||||
| @ -114,11 +122,6 @@ ContractorOptions::ParseArguments(int argc, char *argv[], ContractorConfig &cont | ||||
| 
 | ||||
|     boost::program_options::notify(option_variables); | ||||
| 
 | ||||
|     if (!option_variables.count("restrictions")) | ||||
|     { | ||||
|         contractor_config.restrictions_path = contractor_config.osrm_input_path.string() + ".restrictions"; | ||||
|     } | ||||
| 
 | ||||
|     if (!option_variables.count("input")) | ||||
|     { | ||||
|         SimpleLogger().Write() << "\n" << visible_options; | ||||
| @ -130,11 +133,10 @@ ContractorOptions::ParseArguments(int argc, char *argv[], ContractorConfig &cont | ||||
| 
 | ||||
| void ContractorOptions::GenerateOutputFilesNames(ContractorConfig &contractor_config) | ||||
| { | ||||
|     contractor_config.node_output_path = contractor_config.osrm_input_path.string() + ".nodes"; | ||||
|     contractor_config.level_output_path = contractor_config.osrm_input_path.string() + ".level"; | ||||
|     contractor_config.core_output_path = contractor_config.osrm_input_path.string() + ".core"; | ||||
|     contractor_config.edge_output_path = contractor_config.osrm_input_path.string() + ".edges"; | ||||
|     contractor_config.geometry_output_path = contractor_config.osrm_input_path.string() + ".geometry"; | ||||
|     contractor_config.graph_output_path = contractor_config.osrm_input_path.string() + ".hsgr"; | ||||
|     contractor_config.rtree_nodes_output_path = contractor_config.osrm_input_path.string() + ".ramIndex"; | ||||
|     contractor_config.rtree_leafs_output_path = contractor_config.osrm_input_path.string() + ".fileIndex"; | ||||
|     contractor_config.edge_based_graph_path = contractor_config.osrm_input_path.string() + ".ebg"; | ||||
|     contractor_config.edge_segment_lookup_path = contractor_config.osrm_input_path.string() + ".edge_segment_lookup"; | ||||
|     contractor_config.edge_penalty_path = contractor_config.osrm_input_path.string() + ".edge_penalties"; | ||||
| } | ||||
|  | ||||
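The parsing above uses boost::program_options. A reduced, self-contained sketch shows how the two newly added flags are declared and read; the surrounding main() is illustrative, while the option names and semantics follow the diff.

    #include <boost/program_options.hpp>
    #include <iostream>
    #include <string>

    int main(int argc, char *argv[])
    {
        namespace po = boost::program_options;

        std::string segment_speed_lookup_path;
        bool use_cached_priority = false;

        po::options_description config_options("Configuration");
        config_options.add_options()
            ("segment-speed-file",
             po::value<std::string>(&segment_speed_lookup_path),
             "Lookup file containing nodeA,nodeB,speed data to adjust edge weights")
            ("level-cache,o",
             po::value<bool>(&use_cached_priority)->default_value(false),
             "Use .level file to retain the contraction level for each node from the last run.");

        po::variables_map option_variables;
        po::store(po::parse_command_line(argc, argv, config_options), option_variables);
        po::notify(option_variables);

        std::cout << "level-cache: " << std::boolalpha << use_cached_priority << "\n";
        return 0;
    }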
| @ -45,16 +45,16 @@ struct ContractorConfig | ||||
| 
 | ||||
|     boost::filesystem::path config_file_path; | ||||
|     boost::filesystem::path osrm_input_path; | ||||
|     boost::filesystem::path restrictions_path; | ||||
|     boost::filesystem::path profile_path; | ||||
| 
 | ||||
|     std::string node_output_path; | ||||
|     std::string level_output_path; | ||||
|     std::string core_output_path; | ||||
|     std::string edge_output_path; | ||||
|     std::string geometry_output_path; | ||||
|     std::string graph_output_path; | ||||
|     std::string rtree_nodes_output_path; | ||||
|     std::string rtree_leafs_output_path; | ||||
|     std::string edge_based_graph_path; | ||||
| 
 | ||||
|     std::string edge_segment_lookup_path; | ||||
|     std::string edge_penalty_path; | ||||
|     bool use_cached_priority; | ||||
| 
 | ||||
|     unsigned requested_num_threads; | ||||
| 
 | ||||
| @ -63,6 +63,12 @@ struct ContractorConfig | ||||
|     //The remaining vertices form the core of the hierarchy 
 | ||||
|     //(e.g. 0.8 contracts 80 percent of the hierarchy, leaving a core of 20%)
 | ||||
|     double core_factor; | ||||
| 
 | ||||
|     std::string segment_speed_lookup_path; | ||||
| 
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     std::string debug_geometry_path; | ||||
| #endif | ||||
| }; | ||||
| 
 | ||||
| struct ContractorOptions | ||||
|  | ||||
| @ -26,17 +26,13 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| */ | ||||
| 
 | ||||
| #include "processing_chain.hpp" | ||||
| #include "contractor.hpp" | ||||
| 
 | ||||
| #include "contractor.hpp" | ||||
| #include "../algorithms/graph_compressor.hpp" | ||||
| #include "../algorithms/tarjan_scc.hpp" | ||||
| #include "../algorithms/crc32_processor.hpp" | ||||
| #include "../data_structures/compressed_edge_container.hpp" | ||||
| #include "../data_structures/deallocating_vector.hpp" | ||||
| #include "../data_structures/static_rtree.hpp" | ||||
| #include "../data_structures/restriction_map.hpp" | ||||
| 
 | ||||
| #include "../util/git_sha.hpp" | ||||
| #include "../data_structures/deallocating_vector.hpp" | ||||
| 
 | ||||
| #include "../algorithms/crc32_processor.hpp" | ||||
| #include "../util/graph_loader.hpp" | ||||
| #include "../util/integer_range.hpp" | ||||
| #include "../util/lua_util.hpp" | ||||
| @ -46,6 +42,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "../util/timing_util.hpp" | ||||
| #include "../typedefs.h" | ||||
| 
 | ||||
| #include <fast-cpp-csv-parser/csv.h> | ||||
| 
 | ||||
| #include <boost/filesystem/fstream.hpp> | ||||
| #include <boost/program_options.hpp> | ||||
| 
 | ||||
| @ -57,6 +55,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <thread> | ||||
| #include <vector> | ||||
| 
 | ||||
| #include "../util/debug_geometry.hpp" | ||||
| 
 | ||||
| Prepare::~Prepare() {} | ||||
| 
 | ||||
| int Prepare::Run() | ||||
| @ -70,58 +70,49 @@ int Prepare::Run() | ||||
|                   "changing EdgeBasedEdge type has influence on memory consumption!"); | ||||
| #endif | ||||
| 
 | ||||
|     if (config.core_factor > 1.0 || config.core_factor < 0)  | ||||
|     { | ||||
|        throw osrm::exception("Core factor must be between 0.0 to 1.0 (inclusive)"); | ||||
|     } | ||||
| 
 | ||||
|     TIMER_START(preparing); | ||||
| 
 | ||||
|     // Create a new lua state
 | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Generating edge-expanded graph representation"; | ||||
|     SimpleLogger().Write() << "Loading edge-expanded graph representation"; | ||||
| 
 | ||||
|     TIMER_START(expansion); | ||||
| 
 | ||||
|     std::vector<EdgeBasedNode> node_based_edge_list; | ||||
|     DeallocatingVector<EdgeBasedEdge> edge_based_edge_list; | ||||
|     std::vector<QueryNode> internal_to_external_node_map; | ||||
|     auto graph_size = BuildEdgeExpandedGraph(internal_to_external_node_map, node_based_edge_list, | ||||
|                                              edge_based_edge_list); | ||||
| 
 | ||||
|     auto number_of_node_based_nodes = graph_size.first; | ||||
|     auto max_edge_id = graph_size.second; | ||||
| 
 | ||||
|     TIMER_STOP(expansion); | ||||
| 
 | ||||
|     SimpleLogger().Write() << "building r-tree ..."; | ||||
|     TIMER_START(rtree); | ||||
| 
 | ||||
|     FindComponents(max_edge_id, edge_based_edge_list, node_based_edge_list); | ||||
| 
 | ||||
|     BuildRTree(node_based_edge_list, internal_to_external_node_map); | ||||
| 
 | ||||
|     TIMER_STOP(rtree); | ||||
| 
 | ||||
|     SimpleLogger().Write() << "writing node map ..."; | ||||
|     WriteNodeMapping(internal_to_external_node_map); | ||||
|     size_t max_edge_id = LoadEdgeExpandedGraph( | ||||
|         config.edge_based_graph_path, edge_based_edge_list, config.edge_segment_lookup_path, | ||||
|         config.edge_penalty_path, config.segment_speed_lookup_path); | ||||
| 
 | ||||
|     // Contracting the edge-expanded graph
 | ||||
| 
 | ||||
|     TIMER_START(contraction); | ||||
|     std::vector<bool> is_core_node; | ||||
|     std::vector<float> node_levels; | ||||
|     if (config.use_cached_priority) | ||||
|     { | ||||
|         ReadNodeLevels(node_levels); | ||||
|     } | ||||
|     DeallocatingVector<QueryEdge> contracted_edge_list; | ||||
|     ContractGraph(max_edge_id, edge_based_edge_list, contracted_edge_list, is_core_node); | ||||
|     ContractGraph(max_edge_id, edge_based_edge_list, contracted_edge_list, is_core_node, | ||||
|                   node_levels); | ||||
|     TIMER_STOP(contraction); | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Contraction took " << TIMER_SEC(contraction) << " sec"; | ||||
| 
 | ||||
|     std::size_t number_of_used_edges = | ||||
|         WriteContractedGraph(max_edge_id, node_based_edge_list, contracted_edge_list); | ||||
|     std::size_t number_of_used_edges = WriteContractedGraph(max_edge_id, contracted_edge_list); | ||||
|     WriteCoreNodeMarker(std::move(is_core_node)); | ||||
|     if (!config.use_cached_priority) | ||||
|     { | ||||
|         WriteNodeLevels(std::move(node_levels)); | ||||
|     } | ||||
| 
 | ||||
|     TIMER_STOP(preparing); | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Preprocessing : " << TIMER_SEC(preparing) << " seconds"; | ||||
|     SimpleLogger().Write() << "Expansion  : " << (number_of_node_based_nodes / TIMER_SEC(expansion)) | ||||
|                            << " nodes/sec and " << ((max_edge_id + 1) / TIMER_SEC(expansion)) | ||||
|                            << " edges/sec"; | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Contraction: " << ((max_edge_id + 1) / TIMER_SEC(contraction)) | ||||
|                            << " nodes/sec and " << number_of_used_edges / TIMER_SEC(contraction) | ||||
|                            << " edges/sec"; | ||||
| @ -131,85 +122,179 @@ int Prepare::Run() | ||||
|     return 0; | ||||
| } | ||||
| 
 | ||||
| void Prepare::FindComponents(unsigned max_edge_id, | ||||
|                              const DeallocatingVector<EdgeBasedEdge> &input_edge_list, | ||||
|                              std::vector<EdgeBasedNode> &input_nodes) const | ||||
| namespace std | ||||
| { | ||||
|     struct UncontractedEdgeData | ||||
| 
 | ||||
| template <> struct hash<std::pair<OSMNodeID, OSMNodeID>> | ||||
| { | ||||
|     std::size_t operator()(const std::pair<OSMNodeID, OSMNodeID> &k) const | ||||
|     { | ||||
|     }; | ||||
|     struct InputEdge | ||||
|         return OSMNodeID_to_uint64_t(k.first) ^ (OSMNodeID_to_uint64_t(k.second) << 12); | ||||
|     } | ||||
| }; | ||||
| } | ||||
| 
 | ||||
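The std::hash specialization above lets an unordered_map be keyed on a pair of OSM node ids. A standalone sketch of the same technique using plain 64-bit ids; OSMNodeID and OSMNodeID_to_uint64_t are OSRM-specific and not reproduced here.

    #include <cstddef>
    #include <cstdint>
    #include <unordered_map>
    #include <utility>

    struct NodePairHash
    {
        std::size_t operator()(const std::pair<std::uint64_t, std::uint64_t> &k) const
        {
            // Same combine as the diff: XOR the two ids, shifting one of them so
            // that (a, b) and (b, a) usually hash to different buckets.
            return k.first ^ (k.second << 12);
        }
    };

    // e.g. SegmentSpeedMap speeds; speeds[{from_node, to_node}] = 30;
    using SegmentSpeedMap =
        std::unordered_map<std::pair<std::uint64_t, std::uint64_t>, unsigned, NodePairHash>;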
| std::size_t Prepare::LoadEdgeExpandedGraph(std::string const &edge_based_graph_filename, | ||||
|                                            DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list, | ||||
|                                            const std::string &edge_segment_lookup_filename, | ||||
|                                            const std::string &edge_penalty_filename, | ||||
|                                            const std::string &segment_speed_filename) | ||||
| { | ||||
|     SimpleLogger().Write() << "Opening " << edge_based_graph_filename; | ||||
|     boost::filesystem::ifstream input_stream(edge_based_graph_filename, std::ios::binary); | ||||
| 
 | ||||
|     const bool update_edge_weights = segment_speed_filename != ""; | ||||
| 
 | ||||
|     boost::filesystem::ifstream edge_segment_input_stream; | ||||
|     boost::filesystem::ifstream edge_fixed_penalties_input_stream; | ||||
| 
 | ||||
|     if (update_edge_weights) | ||||
|     { | ||||
|         unsigned source; | ||||
|         unsigned target; | ||||
|         UncontractedEdgeData data; | ||||
| 
 | ||||
|         bool operator<(const InputEdge &rhs) const | ||||
|         edge_segment_input_stream.open(edge_segment_lookup_filename, std::ios::binary); | ||||
|         edge_fixed_penalties_input_stream.open(edge_penalty_filename, std::ios::binary); | ||||
|         if (!edge_segment_input_stream || !edge_fixed_penalties_input_stream) | ||||
|         { | ||||
|             return source < rhs.source || (source == rhs.source && target < rhs.target); | ||||
|         } | ||||
| 
 | ||||
|         bool operator==(const InputEdge &rhs) const | ||||
|         { | ||||
|             return source == rhs.source && target == rhs.target; | ||||
|         } | ||||
|     }; | ||||
|     using UncontractedGraph = StaticGraph<UncontractedEdgeData>; | ||||
|     std::vector<InputEdge> edges; | ||||
|     edges.reserve(input_edge_list.size() * 2); | ||||
| 
 | ||||
|     for (const auto &edge : input_edge_list) | ||||
|     { | ||||
|         BOOST_ASSERT_MSG(static_cast<unsigned int>(std::max(edge.weight, 1)) > 0, | ||||
|                          "edge distance < 1"); | ||||
|         if (edge.forward) | ||||
|         { | ||||
|             edges.push_back({edge.source, edge.target, {}}); | ||||
|         } | ||||
| 
 | ||||
|         if (edge.backward) | ||||
|         { | ||||
|             edges.push_back({edge.target, edge.source, {}}); | ||||
|             throw osrm::exception("Could not load .edge_segment_lookup or .edge_penalties, did you " | ||||
|                                   "run osrm-extract with '--generate-edge-lookup'?"); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     // connect forward and backward nodes of each edge
 | ||||
|     for (const auto &node : input_nodes) | ||||
|     const FingerPrint fingerprint_valid = FingerPrint::GetValid(); | ||||
|     FingerPrint fingerprint_loaded; | ||||
|     input_stream.read((char *)&fingerprint_loaded, sizeof(FingerPrint)); | ||||
|     fingerprint_loaded.TestPrepare(fingerprint_valid); | ||||
| 
 | ||||
|     size_t number_of_edges = 0; | ||||
|     size_t max_edge_id = SPECIAL_EDGEID; | ||||
|     input_stream.read((char *)&number_of_edges, sizeof(size_t)); | ||||
|     input_stream.read((char *)&max_edge_id, sizeof(size_t)); | ||||
| 
 | ||||
|     edge_based_edge_list.resize(number_of_edges); | ||||
|     SimpleLogger().Write() << "Reading " << number_of_edges << " edges from the edge based graph"; | ||||
| 
 | ||||
|     std::unordered_map<std::pair<OSMNodeID, OSMNodeID>, unsigned> segment_speed_lookup; | ||||
| 
 | ||||
|     if (update_edge_weights) | ||||
|     { | ||||
|         if (node.reverse_edge_based_node_id != SPECIAL_NODEID) | ||||
|         SimpleLogger().Write() << "Segment speed data supplied, will update edge weights from " | ||||
|                                << segment_speed_filename; | ||||
|         io::CSVReader<3> csv_in(segment_speed_filename); | ||||
|         csv_in.set_header("from_node", "to_node", "speed"); | ||||
|         uint64_t from_node_id; | ||||
|         uint64_t to_node_id; | ||||
|         unsigned speed; | ||||
|         while (csv_in.read_row(from_node_id, to_node_id, speed)) | ||||
|         { | ||||
|             edges.push_back({node.forward_edge_based_node_id, node.reverse_edge_based_node_id, {}}); | ||||
|             edges.push_back({node.reverse_edge_based_node_id, node.forward_edge_based_node_id, {}}); | ||||
|             segment_speed_lookup[std::make_pair(OSMNodeID(from_node_id), OSMNodeID(to_node_id))] = speed; | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     tbb::parallel_sort(edges.begin(), edges.end()); | ||||
|     auto new_end = std::unique(edges.begin(), edges.end()); | ||||
|     edges.resize(new_end - edges.begin()); | ||||
|     DEBUG_GEOMETRY_START(config); | ||||
| 
 | ||||
|     auto uncontractor_graph = std::make_shared<UncontractedGraph>(max_edge_id + 1, edges); | ||||
| 
 | ||||
|     TarjanSCC<UncontractedGraph> component_search( | ||||
|         std::const_pointer_cast<const UncontractedGraph>(uncontractor_graph)); | ||||
|     component_search.run(); | ||||
| 
 | ||||
|     for (auto &node : input_nodes) | ||||
|     // TODO: can we read this in bulk?  DeallocatingVector isn't necessarily
 | ||||
|     // all stored contiguously
 | ||||
|     for (; number_of_edges > 0; --number_of_edges) | ||||
|     { | ||||
|         auto forward_component = component_search.get_component_id(node.forward_edge_based_node_id); | ||||
|         BOOST_ASSERT(node.reverse_edge_based_node_id == SPECIAL_EDGEID || | ||||
|                      forward_component == | ||||
|                          component_search.get_component_id(node.reverse_edge_based_node_id)); | ||||
|         EdgeBasedEdge inbuffer; | ||||
|         input_stream.read((char *) &inbuffer, sizeof(EdgeBasedEdge)); | ||||
| 
 | ||||
|         const unsigned component_size = component_search.get_component_size(forward_component); | ||||
|         const bool is_tiny_component = component_size < 1000; | ||||
|         node.component_id = is_tiny_component ? (1 + forward_component) : 0; | ||||
|         if (update_edge_weights) | ||||
|         { | ||||
|             // Processing-time edge updates
 | ||||
|             unsigned fixed_penalty; | ||||
|             edge_fixed_penalties_input_stream.read(reinterpret_cast<char *>(&fixed_penalty), | ||||
|                                                    sizeof(fixed_penalty)); | ||||
| 
 | ||||
|             int new_weight = 0; | ||||
| 
 | ||||
|             unsigned num_osm_nodes = 0; | ||||
|             edge_segment_input_stream.read(reinterpret_cast<char *>(&num_osm_nodes), | ||||
|                                            sizeof(num_osm_nodes)); | ||||
|             OSMNodeID previous_osm_node_id; | ||||
|             edge_segment_input_stream.read(reinterpret_cast<char *>(&previous_osm_node_id), | ||||
|                                            sizeof(previous_osm_node_id)); | ||||
|             OSMNodeID this_osm_node_id; | ||||
|             double segment_length; | ||||
|             int segment_weight; | ||||
|             --num_osm_nodes; | ||||
|             for (; num_osm_nodes != 0; --num_osm_nodes) | ||||
|             { | ||||
|                 edge_segment_input_stream.read(reinterpret_cast<char *>(&this_osm_node_id), | ||||
|                                                sizeof(this_osm_node_id)); | ||||
|                 edge_segment_input_stream.read(reinterpret_cast<char *>(&segment_length), | ||||
|                                                sizeof(segment_length)); | ||||
|                 edge_segment_input_stream.read(reinterpret_cast<char *>(&segment_weight), | ||||
|                                                sizeof(segment_weight)); | ||||
| 
 | ||||
|                 auto speed_iter = segment_speed_lookup.find( | ||||
|                     std::make_pair(previous_osm_node_id, this_osm_node_id)); | ||||
|                 if (speed_iter != segment_speed_lookup.end()) | ||||
|                 { | ||||
|                     // This sets the segment weight using the same formula as the
 | ||||
|                     // EdgeBasedGraphFactory for consistency.  The *why* of this formula
 | ||||
|                     // is lost in the annals of time.
 | ||||
|                     int new_segment_weight = | ||||
|                         std::max(1, static_cast<int>(std::floor( | ||||
|                                         (segment_length * 10.) / (speed_iter->second / 3.6) + .5))); | ||||
|                     new_weight += new_segment_weight; | ||||
| 
 | ||||
|                     DEBUG_GEOMETRY_EDGE(  | ||||
|                             new_segment_weight,  | ||||
|                             segment_length, | ||||
|                             previous_osm_node_id, | ||||
|                             this_osm_node_id); | ||||
|                 } | ||||
|                 else | ||||
|                 { | ||||
|                     // If no lookup found, use the original weight value for this segment
 | ||||
|                     new_weight += segment_weight; | ||||
| 
 | ||||
|                     DEBUG_GEOMETRY_EDGE( | ||||
|                             segment_weight, | ||||
|                             segment_length, | ||||
|                             previous_osm_node_id, | ||||
|                             this_osm_node_id); | ||||
|                 } | ||||
| 
 | ||||
|                 previous_osm_node_id = this_osm_node_id; | ||||
|             } | ||||
| 
 | ||||
|             inbuffer.weight = fixed_penalty + new_weight; | ||||
|         } | ||||
| 
 | ||||
|         edge_based_edge_list.emplace_back(std::move(inbuffer)); | ||||
|     } | ||||
| 
 | ||||
|     DEBUG_GEOMETRY_STOP(); | ||||
|     SimpleLogger().Write() << "Done reading edges"; | ||||
|     return max_edge_id; | ||||
| } | ||||
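
The per-segment update in `LoadEdgeExpandedGraph` above turns a segment length in metres and a CSV speed in km/h into OSRM's integer weight in deciseconds, clamped to at least 1. A minimal sketch of that arithmetic, for illustration only (the helper name below is made up and not part of this commit):

```cpp
#include <algorithm>
#include <cmath>
#include <cstdio>

// Mirrors the formula used above when a CSV speed is found for a segment:
// weight = max(1, round(length[m] * 10 / speed[m/s])), i.e. travel time in deciseconds.
int segment_weight_deciseconds(const double segment_length_m, const unsigned speed_kmh)
{
    const double speed_ms = speed_kmh / 3.6; // km/h -> m/s
    return std::max(1, static_cast<int>(std::floor((segment_length_m * 10.) / speed_ms + .5)));
}

int main()
{
    // 100 m at 50 km/h is roughly 7.2 s, so the segment weight becomes 72.
    std::printf("%d\n", segment_weight_deciseconds(100.0, 50));
    return 0;
}
```

The speeds come from the three-column `from_node,to_node,speed` CSV that `io::CSVReader` parses earlier in the function; segments without a CSV entry keep their original weight.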
| 
 | ||||
| void Prepare::ReadNodeLevels(std::vector<float> &node_levels) const | ||||
| { | ||||
|     boost::filesystem::ifstream order_input_stream(config.level_output_path, std::ios::binary); | ||||
| 
 | ||||
|     unsigned level_size; | ||||
|     order_input_stream.read((char *)&level_size, sizeof(unsigned)); | ||||
|     node_levels.resize(level_size); | ||||
|     order_input_stream.read((char *)node_levels.data(), sizeof(float) * node_levels.size()); | ||||
| } | ||||
| 
 | ||||
| void Prepare::WriteNodeLevels(std::vector<float> &&in_node_levels) const | ||||
| { | ||||
|     std::vector<float> node_levels(std::move(in_node_levels)); | ||||
| 
 | ||||
|     boost::filesystem::ofstream order_output_stream(config.level_output_path, std::ios::binary); | ||||
| 
 | ||||
|     unsigned level_size = node_levels.size(); | ||||
|     order_output_stream.write((char *)&level_size, sizeof(unsigned)); | ||||
|     order_output_stream.write((char *)node_levels.data(), sizeof(float) * node_levels.size()); | ||||
| } | ||||
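
`WriteNodeLevels` and `ReadNodeLevels` use a minimal length-prefixed layout: one `unsigned` count followed by that many raw `float`s. A hedged round-trip sketch of the same layout with plain `std::fstream` (helper names are illustrative, not OSRM API):

```cpp
#include <fstream>
#include <string>
#include <vector>

// Layout assumed from the functions above: [unsigned count][count * float].
void write_levels(const std::string &path, const std::vector<float> &levels)
{
    std::ofstream out(path, std::ios::binary);
    const unsigned count = static_cast<unsigned>(levels.size());
    out.write(reinterpret_cast<const char *>(&count), sizeof(count));
    out.write(reinterpret_cast<const char *>(levels.data()), sizeof(float) * levels.size());
}

std::vector<float> read_levels(const std::string &path)
{
    std::ifstream in(path, std::ios::binary);
    unsigned count = 0;
    in.read(reinterpret_cast<char *>(&count), sizeof(count));
    std::vector<float> levels(count);
    in.read(reinterpret_cast<char *>(levels.data()), sizeof(float) * levels.size());
    return levels;
}
```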
| 
 | ||||
| void Prepare::WriteCoreNodeMarker(std::vector<bool> &&in_is_core_node) const | ||||
| { | ||||
|     std::vector<bool> is_core_node(in_is_core_node); | ||||
|     std::vector<char> unpacked_bool_flags(is_core_node.size()); | ||||
|     std::vector<bool> is_core_node(std::move(in_is_core_node)); | ||||
|     std::vector<char> unpacked_bool_flags(std::move(is_core_node.size())); | ||||
|     for (auto i = 0u; i < is_core_node.size(); ++i) | ||||
|     { | ||||
|         unpacked_bool_flags[i] = is_core_node[i] ? 1 : 0; | ||||
| @ -224,11 +309,8 @@ void Prepare::WriteCoreNodeMarker(std::vector<bool> &&in_is_core_node) const | ||||
| } | ||||
| 
 | ||||
| std::size_t Prepare::WriteContractedGraph(unsigned max_node_id, | ||||
|                                           const std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                                           const DeallocatingVector<QueryEdge> &contracted_edge_list) | ||||
| { | ||||
|     const unsigned crc32_value = CalculateEdgeChecksum(node_based_edge_list); | ||||
| 
 | ||||
|     // Sort contracted edges so that the static query graph can read some of them in-place.
 | ||||
|     tbb::parallel_sort(contracted_edge_list.begin(), contracted_edge_list.end()); | ||||
|     const unsigned contracted_edge_count = contracted_edge_list.size(); | ||||
| @ -284,9 +366,13 @@ std::size_t Prepare::WriteContractedGraph(unsigned max_node_id, | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Serializing node array"; | ||||
| 
 | ||||
|     RangebasedCRC32 crc32_calculator; | ||||
|     const unsigned edges_crc32 = crc32_calculator(contracted_edge_list); | ||||
|     SimpleLogger().Write() << "Writing CRC32: " << edges_crc32; | ||||
| 
 | ||||
|     const unsigned node_array_size = node_array.size(); | ||||
|     // serialize crc32, aka checksum
 | ||||
|     hsgr_output_stream.write((char *)&crc32_value, sizeof(unsigned)); | ||||
|     hsgr_output_stream.write((char *)&edges_crc32, sizeof(unsigned)); | ||||
|     // serialize number of nodes
 | ||||
|     hsgr_output_stream.write((char *)&node_array_size, sizeof(unsigned)); | ||||
|     // serialize number of edges
 | ||||
| @ -335,205 +421,21 @@ std::size_t Prepare::WriteContractedGraph(unsigned max_node_id, | ||||
|     return number_of_used_edges; | ||||
| } | ||||
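
The `.hsgr` checksum is now computed over the contracted edge list (the `edges_crc32` lines above) rather than over the node-based edge list. `RangebasedCRC32` is OSRM's own helper; as a rough, hedged stand-in for what it does on contiguous storage, a CRC32 over a vector's raw bytes can be computed with Boost.CRC:

```cpp
#include <boost/crc.hpp>
#include <vector>

// Illustration only: CRC32 over the raw bytes of a contiguous container.
// The real RangebasedCRC32 also handles DeallocatingVector's block storage and
// can switch to hardware CRC instructions when available.
template <typename T> unsigned crc32_of_range(const std::vector<T> &range)
{
    boost::crc_32_type crc;
    crc.process_bytes(range.data(), range.size() * sizeof(T));
    return crc.checksum();
}
```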
| 
 | ||||
| unsigned Prepare::CalculateEdgeChecksum(const std::vector<EdgeBasedNode> &node_based_edge_list) | ||||
| { | ||||
|     RangebasedCRC32 crc32; | ||||
|     if (crc32.using_hardware()) | ||||
|     { | ||||
|         SimpleLogger().Write() << "using hardware based CRC32 computation"; | ||||
|     } | ||||
|     else | ||||
|     { | ||||
|         SimpleLogger().Write() << "using software based CRC32 computation"; | ||||
|     } | ||||
| 
 | ||||
|     const unsigned crc32_value = crc32(node_based_edge_list); | ||||
|     SimpleLogger().Write() << "CRC32: " << crc32_value; | ||||
| 
 | ||||
|     return crc32_value; | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|     \brief Sets up the scripting environment (Lua scripting). | ||||
|     Also initializes the speed profile. | ||||
| */ | ||||
| void Prepare::SetupScriptingEnvironment(lua_State *lua_state, SpeedProfileProperties &speed_profile) | ||||
| { | ||||
|     // open utility libraries string library;
 | ||||
|     luaL_openlibs(lua_state); | ||||
| 
 | ||||
|     // adjust lua load path
 | ||||
|     luaAddScriptFolderToLoadPath(lua_state, config.profile_path.string().c_str()); | ||||
| 
 | ||||
|     // Now call our function in a lua script
 | ||||
|     if (0 != luaL_dofile(lua_state, config.profile_path.string().c_str())) | ||||
|     { | ||||
|         std::stringstream msg; | ||||
|         msg << lua_tostring(lua_state, -1) << " occurred in scripting block"; | ||||
|         throw osrm::exception(msg.str()); | ||||
|     } | ||||
| 
 | ||||
|     if (0 != luaL_dostring(lua_state, "return traffic_signal_penalty\n")) | ||||
|     { | ||||
|         std::stringstream msg; | ||||
|         msg << lua_tostring(lua_state, -1) << " occurred in scripting block"; | ||||
|         throw osrm::exception(msg.str()); | ||||
|     } | ||||
|     speed_profile.traffic_signal_penalty = 10 * lua_tointeger(lua_state, -1); | ||||
|     SimpleLogger().Write(logDEBUG) | ||||
|         << "traffic_signal_penalty: " << speed_profile.traffic_signal_penalty; | ||||
| 
 | ||||
|     if (0 != luaL_dostring(lua_state, "return u_turn_penalty\n")) | ||||
|     { | ||||
|         std::stringstream msg; | ||||
|         msg << lua_tostring(lua_state, -1) << " occurred in scripting block"; | ||||
|         throw osrm::exception(msg.str()); | ||||
|     } | ||||
| 
 | ||||
|     speed_profile.u_turn_penalty = 10 * lua_tointeger(lua_state, -1); | ||||
|     speed_profile.has_turn_penalty_function = lua_function_exists(lua_state, "turn_function"); | ||||
| } | ||||
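
`SetupScriptingEnvironment` reads plain numeric globals out of the profile by evaluating a tiny `return <name>` chunk and taking the result off the Lua stack. A self-contained sketch of that pattern against the Lua C API (Lua 5.1 is assumed, and the inline "profile" string is made up):

```cpp
extern "C" {
#include <lua.h>
#include <lualib.h>
#include <lauxlib.h>
}
#include <cstdio>

int main()
{
    lua_State *lua_state = luaL_newstate();
    luaL_openlibs(lua_state);

    // Stand-in for a profile script that defines the penalty globals read above.
    luaL_dostring(lua_state, "traffic_signal_penalty = 2");

    // Same pattern as above: run "return <global>" and read the value at the stack top.
    if (0 != luaL_dostring(lua_state, "return traffic_signal_penalty\n"))
    {
        std::printf("error: %s\n", lua_tostring(lua_state, -1));
        lua_close(lua_state);
        return 1;
    }
    const int penalty_in_deciseconds = 10 * lua_tointeger(lua_state, -1);
    lua_pop(lua_state, 1);
    std::printf("traffic_signal_penalty: %d\n", penalty_in_deciseconds);

    lua_close(lua_state);
    return 0;
}
```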
| 
 | ||||
| /**
 | ||||
|   \brief Loads turn restrictions from the .restrictions file | ||||
|   */ | ||||
| std::shared_ptr<RestrictionMap> Prepare::LoadRestrictionMap() | ||||
| { | ||||
|     boost::filesystem::ifstream input_stream(config.restrictions_path, | ||||
|                                              std::ios::in | std::ios::binary); | ||||
| 
 | ||||
|     std::vector<TurnRestriction> restriction_list; | ||||
|     loadRestrictionsFromFile(input_stream, restriction_list); | ||||
| 
 | ||||
|     SimpleLogger().Write() << " - " << restriction_list.size() << " restrictions."; | ||||
| 
 | ||||
|     return std::make_shared<RestrictionMap>(restriction_list); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|   \brief Load node based graph from .osrm file | ||||
|   */ | ||||
| std::shared_ptr<NodeBasedDynamicGraph> | ||||
| Prepare::LoadNodeBasedGraph(std::unordered_set<NodeID> &barrier_nodes, | ||||
|                             std::unordered_set<NodeID> &traffic_lights, | ||||
|                             std::vector<QueryNode> &internal_to_external_node_map) | ||||
| { | ||||
|     std::vector<NodeBasedEdge> edge_list; | ||||
| 
 | ||||
|     boost::filesystem::ifstream input_stream(config.osrm_input_path, | ||||
|                                              std::ios::in | std::ios::binary); | ||||
| 
 | ||||
|     std::vector<NodeID> barrier_list; | ||||
|     std::vector<NodeID> traffic_light_list; | ||||
|     NodeID number_of_node_based_nodes = loadNodesFromFile( | ||||
|         input_stream, barrier_list, traffic_light_list, internal_to_external_node_map); | ||||
| 
 | ||||
|     SimpleLogger().Write() << " - " << barrier_list.size() << " bollard nodes, " | ||||
|                            << traffic_light_list.size() << " traffic lights"; | ||||
| 
 | ||||
|     // insert into unordered sets for fast lookup
 | ||||
|     barrier_nodes.insert(barrier_list.begin(), barrier_list.end()); | ||||
|     traffic_lights.insert(traffic_light_list.begin(), traffic_light_list.end()); | ||||
| 
 | ||||
|     barrier_list.clear(); | ||||
|     barrier_list.shrink_to_fit(); | ||||
|     traffic_light_list.clear(); | ||||
|     traffic_light_list.shrink_to_fit(); | ||||
| 
 | ||||
|     loadEdgesFromFile(input_stream, edge_list); | ||||
| 
 | ||||
|     if (edge_list.empty()) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "The input data is empty, exiting."; | ||||
|         return std::shared_ptr<NodeBasedDynamicGraph>(); | ||||
|     } | ||||
| 
 | ||||
|     return NodeBasedDynamicGraphFromEdges(number_of_node_based_nodes, edge_list); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|  \brief Building an edge-expanded graph from node-based input and turn restrictions | ||||
| */ | ||||
| std::pair<std::size_t, std::size_t> | ||||
| Prepare::BuildEdgeExpandedGraph(std::vector<QueryNode> &internal_to_external_node_map, | ||||
|                                 std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                                 DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list) | ||||
| { | ||||
|     lua_State *lua_state = luaL_newstate(); | ||||
|     luabind::open(lua_state); | ||||
| 
 | ||||
|     SpeedProfileProperties speed_profile; | ||||
|     SetupScriptingEnvironment(lua_state, speed_profile); | ||||
| 
 | ||||
|     std::unordered_set<NodeID> barrier_nodes; | ||||
|     std::unordered_set<NodeID> traffic_lights; | ||||
| 
 | ||||
|     auto restriction_map = LoadRestrictionMap(); | ||||
|     auto node_based_graph = | ||||
|         LoadNodeBasedGraph(barrier_nodes, traffic_lights, internal_to_external_node_map); | ||||
| 
 | ||||
|     CompressedEdgeContainer compressed_edge_container; | ||||
|     GraphCompressor graph_compressor(speed_profile); | ||||
|     graph_compressor.Compress(barrier_nodes, traffic_lights, *restriction_map, *node_based_graph, | ||||
|                               compressed_edge_container); | ||||
| 
 | ||||
|     EdgeBasedGraphFactory edge_based_graph_factory( | ||||
|         node_based_graph, compressed_edge_container, barrier_nodes, traffic_lights, | ||||
|         std::const_pointer_cast<RestrictionMap const>(restriction_map), | ||||
|         internal_to_external_node_map, speed_profile); | ||||
| 
 | ||||
|     compressed_edge_container.SerializeInternalVector(config.geometry_output_path); | ||||
| 
 | ||||
|     edge_based_graph_factory.Run(config.edge_output_path, lua_state); | ||||
|     lua_close(lua_state); | ||||
| 
 | ||||
|     edge_based_graph_factory.GetEdgeBasedEdges(edge_based_edge_list); | ||||
|     edge_based_graph_factory.GetEdgeBasedNodes(node_based_edge_list); | ||||
|     auto max_edge_id = edge_based_graph_factory.GetHighestEdgeID(); | ||||
| 
 | ||||
|     const std::size_t number_of_node_based_nodes = node_based_graph->GetNumberOfNodes(); | ||||
|     return std::make_pair(number_of_node_based_nodes, max_edge_id); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|  \brief Build contracted graph. | ||||
|  */ | ||||
| void Prepare::ContractGraph(const unsigned max_edge_id, | ||||
|                             DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list, | ||||
|                             DeallocatingVector<QueryEdge> &contracted_edge_list, | ||||
|                             std::vector<bool> &is_core_node) | ||||
|                             std::vector<bool> &is_core_node, | ||||
|                             std::vector<float> &inout_node_levels) const | ||||
| { | ||||
|     Contractor contractor(max_edge_id + 1, edge_based_edge_list); | ||||
|     std::vector<float> node_levels; | ||||
|     node_levels.swap(inout_node_levels); | ||||
| 
 | ||||
|     Contractor contractor(max_edge_id + 1, edge_based_edge_list, std::move(node_levels)); | ||||
|     contractor.Run(config.core_factor); | ||||
|     contractor.GetEdges(contracted_edge_list); | ||||
|     contractor.GetCoreMarker(is_core_node); | ||||
| } | ||||
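
The new `ContractGraph` threads the node levels through by move: the caller's vector is swapped into a local, moved into the `Contractor`, and handed back through the same reference via `GetNodeLevels`, so the potentially large buffer is never copied. A generic, hedged sketch of that borrow-by-move pattern (the `Worker` type below is hypothetical, not OSRM code):

```cpp
#include <utility>
#include <vector>

struct Worker
{
    explicit Worker(std::vector<float> levels) : levels_(std::move(levels)) {}
    void GetLevels(std::vector<float> &out) { out = std::move(levels_); }

  private:
    std::vector<float> levels_;
};

void run(std::vector<float> &inout_levels)
{
    std::vector<float> local;
    local.swap(inout_levels);        // take the caller's buffer without copying
    Worker worker(std::move(local)); // hand it to the worker
    worker.GetLevels(inout_levels);  // and give it back (possibly updated) on exit
}
```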
| 
 | ||||
| /**
 | ||||
|   \brief Writing info on original (node-based) nodes | ||||
|  */ | ||||
| void Prepare::WriteNodeMapping(const std::vector<QueryNode> &internal_to_external_node_map) | ||||
| { | ||||
|     boost::filesystem::ofstream node_stream(config.node_output_path, std::ios::binary); | ||||
|     const unsigned size_of_mapping = internal_to_external_node_map.size(); | ||||
|     node_stream.write((char *)&size_of_mapping, sizeof(unsigned)); | ||||
|     if (size_of_mapping > 0) | ||||
|     { | ||||
|         node_stream.write((char *)internal_to_external_node_map.data(), | ||||
|                           size_of_mapping * sizeof(QueryNode)); | ||||
|     } | ||||
|     node_stream.close(); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|     \brief Building rtree-based nearest-neighbor data structure | ||||
| 
 | ||||
|     Saves tree into '.ramIndex' and leaves into '.fileIndex'. | ||||
|  */ | ||||
| void Prepare::BuildRTree(const std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                          const std::vector<QueryNode> &internal_to_external_node_map) | ||||
| { | ||||
|     StaticRTree<EdgeBasedNode>(node_based_edge_list, config.rtree_nodes_output_path.c_str(), | ||||
|                                config.rtree_leafs_output_path.c_str(), | ||||
|                                internal_to_external_node_map); | ||||
|     contractor.GetNodeLevels(inout_node_levels); | ||||
| } | ||||
|  | ||||
| @ -28,10 +28,12 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #ifndef PROCESSING_CHAIN_HPP | ||||
| #define PROCESSING_CHAIN_HPP | ||||
| 
 | ||||
| #include "contractor.hpp" | ||||
| #include "contractor_options.hpp" | ||||
| #include "edge_based_graph_factory.hpp" | ||||
| #include "../data_structures/query_edge.hpp" | ||||
| #include "../data_structures/static_graph.hpp" | ||||
| #include "../data_structures/deallocating_vector.hpp" | ||||
| #include "../data_structures/node_based_graph.hpp" | ||||
| 
 | ||||
| struct SpeedProfileProperties; | ||||
| struct EdgeBasedNode; | ||||
| @ -48,8 +50,6 @@ class Prepare | ||||
| { | ||||
|   public: | ||||
|     using EdgeData = QueryEdge::EdgeData; | ||||
|     using InputEdge = DynamicGraph<EdgeData>::InputEdge; | ||||
|     using StaticEdge = StaticGraph<EdgeData>::InputEdge; | ||||
| 
 | ||||
|     explicit Prepare(ContractorConfig contractor_config) : config(std::move(contractor_config)) {} | ||||
|     Prepare(const Prepare &) = delete; | ||||
| @ -58,34 +58,27 @@ class Prepare | ||||
|     int Run(); | ||||
| 
 | ||||
|   protected: | ||||
|     void SetupScriptingEnvironment(lua_State *myLuaState, SpeedProfileProperties &speed_profile); | ||||
|     unsigned CalculateEdgeChecksum(const std::vector<EdgeBasedNode> &node_based_edge_list); | ||||
|     void ContractGraph(const unsigned max_edge_id, | ||||
|                        DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list, | ||||
|                        DeallocatingVector<QueryEdge> &contracted_edge_list, | ||||
|                        std::vector<bool> &is_core_node); | ||||
|                        std::vector<bool> &is_core_node, | ||||
|                        std::vector<float> &node_levels) const; | ||||
|     void WriteCoreNodeMarker(std::vector<bool> &&is_core_node) const; | ||||
|     void WriteNodeLevels(std::vector<float> &&node_levels) const; | ||||
|     void ReadNodeLevels(std::vector<float> &contraction_order) const; | ||||
|     std::size_t WriteContractedGraph(unsigned number_of_edge_based_nodes, | ||||
|                                      const std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                                      const DeallocatingVector<QueryEdge> &contracted_edge_list); | ||||
|     std::shared_ptr<RestrictionMap> LoadRestrictionMap(); | ||||
|     std::shared_ptr<NodeBasedDynamicGraph> | ||||
|     LoadNodeBasedGraph(std::unordered_set<NodeID> &barrier_nodes, | ||||
|                        std::unordered_set<NodeID> &traffic_lights, | ||||
|                        std::vector<QueryNode> &internal_to_external_node_map); | ||||
|     std::pair<std::size_t, std::size_t> | ||||
|     BuildEdgeExpandedGraph(std::vector<QueryNode> &internal_to_external_node_map, | ||||
|                            std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                            DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list); | ||||
|     void WriteNodeMapping(const std::vector<QueryNode> &internal_to_external_node_map); | ||||
|     void FindComponents(unsigned max_edge_id, | ||||
|                         const DeallocatingVector<EdgeBasedEdge> &edges, | ||||
|                         std::vector<EdgeBasedNode> &nodes) const; | ||||
|     void BuildRTree(const std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                     const std::vector<QueryNode> &internal_to_external_node_map); | ||||
| 
 | ||||
|   private: | ||||
|     ContractorConfig config; | ||||
|     std::size_t LoadEdgeExpandedGraph(const std::string &edge_based_graph_path, | ||||
|                                       DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list, | ||||
|                                       const std::string &edge_segment_lookup_path, | ||||
|                                       const std::string &edge_penalty_path, | ||||
|                                       const std::string &segment_speed_path); | ||||
| }; | ||||
| 
 | ||||
| #endif // PROCESSING_CHAIN_HPP
 | ||||
|  | ||||
| @ -1,85 +0,0 @@ | ||||
| /*
 | ||||
| 
 | ||||
| Copyright (c) 2014, Project OSRM contributors | ||||
| All rights reserved. | ||||
| 
 | ||||
| Redistribution and use in source and binary forms, with or without modification, | ||||
| are permitted provided that the following conditions are met: | ||||
| 
 | ||||
| Redistributions of source code must retain the above copyright notice, this list | ||||
| of conditions and the following disclaimer. | ||||
| Redistributions in binary form must reproduce the above copyright notice, this | ||||
| list of conditions and the following disclaimer in the documentation and/or | ||||
| other materials provided with the distribution. | ||||
| 
 | ||||
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND | ||||
| ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | ||||
| WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||||
| DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR | ||||
| ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | ||||
| (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | ||||
| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | ||||
| ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||
| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | ||||
| SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| */ | ||||
| 
 | ||||
| #ifndef CONCURRENT_QUEUE_HPP | ||||
| #define CONCURRENT_QUEUE_HPP | ||||
| 
 | ||||
| #include <boost/circular_buffer.hpp> | ||||
| #include <condition_variable> | ||||
| #include <mutex> | ||||
| 
 | ||||
| template <typename Data> class ConcurrentQueue | ||||
| { | ||||
|   public: | ||||
|     explicit ConcurrentQueue(const size_t max_size) : m_internal_queue(max_size) {} | ||||
| 
 | ||||
|     inline void push(const Data &data) | ||||
|     { | ||||
|         std::unique_lock<std::mutex> lock(m_mutex); | ||||
|         m_not_full.wait(lock, [this] | ||||
|                         { | ||||
|                             return m_internal_queue.size() < m_internal_queue.capacity(); | ||||
|                         }); | ||||
|         m_internal_queue.push_back(data); | ||||
|         m_not_empty.notify_one(); | ||||
|     } | ||||
| 
 | ||||
|     inline bool empty() const { return m_internal_queue.empty(); } | ||||
| 
 | ||||
|     inline void wait_and_pop(Data &popped_value) | ||||
|     { | ||||
|         std::unique_lock<std::mutex> lock(m_mutex); | ||||
|         m_not_empty.wait(lock, [this] | ||||
|                          { | ||||
|                              return !m_internal_queue.empty(); | ||||
|                          }); | ||||
|         popped_value = m_internal_queue.front(); | ||||
|         m_internal_queue.pop_front(); | ||||
|         m_not_full.notify_one(); | ||||
|     } | ||||
| 
 | ||||
|     inline bool try_pop(Data &popped_value) | ||||
|     { | ||||
|         std::unique_lock<std::mutex> lock(m_mutex); | ||||
|         if (m_internal_queue.empty()) | ||||
|         { | ||||
|             return false; | ||||
|         } | ||||
|         popped_value = m_internal_queue.front(); | ||||
|         m_internal_queue.pop_front(); | ||||
|         m_not_full.notify_one(); | ||||
|         return true; | ||||
|     } | ||||
| 
 | ||||
|   private: | ||||
|     boost::circular_buffer<Data> m_internal_queue; | ||||
|     std::mutex m_mutex; | ||||
|     std::condition_variable m_not_empty; | ||||
|     std::condition_variable m_not_full; | ||||
| }; | ||||
| 
 | ||||
| #endif // CONCURRENT_QUEUE_HPP
 | ||||
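
For reference, the `ConcurrentQueue` deleted above is a bounded producer/consumer buffer: `push` blocks while the circular buffer is full, `wait_and_pop` blocks while it is empty, and `try_pop` is the non-blocking variant. A hedged usage sketch, assuming the header were still on the include path:

```cpp
#include <iostream>
#include <thread>
// #include "concurrent_queue.hpp" // the header removed above

int main()
{
    ConcurrentQueue<int> queue(4); // producers block once 4 items are waiting

    std::thread producer([&queue]
                         {
                             for (int i = 0; i < 10; ++i)
                             {
                                 queue.push(i); // blocks while the buffer is full
                             }
                             queue.push(-1); // sentinel that stops the consumer
                         });

    std::thread consumer([&queue]
                         {
                             int value = 0;
                             for (;;)
                             {
                                 queue.wait_and_pop(value); // blocks while empty
                                 if (value == -1)
                                     break;
                                 std::cout << value << '\n';
                             }
                         });

    producer.join();
    consumer.join();
    return 0;
}
```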
| @ -237,6 +237,12 @@ class DeallocatingVectorRemoveIterator | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
| template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK> | ||||
| class DeallocatingVector; | ||||
| 
 | ||||
| template<typename T, std::size_t S> | ||||
| void swap(DeallocatingVector<T, S>& lhs, DeallocatingVector<T, S>& rhs); | ||||
| 
 | ||||
| template <typename ElementT, std::size_t ELEMENTS_PER_BLOCK = 8388608 / sizeof(ElementT)> | ||||
| class DeallocatingVector | ||||
| { | ||||
| @ -257,6 +263,8 @@ class DeallocatingVector | ||||
| 
 | ||||
|     ~DeallocatingVector() { clear(); } | ||||
| 
 | ||||
|     friend void swap<>(DeallocatingVector<ElementT, ELEMENTS_PER_BLOCK>& lhs, DeallocatingVector<ElementT, ELEMENTS_PER_BLOCK>& rhs); | ||||
| 
 | ||||
|     void swap(DeallocatingVector<ElementT, ELEMENTS_PER_BLOCK> &other) | ||||
|     { | ||||
|         std::swap(current_size, other.current_size); | ||||
| @ -386,4 +394,10 @@ class DeallocatingVector | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
| template<typename T, std::size_t S> | ||||
| void swap(DeallocatingVector<T, S>& lhs, DeallocatingVector<T, S>& rhs) | ||||
| { | ||||
|     lhs.swap(rhs); | ||||
| } | ||||
| 
 | ||||
| #endif /* DEALLOCATING_VECTOR_HPP */ | ||||
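
The forward declaration and `friend` added above exist so that the free `swap` at the bottom is found by argument-dependent lookup; generic code using the `using std::swap; swap(a, b);` idiom then hits the cheap member `swap` instead of `std::swap`'s generic move/copy fallback. A hedged usage sketch (`reset_via_swap` is illustrative and assumes the container's usual vector-like surface, e.g. `emplace_back`):

```cpp
#include <utility>
// #include "deallocating_vector.hpp" // the header changed above

template <typename Container> void reset_via_swap(Container &container)
{
    using std::swap;        // generic code: prefer a type's own swap if one exists
    Container empty;
    swap(container, empty); // ADL picks the free swap() defined for DeallocatingVector
}                           // 'empty' is destroyed here, releasing the old blocks

int main()
{
    DeallocatingVector<int> values;
    values.emplace_back(42);
    reset_via_swap(values);
    return 0;
}
```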
|  | ||||
| @ -46,7 +46,7 @@ struct EdgeBasedNode | ||||
|           u(SPECIAL_NODEID), v(SPECIAL_NODEID), name_id(0), | ||||
|           forward_weight(INVALID_EDGE_WEIGHT >> 1), reverse_weight(INVALID_EDGE_WEIGHT >> 1), | ||||
|           forward_offset(0), reverse_offset(0), packed_geometry_id(SPECIAL_EDGEID), | ||||
|           component_id(-1), fwd_segment_position(std::numeric_limits<unsigned short>::max()), | ||||
|           component{INVALID_COMPONENTID, false}, fwd_segment_position(std::numeric_limits<unsigned short>::max()), | ||||
|           forward_travel_mode(TRAVEL_MODE_INACCESSIBLE), | ||||
|           backward_travel_mode(TRAVEL_MODE_INACCESSIBLE) | ||||
|     { | ||||
| @ -62,6 +62,7 @@ struct EdgeBasedNode | ||||
|                            int forward_offset, | ||||
|                            int reverse_offset, | ||||
|                            unsigned packed_geometry_id, | ||||
|                            bool is_tiny_component, | ||||
|                            unsigned component_id, | ||||
|                            unsigned short fwd_segment_position, | ||||
|                            TravelMode forward_travel_mode, | ||||
| @ -70,7 +71,7 @@ struct EdgeBasedNode | ||||
|           reverse_edge_based_node_id(reverse_edge_based_node_id), u(u), v(v), name_id(name_id), | ||||
|           forward_weight(forward_weight), reverse_weight(reverse_weight), | ||||
|           forward_offset(forward_offset), reverse_offset(reverse_offset), | ||||
|           packed_geometry_id(packed_geometry_id), component_id(component_id), | ||||
|           packed_geometry_id(packed_geometry_id), component{component_id, is_tiny_component}, | ||||
|           fwd_segment_position(fwd_segment_position), forward_travel_mode(forward_travel_mode), | ||||
|           backward_travel_mode(backward_travel_mode) | ||||
|     { | ||||
| @ -90,8 +91,6 @@ struct EdgeBasedNode | ||||
| 
 | ||||
|     bool IsCompressed() const { return packed_geometry_id != SPECIAL_EDGEID; } | ||||
| 
 | ||||
|     bool is_in_tiny_cc() const { return 0 != component_id; } | ||||
| 
 | ||||
|     NodeID forward_edge_based_node_id; // needed for edge-expanded graph
 | ||||
|     NodeID reverse_edge_based_node_id; // needed for edge-expanded graph
 | ||||
|     NodeID u;                          // indices into the coordinates array
 | ||||
| @ -102,7 +101,10 @@ struct EdgeBasedNode | ||||
|     int forward_offset;          // prefix sum of the weight up the edge TODO: short must suffice
 | ||||
|     int reverse_offset;          // prefix sum of the weight from the edge TODO: short must suffice
 | ||||
|     unsigned packed_geometry_id; // if set, then the edge represents a packed geometry
 | ||||
|     unsigned component_id; | ||||
|     struct { | ||||
|         unsigned id : 31; | ||||
|         bool is_tiny : 1; | ||||
|     } component; | ||||
|     unsigned short fwd_segment_position; // segment id in a compressed geometry
 | ||||
|     TravelMode forward_travel_mode : 4; | ||||
|     TravelMode backward_travel_mode : 4; | ||||
|  | ||||
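
The `component` member above packs a 31-bit component id and an `is_tiny` flag into one 32-bit word, replacing the old convention where `component_id == 0` meant "not in a tiny component". A hedged stand-alone sketch of the same packing, with a size check mirroring the `static_assert` used for `PhantomNode` further down (the struct name below is made up):

```cpp
#include <cstdint>

// 31 bits of component id plus a 1-bit "tiny component" flag in 4 bytes.
// (As the PhantomNode code notes, MSVC may lay out mixed-type bit-fields differently.)
struct ComponentID
{
    std::uint32_t id : 31;
    bool is_tiny : 1;
};

#ifndef _MSC_VER
static_assert(sizeof(ComponentID) == 4, "ComponentID should stay 4 bytes");
#endif

int main()
{
    ComponentID component{};
    component.id = 12345;     // up to 2^31 - 1 distinct components
    component.is_tiny = true; // marks membership of a small connected component
    return component.is_tiny ? 0 : 1;
}
```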
| @ -31,7 +31,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <limits> | ||||
| 
 | ||||
| ExternalMemoryNode::ExternalMemoryNode( | ||||
|     int lat, int lon, unsigned int node_id, bool barrier, bool traffic_lights) | ||||
|     int lat, int lon, OSMNodeID node_id, bool barrier, bool traffic_lights) | ||||
|     : QueryNode(lat, lon, node_id), barrier(barrier), traffic_lights(traffic_lights) | ||||
| { | ||||
| } | ||||
| @ -40,13 +40,13 @@ ExternalMemoryNode::ExternalMemoryNode() : barrier(false), traffic_lights(false) | ||||
| 
 | ||||
| ExternalMemoryNode ExternalMemoryNode::min_value() | ||||
| { | ||||
|     return ExternalMemoryNode(0, 0, 0, false, false); | ||||
|     return ExternalMemoryNode(0, 0, MIN_OSM_NODEID, false, false); | ||||
| } | ||||
| 
 | ||||
| ExternalMemoryNode ExternalMemoryNode::max_value() | ||||
| { | ||||
|     return ExternalMemoryNode(std::numeric_limits<int>::max(), std::numeric_limits<int>::max(), | ||||
|                               std::numeric_limits<unsigned>::max(), false, false); | ||||
|                               MAX_OSM_NODEID, false, false); | ||||
| } | ||||
| 
 | ||||
| bool ExternalMemoryNodeSTXXLCompare::operator()(const ExternalMemoryNode &left, | ||||
|  | ||||
| @ -34,7 +34,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| struct ExternalMemoryNode : QueryNode | ||||
| { | ||||
|     ExternalMemoryNode(int lat, int lon, NodeID id, bool barrier, bool traffic_light); | ||||
|     ExternalMemoryNode(int lat, int lon, OSMNodeID id, bool barrier, bool traffic_light); | ||||
| 
 | ||||
|     ExternalMemoryNode(); | ||||
| 
 | ||||
|  | ||||
| @ -140,7 +140,7 @@ template <class CandidateLists> struct HiddenMarkovModel | ||||
|             for (const auto s : osrm::irange<std::size_t>(0u, viterbi[initial_timestamp].size())) | ||||
|             { | ||||
|                 viterbi[initial_timestamp][s] = | ||||
|                     emission_log_probability(candidates_list[initial_timestamp][s].second); | ||||
|                     emission_log_probability(candidates_list[initial_timestamp][s].distance); | ||||
|                 parents[initial_timestamp][s] = std::make_pair(initial_timestamp, s); | ||||
|                 pruned[initial_timestamp][s] = | ||||
|                     viterbi[initial_timestamp][s] < osrm::matching::MINIMAL_LOG_PROB; | ||||
|  | ||||
| @ -50,7 +50,7 @@ bool NodeBasedEdge::operator<(const NodeBasedEdge &other) const | ||||
| NodeBasedEdge::NodeBasedEdge() | ||||
|     : source(SPECIAL_NODEID), target(SPECIAL_NODEID), name_id(0), weight(0), forward(false), | ||||
|       backward(false), roundabout(false), | ||||
|       access_restricted(false), is_split(false), travel_mode(false) | ||||
|       access_restricted(false), startpoint(true), is_split(false), travel_mode(false) | ||||
| { | ||||
| } | ||||
| 
 | ||||
| @ -62,11 +62,12 @@ NodeBasedEdge::NodeBasedEdge(NodeID source, | ||||
|                              bool backward, | ||||
|                              bool roundabout, | ||||
|                              bool access_restricted, | ||||
|                              bool startpoint, | ||||
|                              TravelMode travel_mode, | ||||
|                              bool is_split) | ||||
|     : source(source), target(target), name_id(name_id), weight(weight), forward(forward), | ||||
|       backward(backward), roundabout(roundabout), | ||||
|       access_restricted(access_restricted), is_split(is_split), travel_mode(travel_mode) | ||||
|       access_restricted(access_restricted), startpoint(startpoint), is_split(is_split), travel_mode(travel_mode) | ||||
| { | ||||
| } | ||||
| 
 | ||||
|  | ||||
| @ -44,6 +44,7 @@ struct NodeBasedEdge | ||||
|                            bool backward, | ||||
|                            bool roundabout, | ||||
|                            bool access_restricted, | ||||
|                            bool startpoint, | ||||
|                            TravelMode travel_mode, | ||||
|                            bool is_split); | ||||
| 
 | ||||
| @ -55,10 +56,31 @@ struct NodeBasedEdge | ||||
|     bool backward : 1; | ||||
|     bool roundabout : 1; | ||||
|     bool access_restricted : 1; | ||||
|     bool startpoint : 1; | ||||
|     bool is_split : 1; | ||||
|     TravelMode travel_mode : 4; | ||||
| }; | ||||
| 
 | ||||
| struct NodeBasedEdgeWithOSM : NodeBasedEdge | ||||
| { | ||||
|     explicit NodeBasedEdgeWithOSM(OSMNodeID source, | ||||
|                            OSMNodeID target, | ||||
|                            NodeID name_id, | ||||
|                            EdgeWeight weight, | ||||
|                            bool forward, | ||||
|                            bool backward, | ||||
|                            bool roundabout, | ||||
|                            bool access_restricted, | ||||
|                            bool startpoint, | ||||
|                            TravelMode travel_mode, | ||||
|                            bool is_split) | ||||
|         : NodeBasedEdge(SPECIAL_NODEID, SPECIAL_NODEID, name_id, weight, forward, backward, roundabout, access_restricted, startpoint, travel_mode, is_split), | ||||
|         osm_source_id(source), osm_target_id(target) {} | ||||
| 
 | ||||
|     OSMNodeID osm_source_id; | ||||
|     OSMNodeID osm_target_id; | ||||
| }; | ||||
| 
 | ||||
| struct EdgeBasedEdge | ||||
| { | ||||
| 
 | ||||
|  | ||||
| @ -47,10 +47,10 @@ struct NodeBasedEdgeData | ||||
| 
 | ||||
|     NodeBasedEdgeData(int distance, unsigned edge_id, unsigned name_id, | ||||
|             bool access_restricted, bool reversed, | ||||
|             bool roundabout, TravelMode travel_mode) | ||||
|             bool roundabout, bool startpoint, TravelMode travel_mode) | ||||
|         : distance(distance), edge_id(edge_id), name_id(name_id), | ||||
|           access_restricted(access_restricted), reversed(reversed), | ||||
|           roundabout(roundabout), travel_mode(travel_mode) | ||||
|           roundabout(roundabout), startpoint(startpoint), travel_mode(travel_mode) | ||||
|     { | ||||
|     } | ||||
| 
 | ||||
| @ -60,6 +60,7 @@ struct NodeBasedEdgeData | ||||
|     bool access_restricted : 1; | ||||
|     bool reversed : 1; | ||||
|     bool roundabout : 1; | ||||
|     bool startpoint : 1; | ||||
|     TravelMode travel_mode : 4; | ||||
| 
 | ||||
|     bool IsCompatibleTo(const NodeBasedEdgeData &other) const | ||||
| @ -72,10 +73,10 @@ struct NodeBasedEdgeData | ||||
| using NodeBasedDynamicGraph = DynamicGraph<NodeBasedEdgeData>; | ||||
| 
 | ||||
| /// Factory method to create NodeBasedDynamicGraph from NodeBasedEdges
 | ||||
| /// The since DynamicGraph expects directed edges, we need to insert
 | ||||
| /// Since DynamicGraph expects directed edges, we need to insert
 | ||||
| /// two edges for undirected edges.
 | ||||
| inline std::shared_ptr<NodeBasedDynamicGraph> | ||||
| NodeBasedDynamicGraphFromEdges(int number_of_nodes, const std::vector<NodeBasedEdge> &input_edge_list) | ||||
| NodeBasedDynamicGraphFromEdges(std::size_t number_of_nodes, const std::vector<NodeBasedEdge> &input_edge_list) | ||||
| { | ||||
|     auto edges_list = directedEdgesFromCompressed<NodeBasedDynamicGraph::InputEdge>(input_edge_list, | ||||
|         [](NodeBasedDynamicGraph::InputEdge& output_edge, const NodeBasedEdge& input_edge) | ||||
| @ -87,6 +88,7 @@ NodeBasedDynamicGraphFromEdges(int number_of_nodes, const std::vector<NodeBasedE | ||||
|             output_edge.data.name_id = input_edge.name_id; | ||||
|             output_edge.data.access_restricted = input_edge.access_restricted; | ||||
|             output_edge.data.travel_mode = input_edge.travel_mode; | ||||
|             output_edge.data.startpoint = input_edge.startpoint; | ||||
|         } | ||||
|     ); | ||||
| 
 | ||||
|  | ||||
| @ -32,10 +32,10 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| struct Cmp | ||||
| { | ||||
|     using value_type = NodeID; | ||||
|     bool operator()(const NodeID left, const NodeID right) const { return left < right; } | ||||
|     value_type max_value() { return 0xffffffff; } | ||||
|     value_type min_value() { return 0x0; } | ||||
|     using value_type = OSMNodeID; | ||||
|     bool operator()(const value_type left, const value_type right) const { return left < right; } | ||||
|     value_type max_value() { return MAX_OSM_NODEID; } | ||||
|     value_type min_value() { return MIN_OSM_NODEID; } | ||||
| }; | ||||
| 
 | ||||
| #endif // NODE_ID_HPP
 | ||||
|  | ||||
| @ -42,6 +42,7 @@ PhantomNode::PhantomNode(NodeID forward_node_id, | ||||
|                          int forward_offset, | ||||
|                          int reverse_offset, | ||||
|                          unsigned packed_geometry_id, | ||||
|                          bool is_tiny_component, | ||||
|                          unsigned component_id, | ||||
|                          FixedPointCoordinate &location, | ||||
|                          unsigned short fwd_segment_position, | ||||
| @ -50,7 +51,7 @@ PhantomNode::PhantomNode(NodeID forward_node_id, | ||||
|     : forward_node_id(forward_node_id), reverse_node_id(reverse_node_id), name_id(name_id), | ||||
|       forward_weight(forward_weight), reverse_weight(reverse_weight), | ||||
|       forward_offset(forward_offset), reverse_offset(reverse_offset), | ||||
|       packed_geometry_id(packed_geometry_id), component_id(component_id), location(location), | ||||
|       packed_geometry_id(packed_geometry_id), component{component_id, is_tiny_component}, location(location), | ||||
|       fwd_segment_position(fwd_segment_position), forward_travel_mode(forward_travel_mode), | ||||
|       backward_travel_mode(backward_travel_mode) | ||||
| { | ||||
| @ -60,7 +61,7 @@ PhantomNode::PhantomNode() | ||||
|     : forward_node_id(SPECIAL_NODEID), reverse_node_id(SPECIAL_NODEID), | ||||
|       name_id(std::numeric_limits<unsigned>::max()), forward_weight(INVALID_EDGE_WEIGHT), | ||||
|       reverse_weight(INVALID_EDGE_WEIGHT), forward_offset(0), reverse_offset(0), | ||||
|       packed_geometry_id(SPECIAL_EDGEID), component_id(std::numeric_limits<unsigned>::max()), | ||||
|       packed_geometry_id(SPECIAL_EDGEID), component{INVALID_COMPONENTID, false}, | ||||
|       fwd_segment_position(0), forward_travel_mode(TRAVEL_MODE_INACCESSIBLE), | ||||
|       backward_travel_mode(TRAVEL_MODE_INACCESSIBLE) | ||||
| { | ||||
| @ -96,11 +97,9 @@ bool PhantomNode::is_valid(const unsigned number_of_nodes) const | ||||
|     return location.is_valid() && | ||||
|            ((forward_node_id < number_of_nodes) || (reverse_node_id < number_of_nodes)) && | ||||
|            ((forward_weight != INVALID_EDGE_WEIGHT) || (reverse_weight != INVALID_EDGE_WEIGHT)) && | ||||
|            (name_id != INVALID_NAMEID); | ||||
|            (component.id != INVALID_COMPONENTID) && (name_id != INVALID_NAMEID); | ||||
| } | ||||
| 
 | ||||
| bool PhantomNode::is_in_tiny_component() const { return component_id != 0; } | ||||
| 
 | ||||
| bool PhantomNode::is_valid() const { return location.is_valid() && (name_id != INVALID_NAMEID); } | ||||
| 
 | ||||
| bool PhantomNode::operator==(const PhantomNode &other) const { return location == other.location; } | ||||
|  | ||||
| @ -47,6 +47,7 @@ struct PhantomNode | ||||
|                 int forward_offset, | ||||
|                 int reverse_offset, | ||||
|                 unsigned packed_geometry_id, | ||||
|                 bool is_tiny_component, | ||||
|                 unsigned component_id, | ||||
|                 FixedPointCoordinate &location, | ||||
|                 unsigned short fwd_segment_position, | ||||
| @ -68,7 +69,9 @@ struct PhantomNode | ||||
|         reverse_offset = other.reverse_offset; | ||||
| 
 | ||||
|         packed_geometry_id = other.packed_geometry_id; | ||||
|         component_id = other.component_id; | ||||
| 
 | ||||
|         component.id = other.component.id; | ||||
|         component.is_tiny = other.component.is_tiny; | ||||
| 
 | ||||
|         location = foot_point; | ||||
|         fwd_segment_position = other.fwd_segment_position; | ||||
| @ -85,7 +88,14 @@ struct PhantomNode | ||||
|     int forward_offset; | ||||
|     int reverse_offset; | ||||
|     unsigned packed_geometry_id; | ||||
|     unsigned component_id; | ||||
|     struct ComponentType { | ||||
|         uint32_t id : 31; | ||||
|         bool is_tiny : 1; | ||||
|     } component; | ||||
| // bit-fields are broken on Windows
 | ||||
| #ifndef _MSC_VER | ||||
|     static_assert(sizeof(ComponentType) == 4, "ComponentType needs to be 4 bytes big"); | ||||
| #endif | ||||
|     FixedPointCoordinate location; | ||||
|     unsigned short fwd_segment_position; | ||||
|     // note 4 bits would suffice for each,
 | ||||
| @ -105,23 +115,19 @@ struct PhantomNode | ||||
| 
 | ||||
|     bool is_valid() const; | ||||
| 
 | ||||
|     bool is_in_tiny_component() const; | ||||
| 
 | ||||
|     bool operator==(const PhantomNode &other) const; | ||||
| }; | ||||
| 
 | ||||
| #ifndef _MSC_VER | ||||
| static_assert(sizeof(PhantomNode) == 48, "PhantomNode has more padding than expected"); | ||||
| #endif | ||||
| 
 | ||||
| using PhantomNodeArray = std::vector<std::vector<PhantomNode>>; | ||||
| using PhantomNodePair = std::pair<PhantomNode, PhantomNode>; | ||||
| 
 | ||||
| class phantom_node_pair : public std::pair<PhantomNode, PhantomNode> | ||||
| struct PhantomNodeWithDistance | ||||
| { | ||||
| }; | ||||
| 
 | ||||
| struct PhantomNodeLists | ||||
| { | ||||
|     std::vector<PhantomNode> source_phantom_list; | ||||
|     std::vector<PhantomNode> target_phantom_list; | ||||
|     PhantomNode phantom_node; | ||||
|     double distance; | ||||
| }; | ||||
| 
 | ||||
| struct PhantomNodes | ||||
| @ -147,7 +153,7 @@ inline std::ostream &operator<<(std::ostream &out, const PhantomNode &pn) | ||||
|         << "fwd-o: " << pn.forward_offset << ", " | ||||
|         << "rev-o: " << pn.reverse_offset << ", " | ||||
|         << "geom: " << pn.packed_geometry_id << ", " | ||||
|         << "comp: " << pn.component_id << ", " | ||||
|         << "comp: " << pn.component.is_tiny << " / " << pn.component.id << ", " | ||||
|         << "pos: " << pn.fwd_segment_position << ", " | ||||
|         << "loc: " << pn.location; | ||||
|     return out; | ||||
|  | ||||
| @ -38,32 +38,32 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| struct QueryNode | ||||
| { | ||||
|     using key_type = NodeID; // type of NodeID
 | ||||
|     using key_type = OSMNodeID; // type of NodeID
 | ||||
|     using value_type = int;  // type of lat,lons
 | ||||
| 
 | ||||
|     explicit QueryNode(int lat, int lon, NodeID node_id) : lat(lat), lon(lon), node_id(node_id) {} | ||||
|     explicit QueryNode(int lat, int lon, OSMNodeID node_id) : lat(lat), lon(lon), node_id(node_id) {} | ||||
|     QueryNode() | ||||
|         : lat(std::numeric_limits<int>::max()), lon(std::numeric_limits<int>::max()), | ||||
|           node_id(std::numeric_limits<unsigned>::max()) | ||||
|           node_id(SPECIAL_OSM_NODEID) | ||||
|     { | ||||
|     } | ||||
| 
 | ||||
|     int lat; | ||||
|     int lon; | ||||
|     NodeID node_id; | ||||
|     OSMNodeID node_id; | ||||
| 
 | ||||
|     static QueryNode min_value() | ||||
|     { | ||||
|         return QueryNode(static_cast<int>(-90 * COORDINATE_PRECISION), | ||||
|                          static_cast<int>(-180 * COORDINATE_PRECISION), | ||||
|                          std::numeric_limits<NodeID>::min()); | ||||
|                          MIN_OSM_NODEID); | ||||
|     } | ||||
| 
 | ||||
|     static QueryNode max_value() | ||||
|     { | ||||
|         return QueryNode(static_cast<int>(90 * COORDINATE_PRECISION), | ||||
|                          static_cast<int>(180 * COORDINATE_PRECISION), | ||||
|                          std::numeric_limits<NodeID>::max()); | ||||
|                          MAX_OSM_NODEID); | ||||
|     } | ||||
| 
 | ||||
|     value_type operator[](const std::size_t n) const | ||||
|  | ||||
| @ -33,9 +33,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "shared_memory_vector_wrapper.hpp" | ||||
| 
 | ||||
| #include <fstream> | ||||
| #include <vector> | ||||
| #include <array> | ||||
| 
 | ||||
| /*
 | ||||
|  * These pre-declarations are needed because parsing C++ is hard | ||||
|  * and otherwise the compiler gets confused. | ||||
| @ -82,7 +80,8 @@ template <unsigned BLOCK_SIZE, bool USE_SHARED_MEMORY> class RangeTable | ||||
|     } | ||||
| 
 | ||||
|     // construct table from length vector
 | ||||
|     explicit RangeTable(const std::vector<unsigned> &lengths) | ||||
|     template<typename VectorT> | ||||
|     explicit RangeTable(const VectorT &lengths) | ||||
|     { | ||||
|         const unsigned number_of_blocks = [&lengths]() | ||||
|         { | ||||
|  | ||||
| @ -42,8 +42,8 @@ RasterSource::RasterSource(RasterGrid _raster_data, | ||||
|                            int _ymin, | ||||
|                            int _ymax) | ||||
|     : xstep(calcSize(_xmin, _xmax, _width)), ystep(calcSize(_ymin, _ymax, _height)), | ||||
|       raster_data(_raster_data), width(_width), height(_height), xmin(_xmin), xmax(_xmax), | ||||
|       ymin(_ymin), ymax(_ymax) | ||||
|       raster_data(std::move(_raster_data)), width(_width), height(_height), xmin(_xmin), | ||||
|       xmax(_xmax), ymin(_ymin), ymax(_ymax) | ||||
| { | ||||
|     BOOST_ASSERT(xstep != 0); | ||||
|     BOOST_ASSERT(ystep != 0); | ||||
|  | ||||
| @ -121,35 +121,35 @@ struct RectangleInt2D | ||||
|         switch (d) | ||||
|         { | ||||
|         case NORTH: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(max_lat, location.lon)); | ||||
|             break; | ||||
|         case SOUTH: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(min_lat, location.lon)); | ||||
|             break; | ||||
|         case WEST: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(location.lat, min_lon)); | ||||
|             break; | ||||
|         case EAST: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(location.lat, max_lon)); | ||||
|             break; | ||||
|         case NORTH_EAST: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(max_lat, max_lon)); | ||||
|             break; | ||||
|         case NORTH_WEST: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(max_lat, min_lon)); | ||||
|             break; | ||||
|         case SOUTH_EAST: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(min_lat, max_lon)); | ||||
|             break; | ||||
|         case SOUTH_WEST: | ||||
|             min_dist = coordinate_calculation::euclidean_distance( | ||||
|             min_dist = coordinate_calculation::great_circle_distance( | ||||
|                 location, FixedPointCoordinate(min_lat, min_lon)); | ||||
|             break; | ||||
|         default: | ||||
| @ -170,25 +170,25 @@ struct RectangleInt2D | ||||
|         const FixedPointCoordinate lower_right(min_lat, max_lon); | ||||
|         const FixedPointCoordinate lower_left(min_lat, min_lon); | ||||
| 
 | ||||
|         min_max_dist = | ||||
|             std::min(min_max_dist, | ||||
|                      std::max(coordinate_calculation::euclidean_distance(location, upper_left), | ||||
|                               coordinate_calculation::euclidean_distance(location, upper_right))); | ||||
|         min_max_dist = std::min( | ||||
|             min_max_dist, | ||||
|             std::max(coordinate_calculation::great_circle_distance(location, upper_left), | ||||
|                      coordinate_calculation::great_circle_distance(location, upper_right))); | ||||
| 
 | ||||
|         min_max_dist = std::min( | ||||
|             min_max_dist, | ||||
|             std::max(coordinate_calculation::great_circle_distance(location, upper_right), | ||||
|                      coordinate_calculation::great_circle_distance(location, lower_right))); | ||||
| 
 | ||||
|         min_max_dist = | ||||
|             std::min(min_max_dist, | ||||
|                      std::max(coordinate_calculation::euclidean_distance(location, upper_right), | ||||
|                               coordinate_calculation::euclidean_distance(location, lower_right))); | ||||
|                      std::max(coordinate_calculation::great_circle_distance(location, lower_right), | ||||
|                               coordinate_calculation::great_circle_distance(location, lower_left))); | ||||
| 
 | ||||
|         min_max_dist = | ||||
|             std::min(min_max_dist, | ||||
|                      std::max(coordinate_calculation::euclidean_distance(location, lower_right), | ||||
|                               coordinate_calculation::euclidean_distance(location, lower_left))); | ||||
| 
 | ||||
|         min_max_dist = | ||||
|             std::min(min_max_dist, | ||||
|                      std::max(coordinate_calculation::euclidean_distance(location, lower_left), | ||||
|                               coordinate_calculation::euclidean_distance(location, upper_left))); | ||||
|                      std::max(coordinate_calculation::great_circle_distance(location, lower_left), | ||||
|                               coordinate_calculation::great_circle_distance(location, upper_left))); | ||||
|         return min_max_dist; | ||||
|     } | ||||
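
These bounds now use `great_circle_distance` instead of `euclidean_distance`, so the R-tree's distance estimates respect the curvature of the earth rather than treating lat/lon as a flat plane. For orientation only, a hedged haversine sketch of a great-circle distance on plain degree inputs (OSRM's real helper operates on `FixedPointCoordinate`s and its own earth-radius constant):

```cpp
#include <cmath>

// Illustrative haversine great-circle distance in metres; latitudes/longitudes in degrees.
double great_circle_distance_m(double lat1, double lon1, double lat2, double lon2)
{
    constexpr double pi = 3.14159265358979323846;
    constexpr double deg_to_rad = pi / 180.0;
    constexpr double earth_radius_m = 6371000.0; // approximate mean radius

    const double dlat = (lat2 - lat1) * deg_to_rad;
    const double dlon = (lon2 - lon1) * deg_to_rad;
    const double a = std::sin(dlat / 2) * std::sin(dlat / 2) +
                     std::cos(lat1 * deg_to_rad) * std::cos(lat2 * deg_to_rad) *
                         std::sin(dlon / 2) * std::sin(dlon / 2);
    return earth_radius_m * 2.0 * std::atan2(std::sqrt(a), std::sqrt(1.0 - a));
}
```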
| 
 | ||||
| @ -198,14 +198,6 @@ struct RectangleInt2D | ||||
|         const bool lons_contained = (location.lon >= min_lon) && (location.lon <= max_lon); | ||||
|         return lats_contained && lons_contained; | ||||
|     } | ||||
| 
 | ||||
|     friend std::ostream &operator<<(std::ostream &out, const RectangleInt2D &rect) | ||||
|     { | ||||
|         out << rect.min_lat / COORDINATE_PRECISION << "," << rect.min_lon / COORDINATE_PRECISION | ||||
|             << " " << rect.max_lat / COORDINATE_PRECISION << "," | ||||
|             << rect.max_lon / COORDINATE_PRECISION; | ||||
|         return out; | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
| #endif | ||||
|  | ||||
| @ -36,8 +36,8 @@ struct TurnRestriction | ||||
| { | ||||
|     union WayOrNode | ||||
|     { | ||||
|         NodeID node; | ||||
|         EdgeID way; | ||||
|         OSMNodeID_weak node; | ||||
|         OSMEdgeID_weak way; | ||||
|     }; | ||||
|     WayOrNode via; | ||||
|     WayOrNode from; | ||||
|  | ||||
| @ -33,10 +33,16 @@ RestrictionMap::RestrictionMap(const std::vector<TurnRestriction> &restriction_l | ||||
|     // a pair of starting edge and a list of all end nodes
 | ||||
|     for (auto &restriction : restriction_list) | ||||
|     { | ||||
|         // This downcasting is OK because when this is called, the node IDs have been
 | ||||
|         // renumbered into internal values, which should be well under 2^32
 | ||||
|         // This will be a problem if we ever have more than 2^32 internal nodes
 | ||||
|         BOOST_ASSERT(restriction.from.node < std::numeric_limits<NodeID>::max()); | ||||
|         BOOST_ASSERT(restriction.via.node < std::numeric_limits<NodeID>::max()); | ||||
|         m_restriction_start_nodes.insert(restriction.from.node); | ||||
|         m_no_turn_via_node_set.insert(restriction.via.node); | ||||
| 
 | ||||
|         RestrictionSource restriction_source = {restriction.from.node, restriction.via.node}; | ||||
|         // This explicit downcasting is also OK for the same reason.
 | ||||
|         RestrictionSource restriction_source = {static_cast<NodeID>(restriction.from.node), static_cast<NodeID>(restriction.via.node)}; | ||||
| 
 | ||||
|         std::size_t index; | ||||
|         auto restriction_iter = m_restriction_map.find(restriction_source); | ||||
| @ -62,6 +68,7 @@ RestrictionMap::RestrictionMap(const std::vector<TurnRestriction> &restriction_l | ||||
|             } | ||||
|         } | ||||
|         ++m_count; | ||||
|         BOOST_ASSERT(restriction.to.node < std::numeric_limits<NodeID>::max()); | ||||
|         m_restriction_bucket_list.at(index) | ||||
|             .emplace_back(restriction.to.node, restriction.flags.is_only); | ||||
|     } | ||||
|  | ||||
| @ -28,6 +28,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <boost/fusion/container/vector.hpp> | ||||
| #include <boost/fusion/sequence/intrinsic.hpp> | ||||
| #include <boost/fusion/include/at_c.hpp> | ||||
| #include <boost/spirit/include/qi.hpp> | ||||
| 
 | ||||
| #include <osrm/route_parameters.hpp> | ||||
| 
 | ||||
| @ -60,11 +61,9 @@ void RouteParameters::setAlternateRouteFlag(const bool flag) { alternate_route = | ||||
| 
 | ||||
| void RouteParameters::setUTurn(const bool flag) | ||||
| { | ||||
|     uturns.resize(coordinates.size(), uturn_default); | ||||
|     if (!uturns.empty()) | ||||
|     { | ||||
|         uturns.back() = flag; | ||||
|     } | ||||
|     // the API grammar should make sure this never happens
 | ||||
|     BOOST_ASSERT(!uturns.empty()); | ||||
|     uturns.back() = flag; | ||||
| } | ||||
| 
 | ||||
| void RouteParameters::setAllUTurns(const bool flag) | ||||
| @ -117,6 +116,19 @@ void RouteParameters::addTimestamp(const unsigned timestamp) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| void RouteParameters::addBearing( | ||||
|     const boost::fusion::vector<int, boost::optional<int>> &received_bearing, | ||||
|         boost::spirit::qi::unused_type /* unused */, bool& pass) | ||||
| { | ||||
|     pass = false; | ||||
|     const int bearing = boost::fusion::at_c<0>(received_bearing); | ||||
|     const boost::optional<int> range = boost::fusion::at_c<1>(received_bearing); | ||||
|     if (bearing < 0 || bearing > 359) return; | ||||
|     if (range && (*range < 0 || *range > 180)) return; | ||||
|     bearings.emplace_back(std::make_pair(bearing,range)); | ||||
|     pass = true; | ||||
| } | ||||
| 
 | ||||
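addBearing is a Boost.Spirit.Qi semantic action: it receives the parsed (bearing, optional range) pair, validates both values, and leaves pass set to false so the enclosing grammar rule fails on bad input. The validation itself can be sketched in isolation; the function below is an illustrative standalone version, assuming bearings in [0, 359] degrees and an optional tolerance in [0, 180]:

    #include <boost/optional.hpp>
    #include <utility>
    #include <vector>

    // Standalone sketch of the range checks performed in addBearing above.
    bool validate_and_store_bearing(const int bearing,
                                    const boost::optional<int> range,
                                    std::vector<std::pair<int, boost::optional<int>>> &bearings)
    {
        if (bearing < 0 || bearing > 359)
            return false;                       // heading must be a compass value
        if (range && (*range < 0 || *range > 180))
            return false;                       // tolerance is at most a half circle
        bearings.emplace_back(bearing, range);  // store as (value, optional tolerance)
        return true;
    }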
| void RouteParameters::setLanguage(const std::string &language_string) | ||||
| { | ||||
|     language = language_string; | ||||
| @ -132,6 +144,31 @@ void RouteParameters::addCoordinate( | ||||
|     coordinates.emplace_back( | ||||
|         static_cast<int>(COORDINATE_PRECISION * boost::fusion::at_c<0>(received_coordinates)), | ||||
|         static_cast<int>(COORDINATE_PRECISION * boost::fusion::at_c<1>(received_coordinates))); | ||||
|     is_source.push_back(true); | ||||
|     is_destination.push_back(true); | ||||
|     uturns.push_back(uturn_default); | ||||
| } | ||||
| 
 | ||||
| void RouteParameters::addDestination( | ||||
|     const boost::fusion::vector<double, double> &received_coordinates) | ||||
| { | ||||
|     coordinates.emplace_back( | ||||
|         static_cast<int>(COORDINATE_PRECISION * boost::fusion::at_c<0>(received_coordinates)), | ||||
|         static_cast<int>(COORDINATE_PRECISION * boost::fusion::at_c<1>(received_coordinates))); | ||||
|     is_source.push_back(false); | ||||
|     is_destination.push_back(true); | ||||
|     uturns.push_back(uturn_default); | ||||
| } | ||||
| 
 | ||||
| void RouteParameters::addSource( | ||||
|     const boost::fusion::vector<double, double> &received_coordinates) | ||||
| { | ||||
|     coordinates.emplace_back( | ||||
|         static_cast<int>(COORDINATE_PRECISION * boost::fusion::at_c<0>(received_coordinates)), | ||||
|         static_cast<int>(COORDINATE_PRECISION * boost::fusion::at_c<1>(received_coordinates))); | ||||
|     is_source.push_back(true); | ||||
|     is_destination.push_back(false); | ||||
|     uturns.push_back(uturn_default); | ||||
| } | ||||
| 
 | ||||
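addCoordinate, addSource and addDestination keep coordinates, is_source, is_destination and uturns as parallel vectors: every parsed location pushes exactly one entry into each, so later code can index them together. A simplified sketch of that bookkeeping (the struct below is illustrative, not the real RouteParameters):

    #include <utility>
    #include <vector>

    struct ParamsSketch
    {
        std::vector<std::pair<int, int>> coordinates;  // fixed-point lat/lon
        std::vector<bool> is_source;
        std::vector<bool> is_destination;
        std::vector<bool> uturns;
        bool uturn_default = false;

        void add(const int lat, const int lon, const bool source, const bool destination)
        {
            coordinates.emplace_back(lat, lon);
            is_source.push_back(source);
            is_destination.push_back(destination);
            uturns.push_back(uturn_default);  // one flag per coordinate keeps the vectors in sync
        }
    };

In these terms, addCoordinate corresponds to add(lat, lon, true, true), addSource to add(lat, lon, true, false) and addDestination to add(lat, lon, false, true).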
| void RouteParameters::getCoordinatesFromGeometry(const std::string &geometry_string) | ||||
| @ -139,3 +176,4 @@ void RouteParameters::getCoordinatesFromGeometry(const std::string &geometry_str | ||||
|     PolylineCompressor pc; | ||||
|     coordinates = pc.decode_string(geometry_string); | ||||
| } | ||||
| 
 | ||||
|  | ||||
| @ -43,7 +43,8 @@ struct SegmentInformation | ||||
|     NodeID name_id; | ||||
|     EdgeWeight duration; | ||||
|     float length; | ||||
|     short bearing; // more than enough [0..3600] fits into 12 bits
 | ||||
|     short pre_turn_bearing; // more than enough [0..3600] fits into 12 bits
 | ||||
|     short post_turn_bearing; | ||||
|     TurnInstruction turn_instruction; | ||||
|     TravelMode travel_mode; | ||||
|     bool necessary; | ||||
| @ -58,7 +59,7 @@ struct SegmentInformation | ||||
|                                 const bool is_via_location, | ||||
|                                 const TravelMode travel_mode) | ||||
|         : location(std::move(location)), name_id(name_id), duration(duration), length(length), | ||||
|           bearing(0), turn_instruction(turn_instruction), travel_mode(travel_mode), | ||||
|           pre_turn_bearing(0), post_turn_bearing(0), turn_instruction(turn_instruction), travel_mode(travel_mode), | ||||
|           necessary(necessary), is_via_location(is_via_location) | ||||
|     { | ||||
|     } | ||||
| @ -70,7 +71,7 @@ struct SegmentInformation | ||||
|                                 const TurnInstruction turn_instruction, | ||||
|                                 const TravelMode travel_mode) | ||||
|         : location(std::move(location)), name_id(name_id), duration(duration), length(length), | ||||
|           bearing(0), turn_instruction(turn_instruction), travel_mode(travel_mode), | ||||
|           pre_turn_bearing(0), post_turn_bearing(0), turn_instruction(turn_instruction), travel_mode(travel_mode), | ||||
|           necessary(turn_instruction != TurnInstruction::NoTurn), is_via_location(false) | ||||
|     { | ||||
|     } | ||||
|  | ||||
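SegmentInformation now stores two bearings, one taken before and one after the turn. The existing comment notes that [0..3600] fits into 12 bits, which points to bearings being kept in tenths of a degree; the encoding below is a hedged sketch of that convention and is not taken from the diff itself:

    #include <cstdint>

    // Assumed convention: a non-negative bearing in tenths of a degree, so that
    // 0.0..360.0 degrees maps to 0..3600 and fits comfortably into a short.
    inline short encode_bearing(const double degrees)
    {
        return static_cast<short>(degrees * 10.0 + 0.5);  // round to the nearest tenth
    }

    inline double decode_bearing(const short stored) { return stored / 10.0; }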
| @ -30,19 +30,14 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include "deallocating_vector.hpp" | ||||
| #include "hilbert_value.hpp" | ||||
| #include "phantom_node.hpp" | ||||
| #include "query_node.hpp" | ||||
| #include "rectangle.hpp" | ||||
| #include "shared_memory_factory.hpp" | ||||
| #include "shared_memory_vector_wrapper.hpp" | ||||
| #include "upper_bound.hpp" | ||||
| 
 | ||||
| #include "../util/floating_point.hpp" | ||||
| #include "../util/bearing.hpp" | ||||
| #include "../util/integer_range.hpp" | ||||
| #include "../util/mercator.hpp" | ||||
| #include "../util/osrm_exception.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| #include "../util/timing_util.hpp" | ||||
| #include "../typedefs.h" | ||||
| 
 | ||||
| #include <osrm/coordinate.hpp> | ||||
| @ -50,7 +45,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <boost/assert.hpp> | ||||
| #include <boost/filesystem.hpp> | ||||
| #include <boost/filesystem/fstream.hpp> | ||||
| #include <boost/thread.hpp> | ||||
| 
 | ||||
| #include <tbb/parallel_for.h> | ||||
| #include <tbb/parallel_sort.h> | ||||
| @ -65,7 +59,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <string> | ||||
| #include <vector> | ||||
| 
 | ||||
| // Implements a static, i.e. packed, R-tree
 | ||||
| // Static RTree for serving nearest neighbour queries
 | ||||
| template <class EdgeDataT, | ||||
|           class CoordinateListT = std::vector<FixedPointCoordinate>, | ||||
|           bool UseSharedMemory = false, | ||||
| @ -74,198 +68,16 @@ template <class EdgeDataT, | ||||
| class StaticRTree | ||||
| { | ||||
|   public: | ||||
|     struct RectangleInt2D | ||||
|     { | ||||
|         RectangleInt2D() : min_lon(INT_MAX), max_lon(INT_MIN), min_lat(INT_MAX), max_lat(INT_MIN) {} | ||||
|     using Rectangle = RectangleInt2D; | ||||
|     using EdgeData = EdgeDataT; | ||||
|     using CoordinateList = CoordinateListT; | ||||
| 
 | ||||
|         int32_t min_lon, max_lon; | ||||
|         int32_t min_lat, max_lat; | ||||
| 
 | ||||
|         inline void InitializeMBRectangle(const std::array<EdgeDataT, LEAF_NODE_SIZE> &objects, | ||||
|                                           const uint32_t element_count, | ||||
|                                           const std::vector<QueryNode> &coordinate_list) | ||||
|         { | ||||
|             for (uint32_t i = 0; i < element_count; ++i) | ||||
|             { | ||||
|                 min_lon = std::min(min_lon, std::min(coordinate_list.at(objects[i].u).lon, | ||||
|                                                      coordinate_list.at(objects[i].v).lon)); | ||||
|                 max_lon = std::max(max_lon, std::max(coordinate_list.at(objects[i].u).lon, | ||||
|                                                      coordinate_list.at(objects[i].v).lon)); | ||||
| 
 | ||||
|                 min_lat = std::min(min_lat, std::min(coordinate_list.at(objects[i].u).lat, | ||||
|                                                      coordinate_list.at(objects[i].v).lat)); | ||||
|                 max_lat = std::max(max_lat, std::max(coordinate_list.at(objects[i].u).lat, | ||||
|                                                      coordinate_list.at(objects[i].v).lat)); | ||||
|             } | ||||
|             BOOST_ASSERT(min_lat != std::numeric_limits<int>::min()); | ||||
|             BOOST_ASSERT(min_lon != std::numeric_limits<int>::min()); | ||||
|             BOOST_ASSERT(max_lat != std::numeric_limits<int>::min()); | ||||
|             BOOST_ASSERT(max_lon != std::numeric_limits<int>::min()); | ||||
|         } | ||||
| 
 | ||||
|         inline void MergeBoundingBoxes(const RectangleInt2D &other) | ||||
|         { | ||||
|             min_lon = std::min(min_lon, other.min_lon); | ||||
|             max_lon = std::max(max_lon, other.max_lon); | ||||
|             min_lat = std::min(min_lat, other.min_lat); | ||||
|             max_lat = std::max(max_lat, other.max_lat); | ||||
|             BOOST_ASSERT(min_lat != std::numeric_limits<int>::min()); | ||||
|             BOOST_ASSERT(min_lon != std::numeric_limits<int>::min()); | ||||
|             BOOST_ASSERT(max_lat != std::numeric_limits<int>::min()); | ||||
|             BOOST_ASSERT(max_lon != std::numeric_limits<int>::min()); | ||||
|         } | ||||
| 
 | ||||
|         inline FixedPointCoordinate Centroid() const | ||||
|         { | ||||
|             FixedPointCoordinate centroid; | ||||
|             // The coordinates of the midpoints are given by:
 | ||||
|             // x = (x1 + x2) /2 and y = (y1 + y2) /2.
 | ||||
|             centroid.lon = (min_lon + max_lon) / 2; | ||||
|             centroid.lat = (min_lat + max_lat) / 2; | ||||
|             return centroid; | ||||
|         } | ||||
| 
 | ||||
|         inline bool Intersects(const RectangleInt2D &other) const | ||||
|         { | ||||
|             FixedPointCoordinate upper_left(other.max_lat, other.min_lon); | ||||
|             FixedPointCoordinate upper_right(other.max_lat, other.max_lon); | ||||
|             FixedPointCoordinate lower_right(other.min_lat, other.max_lon); | ||||
|             FixedPointCoordinate lower_left(other.min_lat, other.min_lon); | ||||
| 
 | ||||
|             return (Contains(upper_left) || Contains(upper_right) || Contains(lower_right) || | ||||
|                     Contains(lower_left)); | ||||
|         } | ||||
| 
 | ||||
|         inline float GetMinDist(const FixedPointCoordinate &location) const | ||||
|         { | ||||
|             const bool is_contained = Contains(location); | ||||
|             if (is_contained) | ||||
|             { | ||||
|                 return 0.; | ||||
|             } | ||||
| 
 | ||||
|             enum Direction | ||||
|             { | ||||
|                 INVALID = 0, | ||||
|                 NORTH = 1, | ||||
|                 SOUTH = 2, | ||||
|                 EAST = 4, | ||||
|                 NORTH_EAST = 5, | ||||
|                 SOUTH_EAST = 6, | ||||
|                 WEST = 8, | ||||
|                 NORTH_WEST = 9, | ||||
|                 SOUTH_WEST = 10 | ||||
|             }; | ||||
| 
 | ||||
|             Direction d = INVALID; | ||||
|             if (location.lat > max_lat) | ||||
|                 d = (Direction)(d | NORTH); | ||||
|             else if (location.lat < min_lat) | ||||
|                 d = (Direction)(d | SOUTH); | ||||
|             if (location.lon > max_lon) | ||||
|                 d = (Direction)(d | EAST); | ||||
|             else if (location.lon < min_lon) | ||||
|                 d = (Direction)(d | WEST); | ||||
| 
 | ||||
|             BOOST_ASSERT(d != INVALID); | ||||
| 
 | ||||
|             float min_dist = std::numeric_limits<float>::max(); | ||||
|             switch (d) | ||||
|             { | ||||
|             case NORTH: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(max_lat, location.lon)); | ||||
|                 break; | ||||
|             case SOUTH: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(min_lat, location.lon)); | ||||
|                 break; | ||||
|             case WEST: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(location.lat, min_lon)); | ||||
|                 break; | ||||
|             case EAST: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(location.lat, max_lon)); | ||||
|                 break; | ||||
|             case NORTH_EAST: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(max_lat, max_lon)); | ||||
|                 break; | ||||
|             case NORTH_WEST: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(max_lat, min_lon)); | ||||
|                 break; | ||||
|             case SOUTH_EAST: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(min_lat, max_lon)); | ||||
|                 break; | ||||
|             case SOUTH_WEST: | ||||
|                 min_dist = coordinate_calculation::euclidean_distance( | ||||
|                     location, FixedPointCoordinate(min_lat, min_lon)); | ||||
|                 break; | ||||
|             default: | ||||
|                 break; | ||||
|             } | ||||
| 
 | ||||
|             BOOST_ASSERT(min_dist != std::numeric_limits<float>::max()); | ||||
| 
 | ||||
|             return min_dist; | ||||
|         } | ||||
| 
 | ||||
|         inline float GetMinMaxDist(const FixedPointCoordinate &location) const | ||||
|         { | ||||
|             float min_max_dist = std::numeric_limits<float>::max(); | ||||
|             // Get minmax distance to each of the four sides
 | ||||
|             const FixedPointCoordinate upper_left(max_lat, min_lon); | ||||
|             const FixedPointCoordinate upper_right(max_lat, max_lon); | ||||
|             const FixedPointCoordinate lower_right(min_lat, max_lon); | ||||
|             const FixedPointCoordinate lower_left(min_lat, min_lon); | ||||
| 
 | ||||
|             min_max_dist = std::min( | ||||
|                 min_max_dist, | ||||
|                 std::max(coordinate_calculation::euclidean_distance(location, upper_left), | ||||
|                          coordinate_calculation::euclidean_distance(location, upper_right))); | ||||
| 
 | ||||
|             min_max_dist = std::min( | ||||
|                 min_max_dist, | ||||
|                 std::max(coordinate_calculation::euclidean_distance(location, upper_right), | ||||
|                          coordinate_calculation::euclidean_distance(location, lower_right))); | ||||
| 
 | ||||
|             min_max_dist = std::min( | ||||
|                 min_max_dist, | ||||
|                 std::max(coordinate_calculation::euclidean_distance(location, lower_right), | ||||
|                          coordinate_calculation::euclidean_distance(location, lower_left))); | ||||
| 
 | ||||
|             min_max_dist = std::min( | ||||
|                 min_max_dist, | ||||
|                 std::max(coordinate_calculation::euclidean_distance(location, lower_left), | ||||
|                          coordinate_calculation::euclidean_distance(location, upper_left))); | ||||
|             return min_max_dist; | ||||
|         } | ||||
| 
 | ||||
|         inline bool Contains(const FixedPointCoordinate &location) const | ||||
|         { | ||||
|             const bool lats_contained = (location.lat >= min_lat) && (location.lat <= max_lat); | ||||
|             const bool lons_contained = (location.lon >= min_lon) && (location.lon <= max_lon); | ||||
|             return lats_contained && lons_contained; | ||||
|         } | ||||
| 
 | ||||
|         inline friend std::ostream &operator<<(std::ostream &out, const RectangleInt2D &rect) | ||||
|         { | ||||
|             out << rect.min_lat / COORDINATE_PRECISION << "," << rect.min_lon / COORDINATE_PRECISION | ||||
|                 << " " << rect.max_lat / COORDINATE_PRECISION << "," | ||||
|                 << rect.max_lon / COORDINATE_PRECISION; | ||||
|             return out; | ||||
|         } | ||||
|     }; | ||||
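The removed GetMinDist classifies the query point into one of eight sectors around the rectangle and measures the distance to the nearest edge or corner. An equivalent, shorter formulation clamps the point into the rectangle and measures the distance to the clamped point; the sketch below uses that alternative with plain planar distance on the fixed-point values, ignoring the coordinate_calculation/COORDINATE_PRECISION details of the real code:

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Minimum distance from a point to an axis-aligned rectangle by clamping:
    // the nearest point of the box is the query point clamped into the box.
    struct RectSketch
    {
        std::int32_t min_lon, max_lon, min_lat, max_lat;

        double MinDistTo(const std::int32_t lat, const std::int32_t lon) const
        {
            const double clamped_lat = std::min(std::max(lat, min_lat), max_lat);
            const double clamped_lon = std::min(std::max(lon, min_lon), max_lon);
            const double d_lat = lat - clamped_lat;
            const double d_lon = lon - clamped_lon;
            return std::sqrt(d_lat * d_lat + d_lon * d_lon);  // 0 if the point lies inside
        }
    };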
| 
 | ||||
|     using RectangleT = RectangleInt2D; | ||||
|     static constexpr std::size_t MAX_CHECKED_ELEMENTS = 4 * LEAF_NODE_SIZE; | ||||
| 
 | ||||
|     struct TreeNode | ||||
|     { | ||||
|         TreeNode() : child_count(0), child_is_on_disk(false) {} | ||||
|         RectangleT minimum_bounding_rectangle; | ||||
|         Rectangle minimum_bounding_rectangle; | ||||
|         uint32_t child_count : 31; | ||||
|         bool child_is_on_disk : 1; | ||||
|         uint32_t children[BRANCHING_FACTOR]; | ||||
| @ -297,40 +109,17 @@ class StaticRTree | ||||
|         std::array<EdgeDataT, LEAF_NODE_SIZE> objects; | ||||
|     }; | ||||
| 
 | ||||
|     using QueryNodeType = mapbox::util::variant<TreeNode, EdgeDataT>; | ||||
|     struct QueryCandidate | ||||
|     { | ||||
|         explicit QueryCandidate(const float dist, const uint32_t n_id) | ||||
|             : min_dist(dist), node_id(n_id) | ||||
|         { | ||||
|         } | ||||
|         QueryCandidate() : min_dist(std::numeric_limits<float>::max()), node_id(UINT_MAX) {} | ||||
|         float min_dist; | ||||
|         uint32_t node_id; | ||||
|         inline bool operator<(const QueryCandidate &other) const | ||||
|         { | ||||
|             // Attn: this is reversed order. std::pq is a max pq!
 | ||||
|             return other.min_dist < min_dist; | ||||
|         } | ||||
|     }; | ||||
| 
 | ||||
|     using IncrementalQueryNodeType = mapbox::util::variant<TreeNode, EdgeDataT>; | ||||
|     struct IncrementalQueryCandidate | ||||
|     { | ||||
|         explicit IncrementalQueryCandidate(const float dist, IncrementalQueryNodeType node) | ||||
|             : min_dist(dist), node(std::move(node)) | ||||
|         { | ||||
|         } | ||||
| 
 | ||||
|         IncrementalQueryCandidate() : min_dist(std::numeric_limits<float>::max()) {} | ||||
| 
 | ||||
|         inline bool operator<(const IncrementalQueryCandidate &other) const | ||||
|         { | ||||
|             // Attn: this is reversed order. std::pq is a max pq!
 | ||||
|             return other.min_dist < min_dist; | ||||
|         } | ||||
| 
 | ||||
|         float min_dist; | ||||
|         IncrementalQueryNodeType node; | ||||
|         QueryNodeType node; | ||||
|     }; | ||||
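QueryCandidate's operator< is deliberately reversed because std::priority_queue is a max-heap by default; flipping the comparison makes the queue hand out the candidate with the smallest min_dist first. A self-contained illustration of the same trick:

    #include <iostream>
    #include <queue>

    struct Candidate
    {
        float min_dist;
        int id;
        // Reversed comparison: the queue's "largest" element is the nearest candidate.
        bool operator<(const Candidate &other) const { return other.min_dist < min_dist; }
    };

    int main()
    {
        std::priority_queue<Candidate> queue;
        queue.push({3.5f, 1});
        queue.push({0.5f, 2});
        queue.push({2.0f, 3});
        std::cout << queue.top().id << '\n';  // prints 2, the candidate with the smallest distance
    }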
| 
 | ||||
|     typename ShM<TreeNode, UseSharedMemory>::vector m_search_tree; | ||||
| @ -343,18 +132,14 @@ class StaticRTree | ||||
|     StaticRTree() = delete; | ||||
|     StaticRTree(const StaticRTree &) = delete; | ||||
| 
 | ||||
|     template <typename CoordinateT> | ||||
|     // Construct a packed Hilbert-R-Tree with Kamel-Faloutsos algorithm [1]
 | ||||
|     explicit StaticRTree(const std::vector<EdgeDataT> &input_data_vector, | ||||
|                          const std::string &tree_node_filename, | ||||
|                          const std::string &leaf_node_filename, | ||||
|                          const std::vector<QueryNode> &coordinate_list) | ||||
|                          const std::vector<CoordinateT> &coordinate_list) | ||||
|         : m_element_count(input_data_vector.size()), m_leaf_node_filename(leaf_node_filename) | ||||
|     { | ||||
|         SimpleLogger().Write() << "constructing r-tree of " << m_element_count | ||||
|                                << " edge elements build on-top of " << coordinate_list.size() | ||||
|                                << " coordinates"; | ||||
| 
 | ||||
|         TIMER_START(construction); | ||||
|         std::vector<WrappedInputElement> input_wrapper_vector(m_element_count); | ||||
| 
 | ||||
|         HilbertCode get_hilbert_number; | ||||
| @ -362,8 +147,8 @@ class StaticRTree | ||||
|         // generate auxiliary vector of hilbert-values
 | ||||
|         tbb::parallel_for( | ||||
|             tbb::blocked_range<uint64_t>(0, m_element_count), | ||||
|             [&input_data_vector, &input_wrapper_vector, &get_hilbert_number, &coordinate_list]( | ||||
|                 const tbb::blocked_range<uint64_t> &range) | ||||
|             [&input_data_vector, &input_wrapper_vector, &get_hilbert_number, | ||||
|              &coordinate_list](const tbb::blocked_range<uint64_t> &range) | ||||
|             { | ||||
|                 for (uint64_t element_counter = range.begin(), end = range.end(); | ||||
|                      element_counter != end; ++element_counter) | ||||
| @ -402,8 +187,6 @@ class StaticRTree | ||||
| 
 | ||||
|             LeafNode current_leaf; | ||||
|             TreeNode current_node; | ||||
|             // SimpleLogger().Write() << "reading " << tree_size << " tree nodes in " <<
 | ||||
|             // (sizeof(TreeNode)*tree_size) << " bytes";
 | ||||
|             for (uint32_t current_element_index = 0; LEAF_NODE_SIZE > current_element_index; | ||||
|                  ++current_element_index) | ||||
|             { | ||||
| @ -497,13 +280,8 @@ class StaticRTree | ||||
|         tree_node_file.write((char *)&m_search_tree[0], sizeof(TreeNode) * size_of_tree); | ||||
|         // close tree node file.
 | ||||
|         tree_node_file.close(); | ||||
| 
 | ||||
|         TIMER_STOP(construction); | ||||
|         SimpleLogger().Write() << "finished r-tree construction in " << TIMER_SEC(construction) | ||||
|                                << " seconds"; | ||||
|     } | ||||
| 
 | ||||
|     // Read-only operation for queries
 | ||||
|     explicit StaticRTree(const boost::filesystem::path &node_file, | ||||
|                          const boost::filesystem::path &leaf_file, | ||||
|                          const std::shared_ptr<CoordinateListT> coordinate_list) | ||||
| @ -543,9 +321,6 @@ class StaticRTree | ||||
| 
 | ||||
|         leaves_stream.open(leaf_file, std::ios::binary); | ||||
|         leaves_stream.read((char *)&m_element_count, sizeof(uint64_t)); | ||||
| 
 | ||||
|         // SimpleLogger().Write() << tree_size << " nodes in search tree";
 | ||||
|         // SimpleLogger().Write() << m_element_count << " elements in leafs";
 | ||||
|     } | ||||
| 
 | ||||
|     explicit StaticRTree(TreeNode *tree_node_ptr, | ||||
| @ -567,108 +342,47 @@ class StaticRTree | ||||
| 
 | ||||
|         leaves_stream.open(leaf_file, std::ios::binary); | ||||
|         leaves_stream.read((char *)&m_element_count, sizeof(uint64_t)); | ||||
| 
 | ||||
|         // SimpleLogger().Write() << tree_size << " nodes in search tree";
 | ||||
|         // SimpleLogger().Write() << m_element_count << " elements in leafs";
 | ||||
|     } | ||||
|     // Read-only operation for queries
 | ||||
| 
 | ||||
|     bool LocateClosestEndPointForCoordinate(const FixedPointCoordinate &input_coordinate, | ||||
|                                             FixedPointCoordinate &result_coordinate, | ||||
|                                             const unsigned zoom_level) | ||||
|     { | ||||
|         bool ignore_tiny_components = (zoom_level <= 14); | ||||
| 
 | ||||
|         float min_dist = std::numeric_limits<float>::max(); | ||||
|         float min_max_dist = std::numeric_limits<float>::max(); | ||||
| 
 | ||||
|         // initialize queue with root element
 | ||||
|         std::priority_queue<QueryCandidate> traversal_queue; | ||||
|         traversal_queue.emplace(0.f, 0); | ||||
| 
 | ||||
|         while (!traversal_queue.empty()) | ||||
|         { | ||||
|             const QueryCandidate current_query_node = traversal_queue.top(); | ||||
|             traversal_queue.pop(); | ||||
| 
 | ||||
|             const bool prune_downward = (current_query_node.min_dist >= min_max_dist); | ||||
|             const bool prune_upward = (current_query_node.min_dist >= min_dist); | ||||
|             if (!prune_downward && !prune_upward) | ||||
|             { // downward pruning
 | ||||
|                 TreeNode ¤t_tree_node = m_search_tree[current_query_node.node_id]; | ||||
|                 if (current_tree_node.child_is_on_disk) | ||||
|                 { | ||||
|                     LeafNode current_leaf_node; | ||||
|                     LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); | ||||
|                     for (uint32_t i = 0; i < current_leaf_node.object_count; ++i) | ||||
|                     { | ||||
|                         EdgeDataT const ¤t_edge = current_leaf_node.objects[i]; | ||||
|                         if (ignore_tiny_components && current_edge.component_id != 0) | ||||
|                         { | ||||
|                             continue; | ||||
|                         } | ||||
| 
 | ||||
|                         float current_minimum_distance = coordinate_calculation::euclidean_distance( | ||||
|                             input_coordinate.lat, input_coordinate.lon, | ||||
|                             m_coordinate_list->at(current_edge.u).lat, | ||||
|                             m_coordinate_list->at(current_edge.u).lon); | ||||
|                         if (current_minimum_distance < min_dist) | ||||
|                         { | ||||
|                             // found a new minimum
 | ||||
|                             min_dist = current_minimum_distance; | ||||
|                             result_coordinate = m_coordinate_list->at(current_edge.u); | ||||
|                         } | ||||
| 
 | ||||
|                         current_minimum_distance = coordinate_calculation::euclidean_distance( | ||||
|                             input_coordinate.lat, input_coordinate.lon, | ||||
|                             m_coordinate_list->at(current_edge.v).lat, | ||||
|                             m_coordinate_list->at(current_edge.v).lon); | ||||
| 
 | ||||
|                         if (current_minimum_distance < min_dist) | ||||
|                         { | ||||
|                             // found a new minimum
 | ||||
|                             min_dist = current_minimum_distance; | ||||
|                             result_coordinate = m_coordinate_list->at(current_edge.v); | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
|                 else | ||||
|                 { | ||||
|                     min_max_dist = ExploreTreeNode(current_tree_node, input_coordinate, min_dist, | ||||
|                                                    min_max_dist, traversal_queue); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         return result_coordinate.is_valid(); | ||||
|     } | ||||
| 
 | ||||
|     bool IncrementalFindPhantomNodeForCoordinate( | ||||
|         const FixedPointCoordinate &input_coordinate, | ||||
|         std::vector<PhantomNode> &result_phantom_node_vector, | ||||
|         const unsigned max_number_of_phantom_nodes, | ||||
|         const float max_distance = 1100, | ||||
|         const unsigned max_checked_elements = 4 * LEAF_NODE_SIZE) | ||||
|     // Override filter and terminator for the desired behaviour.
 | ||||
|     std::vector<EdgeDataT> Nearest(const FixedPointCoordinate &input_coordinate, | ||||
|                                 const std::size_t max_results) | ||||
|     { | ||||
|         unsigned inspected_elements = 0; | ||||
|         unsigned number_of_elements_from_big_cc = 0; | ||||
|         unsigned number_of_elements_from_tiny_cc = 0; | ||||
|         return Nearest(input_coordinate, | ||||
|                        [](const EdgeDataT &) | ||||
|                        { | ||||
|                            return std::make_pair(true, true); | ||||
|                        }, | ||||
|                        [max_results](const std::size_t num_results, const float) | ||||
|                        { | ||||
|                            return num_results >= max_results; | ||||
|                        }); | ||||
|     } | ||||
| 
 | ||||
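The new Nearest interface takes a filter that returns a pair of booleans (use the segment in its forward / reverse direction) and a terminator that ends the search based on the number of results collected so far and the distance of the current candidate; the convenience overload above supplies permissive defaults. The sketch below reproduces just that control flow with a stand-in data type so it compiles on its own; it is not the actual R-tree traversal, and the 1100 m cutoff merely echoes the default max_distance of the removed incremental search:

    #include <cstddef>
    #include <iostream>
    #include <utility>
    #include <vector>

    struct Segment
    {
        float distance;          // ascending order stands in for the priority queue
        bool in_tiny_component;
    };

    // Filter/terminator pattern: the filter decides whether to keep a candidate,
    // the terminator decides when to stop looking at further (farther) ones.
    template <typename FilterT, typename TerminationT>
    std::vector<Segment> nearest_sketch(const std::vector<Segment> &sorted_candidates,
                                        FilterT filter,
                                        TerminationT terminate)
    {
        std::vector<Segment> results;
        for (const auto &candidate : sorted_candidates)
        {
            if (terminate(results.size(), candidate.distance))
                break;
            const std::pair<bool, bool> use = filter(candidate);  // (forward, reverse)
            if (use.first || use.second)
                results.push_back(candidate);
        }
        return results;
    }

    int main()
    {
        const std::vector<Segment> candidates = {
            {10.f, false}, {20.f, true}, {30.f, false}, {1200.f, false}};

        const auto results = nearest_sketch(
            candidates,
            [](const Segment &s) { return std::make_pair(!s.in_tiny_component, !s.in_tiny_component); },
            [](const std::size_t n, const float dist) { return n >= 5 || dist > 1100.f; });

        // prints 2: the tiny-component hit is filtered out, the far one stops the search
        std::cout << results.size() << " segments kept\n";
    }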
|     // Override filter and terminator for the desired behaviour.
 | ||||
|     template <typename FilterT, typename TerminationT> | ||||
|     std::vector<EdgeDataT> Nearest(const FixedPointCoordinate &input_coordinate, | ||||
|                                 const FilterT filter, | ||||
|                                 const TerminationT terminate) | ||||
|     { | ||||
|         std::vector<EdgeDataT> results; | ||||
|         std::pair<double, double> projected_coordinate = { | ||||
|             mercator::lat2y(input_coordinate.lat / COORDINATE_PRECISION), | ||||
|             input_coordinate.lon / COORDINATE_PRECISION}; | ||||
| 
 | ||||
|         // initialize queue with root element
 | ||||
|         std::priority_queue<IncrementalQueryCandidate> traversal_queue; | ||||
|         traversal_queue.emplace(0.f, m_search_tree[0]); | ||||
|         std::priority_queue<QueryCandidate> traversal_queue; | ||||
|         traversal_queue.push(QueryCandidate {0.f, m_search_tree[0]}); | ||||
| 
 | ||||
|         while (!traversal_queue.empty()) | ||||
|         { | ||||
|             const IncrementalQueryCandidate current_query_node = traversal_queue.top(); | ||||
|             if (current_query_node.min_dist > max_distance && | ||||
|                 inspected_elements > max_checked_elements) | ||||
|             const QueryCandidate current_query_node = traversal_queue.top(); | ||||
|             if (terminate(results.size(), current_query_node.min_dist)) | ||||
|             { | ||||
|                 traversal_queue = std::priority_queue<QueryCandidate>{}; | ||||
|                 break; | ||||
|             } | ||||
| 
 | ||||
|             traversal_queue.pop(); | ||||
| 
 | ||||
|             if (current_query_node.node.template is<TreeNode>()) | ||||
| @ -677,396 +391,81 @@ class StaticRTree | ||||
|                     current_query_node.node.template get<TreeNode>(); | ||||
|                 if (current_tree_node.child_is_on_disk) | ||||
|                 { | ||||
|                     LeafNode current_leaf_node; | ||||
|                     LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); | ||||
| 
 | ||||
|                     // current object represents a block on disk
 | ||||
|                     for (const auto i : osrm::irange(0u, current_leaf_node.object_count)) | ||||
|                     { | ||||
|                         const auto ¤t_edge = current_leaf_node.objects[i]; | ||||
|                         const float current_perpendicular_distance = coordinate_calculation:: | ||||
|                             perpendicular_distance_from_projected_coordinate( | ||||
|                                 m_coordinate_list->at(current_edge.u), | ||||
|                                 m_coordinate_list->at(current_edge.v), input_coordinate, | ||||
|                                 projected_coordinate); | ||||
|                         // distance must be non-negative
 | ||||
|                         BOOST_ASSERT(0.f <= current_perpendicular_distance); | ||||
| 
 | ||||
|                         traversal_queue.emplace(current_perpendicular_distance, current_edge); | ||||
|                     } | ||||
|                     ExploreLeafNode(current_tree_node.children[0], input_coordinate, | ||||
|                                     projected_coordinate, traversal_queue); | ||||
|                 } | ||||
|                 else | ||||
|                 { | ||||
|                     // for each child mbr get a lower bound and enqueue it
 | ||||
|                     for (const auto i : osrm::irange(0u, current_tree_node.child_count)) | ||||
|                     { | ||||
|                         const int32_t child_id = current_tree_node.children[i]; | ||||
|                         const TreeNode &child_tree_node = m_search_tree[child_id]; | ||||
|                         const RectangleT &child_rectangle = | ||||
|                             child_tree_node.minimum_bounding_rectangle; | ||||
|                         const float lower_bound_to_element = | ||||
|                             child_rectangle.GetMinDist(input_coordinate); | ||||
|                         BOOST_ASSERT(0.f <= lower_bound_to_element); | ||||
| 
 | ||||
|                         traversal_queue.emplace(lower_bound_to_element, child_tree_node); | ||||
|                     } | ||||
|                     ExploreTreeNode(current_tree_node, input_coordinate, traversal_queue); | ||||
|                 } | ||||
|             } | ||||
|             else | ||||
|             { // current object is a leaf node
 | ||||
|                 ++inspected_elements; | ||||
|             { | ||||
|                 // inspecting an actual road segment
 | ||||
|                 const EdgeDataT ¤t_segment = | ||||
|                     current_query_node.node.template get<EdgeDataT>(); | ||||
|                 const auto ¤t_segment = current_query_node.node.template get<EdgeDataT>(); | ||||
| 
 | ||||
|                 // continue searching for the first segment from a big component
 | ||||
|                 if (number_of_elements_from_big_cc == 0 && | ||||
|                     number_of_elements_from_tiny_cc >= max_number_of_phantom_nodes && | ||||
|                     current_segment.is_in_tiny_cc()) | ||||
| 
 | ||||
|                 auto use_segment = filter(current_segment); | ||||
|                 if (!use_segment.first && !use_segment.second) | ||||
|                 { | ||||
|                     continue; | ||||
|                 } | ||||
| 
 | ||||
|                 // check if it is smaller than what we had before
 | ||||
|                 float current_ratio = 0.f; | ||||
|                 FixedPointCoordinate foot_point_coordinate_on_segment; | ||||
| 
 | ||||
|                 // const float current_perpendicular_distance =
 | ||||
|                 coordinate_calculation::perpendicular_distance_from_projected_coordinate( | ||||
|                     m_coordinate_list->at(current_segment.u), | ||||
|                     m_coordinate_list->at(current_segment.v), input_coordinate, | ||||
|                     projected_coordinate, foot_point_coordinate_on_segment, current_ratio); | ||||
| 
 | ||||
|                 // store phantom node in result vector
 | ||||
|                 result_phantom_node_vector.emplace_back(current_segment, | ||||
|                                                         foot_point_coordinate_on_segment); | ||||
|                 results.push_back(std::move(current_segment)); | ||||
| 
 | ||||
|                 // Hack to fix rounding errors and wandering via nodes.
 | ||||
|                 FixUpRoundingIssue(input_coordinate, result_phantom_node_vector.back()); | ||||
| 
 | ||||
|                 // set forward and reverse weights on the phantom node
 | ||||
|                 SetForwardAndReverseWeightsOnPhantomNode(current_segment, | ||||
|                                                          result_phantom_node_vector.back()); | ||||
| 
 | ||||
|                 // update counts on what we found from which result class
 | ||||
|                 if (current_segment.is_in_tiny_cc()) | ||||
|                 { // found an element in tiny component
 | ||||
|                     ++number_of_elements_from_tiny_cc; | ||||
|                 } | ||||
|                 else | ||||
|                 { // found an element in a big component
 | ||||
|                     ++number_of_elements_from_big_cc; | ||||
|                 } | ||||
|             } | ||||
| 
 | ||||
|             // stop the search by flushing the queue
 | ||||
|             if (result_phantom_node_vector.size() >= max_number_of_phantom_nodes && | ||||
|                 number_of_elements_from_big_cc > 0) | ||||
|             { | ||||
|                 traversal_queue = std::priority_queue<IncrementalQueryCandidate>{}; | ||||
|             } | ||||
|         } | ||||
| #ifdef NDEBUG | ||||
| // SimpleLogger().Write() << "result_phantom_node_vector.size(): " <<
 | ||||
| // result_phantom_node_vector.size();
 | ||||
| // SimpleLogger().Write() << "max_number_of_phantom_nodes: " << max_number_of_phantom_nodes;
 | ||||
| // SimpleLogger().Write() << "number_of_elements_from_big_cc: " <<
 | ||||
| // number_of_elements_from_big_cc;
 | ||||
| // SimpleLogger().Write() << "number_of_elements_from_tiny_cc: " <<
 | ||||
| // number_of_elements_from_tiny_cc;
 | ||||
| // SimpleLogger().Write() << "inspected_elements: " << inspected_elements;
 | ||||
| // SimpleLogger().Write() << "max_checked_elements: " << max_checked_elements;
 | ||||
| // SimpleLogger().Write() << "pruned_elements: " << pruned_elements;
 | ||||
| #endif | ||||
|         return !result_phantom_node_vector.empty(); | ||||
|     } | ||||
| 
 | ||||
|     // Returns elements within max_distance.
 | ||||
|     // If the minimum number of elements could not be found in the search radius, widen

 | ||||
|     // it until the minimum can be satisfied.
 | ||||
|     bool IncrementalFindPhantomNodeForCoordinateWithDistance( | ||||
|         const FixedPointCoordinate &input_coordinate, | ||||
|         std::vector<std::pair<PhantomNode, double>> &result_phantom_node_vector, | ||||
|         const double max_distance, | ||||
|         const unsigned max_checked_elements = 4 * LEAF_NODE_SIZE) | ||||
|     { | ||||
|         unsigned inspected_elements = 0; | ||||
| 
 | ||||
|         std::pair<double, double> projected_coordinate = { | ||||
|             mercator::lat2y(input_coordinate.lat / COORDINATE_PRECISION), | ||||
|             input_coordinate.lon / COORDINATE_PRECISION}; | ||||
| 
 | ||||
|         // initialize queue with root element
 | ||||
|         std::priority_queue<IncrementalQueryCandidate> traversal_queue; | ||||
|         traversal_queue.emplace(0.f, m_search_tree[0]); | ||||
| 
 | ||||
|         while (!traversal_queue.empty()) | ||||
|         { | ||||
|             const IncrementalQueryCandidate current_query_node = traversal_queue.top(); | ||||
|             traversal_queue.pop(); | ||||
| 
 | ||||
|             if (current_query_node.min_dist > max_distance || | ||||
|                 inspected_elements >= max_checked_elements) | ||||
|             { | ||||
|                 break; | ||||
|             } | ||||
| 
 | ||||
|             if (current_query_node.node.template is<TreeNode>()) | ||||
|             { // current object is a tree node
 | ||||
|                 const TreeNode ¤t_tree_node = | ||||
|                     current_query_node.node.template get<TreeNode>(); | ||||
|                 if (current_tree_node.child_is_on_disk) | ||||
|                 if (!use_segment.first) | ||||
|                 { | ||||
|                     LeafNode current_leaf_node; | ||||
|                     LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); | ||||
| 
 | ||||
|                     // current object represents a block on disk
 | ||||
|                     for (const auto i : osrm::irange(0u, current_leaf_node.object_count)) | ||||
|                     { | ||||
|                         const auto ¤t_edge = current_leaf_node.objects[i]; | ||||
|                         const float current_perpendicular_distance = coordinate_calculation:: | ||||
|                             perpendicular_distance_from_projected_coordinate( | ||||
|                                 m_coordinate_list->at(current_edge.u), | ||||
|                                 m_coordinate_list->at(current_edge.v), input_coordinate, | ||||
|                                 projected_coordinate); | ||||
|                         // distance must be non-negative
 | ||||
|                         BOOST_ASSERT(0.f <= current_perpendicular_distance); | ||||
| 
 | ||||
|                         if (current_perpendicular_distance <= max_distance) | ||||
|                         { | ||||
|                             traversal_queue.emplace(current_perpendicular_distance, current_edge); | ||||
|                         } | ||||
|                     } | ||||
|                     results.back().forward_edge_based_node_id = SPECIAL_NODEID; | ||||
|                 } | ||||
|                 else | ||||
|                 else if (!use_segment.second) | ||||
|                 { | ||||
|                     // for each child mbr get a lower bound and enqueue it
 | ||||
|                     for (const auto i : osrm::irange(0u, current_tree_node.child_count)) | ||||
|                     { | ||||
|                         const int32_t child_id = current_tree_node.children[i]; | ||||
|                         const TreeNode &child_tree_node = m_search_tree[child_id]; | ||||
|                         const RectangleT &child_rectangle = | ||||
|                             child_tree_node.minimum_bounding_rectangle; | ||||
|                         const float lower_bound_to_element = | ||||
|                             child_rectangle.GetMinDist(input_coordinate); | ||||
|                         BOOST_ASSERT(0.f <= lower_bound_to_element); | ||||
| 
 | ||||
|                         if (lower_bound_to_element <= max_distance) | ||||
|                         { | ||||
|                             traversal_queue.emplace(lower_bound_to_element, child_tree_node); | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|             else | ||||
|             { // current object is a leaf node
 | ||||
|                 ++inspected_elements; | ||||
|                 // inspecting an actual road segment
 | ||||
|                 const EdgeDataT ¤t_segment = | ||||
|                     current_query_node.node.template get<EdgeDataT>(); | ||||
| 
 | ||||
|                 // check if it is smaller than what we had before
 | ||||
|                 float current_ratio = 0.f; | ||||
|                 FixedPointCoordinate foot_point_coordinate_on_segment; | ||||
| 
 | ||||
|                 const float current_perpendicular_distance = | ||||
|                     coordinate_calculation::perpendicular_distance_from_projected_coordinate( | ||||
|                         m_coordinate_list->at(current_segment.u), | ||||
|                         m_coordinate_list->at(current_segment.v), input_coordinate, | ||||
|                         projected_coordinate, foot_point_coordinate_on_segment, current_ratio); | ||||
| 
 | ||||
|                 if (current_perpendicular_distance >= max_distance) | ||||
|                 { | ||||
|                     traversal_queue = std::priority_queue<IncrementalQueryCandidate>{}; | ||||
|                     continue; | ||||
|                 } | ||||
| 
 | ||||
|                 // store phantom node in result vector
 | ||||
|                 result_phantom_node_vector.emplace_back( | ||||
|                     PhantomNode( | ||||
|                         current_segment.forward_edge_based_node_id, | ||||
|                         current_segment.reverse_edge_based_node_id, current_segment.name_id, | ||||
|                         current_segment.forward_weight, current_segment.reverse_weight, | ||||
|                         current_segment.forward_offset, current_segment.reverse_offset, | ||||
|                         current_segment.packed_geometry_id, current_segment.component_id, | ||||
|                         foot_point_coordinate_on_segment, current_segment.fwd_segment_position, | ||||
|                         current_segment.forward_travel_mode, current_segment.backward_travel_mode), | ||||
|                     current_perpendicular_distance); | ||||
| 
 | ||||
|                 // Hack to fix rounding errors and wandering via nodes.
 | ||||
|                 FixUpRoundingIssue(input_coordinate, result_phantom_node_vector.back().first); | ||||
| 
 | ||||
|                 // set forward and reverse weights on the phantom node
 | ||||
|                 SetForwardAndReverseWeightsOnPhantomNode(current_segment, | ||||
|                                                          result_phantom_node_vector.back().first); | ||||
|             } | ||||
| 
 | ||||
|             // stop the search by flushing the queue
 | ||||
|             if (inspected_elements >= max_checked_elements) | ||||
|             { | ||||
|                 traversal_queue = std::priority_queue<IncrementalQueryCandidate>{}; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         return !result_phantom_node_vector.empty(); | ||||
|     } | ||||
| 
 | ||||
|     bool FindPhantomNodeForCoordinate(const FixedPointCoordinate &input_coordinate, | ||||
|                                       PhantomNode &result_phantom_node, | ||||
|                                       const unsigned zoom_level) | ||||
|     { | ||||
|         const bool ignore_tiny_components = (zoom_level <= 14); | ||||
|         EdgeDataT nearest_edge; | ||||
| 
 | ||||
|         float min_dist = std::numeric_limits<float>::max(); | ||||
|         float min_max_dist = std::numeric_limits<float>::max(); | ||||
| 
 | ||||
|         std::priority_queue<QueryCandidate> traversal_queue; | ||||
|         traversal_queue.emplace(0.f, 0); | ||||
| 
 | ||||
|         while (!traversal_queue.empty()) | ||||
|         { | ||||
|             const QueryCandidate current_query_node = traversal_queue.top(); | ||||
|             traversal_queue.pop(); | ||||
| 
 | ||||
|             const bool prune_downward = (current_query_node.min_dist > min_max_dist); | ||||
|             const bool prune_upward = (current_query_node.min_dist > min_dist); | ||||
|             if (!prune_downward && !prune_upward) | ||||
|             { // downward pruning
 | ||||
|                 const TreeNode ¤t_tree_node = m_search_tree[current_query_node.node_id]; | ||||
|                 if (current_tree_node.child_is_on_disk) | ||||
|                 { | ||||
|                     LeafNode current_leaf_node; | ||||
|                     LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); | ||||
|                     for (uint32_t i = 0; i < current_leaf_node.object_count; ++i) | ||||
|                     { | ||||
|                         const EdgeDataT ¤t_edge = current_leaf_node.objects[i]; | ||||
|                         if (ignore_tiny_components && current_edge.component_id != 0) | ||||
|                         { | ||||
|                             continue; | ||||
|                         } | ||||
| 
 | ||||
|                         float current_ratio = 0.; | ||||
|                         FixedPointCoordinate nearest; | ||||
|                         const float current_perpendicular_distance = | ||||
|                             coordinate_calculation::perpendicular_distance( | ||||
|                                 m_coordinate_list->at(current_edge.u), | ||||
|                                 m_coordinate_list->at(current_edge.v), input_coordinate, nearest, | ||||
|                                 current_ratio); | ||||
| 
 | ||||
|                         BOOST_ASSERT(0. <= current_perpendicular_distance); | ||||
| 
 | ||||
|                         if ((current_perpendicular_distance < min_dist) && | ||||
|                             !osrm::epsilon_compare(current_perpendicular_distance, min_dist)) | ||||
|                         { // found a new minimum
 | ||||
|                             min_dist = current_perpendicular_distance; | ||||
|                             result_phantom_node = {current_edge.forward_edge_based_node_id, | ||||
|                                                    current_edge.reverse_edge_based_node_id, | ||||
|                                                    current_edge.name_id, | ||||
|                                                    current_edge.forward_weight, | ||||
|                                                    current_edge.reverse_weight, | ||||
|                                                    current_edge.forward_offset, | ||||
|                                                    current_edge.reverse_offset, | ||||
|                                                    current_edge.packed_geometry_id, | ||||
|                                                    current_edge.component_id, | ||||
|                                                    nearest, | ||||
|                                                    current_edge.fwd_segment_position, | ||||
|                                                    current_edge.forward_travel_mode, | ||||
|                                                    current_edge.backward_travel_mode}; | ||||
|                             nearest_edge = current_edge; | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
|                 else | ||||
|                 { | ||||
|                     min_max_dist = ExploreTreeNode(current_tree_node, input_coordinate, min_dist, | ||||
|                                                    min_max_dist, traversal_queue); | ||||
|                     results.back().reverse_edge_based_node_id = SPECIAL_NODEID; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         if (result_phantom_node.location.is_valid()) | ||||
|         { | ||||
|             // Hack to fix rounding errors and wandering via nodes.
 | ||||
|             FixUpRoundingIssue(input_coordinate, result_phantom_node); | ||||
| 
 | ||||
|             // set forward and reverse weights on the phantom node
 | ||||
|             SetForwardAndReverseWeightsOnPhantomNode(nearest_edge, result_phantom_node); | ||||
|         } | ||||
|         return result_phantom_node.location.is_valid(); | ||||
|         return results; | ||||
|     } | ||||
| 
 | ||||
|   private: | ||||
|     inline void SetForwardAndReverseWeightsOnPhantomNode(const EdgeDataT &nearest_edge, | ||||
|                                                          PhantomNode &result_phantom_node) const | ||||
|     template <typename QueueT> | ||||
|     void ExploreLeafNode(const std::uint32_t leaf_id, | ||||
|                          const FixedPointCoordinate &input_coordinate, | ||||
|                          const std::pair<double, double> &projected_coordinate, | ||||
|                          QueueT &traversal_queue) | ||||
|     { | ||||
|         const float distance_1 = coordinate_calculation::euclidean_distance( | ||||
|             m_coordinate_list->at(nearest_edge.u), result_phantom_node.location); | ||||
|         const float distance_2 = coordinate_calculation::euclidean_distance( | ||||
|             m_coordinate_list->at(nearest_edge.u), m_coordinate_list->at(nearest_edge.v)); | ||||
|         const float ratio = std::min(1.f, distance_1 / distance_2); | ||||
|         LeafNode current_leaf_node; | ||||
|         LoadLeafFromDisk(leaf_id, current_leaf_node); | ||||
| 
 | ||||
|         using TreeWeightType = decltype(result_phantom_node.forward_weight); | ||||
|         static_assert(std::is_same<decltype(result_phantom_node.forward_weight), | ||||
|                                    decltype(result_phantom_node.reverse_weight)>::value, | ||||
|                       "forward and reverse weight type in tree must be the same"); | ||||
|         // current object represents a block on disk
 | ||||
|         for (const auto i : osrm::irange(0u, current_leaf_node.object_count)) | ||||
|         { | ||||
|             auto ¤t_edge = current_leaf_node.objects[i]; | ||||
|             const float current_perpendicular_distance = | ||||
|                 coordinate_calculation::perpendicular_distance_from_projected_coordinate( | ||||
|                     m_coordinate_list->at(current_edge.u), m_coordinate_list->at(current_edge.v), | ||||
|                     input_coordinate, projected_coordinate); | ||||
|             // distance must be non-negative
 | ||||
|             BOOST_ASSERT(0.f <= current_perpendicular_distance); | ||||
| 
 | ||||
|         if (SPECIAL_NODEID != result_phantom_node.forward_node_id) | ||||
|         { | ||||
|             const auto new_weight = | ||||
|                 static_cast<TreeWeightType>(result_phantom_node.forward_weight * ratio); | ||||
|             result_phantom_node.forward_weight = new_weight; | ||||
|         } | ||||
|         if (SPECIAL_NODEID != result_phantom_node.reverse_node_id) | ||||
|         { | ||||
|             const auto new_weight = | ||||
|                 static_cast<TreeWeightType>(result_phantom_node.reverse_weight * (1.f - ratio)); | ||||
|             result_phantom_node.reverse_weight = new_weight; | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     // fixup locations if too close to inputs
 | ||||
|     inline void FixUpRoundingIssue(const FixedPointCoordinate &input_coordinate, | ||||
|                                    PhantomNode &result_phantom_node) const | ||||
|     { | ||||
|         if (1 == std::abs(input_coordinate.lon - result_phantom_node.location.lon)) | ||||
|         { | ||||
|             result_phantom_node.location.lon = input_coordinate.lon; | ||||
|         } | ||||
|         if (1 == std::abs(input_coordinate.lat - result_phantom_node.location.lat)) | ||||
|         { | ||||
|             result_phantom_node.location.lat = input_coordinate.lat; | ||||
|             traversal_queue.push(QueryCandidate {current_perpendicular_distance, std::move(current_edge)}); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
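ExploreLeafNode loads one leaf from disk and, for every segment it contains, pushes a candidate whose priority is the perpendicular distance from the query point to that segment. The real code works on mercator-projected fixed-point coordinates via perpendicular_distance_from_projected_coordinate; the sketch below shows only the underlying planar geometry, as a simplified stand-in:

    #include <algorithm>
    #include <cmath>

    struct Point { double x, y; };

    // Planar point-to-segment distance: project the point onto the segment,
    // clamp the foot point to the segment's extent, then measure point-to-point.
    inline double point_to_segment_distance(const Point p, const Point a, const Point b)
    {
        const double dx = b.x - a.x;
        const double dy = b.y - a.y;
        const double len_sq = dx * dx + dy * dy;
        // Degenerate segment: fall back to the distance to its single point.
        double t = len_sq > 0. ? ((p.x - a.x) * dx + (p.y - a.y) * dy) / len_sq : 0.;
        t = std::min(1., std::max(0., t));
        const double fx = a.x + t * dx;  // foot point on the segment
        const double fy = a.y + t * dy;
        return std::sqrt((p.x - fx) * (p.x - fx) + (p.y - fy) * (p.y - fy));
    }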
|     template <class QueueT> | ||||
|     inline float ExploreTreeNode(const TreeNode &parent, | ||||
|                                  const FixedPointCoordinate &input_coordinate, | ||||
|                                  const float min_dist, | ||||
|                                  const float min_max_dist, | ||||
|                                  QueueT &traversal_queue) | ||||
|     void ExploreTreeNode(const TreeNode &parent, | ||||
|                          const FixedPointCoordinate &input_coordinate, | ||||
|                          QueueT &traversal_queue) | ||||
|     { | ||||
|         float new_min_max_dist = min_max_dist; | ||||
|         // traverse children, prune if global mindist is smaller than local one
 | ||||
|         for (uint32_t i = 0; i < parent.child_count; ++i) | ||||
|         { | ||||
|             const int32_t child_id = parent.children[i]; | ||||
|             const TreeNode &child_tree_node = m_search_tree[child_id]; | ||||
|             const RectangleT &child_rectangle = child_tree_node.minimum_bounding_rectangle; | ||||
|             const auto &child_tree_node = m_search_tree[child_id]; | ||||
|             const auto &child_rectangle = child_tree_node.minimum_bounding_rectangle; | ||||
|             const float lower_bound_to_element = child_rectangle.GetMinDist(input_coordinate); | ||||
|             const float upper_bound_to_element = child_rectangle.GetMinMaxDist(input_coordinate); | ||||
|             new_min_max_dist = std::min(new_min_max_dist, upper_bound_to_element); | ||||
|             if (lower_bound_to_element > new_min_max_dist) | ||||
|             { | ||||
|                 continue; | ||||
|             } | ||||
|             if (lower_bound_to_element > min_dist) | ||||
|             { | ||||
|                 continue; | ||||
|             } | ||||
|             traversal_queue.emplace(lower_bound_to_element, child_id); | ||||
|             traversal_queue.push(QueryCandidate {lower_bound_to_element, m_search_tree[child_id]}); | ||||
|         } | ||||
|         return new_min_max_dist; | ||||
|     } | ||||
| 
 | ||||
|     inline void LoadLeafFromDisk(const uint32_t leaf_id, LeafNode &result_node) | ||||
| @ -1077,8 +476,7 @@ class StaticRTree | ||||
|         } | ||||
|         if (!leaves_stream.good()) | ||||
|         { | ||||
|             leaves_stream.clear(std::ios::goodbit); | ||||
|             SimpleLogger().Write(logDEBUG) << "Resetting stale filestream"; | ||||
|             throw osrm::exception("Could not read from leaf file."); | ||||
|         } | ||||
|         const uint64_t seek_pos = sizeof(uint64_t) + leaf_id * sizeof(LeafNode); | ||||
|         leaves_stream.seekg(seek_pos); | ||||
| @ -1087,18 +485,11 @@ class StaticRTree | ||||
|         BOOST_ASSERT_MSG(leaves_stream.good(), "Reading from leaf file failed."); | ||||
|     } | ||||
| 
 | ||||
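LoadLeafFromDisk treats the leaf file as a header followed by fixed-size records: a uint64_t element count, then one LeafNode after another, so leaf i starts at sizeof(uint64_t) + i * sizeof(LeafNode). A generic sketch of that access pattern (illustrative, and valid only for trivially copyable records written with the same layout):

    #include <cstdint>
    #include <fstream>
    #include <stdexcept>

    // Read the index-th fixed-size record from a file laid out as
    // [uint64_t element_count][RecordT 0][RecordT 1]...
    template <typename RecordT>
    RecordT read_record(std::ifstream &stream, const std::uint64_t index)
    {
        if (!stream.good())
            throw std::runtime_error("Could not read from record file.");
        const std::uint64_t offset = sizeof(std::uint64_t) + index * sizeof(RecordT);
        stream.seekg(offset);
        RecordT record;
        stream.read(reinterpret_cast<char *>(&record), sizeof(RecordT));
        if (!stream.good())
            throw std::runtime_error("Reading record failed.");
        return record;
    }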
|     inline bool EdgesAreEquivalent(const FixedPointCoordinate &a, | ||||
|                                    const FixedPointCoordinate &b, | ||||
|                                    const FixedPointCoordinate &c, | ||||
|                                    const FixedPointCoordinate &d) const | ||||
|     { | ||||
|         return (a == b && c == d) || (a == c && b == d) || (a == d && b == c); | ||||
|     } | ||||
| 
 | ||||
|     inline void InitializeMBRectangle(RectangleT &rectangle, | ||||
|                                       const std::array<EdgeDataT, LEAF_NODE_SIZE> &objects, | ||||
|                                       const uint32_t element_count, | ||||
|                                       const std::vector<QueryNode> &coordinate_list) | ||||
|     template <typename CoordinateT> | ||||
|     void InitializeMBRectangle(Rectangle &rectangle, | ||||
|                                const std::array<EdgeDataT, LEAF_NODE_SIZE> &objects, | ||||
|                                const uint32_t element_count, | ||||
|                                const std::vector<CoordinateT> &coordinate_list) | ||||
|     { | ||||
|         for (uint32_t i = 0; i < element_count; ++i) | ||||
|         { | ||||
|  | ||||
906  datastore.cpp
| @ -38,7 +38,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "server/data_structures/datafacade_base.hpp" | ||||
| #include "server/data_structures/shared_datatype.hpp" | ||||
| #include "server/data_structures/shared_barriers.hpp" | ||||
| #include "util/boost_filesystem_2_fix.hpp" | ||||
| #include "util/datastore_options.hpp" | ||||
| #include "util/simple_logger.hpp" | ||||
| #include "util/osrm_exception.hpp" | ||||
| @ -46,7 +45,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "typedefs.h" | ||||
| 
 | ||||
| #include <osrm/coordinate.hpp> | ||||
| #include <osrm/server_paths.hpp> | ||||
| 
 | ||||
| using RTreeLeaf = BaseDataFacade<QueryEdge::EdgeData>::RTreeLeaf; | ||||
| using RTreeNode = StaticRTree<RTreeLeaf, ShM<FixedPointCoordinate, true>::vector, true>::TreeNode; | ||||
| @ -63,6 +61,7 @@ using QueryGraph = StaticGraph<QueryEdge::EdgeData>; | ||||
| 
 | ||||
| #include <fstream> | ||||
| #include <string> | ||||
| #include <new> | ||||
| 
 | ||||
| // delete a shared memory region. report warning if it could not be deleted
 | ||||
| void delete_region(const SharedDataType region) | ||||
| @ -94,360 +93,437 @@ void delete_region(const SharedDataType region) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| int main(const int argc, const char *argv[]) | ||||
| int main(const int argc, const char *argv[]) try | ||||
| { | ||||
|     LogPolicy::GetInstance().Unmute(); | ||||
|     SharedBarriers barrier; | ||||
| 
 | ||||
|     try | ||||
|     { | ||||
| #ifdef __linux__ | ||||
|         // try to disable swapping on Linux
 | ||||
|         const bool lock_flags = MCL_CURRENT | MCL_FUTURE; | ||||
|         if (-1 == mlockall(lock_flags)) | ||||
|         { | ||||
|             SimpleLogger().Write(logWARNING) << "Process " << argv[0] | ||||
|                                              << " could not request RAM lock"; | ||||
|         } | ||||
| #endif | ||||
|         try | ||||
|         { | ||||
|             boost::interprocess::scoped_lock<boost::interprocess::named_mutex> pending_lock( | ||||
|                 barrier.pending_update_mutex); | ||||
|         } | ||||
|         catch (...) | ||||
|         { | ||||
|             // hard unlock in case of any exception.
 | ||||
|             barrier.pending_update_mutex.unlock(); | ||||
|         } | ||||
|     } | ||||
|     catch (const std::exception &e) | ||||
|     // try to disable swapping on Linux
 | ||||
|     const bool lock_flags = MCL_CURRENT | MCL_FUTURE; | ||||
|     if (-1 == mlockall(lock_flags)) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "[exception] " << e.what(); | ||||
|         SimpleLogger().Write(logWARNING) << "Process " << argv[0] << " could not request RAM lock"; | ||||
|     } | ||||
| #endif | ||||
| 
 | ||||
|     try | ||||
|     { | ||||
|         SimpleLogger().Write(logDEBUG) << "Checking input parameters"; | ||||
|         boost::interprocess::scoped_lock<boost::interprocess::named_mutex> pending_lock( | ||||
|             barrier.pending_update_mutex); | ||||
|     } | ||||
|     catch (...) | ||||
|     { | ||||
|         // hard unlock in case of any exception.
 | ||||
|         barrier.pending_update_mutex.unlock(); | ||||
|     } | ||||
| 
 | ||||
|         ServerPaths server_paths; | ||||
|         if (!GenerateDataStoreOptions(argc, argv, server_paths)) | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|     SimpleLogger().Write(logDEBUG) << "Checking input parameters"; | ||||
| 
 | ||||
|         if (server_paths.find("hsgrdata") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no hsgr file found"); | ||||
|         } | ||||
|         if (server_paths.find("ramindex") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no ram index file found"); | ||||
|         } | ||||
|         if (server_paths.find("fileindex") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no leaf index file found"); | ||||
|         } | ||||
|         if (server_paths.find("nodesdata") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no nodes file found"); | ||||
|         } | ||||
|         if (server_paths.find("edgesdata") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no edges file found"); | ||||
|         } | ||||
|         if (server_paths.find("namesdata") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no names file found"); | ||||
|         } | ||||
|         if (server_paths.find("geometry") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no geometry file found"); | ||||
|         } | ||||
|         if (server_paths.find("core") == server_paths.end()) | ||||
|         { | ||||
|             throw osrm::exception("no core file found"); | ||||
|         } | ||||
|     std::unordered_map<std::string, boost::filesystem::path> server_paths; | ||||
|     if (!GenerateDataStoreOptions(argc, argv, server_paths)) | ||||
|     { | ||||
|         return EXIT_SUCCESS; | ||||
|     } | ||||
| 
 | ||||
|         ServerPaths::const_iterator paths_iterator = server_paths.find("hsgrdata"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &hsgr_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("timestamp"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path ×tamp_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("ramindex"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &ram_index_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("fileindex"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path index_file_path_absolute = | ||||
|             boost::filesystem::portable_canonical(paths_iterator->second); | ||||
|         const std::string &file_index_path = index_file_path_absolute.string(); | ||||
|         paths_iterator = server_paths.find("nodesdata"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &nodes_data_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("edgesdata"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &edges_data_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("namesdata"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &names_data_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("geometry"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &geometries_data_path = paths_iterator->second; | ||||
|         paths_iterator = server_paths.find("core"); | ||||
|         BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|         BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|         const boost::filesystem::path &core_marker_path = paths_iterator->second; | ||||
|     if (server_paths.find("hsgrdata") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no hsgr file found"); | ||||
|     } | ||||
|     if (server_paths.find("ramindex") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no ram index file found"); | ||||
|     } | ||||
|     if (server_paths.find("fileindex") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no leaf index file found"); | ||||
|     } | ||||
|     if (server_paths.find("nodesdata") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no nodes file found"); | ||||
|     } | ||||
|     if (server_paths.find("edgesdata") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no edges file found"); | ||||
|     } | ||||
|     if (server_paths.find("namesdata") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no names file found"); | ||||
|     } | ||||
|     if (server_paths.find("geometry") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no geometry file found"); | ||||
|     } | ||||
|     if (server_paths.find("core") == server_paths.end()) | ||||
|     { | ||||
|         throw osrm::exception("no core file found"); | ||||
|     } | ||||
| 
 | ||||
|         // determine segment to use
 | ||||
|         bool segment2_in_use = SharedMemory::RegionExists(LAYOUT_2); | ||||
|         const SharedDataType layout_region = [&] | ||||
|         { | ||||
|             return segment2_in_use ? LAYOUT_1 : LAYOUT_2; | ||||
|         }(); | ||||
|         const SharedDataType data_region = [&] | ||||
|         { | ||||
|             return segment2_in_use ? DATA_1 : DATA_2; | ||||
|         }(); | ||||
|         const SharedDataType previous_layout_region = [&] | ||||
|         { | ||||
|             return segment2_in_use ? LAYOUT_2 : LAYOUT_1; | ||||
|         }(); | ||||
|         const SharedDataType previous_data_region = [&] | ||||
|         { | ||||
|             return segment2_in_use ? DATA_2 : DATA_1; | ||||
|         }(); | ||||
|     auto paths_iterator = server_paths.find("hsgrdata"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &hsgr_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("timestamp"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path ×tamp_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("ramindex"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &ram_index_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("fileindex"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path index_file_path_absolute = | ||||
|         boost::filesystem::canonical(paths_iterator->second); | ||||
|     const std::string &file_index_path = index_file_path_absolute.string(); | ||||
|     paths_iterator = server_paths.find("nodesdata"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &nodes_data_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("edgesdata"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &edges_data_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("namesdata"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &names_data_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("geometry"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &geometries_data_path = paths_iterator->second; | ||||
|     paths_iterator = server_paths.find("core"); | ||||
|     BOOST_ASSERT(server_paths.end() != paths_iterator); | ||||
|     BOOST_ASSERT(!paths_iterator->second.empty()); | ||||
|     const boost::filesystem::path &core_marker_path = paths_iterator->second; | ||||
| 
 | ||||
|         // Allocate a memory layout in shared memory, deallocate previous
 | ||||
|         SharedMemory *layout_memory = | ||||
|             SharedMemoryFactory::Get(layout_region, sizeof(SharedDataLayout)); | ||||
|         SharedDataLayout *shared_layout_ptr = new (layout_memory->Ptr()) SharedDataLayout(); | ||||
|     // determine segment to use
 | ||||
|     bool segment2_in_use = SharedMemory::RegionExists(LAYOUT_2); | ||||
|     const SharedDataType layout_region = [&] | ||||
|     { | ||||
|         return segment2_in_use ? LAYOUT_1 : LAYOUT_2; | ||||
|     }(); | ||||
|     const SharedDataType data_region = [&] | ||||
|     { | ||||
|         return segment2_in_use ? DATA_1 : DATA_2; | ||||
|     }(); | ||||
|     const SharedDataType previous_layout_region = [&] | ||||
|     { | ||||
|         return segment2_in_use ? LAYOUT_2 : LAYOUT_1; | ||||
|     }(); | ||||
|     const SharedDataType previous_data_region = [&] | ||||
|     { | ||||
|         return segment2_in_use ? DATA_2 : DATA_1; | ||||
|     }(); | ||||
| 
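The four lambdas implement a double-buffer swap: the new dataset is written into whichever LAYOUT/DATA pair is not currently published, and the previously published pair is deleted once the switch has happened. A compact sketch of that decision with simplified enum values, not the real SharedDataType machinery:

enum Region { LAYOUT_1, LAYOUT_2, DATA_1, DATA_2 };

struct RegionChoice
{
    Region layout;     // layout region to fill now
    Region data;       // data region to fill now
    Region old_layout; // regions to delete after publishing
    Region old_data;
};

// mirrors the segment2_in_use selection above
inline RegionChoice choose_regions(bool segment2_in_use)
{
    if (segment2_in_use)
    {
        return {LAYOUT_1, DATA_1, LAYOUT_2, DATA_2};
    }
    return {LAYOUT_2, DATA_2, LAYOUT_1, DATA_1};
}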
 | ||||
|         shared_layout_ptr->SetBlockSize<char>(SharedDataLayout::FILE_INDEX_PATH, | ||||
|                                               file_index_path.length() + 1); | ||||
|     // Allocate a memory layout in shared memory, deallocate previous
 | ||||
|     auto *layout_memory = SharedMemoryFactory::Get(layout_region, sizeof(SharedDataLayout)); | ||||
|     auto *shared_layout_ptr = new (layout_memory->Ptr()) SharedDataLayout(); | ||||
| 
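From here on the code follows a two-pass pattern: first every block's entry count is registered with SetBlockSize, then a single region of GetSizeOfLayout() bytes is allocated and each block is filled in place through GetBlockPtr. A toy stand-in for that bookkeeping (block alignment is ignored here; this is not the real SharedDataLayout):

#include <cstddef>
#include <vector>

struct ToyLayout
{
    std::vector<std::size_t> block_bytes; // bytes reserved per block id

    template <typename T> void SetBlockSize(std::size_t block_id, std::size_t entries)
    {
        if (block_id >= block_bytes.size())
        {
            block_bytes.resize(block_id + 1, 0);
        }
        block_bytes[block_id] = entries * sizeof(T);
    }

    std::size_t GetSizeOfLayout() const
    {
        std::size_t total = 0;
        for (const auto bytes : block_bytes)
        {
            total += bytes;
        }
        return total;
    }

    template <typename T> T *GetBlockPtr(char *base, std::size_t block_id) const
    {
        std::size_t offset = 0;
        for (std::size_t i = 0; i < block_id; ++i)
        {
            offset += block_bytes[i];
        }
        return reinterpret_cast<T *>(base + offset);
    }
};

// usage sketch:
//   ToyLayout layout;
//   layout.SetBlockSize<unsigned>(0 /* e.g. NAME_OFFSETS */, name_blocks);
//   std::vector<char> region(layout.GetSizeOfLayout());
//   unsigned *name_offsets = layout.GetBlockPtr<unsigned>(region.data(), 0);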
 | ||||
|         // collect number of elements to store in shared memory object
 | ||||
|         SimpleLogger().Write() << "load names from: " << names_data_path; | ||||
|         // number of entries in name index
 | ||||
|         boost::filesystem::ifstream name_stream(names_data_path, std::ios::binary); | ||||
|         unsigned name_blocks = 0; | ||||
|         name_stream.read((char *)&name_blocks, sizeof(unsigned)); | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::NAME_OFFSETS, name_blocks); | ||||
|         shared_layout_ptr->SetBlockSize<typename RangeTable<16, true>::BlockT>( | ||||
|             SharedDataLayout::NAME_BLOCKS, name_blocks); | ||||
|         SimpleLogger().Write() << "name offsets size: " << name_blocks; | ||||
|         BOOST_ASSERT_MSG(0 != name_blocks, "name file broken"); | ||||
|     shared_layout_ptr->SetBlockSize<char>(SharedDataLayout::FILE_INDEX_PATH, | ||||
|                                           file_index_path.length() + 1); | ||||
| 
 | ||||
|         unsigned number_of_chars = 0; | ||||
|         name_stream.read((char *)&number_of_chars, sizeof(unsigned)); | ||||
|         shared_layout_ptr->SetBlockSize<char>(SharedDataLayout::NAME_CHAR_LIST, number_of_chars); | ||||
|     // collect number of elements to store in shared memory object
 | ||||
|     SimpleLogger().Write() << "load names from: " << names_data_path; | ||||
|     // number of entries in name index
 | ||||
|     boost::filesystem::ifstream name_stream(names_data_path, std::ios::binary); | ||||
|     unsigned name_blocks = 0; | ||||
|     name_stream.read((char *)&name_blocks, sizeof(unsigned)); | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::NAME_OFFSETS, name_blocks); | ||||
|     shared_layout_ptr->SetBlockSize<typename RangeTable<16, true>::BlockT>( | ||||
|         SharedDataLayout::NAME_BLOCKS, name_blocks); | ||||
|     SimpleLogger().Write() << "name offsets size: " << name_blocks; | ||||
|     BOOST_ASSERT_MSG(0 != name_blocks, "name file broken"); | ||||
| 
 | ||||
|         // Loading information for original edges
 | ||||
|         boost::filesystem::ifstream edges_input_stream(edges_data_path, std::ios::binary); | ||||
|         unsigned number_of_original_edges = 0; | ||||
|         edges_input_stream.read((char *)&number_of_original_edges, sizeof(unsigned)); | ||||
|     unsigned number_of_chars = 0; | ||||
|     name_stream.read((char *)&number_of_chars, sizeof(unsigned)); | ||||
|     shared_layout_ptr->SetBlockSize<char>(SharedDataLayout::NAME_CHAR_LIST, number_of_chars); | ||||
| 
 | ||||
|         // note: setting this all to the same size is correct, we extract them from the same struct
 | ||||
|         shared_layout_ptr->SetBlockSize<NodeID>(SharedDataLayout::VIA_NODE_LIST, | ||||
|     // Loading information for original edges
 | ||||
|     boost::filesystem::ifstream edges_input_stream(edges_data_path, std::ios::binary); | ||||
|     unsigned number_of_original_edges = 0; | ||||
|     edges_input_stream.read((char *)&number_of_original_edges, sizeof(unsigned)); | ||||
| 
 | ||||
|     // note: setting this all to the same size is correct, we extract them from the same struct
 | ||||
|     shared_layout_ptr->SetBlockSize<NodeID>(SharedDataLayout::VIA_NODE_LIST, | ||||
|                                             number_of_original_edges); | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::NAME_ID_LIST, | ||||
|                                               number_of_original_edges); | ||||
|     shared_layout_ptr->SetBlockSize<TravelMode>(SharedDataLayout::TRAVEL_MODE, | ||||
|                                                 number_of_original_edges); | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::NAME_ID_LIST, | ||||
|                                                   number_of_original_edges); | ||||
|         shared_layout_ptr->SetBlockSize<TravelMode>(SharedDataLayout::TRAVEL_MODE, | ||||
|                                                     number_of_original_edges); | ||||
|         shared_layout_ptr->SetBlockSize<TurnInstruction>(SharedDataLayout::TURN_INSTRUCTION, | ||||
|                                                          number_of_original_edges); | ||||
|         // note: there are 32 geometry indicators in one unsigned block
 | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::GEOMETRIES_INDICATORS, | ||||
|                                                   number_of_original_edges); | ||||
|     shared_layout_ptr->SetBlockSize<TurnInstruction>(SharedDataLayout::TURN_INSTRUCTION, | ||||
|                                                      number_of_original_edges); | ||||
|     // note: there are 32 geometry indicators in one unsigned block
 | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::GEOMETRIES_INDICATORS, | ||||
|                                               number_of_original_edges); | ||||
| 
 | ||||
|         boost::filesystem::ifstream hsgr_input_stream(hsgr_path, std::ios::binary); | ||||
|     boost::filesystem::ifstream hsgr_input_stream(hsgr_path, std::ios::binary); | ||||
| 
 | ||||
|         FingerPrint fingerprint_valid = FingerPrint::GetValid(); | ||||
|         FingerPrint fingerprint_loaded; | ||||
|         hsgr_input_stream.read((char *)&fingerprint_loaded, sizeof(FingerPrint)); | ||||
|         if (fingerprint_loaded.TestGraphUtil(fingerprint_valid)) | ||||
|     FingerPrint fingerprint_valid = FingerPrint::GetValid(); | ||||
|     FingerPrint fingerprint_loaded; | ||||
|     hsgr_input_stream.read((char *)&fingerprint_loaded, sizeof(FingerPrint)); | ||||
|     if (fingerprint_loaded.TestGraphUtil(fingerprint_valid)) | ||||
|     { | ||||
|         SimpleLogger().Write(logDEBUG) << "Fingerprint checked out ok"; | ||||
|     } | ||||
|     else | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << ".hsgr was prepared with different build. " | ||||
|                                             "Reprocess to get rid of this warning."; | ||||
|     } | ||||
| 
 | ||||
|     // load checksum
 | ||||
|     unsigned checksum = 0; | ||||
|     hsgr_input_stream.read((char *)&checksum, sizeof(unsigned)); | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::HSGR_CHECKSUM, 1); | ||||
|     // load graph node size
 | ||||
|     unsigned number_of_graph_nodes = 0; | ||||
|     hsgr_input_stream.read((char *)&number_of_graph_nodes, sizeof(unsigned)); | ||||
| 
 | ||||
|     BOOST_ASSERT_MSG((0 != number_of_graph_nodes), "number of nodes is zero"); | ||||
|     shared_layout_ptr->SetBlockSize<QueryGraph::NodeArrayEntry>(SharedDataLayout::GRAPH_NODE_LIST, | ||||
|                                                                 number_of_graph_nodes); | ||||
| 
 | ||||
|     // load graph edge size
 | ||||
|     unsigned number_of_graph_edges = 0; | ||||
|     hsgr_input_stream.read((char *)&number_of_graph_edges, sizeof(unsigned)); | ||||
|     // BOOST_ASSERT_MSG(0 != number_of_graph_edges, "number of graph edges is zero");
 | ||||
|     shared_layout_ptr->SetBlockSize<QueryGraph::EdgeArrayEntry>(SharedDataLayout::GRAPH_EDGE_LIST, | ||||
|                                                                 number_of_graph_edges); | ||||
| 
 | ||||
|     // load rsearch tree size
 | ||||
|     boost::filesystem::ifstream tree_node_file(ram_index_path, std::ios::binary); | ||||
| 
 | ||||
|     uint32_t tree_size = 0; | ||||
|     tree_node_file.read((char *)&tree_size, sizeof(uint32_t)); | ||||
|     shared_layout_ptr->SetBlockSize<RTreeNode>(SharedDataLayout::R_SEARCH_TREE, tree_size); | ||||
| 
 | ||||
|     // load timestamp size
 | ||||
|     std::string m_timestamp; | ||||
|     if (boost::filesystem::exists(timestamp_path)) | ||||
|     { | ||||
|         boost::filesystem::ifstream timestamp_stream(timestamp_path); | ||||
|         if (!timestamp_stream) | ||||
|         { | ||||
|             SimpleLogger().Write(logDEBUG) << "Fingerprint checked out ok"; | ||||
|             SimpleLogger().Write(logWARNING) << timestamp_path << " not found. setting to default"; | ||||
|         } | ||||
|         else | ||||
|         { | ||||
|             SimpleLogger().Write(logWARNING) << ".hsgr was prepared with different build. " | ||||
|                                                 "Reprocess to get rid of this warning."; | ||||
|             getline(timestamp_stream, m_timestamp); | ||||
|             timestamp_stream.close(); | ||||
|         } | ||||
|     } | ||||
|     if (m_timestamp.empty()) | ||||
|     { | ||||
|         m_timestamp = "n/a"; | ||||
|     } | ||||
|     if (25 < m_timestamp.length()) | ||||
|     { | ||||
|         m_timestamp.resize(25); | ||||
|     } | ||||
|     shared_layout_ptr->SetBlockSize<char>(SharedDataLayout::TIMESTAMP, m_timestamp.length()); | ||||
| 
 | ||||
|         // load checksum
 | ||||
|         unsigned checksum = 0; | ||||
|         hsgr_input_stream.read((char *)&checksum, sizeof(unsigned)); | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::HSGR_CHECKSUM, 1); | ||||
|         // load graph node size
 | ||||
|         unsigned number_of_graph_nodes = 0; | ||||
|         hsgr_input_stream.read((char *)&number_of_graph_nodes, sizeof(unsigned)); | ||||
|     // load core marker size
 | ||||
|     boost::filesystem::ifstream core_marker_file(core_marker_path, std::ios::binary); | ||||
| 
 | ||||
|         BOOST_ASSERT_MSG((0 != number_of_graph_nodes), "number of nodes is zero"); | ||||
|         shared_layout_ptr->SetBlockSize<QueryGraph::NodeArrayEntry>( | ||||
|             SharedDataLayout::GRAPH_NODE_LIST, number_of_graph_nodes); | ||||
|     uint32_t number_of_core_markers = 0; | ||||
|     core_marker_file.read((char *)&number_of_core_markers, sizeof(uint32_t)); | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::CORE_MARKER, | ||||
|                                               number_of_core_markers); | ||||
| 
 | ||||
|         // load graph edge size
 | ||||
|         unsigned number_of_graph_edges = 0; | ||||
|         hsgr_input_stream.read((char *)&number_of_graph_edges, sizeof(unsigned)); | ||||
|         // BOOST_ASSERT_MSG(0 != number_of_graph_edges, "number of graph edges is zero");
 | ||||
|         shared_layout_ptr->SetBlockSize<QueryGraph::EdgeArrayEntry>( | ||||
|             SharedDataLayout::GRAPH_EDGE_LIST, number_of_graph_edges); | ||||
|     // load coordinate size
 | ||||
|     boost::filesystem::ifstream nodes_input_stream(nodes_data_path, std::ios::binary); | ||||
|     unsigned coordinate_list_size = 0; | ||||
|     nodes_input_stream.read((char *)&coordinate_list_size, sizeof(unsigned)); | ||||
|     shared_layout_ptr->SetBlockSize<FixedPointCoordinate>(SharedDataLayout::COORDINATE_LIST, | ||||
|                                                           coordinate_list_size); | ||||
| 
 | ||||
|         // load rsearch tree size
 | ||||
|         boost::filesystem::ifstream tree_node_file(ram_index_path, std::ios::binary); | ||||
|     // load geometries sizes
 | ||||
|     std::ifstream geometry_input_stream(geometries_data_path.string().c_str(), std::ios::binary); | ||||
|     unsigned number_of_geometries_indices = 0; | ||||
|     unsigned number_of_compressed_geometries = 0; | ||||
| 
 | ||||
|         uint32_t tree_size = 0; | ||||
|         tree_node_file.read((char *)&tree_size, sizeof(uint32_t)); | ||||
|         shared_layout_ptr->SetBlockSize<RTreeNode>(SharedDataLayout::R_SEARCH_TREE, tree_size); | ||||
|     geometry_input_stream.read((char *)&number_of_geometries_indices, sizeof(unsigned)); | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::GEOMETRIES_INDEX, | ||||
|                                               number_of_geometries_indices); | ||||
|     boost::iostreams::seek(geometry_input_stream, number_of_geometries_indices * sizeof(unsigned), | ||||
|                            BOOST_IOS::cur); | ||||
|     geometry_input_stream.read((char *)&number_of_compressed_geometries, sizeof(unsigned)); | ||||
|     shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::GEOMETRIES_LIST, | ||||
|                                               number_of_compressed_geometries); | ||||
|     // allocate shared memory block
 | ||||
|     SimpleLogger().Write() << "allocating shared memory of " << shared_layout_ptr->GetSizeOfLayout() | ||||
|                            << " bytes"; | ||||
|     SharedMemory *shared_memory = | ||||
|         SharedMemoryFactory::Get(data_region, shared_layout_ptr->GetSizeOfLayout()); | ||||
|     char *shared_memory_ptr = static_cast<char *>(shared_memory->Ptr()); | ||||
| 
 | ||||
|         // load timestamp size
 | ||||
|         std::string m_timestamp; | ||||
|         if (boost::filesystem::exists(timestamp_path)) | ||||
|     // read actual data into shared memory object //
 | ||||
| 
 | ||||
|     // hsgr checksum
 | ||||
|     unsigned *checksum_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::HSGR_CHECKSUM); | ||||
|     *checksum_ptr = checksum; | ||||
| 
 | ||||
|     // ram index file name
 | ||||
|     char *file_index_path_ptr = shared_layout_ptr->GetBlockPtr<char, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::FILE_INDEX_PATH); | ||||
|     // make sure we have 0 ending
 | ||||
|     std::fill(file_index_path_ptr, | ||||
|               file_index_path_ptr + | ||||
|                   shared_layout_ptr->GetBlockSize(SharedDataLayout::FILE_INDEX_PATH), | ||||
|               0); | ||||
|     std::copy(file_index_path.begin(), file_index_path.end(), file_index_path_ptr); | ||||
| 
 | ||||
|     // Loading street names
 | ||||
|     unsigned *name_offsets_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::NAME_OFFSETS); | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_OFFSETS) > 0) | ||||
|     { | ||||
|         name_stream.read((char *)name_offsets_ptr, | ||||
|                          shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_OFFSETS)); | ||||
|     } | ||||
| 
 | ||||
|     unsigned *name_blocks_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::NAME_BLOCKS); | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_BLOCKS) > 0) | ||||
|     { | ||||
|         name_stream.read((char *)name_blocks_ptr, | ||||
|                          shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_BLOCKS)); | ||||
|     } | ||||
| 
 | ||||
|     char *name_char_ptr = shared_layout_ptr->GetBlockPtr<char, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::NAME_CHAR_LIST); | ||||
|     unsigned temp_length; | ||||
|     name_stream.read((char *)&temp_length, sizeof(unsigned)); | ||||
| 
 | ||||
|     BOOST_ASSERT_MSG(temp_length == | ||||
|                          shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_CHAR_LIST), | ||||
|                      "Name file corrupted!"); | ||||
| 
 | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_CHAR_LIST) > 0) | ||||
|     { | ||||
|         name_stream.read(name_char_ptr, | ||||
|                          shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_CHAR_LIST)); | ||||
|     } | ||||
| 
 | ||||
|     name_stream.close(); | ||||
| 
 | ||||
|     // load original edge information
 | ||||
|     NodeID *via_node_ptr = shared_layout_ptr->GetBlockPtr<NodeID, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::VIA_NODE_LIST); | ||||
| 
 | ||||
|     unsigned *name_id_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::NAME_ID_LIST); | ||||
| 
 | ||||
|     TravelMode *travel_mode_ptr = shared_layout_ptr->GetBlockPtr<TravelMode, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::TRAVEL_MODE); | ||||
| 
 | ||||
|     TurnInstruction *turn_instructions_ptr = shared_layout_ptr->GetBlockPtr<TurnInstruction, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::TURN_INSTRUCTION); | ||||
| 
 | ||||
|     unsigned *geometries_indicator_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::GEOMETRIES_INDICATORS); | ||||
| 
 | ||||
|     OriginalEdgeData current_edge_data; | ||||
|     for (unsigned i = 0; i < number_of_original_edges; ++i) | ||||
|     { | ||||
|         edges_input_stream.read((char *)&(current_edge_data), sizeof(OriginalEdgeData)); | ||||
|         via_node_ptr[i] = current_edge_data.via_node; | ||||
|         name_id_ptr[i] = current_edge_data.name_id; | ||||
|         travel_mode_ptr[i] = current_edge_data.travel_mode; | ||||
|         turn_instructions_ptr[i] = current_edge_data.turn_instruction; | ||||
| 
 | ||||
|         const unsigned bucket = i / 32; | ||||
|         const unsigned offset = i % 32; | ||||
|         const unsigned value = [&] | ||||
|         { | ||||
|             boost::filesystem::ifstream timestamp_stream(timestamp_path); | ||||
|             if (!timestamp_stream) | ||||
|             unsigned return_value = 0; | ||||
|             if (0 != offset) | ||||
|             { | ||||
|                 SimpleLogger().Write(logWARNING) << timestamp_path | ||||
|                                                  << " not found. setting to default"; | ||||
|                 return_value = geometries_indicator_ptr[bucket]; | ||||
|             } | ||||
|             else | ||||
|             { | ||||
|                 getline(timestamp_stream, m_timestamp); | ||||
|                 timestamp_stream.close(); | ||||
|             } | ||||
|         } | ||||
|         if (m_timestamp.empty()) | ||||
|             return return_value; | ||||
|         }(); | ||||
|         if (current_edge_data.compressed_geometry) | ||||
|         { | ||||
|             m_timestamp = "n/a"; | ||||
|             geometries_indicator_ptr[bucket] = (value | (1 << offset)); | ||||
|         } | ||||
|         if (25 < m_timestamp.length()) | ||||
|     } | ||||
|     edges_input_stream.close(); | ||||
| 
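The bucket/offset lambda above packs one flag per original edge into 32-bit words; it re-reads the current word except at offset 0, where a fresh word is started so the shared block needs no separate zero-initialisation. Over ordinary zero-initialised storage the same packing reduces to two helpers (illustrative names, not from the code base):

#include <cstddef>
#include <cstdint>
#include <vector>

inline void set_flag(std::vector<std::uint32_t> &words, std::size_t i)
{
    words[i / 32] |= (1u << (i % 32));
}

inline bool test_flag(const std::vector<std::uint32_t> &words, std::size_t i)
{
    return (words[i / 32] & (1u << (i % 32))) != 0;
}

// usage sketch: one word holds 32 edge flags
//   std::vector<std::uint32_t> indicators((number_of_original_edges + 31) / 32, 0);
//   if (current_edge_data.compressed_geometry) set_flag(indicators, i);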
 | ||||
|     // load compressed geometry
 | ||||
|     unsigned temporary_value; | ||||
|     unsigned *geometries_index_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::GEOMETRIES_INDEX); | ||||
|     geometry_input_stream.seekg(0, geometry_input_stream.beg); | ||||
|     geometry_input_stream.read((char *)&temporary_value, sizeof(unsigned)); | ||||
|     BOOST_ASSERT(temporary_value == | ||||
|                  shared_layout_ptr->num_entries[SharedDataLayout::GEOMETRIES_INDEX]); | ||||
| 
 | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_INDEX) > 0) | ||||
|     { | ||||
|         geometry_input_stream.read( | ||||
|             (char *)geometries_index_ptr, | ||||
|             shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_INDEX)); | ||||
|     } | ||||
|     unsigned *geometries_list_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::GEOMETRIES_LIST); | ||||
| 
 | ||||
|     geometry_input_stream.read((char *)&temporary_value, sizeof(unsigned)); | ||||
|     BOOST_ASSERT(temporary_value == | ||||
|                  shared_layout_ptr->num_entries[SharedDataLayout::GEOMETRIES_LIST]); | ||||
| 
 | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_LIST) > 0) | ||||
|     { | ||||
|         geometry_input_stream.read( | ||||
|             (char *)geometries_list_ptr, | ||||
|             shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_LIST)); | ||||
|     } | ||||
| 
 | ||||
|     // Loading list of coordinates
 | ||||
|     FixedPointCoordinate *coordinates_ptr = | ||||
|         shared_layout_ptr->GetBlockPtr<FixedPointCoordinate, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::COORDINATE_LIST); | ||||
| 
 | ||||
|     QueryNode current_node; | ||||
|     for (unsigned i = 0; i < coordinate_list_size; ++i) | ||||
|     { | ||||
|         nodes_input_stream.read((char *)¤t_node, sizeof(QueryNode)); | ||||
|         coordinates_ptr[i] = FixedPointCoordinate(current_node.lat, current_node.lon); | ||||
|     } | ||||
|     nodes_input_stream.close(); | ||||
| 
 | ||||
|     // store timestamp
 | ||||
|     char *timestamp_ptr = | ||||
|         shared_layout_ptr->GetBlockPtr<char, true>(shared_memory_ptr, SharedDataLayout::TIMESTAMP); | ||||
|     std::copy(m_timestamp.c_str(), m_timestamp.c_str() + m_timestamp.length(), timestamp_ptr); | ||||
| 
 | ||||
|     // store search tree portion of rtree
 | ||||
|     char *rtree_ptr = shared_layout_ptr->GetBlockPtr<char, true>(shared_memory_ptr, | ||||
|                                                                  SharedDataLayout::R_SEARCH_TREE); | ||||
| 
 | ||||
|     if (tree_size > 0) | ||||
|     { | ||||
|         tree_node_file.read(rtree_ptr, sizeof(RTreeNode) * tree_size); | ||||
|     } | ||||
|     tree_node_file.close(); | ||||
| 
 | ||||
|     // load core markers
 | ||||
|     std::vector<char> unpacked_core_markers(number_of_core_markers); | ||||
|     core_marker_file.read((char *)unpacked_core_markers.data(), | ||||
|                           sizeof(char) * number_of_core_markers); | ||||
| 
 | ||||
|     unsigned *core_marker_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|         shared_memory_ptr, SharedDataLayout::CORE_MARKER); | ||||
| 
 | ||||
|     for (auto i = 0u; i < number_of_core_markers; ++i) | ||||
|     { | ||||
|         BOOST_ASSERT(unpacked_core_markers[i] == 0 || unpacked_core_markers[i] == 1); | ||||
| 
 | ||||
|         if (unpacked_core_markers[i] == 1) | ||||
|         { | ||||
|             m_timestamp.resize(25); | ||||
|         } | ||||
|         shared_layout_ptr->SetBlockSize<char>(SharedDataLayout::TIMESTAMP, m_timestamp.length()); | ||||
| 
 | ||||
|         // load core marker size
 | ||||
|         boost::filesystem::ifstream core_marker_file(core_marker_path, std::ios::binary); | ||||
| 
 | ||||
|         uint32_t number_of_core_markers = 0; | ||||
|         core_marker_file.read((char *)&number_of_core_markers, sizeof(uint32_t)); | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::CORE_MARKER, number_of_core_markers); | ||||
| 
 | ||||
|         // load coordinate size
 | ||||
|         boost::filesystem::ifstream nodes_input_stream(nodes_data_path, std::ios::binary); | ||||
|         unsigned coordinate_list_size = 0; | ||||
|         nodes_input_stream.read((char *)&coordinate_list_size, sizeof(unsigned)); | ||||
|         shared_layout_ptr->SetBlockSize<FixedPointCoordinate>(SharedDataLayout::COORDINATE_LIST, | ||||
|                                                               coordinate_list_size); | ||||
| 
 | ||||
|         // load geometries sizes
 | ||||
|         std::ifstream geometry_input_stream(geometries_data_path.string().c_str(), | ||||
|                                             std::ios::binary); | ||||
|         unsigned number_of_geometries_indices = 0; | ||||
|         unsigned number_of_compressed_geometries = 0; | ||||
| 
 | ||||
|         geometry_input_stream.read((char *)&number_of_geometries_indices, sizeof(unsigned)); | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::GEOMETRIES_INDEX, | ||||
|                                                   number_of_geometries_indices); | ||||
|         boost::iostreams::seek(geometry_input_stream, | ||||
|                                number_of_geometries_indices * sizeof(unsigned), BOOST_IOS::cur); | ||||
|         geometry_input_stream.read((char *)&number_of_compressed_geometries, sizeof(unsigned)); | ||||
|         shared_layout_ptr->SetBlockSize<unsigned>(SharedDataLayout::GEOMETRIES_LIST, | ||||
|                                                   number_of_compressed_geometries); | ||||
|         // allocate shared memory block
 | ||||
|         SimpleLogger().Write() << "allocating shared memory of " | ||||
|                                << shared_layout_ptr->GetSizeOfLayout() << " bytes"; | ||||
|         SharedMemory *shared_memory = | ||||
|             SharedMemoryFactory::Get(data_region, shared_layout_ptr->GetSizeOfLayout()); | ||||
|         char *shared_memory_ptr = static_cast<char *>(shared_memory->Ptr()); | ||||
| 
 | ||||
|         // read actual data into shared memory object //
 | ||||
| 
 | ||||
|         // hsgr checksum
 | ||||
|         unsigned *checksum_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::HSGR_CHECKSUM); | ||||
|         *checksum_ptr = checksum; | ||||
| 
 | ||||
|         // ram index file name
 | ||||
|         char *file_index_path_ptr = shared_layout_ptr->GetBlockPtr<char, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::FILE_INDEX_PATH); | ||||
|         // make sure we have 0 ending
 | ||||
|         std::fill(file_index_path_ptr, | ||||
|                   file_index_path_ptr + | ||||
|                       shared_layout_ptr->GetBlockSize(SharedDataLayout::FILE_INDEX_PATH), | ||||
|                   0); | ||||
|         std::copy(file_index_path.begin(), file_index_path.end(), file_index_path_ptr); | ||||
| 
 | ||||
|         // Loading street names
 | ||||
|         unsigned *name_offsets_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::NAME_OFFSETS); | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_OFFSETS) > 0) | ||||
|         { | ||||
|             name_stream.read((char *)name_offsets_ptr, | ||||
|                              shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_OFFSETS)); | ||||
|         } | ||||
| 
 | ||||
|         unsigned *name_blocks_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::NAME_BLOCKS); | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_BLOCKS) > 0) | ||||
|         { | ||||
|             name_stream.read((char *)name_blocks_ptr, | ||||
|                              shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_BLOCKS)); | ||||
|         } | ||||
| 
 | ||||
|         char *name_char_ptr = shared_layout_ptr->GetBlockPtr<char, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::NAME_CHAR_LIST); | ||||
|         unsigned temp_length; | ||||
|         name_stream.read((char *)&temp_length, sizeof(unsigned)); | ||||
| 
 | ||||
|         BOOST_ASSERT_MSG(temp_length == | ||||
|                              shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_CHAR_LIST), | ||||
|                          "Name file corrupted!"); | ||||
| 
 | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_CHAR_LIST) > 0) | ||||
|         { | ||||
|             name_stream.read(name_char_ptr, | ||||
|                              shared_layout_ptr->GetBlockSize(SharedDataLayout::NAME_CHAR_LIST)); | ||||
|         } | ||||
| 
 | ||||
|         name_stream.close(); | ||||
| 
 | ||||
|         // load original edge information
 | ||||
|         NodeID *via_node_ptr = shared_layout_ptr->GetBlockPtr<NodeID, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::VIA_NODE_LIST); | ||||
| 
 | ||||
|         unsigned *name_id_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::NAME_ID_LIST); | ||||
| 
 | ||||
|         TravelMode *travel_mode_ptr = shared_layout_ptr->GetBlockPtr<TravelMode, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::TRAVEL_MODE); | ||||
| 
 | ||||
|         TurnInstruction *turn_instructions_ptr = | ||||
|             shared_layout_ptr->GetBlockPtr<TurnInstruction, true>( | ||||
|                 shared_memory_ptr, SharedDataLayout::TURN_INSTRUCTION); | ||||
| 
 | ||||
|         unsigned *geometries_indicator_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::GEOMETRIES_INDICATORS); | ||||
| 
 | ||||
|         OriginalEdgeData current_edge_data; | ||||
|         for (unsigned i = 0; i < number_of_original_edges; ++i) | ||||
|         { | ||||
|             edges_input_stream.read((char *)&(current_edge_data), sizeof(OriginalEdgeData)); | ||||
|             via_node_ptr[i] = current_edge_data.via_node; | ||||
|             name_id_ptr[i] = current_edge_data.name_id; | ||||
|             travel_mode_ptr[i] = current_edge_data.travel_mode; | ||||
|             turn_instructions_ptr[i] = current_edge_data.turn_instruction; | ||||
| 
 | ||||
|             const unsigned bucket = i / 32; | ||||
|             const unsigned offset = i % 32; | ||||
|             const unsigned value = [&] | ||||
| @ -455,154 +531,68 @@ int main(const int argc, const char *argv[]) | ||||
|                 unsigned return_value = 0; | ||||
|                 if (0 != offset) | ||||
|                 { | ||||
|                     return_value = geometries_indicator_ptr[bucket]; | ||||
|                     return_value = core_marker_ptr[bucket]; | ||||
|                 } | ||||
|                 return return_value; | ||||
|             }(); | ||||
|             if (current_edge_data.compressed_geometry) | ||||
|             { | ||||
|                 geometries_indicator_ptr[bucket] = (value | (1 << offset)); | ||||
|             } | ||||
| 
 | ||||
|             core_marker_ptr[bucket] = (value | (1 << offset)); | ||||
|         } | ||||
|         edges_input_stream.close(); | ||||
| 
 | ||||
|         // load compressed geometry
 | ||||
|         unsigned temporary_value; | ||||
|         unsigned *geometries_index_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::GEOMETRIES_INDEX); | ||||
|         geometry_input_stream.seekg(0, geometry_input_stream.beg); | ||||
|         geometry_input_stream.read((char *)&temporary_value, sizeof(unsigned)); | ||||
|         BOOST_ASSERT(temporary_value == | ||||
|                      shared_layout_ptr->num_entries[SharedDataLayout::GEOMETRIES_INDEX]); | ||||
| 
 | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_INDEX) > 0) | ||||
|         { | ||||
|             geometry_input_stream.read( | ||||
|                 (char *)geometries_index_ptr, | ||||
|                 shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_INDEX)); | ||||
|         } | ||||
|         unsigned *geometries_list_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::GEOMETRIES_LIST); | ||||
| 
 | ||||
|         geometry_input_stream.read((char *)&temporary_value, sizeof(unsigned)); | ||||
|         BOOST_ASSERT(temporary_value == | ||||
|                      shared_layout_ptr->num_entries[SharedDataLayout::GEOMETRIES_LIST]); | ||||
| 
 | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_LIST) > 0) | ||||
|         { | ||||
|             geometry_input_stream.read( | ||||
|                 (char *)geometries_list_ptr, | ||||
|                 shared_layout_ptr->GetBlockSize(SharedDataLayout::GEOMETRIES_LIST)); | ||||
|         } | ||||
| 
 | ||||
|         // Loading list of coordinates
 | ||||
|         FixedPointCoordinate *coordinates_ptr = | ||||
|             shared_layout_ptr->GetBlockPtr<FixedPointCoordinate, true>( | ||||
|                 shared_memory_ptr, SharedDataLayout::COORDINATE_LIST); | ||||
| 
 | ||||
|         QueryNode current_node; | ||||
|         for (unsigned i = 0; i < coordinate_list_size; ++i) | ||||
|         { | ||||
|             nodes_input_stream.read((char *)¤t_node, sizeof(QueryNode)); | ||||
|             coordinates_ptr[i] = FixedPointCoordinate(current_node.lat, current_node.lon); | ||||
|         } | ||||
|         nodes_input_stream.close(); | ||||
| 
 | ||||
|         // store timestamp
 | ||||
|         char *timestamp_ptr = shared_layout_ptr->GetBlockPtr<char, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::TIMESTAMP); | ||||
|         std::copy(m_timestamp.c_str(), m_timestamp.c_str() + m_timestamp.length(), timestamp_ptr); | ||||
| 
 | ||||
|         // store search tree portion of rtree
 | ||||
|         char *rtree_ptr = shared_layout_ptr->GetBlockPtr<char, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::R_SEARCH_TREE); | ||||
| 
 | ||||
|         if (tree_size > 0) | ||||
|         { | ||||
|             tree_node_file.read(rtree_ptr, sizeof(RTreeNode) * tree_size); | ||||
|         } | ||||
|         tree_node_file.close(); | ||||
| 
 | ||||
|         // load core markers
 | ||||
|         std::vector<char> unpacked_core_markers(number_of_core_markers); | ||||
|         core_marker_file.read((char *)unpacked_core_markers.data(), sizeof(char)*number_of_core_markers); | ||||
| 
 | ||||
|         unsigned *core_marker_ptr = shared_layout_ptr->GetBlockPtr<unsigned, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::CORE_MARKER); | ||||
| 
 | ||||
|         for (auto i = 0u; i < number_of_core_markers; ++i) | ||||
|         { | ||||
|             BOOST_ASSERT(unpacked_core_markers[i] == 0 || unpacked_core_markers[i] == 1); | ||||
| 
 | ||||
|             if (unpacked_core_markers[i] == 1) | ||||
|             { | ||||
|                 const unsigned bucket = i / 32; | ||||
|                 const unsigned offset = i % 32; | ||||
|                 const unsigned value = [&] | ||||
|                 { | ||||
|                     unsigned return_value = 0; | ||||
|                     if (0 != offset) | ||||
|                     { | ||||
|                         return_value = core_marker_ptr[bucket]; | ||||
|                     } | ||||
|                     return return_value; | ||||
|                 }(); | ||||
| 
 | ||||
|                 core_marker_ptr[bucket] = (value | (1 << offset)); | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         // load the nodes of the search graph
 | ||||
|         QueryGraph::NodeArrayEntry *graph_node_list_ptr = | ||||
|             shared_layout_ptr->GetBlockPtr<QueryGraph::NodeArrayEntry, true>( | ||||
|                 shared_memory_ptr, SharedDataLayout::GRAPH_NODE_LIST); | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_NODE_LIST) > 0) | ||||
|         { | ||||
|             hsgr_input_stream.read( | ||||
|                 (char *)graph_node_list_ptr, | ||||
|                 shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_NODE_LIST)); | ||||
|         } | ||||
| 
 | ||||
|         // load the edges of the search graph
 | ||||
|         QueryGraph::EdgeArrayEntry *graph_edge_list_ptr = | ||||
|             shared_layout_ptr->GetBlockPtr<QueryGraph::EdgeArrayEntry, true>( | ||||
|                 shared_memory_ptr, SharedDataLayout::GRAPH_EDGE_LIST); | ||||
|         if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_EDGE_LIST) > 0) | ||||
|         { | ||||
|             hsgr_input_stream.read( | ||||
|                 (char *)graph_edge_list_ptr, | ||||
|                 shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_EDGE_LIST)); | ||||
|         } | ||||
|         hsgr_input_stream.close(); | ||||
| 
 | ||||
|         // acquire lock
 | ||||
|         SharedMemory *data_type_memory = | ||||
|             SharedMemoryFactory::Get(CURRENT_REGIONS, sizeof(SharedDataTimestamp), true, false); | ||||
|         SharedDataTimestamp *data_timestamp_ptr = | ||||
|             static_cast<SharedDataTimestamp *>(data_type_memory->Ptr()); | ||||
| 
 | ||||
|         boost::interprocess::scoped_lock<boost::interprocess::named_mutex> query_lock( | ||||
|             barrier.query_mutex); | ||||
| 
 | ||||
|         // notify all processes that were waiting for this condition
 | ||||
|         if (0 < barrier.number_of_queries) | ||||
|         { | ||||
|             barrier.no_running_queries_condition.wait(query_lock); | ||||
|         } | ||||
| 
 | ||||
|         data_timestamp_ptr->layout = layout_region; | ||||
|         data_timestamp_ptr->data = data_region; | ||||
|         data_timestamp_ptr->timestamp += 1; | ||||
|         delete_region(previous_data_region); | ||||
|         delete_region(previous_layout_region); | ||||
|         SimpleLogger().Write() << "all data loaded"; | ||||
| 
 | ||||
|         shared_layout_ptr->PrintInformation(); | ||||
|     } | ||||
|     catch (const std::exception &e) | ||||
| 
 | ||||
|     // load the nodes of the search graph
 | ||||
|     QueryGraph::NodeArrayEntry *graph_node_list_ptr = | ||||
|         shared_layout_ptr->GetBlockPtr<QueryGraph::NodeArrayEntry, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::GRAPH_NODE_LIST); | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_NODE_LIST) > 0) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "caught exception: " << e.what(); | ||||
|         hsgr_input_stream.read((char *)graph_node_list_ptr, | ||||
|                                shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_NODE_LIST)); | ||||
|     } | ||||
| 
 | ||||
|     return 0; | ||||
|     // load the edges of the search graph
 | ||||
|     QueryGraph::EdgeArrayEntry *graph_edge_list_ptr = | ||||
|         shared_layout_ptr->GetBlockPtr<QueryGraph::EdgeArrayEntry, true>( | ||||
|             shared_memory_ptr, SharedDataLayout::GRAPH_EDGE_LIST); | ||||
|     if (shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_EDGE_LIST) > 0) | ||||
|     { | ||||
|         hsgr_input_stream.read((char *)graph_edge_list_ptr, | ||||
|                                shared_layout_ptr->GetBlockSize(SharedDataLayout::GRAPH_EDGE_LIST)); | ||||
|     } | ||||
|     hsgr_input_stream.close(); | ||||
| 
 | ||||
|     // acquire lock
 | ||||
|     SharedMemory *data_type_memory = | ||||
|         SharedMemoryFactory::Get(CURRENT_REGIONS, sizeof(SharedDataTimestamp), true, false); | ||||
|     SharedDataTimestamp *data_timestamp_ptr = | ||||
|         static_cast<SharedDataTimestamp *>(data_type_memory->Ptr()); | ||||
| 
 | ||||
|     boost::interprocess::scoped_lock<boost::interprocess::named_mutex> query_lock( | ||||
|         barrier.query_mutex); | ||||
| 
 | ||||
|     // notify all processes that were waiting for this condition
 | ||||
|     if (0 < barrier.number_of_queries) | ||||
|     { | ||||
|         barrier.no_running_queries_condition.wait(query_lock); | ||||
|     } | ||||
| 
 | ||||
|     data_timestamp_ptr->layout = layout_region; | ||||
|     data_timestamp_ptr->data = data_region; | ||||
|     data_timestamp_ptr->timestamp += 1; | ||||
|     delete_region(previous_data_region); | ||||
|     delete_region(previous_layout_region); | ||||
|     SimpleLogger().Write() << "all data loaded"; | ||||
| 
 | ||||
|     shared_layout_ptr->PrintInformation(); | ||||
| } | ||||
| catch (const std::bad_alloc &e) | ||||
| { | ||||
|     SimpleLogger().Write(logWARNING) << "[exception] " << e.what(); | ||||
|     SimpleLogger().Write(logWARNING) << "Please provide more memory or disable locking the virtual " | ||||
|                                         "address space (note: this makes OSRM swap, i.e. slow)"; | ||||
|     return EXIT_FAILURE; | ||||
| } | ||||
| catch (const std::exception &e) | ||||
| { | ||||
|     SimpleLogger().Write(logWARNING) << "caught exception: " << e.what(); | ||||
| } | ||||
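With the nested try blocks gone, main is a function-try-block: the try spans the entire body and the handlers follow its closing brace, with std::bad_alloc reported separately because locking the address space via mlockall makes allocation failure more likely. A self-contained sketch of the idiom, not the datastore code itself:

#include <cstdlib>
#include <exception>
#include <iostream>
#include <new>

int main() try
{
    // any exception escaping the body lands in the handlers below
    throw std::bad_alloc();
}
catch (const std::bad_alloc &e)
{
    std::cerr << "allocation failed: " << e.what() << '\n';
    return EXIT_FAILURE;
}
catch (const std::exception &e)
{
    std::cerr << "caught exception: " << e.what() << '\n';
    return EXIT_FAILURE;
}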
|  | ||||
| @ -127,7 +127,7 @@ void DescriptionFactory::Run(const unsigned zoom_level) | ||||
|     { | ||||
|         // move down names by one, q&d hack
 | ||||
|         path_description[i - 1].name_id = path_description[i].name_id; | ||||
|         path_description[i].length = coordinate_calculation::euclidean_distance( | ||||
|         path_description[i].length = coordinate_calculation::great_circle_distance( | ||||
|             path_description[i - 1].location, path_description[i].location); | ||||
|     } | ||||
| 
 | ||||
| @ -230,18 +230,20 @@ void DescriptionFactory::Run(const unsigned zoom_level) | ||||
|                 return; | ||||
|             } | ||||
| 
 | ||||
|             ++necessary_segments; | ||||
| 
 | ||||
|             if (first.is_via_location) | ||||
|             { // mark the end of a leg (of several segments)
 | ||||
|                 via_indices.push_back(necessary_segments); | ||||
|             } | ||||
| 
 | ||||
|             const double angle = coordinate_calculation::bearing(first.location, second.location); | ||||
|             first.bearing = static_cast<short>(angle * 10); | ||||
|             const double post_turn_bearing = coordinate_calculation::bearing(first.location, second.location); | ||||
|             const double pre_turn_bearing = coordinate_calculation::bearing(second.location, first.location); | ||||
|             first.post_turn_bearing = static_cast<short>(post_turn_bearing * 10); | ||||
|             first.pre_turn_bearing = static_cast<short>(pre_turn_bearing * 10); | ||||
| 
 | ||||
|             ++necessary_segments; | ||||
|         }); | ||||
| 
 | ||||
|     via_indices.push_back(necessary_segments + 1); | ||||
|     via_indices.push_back(necessary_segments); | ||||
|     BOOST_ASSERT(via_indices.size() >= 2); | ||||
|     return; | ||||
| } | ||||
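The descriptor now keeps separate pre- and post-turn bearings, both taken from coordinate_calculation::bearing and stored as tenths of a degree in a short. For reference, a standalone sketch of the usual initial great-circle bearing formula on plain doubles; OSRM itself works on fixed-point coordinates, so this is only an illustration:

#include <cmath>

// initial bearing from point 1 to point 2, in degrees clockwise from north
inline double forward_bearing(double lat1_deg, double lon1_deg, double lat2_deg, double lon2_deg)
{
    constexpr double pi = 3.141592653589793;
    const double lat1 = lat1_deg * pi / 180.0;
    const double lat2 = lat2_deg * pi / 180.0;
    const double dlon = (lon2_deg - lon1_deg) * pi / 180.0;

    const double y = std::sin(dlon) * std::cos(lat2);
    const double x =
        std::cos(lat1) * std::sin(lat2) - std::sin(lat1) * std::cos(lat2) * std::cos(dlon);
    double bearing = std::atan2(y, x) * 180.0 / pi; // (-180, 180]
    if (bearing < 0.0)
    {
        bearing += 360.0;
    }
    return bearing; // [0, 360)
}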
|  | ||||
| @ -35,6 +35,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "../data_structures/segment_information.hpp" | ||||
| #include "../data_structures/turn_instructions.hpp" | ||||
| #include "../util/bearing.hpp" | ||||
| #include "../util/cast.hpp" | ||||
| #include "../util/integer_range.hpp" | ||||
| #include "../util/json_renderer.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| @ -43,7 +44,9 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include <osrm/json_container.hpp> | ||||
| 
 | ||||
| #include <limits> | ||||
| #include <algorithm> | ||||
| #include <string> | ||||
| 
 | ||||
| template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor<DataFacadeT> | ||||
| { | ||||
| @ -100,9 +103,6 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|         if (INVALID_EDGE_WEIGHT == raw_route.shortest_path_length) | ||||
|         { | ||||
|             // We do not need to do much, if there is no route ;-)
 | ||||
|             json_result.values["status"] = 207; | ||||
|             json_result.values["status_message"] = "Cannot find route between points"; | ||||
|             // osrm::json::render(reply.content, json_result);
 | ||||
|             return; | ||||
|         } | ||||
| 
 | ||||
| @ -113,8 +113,6 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|         description_factory.SetStartSegment( | ||||
|             raw_route.segment_end_coordinates.front().source_phantom, | ||||
|             raw_route.source_traversed_in_reverse.front()); | ||||
|         json_result.values["status"] = 0; | ||||
|         json_result.values["status_message"] = "Found route between points"; | ||||
| 
 | ||||
|         // for each unpacked segment add the leg to the description
 | ||||
|         for (const auto i : osrm::irange<std::size_t>(0, raw_route.unpacked_path_segments.size())) | ||||
| @ -293,14 +291,13 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|                                                      std::vector<Segment> &route_segments_list) const | ||||
|     { | ||||
|         osrm::json::Array json_instruction_array; | ||||
| 
 | ||||
|         // Segment information has the following format:
 | ||||
|         //["instruction id","streetname",length,position,time,"length","earth_direction",azimuth]
 | ||||
|         unsigned necessary_segments_running_index = 0; | ||||
| 
 | ||||
|         struct RoundAbout | ||||
|         { | ||||
|             RoundAbout() : start_index(INT_MAX), name_id(INVALID_NAMEID), leave_at_exit(INT_MAX) {} | ||||
|             RoundAbout() : start_index(std::numeric_limits<int>::max()), name_id(INVALID_NAMEID), leave_at_exit(std::numeric_limits<int>::max()) {} | ||||
|             int start_index; | ||||
|             unsigned name_id; | ||||
|             int leave_at_exit; | ||||
| @ -327,18 +324,18 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|                     std::string current_turn_instruction; | ||||
|                     if (TurnInstruction::LeaveRoundAbout == current_instruction) | ||||
|                     { | ||||
|                         temp_instruction = cast::integral_to_string( | ||||
|                         temp_instruction = std::to_string( | ||||
|                             cast::enum_to_underlying(TurnInstruction::EnterRoundAbout)); | ||||
|                         current_turn_instruction += temp_instruction; | ||||
|                         current_turn_instruction += "-"; | ||||
|                         temp_instruction = cast::integral_to_string(round_about.leave_at_exit + 1); | ||||
|                         temp_instruction = std::to_string(round_about.leave_at_exit + 1); | ||||
|                         current_turn_instruction += temp_instruction; | ||||
|                         round_about.leave_at_exit = 0; | ||||
|                     } | ||||
|                     else | ||||
|                     { | ||||
|                         temp_instruction = | ||||
|                             cast::integral_to_string(cast::enum_to_underlying(current_instruction)); | ||||
|                             std::to_string(cast::enum_to_underlying(current_instruction)); | ||||
|                         current_turn_instruction += temp_instruction; | ||||
|                     } | ||||
|                     json_instruction_row.values.push_back(current_turn_instruction); | ||||
| @ -348,17 +345,27 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|                     json_instruction_row.values.push_back(necessary_segments_running_index); | ||||
|                     json_instruction_row.values.push_back(std::round(segment.duration / 10.)); | ||||
|                     json_instruction_row.values.push_back( | ||||
|                         cast::integral_to_string(static_cast<unsigned>(segment.length)) + "m"); | ||||
|                     const double bearing_value = (segment.bearing / 10.); | ||||
|                     json_instruction_row.values.push_back(bearing::get(bearing_value)); | ||||
|                         std::to_string(static_cast<unsigned>(segment.length)) + "m"); | ||||
| 
 | ||||
|                     // post turn bearing
 | ||||
|                     const double post_turn_bearing_value = (segment.post_turn_bearing / 10.); | ||||
|                     json_instruction_row.values.push_back(bearing::get(post_turn_bearing_value)); | ||||
|                     json_instruction_row.values.push_back( | ||||
|                         static_cast<unsigned>(round(bearing_value))); | ||||
|                         static_cast<unsigned>(round(post_turn_bearing_value))); | ||||
| 
 | ||||
|                     json_instruction_row.values.push_back(segment.travel_mode); | ||||
| 
 | ||||
|                     // pre turn bearing
 | ||||
|                     const double pre_turn_bearing_value = (segment.pre_turn_bearing / 10.); | ||||
|                     json_instruction_row.values.push_back(bearing::get(pre_turn_bearing_value)); | ||||
|                     json_instruction_row.values.push_back( | ||||
|                         static_cast<unsigned>(round(pre_turn_bearing_value))); | ||||
| 
 | ||||
|                     json_instruction_array.values.push_back(json_instruction_row); | ||||
| 
 | ||||
|                     route_segments_list.emplace_back( | ||||
|                         segment.name_id, static_cast<int>(segment.length), | ||||
|                         static_cast<unsigned>(route_segments_list.size())); | ||||
|                     json_instruction_array.values.push_back(json_instruction_row); | ||||
|                 } | ||||
|             } | ||||
|             else if (TurnInstruction::StayOnRoundAbout == current_instruction) | ||||
| @ -372,8 +379,8 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|         } | ||||
| 
 | ||||
|         osrm::json::Array json_last_instruction_row; | ||||
|         temp_instruction = cast::integral_to_string( | ||||
|             cast::enum_to_underlying(TurnInstruction::ReachedYourDestination)); | ||||
|         temp_instruction = | ||||
|             std::to_string(cast::enum_to_underlying(TurnInstruction::ReachedYourDestination)); | ||||
|         json_last_instruction_row.values.push_back(temp_instruction); | ||||
|         json_last_instruction_row.values.push_back(""); | ||||
|         json_last_instruction_row.values.push_back(0); | ||||
| @ -382,6 +389,8 @@ template <class DataFacadeT> class JSONDescriptor final : public BaseDescriptor< | ||||
|         json_last_instruction_row.values.push_back("0m"); | ||||
|         json_last_instruction_row.values.push_back(bearing::get(0.0)); | ||||
|         json_last_instruction_row.values.push_back(0.); | ||||
|         json_last_instruction_row.values.push_back(bearing::get(0.0)); | ||||
|         json_last_instruction_row.values.push_back(0.); | ||||
|         json_instruction_array.values.push_back(json_last_instruction_row); | ||||
| 
 | ||||
|         return json_instruction_array; | ||||
|  | ||||
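Editor's note: with the change above, each instruction row (and the closing ReachedYourDestination row) carries a post-turn and a pre-turn compass direction plus azimuth instead of a single pair. bearing::get appears to translate an azimuth into a compass string; the sketch below is a hypothetical equivalent using eight 45-degree sectors, which may well be coarser than the real helper.

    #include <array>
    #include <string>

    // Hypothetical stand-in for bearing::get: map an azimuth in degrees to a
    // compass direction string by bucketing into eight 45-degree sectors.
    std::string compass_direction(double azimuth_in_degrees)
    {
        static const std::array<const char *, 8> names = {
            {"N", "NE", "E", "SE", "S", "SW", "W", "NW"}};
        // Shift by half a sector so that e.g. 350..10 degrees all map to "N".
        const int sector = static_cast<int>((azimuth_in_degrees + 22.5) / 45.0) % 8;
        return names[(sector + 8) % 8];
    }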
							
								
								
									
90 extract.cpp
| @ -31,53 +31,59 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include <boost/filesystem.hpp> | ||||
| 
 | ||||
| #include <cstdlib> | ||||
| #include <exception> | ||||
| #include <new> | ||||
| 
 | ||||
| int main(int argc, char *argv[]) | ||||
| int main(int argc, char *argv[]) try | ||||
| { | ||||
|     try | ||||
|     LogPolicy::GetInstance().Unmute(); | ||||
|     ExtractorConfig extractor_config; | ||||
| 
 | ||||
|     const return_code result = ExtractorOptions::ParseArguments(argc, argv, extractor_config); | ||||
| 
 | ||||
|     if (return_code::fail == result) | ||||
|     { | ||||
|         LogPolicy::GetInstance().Unmute(); | ||||
|         ExtractorConfig extractor_config; | ||||
| 
 | ||||
|         const return_code result = ExtractorOptions::ParseArguments(argc, argv, extractor_config); | ||||
| 
 | ||||
|         if (return_code::fail == result) | ||||
|         { | ||||
|             return 1; | ||||
|         } | ||||
| 
 | ||||
|         if (return_code::exit == result) | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
| 
 | ||||
|         ExtractorOptions::GenerateOutputFilesNames(extractor_config); | ||||
| 
 | ||||
|         if (1 > extractor_config.requested_num_threads) | ||||
|         { | ||||
|             SimpleLogger().Write(logWARNING) << "Number of threads must be 1 or larger"; | ||||
|             return 1; | ||||
|         } | ||||
| 
 | ||||
|         if (!boost::filesystem::is_regular_file(extractor_config.input_path)) | ||||
|         { | ||||
|             SimpleLogger().Write(logWARNING) | ||||
|                 << "Input file " << extractor_config.input_path.string() << " not found!"; | ||||
|             return 1; | ||||
|         } | ||||
| 
 | ||||
|         if (!boost::filesystem::is_regular_file(extractor_config.profile_path)) | ||||
|         { | ||||
|             SimpleLogger().Write(logWARNING) << "Profile " << extractor_config.profile_path.string() | ||||
|                                              << " not found!"; | ||||
|             return 1; | ||||
|         } | ||||
|         return extractor(extractor_config).run(); | ||||
|         return EXIT_FAILURE; | ||||
|     } | ||||
|     catch (const std::exception &e) | ||||
| 
 | ||||
|     if (return_code::exit == result) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "[exception] " << e.what(); | ||||
|         return 1; | ||||
|         return EXIT_SUCCESS; | ||||
|     } | ||||
| 
 | ||||
|     ExtractorOptions::GenerateOutputFilesNames(extractor_config); | ||||
| 
 | ||||
|     if (1 > extractor_config.requested_num_threads) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "Number of threads must be 1 or larger"; | ||||
|         return EXIT_FAILURE; | ||||
|     } | ||||
| 
 | ||||
|     if (!boost::filesystem::is_regular_file(extractor_config.input_path)) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "Input file " << extractor_config.input_path.string() | ||||
|                                          << " not found!"; | ||||
|         return EXIT_FAILURE; | ||||
|     } | ||||
| 
 | ||||
|     if (!boost::filesystem::is_regular_file(extractor_config.profile_path)) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "Profile " << extractor_config.profile_path.string() | ||||
|                                          << " not found!"; | ||||
|         return EXIT_FAILURE; | ||||
|     } | ||||
|     return extractor(extractor_config).run(); | ||||
| } | ||||
| catch (const std::bad_alloc &e) | ||||
| { | ||||
|     SimpleLogger().Write(logWARNING) << "[exception] " << e.what(); | ||||
|     SimpleLogger().Write(logWARNING) | ||||
|         << "Please provide more memory or consider using a larger swapfile"; | ||||
|     return EXIT_FAILURE; | ||||
| } | ||||
| catch (const std::exception &e) | ||||
| { | ||||
|     SimpleLogger().Write(logWARNING) << "[exception] " << e.what(); | ||||
|     return EXIT_FAILURE; | ||||
| } | ||||
|  | ||||
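Editor's note: the extract.cpp change above replaces a try block wrapping the whole body of main with a function-try-block, which drops one indentation level and gives std::bad_alloc its own, more actionable message. A minimal sketch of the idiom:

    #include <cstdlib>
    #include <exception>
    #include <iostream>
    #include <new>

    int main() try
    {
        // ... normal program logic; any early return works as usual ...
        return EXIT_SUCCESS;
    }
    catch (const std::bad_alloc &e)
    {
        // Out-of-memory gets its own, actionable message.
        std::cerr << "[exception] " << e.what() << "\n"
                  << "Please provide more memory or consider using a larger swapfile\n";
        return EXIT_FAILURE;
    }
    catch (const std::exception &e)
    {
        std::cerr << "[exception] " << e.what() << '\n';
        return EXIT_FAILURE;
    }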
| @ -26,12 +26,16 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| */ | ||||
| 
 | ||||
| #include "edge_based_graph_factory.hpp" | ||||
| #include "../algorithms/coordinate_calculation.hpp" | ||||
| #include "../data_structures/percent.hpp" | ||||
| #include "../util/compute_angle.hpp" | ||||
| #include "../util/integer_range.hpp" | ||||
| #include "../util/lua_util.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| #include "../util/timing_util.hpp" | ||||
| #include "../util/osrm_exception.hpp" | ||||
| 
 | ||||
| #include "../util/debug_geometry.hpp" | ||||
| 
 | ||||
| #include <boost/assert.hpp> | ||||
| 
 | ||||
| @ -57,7 +61,8 @@ EdgeBasedGraphFactory::EdgeBasedGraphFactory( | ||||
| void EdgeBasedGraphFactory::GetEdgeBasedEdges(DeallocatingVector<EdgeBasedEdge> &output_edge_list) | ||||
| { | ||||
|     BOOST_ASSERT_MSG(0 == output_edge_list.size(), "Vector is not empty"); | ||||
|     m_edge_based_edge_list.swap(output_edge_list); | ||||
|     using std::swap; // Koenig swap
 | ||||
|     swap(m_edge_based_edge_list, output_edge_list); | ||||
| } | ||||
| 
 | ||||
| void EdgeBasedGraphFactory::GetEdgeBasedNodes(std::vector<EdgeBasedNode> &nodes) | ||||
| @ -71,7 +76,14 @@ void EdgeBasedGraphFactory::GetEdgeBasedNodes(std::vector<EdgeBasedNode> &nodes) | ||||
|         BOOST_ASSERT(m_node_info_list.at(node.v).lat != INT_MAX); | ||||
|     } | ||||
| #endif | ||||
|     nodes.swap(m_edge_based_node_list); | ||||
|     using std::swap; // Koenig swap
 | ||||
|     swap(nodes, m_edge_based_node_list); | ||||
| } | ||||
| 
 | ||||
| void EdgeBasedGraphFactory::GetStartPointMarkers(std::vector<bool> &node_is_startpoint) | ||||
| { | ||||
|     using std::swap; // Koenig swap
 | ||||
|     swap(m_edge_based_node_is_startpoint, node_is_startpoint); | ||||
| } | ||||
| 
 | ||||
| unsigned EdgeBasedGraphFactory::GetHighestEdgeID() | ||||
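Editor's note: the getters above now hand their containers to the caller via the unqualified swap after `using std::swap` (the so-called Koenig swap) instead of calling a member swap. The sketch below shows why that form is preferred: argument-dependent lookup picks a type's own swap overload when one exists and falls back to std::swap otherwise.

    #include <utility>
    #include <vector>

    namespace demo
    {
    struct Heavy
    {
        std::vector<int> payload;
        // A type-specific swap found via argument-dependent lookup.
        friend void swap(Heavy &lhs, Heavy &rhs) noexcept { lhs.payload.swap(rhs.payload); }
    };
    } // namespace demo

    template <typename T> void hand_over(T &source, T &destination)
    {
        using std::swap;           // fallback for types without their own swap
        swap(source, destination); // unqualified: ADL may pick demo::swap
    }

    int main()
    {
        demo::Heavy a{{1, 2, 3}};
        demo::Heavy b;
        hand_over(a, b); // b now owns the payload, a is left with the empty one
    }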
| @ -160,7 +172,8 @@ void EdgeBasedGraphFactory::InsertEdgeBasedNode(const NodeID node_u, | ||||
|                 forward_data.name_id, forward_geometry[i].second, | ||||
|                 reverse_geometry[geometry_size - 1 - i].second, forward_dist_prefix_sum[i], | ||||
|                 reverse_dist_prefix_sum[i], m_compressed_edge_container.GetPositionForID(edge_id_1), | ||||
|                 INVALID_COMPONENTID, i, forward_data.travel_mode, reverse_data.travel_mode); | ||||
|                 false, INVALID_COMPONENTID, i, forward_data.travel_mode, reverse_data.travel_mode); | ||||
|             m_edge_based_node_is_startpoint.push_back(forward_data.startpoint || reverse_data.startpoint); | ||||
|             current_edge_source_coordinate_id = current_edge_target_coordinate_id; | ||||
| 
 | ||||
|             BOOST_ASSERT(m_edge_based_node_list.back().IsCompressed()); | ||||
| @ -203,7 +216,8 @@ void EdgeBasedGraphFactory::InsertEdgeBasedNode(const NodeID node_u, | ||||
|         m_edge_based_node_list.emplace_back( | ||||
|             forward_data.edge_id, reverse_data.edge_id, node_u, node_v, | ||||
|             forward_data.name_id, forward_data.distance, reverse_data.distance, 0, 0, SPECIAL_EDGEID, | ||||
|             INVALID_COMPONENTID, 0, forward_data.travel_mode, reverse_data.travel_mode); | ||||
|             false, INVALID_COMPONENTID, 0, forward_data.travel_mode, reverse_data.travel_mode); | ||||
|         m_edge_based_node_is_startpoint.push_back(forward_data.startpoint || reverse_data.startpoint); | ||||
|         BOOST_ASSERT(!m_edge_based_node_list.back().IsCompressed()); | ||||
|     } | ||||
| } | ||||
| @ -220,8 +234,20 @@ void EdgeBasedGraphFactory::FlushVectorToStream( | ||||
|     original_edge_data_vector.clear(); | ||||
| } | ||||
| 
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
| void EdgeBasedGraphFactory::Run(const std::string &original_edge_data_filename, | ||||
|                                 lua_State *lua_state) | ||||
|                                 lua_State *lua_state, | ||||
|                                 const std::string &edge_segment_lookup_filename, | ||||
|                                 const std::string &edge_penalty_filename, | ||||
|                                 const bool generate_edge_lookup, | ||||
|                                 const std::string &debug_turns_path) | ||||
| #else | ||||
| void EdgeBasedGraphFactory::Run(const std::string &original_edge_data_filename, | ||||
|                                 lua_State *lua_state, | ||||
|                                 const std::string &edge_segment_lookup_filename, | ||||
|                                 const std::string &edge_penalty_filename, | ||||
|                                 const bool generate_edge_lookup) | ||||
| #endif | ||||
| { | ||||
|     TIMER_START(renumber); | ||||
|     m_max_edge_id = RenumberEdges() - 1; | ||||
| @ -232,7 +258,16 @@ void EdgeBasedGraphFactory::Run(const std::string &original_edge_data_filename, | ||||
|     TIMER_STOP(generate_nodes); | ||||
| 
 | ||||
|     TIMER_START(generate_edges); | ||||
|     GenerateEdgeExpandedEdges(original_edge_data_filename, lua_state); | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     GenerateEdgeExpandedEdges(original_edge_data_filename, lua_state, | ||||
|             edge_segment_lookup_filename, edge_penalty_filename, | ||||
|             generate_edge_lookup, debug_turns_path); | ||||
| #else | ||||
|     GenerateEdgeExpandedEdges(original_edge_data_filename, lua_state, | ||||
|             edge_segment_lookup_filename, edge_penalty_filename, | ||||
|             generate_edge_lookup); | ||||
| #endif | ||||
| 
 | ||||
|     TIMER_STOP(generate_edges); | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Timing statistics for edge-expanded graph:"; | ||||
| @ -311,13 +346,27 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedNodes() | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     BOOST_ASSERT(m_edge_based_node_list.size() == m_edge_based_node_is_startpoint.size()); | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Generated " << m_edge_based_node_list.size() | ||||
|                            << " nodes in edge-expanded graph"; | ||||
| } | ||||
| 
 | ||||
| /// Actually it also generates OriginalEdgeData and serializes them...
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
| void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
|     const std::string &original_edge_data_filename, lua_State *lua_state) | ||||
|     const std::string &original_edge_data_filename, lua_State *lua_state, | ||||
|     const std::string &edge_segment_lookup_filename, | ||||
|     const std::string &edge_fixed_penalties_filename, | ||||
|     const bool generate_edge_lookup,  | ||||
|     const std::string &debug_turns_path) | ||||
| #else | ||||
| void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
|     const std::string &original_edge_data_filename, lua_State *lua_state, | ||||
|     const std::string &edge_segment_lookup_filename, | ||||
|     const std::string &edge_fixed_penalties_filename, | ||||
|     const bool generate_edge_lookup) | ||||
| #endif | ||||
| { | ||||
|     SimpleLogger().Write() << "generating edge-expanded edges"; | ||||
| 
 | ||||
| @ -325,6 +374,14 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
|     unsigned original_edges_counter = 0; | ||||
| 
 | ||||
|     std::ofstream edge_data_file(original_edge_data_filename.c_str(), std::ios::binary); | ||||
|     std::ofstream edge_segment_file; | ||||
|     std::ofstream edge_penalty_file; | ||||
| 
 | ||||
|     if (generate_edge_lookup) | ||||
|     { | ||||
|         edge_segment_file.open(edge_segment_lookup_filename.c_str(), std::ios::binary); | ||||
|         edge_penalty_file.open(edge_fixed_penalties_filename.c_str(), std::ios::binary); | ||||
|     } | ||||
| 
 | ||||
|     // writes a dummy value that is updated later
 | ||||
|     edge_data_file.write((char *)&original_edges_counter, sizeof(unsigned)); | ||||
| @ -342,9 +399,13 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
| 
 | ||||
|     Percent progress(m_node_based_graph->GetNumberOfNodes()); | ||||
| 
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     DEBUG_TURNS_START(debug_turns_path); | ||||
| #endif | ||||
| 
 | ||||
|     for (const auto node_u : osrm::irange(0u, m_node_based_graph->GetNumberOfNodes())) | ||||
|     { | ||||
|         progress.printStatus(node_u); | ||||
|         //progress.printStatus(node_u);
 | ||||
|         for (const EdgeID e1 : m_node_based_graph->GetAdjacentEdgeRange(node_u)) | ||||
|         { | ||||
|             if (m_node_based_graph->GetEdgeData(e1).reversed) | ||||
| @ -415,6 +476,8 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
|                 if (m_traffic_lights.find(node_v) != m_traffic_lights.end()) | ||||
|                 { | ||||
|                     distance += speed_profile.traffic_signal_penalty; | ||||
| 
 | ||||
|                     DEBUG_SIGNAL(node_v, m_node_info_list, speed_profile.traffic_signal_penalty); | ||||
|                 } | ||||
| 
 | ||||
|                 // unpack last node of first segment if packed
 | ||||
| @ -437,7 +500,12 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
|                 if (turn_instruction == TurnInstruction::UTurn) | ||||
|                 { | ||||
|                     distance += speed_profile.u_turn_penalty; | ||||
|                 } | ||||
| 
 | ||||
|                     DEBUG_UTURN(node_v, m_node_info_list, speed_profile.u_turn_penalty); | ||||
|                 }  | ||||
| 
 | ||||
|                 DEBUG_TURN(node_v, m_node_info_list, first_coordinate, turn_angle, turn_penalty); | ||||
| 
 | ||||
|                 distance += turn_penalty; | ||||
| 
 | ||||
|                 const bool edge_is_compressed = m_compressed_edge_container.HasEntryForID(e1); | ||||
| @ -462,11 +530,70 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges( | ||||
|                 BOOST_ASSERT(SPECIAL_NODEID != edge_data1.edge_id); | ||||
|                 BOOST_ASSERT(SPECIAL_NODEID != edge_data2.edge_id); | ||||
| 
 | ||||
| 
 | ||||
|                 // NOTE: potential overflow here if we hit 2^32 routable edges
 | ||||
|                 BOOST_ASSERT(m_edge_based_edge_list.size() <= std::numeric_limits<NodeID>::max()); | ||||
|                 m_edge_based_edge_list.emplace_back(edge_data1.edge_id, edge_data2.edge_id, | ||||
|                                   m_edge_based_edge_list.size(), distance, true, false); | ||||
| 
 | ||||
| 
 | ||||
|                 // Here is where we write out the mapping between the edge-expanded edges, and
 | ||||
|                 // the node-based edges that are originally used to calculate the `distance`
 | ||||
|                 // for the edge-expanded edges.  About 40 lines back, there is:
 | ||||
|                 //
 | ||||
|                 //                 unsigned distance = edge_data1.distance;
 | ||||
|                 //
 | ||||
|                 // This tells us that the weight for an edge-expanded-edge is based on the weight
 | ||||
|                 // of the *source* node-based edge.  Therefore, we will look up the individual
 | ||||
|                 // segments of the source node-based edge, and write out a mapping between
 | ||||
|                 // those and the edge-based-edge ID.
 | ||||
|                 // External programs can then use this mapping to quickly perform
 | ||||
|                 // updates to the edge-expanded edge directly, based on its ID.
 | ||||
|                 if (generate_edge_lookup) | ||||
|                 { | ||||
|                     unsigned fixed_penalty = distance - edge_data1.distance; | ||||
|                     edge_penalty_file.write(reinterpret_cast<const char *>(&fixed_penalty), sizeof(fixed_penalty)); | ||||
|                     if (edge_is_compressed) | ||||
|                     { | ||||
|                         const auto node_based_edges = m_compressed_edge_container.GetBucketReference(e1); | ||||
|                         NodeID previous = node_u; | ||||
| 
 | ||||
|                         const unsigned node_count = node_based_edges.size()+1; | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&node_count), sizeof(node_count)); | ||||
|                         const QueryNode &first_node = m_node_info_list[previous]; | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&first_node.node_id), sizeof(first_node.node_id)); | ||||
| 
 | ||||
|                         for (auto target_node : node_based_edges) | ||||
|                         { | ||||
|                             const QueryNode &from = m_node_info_list[previous]; | ||||
|                             const QueryNode &to = m_node_info_list[target_node.first]; | ||||
|                             const double segment_length = coordinate_calculation::great_circle_distance(from.lat, from.lon, to.lat, to.lon); | ||||
| 
 | ||||
|                             edge_segment_file.write(reinterpret_cast<const char *>(&to.node_id), sizeof(to.node_id)); | ||||
|                             edge_segment_file.write(reinterpret_cast<const char *>(&segment_length), sizeof(segment_length)); | ||||
|                             edge_segment_file.write(reinterpret_cast<const char *>(&target_node.second), sizeof(target_node.second)); | ||||
|                             previous = target_node.first; | ||||
|                         } | ||||
|                     } | ||||
|                     else | ||||
|                     { | ||||
|                         static const unsigned node_count = 2; | ||||
|                         const QueryNode from = m_node_info_list[node_u]; | ||||
|                         const QueryNode to = m_node_info_list[node_v]; | ||||
|                         const double segment_length = coordinate_calculation::great_circle_distance(from.lat, from.lon, to.lat, to.lon); | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&node_count), sizeof(node_count)); | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&from.node_id), sizeof(from.node_id)); | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&to.node_id), sizeof(to.node_id)); | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&segment_length), sizeof(segment_length)); | ||||
|                         edge_segment_file.write(reinterpret_cast<const char *>(&edge_data1.distance), sizeof(edge_data1.distance)); | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     DEBUG_TURNS_STOP(); | ||||
| 
 | ||||
|     FlushVectorToStream(edge_data_file, original_edge_data_vector); | ||||
| 
 | ||||
|     edge_data_file.seekp(std::ios::beg); | ||||
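Editor's note: when generate_edge_lookup is set, the loop above serializes, for every edge-expanded edge, the chain of original node-based segments its weight was built from: a segment count, the first OSM node id, then one (node id, length, weight) triple per segment, while the fixed turn and signal penalty goes to a separate file. The reader sketch below mirrors that layout; the integer widths of the id and weight fields are assumptions and must match whatever types the writer actually used.

    #include <cstdint>
    #include <fstream>
    #include <vector>

    // Assumed on-disk widths; these must mirror the writer's types.
    using OsmNodeId = std::uint64_t;
    using SegmentWeight = std::int32_t;

    struct LookupSegment
    {
        OsmNodeId target;
        double length_in_meters;
        SegmentWeight weight;
    };

    // Reads one edge's record from a segment-lookup stream shaped like the one
    // written above. Returns false on a clean end-of-file.
    bool read_edge_record(std::ifstream &in, OsmNodeId &first_node,
                          std::vector<LookupSegment> &segments)
    {
        std::uint32_t node_count = 0;
        if (!in.read(reinterpret_cast<char *>(&node_count), sizeof(node_count)))
            return false;
        in.read(reinterpret_cast<char *>(&first_node), sizeof(first_node));

        segments.clear();
        for (std::uint32_t i = 0; i + 1 < node_count; ++i)
        {
            LookupSegment s;
            in.read(reinterpret_cast<char *>(&s.target), sizeof(s.target));
            in.read(reinterpret_cast<char *>(&s.length_in_meters), sizeof(s.length_in_meters));
            in.read(reinterpret_cast<char *>(&s.weight), sizeof(s.weight));
            segments.push_back(s);
        }
        return static_cast<bool>(in);
    }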
| @ -50,6 +50,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <unordered_set> | ||||
| #include <vector> | ||||
| 
 | ||||
| #include <boost/filesystem/fstream.hpp> | ||||
| 
 | ||||
| struct lua_State; | ||||
| 
 | ||||
| class EdgeBasedGraphFactory | ||||
| @ -66,12 +68,25 @@ class EdgeBasedGraphFactory | ||||
|                                    const std::vector<QueryNode> &node_info_list, | ||||
|                                    SpeedProfileProperties speed_profile); | ||||
| 
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     void Run(const std::string &original_edge_data_filename, | ||||
|              lua_State *lua_state); | ||||
|              lua_State *lua_state, | ||||
|              const std::string &edge_segment_lookup_filename, | ||||
|              const std::string &edge_penalty_filename, | ||||
|              const bool generate_edge_lookup, | ||||
|              const std::string &debug_turns_path); | ||||
| #else | ||||
|     void Run(const std::string &original_edge_data_filename, | ||||
|              lua_State *lua_state, | ||||
|              const std::string &edge_segment_lookup_filename, | ||||
|              const std::string &edge_penalty_filename, | ||||
|              const bool generate_edge_lookup); | ||||
| #endif | ||||
| 
 | ||||
|     void GetEdgeBasedEdges(DeallocatingVector<EdgeBasedEdge> &edges); | ||||
| 
 | ||||
|     void GetEdgeBasedNodes(std::vector<EdgeBasedNode> &nodes); | ||||
|     void GetStartPointMarkers(std::vector<bool> &node_is_startpoint); | ||||
| 
 | ||||
|     unsigned GetHighestEdgeID(); | ||||
| 
 | ||||
| @ -82,6 +97,9 @@ class EdgeBasedGraphFactory | ||||
|   private: | ||||
|     using EdgeData = NodeBasedDynamicGraph::EdgeData; | ||||
| 
 | ||||
|     //! maps an index from m_edge_based_node_list to true/false indicating whether the node is an entry point to the graph
 | ||||
|     std::vector<bool> m_edge_based_node_is_startpoint; | ||||
|     //! list of edge based nodes (compressed segments)
 | ||||
|     std::vector<EdgeBasedNode> m_edge_based_node_list; | ||||
|     DeallocatingVector<EdgeBasedEdge> m_edge_based_edge_list; | ||||
|     unsigned m_max_edge_id; | ||||
| @ -99,8 +117,20 @@ class EdgeBasedGraphFactory | ||||
|     void CompressGeometry(); | ||||
|     unsigned RenumberEdges(); | ||||
|     void GenerateEdgeExpandedNodes(); | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     void GenerateEdgeExpandedEdges(const std::string &original_edge_data_filename, | ||||
|                                    lua_State *lua_state); | ||||
|                                    lua_State *lua_state, | ||||
|                                    const std::string &edge_segment_lookup_filename, | ||||
|                                    const std::string &edge_fixed_penalties_filename, | ||||
|                                    const bool generate_edge_lookup, | ||||
|                                    const std::string &debug_turns_path); | ||||
| #else | ||||
|     void GenerateEdgeExpandedEdges(const std::string &original_edge_data_filename, | ||||
|                                    lua_State *lua_state, | ||||
|                                    const std::string &edge_segment_lookup_filename, | ||||
|                                    const std::string &edge_fixed_penalties_filename, | ||||
|                                    const bool generate_edge_lookup); | ||||
| #endif  | ||||
| 
 | ||||
|     void InsertEdgeBasedNode(const NodeID u, const NodeID v); | ||||
| 
 | ||||
| @ -42,6 +42,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <boost/filesystem.hpp> | ||||
| #include <boost/filesystem/fstream.hpp> | ||||
| #include <boost/ref.hpp> | ||||
| #include <boost/numeric/conversion/cast.hpp> | ||||
| 
 | ||||
| #include <luabind/luabind.hpp> | ||||
| 
 | ||||
| @ -50,11 +51,14 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <chrono> | ||||
| #include <limits> | ||||
| 
 | ||||
| static const int WRITE_BLOCK_BUFFER_SIZE = 8000; | ||||
| 
 | ||||
| ExtractionContainers::ExtractionContainers() | ||||
| { | ||||
|     // Check if stxxl can be instantiated
 | ||||
|     stxxl::vector<unsigned> dummy_vector; | ||||
|     name_list.push_back(""); | ||||
|     // Insert the empty string, it has no data and is zero length
 | ||||
|     name_lengths.push_back(0); | ||||
| } | ||||
| 
 | ||||
| ExtractionContainers::~ExtractionContainers() | ||||
| @ -63,7 +67,8 @@ ExtractionContainers::~ExtractionContainers() | ||||
|     used_node_id_list.clear(); | ||||
|     all_nodes_list.clear(); | ||||
|     all_edges_list.clear(); | ||||
|     name_list.clear(); | ||||
|     name_char_data.clear(); | ||||
|     name_lengths.clear(); | ||||
|     restrictions_list.clear(); | ||||
|     way_start_end_id_list.clear(); | ||||
| } | ||||
| @ -115,13 +120,10 @@ void ExtractionContainers::WriteNames(const std::string& names_file_name) const | ||||
|     boost::filesystem::ofstream name_file_stream(names_file_name, std::ios::binary); | ||||
| 
 | ||||
|     unsigned total_length = 0; | ||||
|     std::vector<unsigned> name_lengths; | ||||
|     for (const std::string &temp_string : name_list) | ||||
| 
 | ||||
|     for (const unsigned &name_length : name_lengths) | ||||
|     { | ||||
|         const unsigned string_length = | ||||
|             std::min(static_cast<unsigned>(temp_string.length()), 255u); | ||||
|         name_lengths.push_back(string_length); | ||||
|         total_length += string_length; | ||||
|         total_length += name_length; | ||||
|     } | ||||
| 
 | ||||
|     // builds and writes the index
 | ||||
| @ -129,14 +131,25 @@ void ExtractionContainers::WriteNames(const std::string& names_file_name) const | ||||
|     name_file_stream << name_index_range; | ||||
| 
 | ||||
|     name_file_stream.write((char *)&total_length, sizeof(unsigned)); | ||||
| 
 | ||||
| 
 | ||||
|     // write all chars consecutively
 | ||||
|     for (const std::string &temp_string : name_list) | ||||
|     char write_buffer[WRITE_BLOCK_BUFFER_SIZE]; | ||||
|     unsigned buffer_len = 0; | ||||
| 
 | ||||
|     for (const char &c : name_char_data) | ||||
|     { | ||||
|         const unsigned string_length = | ||||
|             std::min(static_cast<unsigned>(temp_string.length()), 255u); | ||||
|         name_file_stream.write(temp_string.c_str(), string_length); | ||||
|         write_buffer[buffer_len++] = c; | ||||
| 
 | ||||
|         if (buffer_len >= WRITE_BLOCK_BUFFER_SIZE) | ||||
|         { | ||||
|             name_file_stream.write(write_buffer, WRITE_BLOCK_BUFFER_SIZE); | ||||
|             buffer_len = 0; | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     name_file_stream.write(write_buffer, buffer_len); | ||||
| 
 | ||||
|     name_file_stream.close(); | ||||
|     TIMER_STOP(write_name_index); | ||||
|     std::cout << "ok, after " << TIMER_SEC(write_name_index) << "s" << std::endl; | ||||
| @ -171,7 +184,11 @@ void ExtractionContainers::PrepareNodes() | ||||
|     auto ref_iter = used_node_id_list.begin(); | ||||
|     const auto all_nodes_list_end = all_nodes_list.end(); | ||||
|     const auto used_node_id_list_end = used_node_id_list.end(); | ||||
|     auto internal_id = 0u; | ||||
|     // Note: despite being able to handle 64 bit OSM node ids, we can't
 | ||||
|     // handle > uint32_t actual usable nodes.  This should be OK for a while
 | ||||
|     // because we usually route on a *lot* less than 2^32 of the OSM
 | ||||
|     // graph nodes.
 | ||||
|     std::size_t internal_id = 0; | ||||
| 
 | ||||
|     // compute the intersection of nodes that were referenced and nodes we actually have
 | ||||
|     while (node_iter != all_nodes_list_end && ref_iter != used_node_id_list_end) | ||||
| @ -187,11 +204,15 @@ void ExtractionContainers::PrepareNodes() | ||||
|             continue; | ||||
|         } | ||||
|         BOOST_ASSERT(node_iter->node_id == *ref_iter); | ||||
|         external_to_internal_node_id_map[*ref_iter] = internal_id++; | ||||
|         external_to_internal_node_id_map[*ref_iter] = static_cast<NodeID>(internal_id++); | ||||
|         node_iter++; | ||||
|         ref_iter++; | ||||
|     } | ||||
|     max_internal_node_id = internal_id; | ||||
|     if (internal_id > std::numeric_limits<NodeID>::max()) | ||||
|     { | ||||
|         throw osrm::exception("There are too many nodes remaining after filtering, OSRM only supports 2^32 unique nodes"); | ||||
|     } | ||||
|     max_internal_node_id = boost::numeric_cast<NodeID>(internal_id); | ||||
|     TIMER_STOP(id_map); | ||||
|     std::cout << "ok, after " << TIMER_SEC(id_map) << "s" << std::endl; | ||||
| 
 | ||||
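Editor's note: the renumbering loop above now counts in std::size_t and narrows back to the 32-bit internal id only after an explicit range check, with boost::numeric_cast as a second safety net. A tiny sketch of that guard-then-narrow pattern, assuming NodeID is a 32-bit unsigned type:

    #include <cstdint>
    #include <limits>
    #include <stdexcept>

    #include <boost/numeric/conversion/cast.hpp>

    using NodeID = std::uint32_t; // assumption: the internal id is 32 bit here

    NodeID checked_node_count(std::size_t internal_id)
    {
        if (internal_id > std::numeric_limits<NodeID>::max())
        {
            throw std::runtime_error(
                "There are too many nodes remaining after filtering, "
                "OSRM only supports 2^32 unique nodes");
        }
        // numeric_cast would also throw boost::numeric::bad_numeric_cast
        // if the explicit check above were ever removed.
        return boost::numeric_cast<NodeID>(internal_id);
    }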
| @ -202,7 +223,7 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
|     // Sort edges by start.
 | ||||
|     std::cout << "[extractor] Sorting edges by start    ... " << std::flush; | ||||
|     TIMER_START(sort_edges_by_start); | ||||
|     stxxl::sort(all_edges_list.begin(), all_edges_list.end(), CmpEdgeByStartID(), stxxl_memory); | ||||
|     stxxl::sort(all_edges_list.begin(), all_edges_list.end(), CmpEdgeByOSMStartID(), stxxl_memory); | ||||
|     TIMER_STOP(sort_edges_by_start); | ||||
|     std::cout << "ok, after " << TIMER_SEC(sort_edges_by_start) << "s" << std::endl; | ||||
| 
 | ||||
| @ -217,21 +238,21 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
| 
 | ||||
|     while (edge_iterator != all_edges_list_end && node_iterator != all_nodes_list_end) | ||||
|     { | ||||
|         if (edge_iterator->result.source < node_iterator->node_id) | ||||
|         if (edge_iterator->result.osm_source_id < node_iterator->node_id) | ||||
|         { | ||||
|             SimpleLogger().Write(LogLevel::logWARNING) << "Found invalid node reference " << edge_iterator->result.source; | ||||
|             edge_iterator->result.source = SPECIAL_NODEID; | ||||
|             ++edge_iterator; | ||||
|             continue; | ||||
|         } | ||||
|         if (edge_iterator->result.source > node_iterator->node_id) | ||||
|         if (edge_iterator->result.osm_source_id > node_iterator->node_id) | ||||
|         { | ||||
|             node_iterator++; | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         // remove loops
 | ||||
|         if (edge_iterator->result.source == edge_iterator->result.target) | ||||
|         if (edge_iterator->result.osm_source_id == edge_iterator->result.osm_target_id) | ||||
|         { | ||||
|             edge_iterator->result.source = SPECIAL_NODEID; | ||||
|             edge_iterator->result.target = SPECIAL_NODEID; | ||||
| @ -239,7 +260,7 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         BOOST_ASSERT(edge_iterator->result.source == node_iterator->node_id); | ||||
|         BOOST_ASSERT(edge_iterator->result.osm_source_id == node_iterator->node_id); | ||||
| 
 | ||||
|         // assign new node id
 | ||||
|         auto id_iter = external_to_internal_node_id_map.find(node_iterator->node_id); | ||||
| @ -250,13 +271,24 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
|         edge_iterator->source_coordinate.lon = node_iterator->lon; | ||||
|         ++edge_iterator; | ||||
|     } | ||||
| 
 | ||||
|     // Remove all remaining edges. They are invalid because there are no corresponding nodes for
 | ||||
|     // them. This happens when using osmosis with bbox or polygon to extract smaller areas.
 | ||||
|     auto markSourcesInvalid = [](InternalExtractorEdge &edge) | ||||
|     { | ||||
|         SimpleLogger().Write(LogLevel::logWARNING) << "Found invalid node reference " | ||||
|                                                    << edge.result.source; | ||||
|         edge.result.source = SPECIAL_NODEID; | ||||
|         edge.result.osm_source_id = SPECIAL_OSM_NODEID; | ||||
|     }; | ||||
|     std::for_each(edge_iterator, all_edges_list_end, markSourcesInvalid); | ||||
|     TIMER_STOP(set_start_coords); | ||||
|     std::cout << "ok, after " << TIMER_SEC(set_start_coords) << "s" << std::endl; | ||||
| 
 | ||||
|     // Sort Edges by target
 | ||||
|     std::cout << "[extractor] Sorting edges by target   ... " << std::flush; | ||||
|     TIMER_START(sort_edges_by_target); | ||||
|     stxxl::sort(all_edges_list.begin(), all_edges_list.end(), CmpEdgeByTargetID(), | ||||
|     stxxl::sort(all_edges_list.begin(), all_edges_list.end(), CmpEdgeByOSMTargetID(), | ||||
|                 stxxl_memory); | ||||
|     TIMER_STOP(sort_edges_by_target); | ||||
|     std::cout << "ok, after " << TIMER_SEC(sort_edges_by_target) << "s" << std::endl; | ||||
| @ -278,25 +310,25 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         if (edge_iterator->result.target < node_iterator->node_id) | ||||
|         if (edge_iterator->result.osm_target_id < node_iterator->node_id) | ||||
|         { | ||||
|             SimpleLogger().Write(LogLevel::logWARNING) << "Found invalid node reference " << edge_iterator->result.target; | ||||
|             SimpleLogger().Write(LogLevel::logWARNING) << "Found invalid node reference " << OSMNodeID_to_uint64_t(edge_iterator->result.osm_target_id); | ||||
|             edge_iterator->result.target = SPECIAL_NODEID; | ||||
|             ++edge_iterator; | ||||
|             continue; | ||||
|         } | ||||
|         if (edge_iterator->result.target > node_iterator->node_id) | ||||
|         if (edge_iterator->result.osm_target_id > node_iterator->node_id) | ||||
|         { | ||||
|             ++node_iterator; | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         BOOST_ASSERT(edge_iterator->result.target == node_iterator->node_id); | ||||
|         BOOST_ASSERT(edge_iterator->result.osm_target_id == node_iterator->node_id); | ||||
|         BOOST_ASSERT(edge_iterator->weight_data.speed >= 0); | ||||
|         BOOST_ASSERT(edge_iterator->source_coordinate.lat != std::numeric_limits<int>::min()); | ||||
|         BOOST_ASSERT(edge_iterator->source_coordinate.lon != std::numeric_limits<int>::min()); | ||||
| 
 | ||||
|         const double distance = coordinate_calculation::euclidean_distance( | ||||
|         const double distance = coordinate_calculation::great_circle_distance( | ||||
|             edge_iterator->source_coordinate.lat, edge_iterator->source_coordinate.lon, | ||||
|             node_iterator->lat, node_iterator->lon); | ||||
| 
 | ||||
| @ -347,13 +379,23 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
|         } | ||||
|         ++edge_iterator; | ||||
|     } | ||||
| 
 | ||||
|     // Remove all remaining edges. They are invalid because there are no corresponding nodes for
 | ||||
|     // them. This happens when using osmosis with bbox or polygon to extract smaller areas.
 | ||||
|     auto markTargetsInvalid = [](InternalExtractorEdge &edge) | ||||
|     { | ||||
|         SimpleLogger().Write(LogLevel::logWARNING) << "Found invalid node reference " | ||||
|                                                    << edge.result.target; | ||||
|         edge.result.target = SPECIAL_NODEID; | ||||
|     }; | ||||
|     std::for_each(edge_iterator, all_edges_list_end_, markTargetsInvalid); | ||||
|     TIMER_STOP(compute_weights); | ||||
|     std::cout << "ok, after " << TIMER_SEC(compute_weights) << "s" << std::endl; | ||||
| 
 | ||||
|     // Sort edges by start.
 | ||||
|     std::cout << "[extractor] Sorting edges by renumbered start ... " << std::flush; | ||||
|     TIMER_START(sort_edges_by_renumbered_start); | ||||
|     stxxl::sort(all_edges_list.begin(), all_edges_list.end(), CmpEdgeByStartThenTargetID(), stxxl_memory); | ||||
|     stxxl::sort(all_edges_list.begin(), all_edges_list.end(), CmpEdgeByInternalStartThenInternalTargetID(), stxxl_memory); | ||||
|     TIMER_STOP(sort_edges_by_renumbered_start); | ||||
|     std::cout << "ok, after " << TIMER_SEC(sort_edges_by_renumbered_start) << "s" << std::endl; | ||||
| 
 | ||||
| @ -444,13 +486,14 @@ void ExtractionContainers::PrepareEdges(lua_State *segment_state) | ||||
| 
 | ||||
| void ExtractionContainers::WriteEdges(std::ofstream& file_out_stream) const | ||||
| { | ||||
|     std::cout << "[extractor] Writing used egdes       ... " << std::flush; | ||||
|     std::cout << "[extractor] Writing used edges       ... " << std::flush; | ||||
|     TIMER_START(write_edges); | ||||
|     // Traverse list of edges and nodes in parallel and set target coord
 | ||||
|     unsigned number_of_used_edges = 0; | ||||
|     std::size_t used_edges_counter = 0; | ||||
|     unsigned used_edges_counter_buffer = 0; | ||||
| 
 | ||||
|     auto start_position = file_out_stream.tellp(); | ||||
|     file_out_stream.write((char *)&number_of_used_edges, sizeof(unsigned)); | ||||
|     file_out_stream.write((char *)&used_edges_counter_buffer, sizeof(unsigned)); | ||||
| 
 | ||||
|     for (const auto& edge : all_edges_list) | ||||
|     { | ||||
| @ -459,18 +502,29 @@ void ExtractionContainers::WriteEdges(std::ofstream& file_out_stream) const | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         file_out_stream.write((char*) &edge.result, sizeof(NodeBasedEdge)); | ||||
|         number_of_used_edges++; | ||||
|         // IMPORTANT: here, we're using slicing to only write the data from the base
 | ||||
|         // class of NodeBasedEdgeWithOSM
 | ||||
|         NodeBasedEdge tmp = edge.result; | ||||
|         file_out_stream.write((char*) &tmp, sizeof(NodeBasedEdge)); | ||||
|         used_edges_counter++; | ||||
|     } | ||||
| 
 | ||||
|     if (used_edges_counter > std::numeric_limits<unsigned>::max()) | ||||
|     { | ||||
|         throw osrm::exception("There are too many edges, OSRM only supports 2^32"); | ||||
|     } | ||||
|     TIMER_STOP(write_edges); | ||||
|     std::cout << "ok, after " << TIMER_SEC(write_edges) << "s" << std::endl; | ||||
| 
 | ||||
|     std::cout << "[extractor] setting number of edges   ... " << std::flush; | ||||
| 
 | ||||
|     used_edges_counter_buffer = boost::numeric_cast<unsigned>(used_edges_counter); | ||||
| 
 | ||||
|     file_out_stream.seekp(start_position); | ||||
|     file_out_stream.write((char *)&number_of_used_edges, sizeof(unsigned)); | ||||
|     file_out_stream.write((char *)&used_edges_counter_buffer, sizeof(unsigned)); | ||||
|     std::cout << "ok" << std::endl; | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Processed " << number_of_used_edges << " edges"; | ||||
|     SimpleLogger().Write() << "Processed " << used_edges_counter << " edges"; | ||||
| } | ||||
| 
 | ||||
| void ExtractionContainers::WriteNodes(std::ofstream& file_out_stream) const | ||||
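Editor's note: WriteEdges deliberately copies each edge into its base type before writing, so only the base-class bytes reach the file and the OSM id fields are sliced away. A stand-alone sketch of the same trick with hypothetical field layouts:

    #include <cstdint>
    #include <fstream>

    // Hypothetical layouts; the real NodeBasedEdge carries more fields.
    struct BaseEdge
    {
        std::uint32_t source;
        std::uint32_t target;
        std::int32_t weight;
    };

    struct EdgeWithOsmIds : BaseEdge
    {
        std::uint64_t osm_source_id; // extra fields that must NOT hit the disk
        std::uint64_t osm_target_id;
    };

    void write_base_only(std::ofstream &out, const EdgeWithOsmIds &edge)
    {
        // Copy-initializing a BaseEdge from the derived object slices off the
        // OSM ids; writing sizeof(BaseEdge) bytes then serializes only the base.
        BaseEdge on_disk = edge;
        out.write(reinterpret_cast<const char *>(&on_disk), sizeof(BaseEdge));
    }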
| @ -569,13 +623,13 @@ void ExtractionContainers::PrepareRestrictions() | ||||
|     while (way_start_and_end_iterator != way_start_end_id_list_end && | ||||
|            restrictions_iterator != restrictions_list_end) | ||||
|     { | ||||
|         if (way_start_and_end_iterator->way_id < restrictions_iterator->restriction.from.way) | ||||
|         if (way_start_and_end_iterator->way_id < OSMWayID(restrictions_iterator->restriction.from.way)) | ||||
|         { | ||||
|             ++way_start_and_end_iterator; | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         if (way_start_and_end_iterator->way_id > restrictions_iterator->restriction.from.way) | ||||
|         if (way_start_and_end_iterator->way_id > OSMWayID(restrictions_iterator->restriction.from.way)) | ||||
|         { | ||||
|             SimpleLogger().Write(LogLevel::logDEBUG) << "Restriction references invalid way: " << restrictions_iterator->restriction.from.way; | ||||
|             restrictions_iterator->restriction.from.node = SPECIAL_NODEID; | ||||
| @ -584,9 +638,9 @@ void ExtractionContainers::PrepareRestrictions() | ||||
|         } | ||||
| 
 | ||||
|         BOOST_ASSERT(way_start_and_end_iterator->way_id == | ||||
|                      restrictions_iterator->restriction.from.way); | ||||
|                      OSMWayID(restrictions_iterator->restriction.from.way)); | ||||
|         // we do not remap the via id yet, since we will need it for the to node as well
 | ||||
|         const NodeID via_node_id = restrictions_iterator->restriction.via.node; | ||||
|         const OSMNodeID via_node_id = OSMNodeID(restrictions_iterator->restriction.via.node); | ||||
| 
 | ||||
|         // check if via is actually valid, if not invalidate
 | ||||
|         auto via_id_iter = external_to_internal_node_id_map.find(via_node_id); | ||||
| @ -598,19 +652,19 @@ void ExtractionContainers::PrepareRestrictions() | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         if (way_start_and_end_iterator->first_segment_source_id == via_node_id) | ||||
|         if (OSMNodeID(way_start_and_end_iterator->first_segment_source_id) == via_node_id) | ||||
|         { | ||||
|             // assign new from node id
 | ||||
|             auto id_iter = external_to_internal_node_id_map.find( | ||||
|                     way_start_and_end_iterator->first_segment_target_id); | ||||
|                     OSMNodeID(way_start_and_end_iterator->first_segment_target_id)); | ||||
|             BOOST_ASSERT(id_iter != external_to_internal_node_id_map.end()); | ||||
|             restrictions_iterator->restriction.from.node = id_iter->second; | ||||
|         } | ||||
|         else if (way_start_and_end_iterator->last_segment_target_id == via_node_id) | ||||
|         else if (OSMNodeID(way_start_and_end_iterator->last_segment_target_id) == via_node_id) | ||||
|         { | ||||
|             // assign new from node id
 | ||||
|             auto id_iter = external_to_internal_node_id_map.find( | ||||
|                     way_start_and_end_iterator->last_segment_source_id); | ||||
|                     OSMNodeID(way_start_and_end_iterator->last_segment_source_id)); | ||||
|             BOOST_ASSERT(id_iter != external_to_internal_node_id_map.end()); | ||||
|             restrictions_iterator->restriction.from.node = id_iter->second; | ||||
|         } | ||||
| @ -637,7 +691,7 @@ void ExtractionContainers::PrepareRestrictions() | ||||
|     while (way_start_and_end_iterator != way_start_end_id_list_end_ && | ||||
|            restrictions_iterator != restrictions_list_end_) | ||||
|     { | ||||
|         if (way_start_and_end_iterator->way_id < restrictions_iterator->restriction.to.way) | ||||
|         if (way_start_and_end_iterator->way_id < OSMWayID(restrictions_iterator->restriction.to.way)) | ||||
|         { | ||||
|             ++way_start_and_end_iterator; | ||||
|             continue; | ||||
| @ -648,7 +702,7 @@ void ExtractionContainers::PrepareRestrictions() | ||||
|             ++restrictions_iterator; | ||||
|             continue; | ||||
|         } | ||||
|         if (way_start_and_end_iterator->way_id > restrictions_iterator->restriction.to.way) | ||||
|         if (way_start_and_end_iterator->way_id > OSMWayID(restrictions_iterator->restriction.to.way)) | ||||
|         { | ||||
|             SimpleLogger().Write(LogLevel::logDEBUG) << "Restriction references invalid way: " << restrictions_iterator->restriction.to.way; | ||||
|             restrictions_iterator->restriction.to.way = SPECIAL_NODEID; | ||||
| @ -656,25 +710,25 @@ void ExtractionContainers::PrepareRestrictions() | ||||
|             continue; | ||||
|         } | ||||
|         BOOST_ASSERT(way_start_and_end_iterator->way_id == | ||||
|                      restrictions_iterator->restriction.to.way); | ||||
|         const NodeID via_node_id = restrictions_iterator->restriction.via.node; | ||||
|                      OSMWayID(restrictions_iterator->restriction.to.way)); | ||||
|         const OSMNodeID via_node_id = OSMNodeID(restrictions_iterator->restriction.via.node); | ||||
| 
 | ||||
|         // assign new via node id
 | ||||
|         auto via_id_iter = external_to_internal_node_id_map.find(via_node_id); | ||||
|         BOOST_ASSERT(via_id_iter != external_to_internal_node_id_map.end()); | ||||
|         restrictions_iterator->restriction.via.node = via_id_iter->second; | ||||
| 
 | ||||
|         if (way_start_and_end_iterator->first_segment_source_id == via_node_id) | ||||
|         if (OSMNodeID(way_start_and_end_iterator->first_segment_source_id) == via_node_id) | ||||
|         { | ||||
|             auto to_id_iter = external_to_internal_node_id_map.find( | ||||
|                     way_start_and_end_iterator->first_segment_target_id); | ||||
|                     OSMNodeID(way_start_and_end_iterator->first_segment_target_id)); | ||||
|             BOOST_ASSERT(to_id_iter != external_to_internal_node_id_map.end()); | ||||
|             restrictions_iterator->restriction.to.node = to_id_iter->second; | ||||
|         } | ||||
|         else if (way_start_and_end_iterator->last_segment_target_id == via_node_id) | ||||
|         else if (OSMNodeID(way_start_and_end_iterator->last_segment_target_id) == via_node_id) | ||||
|         { | ||||
|             auto to_id_iter = external_to_internal_node_id_map.find( | ||||
|                     way_start_and_end_iterator->last_segment_source_id); | ||||
|                     OSMNodeID(way_start_and_end_iterator->last_segment_source_id)); | ||||
|             BOOST_ASSERT(to_id_iter != external_to_internal_node_id_map.end()); | ||||
|             restrictions_iterator->restriction.to.node = to_id_iter->second; | ||||
|         } | ||||
|  | ||||
| @ -61,20 +61,20 @@ class ExtractionContainers | ||||
|     void WriteEdges(std::ofstream& file_out_stream) const; | ||||
|     void WriteNames(const std::string& names_file_name) const; | ||||
|   public: | ||||
|     using STXXLNodeIDVector = stxxl::vector<NodeID>; | ||||
|     using STXXLNodeIDVector = stxxl::vector<OSMNodeID>; | ||||
|     using STXXLNodeVector = stxxl::vector<ExternalMemoryNode>; | ||||
|     using STXXLEdgeVector = stxxl::vector<InternalExtractorEdge>; | ||||
|     using STXXLStringVector = stxxl::vector<std::string>; | ||||
|     using STXXLRestrictionsVector = stxxl::vector<InputRestrictionContainer>; | ||||
|     using STXXLWayIDStartEndVector = stxxl::vector<FirstAndLastSegmentOfWay>; | ||||
| 
 | ||||
|     STXXLNodeIDVector used_node_id_list; | ||||
|     STXXLNodeVector all_nodes_list; | ||||
|     STXXLEdgeVector all_edges_list; | ||||
|     STXXLStringVector name_list; | ||||
|     stxxl::vector<char> name_char_data; | ||||
|     stxxl::vector<unsigned> name_lengths; | ||||
|     STXXLRestrictionsVector restrictions_list; | ||||
|     STXXLWayIDStartEndVector way_start_end_id_list; | ||||
|     std::unordered_map<NodeID, NodeID> external_to_internal_node_id_map; | ||||
|     std::unordered_map<OSMNodeID, NodeID> external_to_internal_node_id_map; | ||||
|     unsigned max_internal_node_id; | ||||
| 
 | ||||
|     ExtractionContainers(); | ||||
|  | ||||
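Editor's note: the containers now key the external-to-internal map on a distinct OSMNodeID type rather than reusing NodeID, which is why the restriction code above wraps raw values in OSMNodeID(...) before comparing or looking them up. The sketch below shows one way such a strong typedef can look, including the std::hash specialization the unordered_map needs; OSRM's real OSMNodeID is presumably defined elsewhere (an assumption), so names and details here are illustrative.

    #include <cstdint>
    #include <functional>
    #include <unordered_map>

    using NodeID = std::uint32_t;

    // Hypothetical strong typedef: a 64-bit OSM id that will not silently
    // convert to, or compare against, an internal NodeID.
    struct OSMNodeID
    {
        std::uint64_t value;
        explicit OSMNodeID(std::uint64_t v) : value(v) {}
        bool operator==(const OSMNodeID &other) const { return value == other.value; }
        bool operator<(const OSMNodeID &other) const { return value < other.value; }
    };

    namespace std
    {
    template <> struct hash<OSMNodeID>
    {
        std::size_t operator()(const OSMNodeID &id) const
        {
            return std::hash<std::uint64_t>()(id.value);
        }
    };
    } // namespace std

    int main()
    {
        std::unordered_map<OSMNodeID, NodeID> external_to_internal;
        external_to_internal[OSMNodeID(4294967296ULL)] = 0; // an id above 2^32
        // external_to_internal[42] = 1; // would not compile: no implicit conversion
    }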
| @ -37,6 +37,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <boost/regex.hpp> | ||||
| 
 | ||||
| #include <limits> | ||||
| #include <string> | ||||
| 
 | ||||
| bool simple_duration_is_valid(const std::string &s) | ||||
| { | ||||
| @ -89,18 +90,18 @@ unsigned parseDuration(const std::string &s) | ||||
|         { | ||||
|             if (1 == result.size()) | ||||
|             { | ||||
|                 minutes = cast::string_to_int(result[0]); | ||||
|                 minutes = std::stoul(result[0]); | ||||
|             } | ||||
|             if (2 == result.size()) | ||||
|             { | ||||
|                 minutes = cast::string_to_int(result[1]); | ||||
|                 hours = cast::string_to_int(result[0]); | ||||
|                 minutes = std::stoul(result[1]); | ||||
|                 hours = std::stoul(result[0]); | ||||
|             } | ||||
|             if (3 == result.size()) | ||||
|             { | ||||
|                 seconds = cast::string_to_int(result[2]); | ||||
|                 minutes = cast::string_to_int(result[1]); | ||||
|                 hours = cast::string_to_int(result[0]); | ||||
|                 seconds = std::stoul(result[2]); | ||||
|                 minutes = std::stoul(result[1]); | ||||
|                 hours = std::stoul(result[0]); | ||||
|             } | ||||
|             return (3600 * hours + 60 * minutes + seconds); | ||||
|         } | ||||
|  | ||||
| @ -50,6 +50,7 @@ struct ExtractionWay | ||||
|         backward_speed = -1; | ||||
|         duration = -1; | ||||
|         roundabout = false; | ||||
|         is_startpoint = true; | ||||
|         is_access_restricted = false; | ||||
|         name.clear(); | ||||
|         forward_travel_mode = TRAVEL_MODE_DEFAULT; | ||||
| @ -120,6 +121,7 @@ struct ExtractionWay | ||||
|     std::string name; | ||||
|     bool roundabout; | ||||
|     bool is_access_restricted; | ||||
|     bool is_startpoint; | ||||
|     TravelMode forward_travel_mode : 4; | ||||
|     TravelMode backward_travel_mode : 4; | ||||
| }; | ||||
|  | ||||
| @ -35,16 +35,25 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "scripting_environment.hpp" | ||||
| 
 | ||||
| #include "../data_structures/raster_source.hpp" | ||||
| #include "../util/git_sha.hpp" | ||||
| #include "../util/make_unique.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| #include "../util/timing_util.hpp" | ||||
| #include "../util/lua_util.hpp" | ||||
| #include "../util/graph_loader.hpp" | ||||
| 
 | ||||
| #include "../typedefs.h" | ||||
| 
 | ||||
| #include "../data_structures/static_graph.hpp" | ||||
| #include "../data_structures/static_rtree.hpp" | ||||
| #include "../data_structures/restriction_map.hpp" | ||||
| #include "../data_structures/compressed_edge_container.hpp" | ||||
| 
 | ||||
| #include "../algorithms/tarjan_scc.hpp" | ||||
| #include "../algorithms/crc32_processor.hpp" | ||||
| 
 | ||||
| #include <boost/filesystem.hpp> | ||||
| #include <boost/filesystem/fstream.hpp> | ||||
| #include <boost/optional/optional.hpp> | ||||
| 
 | ||||
| #include <luabind/luabind.hpp> | ||||
| 
 | ||||
| @ -53,8 +62,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <tbb/parallel_for.h> | ||||
| #include <tbb/task_scheduler_init.h> | ||||
| 
 | ||||
| #include <variant/optional.hpp> | ||||
| 
 | ||||
| #include <cstdlib> | ||||
| 
 | ||||
| #include <algorithm> | ||||
| @ -81,7 +88,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
|  * The result of this process are the following files: | ||||
|  *  .names : Names of all streets, stored as a long consecutive string with a prefix-sum based index | ||||
|  *  .osrm  : Nodes and edges in an intermediate format that is easy to digest for osrm-prepare | ||||
|  *  .restrictions : Turn restrictions that are used by osrm-prepare to construct the edge-expanded graph | ||||
|  *  .restrictions : Turn restrictions that are used by osrm-prepare to construct the edge-expanded | ||||
|  * graph | ||||
|  * | ||||
|  */ | ||||
| int extractor::run() | ||||
| @ -151,8 +159,7 @@ int extractor::run() | ||||
|         // initialize vectors holding parsed objects
 | ||||
|         tbb::concurrent_vector<std::pair<std::size_t, ExtractionNode>> resulting_nodes; | ||||
|         tbb::concurrent_vector<std::pair<std::size_t, ExtractionWay>> resulting_ways; | ||||
|         tbb::concurrent_vector<mapbox::util::optional<InputRestrictionContainer>> | ||||
|             resulting_restrictions; | ||||
|         tbb::concurrent_vector<boost::optional<InputRestrictionContainer>> resulting_restrictions; | ||||
| 
 | ||||
|         // setup restriction parser
 | ||||
|         const RestrictionParser restriction_parser(scripting_environment.get_lua_state()); | ||||
| @ -249,21 +256,383 @@ int extractor::run() | ||||
|             return 1; | ||||
|         } | ||||
| 
 | ||||
|         extraction_containers.PrepareData(config.output_file_name, | ||||
|                                           config.restriction_file_name, | ||||
|                                           config.names_file_name, | ||||
|                                           segment_state); | ||||
|         extraction_containers.PrepareData(config.output_file_name, config.restriction_file_name, | ||||
|                                           config.names_file_name, segment_state); | ||||
| 
 | ||||
|         TIMER_STOP(extracting); | ||||
|         SimpleLogger().Write() << "extraction finished after " << TIMER_SEC(extracting) << "s"; | ||||
|         SimpleLogger().Write() << "To prepare the data for routing, run: " | ||||
|                                << "./osrm-prepare " << config.output_file_name | ||||
|                                << std::endl; | ||||
|     } | ||||
|     catch (std::exception &e) | ||||
|     catch (const std::exception &e) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << e.what(); | ||||
|         return 1; | ||||
|     } | ||||
| 
 | ||||
|     try | ||||
|     { | ||||
|         // Transform the node-based graph that OSM is based on into an edge-based graph
 | ||||
|         // that is better for routing.  Every edge becomes a node, and every valid
 | ||||
|         // movement (e.g. turn from A->B, and B->A) becomes an edge
 | ||||
|         //
 | ||||
|         //
 | ||||
|         //    // Create a new lua state
 | ||||
| 
 | ||||
|         SimpleLogger().Write() << "Generating edge-expanded graph representation"; | ||||
| 
 | ||||
|         TIMER_START(expansion); | ||||
| 
 | ||||
|         std::vector<EdgeBasedNode> node_based_edge_list; | ||||
|         DeallocatingVector<EdgeBasedEdge> edge_based_edge_list; | ||||
|         std::vector<bool> node_is_startpoint; | ||||
|         std::vector<QueryNode> internal_to_external_node_map; | ||||
|         auto graph_size = | ||||
|             BuildEdgeExpandedGraph(internal_to_external_node_map, node_based_edge_list, | ||||
|                                    node_is_startpoint, edge_based_edge_list); | ||||
| 
 | ||||
|         auto number_of_node_based_nodes = graph_size.first; | ||||
|         auto max_edge_id = graph_size.second; | ||||
| 
 | ||||
|         TIMER_STOP(expansion); | ||||
| 
 | ||||
|         SimpleLogger().Write() << "building r-tree ..."; | ||||
|         TIMER_START(rtree); | ||||
| 
 | ||||
|         FindComponents(max_edge_id, edge_based_edge_list, node_based_edge_list); | ||||
| 
 | ||||
|         BuildRTree(std::move(node_based_edge_list), std::move(node_is_startpoint), | ||||
|                    internal_to_external_node_map); | ||||
| 
 | ||||
|         TIMER_STOP(rtree); | ||||
| 
 | ||||
|         SimpleLogger().Write() << "writing node map ..."; | ||||
|         WriteNodeMapping(internal_to_external_node_map); | ||||
| 
 | ||||
|         WriteEdgeBasedGraph(config.edge_graph_output_path, max_edge_id, edge_based_edge_list); | ||||
| 
 | ||||
|         SimpleLogger().Write() << "Expansion  : " | ||||
|                                << (number_of_node_based_nodes / TIMER_SEC(expansion)) | ||||
|                                << " nodes/sec and " << ((max_edge_id + 1) / TIMER_SEC(expansion)) | ||||
|                                << " edges/sec"; | ||||
|         SimpleLogger().Write() << "To prepare the data for routing, run: " | ||||
|                                << "./osrm-prepare " << config.output_file_name << std::endl; | ||||
|     } | ||||
|     catch (const std::exception &e) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << e.what(); | ||||
|         return 1; | ||||
|     } | ||||
| 
 | ||||
|     return 0; | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|     \brief Sets up the scripting environment (Lua scripting) | ||||
|     Also initializes speed profile. | ||||
| */ | ||||
| void extractor::SetupScriptingEnvironment(lua_State *lua_state, | ||||
|                                           SpeedProfileProperties &speed_profile) | ||||
| { | ||||
|     // open utility libraries (string library, etc.)
 | ||||
|     luaL_openlibs(lua_state); | ||||
| 
 | ||||
|     // adjust lua load path
 | ||||
|     luaAddScriptFolderToLoadPath(lua_state, config.profile_path.string().c_str()); | ||||
| 
 | ||||
|     // Now call our function in a lua script
 | ||||
|     if (0 != luaL_dofile(lua_state, config.profile_path.string().c_str())) | ||||
|     { | ||||
|         std::stringstream msg; | ||||
|         msg << lua_tostring(lua_state, -1) << " occurred in scripting block"; | ||||
|         throw osrm::exception(msg.str()); | ||||
|     } | ||||
| 
 | ||||
|     if (0 != luaL_dostring(lua_state, "return traffic_signal_penalty\n")) | ||||
|     { | ||||
|         std::stringstream msg; | ||||
|         msg << lua_tostring(lua_state, -1) << " occurred in scripting block"; | ||||
|         throw osrm::exception(msg.str()); | ||||
|     } | ||||
|     speed_profile.traffic_signal_penalty = 10 * lua_tointeger(lua_state, -1); | ||||
|     SimpleLogger().Write(logDEBUG) << "traffic_signal_penalty: " | ||||
|                                    << speed_profile.traffic_signal_penalty; | ||||
| 
 | ||||
|     if (0 != luaL_dostring(lua_state, "return u_turn_penalty\n")) | ||||
|     { | ||||
|         std::stringstream msg; | ||||
|         msg << lua_tostring(lua_state, -1) << " occurred in scripting block"; | ||||
|         throw osrm::exception(msg.str()); | ||||
|     } | ||||
| 
 | ||||
|     speed_profile.u_turn_penalty = 10 * lua_tointeger(lua_state, -1); | ||||
|     speed_profile.has_turn_penalty_function = lua_function_exists(lua_state, "turn_function"); | ||||
| } | ||||
| 
 | ||||
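The luaL_dostring("return <name>\n") / lua_tointeger(lua_state, -1) pairs above are the standard Lua C API pattern for reading a global that the profile script defined. A hedged sketch of the same pattern in isolation (helper name and error type are illustrative, not part of this commit):

    #include <lua.hpp>
    #include <stdexcept>

    // Evaluate a "return <global>" chunk and read the integer result off the stack.
    int read_int_global(lua_State *lua_state, const char *chunk)
    {
        if (0 != luaL_dostring(lua_state, chunk)) // e.g. "return u_turn_penalty\n"
            throw std::runtime_error(lua_tostring(lua_state, -1));
        const int value = lua_tointeger(lua_state, -1);
        lua_pop(lua_state, 1); // keep the stack balanced when the state is reused
        return value;
    }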
| void extractor::FindComponents(unsigned max_edge_id, | ||||
|                                const DeallocatingVector<EdgeBasedEdge> &input_edge_list, | ||||
|                                std::vector<EdgeBasedNode> &input_nodes) const | ||||
| { | ||||
|     struct UncontractedEdgeData | ||||
|     { | ||||
|     }; | ||||
|     struct InputEdge | ||||
|     { | ||||
|         unsigned source; | ||||
|         unsigned target; | ||||
|         UncontractedEdgeData data; | ||||
| 
 | ||||
|         bool operator<(const InputEdge &rhs) const | ||||
|         { | ||||
|             return source < rhs.source || (source == rhs.source && target < rhs.target); | ||||
|         } | ||||
| 
 | ||||
|         bool operator==(const InputEdge &rhs) const | ||||
|         { | ||||
|             return source == rhs.source && target == rhs.target; | ||||
|         } | ||||
|     }; | ||||
|     using UncontractedGraph = StaticGraph<UncontractedEdgeData>; | ||||
|     std::vector<InputEdge> edges; | ||||
|     edges.reserve(input_edge_list.size() * 2); | ||||
| 
 | ||||
|     for (const auto &edge : input_edge_list) | ||||
|     { | ||||
|         BOOST_ASSERT_MSG(static_cast<unsigned int>(std::max(edge.weight, 1)) > 0, | ||||
|                          "edge distance < 1"); | ||||
|         if (edge.forward) | ||||
|         { | ||||
|             edges.push_back({edge.source, edge.target, {}}); | ||||
|         } | ||||
| 
 | ||||
|         if (edge.backward) | ||||
|         { | ||||
|             edges.push_back({edge.target, edge.source, {}}); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     // connect forward and backward nodes of each edge
 | ||||
|     for (const auto &node : input_nodes) | ||||
|     { | ||||
|         if (node.reverse_edge_based_node_id != SPECIAL_NODEID) | ||||
|         { | ||||
|             edges.push_back({node.forward_edge_based_node_id, node.reverse_edge_based_node_id, {}}); | ||||
|             edges.push_back({node.reverse_edge_based_node_id, node.forward_edge_based_node_id, {}}); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     tbb::parallel_sort(edges.begin(), edges.end()); | ||||
|     auto new_end = std::unique(edges.begin(), edges.end()); | ||||
|     edges.resize(new_end - edges.begin()); | ||||
| 
 | ||||
|     auto uncontractor_graph = std::make_shared<UncontractedGraph>(max_edge_id + 1, edges); | ||||
| 
 | ||||
|     TarjanSCC<UncontractedGraph> component_search( | ||||
|         std::const_pointer_cast<const UncontractedGraph>(uncontractor_graph)); | ||||
|     component_search.run(); | ||||
| 
 | ||||
|     for (auto &node : input_nodes) | ||||
|     { | ||||
|         auto forward_component = component_search.get_component_id(node.forward_edge_based_node_id); | ||||
|         BOOST_ASSERT(node.reverse_edge_based_node_id == SPECIAL_EDGEID || | ||||
|                      forward_component == | ||||
|                          component_search.get_component_id(node.reverse_edge_based_node_id)); | ||||
| 
 | ||||
|         const unsigned component_size = component_search.get_component_size(forward_component); | ||||
|         node.component.is_tiny = component_size < config.small_component_size; | ||||
|         node.component.id = 1 + forward_component; | ||||
|     } | ||||
| } | ||||
| 
 | ||||
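The parallel_sort / std::unique / resize sequence above is the usual deduplication idiom; on a plain std::vector it is more commonly spelled with erase. A sketch for comparison only:

    #include <algorithm>
    #include <vector>

    template <typename Edge> void sort_and_dedup(std::vector<Edge> &edges)
    {
        std::sort(edges.begin(), edges.end()); // tbb::parallel_sort in the code above
        edges.erase(std::unique(edges.begin(), edges.end()), edges.end());
    }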
| /**
 | ||||
|   \brief Load turn restrictions from the .restrictions file | ||||
|   */ | ||||
| std::shared_ptr<RestrictionMap> extractor::LoadRestrictionMap() | ||||
| { | ||||
|     boost::filesystem::ifstream input_stream(config.restriction_file_name, | ||||
|                                              std::ios::in | std::ios::binary); | ||||
| 
 | ||||
|     std::vector<TurnRestriction> restriction_list; | ||||
|     loadRestrictionsFromFile(input_stream, restriction_list); | ||||
| 
 | ||||
|     SimpleLogger().Write() << " - " << restriction_list.size() << " restrictions."; | ||||
| 
 | ||||
|     return std::make_shared<RestrictionMap>(restriction_list); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|   \brief Load node based graph from .osrm file | ||||
|   */ | ||||
| std::shared_ptr<NodeBasedDynamicGraph> | ||||
| extractor::LoadNodeBasedGraph(std::unordered_set<NodeID> &barrier_nodes, | ||||
|                               std::unordered_set<NodeID> &traffic_lights, | ||||
|                               std::vector<QueryNode> &internal_to_external_node_map) | ||||
| { | ||||
|     std::vector<NodeBasedEdge> edge_list; | ||||
| 
 | ||||
|     boost::filesystem::ifstream input_stream(config.output_file_name, | ||||
|                                              std::ios::in | std::ios::binary); | ||||
| 
 | ||||
|     std::vector<NodeID> barrier_list; | ||||
|     std::vector<NodeID> traffic_light_list; | ||||
|     NodeID number_of_node_based_nodes = loadNodesFromFile( | ||||
|         input_stream, barrier_list, traffic_light_list, internal_to_external_node_map); | ||||
| 
 | ||||
|     SimpleLogger().Write() << " - " << barrier_list.size() << " bollard nodes, " | ||||
|                            << traffic_light_list.size() << " traffic lights"; | ||||
| 
 | ||||
|     // insert into unordered sets for fast lookup
 | ||||
|     barrier_nodes.insert(barrier_list.begin(), barrier_list.end()); | ||||
|     traffic_lights.insert(traffic_light_list.begin(), traffic_light_list.end()); | ||||
| 
 | ||||
|     barrier_list.clear(); | ||||
|     barrier_list.shrink_to_fit(); | ||||
|     traffic_light_list.clear(); | ||||
|     traffic_light_list.shrink_to_fit(); | ||||
| 
 | ||||
|     loadEdgesFromFile(input_stream, edge_list); | ||||
| 
 | ||||
|     if (edge_list.empty()) | ||||
|     { | ||||
|         SimpleLogger().Write(logWARNING) << "The input data is empty, exiting."; | ||||
|         return std::shared_ptr<NodeBasedDynamicGraph>(); | ||||
|     } | ||||
| 
 | ||||
|     return NodeBasedDynamicGraphFromEdges(number_of_node_based_nodes, edge_list); | ||||
| } | ||||
| 
 | ||||
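The clear()/shrink_to_fit() pairs in LoadNodeBasedGraph above release the temporary barrier and traffic-light lists once their contents have been copied into the unordered sets. A small sketch of that idiom:

    #include <vector>

    template <typename T> void release_memory(std::vector<T> &v)
    {
        v.clear();
        v.shrink_to_fit(); // non-binding request, but frees the capacity in practice
        // pre-C++11 alternative: std::vector<T>().swap(v);
    }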
| /**
 | ||||
|  \brief Building an edge-expanded graph from node-based input and turn restrictions | ||||
| */ | ||||
| std::pair<std::size_t, std::size_t> | ||||
| extractor::BuildEdgeExpandedGraph(std::vector<QueryNode> &internal_to_external_node_map, | ||||
|                                   std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                                   std::vector<bool> &node_is_startpoint, | ||||
|                                   DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list) | ||||
| { | ||||
|     lua_State *lua_state = luaL_newstate(); | ||||
|     luabind::open(lua_state); | ||||
| 
 | ||||
|     SpeedProfileProperties speed_profile; | ||||
|     SetupScriptingEnvironment(lua_state, speed_profile); | ||||
| 
 | ||||
|     std::unordered_set<NodeID> barrier_nodes; | ||||
|     std::unordered_set<NodeID> traffic_lights; | ||||
| 
 | ||||
|     auto restriction_map = LoadRestrictionMap(); | ||||
|     auto node_based_graph = | ||||
|         LoadNodeBasedGraph(barrier_nodes, traffic_lights, internal_to_external_node_map); | ||||
| 
 | ||||
|     CompressedEdgeContainer compressed_edge_container; | ||||
|     GraphCompressor graph_compressor(speed_profile); | ||||
|     graph_compressor.Compress(barrier_nodes, traffic_lights, *restriction_map, *node_based_graph, | ||||
|                               compressed_edge_container); | ||||
| 
 | ||||
|     EdgeBasedGraphFactory edge_based_graph_factory( | ||||
|         node_based_graph, compressed_edge_container, barrier_nodes, traffic_lights, | ||||
|         std::const_pointer_cast<RestrictionMap const>(restriction_map), | ||||
|         internal_to_external_node_map, speed_profile); | ||||
| 
 | ||||
|     compressed_edge_container.SerializeInternalVector(config.geometry_output_path); | ||||
| 
 | ||||
|     edge_based_graph_factory.Run(config.edge_output_path, lua_state, | ||||
|                                  config.edge_segment_lookup_path, config.edge_penalty_path, | ||||
|                                  config.generate_edge_lookup | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|                                  , | ||||
|                                  config.debug_turns_path | ||||
| #endif | ||||
|                                  ); | ||||
|     lua_close(lua_state); | ||||
| 
 | ||||
|     edge_based_graph_factory.GetEdgeBasedEdges(edge_based_edge_list); | ||||
|     edge_based_graph_factory.GetEdgeBasedNodes(node_based_edge_list); | ||||
|     edge_based_graph_factory.GetStartPointMarkers(node_is_startpoint); | ||||
|     auto max_edge_id = edge_based_graph_factory.GetHighestEdgeID(); | ||||
| 
 | ||||
|     const std::size_t number_of_node_based_nodes = node_based_graph->GetNumberOfNodes(); | ||||
|     return std::make_pair(number_of_node_based_nodes, max_edge_id); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|   \brief Writing info on original (node-based) nodes | ||||
|  */ | ||||
| void extractor::WriteNodeMapping(const std::vector<QueryNode> &internal_to_external_node_map) | ||||
| { | ||||
|     boost::filesystem::ofstream node_stream(config.node_output_path, std::ios::binary); | ||||
|     const unsigned size_of_mapping = internal_to_external_node_map.size(); | ||||
|     node_stream.write((char *)&size_of_mapping, sizeof(unsigned)); | ||||
|     if (size_of_mapping > 0) | ||||
|     { | ||||
|         node_stream.write((char *)internal_to_external_node_map.data(), | ||||
|                           size_of_mapping * sizeof(QueryNode)); | ||||
|     } | ||||
|     node_stream.close(); | ||||
| } | ||||
| 
 | ||||
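WriteNodeMapping above writes a simple binary layout: an unsigned record count followed by the raw QueryNode array. A hedged sketch of a matching reader (Record stands in for QueryNode and is assumed trivially copyable, just as the writer assumes):

    #include <fstream>
    #include <string>
    #include <vector>

    template <typename Record>
    std::vector<Record> read_node_mapping(const std::string &path)
    {
        std::ifstream in(path, std::ios::binary);
        unsigned count = 0;
        in.read(reinterpret_cast<char *>(&count), sizeof(count));
        std::vector<Record> records(count);
        if (count > 0)
            in.read(reinterpret_cast<char *>(records.data()), count * sizeof(Record));
        return records;
    }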
| /**
 | ||||
|     \brief Building rtree-based nearest-neighbor data structure | ||||
| 
 | ||||
|     Saves tree into '.ramIndex' and leaves into '.fileIndex'. | ||||
|  */ | ||||
| void extractor::BuildRTree(std::vector<EdgeBasedNode> node_based_edge_list, | ||||
|                            std::vector<bool> node_is_startpoint, | ||||
|                            const std::vector<QueryNode> &internal_to_external_node_map) | ||||
| { | ||||
|     SimpleLogger().Write() << "constructing r-tree of " << node_based_edge_list.size() | ||||
|                            << " edge elements build on-top of " | ||||
|                            << internal_to_external_node_map.size() << " coordinates"; | ||||
| 
 | ||||
|     BOOST_ASSERT(node_is_startpoint.size() == node_based_edge_list.size()); | ||||
| 
 | ||||
|     // Filter node based edges based on startpoint
 | ||||
|     auto out_iter = node_based_edge_list.begin(); | ||||
|     auto in_iter = node_based_edge_list.begin(); | ||||
|     for (auto index : osrm::irange<std::size_t>(0, node_is_startpoint.size())) | ||||
|     { | ||||
|         BOOST_ASSERT(in_iter != node_based_edge_list.end()); | ||||
|         if (node_is_startpoint[index]) | ||||
|         { | ||||
|             *out_iter = *in_iter; | ||||
|             out_iter++; | ||||
|         } | ||||
|         in_iter++; | ||||
|     } | ||||
|     auto new_size = out_iter - node_based_edge_list.begin(); | ||||
|     node_based_edge_list.resize(new_size); | ||||
| 
 | ||||
|     TIMER_START(construction); | ||||
|     StaticRTree<EdgeBasedNode>(node_based_edge_list, config.rtree_nodes_output_path, | ||||
|                                config.rtree_leafs_output_path, internal_to_external_node_map); | ||||
| 
 | ||||
|     TIMER_STOP(construction); | ||||
|     SimpleLogger().Write() << "finished r-tree construction in " << TIMER_SEC(construction) | ||||
|                            << " seconds"; | ||||
| } | ||||
| 
 | ||||
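The two-iterator loop in BuildRTree above is an in-place, order-preserving compaction keyed by the parallel node_is_startpoint vector. The same filtering written with an auxiliary vector, as a sketch for clarity only (the in-place version avoids the extra allocation):

    #include <cstddef>
    #include <vector>

    template <typename Node>
    void keep_startpoints(std::vector<Node> &nodes, const std::vector<bool> &is_startpoint)
    {
        std::vector<Node> filtered;
        filtered.reserve(nodes.size());
        for (std::size_t i = 0; i < is_startpoint.size(); ++i)
            if (is_startpoint[i])
                filtered.push_back(nodes[i]);
        nodes.swap(filtered);
    }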
| void extractor::WriteEdgeBasedGraph(std::string const &output_file_filename, | ||||
|                                     size_t const max_edge_id, | ||||
|                                     DeallocatingVector<EdgeBasedEdge> const &edge_based_edge_list) | ||||
| { | ||||
| 
 | ||||
|     std::ofstream file_out_stream; | ||||
|     file_out_stream.open(output_file_filename.c_str(), std::ios::binary); | ||||
|     const FingerPrint fingerprint = FingerPrint::GetValid(); | ||||
|     file_out_stream.write((char *)&fingerprint, sizeof(FingerPrint)); | ||||
| 
 | ||||
|     std::cout << "[extractor] Writing edge-based-graph egdes       ... " << std::flush; | ||||
|     TIMER_START(write_edges); | ||||
| 
 | ||||
|     size_t number_of_used_edges = edge_based_edge_list.size(); | ||||
|     file_out_stream.write((char *)&number_of_used_edges, sizeof(size_t)); | ||||
|     file_out_stream.write((char *)&max_edge_id, sizeof(size_t)); | ||||
| 
 | ||||
|     for (const auto &edge : edge_based_edge_list) | ||||
|     { | ||||
|         file_out_stream.write((char *)&edge, sizeof(EdgeBasedEdge)); | ||||
|     } | ||||
| 
 | ||||
|     TIMER_STOP(write_edges); | ||||
|     std::cout << "ok, after " << TIMER_SEC(write_edges) << "s" << std::endl; | ||||
| 
 | ||||
|     SimpleLogger().Write() << "Processed " << number_of_used_edges << " edges"; | ||||
|     file_out_stream.close(); | ||||
| } | ||||
|  | ||||
| @ -29,13 +29,38 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #define EXTRACTOR_HPP | ||||
| 
 | ||||
| #include "extractor_options.hpp" | ||||
| #include "edge_based_graph_factory.hpp" | ||||
| #include "../algorithms/graph_compressor.hpp" | ||||
| 
 | ||||
| class extractor | ||||
| { | ||||
| public: | ||||
|   extractor(ExtractorConfig extractor_config) : config(std::move(extractor_config)) {} | ||||
|   public: | ||||
|     extractor(ExtractorConfig extractor_config) : config(std::move(extractor_config)) {} | ||||
|     int run(); | ||||
| private: | ||||
|    ExtractorConfig config; | ||||
| 
 | ||||
|   private: | ||||
|     ExtractorConfig config; | ||||
|     void SetupScriptingEnvironment(lua_State *myLuaState, SpeedProfileProperties &speed_profile); | ||||
|     std::pair<std::size_t, std::size_t> | ||||
|     BuildEdgeExpandedGraph(std::vector<QueryNode> &internal_to_external_node_map, | ||||
|                            std::vector<EdgeBasedNode> &node_based_edge_list, | ||||
|                            std::vector<bool> &node_is_startpoint, | ||||
|                            DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list); | ||||
|     void WriteNodeMapping(const std::vector<QueryNode> &internal_to_external_node_map); | ||||
|     void FindComponents(unsigned max_edge_id, | ||||
|                         const DeallocatingVector<EdgeBasedEdge> &edges, | ||||
|                         std::vector<EdgeBasedNode> &nodes) const; | ||||
|     void BuildRTree(std::vector<EdgeBasedNode> node_based_edge_list, | ||||
|                     std::vector<bool> node_is_startpoint, | ||||
|                     const std::vector<QueryNode> &internal_to_external_node_map); | ||||
|     std::shared_ptr<RestrictionMap> LoadRestrictionMap(); | ||||
|     std::shared_ptr<NodeBasedDynamicGraph> | ||||
|     LoadNodeBasedGraph(std::unordered_set<NodeID> &barrier_nodes, | ||||
|                        std::unordered_set<NodeID> &traffic_lights, | ||||
|                        std::vector<QueryNode> &internal_to_external_node_map); | ||||
| 
 | ||||
|     void WriteEdgeBasedGraph(std::string const &output_file_filename, | ||||
|                              size_t const max_edge_id, | ||||
|                              DeallocatingVector<EdgeBasedEdge> const &edge_based_edge_list); | ||||
| }; | ||||
| #endif /* EXTRACTOR_HPP */ | ||||
|  | ||||
| @ -35,6 +35,10 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include "../util/container.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| 
 | ||||
| #include <boost/optional/optional.hpp> | ||||
| 
 | ||||
| #include <osmium/osm.hpp> | ||||
| 
 | ||||
| #include <osrm/coordinate.hpp> | ||||
| 
 | ||||
| #include <limits> | ||||
| @ -59,13 +63,13 @@ void ExtractorCallbacks::ProcessNode(const osmium::Node &input_node, | ||||
|     external_memory.all_nodes_list.push_back( | ||||
|         {static_cast<int>(input_node.location().lat() * COORDINATE_PRECISION), | ||||
|          static_cast<int>(input_node.location().lon() * COORDINATE_PRECISION), | ||||
|          static_cast<NodeID>(input_node.id()), | ||||
|          OSMNodeID(input_node.id()), | ||||
|          result_node.barrier, | ||||
|          result_node.traffic_lights}); | ||||
| } | ||||
| 
 | ||||
| void ExtractorCallbacks::ProcessRestriction( | ||||
|     const mapbox::util::optional<InputRestrictionContainer> &restriction) | ||||
|     const boost::optional<InputRestrictionContainer> &restriction) | ||||
| { | ||||
|     if (restriction) | ||||
|     { | ||||
| @ -140,8 +144,8 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     if (forward_weight_data.type == InternalExtractorEdge::WeightType::INVALID | ||||
|      && backward_weight_data.type == InternalExtractorEdge::WeightType::INVALID) | ||||
|     if (forward_weight_data.type == InternalExtractorEdge::WeightType::INVALID && | ||||
|         backward_weight_data.type == InternalExtractorEdge::WeightType::INVALID) | ||||
|     { | ||||
|         SimpleLogger().Write(logDEBUG) << "found way with bogus speed, id: " << input_way.id(); | ||||
|         return; | ||||
| @ -149,10 +153,12 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti | ||||
| 
 | ||||
|     // Get the unique identifier for the street name
 | ||||
|     const auto &string_map_iterator = string_map.find(parsed_way.name); | ||||
|     unsigned name_id = external_memory.name_list.size(); | ||||
|     unsigned name_id = external_memory.name_lengths.size(); | ||||
|     if (string_map.end() == string_map_iterator) | ||||
|     { | ||||
|         external_memory.name_list.push_back(parsed_way.name); | ||||
|         auto name_length = std::min<unsigned>(255u, parsed_way.name.size()); | ||||
|         std::copy(parsed_way.name.c_str(), parsed_way.name.c_str() + name_length, std::back_inserter(external_memory.name_char_data)); | ||||
|         external_memory.name_lengths.push_back(name_length); | ||||
|         string_map.insert(std::make_pair(parsed_way.name, name_id)); | ||||
|     } | ||||
|     else | ||||
| @ -169,7 +175,10 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti | ||||
| 
 | ||||
|     std::transform(input_way.nodes().begin(), input_way.nodes().end(), | ||||
|                    std::back_inserter(external_memory.used_node_id_list), | ||||
|                    [](const osmium::NodeRef& ref) { return ref.ref(); }); | ||||
|                    [](const osmium::NodeRef &ref) | ||||
|                    { | ||||
|                        return OSMNodeID(ref.ref()); | ||||
|                    }); | ||||
| 
 | ||||
|     const bool is_opposite_way = TRAVEL_MODE_INACCESSIBLE == parsed_way.forward_travel_mode; | ||||
| 
 | ||||
| @ -182,53 +191,51 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti | ||||
|                             [&](const osmium::NodeRef &first_node, const osmium::NodeRef &last_node) | ||||
|                             { | ||||
|                                 external_memory.all_edges_list.push_back(InternalExtractorEdge( | ||||
|                                     first_node.ref(), last_node.ref(), name_id, backward_weight_data, | ||||
|                                     true, false, parsed_way.roundabout, parsed_way.is_access_restricted, | ||||
|                                     OSMNodeID(first_node.ref()), OSMNodeID(last_node.ref()), name_id, | ||||
|                                     backward_weight_data, true, false, parsed_way.roundabout, | ||||
|                                     parsed_way.is_access_restricted, parsed_way.is_startpoint, | ||||
|                                     parsed_way.backward_travel_mode, false)); | ||||
|                             }); | ||||
| 
 | ||||
|         external_memory.way_start_end_id_list.push_back( | ||||
|             { | ||||
|                 static_cast<EdgeID>(input_way.id()), | ||||
|                 static_cast<NodeID>(input_way.nodes().back().ref()), | ||||
|                 static_cast<NodeID>(input_way.nodes()[input_way.nodes().size() - 2].ref()), | ||||
|                 static_cast<NodeID>(input_way.nodes()[1].ref()), | ||||
|                 static_cast<NodeID>(input_way.nodes()[0].ref()) | ||||
|              } | ||||
|         ); | ||||
|             {OSMWayID(input_way.id()), | ||||
|              OSMNodeID(input_way.nodes().back().ref()), | ||||
|              OSMNodeID(input_way.nodes()[input_way.nodes().size() - 2].ref()), | ||||
|              OSMNodeID(input_way.nodes()[1].ref()), | ||||
|              OSMNodeID(input_way.nodes()[0].ref())}); | ||||
|     } | ||||
|     else | ||||
|     { | ||||
|         const bool forward_only = split_edge || TRAVEL_MODE_INACCESSIBLE == parsed_way.backward_travel_mode; | ||||
|         const bool forward_only = | ||||
|             split_edge || TRAVEL_MODE_INACCESSIBLE == parsed_way.backward_travel_mode; | ||||
|         osrm::for_each_pair(input_way.nodes().cbegin(), input_way.nodes().cend(), | ||||
|                             [&](const osmium::NodeRef &first_node, const osmium::NodeRef &last_node) | ||||
|                             { | ||||
|                                 external_memory.all_edges_list.push_back(InternalExtractorEdge( | ||||
|                                     first_node.ref(), last_node.ref(), name_id, forward_weight_data, | ||||
|                                     true, !forward_only, parsed_way.roundabout, parsed_way.is_access_restricted, | ||||
|                                     parsed_way.forward_travel_mode, split_edge)); | ||||
|                                     OSMNodeID(first_node.ref()), OSMNodeID(last_node.ref()), name_id, forward_weight_data, | ||||
|                                     true, !forward_only, parsed_way.roundabout, | ||||
|                                     parsed_way.is_access_restricted, parsed_way.is_startpoint, parsed_way.forward_travel_mode, | ||||
|                                     split_edge)); | ||||
|                             }); | ||||
|         if (split_edge) | ||||
|         { | ||||
|             BOOST_ASSERT(parsed_way.backward_travel_mode != TRAVEL_MODE_INACCESSIBLE); | ||||
|             osrm::for_each_pair(input_way.nodes().cbegin(), input_way.nodes().cend(), | ||||
|                                 [&](const osmium::NodeRef &first_node, const osmium::NodeRef &last_node) | ||||
|                                 { | ||||
|                                     external_memory.all_edges_list.push_back(InternalExtractorEdge( | ||||
|                                         first_node.ref(), last_node.ref(), name_id, backward_weight_data, | ||||
|                                         false, true, parsed_way.roundabout, parsed_way.is_access_restricted, | ||||
|                                         parsed_way.backward_travel_mode, true)); | ||||
|                                 }); | ||||
|             osrm::for_each_pair( | ||||
|                 input_way.nodes().cbegin(), input_way.nodes().cend(), | ||||
|                 [&](const osmium::NodeRef &first_node, const osmium::NodeRef &last_node) | ||||
|                 { | ||||
|                     external_memory.all_edges_list.push_back(InternalExtractorEdge( | ||||
|                         OSMNodeID(first_node.ref()), OSMNodeID(last_node.ref()), name_id, backward_weight_data, false, | ||||
|                         true, parsed_way.roundabout, parsed_way.is_access_restricted, | ||||
|                         parsed_way.is_startpoint, parsed_way.backward_travel_mode, true)); | ||||
|                 }); | ||||
|         } | ||||
| 
 | ||||
|         external_memory.way_start_end_id_list.push_back( | ||||
|             { | ||||
|                 static_cast<EdgeID>(input_way.id()), | ||||
|                 static_cast<NodeID>(input_way.nodes().back().ref()), | ||||
|                 static_cast<NodeID>(input_way.nodes()[input_way.nodes().size() - 2].ref()), | ||||
|                 static_cast<NodeID>(input_way.nodes()[1].ref()), | ||||
|                 static_cast<NodeID>(input_way.nodes()[0].ref()) | ||||
|              } | ||||
|         ); | ||||
|             {OSMWayID(input_way.id()), | ||||
|              OSMNodeID(input_way.nodes().back().ref()), | ||||
|              OSMNodeID(input_way.nodes()[input_way.nodes().size() - 2].ref()), | ||||
|              OSMNodeID(input_way.nodes()[1].ref()), | ||||
|              OSMNodeID(input_way.nodes()[0].ref())}); | ||||
|     } | ||||
| } | ||||
|  | ||||
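The hunk above replaces the per-way name_list strings with a flat name_char_data buffer plus a name_lengths vector (capped at 255 bytes per name), matching the ".names ... prefix-sum based index" description earlier. A hedged sketch of how a consumer could rebuild the i-th name from that layout, using std::vector stand-ins for the stxxl::vector members:

    #include <cstddef>
    #include <numeric>
    #include <string>
    #include <vector>

    std::string get_name(const std::vector<char> &name_char_data,
                         const std::vector<unsigned> &name_lengths,
                         std::size_t name_id)
    {
        // prefix sum of the preceding lengths gives the byte offset of entry name_id
        const std::size_t offset = std::accumulate(name_lengths.begin(),
                                                   name_lengths.begin() + name_id,
                                                   std::size_t{0});
        return std::string(name_char_data.data() + offset, name_lengths[name_id]);
    }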
| @ -28,12 +28,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #ifndef EXTRACTOR_CALLBACKS_HPP | ||||
| #define EXTRACTOR_CALLBACKS_HPP | ||||
| 
 | ||||
| #include "extraction_way.hpp" | ||||
| #include "../typedefs.h" | ||||
| 
 | ||||
| #include <osmium/osm.hpp> | ||||
| 
 | ||||
| #include <variant/optional.hpp> | ||||
| #include <boost/optional/optional_fwd.hpp> | ||||
| 
 | ||||
| #include <string> | ||||
| #include <unordered_map> | ||||
| @ -42,6 +38,12 @@ struct ExternalMemoryNode; | ||||
| class ExtractionContainers; | ||||
| struct InputRestrictionContainer; | ||||
| struct ExtractionNode; | ||||
| struct ExtractionWay; | ||||
| namespace osmium | ||||
| { | ||||
| class Node; | ||||
| class Way; | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|  * This class is used by the extractor with the results of the | ||||
| @ -66,7 +68,7 @@ class ExtractorCallbacks | ||||
|     void ProcessNode(const osmium::Node ¤t_node, const ExtractionNode &result_node); | ||||
| 
 | ||||
|     // warning: caller needs to take care of synchronization!
 | ||||
|     void ProcessRestriction(const mapbox::util::optional<InputRestrictionContainer> &restriction); | ||||
|     void ProcessRestriction(const boost::optional<InputRestrictionContainer> &restriction); | ||||
| 
 | ||||
|     // warning: caller needs to take care of synchronization!
 | ||||
|     void ProcessWay(const osmium::Way ¤t_way, const ExtractionWay &result_way); | ||||
|  | ||||
| @ -27,7 +27,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include "extractor_options.hpp" | ||||
| 
 | ||||
| #include "../util/git_sha.hpp" | ||||
| #include "util/version.hpp" | ||||
| #include "../util/ini_file.hpp" | ||||
| #include "../util/simple_logger.hpp" | ||||
| 
 | ||||
| @ -42,6 +42,12 @@ ExtractorOptions::ParseArguments(int argc, char *argv[], ExtractorConfig &extrac | ||||
|     // declare a group of options that will be allowed only on command line
 | ||||
|     boost::program_options::options_description generic_options("Options"); | ||||
|     generic_options.add_options()("version,v", "Show version")("help,h", "Show this help message")( | ||||
|         /*
 | ||||
|          * TODO: re-enable this | ||||
|         "restrictions,r", | ||||
|         boost::program_options::value<boost::filesystem::path>(&extractor_config.restrictions_path), | ||||
|         "Restrictions file in .osrm.restrictions format")( | ||||
|         */ | ||||
|         "config,c", boost::program_options::value<boost::filesystem::path>( | ||||
|                         &extractor_config.config_file_path)->default_value("extractor.ini"), | ||||
|         "Path to a configuration file."); | ||||
| @ -55,7 +61,20 @@ ExtractorOptions::ParseArguments(int argc, char *argv[], ExtractorConfig &extrac | ||||
|         "threads,t", | ||||
|         boost::program_options::value<unsigned int>(&extractor_config.requested_num_threads) | ||||
|             ->default_value(tbb::task_scheduler_init::default_num_threads()), | ||||
|         "Number of threads to use"); | ||||
|         "Number of threads to use")( | ||||
|             "generate-edge-lookup",boost::program_options::value<bool>( | ||||
|                                                 &extractor_config.generate_edge_lookup)->implicit_value(true)->default_value(false), | ||||
|                                  "Generate a lookup table for internal edge-expanded-edge IDs to OSM node pairs")( | ||||
|         "small-component-size", | ||||
|         boost::program_options::value<unsigned int>(&extractor_config.small_component_size) | ||||
|             ->default_value(1000), | ||||
|         "Number of nodes required before a strongly-connected-componennt is considered big (affects nearest neighbor snapping)"); | ||||
| 
 | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|         config_options.add_options()("debug-turns", | ||||
|             boost::program_options::value<std::string>(&extractor_config.debug_turns_path), | ||||
|             "Write out GeoJSON with turn penalty data"); | ||||
| #endif // DEBUG_GEOMETRY
 | ||||
| 
 | ||||
|     // hidden options, will be allowed both on command line and in config file, but will not be
 | ||||
|     // shown to the user
 | ||||
| @ -64,6 +83,7 @@ ExtractorOptions::ParseArguments(int argc, char *argv[], ExtractorConfig &extrac | ||||
|                                                 &extractor_config.input_path), | ||||
|                                  "Input file in .osm, .osm.bz2 or .osm.pbf format"); | ||||
| 
 | ||||
| 
 | ||||
|     // positional option
 | ||||
|     boost::program_options::positional_options_description positional_options; | ||||
|     positional_options.add("input", 1); | ||||
| @ -90,7 +110,7 @@ ExtractorOptions::ParseArguments(int argc, char *argv[], ExtractorConfig &extrac | ||||
|                                       option_variables); | ||||
|         if (option_variables.count("version")) | ||||
|         { | ||||
|             SimpleLogger().Write() << g_GIT_DESCRIPTION; | ||||
|             SimpleLogger().Write() << OSRM_VERSION; | ||||
|             return return_code::exit; | ||||
|         } | ||||
| 
 | ||||
| @ -137,6 +157,14 @@ void ExtractorOptions::GenerateOutputFilesNames(ExtractorConfig &extractor_confi | ||||
|     extractor_config.restriction_file_name = input_path.string(); | ||||
|     extractor_config.names_file_name = input_path.string(); | ||||
|     extractor_config.timestamp_file_name = input_path.string(); | ||||
|     extractor_config.geometry_output_path = input_path.string(); | ||||
|     extractor_config.edge_output_path = input_path.string(); | ||||
|     extractor_config.edge_graph_output_path = input_path.string(); | ||||
|     extractor_config.node_output_path = input_path.string(); | ||||
|     extractor_config.rtree_nodes_output_path = input_path.string(); | ||||
|     extractor_config.rtree_leafs_output_path = input_path.string(); | ||||
|     extractor_config.edge_segment_lookup_path = input_path.string(); | ||||
|     extractor_config.edge_penalty_path = input_path.string(); | ||||
|     std::string::size_type pos = extractor_config.output_file_name.find(".osm.bz2"); | ||||
|     if (pos == std::string::npos) | ||||
|     { | ||||
| @ -159,6 +187,14 @@ void ExtractorOptions::GenerateOutputFilesNames(ExtractorConfig &extractor_confi | ||||
|             extractor_config.restriction_file_name.append(".osrm.restrictions"); | ||||
|             extractor_config.names_file_name.append(".osrm.names"); | ||||
|             extractor_config.timestamp_file_name.append(".osrm.timestamp"); | ||||
|             extractor_config.geometry_output_path.append(".osrm.geometry"); | ||||
|             extractor_config.node_output_path.append(".osrm.nodes"); | ||||
|             extractor_config.edge_output_path.append(".osrm.edges"); | ||||
|             extractor_config.edge_graph_output_path.append(".osrm.ebg"); | ||||
|             extractor_config.rtree_nodes_output_path.append(".osrm.ramIndex"); | ||||
|             extractor_config.rtree_leafs_output_path.append(".osrm.fileIndex"); | ||||
|             extractor_config.edge_segment_lookup_path.append(".osrm.edge_segment_lookup"); | ||||
|             extractor_config.edge_penalty_path.append(".osrm.edge_penalties"); | ||||
|         } | ||||
|         else | ||||
|         { | ||||
| @ -166,6 +202,14 @@ void ExtractorOptions::GenerateOutputFilesNames(ExtractorConfig &extractor_confi | ||||
|             extractor_config.restriction_file_name.replace(pos, 5, ".osrm.restrictions"); | ||||
|             extractor_config.names_file_name.replace(pos, 5, ".osrm.names"); | ||||
|             extractor_config.timestamp_file_name.replace(pos, 5, ".osrm.timestamp"); | ||||
|             extractor_config.geometry_output_path.replace(pos, 5, ".osrm.geometry"); | ||||
|             extractor_config.node_output_path.replace(pos, 5, ".osrm.nodes"); | ||||
|             extractor_config.edge_output_path.replace(pos, 5, ".osrm.edges"); | ||||
|             extractor_config.edge_graph_output_path.replace(pos, 5, ".osrm.ebg"); | ||||
|             extractor_config.rtree_nodes_output_path.replace(pos, 5, ".osrm.ramIndex"); | ||||
|             extractor_config.rtree_leafs_output_path.replace(pos, 5, ".osrm.fileIndex"); | ||||
|             extractor_config.edge_segment_lookup_path.replace(pos, 5, ".osrm.edge_segment_lookup"); | ||||
|             extractor_config.edge_penalty_path.replace(pos, 5, ".osrm.edge_penalties"); | ||||
|         } | ||||
|     } | ||||
|     else | ||||
| @ -174,5 +218,13 @@ void ExtractorOptions::GenerateOutputFilesNames(ExtractorConfig &extractor_confi | ||||
|         extractor_config.restriction_file_name.replace(pos, 8, ".osrm.restrictions"); | ||||
|         extractor_config.names_file_name.replace(pos, 8, ".osrm.names"); | ||||
|         extractor_config.timestamp_file_name.replace(pos, 8, ".osrm.timestamp"); | ||||
|         extractor_config.geometry_output_path.replace(pos, 8, ".osrm.geometry"); | ||||
|         extractor_config.node_output_path.replace(pos, 8, ".osrm.nodes"); | ||||
|         extractor_config.edge_output_path.replace(pos, 8, ".osrm.edges"); | ||||
|         extractor_config.edge_graph_output_path.replace(pos, 8, ".osrm.ebg"); | ||||
|         extractor_config.rtree_nodes_output_path.replace(pos, 8, ".osrm.ramIndex"); | ||||
|         extractor_config.rtree_leafs_output_path.replace(pos, 8, ".osrm.fileIndex"); | ||||
|         extractor_config.edge_segment_lookup_path.replace(pos, 8, ".osrm.edge_segment_lookup"); | ||||
|         extractor_config.edge_penalty_path.replace(pos, 8, ".osrm.edge_penalties"); | ||||
|     } | ||||
| } | ||||
|  | ||||
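GenerateOutputFilesNames derives every output path from the input file name; in the branch where find(".osm.bz2") succeeds, eight characters are replaced by the respective .osrm.* suffix. A worked example of that branch (the file name is illustrative):

    #include <cassert>
    #include <string>

    void suffix_example()
    {
        std::string ebg_path = "monaco.osm.bz2";
        const std::string::size_type pos = ebg_path.find(".osm.bz2"); // pos == 6
        ebg_path.replace(pos, 8, ".osrm.ebg");                        // strlen(".osm.bz2") == 8
        assert(ebg_path == "monaco.osrm.ebg");
    }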
| @ -50,8 +50,22 @@ struct ExtractorConfig | ||||
|     std::string restriction_file_name; | ||||
|     std::string names_file_name; | ||||
|     std::string timestamp_file_name; | ||||
|     std::string geometry_output_path; | ||||
|     std::string edge_output_path; | ||||
|     std::string edge_graph_output_path; | ||||
|     std::string node_output_path; | ||||
|     std::string rtree_nodes_output_path; | ||||
|     std::string rtree_leafs_output_path; | ||||
| 
 | ||||
|     unsigned requested_num_threads; | ||||
|     unsigned small_component_size; | ||||
| 
 | ||||
|     bool generate_edge_lookup; | ||||
|     std::string edge_penalty_path; | ||||
|     std::string edge_segment_lookup_path; | ||||
| #ifdef DEBUG_GEOMETRY | ||||
|     std::string debug_turns_path; | ||||
| #endif | ||||
| }; | ||||
| 
 | ||||
| struct ExtractorOptions | ||||
|  | ||||
| @ -36,21 +36,22 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| struct FirstAndLastSegmentOfWay | ||||
| { | ||||
|     EdgeID way_id; | ||||
|     NodeID first_segment_source_id; | ||||
|     NodeID first_segment_target_id; | ||||
|     NodeID last_segment_source_id; | ||||
|     NodeID last_segment_target_id; | ||||
|     OSMWayID way_id; | ||||
|     OSMNodeID first_segment_source_id; | ||||
|     OSMNodeID first_segment_target_id; | ||||
|     OSMNodeID last_segment_source_id; | ||||
|     OSMNodeID last_segment_target_id; | ||||
| 
 | ||||
|     FirstAndLastSegmentOfWay() | ||||
|         : way_id(std::numeric_limits<EdgeID>::max()), | ||||
|           first_segment_source_id(std::numeric_limits<NodeID>::max()), | ||||
|           first_segment_target_id(std::numeric_limits<NodeID>::max()), | ||||
|           last_segment_source_id(std::numeric_limits<NodeID>::max()), | ||||
|           last_segment_target_id(std::numeric_limits<NodeID>::max()) | ||||
|         : way_id(SPECIAL_OSM_WAYID), | ||||
|           first_segment_source_id(SPECIAL_OSM_NODEID), | ||||
|           first_segment_target_id(SPECIAL_OSM_NODEID), | ||||
|           last_segment_source_id(SPECIAL_OSM_NODEID), | ||||
|           last_segment_target_id(SPECIAL_OSM_NODEID) | ||||
|     { | ||||
|     } | ||||
| 
 | ||||
|     FirstAndLastSegmentOfWay(EdgeID w, NodeID fs, NodeID ft, NodeID ls, NodeID lt) | ||||
|     FirstAndLastSegmentOfWay(OSMWayID w, OSMNodeID fs, OSMNodeID ft, OSMNodeID ls, OSMNodeID lt) | ||||
|         : way_id(w), first_segment_source_id(fs), first_segment_target_id(ft), | ||||
|           last_segment_source_id(ls), last_segment_target_id(lt) | ||||
|     { | ||||
| @ -58,19 +59,19 @@ struct FirstAndLastSegmentOfWay | ||||
| 
 | ||||
|     static FirstAndLastSegmentOfWay min_value() | ||||
|     { | ||||
|         return {std::numeric_limits<EdgeID>::min(), | ||||
|                 std::numeric_limits<NodeID>::min(), | ||||
|                 std::numeric_limits<NodeID>::min(), | ||||
|                 std::numeric_limits<NodeID>::min(), | ||||
|                 std::numeric_limits<NodeID>::min()}; | ||||
|         return {MIN_OSM_WAYID, | ||||
|                 MIN_OSM_NODEID, | ||||
|                 MIN_OSM_NODEID, | ||||
|                 MIN_OSM_NODEID, | ||||
|                 MIN_OSM_NODEID}; | ||||
|     } | ||||
|     static FirstAndLastSegmentOfWay max_value() | ||||
|     { | ||||
|         return {std::numeric_limits<EdgeID>::max(), | ||||
|                 std::numeric_limits<NodeID>::max(), | ||||
|                 std::numeric_limits<NodeID>::max(), | ||||
|                 std::numeric_limits<NodeID>::max(), | ||||
|                 std::numeric_limits<NodeID>::max()}; | ||||
|         return {MAX_OSM_WAYID, | ||||
|                 MAX_OSM_NODEID, | ||||
|                 MAX_OSM_NODEID, | ||||
|                 MAX_OSM_NODEID, | ||||
|                 MAX_OSM_NODEID}; | ||||
|     } | ||||
| }; | ||||
| 
 | ||||
|  | ||||
| @ -63,29 +63,31 @@ struct InternalExtractorEdge | ||||
|     }; | ||||
| 
 | ||||
|     explicit InternalExtractorEdge() | ||||
|         : result(0, 0, 0, 0, false, false, false, false, | ||||
|         : result(MIN_OSM_NODEID, MIN_OSM_NODEID, 0, 0, false, false, false, false, true, | ||||
|                 TRAVEL_MODE_INACCESSIBLE, false) | ||||
|     { | ||||
|     } | ||||
| 
 | ||||
|     explicit InternalExtractorEdge(NodeID source, | ||||
|                                    NodeID target, | ||||
|     explicit InternalExtractorEdge(OSMNodeID source, | ||||
|                                    OSMNodeID target, | ||||
|                                    NodeID name_id, | ||||
|                                    WeightData weight_data, | ||||
|                                    bool forward, | ||||
|                                    bool backward, | ||||
|                                    bool roundabout, | ||||
|                                    bool access_restricted, | ||||
|                                    bool startpoint, | ||||
|                                    TravelMode travel_mode, | ||||
|                                    bool is_split) | ||||
|         : result(source, | ||||
|                  target, | ||||
|         : result(OSMNodeID(source), | ||||
|                  OSMNodeID(target), | ||||
|                  name_id, | ||||
|                  0, | ||||
|                  forward, | ||||
|                  backward, | ||||
|                  roundabout, | ||||
|                  access_restricted, | ||||
|                  startpoint, | ||||
|                  travel_mode, | ||||
|                  is_split), | ||||
|           weight_data(std::move(weight_data)) | ||||
| @ -93,7 +95,7 @@ struct InternalExtractorEdge | ||||
|     } | ||||
| 
 | ||||
|     // data that will be written to disk
 | ||||
|     NodeBasedEdge result; | ||||
|     NodeBasedEdgeWithOSM result; | ||||
|     // intermediate edge weight
 | ||||
|     WeightData weight_data; | ||||
|     // coordinate of the source node
 | ||||
| @ -101,19 +103,35 @@ struct InternalExtractorEdge | ||||
| 
 | ||||
| 
 | ||||
|     // necessary static util functions for stxxl's sorting
 | ||||
|     static InternalExtractorEdge min_value() | ||||
|     static InternalExtractorEdge min_osm_value() | ||||
|     { | ||||
|         return InternalExtractorEdge(0, 0, 0, WeightData(), false, false, false, | ||||
|                                      false, TRAVEL_MODE_INACCESSIBLE, false); | ||||
|         return InternalExtractorEdge(MIN_OSM_NODEID, MIN_OSM_NODEID, 0, WeightData(), false, false, false, | ||||
|                                      false, true, TRAVEL_MODE_INACCESSIBLE, false); | ||||
|     } | ||||
|     static InternalExtractorEdge max_value() | ||||
|     static InternalExtractorEdge max_osm_value() | ||||
|     { | ||||
|         return InternalExtractorEdge(SPECIAL_NODEID, SPECIAL_NODEID, 0, WeightData(), false, | ||||
|                                      false, false, false, TRAVEL_MODE_INACCESSIBLE, false); | ||||
|         return InternalExtractorEdge(MAX_OSM_NODEID, MAX_OSM_NODEID, 0, WeightData(), false, | ||||
|                                      false, false, false, true, TRAVEL_MODE_INACCESSIBLE, false); | ||||
|     } | ||||
| 
 | ||||
|     static InternalExtractorEdge min_internal_value() | ||||
|     { | ||||
|         auto v = min_osm_value(); | ||||
|         v.result.source = 0; | ||||
|         v.result.target = 0; | ||||
|         return v; | ||||
|     } | ||||
|     static InternalExtractorEdge max_internal_value() | ||||
|     { | ||||
|         auto v = max_osm_value(); | ||||
|         v.result.source = std::numeric_limits<NodeID>::max(); | ||||
|         v.result.target = std::numeric_limits<NodeID>::max(); | ||||
|         return v; | ||||
|     } | ||||
| 
 | ||||
| }; | ||||
| 
 | ||||
| struct CmpEdgeByStartThenTargetID | ||||
| struct CmpEdgeByInternalStartThenInternalTargetID | ||||
| { | ||||
|     using value_type = InternalExtractorEdge; | ||||
|     bool operator()(const InternalExtractorEdge &lhs, const InternalExtractorEdge &rhs) const | ||||
| @ -123,32 +141,32 @@ struct CmpEdgeByStartThenTargetID | ||||
|                (lhs.result.target <  rhs.result.target)); | ||||
|     } | ||||
| 
 | ||||
|     value_type max_value() { return InternalExtractorEdge::max_value(); } | ||||
|     value_type min_value() { return InternalExtractorEdge::min_value(); } | ||||
|     value_type max_value() { return InternalExtractorEdge::max_internal_value(); } | ||||
|     value_type min_value() { return InternalExtractorEdge::min_internal_value(); } | ||||
| }; | ||||
| 
 | ||||
| struct CmpEdgeByStartID | ||||
| struct CmpEdgeByOSMStartID | ||||
| { | ||||
|     using value_type = InternalExtractorEdge; | ||||
|     bool operator()(const InternalExtractorEdge &lhs, const InternalExtractorEdge &rhs) const | ||||
|     { | ||||
|         return lhs.result.source < rhs.result.source; | ||||
|         return lhs.result.osm_source_id < rhs.result.osm_source_id; | ||||
|     } | ||||
| 
 | ||||
|     value_type max_value() { return InternalExtractorEdge::max_value(); } | ||||
|     value_type min_value() { return InternalExtractorEdge::min_value(); } | ||||
|     value_type max_value() { return InternalExtractorEdge::max_osm_value(); } | ||||
|     value_type min_value() { return InternalExtractorEdge::min_osm_value(); } | ||||
| }; | ||||
| 
 | ||||
| struct CmpEdgeByTargetID | ||||
| struct CmpEdgeByOSMTargetID | ||||
| { | ||||
|     using value_type = InternalExtractorEdge; | ||||
|     bool operator()(const InternalExtractorEdge &lhs, const InternalExtractorEdge &rhs) const | ||||
|     { | ||||
|         return lhs.result.target < rhs.result.target; | ||||
|         return lhs.result.osm_target_id < rhs.result.osm_target_id; | ||||
|     } | ||||
| 
 | ||||
|     value_type max_value() { return InternalExtractorEdge::max_value(); } | ||||
|     value_type min_value() { return InternalExtractorEdge::min_value(); } | ||||
|     value_type max_value() { return InternalExtractorEdge::max_osm_value(); } | ||||
|     value_type min_value() { return InternalExtractorEdge::min_osm_value(); } | ||||
| }; | ||||
| 
 | ||||
| #endif // INTERNAL_EXTRACTOR_EDGE_HPP
 | ||||
|  | ||||
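The renamed comparators above carry min_value()/max_value() members because STXXL's external-memory sort requires sentinel values from its comparison functor. A hedged sketch of how such a comparator is consumed (assumes the header above is included; the memory budget is illustrative):

    #include <stxxl/sort>
    #include <stxxl/vector>

    void sort_edges_by_osm_start(stxxl::vector<InternalExtractorEdge> &all_edges_list)
    {
        const auto internal_memory = 1024ULL * 1024 * 1024; // ~1 GiB of RAM for the sort
        stxxl::sort(all_edges_list.begin(), all_edges_list.end(),
                    CmpEdgeByOSMStartID(), internal_memory);
    }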
| @ -38,17 +38,20 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| #include <boost/algorithm/string/predicate.hpp> | ||||
| #include <boost/ref.hpp> | ||||
| #include <boost/regex.hpp> | ||||
| #include <boost/optional/optional.hpp> | ||||
| 
 | ||||
| #include <osmium/osm.hpp> | ||||
| #include <osmium/tags/regex_filter.hpp> | ||||
| 
 | ||||
| #include <algorithm> | ||||
| #include <iterator> | ||||
| 
 | ||||
| namespace | ||||
| { | ||||
| int lua_error_callback(lua_State *lua_state) | ||||
| { | ||||
|     std::string error_msg = lua_tostring(lua_state, -1); | ||||
|     std::ostringstream error_stream; | ||||
|     error_stream << error_msg; | ||||
|     throw osrm::exception("ERROR occured in profile script:\n" + error_stream.str()); | ||||
|     throw osrm::exception("ERROR occured in profile script:\n" + error_msg); | ||||
| } | ||||
| } | ||||
| 
 | ||||
| @ -104,18 +107,18 @@ void RestrictionParser::ReadRestrictionExceptions(lua_State *lua_state) | ||||
| 
 | ||||
| /**
 | ||||
|  * Tries to parse a relation as a turn restriction. This can fail for a number of | ||||
|  * reasons, thus the return type is a mapbox::util::optional<>. | ||||
|  * reasons, thus the return type is a boost::optional<T>. | ||||
|  * | ||||
|  * Some restrictions can also be ignored: See the ```get_exceptions``` function | ||||
|  * in the corresponding profile. | ||||
|  */ | ||||
| mapbox::util::optional<InputRestrictionContainer> | ||||
| boost::optional<InputRestrictionContainer> | ||||
| RestrictionParser::TryParse(const osmium::Relation &relation) const | ||||
| { | ||||
|     // return if turn restrictions should be ignored
 | ||||
|     if (!use_turn_restrictions) | ||||
|     { | ||||
|         return mapbox::util::optional<InputRestrictionContainer>(); | ||||
|         return {}; | ||||
|     } | ||||
| 
 | ||||
|     osmium::tags::KeyPrefixFilter filter(false); | ||||
| @ -129,14 +132,14 @@ RestrictionParser::TryParse(const osmium::Relation &relation) const | ||||
|     // if it's a restriction, continue;
 | ||||
|     if (std::distance(fi_begin, fi_end) == 0) | ||||
|     { | ||||
|         return mapbox::util::optional<InputRestrictionContainer>(); | ||||
|         return {}; | ||||
|     } | ||||
| 
 | ||||
|     // check if the restriction should be ignored
 | ||||
|     const char *except = relation.get_value_by_key("except"); | ||||
|     if (except != nullptr && ShouldIgnoreRestriction(except)) | ||||
|     { | ||||
|         return mapbox::util::optional<InputRestrictionContainer>(); | ||||
|         return {}; | ||||
|     } | ||||
| 
 | ||||
|     bool is_only_restriction = false; | ||||
| @ -164,7 +167,7 @@ RestrictionParser::TryParse(const osmium::Relation &relation) const | ||||
| 
 | ||||
|             if (!is_actually_restricted) | ||||
|             { | ||||
|                 return mapbox::util::optional<InputRestrictionContainer>(); | ||||
|                 return {}; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| @ -218,7 +221,7 @@ RestrictionParser::TryParse(const osmium::Relation &relation) const | ||||
|             break; | ||||
|         } | ||||
|     } | ||||
|     return mapbox::util::optional<InputRestrictionContainer>(restriction_container); | ||||
|     return boost::make_optional(std::move(restriction_container)); | ||||
| } | ||||
| 
 | ||||
| bool RestrictionParser::ShouldIgnoreRestriction(const std::string &except_tag_string) const | ||||
|  | ||||
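The migration above from mapbox::util::optional to boost::optional keeps the same shape: a disengaged optional ({}) for the early-out cases and an engaged one for a successfully parsed restriction. A minimal sketch of that pattern with hypothetical names:

    #include <boost/optional/optional.hpp>

    boost::optional<int> parse_positive(int raw)
    {
        if (raw <= 0)
            return {};                    // disengaged, like the early returns in TryParse
        return boost::make_optional(raw); // engaged optional carrying the result
    }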
| @ -30,16 +30,16 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| 
 | ||||
| #include "../data_structures/restriction.hpp" | ||||
| 
 | ||||
| #include <osmium/osm.hpp> | ||||
| #include <osmium/tags/regex_filter.hpp> | ||||
| 
 | ||||
| #include <variant/optional.hpp> | ||||
| #include <boost/optional/optional.hpp> | ||||
| 
 | ||||
| #include <string> | ||||
| #include <vector> | ||||
| 
 | ||||
| struct lua_State; | ||||
| class ScriptingEnvironment; | ||||
| namespace osmium | ||||
| { | ||||
| class Relation; | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|  * Parses the relations that represent turn restrictions. | ||||
| @ -63,8 +63,7 @@ class RestrictionParser | ||||
| { | ||||
|   public: | ||||
|     RestrictionParser(lua_State *lua_state); | ||||
|     mapbox::util::optional<InputRestrictionContainer> | ||||
|     TryParse(const osmium::Relation &relation) const; | ||||
|     boost::optional<InputRestrictionContainer> TryParse(const osmium::Relation &relation) const; | ||||
| 
 | ||||
|   private: | ||||
|     void ReadUseRestrictionsSetting(lua_State *lua_state); | ||||
|  | ||||
| @ -117,6 +117,7 @@ void ScriptingEnvironment::init_lua_state(lua_State *lua_state) | ||||
|             .def_readwrite("name", &ExtractionWay::name) | ||||
|             .def_readwrite("roundabout", &ExtractionWay::roundabout) | ||||
|             .def_readwrite("is_access_restricted", &ExtractionWay::is_access_restricted) | ||||
|             .def_readwrite("is_startpoint", &ExtractionWay::is_startpoint) | ||||
|             .def_readwrite("duration", &ExtractionWay::duration) | ||||
|             .property("forward_mode", &ExtractionWay::get_forward_mode, | ||||
|                       &ExtractionWay::set_forward_mode) | ||||
|  | ||||
							
								
								
									
67 features/car/advisory.feature Normal file
							| @ -0,0 +1,67 @@ | ||||
| @routing @maxspeed @car | ||||
| Feature: Car - Max speed restrictions | ||||
| OSRM will use 4/5 of the projected free-flow speed. | ||||
| 
 | ||||
|     Background: Use specific speeds | ||||
|         Given the profile "car" | ||||
|         Given a grid size of 1000 meters | ||||
| 
 | ||||
|     Scenario: Car - Advisory speed overwrites maxspeed | ||||
|         Given the node map | ||||
|             | a | b | c | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | highway       | maxspeed | maxspeed:advisory | | ||||
|             | ab    | residential   | 90       | 45                | | ||||
|             | bc    | residential   |          | 45                | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route | speed        | | ||||
|             | a    | b  | ab    | 47 km/h +- 1 | | ||||
|             | b    | c  | bc    | 47 km/h +- 1 | | ||||
| 
 | ||||
|     Scenario: Car - Advisory speed overwrites forward maxspeed | ||||
|         Given the node map | ||||
|             | a | b | c | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | highway       | maxspeed:forward | maxspeed:advisory:forward | | ||||
|             | ab    | residential   | 90               | 45                        | | ||||
|             | bc    | residential   |                  | 45                        | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route | speed        | | ||||
|             | a    | b  | ab    | 47 km/h +- 1 | | ||||
|             | b    | c  | bc    | 47 km/h +- 1 | | ||||
| 
 | ||||
|     Scenario: Car - Advisory speed overwrites backwards maxspeed | ||||
|         Given the node map | ||||
|             | a | b | c | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | highway       | maxspeed:backward | maxspeed:advisory:backward | | ||||
|             | ab    | residential   | 90                | 45                         | | ||||
|             | bc    | residential   |                   | 45                         | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route | speed        | | ||||
|             | b    | a  | ab    | 47 km/h +- 1 | | ||||
|             | c    | b  | bc    | 47 km/h +- 1 | | ||||
| 
 | ||||
|     Scenario: Car - Directional advisory speeds play nice with each other | ||||
|         Given the node map | ||||
|             | a | b | c | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | highway       | maxspeed:advisory | maxspeed:advisory:forward | maxspeed:advisory:backward | | ||||
|             | ab    | residential   | 90                | 45                        | 60                         | | ||||
|             | bc    | residential   | 90                | 60                        | 45                         | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route | speed        | | ||||
|             | a    | b  | ab    | 47 km/h +- 1 | | ||||
|             | b    | a  | ab    | 59 km/h +- 1 | | ||||
|             | b    | c  | bc    | 59 km/h +- 1 | | ||||
|             | c    | b  | bc    | 47 km/h +- 1 | | ||||
| 
 | ||||
| 
 | ||||
							
								
								
									
40 features/car/mode.feature Normal file
							| @ -0,0 +1,40 @@ | ||||
| @routing @car @mode | ||||
| Feature: Car - Mode flag | ||||
|     Background: | ||||
|         Given the profile "car" | ||||
| 
 | ||||
|     Scenario: Car - Mode when using a ferry | ||||
|         Given the node map | ||||
|             | a | b |   | | ||||
|             |   | c | d | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | highway | route | duration | | ||||
|             | ab    | primary |       |          | | ||||
|             | bc    |         | ferry | 0:01     | | ||||
|             | cd    | primary |       |          | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route    | turns                       | modes | | ||||
|             | a    | d  | ab,bc,cd | head,right,left,destination | 1,2,1 | | ||||
|             | d    | a  | cd,bc,ab | head,right,left,destination | 1,2,1 | | ||||
|             | c    | a  | bc,ab    | head,left,destination       | 2,1   | | ||||
|             | d    | b  | cd,bc    | head,right,destination      | 1,2   | | ||||
|             | a    | c  | ab,bc    | head,right,destination      | 1,2   | | ||||
|             | b    | d  | bc,cd    | head,left,destination       | 2,1   | | ||||
| 
 | ||||
|     Scenario: Car - Snapping when using a ferry | ||||
|         Given the node map | ||||
|             | a | b |   | c | d |   | e | f | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | highway | route | duration | | ||||
|             | ab    | primary |       |          | | ||||
|             | bcde  |         | ferry | 0:10     | | ||||
|             | ef    | primary |       |          | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route | turns            | modes   | time  | | ||||
|             | c    | d  | bcde  | head,destination | 2       | 600s  | | ||||
| 
 | ||||
| 
 | ||||
| @ -1,197 +0,0 @@ | ||||
| @locate | ||||
| Feature: Locate - return nearest node | ||||
| 
 | ||||
|     Background: | ||||
|         Given the profile "testbot" | ||||
| 
 | ||||
|     Scenario: Locate - two ways crossing | ||||
|         Given the node map | ||||
|             |   |  | 0 | c | 1 |  |   | | ||||
|             |   |  |   |   |   |  |   | | ||||
|             | 7 |  |   | n |   |  | 2 | | ||||
|             | a |  | k | x | m |  | b | | ||||
|             | 6 |  |   | l |   |  | 3 | | ||||
|             |   |  |   |   |   |  |   | | ||||
|             |   |  | 5 | d | 4 |  |   | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | axb   | | ||||
|             | cxd   | | ||||
| 
 | ||||
|         When I request locate I should get | ||||
|             | in | out | | ||||
|             | 0  | c   | | ||||
|             | 1  | c   | | ||||
|             | 2  | b   | | ||||
|             | 3  | b   | | ||||
|             | 4  | d   | | ||||
|             | 5  | d   | | ||||
|             | 6  | a   | | ||||
|             | 7  | a   | | ||||
|             | a  | a   | | ||||
|             | b  | b   | | ||||
|             | c  | c   | | ||||
|             | d  | d   | | ||||
|             | k  | x   | | ||||
|             | l  | x   | | ||||
|             | m  | x   | | ||||
|             | n  | x   | | ||||
| 
 | ||||
|     Scenario: Locate - inside a triangle | ||||
|         Given the node map | ||||
|             |   |  |   |   |   | c |   |   |   |  |   | | ||||
|             |   |  |   |   |   | 7 |   |   |   |  |   | | ||||
|             |   |  |   | y |   |   |   | z |   |  |   | | ||||
|             |   |  | 5 |   | 0 |   | 1 |   | 8 |  |   | | ||||
|             | 6 |  |   | 2 |   | 3 |   | 4 |   |  | 9 | | ||||
|             | a |  |   | x |   | u |   | w |   |  | b | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ab    | | ||||
|             | bc    | | ||||
|             | ca    | | ||||
| 
 | ||||
|         When I request locate I should get | ||||
|             | in | out | | ||||
|             | 0  | c   | | ||||
|             | 1  | c   | | ||||
|             | 2  | a   | | ||||
|             | 3  | c   | | ||||
|             | 4  | b   | | ||||
|             | 5  | a   | | ||||
|             | 6  | a   | | ||||
|             | 7  | c   | | ||||
|             | 8  | b   | | ||||
|             | 9  | b   | | ||||
|             | x  | a   | | ||||
|             | y  | c   | | ||||
|             | z  | c   | | ||||
|             | w  | b   | | ||||
| 
 | ||||
|     Scenario: Nearest - east-west way | ||||
|         Given the node map | ||||
|             | 3 | 4 |   | 5 | 6 | | ||||
|             | 2 | a | x | b | 7 | | ||||
|             | 1 | 0 |   | 9 | 8 | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ab    | | ||||
| 
 | ||||
|         When I request locate I should get | ||||
|             | in | out | | ||||
|             | 0  | a   | | ||||
|             | 1  | a   | | ||||
|             | 2  | a   | | ||||
|             | 3  | a   | | ||||
|             | 4  | a   | | ||||
|             | 5  | b   | | ||||
|             | 6  | b   | | ||||
|             | 7  | b   | | ||||
|             | 8  | b   | | ||||
|             | 9  | b   | | ||||
| 
 | ||||
|     Scenario: Nearest - north-south way | ||||
|         Given the node map | ||||
|             | 1 | 2 | 3 | | ||||
|             | 0 | a | 4 | | ||||
|             |   | x |   | | ||||
|             | 9 | b | 5 | | ||||
|             | 8 | 7 | 6 | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ab    | | ||||
| 
 | ||||
|         When I request locate I should get | ||||
|             | in | out | | ||||
|             | 0  | a   | | ||||
|             | 1  | a   | | ||||
|             | 2  | a   | | ||||
|             | 3  | a   | | ||||
|             | 4  | a   | | ||||
|             | 5  | b   | | ||||
|             | 6  | b   | | ||||
|             | 7  | b   | | ||||
|             | 8  | b   | | ||||
|             | 9  | b   | | ||||
| 
 | ||||
|     Scenario: Nearest - diagonal 1 | ||||
|         Given the node map | ||||
|             | 2 |   | 3 |   |   |   | | ||||
|             |   | a |   | 4 |   |   | | ||||
|             | 1 |   | x |   | 5 |   | | ||||
|             |   | 0 |   | y |   | 6 | | ||||
|             |   |   | 9 |   | b |   | | ||||
|             |   |   |   | 8 |   | 7 | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | axyb  | | ||||
| 
 | ||||
|         When I request locate I should get | ||||
|             | in | out | | ||||
|             | 0  | x   | | ||||
|             | 1  | a   | | ||||
|             | 2  | a   | | ||||
|             | 3  | a   | | ||||
|             | 4  | x   | | ||||
|             | 5  | y   | | ||||
|             | 6  | b   | | ||||
|             | 7  | b   | | ||||
|             | 8  | b   | | ||||
|             | 9  | y   | | ||||
|             | a  | a   | | ||||
|             | b  | b   | | ||||
|             | x  | x   | | ||||
|             | y  | y   | | ||||
| 
 | ||||
|     Scenario: Nearest - diagonal 2 | ||||
|         Given the node map | ||||
|             |   |   |   | 6 |   | 7 | | ||||
|             |   |   | 5 |   | b |   | | ||||
|             |   | 4 |   | y |   | 8 | | ||||
|             | 3 |   | x |   | 9 |   | | ||||
|             |   | a |   | 0 |   |   | | ||||
|             | 2 |   | 1 |   |   |   | | ||||
| 
 | ||||
|         And the ways | ||||
|         | nodes | | ||||
|         | ab    | | ||||
| 
 | ||||
|         When I request nearest I should get | ||||
|             | in | out | | ||||
|             | 0  | x   | | ||||
|             | 1  | a   | | ||||
|             | 2  | a   | | ||||
|             | 3  | a   | | ||||
|             | 4  | x   | | ||||
|             | 5  | y   | | ||||
|             | 6  | b   | | ||||
|             | 7  | b   | | ||||
|             | 8  | b   | | ||||
|             | 9  | y   | | ||||
|             | a  | a   | | ||||
|             | b  | b   | | ||||
|             | x  | x   | | ||||
|             | y  | y   | | ||||
| 
 | ||||
|         Scenario: Locate - High lat/lon | ||||
|            Given the node locations | ||||
|             | node | lat | lon  | | ||||
|             | a    | -85 | -180 | | ||||
|             | b    | 0   | 0    | | ||||
|             | c    | 85  | 180  | | ||||
|             | x    | -84 | -180 | | ||||
|             | y    | 84  | 180  | | ||||
| 
 | ||||
|            And the ways | ||||
|             | nodes | | ||||
|             | abc   | | ||||
| 
 | ||||
|            When I request locate I should get | ||||
|             | in | out | | ||||
|             | x  | a   | | ||||
|             | y  | c   | | ||||
| @ -15,7 +15,9 @@ Feature: osrm-extract command line options: help | ||||
|         And stdout should contain "Configuration:" | ||||
|         And stdout should contain "--profile" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain 12 lines | ||||
|         And stdout should contain "--generate-edge-lookup" | ||||
|         And stdout should contain "--small-component-size" | ||||
|         And stdout should contain 20 lines | ||||
|         And it should exit with code 0 | ||||
| 
 | ||||
|     Scenario: osrm-extract - Help, short | ||||
| @ -29,7 +31,9 @@ Feature: osrm-extract command line options: help | ||||
|         And stdout should contain "Configuration:" | ||||
|         And stdout should contain "--profile" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain 12 lines | ||||
|         And stdout should contain "--generate-edge-lookup" | ||||
|         And stdout should contain "--small-component-size" | ||||
|         And stdout should contain 20 lines | ||||
|         And it should exit with code 0 | ||||
| 
 | ||||
|     Scenario: osrm-extract - Help, long | ||||
| @ -43,5 +47,7 @@ Feature: osrm-extract command line options: help | ||||
|         And stdout should contain "Configuration:" | ||||
|         And stdout should contain "--profile" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain 12 lines | ||||
|         And stdout should contain "--generate-edge-lookup" | ||||
|         And stdout should contain "--small-component-size" | ||||
|         And stdout should contain 20 lines | ||||
|         And it should exit with code 0 | ||||
|  | ||||
| @ -13,11 +13,12 @@ Feature: osrm-prepare command line options: help | ||||
|         And stdout should contain "--help" | ||||
|         And stdout should contain "--config" | ||||
|         And stdout should contain "Configuration:" | ||||
|         And stdout should contain "--restrictions" | ||||
|         And stdout should contain "--profile" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain "--core" | ||||
|         And stdout should contain 17 lines | ||||
|         And stdout should contain "--level-cache" | ||||
|         And stdout should contain "--segment-speed-file" | ||||
|         And stdout should contain 21 lines | ||||
|         And it should exit with code 1 | ||||
| 
 | ||||
|     Scenario: osrm-prepare - Help, short | ||||
| @ -29,11 +30,12 @@ Feature: osrm-prepare command line options: help | ||||
|         And stdout should contain "--help" | ||||
|         And stdout should contain "--config" | ||||
|         And stdout should contain "Configuration:" | ||||
|         And stdout should contain "--restrictions" | ||||
|         And stdout should contain "--profile" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain "--core" | ||||
|         And stdout should contain 17 lines | ||||
|         And stdout should contain "--level-cache" | ||||
|         And stdout should contain "--segment-speed-file" | ||||
|         And stdout should contain 21 lines | ||||
|         And it should exit with code 0 | ||||
| 
 | ||||
|     Scenario: osrm-prepare - Help, long | ||||
| @ -45,9 +47,10 @@ Feature: osrm-prepare command line options: help | ||||
|         And stdout should contain "--help" | ||||
|         And stdout should contain "--config" | ||||
|         And stdout should contain "Configuration:" | ||||
|         And stdout should contain "--restrictions" | ||||
|         And stdout should contain "--profile" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain "--core" | ||||
|         And stdout should contain 17 lines | ||||
|         And stdout should contain "--level-cache" | ||||
|         And stdout should contain "--segment-speed-file" | ||||
|         And stdout should contain 21 lines | ||||
|         And it should exit with code 0 | ||||
|  | ||||
| @ -25,9 +25,11 @@ Feature: osrm-routed command line options: help | ||||
|         And stdout should contain "--port" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain "--shared-memory" | ||||
|         And stdout should contain "--max-viaroute-size" | ||||
|         And stdout should contain "--max-trip-size" | ||||
|         And stdout should contain "--max-table-size" | ||||
|         And stdout should contain "--max-matching-size" | ||||
|         And stdout should contain 26 lines | ||||
|         And stdout should contain 30 lines | ||||
|         And it should exit with code 0 | ||||
| 
 | ||||
|     Scenario: osrm-routed - Help, short | ||||
| @ -51,9 +53,11 @@ Feature: osrm-routed command line options: help | ||||
|         And stdout should contain "--port" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain "--shared-memory" | ||||
|         And stdout should contain "--max-viaroute-size" | ||||
|         And stdout should contain "--max-trip-size" | ||||
|         And stdout should contain "--max-table-size" | ||||
|         And stdout should contain "--max-matching-size" | ||||
|         And stdout should contain 26 lines | ||||
|         And stdout should contain 30 lines | ||||
|         And it should exit with code 0 | ||||
| 
 | ||||
|     Scenario: osrm-routed - Help, long | ||||
| @ -77,7 +81,9 @@ Feature: osrm-routed command line options: help | ||||
|         And stdout should contain "--port" | ||||
|         And stdout should contain "--threads" | ||||
|         And stdout should contain "--shared-memory" | ||||
|         And stdout should contain "--max-trip-size" | ||||
|         And stdout should contain "--max-table-size" | ||||
|         And stdout should contain "--max-table-size" | ||||
|         And stdout should contain "--max-matching-size" | ||||
|         And stdout should contain 26 lines | ||||
|         And stdout should contain 30 lines | ||||
|         And it should exit with code 0 | ||||
|  | ||||
| @ -6,6 +6,10 @@ Given(/^the import format "(.*?)"$/) do |format| | ||||
|   set_input_format format | ||||
| end | ||||
| 
 | ||||
| Given /^the extract extra arguments "(.*?)"$/ do |args| | ||||
|     set_extract_args args | ||||
| end | ||||
| 
 | ||||
| Given /^a grid size of (\d+) meters$/ do |meters| | ||||
|   set_grid_size meters | ||||
| end | ||||
| @ -28,7 +32,7 @@ Given /^the node map$/ do |table| | ||||
|         raise "*** invalid node name '#{name}', must me alphanumeric" unless name.match /[a-z0-9]/ | ||||
|         if name.match /[a-z]/ | ||||
|           raise "*** duplicate node '#{name}'" if name_node_hash[name] | ||||
|           add_osm_node name, *table_coord_to_lonlat(ci,ri) | ||||
|           add_osm_node name, *table_coord_to_lonlat(ci,ri), nil | ||||
|         else | ||||
|           raise "*** duplicate node '#{name}'" if location_hash[name] | ||||
|           add_location name, *table_coord_to_lonlat(ci,ri) | ||||
| @ -43,7 +47,9 @@ Given /^the node locations$/ do |table| | ||||
|     name = row['node'] | ||||
|     raise "*** duplicate node '#{name}'" if find_node_by_name name | ||||
|     if name.match /[a-z]/ | ||||
|       add_osm_node name, row['lon'].to_f, row['lat'].to_f | ||||
|       id = row['id'] | ||||
|       id = id.to_i if id | ||||
|       add_osm_node name, row['lon'].to_f, row['lat'].to_f, id | ||||
|     else | ||||
|       add_location name, row['lon'].to_f, row['lat'].to_f | ||||
|     end | ||||
|  | ||||
| @ -1,19 +1,29 @@ | ||||
| When /^I request a travel time matrix I should get$/ do |table| | ||||
|    | ||||
|   no_route = 2147483647   # MAX_INT | ||||
|    | ||||
|   raise "*** Top-left cell of matrix table must be empty" unless table.headers[0]=="" | ||||
|    | ||||
|   nodes = [] | ||||
|   waypoints = [] | ||||
|   column_headers = table.headers[1..-1] | ||||
|   row_headers = table.rows.map { |h| h.first } | ||||
|   unless column_headers==row_headers | ||||
|     raise "*** Column and row headers must match in matrix table, got #{column_headers.inspect} and #{row_headers.inspect}" | ||||
|   end | ||||
|   column_headers.each do |node_name| | ||||
|     node = find_node_by_name(node_name) | ||||
|     raise "*** unknown node '#{node_name}" unless node | ||||
|     nodes << node | ||||
|   symmetric = Set.new(column_headers) == Set.new(row_headers) | ||||
|   if symmetric then | ||||
|     column_headers.each do |node_name| | ||||
|       node = find_node_by_name(node_name) | ||||
|       raise "*** unknown node '#{node_name}" unless node | ||||
|       waypoints << {:coord => node, :type => "loc"} | ||||
|     end | ||||
|   else | ||||
|     column_headers.each do |node_name| | ||||
|       node = find_node_by_name(node_name) | ||||
|       raise "*** unknown node '#{node_name}" unless node | ||||
|       waypoints << {:coord => node, :type => "dst"} | ||||
|     end | ||||
|     row_headers.each do |node_name| | ||||
|       node = find_node_by_name(node_name) | ||||
|       raise "*** unknown node '#{node_name}" unless node | ||||
|       waypoints << {:coord => node, :type => "src"} | ||||
|     end | ||||
|   end | ||||
|    | ||||
|   reprocess | ||||
| @ -23,18 +33,18 @@ When /^I request a travel time matrix I should get$/ do |table| | ||||
|      | ||||
|     # compute matrix | ||||
|     params = @query_params | ||||
|     response = request_table nodes, params | ||||
|     response = request_table waypoints, params | ||||
|     if response.body.empty? == false | ||||
|       json = JSON.parse response.body | ||||
|       result = json['distance_table'] | ||||
|       json_result = JSON.parse response.body | ||||
|       result = json_result["distance_table"] | ||||
|     end | ||||
| 
 | ||||
|      | ||||
|     # compare actual and expected result, one row at a time | ||||
|     table.rows.each_with_index do |row,ri| | ||||
|        | ||||
|       # fuzzy match | ||||
|       ok = true | ||||
|       0.upto(nodes.size-1) do |i| | ||||
|       0.upto(result[ri].size-1) do |i| | ||||
|         if FuzzyMatch.match result[ri][i], row[i+1] | ||||
|           result[ri][i] = row[i+1] | ||||
|         elsif row[i+1]=="" and result[ri][i]==no_route | ||||
|  | ||||
| @ -1,51 +0,0 @@ | ||||
| When /^I request locate I should get$/ do |table| | ||||
|   reprocess | ||||
|   actual = [] | ||||
|   OSRMLoader.load(self,"#{prepared_file}.osrm") do | ||||
|     table.hashes.each_with_index do |row,ri| | ||||
|       in_node = find_node_by_name row['in'] | ||||
|       raise "*** unknown in-node '#{row['in']}" unless in_node | ||||
| 
 | ||||
|       out_node = find_node_by_name row['out'] | ||||
|       raise "*** unknown out-node '#{row['out']}" unless out_node | ||||
| 
 | ||||
|       response = request_locate(in_node) | ||||
|       if response.code == "200" && response.body.empty? == false | ||||
|         json = JSON.parse response.body | ||||
|         if json['status'] == 0 | ||||
|           coord =  json['mapped_coordinate'] | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       got = {'in' => row['in'], 'out' => coord } | ||||
| 
 | ||||
|       ok = true | ||||
|       row.keys.each do |key| | ||||
|         if key=='out' | ||||
|           if FuzzyMatch.match_location coord, out_node | ||||
|             got[key] = row[key] | ||||
|           else | ||||
|             row[key] = "#{row[key]} [#{out_node.lat},#{out_node.lon}]" | ||||
|             ok = false | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       unless ok | ||||
|         failed = { :attempt => 'locate', :query => @query, :response => response } | ||||
|         log_fail row,got,[failed] | ||||
|       end | ||||
| 
 | ||||
|       actual << got | ||||
|     end | ||||
|   end | ||||
|   table.diff! actual | ||||
| end | ||||
| 
 | ||||
| When /^I request locate (\d+) times I should get$/ do |n,table| | ||||
|   ok = true | ||||
|   n.to_i.times do | ||||
|     ok = false unless step "I request locate I should get", table | ||||
|   end | ||||
|   ok | ||||
| end | ||||
| @ -8,6 +8,17 @@ When /^I match I should get$/ do |table| | ||||
|         response = request_url row['request'] | ||||
|       else | ||||
|         params = @query_params | ||||
|         got = {} | ||||
|         row.each_pair do |k,v| | ||||
|           if k =~ /param:(.*)/ | ||||
|             if v=='(nil)' | ||||
|               params[$1]=nil | ||||
|             elsif v!=nil | ||||
|               params[$1]=[v] | ||||
|             end | ||||
|             got[k]=v | ||||
|           end | ||||
|         end | ||||
|         trace = [] | ||||
|         timestamps = [] | ||||
|         if row['trace'] | ||||
| @ -19,24 +30,13 @@ When /^I match I should get$/ do |table| | ||||
|           if row['timestamps'] | ||||
|               timestamps = row['timestamps'].split(" ").compact.map { |t| t.to_i} | ||||
|           end | ||||
|           got = {'trace' => row['trace'] } | ||||
|           got = got.merge({'trace' => row['trace'] }) | ||||
|           response = request_matching trace, timestamps, params | ||||
|         else | ||||
|           raise "*** no trace" | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       row.each_pair do |k,v| | ||||
|         if k =~ /param:(.*)/ | ||||
|           if v=='(nil)' | ||||
|             params[$1]=nil | ||||
|           elsif v!=nil | ||||
|             params[$1]=v | ||||
|           end | ||||
|           got[k]=v | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       if response.body.empty? == false | ||||
|         json = JSON.parse response.body | ||||
|       end | ||||
| @ -52,178 +52,35 @@ When /^I match I should get$/ do |table| | ||||
|       end | ||||
| 
 | ||||
|       sub_matchings = [] | ||||
|       turns = '' | ||||
|       route = '' | ||||
|       duration = '' | ||||
|       if response.code == "200" | ||||
|         if table.headers.include? 'matchings' | ||||
|           sub_matchings = json['matchings'].compact.map { |sub| sub['matched_points']} | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       ok = true | ||||
|       encoded_result = "" | ||||
|       extended_target = "" | ||||
|       row['matchings'].split(',').each_with_index do |sub, sub_idx| | ||||
|         if sub_idx >= sub_matchings.length | ||||
|           ok = false | ||||
|           break | ||||
|         end | ||||
|         sub.length.times do |node_idx| | ||||
|           node = find_node_by_name(sub[node_idx]) | ||||
|           out_node = sub_matchings[sub_idx][node_idx] | ||||
|           if FuzzyMatch.match_location out_node, node | ||||
|             encoded_result += sub[node_idx] | ||||
|             extended_target += sub[node_idx] | ||||
|           else | ||||
|             encoded_result += "? [#{out_node[0]},#{out_node[1]}]" | ||||
|             extended_target += "#{sub[node_idx]} [#{node.lat},#{node.lon}]" | ||||
|             ok = false | ||||
|           end | ||||
|         end | ||||
|       end | ||||
|       if ok | ||||
|         got['matchings'] = row['matchings'] | ||||
|         got['timestamps'] = row['timestamps'] | ||||
|       else | ||||
|         got['matchings'] = encoded_result | ||||
|         row['matchings'] = extended_target | ||||
|         log_fail row,got, { 'matching' => {:query => @query, :response => response} } | ||||
|       end | ||||
| 
 | ||||
|       actual << got | ||||
|     end | ||||
|   end | ||||
|   table.diff! actual | ||||
| end | ||||
| 
 | ||||
| When /^I match with turns I should get$/ do |table| | ||||
|   reprocess | ||||
|   actual = [] | ||||
|   OSRMLoader.load(self,"#{prepared_file}.osrm") do | ||||
|     table.hashes.each_with_index do |row,ri| | ||||
|       if row['request'] | ||||
|         got = {'request' => row['request'] } | ||||
|         response = request_url row['request'] | ||||
|       else | ||||
|         params = @query_params | ||||
|         trace = [] | ||||
|         timestamps = [] | ||||
|         if row['from'] and row['to'] | ||||
|           node = find_node_by_name(row['from']) | ||||
|           raise "*** unknown from-node '#{row['from']}" unless node | ||||
|           trace << node | ||||
| 
 | ||||
|           node = find_node_by_name(row['to']) | ||||
|           raise "*** unknown to-node '#{row['to']}" unless node | ||||
|           trace << node | ||||
| 
 | ||||
|           got = {'from' => row['from'], 'to' => row['to'] } | ||||
|           response = request_matching trace, timestamps, params | ||||
|         elsif row['trace'] | ||||
|           row['trace'].each_char do |n| | ||||
|             node = find_node_by_name(n.strip) | ||||
|             raise "*** unknown waypoint node '#{n.strip}" unless node | ||||
|             trace << node | ||||
|           end | ||||
|           if row['timestamps'] | ||||
|               timestamps = row['timestamps'].split(" ").compact.map { |t| t.to_i} | ||||
|           end | ||||
|           got = {'trace' => row['trace'] } | ||||
|           response = request_matching trace, timestamps, params | ||||
|         else | ||||
|           raise "*** no trace" | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       row.each_pair do |k,v| | ||||
|         if k =~ /param:(.*)/ | ||||
|           if v=='(nil)' | ||||
|             params[$1]=nil | ||||
|           elsif v!=nil | ||||
|             params[$1]=v | ||||
|           end | ||||
|           got[k]=v | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       if response.body.empty? == false | ||||
|         json = JSON.parse response.body | ||||
|       end | ||||
|       if response.body.empty? == false | ||||
|         if response.code == "200" | ||||
|           instructions = way_list json['matchings'][0]['instructions'] | ||||
|           bearings = bearing_list json['matchings'][0]['instructions'] | ||||
|           compasses = compass_list json['matchings'][0]['instructions'] | ||||
|         if table.headers.include? 'turns' | ||||
|           raise "*** Checking turns only support for matchings with one subtrace" unless json['matchings'].size == 1 | ||||
|           turns = turn_list json['matchings'][0]['instructions'] | ||||
|           modes = mode_list json['matchings'][0]['instructions'] | ||||
|           times = time_list json['matchings'][0]['instructions'] | ||||
|           distances = distance_list json['matchings'][0]['instructions'] | ||||
|         end | ||||
|       end | ||||
|        | ||||
|       if table.headers.include? 'status' | ||||
|         got['status'] = json['status'].to_s | ||||
|       end | ||||
|       if table.headers.include? 'message' | ||||
|         got['message'] = json['status_message'] | ||||
|       end | ||||
|       if table.headers.include? '#'   # comment column | ||||
|         got['#'] = row['#']           # copy value so it always matches | ||||
|         if table.headers.include? 'route' | ||||
|           raise "*** Checking route only support for matchings with one subtrace" unless json['matchings'].size == 1 | ||||
|           route = way_list json['matchings'][0]['instructions'] | ||||
|         if table.headers.include? 'duration' | ||||
|           raise "*** Checking duration only support for matchings with one subtrace" unless json['matchings'].size == 1 | ||||
|           duration = json['matchings'][0]['route_summary']['total_time'] | ||||
|         end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       sub_matchings = [] | ||||
|       if response.code == "200" | ||||
|         if table.headers.include? 'matchings' | ||||
|           sub_matchings = json['matchings'].compact.map { |sub| sub['matched_points']} | ||||
| 
 | ||||
|           got['route'] = (instructions || '').strip | ||||
|           if table.headers.include?('distance') | ||||
|             if row['distance']!='' | ||||
|               raise "*** Distance must be specied in meters. (ex: 250m)" unless row['distance'] =~ /\d+m/ | ||||
|             end | ||||
|             got['distance'] = instructions ? "#{json['route_summary']['total_distance'].to_s}m" : '' | ||||
|           end | ||||
|           if table.headers.include?('time') | ||||
|             raise "*** Time must be specied in seconds. (ex: 60s)" unless row['time'] =~ /\d+s/ | ||||
|             got['time'] = instructions ? "#{json['route_summary']['total_time'].to_s}s" : '' | ||||
|           end | ||||
|           if table.headers.include?('speed') | ||||
|             if row['speed'] != '' && instructions | ||||
|               raise "*** Speed must be specied in km/h. (ex: 50 km/h)" unless row['speed'] =~ /\d+ km\/h/ | ||||
|                 time = json['route_summary']['total_time'] | ||||
|                 distance = json['route_summary']['total_distance'] | ||||
|                 speed = time>0 ? (3.6*distance/time).to_i : nil | ||||
|                 got['speed'] =  "#{speed} km/h" | ||||
|             else | ||||
|               got['speed'] = '' | ||||
|             end | ||||
|           end | ||||
|           if table.headers.include? 'bearing' | ||||
|             got['bearing'] = instructions ? bearings : '' | ||||
|           end | ||||
|           if table.headers.include? 'compass' | ||||
|             got['compass'] = instructions ? compasses : '' | ||||
|           end | ||||
|           if table.headers.include? 'turns' | ||||
|             got['turns'] = instructions ? turns : '' | ||||
|           end | ||||
|           if table.headers.include? 'modes' | ||||
|             got['modes'] = instructions ? modes : '' | ||||
|           end | ||||
|           if table.headers.include? 'times' | ||||
|             got['times'] = instructions ? times : '' | ||||
|           end | ||||
|           if table.headers.include? 'distances' | ||||
|             got['distances'] = instructions ? distances : '' | ||||
|           end | ||||
|         end | ||||
|         if table.headers.include? 'start' | ||||
|           got['start'] = instructions ? json['route_summary']['start_point'] : nil | ||||
|         end | ||||
|         if table.headers.include? 'end' | ||||
|           got['end'] = instructions ? json['route_summary']['end_point'] : nil | ||||
|         end | ||||
|         if table.headers.include? 'geometry' | ||||
|             got['geometry'] = json['route_geometry'] | ||||
|         end | ||||
|       if table.headers.include? 'turns' | ||||
|         got['turns'] = turns | ||||
|       end | ||||
|       if table.headers.include? 'route' | ||||
|         got['route'] = route | ||||
|       end | ||||
|       if table.headers.include? 'duration' | ||||
|         got['duration'] = duration.to_s | ||||
|       end | ||||
| 
 | ||||
|       ok = true | ||||
| @ -248,8 +105,12 @@ When /^I match with turns I should get$/ do |table| | ||||
|         end | ||||
|       end | ||||
|       if ok | ||||
|         got['matchings'] = row['matchings'] | ||||
|         got['timestamps'] = row['timestamps'] | ||||
|         if table.headers.include? 'matchings' | ||||
|           got['matchings'] = row['matchings'] | ||||
|         end | ||||
|         if table.headers.include? 'timestamps' | ||||
|           got['timestamps'] = row['timestamps'] | ||||
|         end | ||||
|       else | ||||
|         got['matchings'] = encoded_result | ||||
|         row['matchings'] = extended_target | ||||
|  | ||||
| @ -9,10 +9,10 @@ When /^I request nearest I should get$/ do |table| | ||||
|       out_node = find_node_by_name row['out'] | ||||
|       raise "*** unknown out-node '#{row['out']}" unless out_node | ||||
| 
 | ||||
|       response = request_nearest(in_node) | ||||
|       response = request_nearest in_node, @query_params | ||||
|       if response.code == "200" && response.body.empty? == false | ||||
|         json = JSON.parse response.body | ||||
|         if json['status'] == 0 | ||||
|         if json['status'] == 200 | ||||
|           coord =  json['mapped_coordinate'] | ||||
|         end | ||||
|       end | ||||
|  | ||||
| @ -53,5 +53,5 @@ Then /^stdout should contain (\d+) lines?$/ do |lines| | ||||
| end | ||||
| 
 | ||||
| Given (/^the query options$/) do |table| | ||||
|   @query_params = table.rows_hash | ||||
|   table.rows_hash.each { |k,v| @query_params << [k, v] } | ||||
| end | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| When /^I request \/(.*)$/ do |path| | ||||
|   reprocess | ||||
|   OSRMLoader.load(self,"#{prepared_file}.osrm") do | ||||
|     @response = request_path path | ||||
|     @response = request_path path, [] | ||||
|   end | ||||
| end | ||||
| 
 | ||||
|  | ||||
| @ -4,12 +4,12 @@ def test_routability_row i | ||||
|     a = Location.new @origin[0]+(1+WAY_SPACING*i)*@zoom, @origin[1] | ||||
|     b = Location.new @origin[0]+(3+WAY_SPACING*i)*@zoom, @origin[1] | ||||
|     r = {} | ||||
|     r[:response] = request_route (direction=='forw' ? [a,b] : [b,a]), @query_params | ||||
|     r[:response] = request_route (direction=='forw' ? [a,b] : [b,a]), [], @query_params | ||||
|     r[:query] = @query | ||||
|     r[:json] = JSON.parse(r[:response].body) | ||||
| 
 | ||||
|     r[:status] = route_status r[:response] | ||||
|     if r[:status].empty? == false | ||||
|     r[:status] = (route_status r[:response]) == 200 ? 'x' : nil | ||||
|     if r[:status] then | ||||
|       r[:route] = way_list r[:json]['route_instructions'] | ||||
| 
 | ||||
|       if r[:route]=="w#{i}" | ||||
|  | ||||
| @ -7,8 +7,26 @@ When /^I route I should get$/ do |table| | ||||
|         got = {'request' => row['request'] } | ||||
|         response = request_url row['request'] | ||||
|       else | ||||
|         params = @query_params | ||||
|         default_params = @query_params | ||||
|         user_params = [] | ||||
|         got = {} | ||||
|         row.each_pair do |k,v| | ||||
|           if k =~ /param:(.*)/ | ||||
|             if v=='(nil)' | ||||
|               user_params << [$1, nil] | ||||
|             elsif v!=nil | ||||
|               user_params << [$1, v] | ||||
|             end | ||||
|             got[k]=v | ||||
|           end | ||||
|         end | ||||
|         params = overwrite_params default_params, user_params | ||||
|         waypoints = [] | ||||
|         bearings = [] | ||||
|         if row['bearings'] | ||||
|           got['bearings'] = row['bearings'] | ||||
|           bearings = row['bearings'].split(' ').compact | ||||
|         end | ||||
|         if row['from'] and row['to'] | ||||
|           node = find_node_by_name(row['from']) | ||||
|           raise "*** unknown from-node '#{row['from']}" unless node | ||||
| @ -18,38 +36,27 @@ When /^I route I should get$/ do |table| | ||||
|           raise "*** unknown to-node '#{row['to']}" unless node | ||||
|           waypoints << node | ||||
| 
 | ||||
|           got = {'from' => row['from'], 'to' => row['to'] } | ||||
|           response = request_route waypoints, params | ||||
|           got = got.merge({'from' => row['from'], 'to' => row['to'] }) | ||||
|           response = request_route waypoints, bearings, params | ||||
|         elsif row['waypoints'] | ||||
|           row['waypoints'].split(',').each do |n| | ||||
|             node = find_node_by_name(n.strip) | ||||
|             raise "*** unknown waypoint node '#{n.strip}" unless node | ||||
|             waypoints << node | ||||
|           end | ||||
|           got = {'waypoints' => row['waypoints'] } | ||||
|           response = request_route waypoints, params | ||||
|           got = got.merge({'waypoints' => row['waypoints'] }) | ||||
|           response = request_route waypoints, bearings, params | ||||
|         else | ||||
|           raise "*** no waypoints" | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       row.each_pair do |k,v| | ||||
|         if k =~ /param:(.*)/ | ||||
|           if v=='(nil)' | ||||
|             params[$1]=nil | ||||
|           elsif v!=nil | ||||
|             params[$1]=v | ||||
|           end | ||||
|           got[k]=v | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       if response.body.empty? == false | ||||
|         json = JSON.parse response.body | ||||
|       end | ||||
| 
 | ||||
|       if response.body.empty? == false | ||||
|         if json['status'] == 0 | ||||
|         if json['status'] == 200 | ||||
|           instructions = way_list json['route_instructions'] | ||||
|           bearings = bearing_list json['route_instructions'] | ||||
|           compasses = compass_list json['route_instructions'] | ||||
| @ -70,62 +77,64 @@ When /^I route I should get$/ do |table| | ||||
|         got['#'] = row['#']           # copy value so it always matches | ||||
|       end | ||||
| 
 | ||||
|       if response.code == "200" | ||||
|         if table.headers.include? 'start' | ||||
|           got['start'] = instructions ? json['route_summary']['start_point'] : nil | ||||
|         end | ||||
|         if table.headers.include? 'end' | ||||
|           got['end'] = instructions ? json['route_summary']['end_point'] : nil | ||||
|         end | ||||
|         if table.headers.include? 'geometry' | ||||
|             got['geometry'] = json['route_geometry'] | ||||
|         end | ||||
|         if table.headers.include? 'route' | ||||
|           got['route'] = (instructions || '').strip | ||||
|           if table.headers.include?('alternative') | ||||
|             raise "*** No alternative found ***" unless json['found_alternative'] | ||||
|             got['alternative'] = way_list json['alternative_instructions'].first | ||||
|           end | ||||
|           if table.headers.include?('distance') | ||||
|             if row['distance']!='' | ||||
|               raise "*** Distance must be specied in meters. (ex: 250m)" unless row['distance'] =~ /\d+m/ | ||||
|             end | ||||
|             got['distance'] = instructions ? "#{json['route_summary']['total_distance'].to_s}m" : '' | ||||
|           end | ||||
|           if table.headers.include?('time') | ||||
|             raise "*** Time must be specied in seconds. (ex: 60s)" unless row['time'] =~ /\d+s/ | ||||
|             got['time'] = instructions ? "#{json['route_summary']['total_time'].to_s}s" : '' | ||||
|           end | ||||
|           if table.headers.include?('speed') | ||||
|             if row['speed'] != '' && instructions | ||||
|               raise "*** Speed must be specied in km/h. (ex: 50 km/h)" unless row['speed'] =~ /\d+ km\/h/ | ||||
|                 time = json['route_summary']['total_time'] | ||||
|                 distance = json['route_summary']['total_distance'] | ||||
|                 speed = time>0 ? (3.6*distance/time).round : nil | ||||
|                 got['speed'] =  "#{speed} km/h" | ||||
|       if table.headers.include? 'start' | ||||
|         got['start'] = instructions ? json['route_summary']['start_point'] : nil | ||||
|       end | ||||
|       if table.headers.include? 'end' | ||||
|         got['end'] = instructions ? json['route_summary']['end_point'] : nil | ||||
|       end | ||||
|       if table.headers.include? 'geometry' | ||||
|           got['geometry'] = json['route_geometry'] | ||||
|       end | ||||
|       if table.headers.include? 'route' | ||||
|         got['route'] = (instructions || '').strip | ||||
|         if table.headers.include?('alternative') | ||||
|           got['alternative'] = | ||||
|             if json['found_alternative'] | ||||
|               way_list json['alternative_instructions'].first | ||||
|             else | ||||
|               got['speed'] = '' | ||||
|               "" | ||||
|             end | ||||
|         end | ||||
|         if table.headers.include?('distance') | ||||
|           if row['distance']!='' | ||||
|             raise "*** Distance must be specied in meters. (ex: 250m)" unless row['distance'] =~ /\d+m/ | ||||
|           end | ||||
|           if table.headers.include? 'bearing' | ||||
|             got['bearing'] = instructions ? bearings : '' | ||||
|           end | ||||
|           if table.headers.include? 'compass' | ||||
|             got['compass'] = instructions ? compasses : '' | ||||
|           end | ||||
|           if table.headers.include? 'turns' | ||||
|             got['turns'] = instructions ? turns : '' | ||||
|           end | ||||
|           if table.headers.include? 'modes' | ||||
|             got['modes'] = instructions ? modes : '' | ||||
|           end | ||||
|           if table.headers.include? 'times' | ||||
|             got['times'] = instructions ? times : '' | ||||
|           end | ||||
|           if table.headers.include? 'distances' | ||||
|             got['distances'] = instructions ? distances : '' | ||||
|           got['distance'] = instructions ? "#{json['route_summary']['total_distance'].to_s}m" : '' | ||||
|         end | ||||
|         if table.headers.include?('time') | ||||
|           raise "*** Time must be specied in seconds. (ex: 60s)" unless row['time'] =~ /\d+s/ | ||||
|           got['time'] = instructions ? "#{json['route_summary']['total_time'].to_s}s" : '' | ||||
|         end | ||||
|         if table.headers.include?('speed') | ||||
|           if row['speed'] != '' && instructions | ||||
|             raise "*** Speed must be specied in km/h. (ex: 50 km/h)" unless row['speed'] =~ /\d+ km\/h/ | ||||
|               time = json['route_summary']['total_time'] | ||||
|               distance = json['route_summary']['total_distance'] | ||||
|               speed = time>0 ? (3.6*distance/time).round : nil | ||||
|               got['speed'] =  "#{speed} km/h" | ||||
|           else | ||||
|             got['speed'] = '' | ||||
|           end | ||||
|         end | ||||
|         if table.headers.include? 'bearing' | ||||
|           got['bearing'] = instructions ? bearings : '' | ||||
|         end | ||||
|         if table.headers.include? 'compass' | ||||
|           got['compass'] = instructions ? compasses : '' | ||||
|         end | ||||
|         if table.headers.include? 'turns' | ||||
|           got['turns'] = instructions ? turns : '' | ||||
|         end | ||||
|         if table.headers.include? 'modes' | ||||
|           got['modes'] = instructions ? modes : '' | ||||
|         end | ||||
|         if table.headers.include? 'times' | ||||
|           got['times'] = instructions ? times : '' | ||||
|         end | ||||
|         if table.headers.include? 'distances' | ||||
|           got['distances'] = instructions ? distances : '' | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       ok = true | ||||
|  | ||||
| @ -38,7 +38,7 @@ When /^I plan a trip I should get$/ do |table| | ||||
|           if v=='(nil)' | ||||
|             params[$1]=nil | ||||
|           elsif v!=nil | ||||
|             params[$1]=v | ||||
|             params[$1]=[v] | ||||
|           end | ||||
|           got[k]=v | ||||
|         end | ||||
|  | ||||
| @ -10,3 +10,7 @@ end | ||||
| def set_profile profile | ||||
|   @profile = profile | ||||
| end | ||||
| 
 | ||||
| def set_extract_args args | ||||
|     @extract_args = args | ||||
| end | ||||
|  | ||||
| @ -123,8 +123,9 @@ def table_coord_to_lonlat ci,ri | ||||
|   [@origin[0]+ci*@zoom, @origin[1]-ri*@zoom] | ||||
| end | ||||
| 
 | ||||
| def add_osm_node name,lon,lat | ||||
|   node = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, lon, lat | ||||
| def add_osm_node name,lon,lat,id | ||||
|   id = make_osm_id if id == nil | ||||
|   node = OSM::Node.new id, OSM_USER, OSM_TIMESTAMP, lon, lat | ||||
|   node << { :name => name } | ||||
|   node.uid = OSM_UID | ||||
|   osm_db << node | ||||
| @ -273,12 +274,13 @@ def extract_data | ||||
|   Dir.chdir TEST_FOLDER do | ||||
|     log_preprocess_info | ||||
|     log "== Extracting #{osm_file}.osm...", :preprocess | ||||
|     unless system "#{BIN_PATH}/osrm-extract #{osm_file}.osm#{'.pbf' if pbf?} --profile #{PROFILES_PATH}/#{@profile}.lua >>#{PREPROCESS_LOG_FILE} 2>&1" | ||||
|     unless system "#{BIN_PATH}/osrm-extract #{osm_file}.osm#{'.pbf' if pbf?} #{@extract_args} --profile #{PROFILES_PATH}/#{@profile}.lua >>#{PREPROCESS_LOG_FILE} 2>&1" | ||||
|       log "*** Exited with code #{$?.exitstatus}.", :preprocess | ||||
|       raise ExtractError.new $?.exitstatus, "osrm-extract exited with code #{$?.exitstatus}." | ||||
|     end | ||||
|     begin | ||||
|       ["osrm","osrm.names","osrm.restrictions"].each do |file| | ||||
|       ["osrm","osrm.names","osrm.restrictions","osrm.ebg","osrm.edges","osrm.fileIndex","osrm.geometry","osrm.nodes","osrm.ramIndex"].each do |file| | ||||
|         log "Renaming #{osm_file}.#{file} to #{extracted_file}.#{file}", :preprocess | ||||
|         File.rename "#{osm_file}.#{file}", "#{extracted_file}.#{file}" | ||||
|       end | ||||
|     rescue Exception => e | ||||
| @ -296,14 +298,16 @@ def prepare_data | ||||
|       raise PrepareError.new $?.exitstatus, "osrm-prepare exited with code #{$?.exitstatus}." | ||||
|     end | ||||
|     begin | ||||
|       ["osrm.hsgr","osrm.fileIndex","osrm.geometry","osrm.nodes","osrm.ramIndex","osrm.core"].each do |file| | ||||
|       ["osrm.hsgr","osrm.fileIndex","osrm.geometry","osrm.nodes","osrm.ramIndex","osrm.core","osrm.edges"].each do |file| | ||||
|         log "Renaming #{extracted_file}.#{file} to #{prepared_file}.#{file}", :preprocess | ||||
|         File.rename "#{extracted_file}.#{file}", "#{prepared_file}.#{file}" | ||||
|       end | ||||
|     rescue Exception => e | ||||
|       raise FileError.new nil, "failed to rename data file after preparing." | ||||
|     end | ||||
|     begin | ||||
|       ["osrm.names","osrm.edges","osrm.restrictions"].each do |file| | ||||
|       ["osrm.names","osrm.restrictions","osrm"].each do |file| | ||||
|         log "Copying #{extracted_file}.#{file} to #{prepared_file}.#{file}", :preprocess | ||||
|         FileUtils.cp "#{extracted_file}.#{file}", "#{prepared_file}.#{file}" | ||||
|       end | ||||
|     rescue Exception => e | ||||
|  | ||||
| @ -15,7 +15,7 @@ Before do |scenario| | ||||
|   end | ||||
|    | ||||
|   @load_method  = DEFAULT_LOAD_METHOD | ||||
|   @query_params = {} | ||||
|   @query_params = [] | ||||
|   @scenario_time = Time.now.strftime("%Y-%m-%dT%H:%m:%SZ") | ||||
|   reset_data | ||||
|   @has_logged_preprocess_info = false | ||||
|  | ||||
| @ -1,29 +1,32 @@ | ||||
| require 'net/http' | ||||
| 
 | ||||
| def generate_request_url path | ||||
|   if @http_method.eql? "POST" | ||||
|     pos = path.index('?') - 1 | ||||
|     service = path[0..pos] | ||||
|     uri = URI.parse "#{HOST}/#{service}" | ||||
|   else | ||||
|     uri = URI.parse "#{HOST}/#{path}" | ||||
| # Converts an array [["param","val1"], ["param","val2"]] into ?param=val1¶m=val2 | ||||
| def params_to_url params | ||||
|   kv_pairs = params.map { |kv| kv[0].to_s + "=" + kv[1].to_s } | ||||
|   url = kv_pairs.size > 0 ? ("?" + kv_pairs.join("&")) : "" | ||||
|   return url | ||||
| end | ||||
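A quick illustration of the helper above (editor's sketch, not part of the commit; parameter values are made up): each key/value pair becomes one query-string component, and an empty list yields an empty string.

    # Editor's sketch of params_to_url behaviour (illustrative values only).
    params_to_url [["output", "json"], ["loc", "1,1"], ["loc", "2,2"]]
    # => "?output=json&loc=1,1&loc=2,2"
    params_to_url []
    # => ""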
| 
 | ||||
| # Converts an array [["param","val1"], ["param","val2"]] into ["param"=>["val1", "val2"]] | ||||
| def params_to_map params | ||||
|   result = {} | ||||
|   params.each do |pair| | ||||
|     if not result.has_key? pair[0] | ||||
|       result[pair[0]] = [] | ||||
|     end | ||||
|     result[pair[0]] << [pair[1]] | ||||
|   end | ||||
| end | ||||
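For reference, a small usage sketch of params_to_map (editor's illustration with made-up values, assuming the hash built above is returned): repeated keys are grouped into arrays, which is a shape Net::HTTP.post_form can consume.

    # Editor's sketch; hypothetical parameter values.
    params_to_map [["loc", "1,1"], ["loc", "2,2"], ["output", "json"]]
    # => {"loc"=>["1,1", "2,2"], "output"=>["json"]}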
| 
 | ||||
| def send_request uri, waypoints=[], options={}, timestamps=[] | ||||
|   @query = uri.to_s | ||||
| def send_request base_uri, parameters | ||||
|   Timeout.timeout(OSRM_TIMEOUT) do | ||||
|     if @http_method.eql? "POST" | ||||
|       datas = {} | ||||
|       if waypoints.length > 0 | ||||
|         datas[:loc] = waypoints.compact.map { |w| "#{w.lat},#{w.lon}" } | ||||
|       end | ||||
|       if timestamps.length > 0 | ||||
|         datas[:t] = timestamps.compact.map { |t| "#{t}" } | ||||
|       end | ||||
|       datas.merge! options | ||||
|       response = Net::HTTP.post_form uri, datas | ||||
|       uri = URI.parse base_uri | ||||
|       @query = uri.to_s | ||||
|       response = Net::HTTP.post_form uri, (params_to_map parameters) | ||||
|     else | ||||
|       uri = URI.parse base_uri+(params_to_url parameters) | ||||
|       @query = uri.to_s | ||||
|       response = Net::HTTP.get_response uri | ||||
|     end | ||||
|   end | ||||
|  | ||||
| @ -1,12 +0,0 @@ | ||||
| require 'net/http' | ||||
| 
 | ||||
| def request_locate_url path, node | ||||
|   @query = path | ||||
| 
 | ||||
|   uri = generate_request_url path | ||||
|   response = send_request uri, [node] | ||||
| end | ||||
| 
 | ||||
| def request_locate node | ||||
|   request_locate_url "locate?loc=#{node.lat},#{node.lon}", node | ||||
| end | ||||
| @ -1,20 +0,0 @@ | ||||
| require 'net/http' | ||||
| 
 | ||||
| HOST = "http://127.0.0.1:#{OSRM_PORT}" | ||||
| 
 | ||||
| def request_matching trace=[], timestamps=[], options={} | ||||
|   defaults = { 'output' => 'json', 'instructions' => 'true' } | ||||
|   locs = trace.compact.map { |w| "loc=#{w.lat},#{w.lon}" } | ||||
|   ts = timestamps.compact.map { |t| "t=#{t}" } | ||||
|   if ts.length > 0 | ||||
|     trace_params = locs.zip(ts).map { |a| a.join('&')} | ||||
|   else | ||||
|     trace_params = locs | ||||
|   end | ||||
|   params = (trace_params + defaults.merge(options).to_param).join('&') | ||||
|   params = nil if params=="" | ||||
|    | ||||
|   uri = generate_request_url ("match" + '?' + params) | ||||
|   response = send_request uri, trace, options, timestamps | ||||
| end | ||||
| 
 | ||||
| @ -1,12 +0,0 @@ | ||||
| require 'net/http' | ||||
| 
 | ||||
| def request_nearest_url path, node | ||||
|   @query = path | ||||
|    | ||||
|   uri = generate_request_url path | ||||
|   response = send_request uri, [node] | ||||
| end | ||||
| 
 | ||||
| def request_nearest node | ||||
|   request_nearest_url "nearest?loc=#{node.lat},#{node.lon}", node | ||||
| end | ||||
| @ -3,25 +3,10 @@ require 'net/http' | ||||
| HOST = "http://127.0.0.1:#{OSRM_PORT}" | ||||
| DESTINATION_REACHED = 15      #OSRM instruction code | ||||
| 
 | ||||
| class Hash | ||||
|   def to_param(namespace = nil) | ||||
|     collect do |key, value| | ||||
|       "#{key}=#{value}" | ||||
|     end.sort | ||||
|   end | ||||
| end | ||||
| 
 | ||||
| def request_path path, waypoints=[], options={} | ||||
|   locs = waypoints.compact.map { |w| "loc=#{w.lat},#{w.lon}" } | ||||
|   params = (locs + options.to_param).join('&') | ||||
|   params = nil if params=="" | ||||
|    | ||||
|   if params == nil | ||||
|     uri = generate_request_url (path) | ||||
|   else | ||||
|     uri = generate_request_url (path + '?' + params) | ||||
|   end | ||||
|   response = send_request uri, waypoints, options | ||||
| def request_path service, params | ||||
|   uri = "#{HOST}/" + service | ||||
|   response = send_request uri, params | ||||
|   return response | ||||
| end | ||||
| 
 | ||||
| def request_url path | ||||
| @ -36,40 +21,95 @@ rescue Timeout::Error | ||||
|   raise "*** osrm-routed did not respond." | ||||
| end | ||||
| 
 | ||||
| def request_route waypoints, params={} | ||||
|   defaults = { 'output' => 'json', 'instructions' => true, 'alt' => false } | ||||
|   request_path "viaroute", waypoints, defaults.merge(params) | ||||
| # Overwrites the values in defaults with the values given in other. | ||||
| # e.g. [[a, 1], [b, 2]], [[a, 5], [d, 10]] => [[a, 5], [b, 2], [d, 10]] | ||||
| def overwrite_params defaults, other | ||||
|   merged = [] | ||||
|   defaults.each do |k,v| | ||||
|     idx = other.index { |p| p[0] == k } | ||||
|     if idx == nil then | ||||
|       merged << [k, v] | ||||
|     else | ||||
|       merged << [k, other[idx][1]] | ||||
|     end | ||||
|   end | ||||
|   other.each do |k,v| | ||||
|     if merged.index { |pair| pair[0] == k} == nil then | ||||
|       merged << [k, v] | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   return merged | ||||
| end | ||||
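The parameters are kept as an array of pairs rather than a Hash, presumably because the request helpers below need repeated keys (several loc entries) and a stable order. A minimal usage sketch with illustrative keys:

  defaults = [['output', 'json'], ['alt', false]]
  user     = [['alt', true], ['compression', false]]
  overwrite_params(defaults, user)
  # => [['output', 'json'], ['alt', true], ['compression', false]]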
| 
 | ||||
| def request_table waypoints, params={} | ||||
|   defaults = { 'output' => 'json' } | ||||
|   request_path "table", waypoints, defaults.merge(params) | ||||
| def request_route waypoints, bearings, user_params | ||||
|   raise "*** number of bearings does not equal the number of waypoints" unless bearings.size == 0 || bearings.size == waypoints.size | ||||
| 
 | ||||
|   defaults = [['output','json'], ['instructions',true], ['alt',false]] | ||||
|   params = overwrite_params defaults, user_params | ||||
|   encoded_waypoint = waypoints.map{ |w| ["loc","#{w.lat},#{w.lon}"] } | ||||
|   if bearings.size > 0 | ||||
|     encoded_bearings = bearings.map { |b| ["b", b.to_s]} | ||||
|       params = params.concat encoded_waypoint.zip(encoded_bearings).flatten! 1 | ||||
|   else | ||||
|     params = params.concat encoded_waypoint | ||||
|   end | ||||
| 
 | ||||
|   return request_path "viaroute", params | ||||
| end | ||||
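The zip/flatten! step keeps each waypoint's loc directly followed by its b bearing in the final parameter list; a small sketch with made-up coordinates:

  encoded_waypoint = [['loc', '1.0,1.0'], ['loc', '1.0,2.0']]
  encoded_bearings = [['b', '90'], ['b', '90']]
  encoded_waypoint.zip(encoded_bearings).flatten!(1)
  # => [['loc', '1.0,1.0'], ['b', '90'], ['loc', '1.0,2.0'], ['b', '90']]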
| 
 | ||||
| def request_nearest node, user_params | ||||
|   defaults = [['output', 'json']] | ||||
|   params = overwrite_params defaults, user_params | ||||
|   params << ["loc", "#{node.lat},#{node.lon}"] | ||||
| 
 | ||||
|   return request_path "nearest", params | ||||
| end | ||||
| 
 | ||||
| def request_table waypoints, user_params | ||||
|   defaults = [['output', 'json']] | ||||
|   params = overwrite_params defaults, user_params | ||||
|   params = params.concat waypoints.map{ |w| [w[:type],"#{w[:coord].lat},#{w[:coord].lon}"] } | ||||
| 
 | ||||
|   return request_path "table", params | ||||
| end | ||||
| 
 | ||||
| def request_trip waypoints, user_params | ||||
|   defaults = [['output', 'json']] | ||||
|   params = overwrite_params defaults, user_params | ||||
|   params = params.concat waypoints.map{ |w| ["loc","#{w.lat},#{w.lon}"] } | ||||
| 
 | ||||
|   return request_path "trip", params | ||||
| end | ||||
| 
 | ||||
| def request_matching waypoints, timestamps, user_params | ||||
|   defaults = [['output', 'json']] | ||||
|   params = overwrite_params defaults, user_params | ||||
|   encoded_waypoint = waypoints.map{ |w| ["loc","#{w.lat},#{w.lon}"] } | ||||
|   if timestamps.size > 0 | ||||
|     encoded_timestamps = timestamps.map { |t| ["t", t.to_s]} | ||||
|       params = params.concat encoded_waypoint.zip(encoded_timestamps).flatten! 1 | ||||
|   else | ||||
|     params = params.concat encoded_waypoint | ||||
|   end | ||||
| 
 | ||||
|   return request_path "match", params | ||||
| end | ||||
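request_matching interleaves each loc with its t timestamp in the same way request_route pairs loc with b, so — assuming params_to_url simply joins the pairs as key=value with '&' (its body is only partly visible above) — a two-point trace ends up roughly as:

  /match?output=json&loc=1.0,1.0&t=1000&loc=1.0,2.0&t=1010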
| 
 | ||||
| def got_route? response | ||||
|   if response.code == "200" && !response.body.empty? | ||||
|     json = JSON.parse response.body | ||||
|     if json['status'] == 0 | ||||
|     if json['status'] == 200 | ||||
|       return way_list( json['route_instructions']).empty? == false | ||||
|     end | ||||
|   end | ||||
|   false | ||||
|   return false | ||||
| end | ||||
| 
 | ||||
| def route_status response | ||||
|   if response.code == "200" && !response.body.empty? | ||||
|     json = JSON.parse response.body | ||||
|     if json['status'] == 0 | ||||
|       if way_list( json['route_instructions']).empty? | ||||
|         return 'Empty route' | ||||
|       else | ||||
|         return 'x' | ||||
|       end | ||||
|     elsif json['status'] == 207 | ||||
|       '' | ||||
|     else | ||||
|       "Status #{json['status']}" | ||||
|     end | ||||
|     return json['status'] | ||||
|   else | ||||
|     "HTTP #{response.code}" | ||||
|   end | ||||
|  | ||||
| @ -1,14 +0,0 @@ | ||||
| require 'net/http' | ||||
| HOST = "http://127.0.0.1:#{OSRM_PORT}" | ||||
| 
 | ||||
| def request_trip waypoints=[], params={} | ||||
|   defaults = { 'output' => 'json' } | ||||
|   locs = waypoints.compact.map { |w| "loc=#{w.lat},#{w.lon}" } | ||||
| 
 | ||||
|   params = (locs + defaults.merge(params).to_param).join('&') | ||||
|   params = nil if params=="" | ||||
| 
 | ||||
|   uri = generate_request_url ("trip" + '?' + params) | ||||
|   response = send_request uri, waypoints, params | ||||
| end | ||||
| 
 | ||||
							
								
								
									
23  features/testbot/64bit.feature  Normal file
							| @ -0,0 +1,23 @@ | ||||
| @testbot | ||||
| Feature: Support 64bit node IDs | ||||
| 
 | ||||
|     # Without 64bit support this test should fail: IDs such as 4294967297 (2^32 + 1) | ||||
|     # overflow a 32 bit node ID and would collide with node a (ID 1) | ||||
|     Scenario: 64bit overflow conflicts | ||||
|         Given the node locations | ||||
|             | node | lat       | lon       | id         | | ||||
|             | a    | 55.660778 | 12.573909 | 1          | | ||||
|             | b    | 55.660672 | 12.573693 | 2          | | ||||
|             | c    | 55.660128 | 12.572546 | 3          | | ||||
|             | d    | 55.660015 | 12.572476 | 4294967297 | | ||||
|             | e    | 55.660119 | 12.572325 | 4294967298 | | ||||
|             | x    | 55.660818 | 12.574051 | 4294967299 | | ||||
|             | y    | 55.660073 | 12.574067 | 4294967300 | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | abc   | | ||||
|             | cdec  | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route | turns            | | ||||
|             | x    | y  | abc   | head,destination | | ||||
							
								
								
									
38  features/testbot/alternative.feature  Normal file
							| @ -0,0 +1,38 @@ | ||||
| @routing @testbot @alternative | ||||
| Feature: Alternative route | ||||
| 
 | ||||
|     Background: | ||||
|         Given the profile "testbot" | ||||
| 
 | ||||
|         And the node map | ||||
|             |   | b | c | d |   |   | | ||||
|             | a |   |   |   |   | z | | ||||
|             |   | g | h | i | j |   | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ab    | | ||||
|             | bc    | | ||||
|             | cd    | | ||||
|             | dz    | | ||||
|             | ag    | | ||||
|             | gh    | | ||||
|             | hi    | | ||||
|             | ij    | | ||||
|             | jz    | | ||||
| 
 | ||||
|     Scenario: Enabled alternative | ||||
|         Given the query options | ||||
|             | alt | true | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route       | alternative    | | ||||
|             | a    | z  | ab,bc,cd,dz | ag,gh,hi,ij,jz | | ||||
| 
 | ||||
|     Scenario: Disabled alternative | ||||
|         Given the query options | ||||
|             | alt | false | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | route       | alternative | | ||||
|             | a    | z  | ab,bc,cd,dz |             | | ||||
| @ -1,10 +1,26 @@ | ||||
| @routing @bearing_param @todo @testbot | ||||
| @routing @bearing_param @testbot | ||||
| Feature: Bearing parameter | ||||
| 
 | ||||
|     Background: | ||||
|         Given the profile "testbot" | ||||
|         And a grid size of 10 meters | ||||
| 
 | ||||
|     Scenario: Testbot - Initial bearing in simple case | ||||
|         Given the node map | ||||
|             | a | b | c | d | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ad    | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | bearings  | route | bearing | | ||||
|             | b    | c  | 90 90     | ad    | 90      | | ||||
|             | b    | c  | 180 90    |       |         | | ||||
|             | b    | c  | 80 100    | ad    | 90      | | ||||
|             | b    | c  | 79 100    |       |         | | ||||
|             | b    | c  | 79,11 100 | ad    | 90      | | ||||
| 
 | ||||
|     Scenario: Testbot - Initial bearing in simple case | ||||
|         Given the node map | ||||
|             | a |   | | ||||
| @ -17,13 +33,13 @@ Feature: Bearing parameter | ||||
|             | bc    | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | param:bearing | route | bearing | | ||||
|             | 0    | c  | 0             | bc    | 45      | | ||||
|             | 0    | c  | 45            | bc    | 45      | | ||||
|             | 0    | c  | 85            | bc    | 45      | | ||||
|             | 0    | c  | 95            | ac    | 135     | | ||||
|             | 0    | c  | 135           | ac    | 135     | | ||||
|             | 0    | c  | 180           | ac    | 135     | | ||||
|             | from | to | bearings | route | bearing | | ||||
|             | 0    | c  | 0 0      |       |         | | ||||
|             | 0    | c  | 45 45    | bc    | 45 ~3%   | | ||||
|             | 0    | c  | 85 85    |       |         | | ||||
|             | 0    | c  | 95 95    |       |         | | ||||
|             | 0    | c  | 135 135  | ac    | 135 ~1%  | | ||||
|             | 0    | c  | 180 180  |       |         | | ||||
| 
 | ||||
|     Scenario: Testbot - Initial bearing on split way | ||||
|         Given the node map | ||||
| @ -38,23 +54,25 @@ Feature: Bearing parameter | ||||
|             | da    | yes    | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | param:bearing | route    | bearing | | ||||
|             | 0    | b  | 10            | ab       | 90      | | ||||
|             | 0    | b  | 90            | ab       | 90      | | ||||
|             | 0    | b  | 170           | ab       | 90      | | ||||
|             | 0    | b  | 190           | cd,da,ab | 270     | | ||||
|             | 0    | b  | 270           | cd,da,ab | 270     | | ||||
|             | 0    | b  | 350           | cd,da,ab | 270     | | ||||
|             | 1    | d  | 10            | cd       | 90      | | ||||
|             | 1    | d  | 90            | cd       | 90      | | ||||
|             | 1    | d  | 170           | cd       | 90      | | ||||
|             | 1    | d  | 190           | ab,bc,cd | 270     | | ||||
|             | 1    | d  | 270           | ab,bc,cd | 270     | | ||||
|             | 1    | d  | 350           | ab,bc,cd | 270     | | ||||
|             | from | to | bearings | route       | bearing       | | ||||
|             | 0    | b  | 10 10    | bc          | 0             | | ||||
|             | 0    | b  | 90 90    | ab          | 90            | | ||||
|             # The returned bearing is wrong here, it's based on the snapped | ||||
|             # coordinates, not the actual edge bearing.  This should be | ||||
|             # fixed one day, but it's only a problem when we snap two vias | ||||
|             # to the same point - DP | ||||
|             #| 0    | b  | 170 170  | da          | 180           | | ||||
|             #| 0    | b  | 189 189  | da          | 180           | | ||||
|             | 0    | 1  | 90 270   | ab,bc,cd    | 90,0,270      | | ||||
|             | 1    | d  | 10 10    | bc          | 0             | | ||||
|             | 1    | d  | 90 90    | ab,bc,cd,da | 90,0,270,180  | | ||||
|             | 1    | 0  | 189 189  | da          | 180           | | ||||
|             | 1    | d  | 270 270  | cd          | 270           | | ||||
|             | 1    | d  | 349 349  |             |               | | ||||
| 
 | ||||
|     Scenario: Testbot - Initial bearing in all direction | ||||
|         Given the node map | ||||
|             | h |  |   | a |   |  | b | | ||||
|             | h |  | q | a |   |  | b | | ||||
|             |   |  |   |   |   |  |   | | ||||
|             |   |  | p | i | j |  |   | | ||||
|             | g |  | o | 0 | k |  | c | | ||||
| @ -82,12 +100,12 @@ Feature: Bearing parameter | ||||
|             | ha    | yes    | | ||||
| 
 | ||||
|         When I route I should get | ||||
|             | from | to | param:bearing | route                   | bearing | | ||||
|             | 0    | a  | 0             | ia                      | 0       | | ||||
|             | 0    | a  | 45            | jb,bc,cd,de,ef,fg,gh,ha | 45      | | ||||
|             | 0    | a  | 90            | kc,cd,de,ef,fg,gh,ha    | 90      | | ||||
|             | 0    | a  | 135           | ld,de,ef,fg,gh,ha       | 135     | | ||||
|             | 0    | a  | 180           | me,de,ef,fg,gh,ha       | 180     | | ||||
|             | 0    | a  | 225           | nf,ef,fg,gh,ha          | 225     | | ||||
|             | 0    | a  | 270           | og,gh,ha                | 270     | | ||||
|             | 0    | a  | 315           | pn,ha                   | 315     | | ||||
|             | from | to | bearings | route                      | bearing                     | | ||||
|             | 0    | q  | 0 90     | ia,ab,bc,cd,de,ef,fg,gh,ha | 0,90,180,180,270,270,0,0,90 | | ||||
|             | 0    | a  | 45 90    | jb,bc,cd,de,ef,fg,gh,ha    | 45,180,180,270,270,0,0,90   | | ||||
|             | 0    | q  | 90 90    | kc,cd,de,ef,fg,gh,ha       | 90,180,270,270,0,0,90       | | ||||
|             | 0    | a  | 135 90   | ld,de,ef,fg,gh,ha          | 135,270,270,0,0,90          | | ||||
|             | 0    | a  | 180 90   | me,ef,fg,gh,ha             | 180,270,0,0,90              | | ||||
|             | 0    | a  | 225 90   | nf,fg,gh,ha                | 225,0,0,90                  | | ||||
|             | 0    | a  | 270 90   | og,gh,ha                   | 270,0,90                    | | ||||
|             | 0    | a  | 315 90   | ph,ha                      | 315,90                      | | ||||
|  | ||||
| @ -100,3 +100,82 @@ Feature: Basic Distance Matrix | ||||
|             | y | 500 | 0   | 300 | 200 | | ||||
|             | d | 200 | 300 | 0   | 300 | | ||||
|             | e | 300 | 400 | 100 | 0   | | ||||
| 
 | ||||
|     Scenario: Testbot - Travel time matrix with only one source | ||||
|         Given the node map | ||||
|             | a | b | c | | ||||
|             | d | e | f | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | abc   | | ||||
|             | def   | | ||||
|             | ad    | | ||||
|             | be    | | ||||
|             | cf    | | ||||
| 
 | ||||
|         When I request a travel time matrix I should get | ||||
|             |   | a   | b   | e   | f   | | ||||
|             | a | 0   | 100 | 200 | 300 | | ||||
| 
 | ||||
|     Scenario: Testbot - Travel time 3x2 matrix | ||||
|         Given the node map | ||||
|             | a | b | c | | ||||
|             | d | e | f | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | abc   | | ||||
|             | def   | | ||||
|             | ad    | | ||||
|             | be    | | ||||
|             | cf    | | ||||
| 
 | ||||
|         When I request a travel time matrix I should get | ||||
|             |   | b   | e   | f   | | ||||
|             | a | 100 | 200 | 300 | | ||||
|             | b | 0   | 100 | 200 | | ||||
| 
 | ||||
|     Scenario: Testbot - All coordinates are from the same small component | ||||
|         Given a grid size of 300 meters | ||||
|         Given the extract extra arguments "--small-component-size 4" | ||||
|         Given the node map | ||||
|             | a | b |  | f | | ||||
|             | d | e |  | g | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ab    | | ||||
|             | be    | | ||||
|             | ed    | | ||||
|             | da    | | ||||
|             | fg    | | ||||
| 
 | ||||
|         When I request a travel time matrix I should get | ||||
|             |   | f   | g   | | ||||
|             | f | 0   | 300 | | ||||
|             | g | 300 |  0  | | ||||
| 
 | ||||
|     Scenario: Testbot - Coordinates are from different small components and snap to big CC | ||||
|         Given a grid size of 300 meters | ||||
|         Given the extract extra arguments "--small-component-size 4" | ||||
|         Given the node map | ||||
|             | a | b |  | f | h | | ||||
|             | d | e |  | g | i | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | ab    | | ||||
|             | be    | | ||||
|             | ed    | | ||||
|             | da    | | ||||
|             | fg    | | ||||
|             | hi    | | ||||
| 
 | ||||
|         When I request a travel time matrix I should get | ||||
|             |   | f   | g   | h   | i   | | ||||
|             | f | 0   | 300 | 0   | 300 | | ||||
|             | g | 300 |  0  | 300 | 0   | | ||||
|             | h | 0   | 300 | 0   | 300 | | ||||
|             | i | 300 |  0  | 300 | 0   | | ||||
| 
 | ||||
|  | ||||
| @ -5,6 +5,8 @@ Feature: Turn directions/codes | ||||
|         Given the profile "testbot" | ||||
| 
 | ||||
|     Scenario: Turn directions | ||||
|         Given the query options | ||||
|             | instructions | true | | ||||
|         Given the node map | ||||
|             | o | p | a | b | c | | ||||
|             | n |   |   |   | d | | ||||
| @ -31,7 +33,7 @@ Feature: Turn directions/codes | ||||
|             | xo    | | ||||
|             | xp    | | ||||
| 
 | ||||
|         When I match with turns I should get | ||||
|         When I match I should get | ||||
|             | trace | route | turns                         | matchings | | ||||
|             | im    | xi,xm | head,left,destination         | im        | | ||||
|             | io    | xi,xo | head,slight_left,destination  | io        | | ||||
| @ -79,4 +81,41 @@ Feature: Turn directions/codes | ||||
|             | gm    | xg,xm | head,slight_left,destination  | gm        | | ||||
|             | go    | xg,xo | head,straight,destination     | go        | | ||||
|             | ga    | xg,xa | head,slight_right,destination | ga        | | ||||
|             | gc    | xg,xc | head,right,destination        | gc        | | ||||
|             | gc    | xg,xc | head,right,destination        | gc        | | ||||
| 
 | ||||
|     Scenario: Turn directions | ||||
|         Given the query options | ||||
|             | instructions | true | | ||||
|         Given the node map | ||||
|             | o | p | a | b | c | | ||||
|             | n |   |   |   | d | | ||||
|             | m |   | x |   | e | | ||||
|             | l |   |   |   | f | | ||||
|             | k | j | i | h | g | | ||||
| 
 | ||||
|         And the ways | ||||
|             | nodes | | ||||
|             | xa    | | ||||
|             | xb    | | ||||
|             | xc    | | ||||
|             | xd    | | ||||
|             | xe    | | ||||
|             | xf    | | ||||
|             | xg    | | ||||
|             | xh    | | ||||
|             | xi    | | ||||
|             | xj    | | ||||
|             | xk    | | ||||
|             | xl    | | ||||
|             | xm    | | ||||
|             | xn    | | ||||
|             | xo    | | ||||
|             | xp    | | ||||
| 
 | ||||
|         When I match I should get | ||||
|             | trace | route | turns                         | matchings | duration | | ||||
|             | im    | xi,xm | head,left,destination         | im        | 80       | | ||||
|             | io    | xi,xo | head,slight_left,destination  | io        | 88       | | ||||
|             | ia    | xi,xa | head,straight,destination     | ia        | 80       | | ||||
|             | ic    | xi,xc | head,slight_right,destination | ic        | 88       | | ||||
|             | ie    | xi,xe | head,right,destination        | ie        | 60       | | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff.