Merge remote-tracking branch 'upstream/master' into lua_maxpseed_refactoring

commit 5230a8717d

.gitignore (vendored, 4 changes)
@@ -68,6 +68,10 @@ Thumbs.db
 /*.local.bat
 /CMakeSettings.json
 
+# Jetbrains related files #
+###########################
+.idea/
+
 # stxxl related files #
 #######################
 .stxxl
.travis.yml (122 changes)

@@ -17,7 +17,7 @@ branches:
     - /^v\d+\.\d+(\.\d+)?(-\S*)?$/
 
 cache:
-  yarn: true
+  npm: true
   ccache: true
   apt: true
   directories:
@@ -34,7 +34,7 @@ env:
     - CMAKE_VERSION=3.7.2
    - MASON="$(pwd)/scripts/mason.sh"
    - ENABLE_NODE_BINDINGS=On
-    - NODE="4"
+    - NODE="10"
 
 matrix:
   fast_finish: true
@@ -45,7 +45,7 @@ matrix:
     # Debug Builds
     - os: linux
       compiler: "format-taginfo-docs"
-      env: NODE=6
+      env: NODE=10
       sudo: false
      before_install:
      install:
@@ -53,8 +53,7 @@ matrix:
        - nvm install $NODE
        - nvm use $NODE
        - npm --version
-       - npm install --ignore-scripts
-       - npm link --ignore-scripts
+       - npm ci --ignore-scripts
      script:
        - ./scripts/check_taginfo.py taginfo.json profiles/car.lua
        - ${MASON} install clang-format 3.8.1
@@ -156,17 +155,17 @@ matrix:
 
    - os: osx
      osx_image: xcode9.2
-      compiler: "mason-osx-release-node-8"
+      compiler: "mason-osx-release-node-10"
      # we use the xcode provides clang and don't install our own
-      env: ENABLE_MASON=ON BUILD_TYPE='Release' CUCUMBER_TIMEOUT=60000 CCOMPILER='clang' CXXCOMPILER='clang++' ENABLE_ASSERTIONS=ON ENABLE_LTO=ON NODE="8"
+      env: ENABLE_MASON=ON BUILD_TYPE='Release' CUCUMBER_TIMEOUT=60000 CCOMPILER='clang' CXXCOMPILER='clang++' ENABLE_ASSERTIONS=ON ENABLE_LTO=ON NODE="10"
      after_success:
        - ./scripts/travis/publish.sh
 
    - os: osx
      osx_image: xcode9.2
-      compiler: "mason-osx-release-node-4"
+      compiler: "mason-osx-release-node-8"
      # we use the xcode provides clang and don't install our own
-      env: ENABLE_MASON=ON BUILD_TYPE='Release' CUCUMBER_TIMEOUT=60000 CCOMPILER='clang' CXXCOMPILER='clang++' ENABLE_ASSERTIONS=ON ENABLE_LTO=ON NODE="4"
+      env: ENABLE_MASON=ON BUILD_TYPE='Release' CUCUMBER_TIMEOUT=60000 CCOMPILER='clang' CXXCOMPILER='clang++' ENABLE_ASSERTIONS=ON ENABLE_LTO=ON NODE="8"
      after_success:
        - ./scripts/travis/publish.sh
 
@@ -180,54 +179,6 @@ matrix:
      env: CCOMPILER='gcc-7' CXXCOMPILER='g++-7' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON
 
    # Node build jobs. These skip running the tests.
-    - os: linux
-      sudo: false
-      compiler: "node-4-mason-linux-release"
-      addons:
-        apt:
-          sources: ['ubuntu-toolchain-r-test']
-          packages: ['libstdc++-4.9-dev']
-      env: CLANG_VERSION='5.0.0' BUILD_TYPE='Release' ENABLE_MASON=ON ENABLE_LTO=ON JOBS=3 NODE="4"
-      install:
-        - pushd ${OSRM_BUILD_DIR}
-        - |
-          cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-            -DENABLE_MASON=${ENABLE_MASON:-OFF} \
-            -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
-            -DENABLE_CCACHE=ON \
-            -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} \
-            -DENABLE_GLIBC_WORKAROUND=ON
-        - make --jobs=${JOBS}
-        - popd
-      script:
-        - npm run nodejs-tests
-      after_success:
-        - ./scripts/travis/publish.sh
-
-    - os: linux
-      sudo: false
-      compiler: "node-4-mason-linux-debug"
-      addons:
-        apt:
-          sources: ['ubuntu-toolchain-r-test']
-          packages: ['libstdc++-4.9-dev']
-      env: CLANG_VERSION='5.0.0' BUILD_TYPE='Debug' ENABLE_MASON=ON ENABLE_LTO=ON JOBS=3 NODE="4"
-      install:
-        - pushd ${OSRM_BUILD_DIR}
-        - |
-          cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-            -DENABLE_MASON=${ENABLE_MASON:-OFF} \
-            -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
-            -DENABLE_CCACHE=ON \
-            -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} \
-            -DENABLE_GLIBC_WORKAROUND=ON
-        - make --jobs=${JOBS}
-        - popd
-      script:
-        - npm run nodejs-tests
-      after_success:
-        - ./scripts/travis/publish.sh
-
    - os: linux
      sudo: false
      compiler: "node-8-mason-linux-release"
@@ -276,6 +227,54 @@ matrix:
      after_success:
        - ./scripts/travis/publish.sh
 
+    - os: linux
+      sudo: false
+      compiler: "node-10-mason-linux-release"
+      addons:
+        apt:
+          sources: ['ubuntu-toolchain-r-test']
+          packages: ['libstdc++-4.9-dev']
+      env: CLANG_VERSION='5.0.0' BUILD_TYPE='Release' ENABLE_MASON=ON ENABLE_LTO=ON JOBS=3 NODE="10"
+      install:
+        - pushd ${OSRM_BUILD_DIR}
+        - |
+          cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+            -DENABLE_MASON=${ENABLE_MASON:-OFF} \
+            -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
+            -DENABLE_CCACHE=ON \
+            -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} \
+            -DENABLE_GLIBC_WORKAROUND=ON
+        - make --jobs=${JOBS}
+        - popd
+      script:
+        - npm run nodejs-tests
+      after_success:
+        - ./scripts/travis/publish.sh
+
+    - os: linux
+      sudo: false
+      compiler: "node-10-mason-linux-debug"
+      addons:
+        apt:
+          sources: ['ubuntu-toolchain-r-test']
+          packages: ['libstdc++-4.9-dev']
+      env: CLANG_VERSION='5.0.0' BUILD_TYPE='Debug' ENABLE_MASON=ON ENABLE_LTO=ON JOBS=3 NODE="10"
+      install:
+        - pushd ${OSRM_BUILD_DIR}
+        - |
+          cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+            -DENABLE_MASON=${ENABLE_MASON:-OFF} \
+            -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
+            -DENABLE_CCACHE=ON \
+            -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} \
+            -DENABLE_GLIBC_WORKAROUND=ON
+        - make --jobs=${JOBS}
+        - popd
+      script:
+        - npm run nodejs-tests
+      after_success:
+        - ./scripts/travis/publish.sh
+
 before_install:
   - source $NVM_DIR/nvm.sh
   - nvm install $NODE
@@ -294,15 +293,10 @@ before_install:
      if [[ "${TRAVIS_OS_NAME}" == "osx" ]]; then
        sudo mdutil -i off /
      fi
-  - |
-    if [[ ! -f $(which yarn) ]]; then
-      npm install -g yarn
-    fi
  - export PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
  - export PUBLISH=$([[ "${TRAVIS_TAG:-}" == "v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off")
  - echo "Using ${JOBS} jobs"
-  - yarn install --ignore-scripts
-  - yarn check --ignore-scripts --integrity
+  - npm ci --ignore-scripts
  # Bootstrap cmake to be able to run mason
  - CMAKE_URL="https://mason-binaries.s3.amazonaws.com/${TRAVIS_OS_NAME}-x86_64/cmake/${CMAKE_VERSION}.tar.gz"
  - CMAKE_DIR="mason_packages/${TRAVIS_OS_NAME}-x86_64/cmake/${CMAKE_VERSION}"
@@ -381,4 +375,4 @@ script:
      fi
  - |
  - popd
-  - yarn test
+  - npm test
CHANGELOG.md (44 changes)

@@ -1,7 +1,51 @@
+# UNRELEASED
+- Changes from 5.20.0
+  - Features:
+    - ADDED: all waypoints in responses now contain a distance property between the original coordinate and the snapped location. [#5255](https://github.com/Project-OSRM/osrm-backend/pull/5255)
+    - ADDED: if `fallback_speed` is used, a new structure `fallback_speed_cells` will describe which cells contain estimated values [#5259](https://github.com/Project-OSRM/osrm-backend/pull/5259)
+    - REMOVED: we no longer publish Node 4 or 6 binary modules (they are still buildable from source) [#5314](https://github.com/Project-OSRM/osrm-backend/pull/5314)
+  - Table:
+    - ADDED: new parameter `scale_factor` which will scale the cell `duration` values by this factor. [#5298](https://github.com/Project-OSRM/osrm-backend/pull/5298)
+    - FIXED: only trigger `scale_factor` code to scan matrix when necessary. [#5303](https://github.com/Project-OSRM/osrm-backend/pull/5303)
+  - Docker:
+    - FIXED: use consistent boost version between build and runtime [#5311](https://github.com/Project-OSRM/osrm-backend/pull/5311)
+    - FIXED: don't override default permissions on /opt [#5311](https://github.com/Project-OSRM/osrm-backend/pull/5311)
+  - Matching:
+    - CHANGED: matching will now consider edges marked with is_startpoint=false, allowing matching over ferries and other previously non-matchable edge types. [#5297](https://github.com/Project-OSRM/osrm-backend/pull/5297)
+  - Profile:
+    - ADDED: Parse `source:maxspeed` and `maxspeed:type` tags to apply maxspeeds and add belgian flanders rural speed limit. [#5217](https://github.com/Project-OSRM/osrm-backend/pull/5217)
+
+# 5.20.0
+- Changes from 5.19.0:
+  - Table:
+    - CHANGED: switch to pre-calculated distances for table responses for large speedup and 10% memory increase. [#5251](https://github.com/Project-OSRM/osrm-backend/pull/5251)
+    - ADDED: new parameter `fallback_speed` which will fill `null` cells with estimated value [#5257](https://github.com/Project-OSRM/osrm-backend/pull/5257)
+    - CHANGED: Remove API check for matrix sources/destination length to be less than or equal to coordinates length. [#5298](https://github.com/Project-OSRM/osrm-backend/pull/5289)
+    - FIXED: Fix crashing bug when using fallback_speed parameter with more sources than destinations. [#5291](https://github.com/Project-OSRM/osrm-backend/pull/5291)
+  - Features:
+    - ADDED: direct mmapping of datafiles is now supported via the `--mmap` switch. [#5242](https://github.com/Project-OSRM/osrm-backend/pull/5242)
+    - REMOVED: the previous `--memory_file` switch is now deprecated and will fallback to `--mmap` [#5242](https://github.com/Project-OSRM/osrm-backend/pull/5242)
+    - ADDED: Now publishing Node 10.x LTS binary modules [#5246](https://github.com/Project-OSRM/osrm-backend/pull/5246)
+  - Windows:
+    - FIXED: Windows builds again. [#5249](https://github.com/Project-OSRM/osrm-backend/pull/5249)
+  - Docker:
+    - CHANGED: switch from Alpine Linux to Debian Buster base images [#5281](https://github.com/Project-OSRM/osrm-backend/pull/5281)
+
 # 5.19.0
 - Changes from 5.18.0:
   - Optimizations:
     - CHANGED: Use Grisu2 for serializing floating point numbers. [#5188](https://github.com/Project-OSRM/osrm-backend/pull/5188)
     - ADDED: Node bindings can return pre-rendered JSON buffer. [#5189](https://github.com/Project-OSRM/osrm-backend/pull/5189)
   - Profiles:
     - CHANGED: Bicycle profile now blacklists barriers instead of whitelisting them [#5076](https://github.com/Project-OSRM/osrm-backend/pull/5076/)
     - CHANGED: Foot profile now blacklists barriers instead of whitelisting them [#5077](https://github.com/Project-OSRM/osrm-backend/pull/5077/)
     - CHANGED: Support maxlength and maxweight in car profile [#5101](https://github.com/Project-OSRM/osrm-backend/pull/5101)
   - Bugfixes:
     - FIXED: collapsing of ExitRoundabout instructions [#5114](https://github.com/Project-OSRM/osrm-backend/issues/5114)
   - Misc:
     - CHANGED: Support up to 512 named shared memory regions [#5185](https://github.com/Project-OSRM/osrm-backend/pull/5185)
 
 # 5.18.0
 - Changes from 5.17.0:
@@ -50,7 +50,7 @@ If you want to use the CH pipeline instead replace `osrm-partition` and `osrm-cu
 
 ### Using Docker
 
-We base our Docker images ([backend](https://hub.docker.com/r/osrm/osrm-backend/), [frontend](https://hub.docker.com/r/osrm/osrm-frontend/)) on Alpine Linux and make sure they are as lightweight as possible.
+We base our Docker images ([backend](https://hub.docker.com/r/osrm/osrm-backend/), [frontend](https://hub.docker.com/r/osrm/osrm-frontend/)) on Debian and make sure they are as lightweight as possible.
 
 Download OpenStreetMap extracts for example from [Geofabrik](http://download.geofabrik.de/)
@@ -1,15 +1,13 @@
-FROM alpine:3.6 as buildstage
-
+FROM debian:buster-slim as builder
 ARG DOCKER_TAG
 RUN mkdir -p /src && mkdir -p /opt
 COPY . /src
 WORKDIR /src
 
-RUN NPROC=$(grep -c ^processor /proc/cpuinfo 2>/dev/null || 1) && \
-    echo "@testing http://dl-cdn.alpinelinux.org/alpine/edge/testing" >> /etc/apk/repositories && \
-    apk update && \
-    apk upgrade && \
-    apk add git cmake wget make libc-dev gcc g++ bzip2-dev boost-dev zlib-dev expat-dev lua5.2-dev libtbb@testing libtbb-dev@testing && \
+RUN apt-get update && \
+    apt-get -y --no-install-recommends install cmake make git gcc g++ libbz2-dev libstxxl-dev libstxxl1v5 libxml2-dev \
+    libzip-dev libboost1.67-all-dev lua5.2 liblua5.2-dev libtbb-dev -o APT::Install-Suggests=0 -o APT::Install-Recommends=0 && \
+    NPROC=$(grep -c ^processor /proc/cpuinfo 2>/dev/null || 1) && \
     echo "Building OSRM ${DOCKER_TAG}" && \
     git show --format="%H" | head -n1 > /opt/OSRM_GITSHA && \
@@ -26,20 +24,21 @@ RUN NPROC=$(grep -c ^processor /proc/cpuinfo 2>/dev/null || 1) && \
     make -j${NPROC} install && \
     cd ../profiles && \
     cp -r * /opt && \
+    \
+    strip /usr/local/bin/* && \
     rm -rf /src /usr/local/lib/libosrm*
 
 
 # Multistage build to reduce image size - https://docs.docker.com/engine/userguide/eng-image/multistage-build/#use-multi-stage-builds
 # Only the content below ends up in the image, this helps remove /src from the image (which is large)
-FROM alpine:3.6 as runstage
+FROM debian:buster-slim as runstage
 RUN mkdir -p /src && mkdir -p /opt
-RUN echo "@testing http://dl-cdn.alpinelinux.org/alpine/edge/testing" >> /etc/apk/repositories && \
-    apk update && \
-    apk add boost-filesystem boost-program_options boost-regex boost-iostreams boost-thread libgomp lua5.2 expat libtbb@testing
-COPY --from=buildstage /usr/local /usr/local
-COPY --from=buildstage /opt /opt
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends libboost-program-options1.67.0 libboost-regex1.67.0 \
+    libboost-date-time1.67.0 libboost-chrono1.67.0 libboost-filesystem1.67.0 \
+    libboost-iostreams1.67.0 libboost-thread1.67.0 expat liblua5.2-0 libtbb2 &&\
+    rm -rf /var/lib/apt/lists/*
+COPY --from=builder /usr/local /usr/local
+COPY --from=builder /opt /opt
 WORKDIR /opt
 
 EXPOSE 5000
docs/http.md (14 changes)

@@ -119,7 +119,6 @@ In addition to the [general options](#general-options) the following options are
 
 - `code` if the request was successful `Ok` otherwise see the service dependent and general status codes.
 - `waypoints` array of `Waypoint` objects sorted by distance to the input coordinate. Each object has at least the following additional properties:
-  - `distance`: Distance in meters to the supplied input coordinate.
   - `nodes`: Array of OpenStreetMap node ids.
 
 #### Example Requests
@@ -236,8 +235,10 @@ In addition to the [general options](#general-options) the following options are
 |------------|--------------------------------------------------|---------------------------------------------|
 |sources     |`{index};{index}[;{index} ...]` or `all` (default)|Use location with given index as source.     |
 |destinations|`{index};{index}[;{index} ...]` or `all` (default)|Use location with given index as destination.|
-|annotations |`duration` (default), `distance`, or `duration,distance`|Return the requested table or tables in response. Note that computing the `distances` table is currently only implemented for CH. If `annotations=distance` or `annotations=duration,distance` is requested when running a MLD router, a `NotImplemented` error will be returned.|
+|annotations |`duration` (default), `distance`, or `duration,distance`|Return the requested table or tables in response. |
+|fallback_speed|`double > 0`| If no route found between a source/destination pair, calculate the as-the-crow-flies distance, then use this speed to estimate duration.|
+|fallback_coordinate|`input` (default), or `snapped`| When using a `fallback_speed`, use the user-supplied coordinate (`input`), or the snapped location (`snapped`) for calculating distances.|
+|scale_factor|`double > 0`| Use in conjunction with `annotations=durations`. Scales the table `duration` values by this number.|
 
 Unlike other array encoded options, the length of `sources` and `destinations` can be **smaller or equal**
 to number of input locations;
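
As an illustrative aside (not part of this diff), the new Table parameters combine naturally in a single request. The sketch below assumes Node 18+ for the global `fetch` and reuses the public demo server address that appears in the example requests of these docs:

```javascript
// Hypothetical Table request exercising the new parameters together.
const url = 'http://router.project-osrm.org/table/v1/driving/'
  + '13.388860,52.517037;13.397634,52.529407;13.428555,52.523219'
  + '?annotations=duration'      // the default table
  + '&fallback_speed=5'          // estimate unroutable pairs at 5 metres/second
  + '&fallback_coordinate=snapped'; // crow-flies measured from snapped locations

fetch(url)
  .then((res) => res.json())
  .then((json) => {
    console.log(json.durations);                  // seconds, possibly estimated
    console.log(json.fallback_speed_cells || []); // which cells were estimated
  });
```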
@@ -283,6 +284,7 @@ curl 'http://router.project-osrm.org/table/v1/driving/13.388860,52.517037;13.397
   the i-th waypoint to the j-th waypoint. Values are given in meters. Can be `null` if no route between `i` and `j` can be found. Note that computing the `distances` table is currently only implemented for CH. If `annotations=distance` or `annotations=duration,distance` is requested when running a MLD router, a `NotImplemented` error will be returned.
 - `sources` array of `Waypoint` objects describing all sources in order
 - `destinations` array of `Waypoint` objects describing all destinations in order
+- `fallback_speed_cells` (optional) array of arrays containing `i,j` pairs indicating which cells contain estimated values based on `fallback_speed`. Will be absent if `fallback_speed` is not used.
 
 In case of error the following `code`s are supported in addition to the general ones:
 
@@ -383,6 +385,10 @@ All other properties might be undefined.
         2361.73,
         0
       ]
   ],
+  "fallback_speed_cells": [
+    [ 0, 1 ],
+    [ 1, 0 ]
+  ]
 }
 ```
@@ -551,6 +557,7 @@ Vector tiles contain two layers:
 | `weight ` | `integer` | how long this segment takes to traverse, in units (may differ from `duration` when artificial biasing is applied in the Lua profiles). ACTUAL ROUTING USES THIS VALUE. |
 | `name` | `string` | the name of the road this segment belongs to |
 | `rate` | `float` | the value of `length/weight` - analagous to `speed`, but using the `weight` value rather than `duration`, rounded to the nearest integer |
+| `is_startpoint` | `boolean` | whether this segment can be used as a start/endpoint for routes |
 
 `turns` layer:
 
@@ -905,6 +912,7 @@ Object used to describe waypoint on a route.
 
 - `name` Name of the street the coordinate snapped to
 - `location` Array that contains the `[longitude, latitude]` pair of the snapped coordinate
+- `distance` The distance, in metres, from the input coordinate to the snapped coordinate
 - `hint` Unique internal identifier of the segment (ephemeral, not constant over data updates)
   This can be used on subsequent request to significantly speed up the query and to connect multiple services.
   E.g. you can use the `hint` value obtained by the `nearest` query as `hint` values for `route` inputs.
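
To make that hint round-trip concrete, here is a minimal illustrative sketch (not part of this diff) using the Node bindings documented below; it assumes a prepared `network.osrm` dataset (a placeholder path) and the `hints` route option:

```javascript
var OSRM = require('osrm');
var osrm = new OSRM('network.osrm');           // placeholder dataset path
var coords = [[13.388860, 52.517037], [13.397634, 52.529407]];

// Fetch a hint for each coordinate via `nearest`, then pass the hints to
// `route` so the second query can reuse the snapping work already done.
var hints = [];
var done = 0;
coords.forEach(function (c, i) {
    osrm.nearest({ coordinates: [c] }, function (err, res) {
        if (err) throw err;
        hints[i] = res.waypoints[0].hint;      // ephemeral snapping identifier
        if (++done === coords.length) {
            osrm.route({ coordinates: coords, hints: hints }, function (err, result) {
                if (err) throw err;
                console.log(result.routes[0].duration);
            });
        }
    });
});
```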
@@ -25,7 +25,9 @@ var osrm = new OSRM('network.osrm');
   Make sure you prepared the dataset with the correct toolchain.
 - `options.shared_memory` **[Boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** Connects to the persistent shared memory datastore.
   This requires you to run `osrm-datastore` prior to creating an `OSRM` object.
-- `options.memory_file` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)?** Path to a file on disk to store the memory using mmap.
+- `options.memory_file` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)?** *DEPRECATED*
+  Old behaviour: Path to a file on disk to store the memory using mmap. Current behaviour: setting this value is the same as setting `mmap_memory: true`.
+- `options.mmap_memory` **[Boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** Map on-disk files to virtual memory addresses (mmap), rather than loading into RAM.
 - `options.path` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)?** The path to the `.osrm` files. This is mutually exclusive with setting {options.shared_memory} to true.
 - `options.max_locations_trip` **[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Max. locations supported in trip query (default: unlimited).
 - `options.max_locations_viaroute` **[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Max. locations supported in viaroute query (default: unlimited).
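
As a brief illustration (not part of this diff), both spellings of the mmap behaviour described above; the dataset path is a placeholder:

```javascript
var OSRM = require('osrm');

// Deprecated spelling: any memory_file value now simply enables mmap.
var viaMemoryFile = new OSRM({ path: 'network.osrm', memory_file: 'ignored.mem' });

// Preferred spelling after this change: map the on-disk files instead of loading them.
var viaMmap = new OSRM({ path: 'network.osrm', mmap_memory: true });
```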
@@ -127,6 +129,9 @@ tables. Optionally returns distance table.
 - `options.destinations` **[Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array)?** An array of `index` elements (`0 <= integer <
   #coordinates`) to use location with given index as destination. Default is to use all.
 - `options.approaches` **[Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array)?** Keep waypoints on curb side. Can be `null` (unrestricted, default) or `curb`.
+- `options.fallback_speed` **[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Replace `null` responses in result with as-the-crow-flies estimates based on `fallback_speed`. Value is in metres/second.
+- `options.fallback_coordinate` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)?** Either `input` (default) or `snapped`. If using a `fallback_speed`, use either the user-supplied coordinate (`input`), or the snapped coordinate (`snapped`) for calculating the as-the-crow-flies distance between two points.
+- `options.scale_factor` **[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)?** Multiply the table duration values in the table by this number for more controlled input into a route optimization solver.
 - `callback` **[Function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function)**
 
 **Examples**
@@ -152,6 +157,7 @@ Returns **[Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Refer
   Values are given in seconds.
 **`sources`**: array of [`Waypoint`](#waypoint) objects describing all sources in order.
 **`destinations`**: array of [`Waypoint`](#waypoint) objects describing all destinations in order.
+**`fallback_speed_cells`**: (optional) if `fallback_speed` is used, will be an array of arrays of `row,column` values, indicating which cells contain estimated values.
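
A small illustrative sketch (not part of this diff) tying the new table options to the new return field; the dataset path is a placeholder:

```javascript
var OSRM = require('osrm');
var osrm = new OSRM('network.osrm');

osrm.table({
    coordinates: [[13.388860, 52.517037], [13.397634, 52.529407]],
    fallback_speed: 5,              // metres/second, per the option above
    fallback_coordinate: 'snapped', // crow-flies from the snapped locations
    scale_factor: 1.2               // stretch durations for a solver
}, function (err, result) {
    if (err) throw err;
    console.log(result.durations);
    // [row, column] pairs whose durations are estimates, absent when unused:
    console.log(result.fallback_speed_cells || []);
});
```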
 
 ### tile
 
@@ -297,9 +303,30 @@ Returns **[Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Refer
   2) `waypoint_index`: index of the point in the trip.
 **`trips`**: an array of [`Route`](#route) objects that assemble the trace.
 
-## Responses
+## Plugin behaviour
+
+All plugins support a second additional object that is available to configure some NodeJS specific behaviours.
+
+- `plugin_config` **[Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object)** Object literal containing parameters for the trip query.
+- `plugin_config.format` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)?** The format of the result object to various API calls. Valid options are `object` (default), which returns a standard Javascript object, as described above, and `json_buffer`, which will return a NodeJS **[Buffer](https://nodejs.org/api/buffer.html)** object, containing a JSON string. The latter has the advantage that it can be immediately serialized to disk/sent over the network, and the generation of the string is performed outside the main NodeJS event loop. This option is ignored by the `tile` plugin.
+
+**Examples**
+
+```javascript
+var osrm = new OSRM('network.osrm');
+var options = {
+  coordinates: [
+    [13.36761474609375, 52.51663871100423],
+    [13.374481201171875, 52.506191342034576]
+  ]
+};
+osrm.route(options, { format: "json_buffer" }, function(err, response) {
+  if (err) throw err;
+  console.log(response.toString("utf-8"));
+});
+```
+
+## Responses
 
 ### Route
 
@@ -307,8 +334,8 @@ Represents a route through (potentially multiple) waypoints.
 
 **Parameters**
 
-- `exteral` **documentation** in
-  [`osrm-backend`](../http.md#route)
+- **documentation** in
+  [`osrm-backend`](../http.md#route-object)
 
 ### RouteLeg
 
@@ -316,8 +343,8 @@ Represents a route between two waypoints.
 
 **Parameters**
 
-- `exteral` **documentation** in
-  [`osrm-backend`](../http.md#routeleg)
+- **documentation** in
+  [`osrm-backend`](../http.md#routeleg-object)
 
 ### RouteStep
 
@@ -326,15 +353,15 @@ single way to the subsequent step.
 
 **Parameters**
 
-- `exteral` **documentation** in
-  [`osrm-backend`](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/http.md#routestep)
+- **documentation** in
+  [`osrm-backend`](../http.md#routestep-object)
 
 ### StepManeuver
 
 **Parameters**
 
-- `exteral` **documentation** in
-  [`osrm-backend`](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/http.md#stepmaneuver)
+- **documentation** in
+  [`osrm-backend`](../http.md#stepmaneuver-object)
 
 ### Waypoint
 
@@ -342,5 +369,5 @@ Object used to describe waypoint on a route.
 
 **Parameters**
 
-- `exteral` **documentation** in
-  [`osrm-backend`](https://github.com/Project-OSRM/osrm-backend/blob/master/docs/http.md#waypoint)
+- **documentation** in
+  [`osrm-backend`](../http.md#waypoint-object)
@@ -109,3 +109,12 @@ Feature: Car - Handle ferry routes
         When I route I should get
             | from | to | route     | modes       | time |
             | c    | d  | bcde,bcde | ferry,ferry | 600s |
+
+        Given the query options
+            | geometries | geojson |
+            | overview   | full    |
+
+        # Note that matching *should* work across unsnappable ferries
+        When I match I should get
+            | trace  | geometry                                                                                                  | duration |
+            | abcdef | 1,1,1.000899,1,1.000899,1,1.002697,1,1.002697,1,1.003596,1,1.003596,1,1.005394,1,1.005394,1,1.006293,1 | 610.9    |
@@ -137,3 +137,28 @@ OSRM will use 4/5 of the projected free-flow speed.
         | primary |    |    | 30 | -1 |  | 23 km/h |  | 6.7 |
         | primary | 20 | 30 |    | -1 |  | 15 km/h |  | 4.4 |
         | primary | 20 |    | 30 | -1 |  | 23 km/h |  | 6.7 |
+
+
+    Scenario: Car - Respect source:maxspeed
+        Given the node map
+            """
+            a b c d e f g
+            """
+
+        And the ways
+            | nodes | highway | source:maxspeed | maxspeed |
+            | ab    | trunk   |                 |          |
+            | bc    | trunk   |                 | 60       |
+            | cd    | trunk   | FR:urban        |          |
+            | de    | trunk   | CH:rural        |          |
+            | ef    | trunk   | CH:trunk        |          |
+            | fg    | trunk   | CH:motorway     |          |
+
+        When I route I should get
+            | from | to | route | speed   |
+            | a    | b  | ab,ab | 85 km/h |
+            | b    | c  | bc,bc | 48 km/h |
+            | c    | d  | cd,cd | 40 km/h |
+            | d    | e  | de,de | 64 km/h |
+            | e    | f  | ef,ef | 80 km/h |
+            | f    | g  | fg,fg | 96 km/h |
@@ -84,9 +84,49 @@ class OSRMDirectLoader extends OSRMBaseLoader {
                 throw new Error(util.format('osrm-routed %s: %s', errorReason(err), err.cmd));
             }
         });
 
         this.child.readyFunc = (data) => {
             if (/running and waiting for requests/.test(data)) {
                 this.child.stdout.removeListener('data', this.child.readyFunc);
                 callback();
             }
         };
         this.child.stdout.on('data',this.child.readyFunc);
     }
 };
 
+class OSRMmmapLoader extends OSRMBaseLoader {
+    constructor (scope) {
+        super(scope);
+    }
+
+    load (inputFile, callback) {
+        this.inputFile = inputFile;
+        this.shutdown(() => {
+            this.launch(callback);
+        });
+    }
+
+    osrmUp (callback) {
+        if (this.osrmIsRunning()) return callback(new Error("osrm-routed already running!"));
+
+        const command_arguments = util.format('%s -p %d -i %s -a %s --mmap', this.inputFile, this.scope.OSRM_PORT, this.scope.OSRM_IP, this.scope.ROUTING_ALGORITHM);
+        this.child = this.scope.runBin('osrm-routed', command_arguments, this.scope.environment, (err) => {
+            if (err && err.signal !== 'SIGINT') {
+                this.child = null;
+                throw new Error(util.format('osrm-routed %s: %s', errorReason(err), err.cmd));
+            }
+        });
+
+        this.child.readyFunc = (data) => {
+            if (/running and waiting for requests/.test(data)) {
+                this.child.stdout.removeListener('data', this.child.readyFunc);
+                callback();
+            }
+        };
+        this.child.stdout.on('data',this.child.readyFunc);
+    }
+};
+
 class OSRMDatastoreLoader extends OSRMBaseLoader {
     constructor (scope) {
@@ -135,22 +175,32 @@ class OSRMLoader {
         this.scope = scope;
         this.sharedLoader = new OSRMDatastoreLoader(this.scope);
         this.directLoader = new OSRMDirectLoader(this.scope);
+        this.mmapLoader = new OSRMmmapLoader(this.scope);
         this.method = scope.DEFAULT_LOAD_METHOD;
     }
 
     load (inputFile, callback) {
+        if (!this.loader) {
+            this.loader = {shutdown: (cb) => cb() };
+        }
         if (this.method === 'datastore') {
-            this.directLoader.shutdown((err) => {
+            this.loader.shutdown((err) => {
                 if (err) return callback(err);
+                this.loader = this.sharedLoader;
                 this.sharedLoader.load(inputFile, callback);
             });
         } else if (this.method === 'directly') {
-            this.sharedLoader.shutdown((err) => {
+            this.loader.shutdown((err) => {
                 if (err) return callback(err);
+                this.loader = this.directLoader;
                 this.directLoader.load(inputFile, callback);
             });
+        } else if (this.method === 'mmap') {
+            this.loader.shutdown((err) => {
+                if (err) return callback(err);
+                this.loader = this.mmapLoader;
+                this.mmapLoader.load(inputFile, callback);
+            });
         } else {
             callback(new Error('*** Unknown load method ' + method));
         }
@@ -295,7 +295,7 @@ module.exports = function () {
         this.reprocess(callback);
     });
 
-    this.Given(/^osrm\-routed is stopped$/, (callback) => {
+    this.Given(/^osrm-routed is stopped$/, (callback) => {
         this.OSRMLoader.shutdown(callback);
     });
 
@@ -3,22 +3,25 @@ var util = require('util');
 module.exports = function () {
     const durationsRegex = new RegExp(/^I request a travel time matrix I should get$/);
     const distancesRegex = new RegExp(/^I request a travel distance matrix I should get$/);
+    const estimatesRegex = new RegExp(/^I request a travel time matrix I should get estimates for$/);
 
     const DURATIONS_NO_ROUTE = 2147483647;     // MAX_INT
     const DISTANCES_NO_ROUTE = 3.40282e+38;    // MAX_FLOAT
 
     this.When(durationsRegex, function(table, callback) {tableParse.call(this, table, DURATIONS_NO_ROUTE, 'durations', callback);}.bind(this));
     this.When(distancesRegex, function(table, callback) {tableParse.call(this, table, DISTANCES_NO_ROUTE, 'distances', callback);}.bind(this));
+    this.When(estimatesRegex, function(table, callback) {tableParse.call(this, table, DISTANCES_NO_ROUTE, 'fallback_speed_cells', callback);}.bind(this));
 };
 
 const durationsParse = function(v) { return isNaN(parseInt(v)); };
 const distancesParse = function(v) { return isNaN(parseFloat(v)); };
+const estimatesParse = function(v) { return isNaN(parseFloat(v)); };
 
 function tableParse(table, noRoute, annotation, callback) {
 
-    const parse = annotation == 'distances' ? distancesParse : durationsParse;
+    const parse = annotation == 'distances' ? distancesParse : (annotation == 'durations' ? durationsParse : estimatesParse);
     const params = this.queryParams;
-    params.annotations = annotation == 'distances' ? 'distance' : 'duration';
+    params.annotations = ['durations','fallback_speed_cells'].indexOf(annotation) !== -1 ? 'duration' : 'distance';
 
     var tableRows = table.raw();
 
@@ -61,11 +64,26 @@ function tableParse(table, noRoute, annotation, callback) {
 
     var json = JSON.parse(response.body);
 
-    var result = json[annotation].map(row => {
+    var result = {};
+    if (annotation === 'fallback_speed_cells') {
+        result = table.raw().map(row => row.map(() => ''));
+        json[annotation].forEach(pair => {
+            result[pair[0]+1][pair[1]+1] = 'Y';
+        });
+        result = result.slice(1).map(row => {
+            var hashes = {};
+            row.slice(1).forEach((v,i) => {
+                hashes[tableRows[0][i+1]] = v;
+            });
+            return hashes;
+        });
+    } else {
+        result = json[annotation].map(row => {
             var hashes = {};
             row.forEach((v, i) => { hashes[tableRows[0][i+1]] = parse(v) ? '' : v; });
             return hashes;
         });
+    }
 
     var testRow = (row, ri, cb) => {
         for (var k in result[ri]) {
@@ -21,11 +21,11 @@ module.exports = function () {
         });
     };
 
-    this.When(/^I run "osrm\-routed\s?(.*?)"$/, { timeout: this.TIMEOUT }, (options, callback) => {
+    this.When(/^I run "osrm-routed\s?(.*?)"$/, { timeout: this.TIMEOUT }, (options, callback) => {
         this.runAndSafeOutput('osrm-routed', options, callback);
     });
 
-    this.When(/^I run "osrm\-(extract|contract|partition|customize)\s?(.*?)"$/, (binary, options, callback) => {
+    this.When(/^I run "osrm-(extract|contract|partition|customize)\s?(.*?)"$/, (binary, options, callback) => {
         const stamp = this.processedCacheFile + '.stamp_' + binary;
         this.runAndSafeOutput('osrm-' + binary, options, (err) => {
             if (err) return callback(err);
@@ -33,11 +33,11 @@ module.exports = function () {
         });
     });
 
-    this.When(/^I try to run "(osrm\-[a-z]+)\s?(.*?)"$/, (binary, options, callback) => {
+    this.When(/^I try to run "(osrm-[a-z]+)\s?(.*?)"$/, (binary, options, callback) => {
         this.runAndSafeOutput(binary, options, () => { callback(); });
     });
 
-    this.When(/^I run "osrm\-datastore\s?(.*?)"(?: with input "([^"]*)")?$/, (options, input, callback) => {
+    this.When(/^I run "osrm-datastore\s?(.*?)"(?: with input "([^"]*)")?$/, (options, input, callback) => {
         let child = this.runAndSafeOutput('osrm-datastore', options, callback);
         if (input !== undefined)
             child.stdin.write(input);
 
@@ -69,7 +69,7 @@ module.exports = function () {
                 outputRow[direction] = result[direction].status ?
                     'x' : '';
                 break;
-            case /^[\d\.]+ s/.test(want):
+            case /^[\d.]+ s/.test(want):
                 // the result here can come back as a non-number value like
                 // `diff`, but we only want to apply the unit when it comes
                 // back as a number, for tableDiff's literal comparison
 
@@ -145,7 +145,7 @@ module.exports = function() {
 
     // converts the scenario titles in file prefixes
     this.getScenarioID = (scenario) => {
-        let name = scenario.getName().toLowerCase().replace(/[\/\-'=,\(\):\*#]/g, '')
+        let name = scenario.getName().toLowerCase().replace(/[/\-'=,():*#]/g, '')
             .replace(/\s/g, '_').replace(/__/g, '_').replace(/\.\./g, '.')
             .substring(0, 64);
         return util.format('%d_%s', scenario.getLine(), name);
@@ -17,12 +17,12 @@ module.exports = {
         return true;
 
     var matchPercent = want.match(/(.*)\s+~(.+)%$/),
-        matchAbs = want.match(/(.*)\s+\+\-(.+)$/),
+        matchAbs = want.match(/(.*)\s+\+-(.+)$/),
         matchRe = want.match(/^\/(.*)\/$/),
         // we use this for matching before/after bearing
-        matchBearingListAbs = want.match(/^((\d+)->(\d+))(,(\d+)->(\d+))*\s+\+\-(.+)$/),
-        matchIntersectionListAbs = want.match(/^(((((true|false):\d+)\s{0,1})+,{0,1})+;{0,1})+\s+\+\-(.+)$/),
-        matchRangeNumbers = want.match(/\d+\+\-\d+/);
+        matchBearingListAbs = want.match(/^((\d+)->(\d+))(,(\d+)->(\d+))*\s+\+-(.+)$/),
+        matchIntersectionListAbs = want.match(/^(((((true|false):\d+)\s{0,1})+,{0,1})+;{0,1})+\s+\+-(.+)$/),
+        matchRangeNumbers = want.match(/\d+\+-\d+/);
 
     function inRange(margin, got, want) {
         var fromR = parseFloat(want) - margin,
@@ -32,7 +32,7 @@ module.exports = function () {
     this.DEFAULT_ENVIRONMENT = Object.assign({STXXLCFG: stxxl_config}, process.env);
     this.DEFAULT_PROFILE = 'bicycle';
     this.DEFAULT_INPUT_FORMAT = 'osm';
-    this.DEFAULT_LOAD_METHOD = 'datastore';
+    this.DEFAULT_LOAD_METHOD = process.argv[process.argv.indexOf('-m') +1].match('mmap') ? 'mmap' : 'datastore';
     this.DEFAULT_ORIGIN = [1,1];
     this.OSM_USER = 'osrm';
     this.OSM_UID = 1;
@@ -80,7 +80,7 @@ module.exports = function () {
 
     // eslint-disable-next-line no-console
     console.info(util.format('Node Version', process.version));
-    if (parseInt(process.version.match(/v(\d)/)[1]) < 4) throw new Error('*** Please upgrade to Node 4.+ to run OSRM cucumber tests');
+    if (parseInt(process.version.match(/v(\d+)/)[1]) < 4) throw new Error('*** Please upgrade to Node 4.+ to run OSRM cucumber tests');
 
     fs.exists(this.TEST_PATH, (exists) => {
         if (exists)
 
@@ -115,7 +115,7 @@ module.exports = function () {
 
     if (headers.has('weight')) {
         if (row.weight.length) {
-            if (!row.weight.match(/[\d\.]+/))
+            if (!row.weight.match(/[\d.]+/))
                 return cb(new Error('*** Weight must be specified as a numeric value. (ex: 8)'));
             got.weight = instructions ? util.format('%d', weight) : '';
         } else {
@@ -5,21 +5,49 @@ Feature: Basic Distance Matrix
     Background:
         Given the profile "testbot"
+        And the partition extra arguments "--small-component-size 1 --max-cell-sizes 2,4,8,16"
 
-    Scenario: Testbot - Travel distance matrix of minimal network
+    Scenario: Testbot - Travel distance matrix of small grid
         Given the node map
             """
-            a b
+            a b c
+            d e f
             """
 
         And the ways
             | nodes |
-            | ab    |
+            | abc   |
+            | def   |
+            | ad    |
+            | be    |
+            | cf    |
 
         When I request a travel distance matrix I should get
-            |   | a      | b      |
-            | a | 0      | 100+-1 |
-            | b | 100+-1 | 0      |
+            |   | a     | b     | e     | f     |
+            | a | 0     | 100.1 | 199.5 | 299.5 |
+            | b | 100.1 | 0     | 99.4  | 199.5 |
+            | e | 199.5 | 99.4  | 0     | 100.1 |
+            | f | 299.5 | 199.5 | 100.1 | 0     |
+
+    Scenario: Testbot - Travel distance matrix of minimal network exact distances
+        Given the node map
+            """
+            a z
+            b
+            c
+            d
+            """
+
+        And the ways
+            | nodes |
+            | az    |
+            | zbcd  |
+
+        When I request a travel distance matrix I should get
+            |   | a     | z     | b     | c     | d     |
+            | a | 0     | 100.1 | 199.5 | 298.9 | 398.3 |
+            | z | 100.1 | 0     | 99.4  | 198.8 | 298.2 |
+            | b | 199.5 | 99.4  | 0     | 99.4  | 198.8 |
+            | c | 298.9 | 198.8 | 99.4  | 0     | 99.4  |
+            | d | 398.3 | 298.2 | 198.8 | 99.4  | 0     |
 
     Scenario: Testbot - Travel distance matrix of minimal network with toll exclude
         Given the query options
@@ -40,10 +68,10 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a      | b      | c      | d      |
-            | a | 0      | 100+-1 |        |        |
-            | b | 100+-1 | 0      |        |        |
-            | c |        |        | 0      | 100+-1 |
-            | d |        |        | 100+-1 | 0      |
+            | a | 0      | 100.1  |        |        |
+            | b | 100.1  | 0      |        |        |
+            | c |        |        | 0      | 100.1  |
+            | d |        |        | 100.1  | 0      |
 
     Scenario: Testbot - Travel distance matrix of minimal network with motorway exclude
         Given the query options
@@ -64,7 +92,7 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a | b      | c      | d      |
-            | a | 0 | 300+-2 | 100+-2 | 200+-2 |
+            | a | 0 | 298.9  | 99.4   | 199.5  |
 
     Scenario: Testbot - Travel distance matrix of minimal network disconnected motorway exclude
         Given the query options
@@ -85,7 +113,7 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a | b     | e |
-            | a | 0 | 50+-1 |   |
+            | a | 0 | 50.1  |   |
 
     Scenario: Testbot - Travel distance matrix of minimal network with motorway and toll excludes
         Given the query options
@@ -106,7 +134,7 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a | b      | e | g |
-            | a | 0 | 100+-1 |   |   |
+            | a | 0 | 100.1  |   |   |
 
     Scenario: Testbot - Travel distance matrix with different way speeds
         Given the node map
@@ -122,21 +150,21 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a      | b      | c      | d      |
-            | a | 0      | 100+-1 | 200+-1 | 300+-1 |
-            | b | 100+-1 | 0      | 100+-1 | 200+-1 |
-            | c | 200+-1 | 100+-1 | 0      | 100+-1 |
-            | d | 300+-1 | 200+-1 | 100+-1 | 0      |
+            | a | 0      | 100.1  | 200.1  | 300.2  |
+            | b | 100.1  | 0      | 100.1  | 200.1  |
+            | c | 200.1  | 100.1  | 0      | 100.1  |
+            | d | 300.2  | 200.1  | 100.1  | 0      |
 
         When I request a travel distance matrix I should get
             |   | a | b      | c      | d      |
-            | a | 0 | 100+-1 | 200+-1 | 300+-1 |
+            | a | 0 | 100.1  | 200.1  | 300.2  |
 
         When I request a travel distance matrix I should get
             |   | a      |
             | a | 0      |
-            | b | 100+-1 |
-            | c | 200+-1 |
-            | d | 300+-1 |
+            | b | 100.1  |
+            | c | 200.1  |
+            | d | 300.2  |
 
     Scenario: Testbot - Travel distance matrix of small grid
         Given the node map
@@ -155,10 +183,10 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a      | b      | e      | f      |
-            | a | 0      | 100+-1 | 200+-1 | 300+-1 |
-            | b | 100+-1 | 0      | 100+-1 | 200+-1 |
-            | e | 200+-1 | 100+-1 | 0      | 100+-1 |
-            | f | 300+-1 | 200+-1 | 100+-1 | 0      |
+            | a | 0      | 100.1  | 199.5  | 299.5  |
+            | b | 100.1  | 0      | 99.4   | 199.5  |
+            | e | 199.5  | 99.4   | 0      | 100.1  |
+            | f | 299.5  | 199.5  | 100.1  | 0      |
 
     Scenario: Testbot - Travel distance matrix of network with unroutable parts
         Given the node map
@@ -172,7 +200,7 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a | b      |
-            | a | 0 | 100+-1 |
+            | a | 0 | 100.1  |
             | b |   | 0      |
 
     Scenario: Testbot - Travel distance matrix of network with oneways
@@ -190,10 +218,10 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | x      | y      | d      | e      |
-            | x | 0      | 300+-2 | 400+-2 | 300+-2 |
-            | y | 500+-2 | 0      | 300+-2 | 200+-2 |
-            | d | 200+-2 | 300+-2 | 0      | 300+-2 |
-            | e | 300+-2 | 400+-2 | 100+-2 | 0      |
+            | x | 0      | 300.2  | 399.6  | 299.5  |
+            | y | 499    | 0      | 299.5  | 199.5  |
+            | d | 199.5  | 299.5  | 0      | 298.9  |
+            | e | 299.5  | 399.6  | 100.1  | 0      |
 
     Scenario: Testbot - Rectangular travel distance matrix
         Given the node map
@@ -212,53 +240,53 @@ Feature: Basic Distance Matrix
 
         When I route I should get
             | from | to | distance  |
-            | e    | a  | 200m +- 1 |
-            | e    | b  | 100m +- 1 |
-            | f    | a  | 300m +- 1 |
-            | f    | b  | 200m +- 1 |
+            | e    | a  | 200m      |
+            | e    | b  | 100m      |
+            | f    | a  | 299.9m    |
+            | f    | b  | 200m      |
 
         When I request a travel distance matrix I should get
             |   | a | b      | e      | f      |
-            | a | 0 | 100+-1 | 200+-1 | 300+-1 |
+            | a | 0 | 100.1  | 199.5  | 299.5  |
 
         When I request a travel distance matrix I should get
             |   | a      |
             | a | 0      |
-            | b | 100+-1 |
-            | e | 200+-1 |
-            | f | 300+-1 |
+            | b | 100.1  |
+            | e | 199.5  |
+            | f | 299.5  |
 
         When I request a travel distance matrix I should get
             |   | a      | b      | e      | f      |
-            | a | 0      | 100+-1 | 200+-1 | 300+-1 |
-            | b | 100+-1 | 0      | 100+-1 | 200+-1 |
+            | a | 0      | 100.1  | 199.5  | 299.5  |
+            | b | 100.1  | 0      | 99.4   | 199.5  |
 
         When I request a travel distance matrix I should get
             |   | a      | b      |
-            | a | 0      | 100+-1 |
-            | b | 100+-1 | 0      |
-            | e | 200+-1 | 100+-1 |
-            | f | 300+-1 | 200+-1 |
+            | a | 0      | 100.1  |
+            | b | 100.1  | 0      |
+            | e | 199.5  | 99.4   |
+            | f | 299.5  | 199.5  |
 
         When I request a travel distance matrix I should get
             |   | a      | b      | e      | f      |
-            | a | 0      | 100+-1 | 200+-1 | 300+-1 |
-            | b | 100+-1 | 0      | 100+-1 | 200+-1 |
-            | e | 200+-1 | 100+-1 | 0      | 100+-1 |
+            | a | 0      | 100.1  | 199.5  | 299.5  |
+            | b | 100.1  | 0      | 99.4   | 199.5  |
+            | e | 199.5  | 99.4   | 0      | 100.1  |
 
         When I request a travel distance matrix I should get
             |   | a      | b      | e      |
-            | a | 0      | 100+-1 | 200+-1 |
-            | b | 100+-1 | 0      | 100+-1 |
-            | e | 200+-1 | 100+-1 | 0      |
-            | f | 300+-1 | 200+-1 | 100+-1 |
+            | a | 0      | 100.1  | 199.5  |
+            | b | 100.1  | 0      | 99.4   |
+            | e | 199.5  | 99.4   | 0      |
+            | f | 299.5  | 199.5  | 100.1  |
 
         When I request a travel distance matrix I should get
             |   | a      | b      | e      | f      |
-            | a | 0      | 100+-1 | 200+-1 | 300+-1 |
-            | b | 100+-1 | 0      | 100+-1 | 200+-1 |
-            | e | 200+-1 | 100+-1 | 0      | 100+-1 |
-            | f | 300+-1 | 200+-1 | 100+-1 | 0      |
+            | a | 0      | 100.1  | 199.5  | 299.5  |
+            | b | 100.1  | 0      | 99.4   | 199.5  |
+            | e | 199.5  | 99.4   | 0      | 100.1  |
+            | f | 299.5  | 199.5  | 100.1  | 0      |
 
     Scenario: Testbot - Travel distance 3x2 matrix
         Given the node map
@@ -278,8 +306,8 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | b      | e      | f      |
-            | a | 100+-1 | 200+-1 | 300+-1 |
-            | b | 0      | 100+-1 | 200+-1 |
+            | a | 100.1  | 199.5  | 299.5  |
+            | b | 0      | 99.4   | 199.5  |
 
     Scenario: Testbot - All coordinates are from same small component
         Given a grid size of 300 meters
@@ -300,8 +328,8 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | f      | g      |
-            | f | 0      | 300+-2 |
-            | g | 300+-2 | 0      |
+            | f | 0      | 298.2  |
+            | g | 298.2  | 0      |
 
     Scenario: Testbot - Coordinates are from different small component and snap to big CC
         Given a grid size of 300 meters
@@ -334,10 +362,10 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | f      | g      | h      | i      |
-            | f | 0      | 300+-2 | 0      | 300+-2 |
-            | g | 300+-2 | 0      | 300+-2 | 0      |
-            | h | 0      | 300+-2 | 0      | 300+-2 |
-            | i | 300+-2 | 0      | 300+-2 | 0      |
+            | f | 0      | 298.2  | 0      | 298.2  |
+            | g | 298.2  | 0      | 298.2  | 0      |
+            | h | 0      | 298.2  | 0      | 298.2  |
+            | i | 298.2  | 0      | 298.2  | 0      |
 
     Scenario: Testbot - Travel distance matrix with loops
         Given the node map
@@ -355,10 +383,10 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | 1      | 2      | 3      | 4      |
-            | 1 | 0      | 100+-1 | 400+-1 | 500+-1 |
-            | 2 | 700+-1 | 0      | 300+-1 | 400+-1 |
-            | 3 | 400+-1 | 500+-1 | 0      | 100+-1 |
-            | 4 | 300+-1 | 400+-1 | 700+-1 | 0      |
+            | 1 | 0      | 100.1  | 399.6  | 499.7  |
+            | 2 | 699.1  | 0      | 299.5  | 399.6  |
+            | 3 | 399.6  | 499.7  | 0      | 100.1  |
+            | 4 | 299.5  | 399.6  | 699.1  | 0      |
 
 
     Scenario: Testbot - Travel distance matrix based on segment durations
@@ -396,11 +424,11 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a      | b      | c      | d      | e      |
-            | a | 0      | 100+-2 | 200+-2 | 300+-2 | 400+-2 |
-            | b | 100+-2 | 0      | 100+-2 | 200+-2 | 300+-2 |
-            | c | 200+-2 | 100+-2 | 0      | 100+-2 | 200+-2 |
-            | d | 300+-2 | 200+-2 | 100+-2 | 0      | 300+-2 |
-            | e | 400+-2 | 300+-2 | 200+-2 | 300+-2 | 0      |
+            | a | 0      | 100.1  | 200.1  | 300.2  | 398.9  |
+            | b | 100.1  | 0      | 100.1  | 200.1  | 298.9  |
+            | c | 200.1  | 100.1  | 0      | 100.1  | 198.8  |
+            | d | 300.2  | 200.1  | 100.1  | 0      | 298.9  |
+            | e | 398.9  | 298.9  | 198.8  | 298.9  | 0      |
 
     Scenario: Testbot - Travel distance matrix for alternative loop paths
         Given the profile file
@@ -440,25 +468,25 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | 1       | 2       | 3       | 4       | 5       | 6       | 7       | 8       |
-            | 1 | 0       | 1100+-5 | 300+-5  | 200+-5  | 600+-5  | 500+-5  | 900+-5  | 800+-5  |
-            | 2 | 100+-5  | 0       | 400+-5  | 300+-5  | 700+-5  | 600+-5  | 1000+-5 | 900+-5  |
-            | 3 | 900+-5  | 800+-5  | 0       | 1100+-5 | 300+-5  | 200+-5  | 600+-5  | 500+-5  |
-            | 4 | 1000+-5 | 900+-5  | 100+-5  | 0       | 400+-5  | 300+-5  | 700+-5  | 600+-5  |
-            | 5 | 600+-5  | 500+-5  | 900+-5  | 800+-5  | 0       | 1100+-5 | 300+-5  | 200+-5  |
-            | 6 | 700+-5  | 600+-5  | 1000+-5 | 900+-5  | 100+-5  | 0       | 400+-5  | 300+-5  |
-            | 7 | 300+-5  | 200+-5  | 600+-5  | 500+-5  | 900+-5  | 800+-5  | 0       | 1100+-5 |
-            | 8 | 400+-5  | 300+-5  | 700+-5  | 600+-5  | 1000+-5 | 900+-5  | 100+-5  | 0       |
+            | 1 | 0       | 1096.7  | 298.9   | 199.5   | 598.4   | 498.3   | 897.3   | 797.9   |
+            | 2 | 100.1   | 0       | 398.9   | 299.5   | 698.5   | 598.4   | 997.3   | 897.9   |
+            | 3 | 897.9   | 797.9   | 0       | 1097.4  | 299.5   | 199.5   | 598.4   | 499     |
+            | 4 | 997.3   | 897.3   | 99.4    | 0       | 398.9   | 298.9   | 697.8   | 598.4   |
+            | 5 | 598.4   | 498.3   | 897.3   | 797.9   | 0       | 1096.7  | 298.9   | 199.5   |
+            | 6 | 698.5   | 598.4   | 997.3   | 897.9   | 100.1   | 0       | 398.9   | 299.5   |
+            | 7 | 299.5   | 199.5   | 598.4   | 499     | 897.9   | 797.9   | 0       | 1097.4  |
+            | 8 | 398.9   | 298.9   | 697.8   | 598.4   | 997.3   | 897.3   | 99.4    | 0       |
 
         When I request a travel distance matrix I should get
             |   | 1       |
             | 1 | 0       |
-            | 2 | 100+-5  |
-            | 3 | 900+-5  |
-            | 4 | 1000+-5 |
-            | 5 | 600+-5  |
-            | 6 | 700+-5  |
-            | 7 | 300+-5  |
-            | 8 | 400+-5  |
+            | 2 | 100.1   |
+            | 3 | 897.9   |
+            | 4 | 997.3   |
+            | 5 | 598.4   |
+            | 6 | 698.5   |
+            | 7 | 299.5   |
+            | 8 | 398.9   |
 
     Scenario: Testbot - Travel distance matrix with ties
         Given the node map
@@ -483,23 +511,23 @@ Feature: Basic Distance Matrix
             | from | to | route    | distance  |
             | a    | b  | ab,ab    | 450m      |
             | a    | c  | ac,ac    | 200m      |
-            | a    | d  | ac,dc,dc | 500m +- 1 |
+            | a    | d  | ac,dc,dc | 499.9m    |
 
         When I request a travel distance matrix I should get
             |   | a | b      | c      | d      |
-            | a | 0 | 450+-2 | 200+-2 | 500+-2 |
+            | a | 0 | 450.3  | 198.8  | 499    |
 
         When I request a travel distance matrix I should get
             |   | a      |
             | a | 0      |
-            | b | 450+-2 |
-            | c | 200+-2 |
-            | d | 500+-2 |
+            | b | 450.3  |
+            | c | 198.8  |
+            | d | 499    |
 
         When I request a travel distance matrix I should get
             |   | a      | c      |
-            | a | 0      | 200+-2 |
-            | c | 200+-2 | 0      |
+            | a | 0      | 198.8  |
+            | c | 198.8  | 0      |
 
 
     # Check rounding errors
@@ -516,7 +544,7 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a | b       | c       | d       |
-            | a | 0 | 1000+-3 | 2000+-3 | 3000+-3 |
+            | a | 0 | 1000.7  | 2001.4  | 3002.1  |
 
 
     Scenario: Testbot - OneToMany vs ManyToOne
@@ -561,9 +589,115 @@ Feature: Basic Distance Matrix
 
         When I request a travel distance matrix I should get
             |   | a       | b       | c       | d       | e       | f       |
-            | a | 0       | 100+-1  | 300+-1  | 650+-1  | 1930+-1 | 1533+-1 |
-            | b | 760+-1  | 0       | 200+-1  | 550+-1  | 1830+-1 | 1433+-1 |
-            | c | 560+-2  | 660+-2  | 0       | 350+-1  | 1630+-1 | 1233+-1 |
-            | d | 1480+-2 | 1580+-1 | 1780+-1 | 0       | 1280+-1 | 883+-1  |
-            | e | 200+-2  | 300+-2  | 500+-1  | 710+-1  | 0       | 1593+-1 |
-            | f | 597+-1  | 696+-1  | 896+-1  | 1108+-1 | 400+-3  | 0       |
+            | a | 0       | 100.1   | 300.2   | 650.5   | 1930.6  | 1533    |
+            | b | 759     | 0       | 200.1   | 550.4   | 1830.5  | 1432.9  |
+            | c | 558.8   | 658.9   | 0       | 350.3   | 1630.4  | 1232.8  |
+            | d | 1478.9  | 1579    | 1779.1  | 0       | 1280.1  | 882.5   |
+            | e | 198.8   | 298.9   | 499     | 710.3   | 0       | 1592.8  |
+            | f | 596.4   | 696.5   | 896.6   | 1107.9  | 397.6   | 0       |
+
+    Scenario: Testbot - Filling in noroutes with estimates (defaults to input coordinate location)
+        Given a grid size of 300 meters
+        Given the extract extra arguments "--small-component-size 4"
+        Given the query options
+            | fallback_speed | 5 |
+        Given the node map
+            """
+            a b f h 1
+            d e g i
+            """
+
+        And the ways
+            | nodes |
+            | abeda |
+            | fhigf |
+
+        When I request a travel distance matrix I should get
+            |   | a      | b      | f      | 1      |
+            | a | 0      | 300.2  | 900.7  | 1501.1 |
+            | b | 300.2  | 0      | 600.5  | 1200.9 |
+            | f | 900.7  | 600.5  | 0      | 302.2  |
+            | 1 | 1501.1 | 1200.9 | 300.2  | 0      |
+
+        When I request a travel distance matrix I should get
+            |   | a | b     | f     | 1      |
+            | a | 0 | 300.2 | 900.7 | 1501.1 |
+
+        When I request a travel distance matrix I should get
+            |   | a      |
+            | a | 0      |
+            | b | 300.2  |
+            | f | 900.7  |
+            | 1 | 1501.1 |
+
+    Scenario: Testbot - Filling in noroutes with estimates - use input coordinate
+        Given a grid size of 300 meters
+        Given the extract extra arguments "--small-component-size 4"
+        Given the query options
+            | fallback_speed      | 5     |
+            | fallback_coordinate | input |
+        Given the node map
+            """
+            a b f h 1
+            d e g i
+            """
+
+        And the ways
+            | nodes |
+            | abeda |
+            | fhigf |
+
+        When I request a travel distance matrix I should get
+            |   | a      | b      | f      | 1      |
+            | a | 0      | 300.2  | 900.7  | 1501.1 |
+            | b | 300.2  | 0      | 600.5  | 1200.9 |
+            | f | 900.7  | 600.5  | 0      | 302.2  |
+            | 1 | 1501.1 | 1200.9 | 300.2  | 0      |
+
+        When I request a travel distance matrix I should get
+            |   | a | b     | f     | 1      |
+            | a | 0 | 300.2 | 900.7 | 1501.1 |
+
+        When I request a travel distance matrix I should get
+            |   | a      |
+            | a | 0      |
+            | b | 300.2  |
+            | f | 900.7  |
+            | 1 | 1501.1 |
+
+
+    Scenario: Testbot - Filling in noroutes with estimates - use snapped coordinate
+        Given a grid size of 300 meters
+        Given the extract extra arguments "--small-component-size 4"
+        Given the query options
+            | fallback_speed      | 5       |
+            | fallback_coordinate | snapped |
+        Given the node map
+            """
+            a b f h 1
+            d e g i
+            """
+
+        And the ways
+            | nodes |
+            | abeda |
+            | fhigf |
+
+        When I request a travel distance matrix I should get
+            |   | a      | b      | f      | 1      |
+            | a | 0      | 300.2  | 900.7  | 1200.9 |
+            | b | 300.2  | 0      | 600.5  | 900.7  |
+            | f | 900.7  | 600.5  | 0      | 302.2  |
+            | 1 | 1200.9 | 900.7  | 300.2  | 0      |
+
+        When I request a travel distance matrix I should get
+            |   | a | b     | f     | 1      |
+            | a | 0 | 300.2 | 900.7 | 1200.9 |
+
+        When I request a travel distance matrix I should get
+            |   | a      |
+            | a | 0      |
+            | b | 300.2  |
+            | f | 900.7  |
+            | 1 | 1200.9 |
@@ -510,3 +510,268 @@ Feature: Basic Duration Matrix
        | | a |
        | a | 0 |
        | b | 24.1 |

Scenario: Testbot - Filling in noroutes with estimates (defaults to input coordinate location)
    Given a grid size of 300 meters
    Given the extract extra arguments "--small-component-size 4"
    Given the query options
        | fallback_speed | 5 |
    Given the node map
        """
        a b f h 1
        d e g i
        """

    And the ways
        | nodes |
        | abeda |
        | fhigf |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 30 | 18 | 30 |
        | b | 30 | 0 | 12 | 24 |
        | f | 18 | 12 | 0 | 30 |
        | 1 | 30 | 24 | 30 | 0 |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 30 | 18 | 30 |

    When I request a travel time matrix I should get
        | | a |
        | a | 0 |
        | b | 30 |
        | f | 18 |
        | 1 | 30 |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |
        | b | | | Y | Y |
        | f | Y | Y | | |
        | 1 | Y | Y | | |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |

    When I request a travel time matrix I should get estimates for
        | | a |
        | a | |
        | b | |
        | f | Y |
        | 1 | Y |

Scenario: Testbot - Filling in noroutes with estimates - use input coordinate
    Given a grid size of 300 meters
    Given the extract extra arguments "--small-component-size 4"
    Given the query options
        | fallback_speed | 5 |
        | fallback_coordinate | input |
    Given the node map
        """
        a b f h 1
        d e g i
        """

    And the ways
        | nodes |
        | abeda |
        | fhigf |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 30 | 18 | 30 |
        | b | 30 | 0 | 12 | 24 |
        | f | 18 | 12 | 0 | 30 |
        | 1 | 30 | 24 | 30 | 0 |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 30 | 18 | 30 |

    When I request a travel time matrix I should get
        | | a |
        | a | 0 |
        | b | 30 |
        | f | 18 |
        | 1 | 30 |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |
        | b | | | Y | Y |
        | f | Y | Y | | |
        | 1 | Y | Y | | |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |

    When I request a travel time matrix I should get estimates for
        | | a |
        | a | |
        | b | |
        | f | Y |
        | 1 | Y |

Scenario: Testbot - Filling in noroutes with estimates - use snapped coordinate
    Given a grid size of 300 meters
    Given the extract extra arguments "--small-component-size 4"
    Given the query options
        | fallback_speed | 5 |
        | fallback_coordinate | snapped |
    Given the node map
        """
        a b f h 1
        d e g i
        """

    And the ways
        | nodes |
        | abeda |
        | fhigf |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 30 | 18 | 24 |
        | b | 30 | 0 | 12 | 18 |
        | f | 18 | 12 | 0 | 30 |
        | 1 | 24 | 18 | 30 | 0 |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 30 | 18 | 24 |

    When I request a travel time matrix I should get
        | | a |
        | a | 0 |
        | b | 30 |
        | f | 18 |
        | 1 | 24 |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |
        | b | | | Y | Y |
        | f | Y | Y | | |
        | 1 | Y | Y | | |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |

    When I request a travel time matrix I should get estimates for
        | | a |
        | a | |
        | b | |
        | f | Y |
        | 1 | Y |

Scenario: Testbot - Travel time matrix of minimal network with scale factor
    Given the query options
        | scale_factor | 2 |
    Given the node map
        """
        a b
        """
    And the ways
        | nodes |
        | ab |
    When I request a travel time matrix I should get
        | | a | b |
        | a | 0 | 20 |
        | b | 20 | 0 |
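`scale_factor` simply multiplies every returned duration: testbot crosses the 100 m `ab` segment in 10 s, and a factor of 2 yields the 20 s shown. As a sketch of the post-processing step, with a stand-in typedef for OSRM's duration type:

    #include <vector>

    using EdgeDuration = int; // stand-in for OSRM's typedef

    // Sketch: scale every duration in a result table in place.
    void scaleTable(std::vector<EdgeDuration> &durations, double scale_factor)
    {
        for (auto &d : durations)
            d = static_cast<EdgeDuration>(d * scale_factor);
    }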
Scenario: Testbot - Test fallback speeds and scale factor
    Given a grid size of 300 meters
    Given the extract extra arguments "--small-component-size 4"
    Given the query options
        | scale_factor | 2 |
        | fallback_speed | 5 |
        | fallback_coordinate | snapped |

    Given the node map
        """
        a b f h 1
        d e g i
        """

    And the ways
        | nodes |
        | abeda |
        | fhigf |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 60 | 36 | 48 |
        | b | 60 | 0 | 24 | 36 |
        | f | 36 | 24 | 0 | 60 |
        | 1 | 48 | 36 | 60 | 0 |

    When I request a travel time matrix I should get
        | | a | b | f | 1 |
        | a | 0 | 60 | 36 | 48 |

    When I request a travel time matrix I should get
        | | a |
        | a | 0 |
        | b | 60 |
        | f | 36 |
        | 1 | 48 |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |
        | b | | | Y | Y |
        | f | Y | Y | | |
        | 1 | Y | Y | | |

    When I request a travel time matrix I should get estimates for
        | | a | b | f | 1 |
        | a | | | Y | Y |

    When I request a travel time matrix I should get estimates for
        | | a |
        | a | |
        | b | |
        | f | Y |
        | 1 | Y |

Scenario: Testbot - Travel time matrix of minimal network with overflow scale factor
    Given the query options
        | scale_factor | 2147483647 |

    Given the node map
        """
        a b
        """

    And the ways
        | nodes |
        | ab |

    When I request a travel time matrix I should get
        | | a | b |
        | a | 0 | 214748364.6 |
        | b | 214748364.6 | 0 |
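With `scale_factor = 2147483647` the product no longer fits a 32-bit duration, and the 214748364.6 above is a clamped maximum rather than a wrapped value. A sketch of overflow-safe scaling, assuming durations are 32-bit counts of tenths of a second; the exact clamp constant in OSRM may differ by a sentinel value (the table suggests 2^31 - 2 tenths):

    #include <algorithm>
    #include <cstdint>
    #include <limits>

    std::int32_t scaleDuration(std::int32_t duration, double scale_factor)
    {
        // multiply in double precision, then clamp into the representable
        // range instead of letting the 32-bit value wrap around
        const double scaled = duration * scale_factor;
        const double max_value = std::numeric_limits<std::int32_t>::max();
        return static_cast<std::int32_t>(std::min(scaled, max_value));
    }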
Scenario: Testbot - Travel time matrix of minimal network with fraction scale factor
    Given the query options
        | scale_factor | 0.5 |

    Given the node map
        """
        a b
        """

    And the ways
        | nodes |
        | ab |

    When I request a travel time matrix I should get
        | | a | b |
        | a | 0 | 5 |
        | b | 5 | 0 |

@@ -38,15 +38,15 @@ Feature: Multi level routing
Scenario: Testbot - Multi level routing
    Given the node map
        """
        a───b e───f
        │ │ │ │
        a────b e─────f
        \ │ │ /
        d───c h───g
        ╲ ╱
        ╳
        ╱ ╲
        i───j m───n
        │ │ │ │
        l───k───p───o
        / │ │ \
        l─────k───p─────o
        """

    And the nodes
@@ -67,78 +67,76 @@ Feature: Multi level routing

    When I route I should get
        | from | to | route | time |
        | a | b | abcda,abcda | 20s |
        | a | f | abcda,cm,mnopm,kp,ijkli,hj,efghe,efghe | 229.4s |
        | a | l | abcda,cm,mnopm,kp,ijkli,ijkli | 144.7s |
        | a | o | abcda,cm,mnopm,mnopm,mnopm | 124.7s |
        | f | l | efghe,hj,ijkli,ijkli,ijkli | 124.7s |
        | f | o | efghe,hj,ijkli,kp,mnopm,mnopm | 144.7s |
        | l | o | ijkli,kp,mnopm,mnopm | 60s |
        | a | b | abcda,abcda | 25s |
        | a | f | abcda,cm,mnopm,kp,ijkli,hj,efghe,efghe | 239.2s |
        | a | l | abcda,cm,mnopm,kp,ijkli,ijkli | 157.1s |
        | a | o | abcda,cm,mnopm,mnopm,mnopm | 137.1s |
        | f | l | efghe,hj,ijkli,ijkli | 136.7s |
        | f | o | efghe,hj,ijkli,kp,mnopm,mnopm | 162.1s |
        | l | o | ijkli,kp,mnopm,mnopm | 80s |
        | c | m | cm,cm | 44.7s |
        | f | a | efghe,hj,ijkli,kp,mnopm,cm,abcda,abcda | 239.2s |
        | l | a | ijkli,kp,mnopm,cm,abcda,abcda | 157.1s |

    When I request a travel time matrix I should get
        | | a | f | l | o |
        | a | 0 | 229.4 | 144.7 | 124.7 |
        | f | 229.4 | 0 | 124.7 | 144.7 |
        | l | 144.7 | 124.7 | 0 | 60 |
        | o | 124.7 | 144.7 | 60 | 0 |
        | a | 0 | 239.2 | 157.1 | 137.1 |
        | f | 239.2 | 0 | 136.7 | 162.1 |
        | l | 157.1 | 136.7 | 0 | 80 |
        | o | 137.1 | 162.1 | 80 | 0 |

    When I request a travel time matrix I should get
        | | a | f | l | o |
        | a | 0 | 229.4 | 144.7 | 124.7 |
        | a | 0 | 239.2 | 157.1 | 137.1 |

    When I request a travel time matrix I should get
        | | a |
        | a | 0 |
        | f | 229.4 |
        | l | 144.7 |
        | o | 124.7 |
        | f | 239.2 |
        | l | 157.1 |
        | o | 137.1 |

    When I request a travel time matrix I should get
        | | a | f | l | o |
        | a | 0 | 229.4 | 144.7 | 124.7 |
        | o | 124.7 | 144.7 | 60 | 0 |
        | a | 0 | 239.2 | 157.1 | 137.1 |
        | o | 137.1 | 162.1 | 80 | 0 |

    When I request a travel time matrix I should get
        | | a | o |
        | a | 0 | 124.7 |
        | f | 229.4 | 144.7 |
        | l | 144.7 | 60 |
        | o | 124.7 | 0 |

        | a | 0 | 137.1 |
        | f | 239.2 | 162.1 |
        | l | 157.1 | 80 |
        | o | 137.1 | 0 |

    When I request a travel distance matrix I should get
        | | a | f | l | o |
        | a | 0+-2 | 2287+-2 | 1443+-2 | 1243+-2 |
        | f | 2284+-2 | 0+-2 | 1241+-2 | 1443+-2 |
        | l | 1443+-2 | 1244+-2 | 0+-2 | 600+-2 |
        | o | 1243+-2 | 1444+-2 | 600+-2 | 0+-2 |
        | a | 0 | 2383.7 | 1566.9 | 1366.8 |
        | f | 2339.9 | 0 | 1198.1 | 1522.1 |
        | l | 1618.3 | 1293.3 | 0 | 800.5 |
        | o | 1418.2 | 1617.3 | 800.5 | 0 |

    When I request a travel distance matrix I should get
        | | a | f | l | o |
        | a | 0 | 2287.2+-2 | 1443+-2 | 1243+-2 |
        | a | 0 | 2383.7 | 1566.9 | 1366.8 |

    When I request a travel distance matrix I should get
        | | a |
        | a | 0 |
        | f | 2284.5+-2 |
        | l | 1443.1 |
        | o | 1243 |
        | f | 2339.9 |
        | l | 1618.3 |
        | o | 1418.2 |

    When I request a travel distance matrix I should get
        | | a | f | l | o |
        | a | 0 | 2287+-2 | 1443+-2 | 1243+-2 |
        | o | 1243 | 1444+-2 | 600+-2 | 0+-2 |

        | a | 0 | 2383.7 | 1566.9 | 1366.8 |
        | f | 2339.9 | 0 | 1198.1 | 1522.1 |

    When I request a travel distance matrix I should get
        | | a | o |
        | a | 0+-2 | 1243+-2 |
        | f | 2284+-2 | 1443+-2 |
        | l | 1443+-2 | 600+-2 |
        | o | 1243+-2 | 0+-2 |

        | a | 0 | 1366.8 |
        | f | 2339.9 | 1522.1 |
        | l | 1618.3 | 800.5 |
        | o | 1418.2 | 0 |

Scenario: Testbot - Multi level routing: horizontal road
    Given the node map

@@ -12,23 +12,26 @@ namespace contractor
struct ContractorEdgeData
{
    ContractorEdgeData()
        : weight(0), duration(0), id(0), originalEdges(0), shortcut(0), forward(0), backward(0)
        : weight(0), duration(0), distance(0), id(0), originalEdges(0), shortcut(0), forward(0),
          backward(0)
    {
    }
    ContractorEdgeData(EdgeWeight weight,
                       EdgeWeight duration,
                       EdgeDistance distance,
                       unsigned original_edges,
                       unsigned id,
                       bool shortcut,
                       bool forward,
                       bool backward)
        : weight(weight), duration(duration), id(id),
        : weight(weight), duration(duration), distance(distance), id(id),
          originalEdges(std::min((1u << 29) - 1u, original_edges)), shortcut(shortcut),
          forward(forward), backward(backward)
    {
    }
    EdgeWeight weight;
    EdgeWeight duration;
    EdgeDistance distance;
    unsigned id;
    unsigned originalEdges : 29;
    bool shortcut : 1;
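The `std::min((1u << 29) - 1u, original_edges)` clamp exists because `originalEdges` is a 29-bit field; the added `distance` member does not change that packing. The arithmetic, as a compile-time check:

    // the 29-bit field can hold at most (1u << 29) - 1u = 536,870,911 edges
    static_assert((1u << 29) - 1u == 536870911u, "29-bit maximum");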
@@ -41,6 +41,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
            input_edge.target,
            std::max(input_edge.data.weight, 1),
            input_edge.data.duration,
            input_edge.data.distance,
            1,
            input_edge.data.turn_id,
            false,
@@ -51,6 +52,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
            input_edge.source,
            std::max(input_edge.data.weight, 1),
            input_edge.data.duration,
            input_edge.data.distance,
            1,
            input_edge.data.turn_id,
            false,
@@ -82,6 +84,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
        forward_edge.data.originalEdges = reverse_edge.data.originalEdges = 1;
        forward_edge.data.weight = reverse_edge.data.weight = INVALID_EDGE_WEIGHT;
        forward_edge.data.duration = reverse_edge.data.duration = MAXIMAL_EDGE_DURATION;
        forward_edge.data.distance = reverse_edge.data.distance = MAXIMAL_EDGE_DISTANCE;
        // remove parallel edges
        while (i < edges.size() && edges[i].source == source && edges[i].target == target)
        {
@@ -90,12 +93,16 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
                forward_edge.data.weight = std::min(edges[i].data.weight, forward_edge.data.weight);
                forward_edge.data.duration =
                    std::min(edges[i].data.duration, forward_edge.data.duration);
                forward_edge.data.distance =
                    std::min(edges[i].data.distance, forward_edge.data.distance);
            }
            if (edges[i].data.backward)
            {
                reverse_edge.data.weight = std::min(edges[i].data.weight, reverse_edge.data.weight);
                reverse_edge.data.duration =
                    std::min(edges[i].data.duration, reverse_edge.data.duration);
                reverse_edge.data.distance =
                    std::min(edges[i].data.distance, reverse_edge.data.distance);
            }
            ++i;
        }
@@ -151,6 +158,7 @@ template <class Edge, typename GraphT> inline std::vector<Edge> toEdges(GraphT g
            BOOST_ASSERT_MSG(SPECIAL_NODEID != new_edge.target, "Target id invalid");
            new_edge.data.weight = data.weight;
            new_edge.data.duration = data.duration;
            new_edge.data.distance = data.distance;
            new_edge.data.shortcut = data.shortcut;
            new_edge.data.turn_id = data.id;
            BOOST_ASSERT_MSG(new_edge.data.turn_id != INT_MAX, // 2^31

@@ -17,7 +17,8 @@ struct QueryEdge
    struct EdgeData
    {
        explicit EdgeData()
            : turn_id(0), shortcut(false), weight(0), duration(0), forward(false), backward(false)
            : turn_id(0), shortcut(false), weight(0), duration(0), forward(false), backward(false),
              distance(0)
        {
        }

@@ -25,10 +26,11 @@ struct QueryEdge
                 const bool shortcut,
                 const EdgeWeight weight,
                 const EdgeWeight duration,
                 const EdgeDistance distance,
                 const bool forward,
                 const bool backward)
            : turn_id(turn_id), shortcut(shortcut), weight(weight), duration(duration),
              forward(forward), backward(backward)
              forward(forward), backward(backward), distance(distance)
        {
        }

@@ -40,6 +42,7 @@ struct QueryEdge
            turn_id = other.id;
            forward = other.forward;
            backward = other.backward;
            distance = other.distance;
        }
        // this ID is either the middle node of the shortcut, or the ID of the edge based node (node
        // based edge) storing the appropriate data. If `shortcut` is set to true, we get the middle
@@ -50,6 +53,7 @@ struct QueryEdge
        EdgeWeight duration : 30;
        std::uint32_t forward : 1;
        std::uint32_t backward : 1;
        EdgeDistance distance;
    } data;

    QueryEdge() : source(SPECIAL_NODEID), target(SPECIAL_NODEID) {}
@@ -69,7 +73,8 @@ struct QueryEdge
        return (source == right.source && target == right.target &&
                data.weight == right.data.weight && data.duration == right.data.duration &&
                data.shortcut == right.data.shortcut && data.forward == right.data.forward &&
                data.backward == right.data.backward && data.turn_id == right.data.turn_id);
                data.backward == right.data.backward && data.turn_id == right.data.turn_id &&
                data.distance == right.data.distance);
    }
};
}

@@ -22,6 +22,7 @@ class CellCustomizer
    {
        bool from_clique;
        EdgeDuration duration;
        EdgeDistance distance;
    };

  public:
@@ -60,7 +61,7 @@ class CellCustomizer
            }
        }
        heap.Clear();
        heap.Insert(source, 0, {false, 0});
        heap.Insert(source, 0, {false, 0, 0});

        // explore search space
        while (!heap.Empty() && !destinations_set.empty())
@@ -68,8 +69,18 @@ class CellCustomizer
            const NodeID node = heap.DeleteMin();
            const EdgeWeight weight = heap.GetKey(node);
            const EdgeDuration duration = heap.GetData(node).duration;
            const EdgeDistance distance = heap.GetData(node).distance;

            RelaxNode(graph, cells, allowed_nodes, metric, heap, level, node, weight, duration);
            RelaxNode(graph,
                      cells,
                      allowed_nodes,
                      metric,
                      heap,
                      level,
                      node,
                      weight,
                      duration,
                      distance);

            destinations_set.erase(node);
        }
@@ -77,21 +88,27 @@ class CellCustomizer
        // fill a map of destination nodes to placeholder pointers
        auto weights = cell.GetOutWeight(source);
        auto durations = cell.GetOutDuration(source);
        auto distances = cell.GetOutDistance(source);
        for (auto &destination : destinations)
        {
            BOOST_ASSERT(!weights.empty());
            BOOST_ASSERT(!durations.empty());
            BOOST_ASSERT(!distances.empty());

            const bool inserted = heap.WasInserted(destination);
            weights.front() = inserted ? heap.GetKey(destination) : INVALID_EDGE_WEIGHT;
            durations.front() =
                inserted ? heap.GetData(destination).duration : MAXIMAL_EDGE_DURATION;
            distances.front() =
                inserted ? heap.GetData(destination).distance : INVALID_EDGE_DISTANCE;

            weights.advance_begin(1);
            durations.advance_begin(1);
            distances.advance_begin(1);
        }
        BOOST_ASSERT(weights.empty());
        BOOST_ASSERT(durations.empty());
        BOOST_ASSERT(distances.empty());
    }
}

@@ -128,7 +145,8 @@ class CellCustomizer
                   LevelID level,
                   NodeID node,
                   EdgeWeight weight,
                   EdgeDuration duration) const
                   EdgeDuration duration,
                   EdgeDistance distance) const
    {
        auto first_level = level == 1;
        BOOST_ASSERT(heap.WasInserted(node));
@@ -149,6 +167,7 @@ class CellCustomizer
                auto subcell = cells.GetCell(metric, level - 1, subcell_id);
                auto subcell_destination = subcell.GetDestinationNodes().begin();
                auto subcell_duration = subcell.GetOutDuration(node).begin();
                auto subcell_distance = subcell.GetOutDistance(node).begin();
                for (auto subcell_weight : subcell.GetOutWeight(node))
                {
                    if (subcell_weight != INVALID_EDGE_WEIGHT)
@@ -161,20 +180,24 @@ class CellCustomizer

                        const EdgeWeight to_weight = weight + subcell_weight;
                        const EdgeDuration to_duration = duration + *subcell_duration;
                        const EdgeDistance to_distance = distance + *subcell_distance;
                        if (!heap.WasInserted(to))
                        {
                            heap.Insert(to, to_weight, {true, to_duration});
                            heap.Insert(to, to_weight, {true, to_duration, to_distance});
                        }
                        else if (std::tie(to_weight, to_duration) <
                                 std::tie(heap.GetKey(to), heap.GetData(to).duration))
                        else if (std::tie(to_weight, to_duration, to_distance) <
                                 std::tie(heap.GetKey(to),
                                          heap.GetData(to).duration,
                                          heap.GetData(to).distance))
                        {
                            heap.DecreaseKey(to, to_weight);
                            heap.GetData(to) = {true, to_duration};
                            heap.GetData(to) = {true, to_duration, to_distance};
                        }
                    }

                    ++subcell_destination;
                    ++subcell_duration;
                    ++subcell_distance;
                }
            }
        }
@@ -195,15 +218,18 @@ class CellCustomizer
            {
                const EdgeWeight to_weight = weight + data.weight;
                const EdgeDuration to_duration = duration + data.duration;
                const EdgeDistance to_distance = distance + data.distance;
                if (!heap.WasInserted(to))
                {
                    heap.Insert(to, to_weight, {false, duration + data.duration});
                    heap.Insert(
                        to, to_weight, {false, duration + data.duration, distance + data.distance});
                }
                else if (std::tie(to_weight, to_duration) <
                         std::tie(heap.GetKey(to), heap.GetData(to).duration))
                else if (std::tie(to_weight, to_duration, to_distance) <
                         std::tie(
                             heap.GetKey(to), heap.GetData(to).duration, heap.GetData(to).distance))
                {
                    heap.DecreaseKey(to, to_weight);
                    heap.GetData(to) = {false, to_duration};
                    heap.GetData(to) = {false, to_duration, to_distance};
                }
            }
        }
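The `std::tie` comparisons above make the relaxation order lexicographic: weight decides first, duration breaks weight ties, and the new distance member breaks the remainder. A self-contained illustration of that ordering:

    #include <cassert>
    #include <tuple>

    int main()
    {
        const int weight = 10, duration = 7;
        const int shorter = 90, longer = 100;
        // equal weight and duration: the comparison falls through to distance,
        // so the shorter candidate wins the tie and triggers DecreaseKey
        assert(std::tie(weight, duration, shorter) < std::tie(weight, duration, longer));
        return 0;
    }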
@@ -20,6 +20,7 @@ template <storage::Ownership Ownership> struct CellMetricImpl

    Vector<EdgeWeight> weights;
    Vector<EdgeDuration> durations;
    Vector<EdgeDistance> distances;
};
}

@@ -58,8 +58,10 @@ class MultiLevelGraph : public partitioner::MultiLevelGraph<EdgeDataT, Ownership

    MultiLevelGraph(PartitionerGraphT &&graph,
                    Vector<EdgeWeight> node_weights_,
                    Vector<EdgeDuration> node_durations_)
        : node_weights(std::move(node_weights_)), node_durations(std::move(node_durations_))
                    Vector<EdgeDuration> node_durations_,
                    Vector<EdgeDistance> node_distances_)
        : node_weights(std::move(node_weights_)), node_durations(std::move(node_durations_)),
          node_distances(std::move(node_distances_))
    {
        util::ViewOrVector<PartitionerGraphT::EdgeArrayEntry, storage::Ownership::Container>
            original_edge_array;
@@ -83,11 +85,13 @@ class MultiLevelGraph : public partitioner::MultiLevelGraph<EdgeDataT, Ownership
                    Vector<EdgeOffset> node_to_edge_offset_,
                    Vector<EdgeWeight> node_weights_,
                    Vector<EdgeDuration> node_durations_,
                    Vector<EdgeDistance> node_distances_,
                    Vector<bool> is_forward_edge_,
                    Vector<bool> is_backward_edge_)
        : SuperT(std::move(node_array_), std::move(edge_array_), std::move(node_to_edge_offset_)),
          node_weights(std::move(node_weights_)), node_durations(std::move(node_durations_)),
          is_forward_edge(is_forward_edge_), is_backward_edge(is_backward_edge_)
          node_distances(std::move(node_distances_)), is_forward_edge(is_forward_edge_),
          is_backward_edge(is_backward_edge_)
    {
    }

@@ -95,6 +99,8 @@ class MultiLevelGraph : public partitioner::MultiLevelGraph<EdgeDataT, Ownership

    EdgeWeight GetNodeDuration(NodeID node) const { return node_durations[node]; }

    EdgeDistance GetNodeDistance(NodeID node) const { return node_distances[node]; }

    bool IsForwardEdge(EdgeID edge) const { return is_forward_edge[edge]; }

    bool IsBackwardEdge(EdgeID edge) const { return is_backward_edge[edge]; }
@@ -111,6 +117,7 @@ class MultiLevelGraph : public partitioner::MultiLevelGraph<EdgeDataT, Ownership
  protected:
    Vector<EdgeWeight> node_weights;
    Vector<EdgeDuration> node_durations;
    Vector<EdgeDistance> node_distances;
    Vector<bool> is_forward_edge;
    Vector<bool> is_backward_edge;
};

@@ -23,6 +23,7 @@ inline void read(storage::tar::FileReader &reader,
{
    storage::serialization::read(reader, name + "/weights", metric.weights);
    storage::serialization::read(reader, name + "/durations", metric.durations);
    storage::serialization::read(reader, name + "/distances", metric.distances);
}

template <storage::Ownership Ownership>
@@ -32,6 +33,7 @@ inline void write(storage::tar::FileWriter &writer,
{
    storage::serialization::write(writer, name + "/weights", metric.weights);
    storage::serialization::write(writer, name + "/durations", metric.durations);
    storage::serialization::write(writer, name + "/distances", metric.distances);
}

template <typename EdgeDataT, storage::Ownership Ownership>
@@ -42,6 +44,7 @@ inline void read(storage::tar::FileReader &reader,
    storage::serialization::read(reader, name + "/node_array", graph.node_array);
    storage::serialization::read(reader, name + "/node_weights", graph.node_weights);
    storage::serialization::read(reader, name + "/node_durations", graph.node_durations);
    storage::serialization::read(reader, name + "/node_distances", graph.node_distances);
    storage::serialization::read(reader, name + "/edge_array", graph.edge_array);
    storage::serialization::read(reader, name + "/is_forward_edge", graph.is_forward_edge);
    storage::serialization::read(reader, name + "/is_backward_edge", graph.is_backward_edge);
@@ -56,6 +59,7 @@ inline void write(storage::tar::FileWriter &writer,
    storage::serialization::write(writer, name + "/node_array", graph.node_array);
    storage::serialization::write(writer, name + "/node_weights", graph.node_weights);
    storage::serialization::write(writer, name + "/node_durations", graph.node_durations);
    storage::serialization::write(writer, name + "/node_distances", graph.node_distances);
    storage::serialization::write(writer, name + "/edge_array", graph.edge_array);
    storage::serialization::write(writer, name + "/is_forward_edge", graph.is_forward_edge);
    storage::serialization::write(writer, name + "/is_backward_edge", graph.is_backward_edge);

@@ -6,6 +6,7 @@

#include "engine/api/json_factory.hpp"
#include "engine/hint.hpp"
#include "util/coordinate_calculation.hpp"

#include <boost/assert.hpp>
#include <boost/range/algorithm/transform.hpp>
@@ -53,6 +54,8 @@ class BaseAPI
        // TODO: check forward/reverse
        return json::makeWaypoint(
            phantom.location,
            util::coordinate_calculation::fccApproximateDistance(phantom.location,
                                                                 phantom.input_location),
            facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id)).to_string(),
            Hint{phantom, facade.GetCheckSum()});
    }
@@ -61,6 +64,8 @@ class BaseAPI
        // TODO: check forward/reverse
        return json::makeWaypoint(
            phantom.location,
            util::coordinate_calculation::fccApproximateDistance(phantom.location,
                                                                 phantom.input_location),
            facade.GetNameForID(facade.GetNameIndex(phantom.forward_segment_id.id))
                .to_string());
    }

@@ -33,7 +33,7 @@ namespace json
namespace detail
{

util::json::Array coordinateToLonLat(const util::Coordinate coordinate);
util::json::Array coordinateToLonLat(const util::Coordinate &coordinate);

/**
 * Ensures that a bearing value is a whole number, and clamped to the range 0-359
@@ -86,11 +86,14 @@ util::json::Object makeRoute(const guidance::Route &route,
                             const char *weight_name);

// Creates a Waypoint without Hint, see the Hint overload below
util::json::Object makeWaypoint(const util::Coordinate location, std::string name);
util::json::Object
makeWaypoint(const util::Coordinate &location, const double &distance, std::string name);

// Creates a Waypoint with Hint, see the overload above when Hint is not needed
util::json::Object
makeWaypoint(const util::Coordinate location, std::string name, const Hint &hint);
util::json::Object makeWaypoint(const util::Coordinate &location,
                                const double &distance,
                                std::string name,
                                const Hint &hint);

util::json::Object makeRouteLeg(guidance::RouteLeg leg, util::json::Array steps);

@@ -41,7 +41,6 @@ class NearestAPI final : public BaseAPI
            [this](const PhantomNodeWithDistance &phantom_with_distance) {
                auto &phantom_node = phantom_with_distance.phantom_node;
                auto waypoint = MakeWaypoint(phantom_node);
                waypoint.values["distance"] = phantom_with_distance.distance;

                util::json::Array nodes;

@@ -31,6 +31,15 @@ namespace api
class TableAPI final : public BaseAPI
{
  public:
    struct TableCellRef
    {
        TableCellRef(const std::size_t &row, const std::size_t &column) : row{row}, column{column}
        {
        }
        std::size_t row;
        std::size_t column;
    };

    TableAPI(const datafacade::BaseDataFacade &facade_, const TableParameters &parameters_)
        : BaseAPI(facade_, parameters_), parameters(parameters_)
    {
@@ -39,6 +48,7 @@ class TableAPI final : public BaseAPI
    virtual void
    MakeResponse(const std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>> &tables,
                 const std::vector<PhantomNode> &phantoms,
                 const std::vector<TableCellRef> &fallback_speed_cells,
                 util::json::Object &response) const
    {
        auto number_of_sources = parameters.sources.size();
@@ -77,6 +87,11 @@ class TableAPI final : public BaseAPI
                MakeDistanceTable(tables.second, number_of_sources, number_of_destinations);
        }

        if (parameters.fallback_speed != INVALID_FALLBACK_SPEED && parameters.fallback_speed > 0)
        {
            response.values["fallback_speed_cells"] = MakeEstimatesTable(fallback_speed_cells);
        }

        response.values["code"] = "Ok";
    }

@@ -163,6 +178,20 @@ class TableAPI final : public BaseAPI
        return json_table;
    }

    virtual util::json::Array
    MakeEstimatesTable(const std::vector<TableCellRef> &fallback_speed_cells) const
    {
        util::json::Array json_table;
        std::for_each(
            fallback_speed_cells.begin(), fallback_speed_cells.end(), [&](const auto &cell) {
                util::json::Array row;
                row.values.push_back(util::json::Number(cell.row));
                row.values.push_back(util::json::Number(cell.column));
                json_table.values.push_back(std::move(row));
            });
        return json_table;
    }

    const TableParameters &parameters;
};
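`MakeEstimatesTable` flattens the recorded `(row, column)` pairs into a JSON array of two-element arrays. A hypothetical call site; the cell coordinates here are made up for illustration:

    // record which table cells the fallback filled in
    std::vector<TableAPI::TableCellRef> cells;
    cells.emplace_back(0, 3); // row 0, column 3 came from an estimate
    cells.emplace_back(1, 3);
    // MakeEstimatesTable(cells) then serializes to [[0,3],[1,3]], which
    // MakeResponse attaches as response.values["fallback_speed_cells"]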
@@ -59,6 +59,15 @@ struct TableParameters : public BaseParameters
{
    std::vector<std::size_t> sources;
    std::vector<std::size_t> destinations;
    double fallback_speed = INVALID_FALLBACK_SPEED;

    enum class FallbackCoordinateType
    {
        Input = 0,
        Snapped = 1
    };

    FallbackCoordinateType fallback_coordinate_type = FallbackCoordinateType::Input;

    enum class AnnotationsType
    {
@@ -70,6 +79,8 @@ struct TableParameters : public BaseParameters

    AnnotationsType annotations = AnnotationsType::Duration;

    double scale_factor = 1;

    TableParameters() = default;
    template <typename... Args>
    TableParameters(std::vector<std::size_t> sources_,
@@ -90,6 +101,22 @@ struct TableParameters : public BaseParameters
    {
    }

    template <typename... Args>
    TableParameters(std::vector<std::size_t> sources_,
                    std::vector<std::size_t> destinations_,
                    const AnnotationsType annotations_,
                    double fallback_speed_,
                    FallbackCoordinateType fallback_coordinate_type_,
                    double scale_factor_,
                    Args... args_)
        : BaseParameters{std::forward<Args>(args_)...}, sources{std::move(sources_)},
          destinations{std::move(destinations_)}, fallback_speed{fallback_speed_},
          fallback_coordinate_type{fallback_coordinate_type_}, annotations{annotations_},
          scale_factor{scale_factor_}
    {
    }

    bool IsValid() const
    {
        if (!BaseParameters::IsValid())
@@ -101,14 +128,7 @@ struct TableParameters : public BaseParameters

        // 1/ The user is able to specify duplicates in srcs and dsts, in that case it's their fault

        // 2/ len(srcs) and len(dsts) smaller or equal to len(locations)
        if (sources.size() > coordinates.size())
            return false;

        if (destinations.size() > coordinates.size())
            return false;

        // 3/ 0 <= index < len(locations)
        // 2/ 0 <= index < len(locations)
        const auto not_in_range = [this](const std::size_t x) { return x >= coordinates.size(); };

        if (std::any_of(begin(sources), end(sources), not_in_range))
@@ -117,6 +137,12 @@ struct TableParameters : public BaseParameters
        if (std::any_of(begin(destinations), end(destinations), not_in_range))
            return false;

        if (fallback_speed <= 0)
            return false;

        if (scale_factor <= 0)
            return false;

        return true;
    }
};
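The two new guards at the end of `IsValid` reject non-positive values for the added knobs. A minimal usage sketch, assuming the remaining preconditions (index ranges, base parameters) already hold:

    #include <cassert>

    TableParameters params;           // defaults: scale_factor = 1
    params.fallback_speed = 0;        // zero speed can never produce an estimate
    assert(!params.IsValid());

    params.fallback_speed = 5;
    params.scale_factor = -1;         // negative scaling is rejected the same way
    assert(!params.IsValid());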
@@ -78,6 +78,8 @@ template <> class AlgorithmDataFacade<MLD>

    virtual EdgeWeight GetNodeDuration(const NodeID node) const = 0; // TODO: to be removed

    virtual EdgeDistance GetNodeDistance(const NodeID node) const = 0;

    virtual bool IsForwardEdge(EdgeID edge) const = 0;

    virtual bool IsBackwardEdge(EdgeID edge) const = 0;

@@ -312,12 +312,13 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
    std::vector<PhantomNodeWithDistance>
    NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
                               const float max_distance,
                               const Approach approach) const override final
                               const Approach approach,
                               const bool use_all_edges) const override final
    {
        BOOST_ASSERT(m_geospatial_query.get());

        return m_geospatial_query->NearestPhantomNodesInRange(
            input_coordinate, max_distance, approach);
            input_coordinate, max_distance, approach, use_all_edges);
    }

    std::vector<PhantomNodeWithDistance>
@@ -325,12 +326,13 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
                               const float max_distance,
                               const int bearing,
                               const int bearing_range,
                               const Approach approach) const override final
                               const Approach approach,
                               const bool use_all_edges) const override final
    {
        BOOST_ASSERT(m_geospatial_query.get());

        return m_geospatial_query->NearestPhantomNodesInRange(
            input_coordinate, max_distance, bearing, bearing_range, approach);
            input_coordinate, max_distance, bearing, bearing_range, approach, use_all_edges);
    }

    std::vector<PhantomNodeWithDistance>
@@ -697,6 +699,11 @@ template <> class ContiguousInternalMemoryAlgorithmDataFacade<MLD> : public Algo
        return query_graph.GetNodeDuration(node);
    }

    EdgeDistance GetNodeDistance(const NodeID node) const override final
    {
        return query_graph.GetNodeDistance(node);
    }

    bool IsForwardEdge(const NodeID node) const override final
    {
        return query_graph.IsForwardEdge(node);

@@ -126,11 +126,13 @@ class BaseDataFacade
                               const float max_distance,
                               const int bearing,
                               const int bearing_range,
                               const Approach approach) const = 0;
                               const Approach approach,
                               const bool use_all_edges) const = 0;
    virtual std::vector<PhantomNodeWithDistance>
    NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
                               const float max_distance,
                               const Approach approach) const = 0;
                               const Approach approach,
                               const bool use_all_edges) const = 0;

    virtual std::vector<PhantomNodeWithDistance>
    NearestPhantomNodes(const util::Coordinate input_coordinate,

@@ -10,6 +10,7 @@
#include <boost/iostreams/device/mapped_file.hpp>

#include <memory>
#include <string>

namespace osrm
{
@@ -24,8 +25,7 @@ namespace datafacade
class MMapMemoryAllocator : public ContiguousBlockAllocator
{
  public:
    explicit MMapMemoryAllocator(const storage::StorageConfig &config,
                                 const boost::filesystem::path &memory_file);
    explicit MMapMemoryAllocator(const storage::StorageConfig &config);
    ~MMapMemoryAllocator() override final;

    // interface to give access to the datafacades
@@ -33,8 +33,8 @@ class MMapMemoryAllocator : public ContiguousBlockAllocator

  private:
    storage::SharedDataIndex index;
    util::vector_view<char> mapped_memory;
    boost::iostreams::mapped_file mapped_memory_file;
    std::vector<boost::iostreams::mapped_file> mapped_memory_files;
    std::string rtree_filename;
};

} // namespace datafacade

@@ -32,9 +32,8 @@ class ExternalProvider final : public DataFacadeProvider<AlgorithmT, FacadeT>
  public:
    using Facade = typename DataFacadeProvider<AlgorithmT, FacadeT>::Facade;

    ExternalProvider(const storage::StorageConfig &config,
                     const boost::filesystem::path &memory_file)
        : facade_factory(std::make_shared<datafacade::MMapMemoryAllocator>(config, memory_file))
    ExternalProvider(const storage::StorageConfig &config)
        : facade_factory(std::make_shared<datafacade::MMapMemoryAllocator>(config))
    {
    }

@@ -63,12 +63,16 @@ template <typename Algorithm> class Engine final : public EngineInterface
                            << "\" with algorithm " << routing_algorithms::name<Algorithm>();
        facade_provider = std::make_unique<WatchingProvider<Algorithm>>(config.dataset_name);
    }
    else if (!config.memory_file.empty())
    else if (!config.memory_file.empty() || config.use_mmap)
    {
        util::Log(logDEBUG) << "Using memory mapped filed at " << config.memory_file
                            << " with algorithm " << routing_algorithms::name<Algorithm>();
        facade_provider = std::make_unique<ExternalProvider<Algorithm>>(config.storage_config,
                                                                        config.memory_file);
        if (!config.memory_file.empty())
        {
            util::Log(logWARNING)
                << "The 'memory_file' option is DEPRECATED - using direct mmaping instead";
        }
        util::Log(logDEBUG) << "Using direct memory mapping with algorithm "
                            << routing_algorithms::name<Algorithm>();
        facade_provider = std::make_unique<ExternalProvider<Algorithm>>(config.storage_config);
    }
    else
    {

@@ -89,6 +89,7 @@ struct EngineConfig final
    int max_alternatives = 3; // set an arbitrary upper bound; can be adjusted by user
    bool use_shared_memory = true;
    boost::filesystem::path memory_file;
    bool use_mmap = true;
    Algorithm algorithm = Algorithm::CH;
    std::string verbosity;
    std::string dataset_name;

@@ -53,12 +53,14 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
    std::vector<PhantomNodeWithDistance>
    NearestPhantomNodesInRange(const util::Coordinate input_coordinate,
                               const double max_distance,
                               const Approach approach) const
                               const Approach approach,
                               const bool use_all_edges) const
    {
        auto results = rtree.Nearest(
            input_coordinate,
            [this, approach, &input_coordinate](const CandidateSegment &segment) {
                return boolPairAnd(boolPairAnd(HasValidEdge(segment), CheckSegmentExclude(segment)),
            [this, approach, &input_coordinate, use_all_edges](const CandidateSegment &segment) {
                return boolPairAnd(
                    boolPairAnd(HasValidEdge(segment, use_all_edges), CheckSegmentExclude(segment)),
                    CheckApproach(input_coordinate, segment, approach));
            },
            [this, max_distance, input_coordinate](const std::size_t,
@@ -76,15 +78,17 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
                               const double max_distance,
                               const int bearing,
                               const int bearing_range,
                               const Approach approach) const
                               const Approach approach,
                               const bool use_all_edges) const
    {
        auto results = rtree.Nearest(
            input_coordinate,
            [this, approach, &input_coordinate, bearing, bearing_range](
            [this, approach, &input_coordinate, bearing, bearing_range, use_all_edges](
                const CandidateSegment &segment) {
                auto use_direction =
                    boolPairAnd(CheckSegmentBearing(segment, bearing, bearing_range),
                                boolPairAnd(HasValidEdge(segment), CheckSegmentExclude(segment)));
                                boolPairAnd(HasValidEdge(segment, use_all_edges),
                                            CheckSegmentExclude(segment)));
                use_direction =
                    boolPairAnd(use_direction, CheckApproach(input_coordinate, segment, approach));
                return use_direction;
@@ -628,7 +632,8 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
     * which means that this edge is not currently traversible. If this is the case,
     * then we shouldn't snap to this edge.
     */
    std::pair<bool, bool> HasValidEdge(const CandidateSegment &segment) const
    std::pair<bool, bool> HasValidEdge(const CandidateSegment &segment,
                                       const bool use_all_edges = false) const
    {

        bool forward_edge_valid = false;
@@ -652,6 +657,9 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
            reverse_edge_valid = data.reverse_segment_id.enabled;
        }

        forward_edge_valid = forward_edge_valid && (data.is_startpoint || use_all_edges);
        reverse_edge_valid = reverse_edge_valid && (data.is_startpoint || use_all_edges);

        return std::make_pair(forward_edge_valid, reverse_edge_valid);
    }
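`use_all_edges` relaxes the snapping filter: ordinarily a candidate must be flagged `is_startpoint`, but the table fallback wants to snap even to edges a route could not start on. Per direction the predicate reduces to:

    // mirrors the HasValidEdge change above, for a single direction
    bool keepCandidate(bool edge_enabled, bool is_startpoint, bool use_all_edges)
    {
        return edge_enabled && (is_startpoint || use_all_edges);
    }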
@@ -138,7 +138,8 @@ class BasePlugin
    std::vector<std::vector<PhantomNodeWithDistance>>
    GetPhantomNodesInRange(const datafacade::BaseDataFacade &facade,
                           const api::BaseParameters &parameters,
                           const std::vector<double> radiuses) const
                           const std::vector<double> radiuses,
                           bool use_all_edges = false) const
    {
        std::vector<std::vector<PhantomNodeWithDistance>> phantom_nodes(
            parameters.coordinates.size());
@@ -171,12 +172,13 @@ class BasePlugin
                    radiuses[i],
                    parameters.bearings[i]->bearing,
                    parameters.bearings[i]->range,
                    approach);
                    approach,
                    use_all_edges);
            }
            else
            {
                phantom_nodes[i] = facade.NearestPhantomNodesInRange(
                    parameters.coordinates[i], radiuses[i], approach);
                    parameters.coordinates[i], radiuses[i], approach, use_all_edges);
            }
        }

@@ -34,8 +34,7 @@ class RoutingAlgorithmsInterface
    ManyToManySearch(const std::vector<PhantomNode> &phantom_nodes,
                     const std::vector<std::size_t> &source_indices,
                     const std::vector<std::size_t> &target_indices,
                     const bool calculate_distance,
                     const bool calculate_duration) const = 0;
                     const bool calculate_distance) const = 0;

    virtual routing_algorithms::SubMatchingList
    MapMatching(const routing_algorithms::CandidateLists &candidates_list,
@@ -88,8 +87,7 @@ template <typename Algorithm> class RoutingAlgorithms final : public RoutingAlgo
    ManyToManySearch(const std::vector<PhantomNode> &phantom_nodes,
                     const std::vector<std::size_t> &source_indices,
                     const std::vector<std::size_t> &target_indices,
                     const bool calculate_distance,
                     const bool calculate_duration) const final override;
                     const bool calculate_distance) const final override;

    routing_algorithms::SubMatchingList
    MapMatching(const routing_algorithms::CandidateLists &candidates_list,
@@ -198,8 +196,7 @@ std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
RoutingAlgorithms<Algorithm>::ManyToManySearch(const std::vector<PhantomNode> &phantom_nodes,
                                               const std::vector<std::size_t> &_source_indices,
                                               const std::vector<std::size_t> &_target_indices,
                                               const bool calculate_distance,
                                               const bool calculate_duration) const
                                               const bool calculate_distance) const
{
    BOOST_ASSERT(!phantom_nodes.empty());

@@ -222,8 +219,7 @@ RoutingAlgorithms<Algorithm>::ManyToManySearch(const std::vector<PhantomNode> &p
        phantom_nodes,
        std::move(source_indices),
        std::move(target_indices),
        calculate_distance,
        calculate_duration);
        calculate_distance);
}

template <typename Algorithm>

@@ -25,15 +25,17 @@ struct NodeBucket
    unsigned from_clique_arc : 1;
    EdgeWeight weight;
    EdgeDuration duration;
    EdgeDistance distance;

    NodeBucket(NodeID middle_node,
               NodeID parent_node,
               bool from_clique_arc,
               unsigned column_index,
               EdgeWeight weight,
               EdgeDuration duration)
               EdgeDuration duration,
               EdgeDistance distance)
        : middle_node(middle_node), parent_node(parent_node), column_index(column_index),
          from_clique_arc(from_clique_arc), weight(weight), duration(duration)
          from_clique_arc(from_clique_arc), weight(weight), duration(duration), distance(distance)
    {
    }

@@ -41,9 +43,10 @@ struct NodeBucket
               NodeID parent_node,
               unsigned column_index,
               EdgeWeight weight,
               EdgeDuration duration)
               EdgeDuration duration,
               EdgeDistance distance)
        : middle_node(middle_node), parent_node(parent_node), column_index(column_index),
          from_clique_arc(false), weight(weight), duration(duration)
          from_clique_arc(false), weight(weight), duration(duration), distance(distance)
    {
    }

@@ -94,8 +97,7 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
                 const std::vector<PhantomNode> &phantom_nodes,
                 const std::vector<std::size_t> &source_indices,
                 const std::vector<std::size_t> &target_indices,
                 const bool calculate_distance,
                 const bool calculate_duration);
                 const bool calculate_distance);

} // namespace routing_algorithms
} // namespace engine

@@ -85,13 +85,17 @@ void insertSourceInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_no
    {
        heap.Insert(phantom_node.forward_segment_id.id,
                    -phantom_node.GetForwardWeightPlusOffset(),
                    {phantom_node.forward_segment_id.id, -phantom_node.GetForwardDuration()});
                    {phantom_node.forward_segment_id.id,
                     -phantom_node.GetForwardDuration(),
                     -phantom_node.GetForwardDistance()});
    }
    if (phantom_node.IsValidReverseSource())
    {
        heap.Insert(phantom_node.reverse_segment_id.id,
                    -phantom_node.GetReverseWeightPlusOffset(),
                    {phantom_node.reverse_segment_id.id, -phantom_node.GetReverseDuration()});
                    {phantom_node.reverse_segment_id.id,
                     -phantom_node.GetReverseDuration(),
                     -phantom_node.GetReverseDistance()});
    }
}

@@ -102,13 +106,17 @@ void insertTargetInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_no
    {
        heap.Insert(phantom_node.forward_segment_id.id,
                    phantom_node.GetForwardWeightPlusOffset(),
                    {phantom_node.forward_segment_id.id, phantom_node.GetForwardDuration()});
                    {phantom_node.forward_segment_id.id,
                     phantom_node.GetForwardDuration(),
                     phantom_node.GetForwardDistance()});
    }
    if (phantom_node.IsValidReverseTarget())
    {
        heap.Insert(phantom_node.reverse_segment_id.id,
                    phantom_node.GetReverseWeightPlusOffset(),
                    {phantom_node.reverse_segment_id.id, phantom_node.GetReverseDuration()});
                    {phantom_node.reverse_segment_id.id,
                     phantom_node.GetReverseDuration(),
                     phantom_node.GetReverseDistance()});
    }
}
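Sources enter the many-to-many heap with negated weight, duration, and now distance offsets, so that when the forward and backward frontiers meet, plain addition yields the end-to-end value with the phantom-node offsets already folded in. Schematically:

    using EdgeWeight = int; // stand-in for OSRM's typedef

    // sources are seeded with -offset, targets with +offset, so the two
    // frontiers can simply be added where they meet - no correction pass
    EdgeWeight endToEnd(EdgeWeight source_offset, EdgeWeight forward_weight,
                        EdgeWeight target_offset, EdgeWeight backward_weight)
    {
        return (-source_offset + forward_weight) + (target_offset + backward_weight);
    }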
@@ -186,9 +186,10 @@ void routingStep(const DataFacade<Algorithm> &facade,
}

template <bool UseDuration>
EdgeWeight getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
std::tuple<EdgeWeight, EdgeDistance> getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
{
    EdgeWeight loop_weight = UseDuration ? MAXIMAL_EDGE_DURATION : INVALID_EDGE_WEIGHT;
    EdgeDistance loop_distance = MAXIMAL_EDGE_DISTANCE;
    for (auto edge : facade.GetAdjacentEdgeRange(node))
    {
        const auto &data = facade.GetEdgeData(edge);
@@ -198,11 +199,15 @@ EdgeWeight getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
            if (to == node)
            {
                const auto value = UseDuration ? data.duration : data.weight;
                loop_weight = std::min(loop_weight, value);
                if (value < loop_weight)
                {
                    loop_weight = value;
                    loop_distance = data.distance;
                }
            }
        }
    return loop_weight;
}
    return std::make_tuple(loop_weight, loop_distance);
}
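Since `getLoopWeight` now returns a `(weight, distance)` tuple instead of a bare weight, the minimum loop's distance travels along with it and call sites unpack both values, roughly:

    EdgeWeight loop_weight;
    EdgeDistance loop_distance;
    std::tie(loop_weight, loop_distance) = getLoopWeight<true>(facade, node);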
/**
|
||||
|
||||
@ -509,90 +509,6 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
return std::make_tuple(weight, std::move(unpacked_nodes), std::move(unpacked_edges));
|
||||
}
|
||||
|
||||
// With (s, middle, t) we trace back the paths middle -> s and middle -> t.
|
||||
// This gives us a packed path (node ids) from the base graph around s and t,
|
||||
// and overlay node ids otherwise. We then have to unpack the overlay clique
|
||||
// edges by recursively descending unpacking the path down to the base graph.
|
||||
|
||||
using UnpackedNodes = std::vector<NodeID>;
|
||||
using UnpackedEdges = std::vector<EdgeID>;
|
||||
using UnpackedPath = std::tuple<EdgeWeight, UnpackedNodes, UnpackedEdges>;
|
||||
|
||||
template <typename Algorithm, typename... Args>
|
||||
UnpackedPath
|
||||
unpackPathAndCalculateDistance(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const DataFacade<Algorithm> &facade,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
|
||||
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
const bool force_loop_forward,
|
||||
const bool force_loop_reverse,
|
||||
EdgeWeight weight_upper_bound,
|
||||
PackedPath packed_path,
|
||||
NodeID middle,
|
||||
Args... args)
|
||||
{
|
||||
EdgeWeight weight = weight_upper_bound;
|
||||
const auto &partition = facade.GetMultiLevelPartition();
|
||||
const NodeID source_node = !packed_path.empty() ? std::get<0>(packed_path.front()) : middle;
|
||||
|
||||
// Unpack path
|
||||
std::vector<NodeID> unpacked_nodes;
|
||||
std::vector<EdgeID> unpacked_edges;
|
||||
unpacked_nodes.reserve(packed_path.size());
|
||||
unpacked_edges.reserve(packed_path.size());
|
||||
|
||||
unpacked_nodes.push_back(source_node);
|
||||
|
||||
for (auto const &packed_edge : packed_path)
|
||||
{
|
||||
NodeID source, target;
|
||||
bool overlay_edge;
|
||||
std::tie(source, target, overlay_edge) = packed_edge;
|
||||
if (!overlay_edge)
|
||||
{ // a base graph edge
|
||||
unpacked_nodes.push_back(target);
|
||||
unpacked_edges.push_back(facade.FindEdge(source, target));
|
||||
}
|
||||
else
|
||||
{ // an overlay graph edge
|
||||
LevelID level = getNodeQueryLevel(partition, source, args...);
|
||||
CellID parent_cell_id = partition.GetCell(level, source);
|
||||
BOOST_ASSERT(parent_cell_id == partition.GetCell(level, target));
|
||||
|
||||
LevelID sublevel = level - 1;
|
||||
|
||||
// Here heaps can be reused, let's go deeper!
|
||||
forward_heap.Clear();
|
||||
reverse_heap.Clear();
|
||||
forward_heap.Insert(source, 0, {source});
|
||||
reverse_heap.Insert(target, 0, {target});
|
||||
|
||||
// TODO: when structured bindings will be allowed change to
|
||||
// auto [subpath_weight, subpath_source, subpath_target, subpath] = ...
|
||||
EdgeWeight subpath_weight;
|
||||
std::vector<NodeID> subpath_nodes;
|
||||
std::vector<EdgeID> subpath_edges;
|
||||
std::tie(subpath_weight, subpath_nodes, subpath_edges) = search(engine_working_data,
|
||||
facade,
|
||||
forward_heap,
|
||||
reverse_heap,
|
||||
force_loop_forward,
|
||||
force_loop_reverse,
|
||||
weight_upper_bound,
|
||||
sublevel,
|
||||
parent_cell_id);
|
||||
BOOST_ASSERT(!subpath_edges.empty());
|
||||
BOOST_ASSERT(subpath_nodes.size() > 1);
|
||||
BOOST_ASSERT(subpath_nodes.front() == source);
|
||||
BOOST_ASSERT(subpath_nodes.back() == target);
|
||||
unpacked_nodes.insert(
|
||||
unpacked_nodes.end(), std::next(subpath_nodes.begin()), subpath_nodes.end());
|
||||
unpacked_edges.insert(unpacked_edges.end(), subpath_edges.begin(), subpath_edges.end());
|
||||
}
|
||||
}
|
||||
return std::make_tuple(weight, std::move(unpacked_nodes), std::move(unpacked_edges));
|
||||
}
|
||||
|
||||
// Alias to be compatible with the CH-based search
|
||||
template <typename Algorithm>
|
||||
inline void search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
|
||||
@ -30,7 +30,11 @@ struct HeapData
|
||||
struct ManyToManyHeapData : HeapData
|
||||
{
|
||||
EdgeWeight duration;
|
||||
ManyToManyHeapData(NodeID p, EdgeWeight duration) : HeapData(p), duration(duration) {}
|
||||
EdgeDistance distance;
|
||||
ManyToManyHeapData(NodeID p, EdgeWeight duration, EdgeDistance distance)
|
||||
: HeapData(p), duration(duration), distance(distance)
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
template <> struct SearchEngineData<routing_algorithms::ch::Algorithm>
|
||||
@@ -75,12 +79,16 @@ struct MultiLayerDijkstraHeapData
struct ManyToManyMultiLayerDijkstraHeapData : MultiLayerDijkstraHeapData
{
EdgeWeight duration;
ManyToManyMultiLayerDijkstraHeapData(NodeID p, EdgeWeight duration)
: MultiLayerDijkstraHeapData(p), duration(duration)
EdgeDistance distance;
ManyToManyMultiLayerDijkstraHeapData(NodeID p, EdgeWeight duration, EdgeDistance distance)
: MultiLayerDijkstraHeapData(p), duration(duration), distance(distance)
{
}
ManyToManyMultiLayerDijkstraHeapData(NodeID p, bool from, EdgeWeight duration)
: MultiLayerDijkstraHeapData(p, from), duration(duration)
ManyToManyMultiLayerDijkstraHeapData(NodeID p,
bool from,
EdgeWeight duration,
EdgeDistance distance)
: MultiLayerDijkstraHeapData(p, from), duration(duration), distance(distance)
{
}
};

@@ -15,20 +15,25 @@ struct EdgeBasedEdge
public:
struct EdgeData
{
EdgeData() : turn_id(0), weight(0), duration(0), forward(false), backward(false) {}
EdgeData()
: turn_id(0), weight(0), distance(0), duration(0), forward(false), backward(false)
{
}

EdgeData(const NodeID turn_id,
const EdgeWeight weight,
const EdgeDistance distance,
const EdgeWeight duration,
const bool forward,
const bool backward)
: turn_id(turn_id), weight(weight), duration(duration), forward(forward),
backward(backward)
: turn_id(turn_id), weight(weight), distance(distance), duration(duration),
forward(forward), backward(backward)
{
}

NodeID turn_id; // ID of the edge based node (node based edge)
EdgeWeight weight;
EdgeDistance distance;
EdgeWeight duration : 30;
std::uint32_t forward : 1;
std::uint32_t backward : 1;
@@ -43,6 +48,7 @@ struct EdgeBasedEdge
const NodeID edge_id,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDistance distance,
const bool forward,
const bool backward);
EdgeBasedEdge(const NodeID source, const NodeID target, const EdgeBasedEdge::EdgeData &data);
@@ -53,7 +59,7 @@ struct EdgeBasedEdge
NodeID target;
EdgeData data;
};
static_assert(sizeof(extractor::EdgeBasedEdge) == 20,
static_assert(sizeof(extractor::EdgeBasedEdge) == 24,
"Size of extractor::EdgeBasedEdge type is "
"bigger than expected. This will influence "
"memory consumption.");
@@ -67,9 +73,10 @@ inline EdgeBasedEdge::EdgeBasedEdge(const NodeID source,
const NodeID turn_id,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDistance distance,
const bool forward,
const bool backward)
: source(source), target(target), data{turn_id, weight, duration, forward, backward}
: source(source), target(target), data{turn_id, weight, distance, duration, forward, backward}
{
}

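The assertion bump from 20 to 24 bytes is exactly the new 4-byte EdgeDistance field. A worked layout check, assuming the usual 32-bit NodeID, EdgeWeight and EdgeDistance typedefs:

// source (4) + target (4)                                       =  8 bytes
// EdgeData: turn_id (4) + weight (4) + distance (4)
//         + the duration:30 / forward:1 / backward:1 bitfield (4) = 16 bytes
// total                                                          = 24 bytes
static_assert(sizeof(extractor::EdgeBasedEdge) ==
                  2 * sizeof(NodeID) + sizeof(extractor::EdgeBasedEdge::EdgeData),
              "no padding expected between source/target and the edge data");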
@@ -89,9 +89,9 @@ class EdgeBasedGraphFactory
// The following get access functions destroy the content in the factory
void GetEdgeBasedEdges(util::DeallocatingVector<EdgeBasedEdge> &edges);
void GetEdgeBasedNodeSegments(std::vector<EdgeBasedNodeSegment> &nodes);
void GetStartPointMarkers(std::vector<bool> &node_is_startpoint);
void GetEdgeBasedNodeWeights(std::vector<EdgeWeight> &output_node_weights);
void GetEdgeBasedNodeDurations(std::vector<EdgeWeight> &output_node_durations);
void GetEdgeBasedNodeDistances(std::vector<EdgeDistance> &output_node_distances);
std::uint32_t GetConnectivityChecksum() const;

std::uint64_t GetNumberOfEdgeBasedNodes() const;
@@ -111,14 +111,11 @@ class EdgeBasedGraphFactory
std::vector<ConditionalTurnPenalty>
IndexConditionals(std::vector<Conditional> &&conditionals) const;

//! maps index from m_edge_based_node_list to true/false if the node is an entry point to the
//! graph
std::vector<bool> m_edge_based_node_is_startpoint;

//! node weights that indicate the length of the segment (node based) represented by the
//! edge-based node
std::vector<EdgeWeight> m_edge_based_node_weights;
std::vector<EdgeDuration> m_edge_based_node_durations;
std::vector<EdgeDistance> m_edge_based_node_distances;

//! list of edge based nodes (compressed segments)
std::vector<EdgeBasedNodeSegment> m_edge_based_node_segments;

@@ -22,7 +22,9 @@ struct EdgeBasedNodeSegment
EdgeBasedNodeSegment()
: forward_segment_id{SPECIAL_SEGMENTID, false},
reverse_segment_id{SPECIAL_SEGMENTID, false}, u(SPECIAL_NODEID), v(SPECIAL_NODEID),
fwd_segment_position(std::numeric_limits<unsigned short>::max())
fwd_segment_position(std::numeric_limits<unsigned short>::max() >>
1), // >> 1 because we've only got 15 bits
is_startpoint(false)
{
}

@@ -30,9 +32,10 @@ struct EdgeBasedNodeSegment
const SegmentID reverse_segment_id_,
NodeID u,
NodeID v,
unsigned short fwd_segment_position)
unsigned short fwd_segment_position,
bool is_startpoint_)
: forward_segment_id(forward_segment_id_), reverse_segment_id(reverse_segment_id_), u(u),
v(v), fwd_segment_position(fwd_segment_position)
v(v), fwd_segment_position(fwd_segment_position), is_startpoint(is_startpoint_)
{
BOOST_ASSERT(forward_segment_id.enabled || reverse_segment_id.enabled);
}
@@ -41,7 +44,8 @@ struct EdgeBasedNodeSegment
SegmentID reverse_segment_id; // edge-based graph node ID in reverse direction (v->u if exists)
NodeID u; // node-based graph node ID of the start node
NodeID v; // node-based graph node ID of the target node
unsigned short fwd_segment_position; // segment id in a compressed geometry
unsigned short fwd_segment_position : 15; // segment id in a compressed geometry
bool is_startpoint : 1;
};
}
}

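Packing is_startpoint next to the position keeps the segment at its old size, but the 15-bit field can no longer hold the 16-bit invalid marker, hence the >> 1 above. The invariant, spelled out:

// 15 bits cover positions 0..32767, so the sentinel shrinks from 0xFFFF
// to 0x7FFF:
static_assert((std::numeric_limits<unsigned short>::max() >> 1) == 0x7FFF,
              "15-bit fwd_segment_position sentinel");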
@@ -85,9 +85,9 @@ class Extractor
// output data
EdgeBasedNodeDataContainer &edge_based_nodes_container,
std::vector<EdgeBasedNodeSegment> &edge_based_node_segments,
std::vector<bool> &node_is_startpoint,
std::vector<EdgeWeight> &edge_based_node_weights,
std::vector<EdgeDuration> &edge_based_node_durations,
std::vector<EdgeDistance> &edge_based_node_distances,
util::DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list,
std::uint32_t &connectivity_checksum);

@@ -96,7 +96,6 @@ class Extractor
const std::vector<EdgeBasedNodeSegment> &input_node_segments,
EdgeBasedNodeDataContainer &nodes_container) const;
void BuildRTree(std::vector<EdgeBasedNodeSegment> edge_based_node_segments,
std::vector<bool> node_is_startpoint,
const std::vector<util::Coordinate> &coordinates);
std::shared_ptr<RestrictionMap> LoadRestrictionMap();


@@ -453,8 +453,8 @@ void writeNames(const boost::filesystem::path &path, const NameTableT &table)
serialization::write(writer, "/common/names", table);
}

template <typename NodeWeigtsVectorT>
void readEdgeBasedNodeWeights(const boost::filesystem::path &path, NodeWeigtsVectorT &weights)
template <typename NodeWeightsVectorT>
void readEdgeBasedNodeWeights(const boost::filesystem::path &path, NodeWeightsVectorT &weights)
{
const auto fingerprint = storage::tar::FileReader::VerifyFingerprint;
storage::tar::FileReader reader{path, fingerprint};
@@ -462,9 +462,33 @@ void readEdgeBasedNodeWeights(const boost::filesystem::path &path, NodeWeigtsVec
storage::serialization::read(reader, "/extractor/edge_based_node_weights", weights);
}

template <typename NodeWeigtsVectorT, typename NodeDurationsVectorT>
template <typename NodeDistancesVectorT>
void readEdgeBasedNodeDistances(const boost::filesystem::path &path,
NodeDistancesVectorT &distances)
{
const auto fingerprint = storage::tar::FileReader::VerifyFingerprint;
storage::tar::FileReader reader{path, fingerprint};

storage::serialization::read(reader, "/extractor/edge_based_node_distances", distances);
}

template <typename NodeWeightsVectorT, typename NodeDurationsVectorT, typename NodeDistancesVectorT>
void writeEdgeBasedNodeWeightsDurationsDistances(const boost::filesystem::path &path,
const NodeWeightsVectorT &weights,
const NodeDurationsVectorT &durations,
const NodeDistancesVectorT &distances)
{
const auto fingerprint = storage::tar::FileWriter::GenerateFingerprint;
storage::tar::FileWriter writer{path, fingerprint};

storage::serialization::write(writer, "/extractor/edge_based_node_weights", weights);
storage::serialization::write(writer, "/extractor/edge_based_node_durations", durations);
storage::serialization::write(writer, "/extractor/edge_based_node_distances", distances);
}

template <typename NodeWeightsVectorT, typename NodeDurationsVectorT>
void readEdgeBasedNodeWeightsDurations(const boost::filesystem::path &path,
NodeWeigtsVectorT &weights,
NodeWeightsVectorT &weights,
NodeDurationsVectorT &durations)
{
const auto fingerprint = storage::tar::FileReader::VerifyFingerprint;
@@ -474,9 +498,9 @@ void readEdgeBasedNodeWeightsDurations(const boost::filesystem::path &path,
storage::serialization::read(reader, "/extractor/edge_based_node_durations", durations);
}

template <typename NodeWeigtsVectorT, typename NodeDurationsVectorT>
template <typename NodeWeightsVectorT, typename NodeDurationsVectorT>
void writeEdgeBasedNodeWeightsDurations(const boost::filesystem::path &path,
const NodeWeigtsVectorT &weights,
const NodeWeightsVectorT &weights,
const NodeDurationsVectorT &durations)
{
const auto fingerprint = storage::tar::FileWriter::GenerateFingerprint;

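The new distances helpers mirror the fingerprinted tar pattern used for weights and durations. A sketch of a round trip through them, assuming they live in extractor::files like the surrounding header (the file name is illustrative):

// Illustrative: write all three vectors, read the distances back.
extractor::files::writeEdgeBasedNodeWeightsDurationsDistances(
    "map.osrm.enw", node_weights, node_durations, node_distances);

std::vector<EdgeDistance> distances;
extractor::files::readEdgeBasedNodeDistances("map.osrm.enw", distances);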
@@ -63,7 +63,7 @@ struct InternalExtractorEdge
WeightData weight_data,
DurationData duration_data,
util::Coordinate source_coordinate)
: result(source, target, 0, 0, {}, -1, {}), weight_data(std::move(weight_data)),
: result(source, target, 0, 0, 0, {}, -1, {}), weight_data(std::move(weight_data)),
duration_data(std::move(duration_data)), source_coordinate(std::move(source_coordinate))
{
}

@@ -97,6 +97,7 @@ struct NodeBasedEdge
NodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags);
@@ -107,6 +108,7 @@ struct NodeBasedEdge
NodeID target; // 32 4
EdgeWeight weight; // 32 4
EdgeDuration duration; // 32 4
EdgeDistance distance; // 32 4
GeometryID geometry_id; // 32 4
AnnotationID annotation_data; // 32 4
NodeBasedEdgeClassification flags; // 32 4
@@ -120,6 +122,7 @@ struct NodeBasedEdgeWithOSM : NodeBasedEdge
OSMNodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags);
@@ -137,7 +140,8 @@ inline NodeBasedEdgeClassification::NodeBasedEdgeClassification()
}

inline NodeBasedEdge::NodeBasedEdge()
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), weight(0), duration(0), annotation_data(-1)
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), weight(0), duration(0), distance(0),
annotation_data(-1)
{
}

@@ -145,11 +149,12 @@ inline NodeBasedEdge::NodeBasedEdge(NodeID source,
NodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags)
: source(source), target(target), weight(weight), duration(duration), geometry_id(geometry_id),
annotation_data(annotation_data), flags(flags)
: source(source), target(target), weight(weight), duration(duration), distance(distance),
geometry_id(geometry_id), annotation_data(annotation_data), flags(flags)
{
}

@@ -175,11 +180,18 @@ inline NodeBasedEdgeWithOSM::NodeBasedEdgeWithOSM(OSMNodeID source,
OSMNodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags)
: NodeBasedEdge(
SPECIAL_NODEID, SPECIAL_NODEID, weight, duration, geometry_id, annotation_data, flags),
: NodeBasedEdge(SPECIAL_NODEID,
SPECIAL_NODEID,
weight,
duration,
distance,
geometry_id,
annotation_data,
flags),
osm_source_id(std::move(source)), osm_target_id(std::move(target))
{
}
@@ -189,7 +201,7 @@ inline NodeBasedEdgeWithOSM::NodeBasedEdgeWithOSM()
{
}

static_assert(sizeof(extractor::NodeBasedEdge) == 28,
static_assert(sizeof(extractor::NodeBasedEdge) == 32,
"Size of extractor::NodeBasedEdge type is "
"bigger than expected. This will influence "
"memory consumption.");

@@ -2,6 +2,7 @@
#define OSRM_BINDINGS_NODE_SUPPORT_HPP

#include "nodejs/json_v8_renderer.hpp"
#include "util/json_renderer.hpp"

#include "osrm/approach.hpp"
#include "osrm/bearing.hpp"
@@ -24,6 +25,7 @@
#include <algorithm>
#include <iostream>
#include <iterator>
#include <sstream>
#include <string>
#include <vector>

@@ -42,6 +44,13 @@ using match_parameters_ptr = std::unique_ptr<osrm::MatchParameters>;
using nearest_parameters_ptr = std::unique_ptr<osrm::NearestParameters>;
using table_parameters_ptr = std::unique_ptr<osrm::TableParameters>;

struct PluginParameters
{
bool renderJSONToBuffer = false;
};

using ObjectOrString = typename mapbox::util::variant<osrm::json::Object, std::string>;

template <typename ResultT> inline v8::Local<v8::Value> render(const ResultT &result);

template <> v8::Local<v8::Value> inline render(const std::string &result)
@@ -49,12 +58,22 @@ template <> v8::Local<v8::Value> inline render(const std::string &result)
return Nan::CopyBuffer(result.data(), result.size()).ToLocalChecked();
}

template <> v8::Local<v8::Value> inline render(const osrm::json::Object &result)
template <> v8::Local<v8::Value> inline render(const ObjectOrString &result)
{
if (result.is<osrm::json::Object>())
{
// Convert osrm::json object tree into matching v8 object tree
v8::Local<v8::Value> value;
renderToV8(value, result);
renderToV8(value, result.get<osrm::json::Object>());
return value;
}
else
{
// Return the string object as a node Buffer
return Nan::CopyBuffer(result.get<std::string>().data(), result.get<std::string>().size())
.ToLocalChecked();
}
}

inline void ParseResult(const osrm::Status &result_status, osrm::json::Object &result)
{
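With the variant in place, a plugin result can arrive either as a parsed JSON tree or as a pre-rendered string. A sketch of producing the result under the renderJSONToBuffer flag; the render call is assumed from util/json_renderer.hpp, so treat the exact signature as an assumption:

// Sketch only: pick the representation the caller asked for.
ObjectOrString materialize(const osrm::json::Object &json, const PluginParameters &params)
{
    if (!params.renderJSONToBuffer)
        return json; // render() above turns it into a v8 object tree

    std::ostringstream buffer;
    osrm::util::json::render(buffer, json); // assumed renderer signature
    return buffer.str();                    // render() returns it as a node Buffer
}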
@@ -123,6 +142,10 @@ inline engine_config_ptr argumentsToEngineConfig(const Nan::FunctionCallbackInfo
if (shared_memory.IsEmpty())
return engine_config_ptr();

auto mmap_memory = params->Get(Nan::New("mmap_memory").ToLocalChecked());
if (mmap_memory.IsEmpty())
return engine_config_ptr();

if (!memory_file->IsUndefined())
{
if (path->IsUndefined())
@@ -171,6 +194,18 @@ inline engine_config_ptr argumentsToEngineConfig(const Nan::FunctionCallbackInfo
return engine_config_ptr();
}
}
if (!mmap_memory->IsUndefined())
{
if (mmap_memory->IsBoolean())
{
engine_config->use_mmap = Nan::To<bool>(mmap_memory).FromJust();
}
else
{
Nan::ThrowError("mmap_memory option must be a boolean");
return engine_config_ptr();
}
}

if (path->IsUndefined() && !engine_config->use_shared_memory)
{
@@ -814,6 +849,50 @@ inline bool parseCommonParameters(const v8::Local<v8::Object> &obj, ParamType &p
return true;
}

inline PluginParameters
argumentsToPluginParameters(const Nan::FunctionCallbackInfo<v8::Value> &args)
{
if (args.Length() < 3 || !args[1]->IsObject())
{
return {};
}
v8::Local<v8::Object> obj = Nan::To<v8::Object>(args[1]).ToLocalChecked();
if (obj->Has(Nan::New("format").ToLocalChecked()))
{

v8::Local<v8::Value> format = obj->Get(Nan::New("format").ToLocalChecked());
if (format.IsEmpty())
{
return {};
}

if (!format->IsString())
{
Nan::ThrowError("format must be a string: \"object\" or \"json_buffer\"");
return {};
}

const Nan::Utf8String format_utf8str(format);
std::string format_str{*format_utf8str, *format_utf8str + format_utf8str.length()};

if (format_str == "object")
{
return {false};
}
else if (format_str == "json_buffer")
{
return {true};
}
else
{
Nan::ThrowError("format must be a string: \"object\" or \"json_buffer\"");
return {};
}
}

return {};
}

inline route_parameters_ptr
argumentsToRouteParameter(const Nan::FunctionCallbackInfo<v8::Value> &args,
bool requires_multiple_coordinates)
@@ -1104,6 +1183,70 @@ argumentsToTableParameter(const Nan::FunctionCallbackInfo<v8::Value> &args,
}
}

if (obj->Has(Nan::New("fallback_speed").ToLocalChecked()))
{
auto fallback_speed = obj->Get(Nan::New("fallback_speed").ToLocalChecked());

if (!fallback_speed->IsNumber())
{
Nan::ThrowError("fallback_speed must be a number");
return table_parameters_ptr();
}
else if (fallback_speed->NumberValue() <= 0)
{
Nan::ThrowError("fallback_speed must be > 0");
return table_parameters_ptr();
}

params->fallback_speed = static_cast<double>(fallback_speed->NumberValue());
}

if (obj->Has(Nan::New("fallback_coordinate").ToLocalChecked()))
{
auto fallback_coordinate = obj->Get(Nan::New("fallback_coordinate").ToLocalChecked());

if (!fallback_coordinate->IsString())
{
Nan::ThrowError("fallback_coordinate must be a string: [input, snapped]");
return table_parameters_ptr();
}

std::string fallback_coordinate_str = *v8::String::Utf8Value(fallback_coordinate);

if (fallback_coordinate_str == "snapped")
{
params->fallback_coordinate_type =
osrm::TableParameters::FallbackCoordinateType::Snapped;
}
else if (fallback_coordinate_str == "input")
{
params->fallback_coordinate_type = osrm::TableParameters::FallbackCoordinateType::Input;
}
else
{
Nan::ThrowError("'fallback_coordinate' param must be one of [input, snapped]");
return table_parameters_ptr();
}
}

if (obj->Has(Nan::New("scale_factor").ToLocalChecked()))
{
auto scale_factor = obj->Get(Nan::New("scale_factor").ToLocalChecked());

if (!scale_factor->IsNumber())
{
Nan::ThrowError("scale_factor must be a number");
return table_parameters_ptr();
}
else if (scale_factor->NumberValue() <= 0)
{
Nan::ThrowError("scale_factor must be > 0");
return table_parameters_ptr();
}

params->scale_factor = static_cast<double>(scale_factor->NumberValue());
}

return params;
}

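Semantically, both knobs post-process the duration table: fallback_speed fills unreachable pairs from a straight-line (or snapped) distance estimate, and scale_factor multiplies every duration. A worked sketch of that interpretation; the loop is illustrative, not the engine's code path, and the units are assumptions:

// Illustrative post-processing of a duration matrix, assuming fallback_speed
// is in m/s and crow_fly_distances holds great-circle estimates in meters.
void apply_table_fallbacks(std::vector<double> &durations,
                           const std::vector<double> &crow_fly_distances,
                           double fallback_speed,
                           double scale_factor)
{
    for (std::size_t i = 0; i < durations.size(); ++i)
    {
        if (durations[i] < 0) // no route between this source/destination pair
            durations[i] = crow_fly_distances[i] / fallback_speed;
        durations[i] *= scale_factor;
    }
}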
@@ -1357,6 +1500,6 @@ argumentsToMatchParameter(const Nan::FunctionCallbackInfo<v8::Value> &args,
return params;
}

} // ns node_osrm
} // namespace node_osrm

#endif

@@ -75,28 +75,35 @@ template <storage::Ownership Ownership> class CellStorageImpl

// Implementation of the cell view. We need a template parameter here
// because we need to derive a read-only and read-write view from this.
template <typename WeightValueT, typename DurationValueT> class CellImpl
template <typename WeightValueT, typename DurationValueT, typename DistanceValueT>
class CellImpl
{
private:
using WeightPtrT = WeightValueT *;
using DurationPtrT = DurationValueT *;
using DistancePtrT = DistanceValueT *;
BoundarySize num_source_nodes;
BoundarySize num_destination_nodes;

WeightPtrT const weights;
DurationPtrT const durations;
DistancePtrT const distances;
const NodeID *const source_boundary;
const NodeID *const destination_boundary;

using RowIterator = WeightPtrT;
// Possibly replace with
// http://www.boost.org/doc/libs/1_55_0/libs/range/doc/html/range/reference/adaptors/reference/strided.html
class ColumnIterator : public boost::iterator_facade<ColumnIterator,
WeightValueT,

template <typename ValuePtrT>
class ColumnIterator : public boost::iterator_facade<ColumnIterator<ValuePtrT>,
decltype(*std::declval<ValuePtrT>()),
boost::random_access_traversal_tag>
{
typedef boost::iterator_facade<ColumnIterator,
WeightValueT,

using ValueT = decltype(*std::declval<ValuePtrT>());
typedef boost::iterator_facade<ColumnIterator<ValueT>,
ValueT,
boost::random_access_traversal_tag>
base_t;

@@ -108,7 +115,7 @@ template <storage::Ownership Ownership> class CellStorageImpl

explicit ColumnIterator() : current(nullptr), stride(1) {}

explicit ColumnIterator(WeightPtrT begin, std::size_t row_length)
explicit ColumnIterator(ValuePtrT begin, std::size_t row_length)
: current(begin), stride(row_length)
{
BOOST_ASSERT(begin != nullptr);
@@ -126,7 +133,7 @@ template <storage::Ownership Ownership> class CellStorageImpl
}

friend class ::boost::iterator_core_access;
WeightPtrT current;
ValuePtrT current;
const std::size_t stride;
};

@@ -147,12 +154,13 @@ template <storage::Ownership Ownership> class CellStorageImpl
auto iter =
std::find(destination_boundary, destination_boundary + num_destination_nodes, node);
if (iter == destination_boundary + num_destination_nodes)
return boost::make_iterator_range(ColumnIterator{}, ColumnIterator{});
return boost::make_iterator_range(ColumnIterator<ValuePtr>{},
ColumnIterator<ValuePtr>{});

auto column = std::distance(destination_boundary, iter);
auto begin = ColumnIterator{ptr + column, num_destination_nodes};
auto end = ColumnIterator{ptr + column + num_source_nodes * num_destination_nodes,
num_destination_nodes};
auto begin = ColumnIterator<ValuePtr>{ptr + column, num_destination_nodes};
auto end = ColumnIterator<ValuePtr>{
ptr + column + num_source_nodes * num_destination_nodes, num_destination_nodes};
return boost::make_iterator_range(begin, end);
}

@@ -165,6 +173,10 @@ template <storage::Ownership Ownership> class CellStorageImpl

auto GetInDuration(NodeID node) const { return GetInRange(durations, node); }

auto GetInDistance(NodeID node) const { return GetInRange(distances, node); }

auto GetOutDistance(NodeID node) const { return GetOutRange(distances, node); }

auto GetSourceNodes() const
{
return boost::make_iterator_range(source_boundary, source_boundary + num_source_nodes);
@@ -179,17 +191,20 @@ template <storage::Ownership Ownership> class CellStorageImpl
CellImpl(const CellData &data,
WeightPtrT const all_weights,
DurationPtrT const all_durations,
DistancePtrT const all_distances,
const NodeID *const all_sources,
const NodeID *const all_destinations)
: num_source_nodes{data.num_source_nodes},
num_destination_nodes{data.num_destination_nodes},
weights{all_weights + data.value_offset},
durations{all_durations + data.value_offset},
distances{all_distances + data.value_offset},
source_boundary{all_sources + data.source_boundary_offset},
destination_boundary{all_destinations + data.destination_boundary_offset}
{
BOOST_ASSERT(all_weights != nullptr);
BOOST_ASSERT(all_durations != nullptr);
BOOST_ASSERT(all_distances != nullptr);
BOOST_ASSERT(num_source_nodes == 0 || all_sources != nullptr);
BOOST_ASSERT(num_destination_nodes == 0 || all_destinations != nullptr);
}
@@ -201,7 +216,8 @@ template <storage::Ownership Ownership> class CellStorageImpl
const NodeID *const all_destinations)
: num_source_nodes{data.num_source_nodes},
num_destination_nodes{data.num_destination_nodes}, weights{nullptr},
durations{nullptr}, source_boundary{all_sources + data.source_boundary_offset},
durations{nullptr}, distances{nullptr},
source_boundary{all_sources + data.source_boundary_offset},
destination_boundary{all_destinations + data.destination_boundary_offset}
{
BOOST_ASSERT(num_source_nodes == 0 || all_sources != nullptr);
@@ -212,8 +228,8 @@ template <storage::Ownership Ownership> class CellStorageImpl
std::size_t LevelIDToIndex(LevelID level) const { return level - 1; }

public:
using Cell = CellImpl<EdgeWeight, EdgeDuration>;
using ConstCell = CellImpl<const EdgeWeight, const EdgeDuration>;
using Cell = CellImpl<EdgeWeight, EdgeDuration, EdgeDistance>;
using ConstCell = CellImpl<const EdgeWeight, const EdgeDuration, const EdgeDistance>;

CellStorageImpl() {}

@@ -361,6 +377,7 @@ template <storage::Ownership Ownership> class CellStorageImpl

metric.weights.resize(total_size + 1, INVALID_EDGE_WEIGHT);
metric.durations.resize(total_size + 1, MAXIMAL_EDGE_DURATION);
metric.distances.resize(total_size + 1, INVALID_EDGE_DISTANCE);

return metric;
}
@@ -388,6 +405,7 @@ template <storage::Ownership Ownership> class CellStorageImpl
return ConstCell{cells[cell_index],
metric.weights.data(),
metric.durations.data(),
metric.distances.data(),
source_boundary.empty() ? nullptr : source_boundary.data(),
destination_boundary.empty() ? nullptr : destination_boundary.data()};
}
@@ -415,6 +433,7 @@ template <storage::Ownership Ownership> class CellStorageImpl
return Cell{cells[cell_index],
metric.weights.data(),
metric.durations.data(),
metric.distances.data(),
source_boundary.data(),
destination_boundary.data()};
}

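Cells expose distances through the same row/column ranges as weights and durations, so callers can walk them identically. For example, with a cell as returned by GetCell above (the accumulation itself is illustrative):

// Sum the stored distances from one source boundary node of a cell.
EdgeDistance total = 0;
for (const auto distance : cell.GetOutDistance(source))
    total += distance;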
@@ -43,6 +43,7 @@ splitBidirectionalEdges(const std::vector<extractor::EdgeBasedEdge> &edges)
edge.data.turn_id,
std::max(edge.data.weight, 1),
edge.data.duration,
edge.data.distance,
edge.data.forward,
edge.data.backward);

@@ -51,6 +52,7 @@ splitBidirectionalEdges(const std::vector<extractor::EdgeBasedEdge> &edges)
edge.data.turn_id,
std::max(edge.data.weight, 1),
edge.data.duration,
edge.data.distance,
edge.data.backward,
edge.data.forward);
}

@@ -178,6 +178,8 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
qi::rule<Iterator, Signature> base_rule;
qi::rule<Iterator, Signature> query_rule;

qi::real_parser<double, json_policy> double_;

private:
qi::rule<Iterator, Signature> bearings_rule;
qi::rule<Iterator, Signature> radiuses_rule;
@@ -195,7 +197,6 @@ struct BaseParametersGrammar : boost::spirit::qi::grammar<Iterator, Signature>
qi::rule<Iterator, unsigned char()> base64_char;
qi::rule<Iterator, std::string()> polyline_chars;
qi::rule<Iterator, double()> unlimited_rule;
qi::real_parser<double, json_policy> double_;

qi::symbols<char, engine::Approach> approach_type;
};

@@ -48,10 +48,28 @@ struct TableParametersGrammar : public BaseParametersGrammar<Iterator, Signature
(qi::lit("all") |
(size_t_ % ';')[ph::bind(&engine::api::TableParameters::sources, qi::_r1) = qi::_1]);

fallback_speed_rule =
qi::lit("fallback_speed=") >
(double_)[ph::bind(&engine::api::TableParameters::fallback_speed, qi::_r1) = qi::_1];

fallback_coordinate_type.add("input",
engine::api::TableParameters::FallbackCoordinateType::Input)(
"snapped", engine::api::TableParameters::FallbackCoordinateType::Snapped);

scale_factor_rule =
qi::lit("scale_factor=") >
(double_)[ph::bind(&engine::api::TableParameters::scale_factor, qi::_r1) = qi::_1];

table_rule = destinations_rule(qi::_r1) | sources_rule(qi::_r1);

root_rule = BaseGrammar::query_rule(qi::_r1) > -qi::lit(".json") >
-('?' > (table_rule(qi::_r1) | base_rule(qi::_r1)) % '&');
-('?' > (table_rule(qi::_r1) | base_rule(qi::_r1) | scale_factor_rule(qi::_r1) |
fallback_speed_rule(qi::_r1) |
(qi::lit("fallback_coordinate=") >
fallback_coordinate_type
[ph::bind(&engine::api::TableParameters::fallback_coordinate_type,
qi::_r1) = qi::_1])) %
'&');
}

TableParametersGrammar(qi::rule<Iterator, Signature> &root_rule_) : BaseGrammar(root_rule_)
@@ -73,13 +91,20 @@ struct TableParametersGrammar : public BaseParametersGrammar<Iterator, Signature
qi::rule<Iterator, Signature> base_rule;

private:
using json_policy = no_trailing_dot_policy<double, 'j', 's', 'o', 'n'>;

qi::rule<Iterator, Signature> root_rule;
qi::rule<Iterator, Signature> table_rule;
qi::rule<Iterator, Signature> sources_rule;
qi::rule<Iterator, Signature> destinations_rule;
qi::rule<Iterator, Signature> fallback_speed_rule;
qi::rule<Iterator, Signature> scale_factor_rule;
qi::rule<Iterator, std::size_t()> size_t_;
qi::symbols<char, engine::api::TableParameters::AnnotationsType> annotations;
qi::rule<Iterator, engine::api::TableParameters::AnnotationsType()> annotations_list;
qi::symbols<char, engine::api::TableParameters::FallbackCoordinateType>
fallback_coordinate_type;
qi::real_parser<double, json_policy> double_;
};
}
}

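With the extended root rule, the new options ride along as ordinary query parameters on the table service; a request the grammar above now accepts (coordinates shortened for readability):

// GET /table/v1/driving/13.388,52.517;13.397,52.529;13.428,52.523
//        ?sources=0&fallback_speed=5&fallback_coordinate=snapped&scale_factor=1.2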
@@ -16,10 +16,15 @@ struct Block
{
std::uint64_t num_entries;
std::uint64_t byte_size;
std::uint64_t offset;

Block() : num_entries(0), byte_size(0) {}
Block() : num_entries(0), byte_size(0), offset(0) {}
Block(std::uint64_t num_entries, std::uint64_t byte_size, std::uint64_t offset)
: num_entries(num_entries), byte_size(byte_size), offset(offset)
{
}
Block(std::uint64_t num_entries, std::uint64_t byte_size)
: num_entries(num_entries), byte_size(byte_size)
: num_entries(num_entries), byte_size(byte_size), offset(0)
{
}
};
@@ -29,7 +34,7 @@ using NamedBlock = std::tuple<std::string, Block>;
template <typename T> Block make_block(uint64_t num_entries)
{
static_assert(sizeof(T) % alignof(T) == 0, "aligned T* can't be used as an array pointer");
return Block{num_entries, sizeof(T) * num_entries};
return Block{num_entries, sizeof(T) * num_entries, 0};
}
}
}

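The third Block field records where a block starts relative to some base address, which is what lets a layout point straight into a memory-mapped tar file instead of packing blocks back to back; a sketch:

// Illustrative: a block of num_entries T values located at a known byte
// offset inside a mapped archive.
template <typename T>
Block make_offset_block(std::uint64_t num_entries, std::uint64_t offset)
{
    return Block{num_entries, sizeof(T) * num_entries, offset};
}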
@@ -9,6 +9,7 @@
#include "storage/shared_datatype.hpp"
#include "storage/tar.hpp"

#include <boost/assert.hpp>
#include <boost/function_output_iterator.hpp>
#include <boost/iterator/function_input_iterator.hpp>

@@ -30,22 +31,37 @@ namespace serialization
namespace detail
{
template <typename T, typename BlockT = unsigned char>
inline BlockT packBits(const T &data, std::size_t index, std::size_t count)
inline BlockT packBits(const T &data, std::size_t base_index, const std::size_t count)
{
static_assert(std::is_same<typename T::value_type, bool>::value, "value_type is not bool");
static_assert(std::is_unsigned<BlockT>::value, "BlockT must be unsigned type");
static_assert(std::is_integral<BlockT>::value, "BlockT must be an integral type");
static_assert(CHAR_BIT == 8, "Non-8-bit bytes not supported, sorry!");
BOOST_ASSERT(sizeof(BlockT) * CHAR_BIT >= count);

// Note: if this packing is changed, be sure to update vector_view<bool>
// as well, so that on-disk and in-memory layouts match.
BlockT value = 0;
for (std::size_t bit = 0; bit < count; ++bit, ++index)
value = (value << 1) | data[index];
for (std::size_t bit = 0; bit < count; ++bit)
{
value |= (data[base_index + bit] ? BlockT{1} : BlockT{0}) << bit;
}
return value;
}

template <typename T, typename BlockT = unsigned char>
inline void unpackBits(T &data, std::size_t index, std::size_t count, BlockT value)
inline void
unpackBits(T &data, const std::size_t base_index, const std::size_t count, const BlockT value)
{
static_assert(std::is_same<typename T::value_type, bool>::value, "value_type is not bool");
const BlockT mask = BlockT{1} << (count - 1);
for (std::size_t bit = 0; bit < count; value <<= 1, ++bit, ++index)
data[index] = value & mask;
static_assert(std::is_unsigned<BlockT>::value, "BlockT must be unsigned type");
static_assert(std::is_integral<BlockT>::value, "BlockT must be an integral type");
static_assert(CHAR_BIT == 8, "Non-8-bit bytes not supported, sorry!");
BOOST_ASSERT(sizeof(BlockT) * CHAR_BIT >= count);
for (std::size_t bit = 0; bit < count; ++bit)
{
data[base_index + bit] = value & (BlockT{1} << bit);
}
}

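Note the bit order flips from MSB-first to LSB-first: bit i of a block now holds data[base_index + i], matching vector_view<bool>. A quick roundtrip under the new convention, as it would be called from within the surrounding namespace:

// 4 bits, LSB-first: {1,0,1,1} packs to 0b1101 == 13.
std::vector<bool> bits{true, false, true, true};
const auto block = detail::packBits<std::vector<bool>, unsigned char>(bits, 0, 4);
assert(block == 0b1101);

std::vector<bool> out(4);
detail::unpackBits<std::vector<bool>, unsigned char>(out, 0, 4, block);
assert(out == bits);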
template <typename VectorT>
@@ -55,15 +71,16 @@ void readBoolVector(tar::FileReader &reader, const std::string &name, VectorT &d
data.resize(count);
std::uint64_t index = 0;

constexpr std::uint64_t WORD_BITS = CHAR_BIT * sizeof(std::uint64_t);
using BlockType = std::uint64_t;
constexpr std::uint64_t BLOCK_BITS = CHAR_BIT * sizeof(BlockType);

const auto decode = [&](const std::uint64_t block) {
auto read_size = std::min<std::size_t>(count - index, WORD_BITS);
unpackBits<VectorT, std::uint64_t>(data, index, read_size, block);
index += WORD_BITS;
const auto decode = [&](const BlockType block) {
auto read_size = std::min<std::size_t>(count - index, BLOCK_BITS);
unpackBits<VectorT, BlockType>(data, index, read_size, block);
index += BLOCK_BITS;
};

reader.ReadStreaming<std::uint64_t>(name, boost::make_function_output_iterator(decode));
reader.ReadStreaming<BlockType>(name, boost::make_function_output_iterator(decode));
}

template <typename VectorT>
@@ -73,19 +90,20 @@ void writeBoolVector(tar::FileWriter &writer, const std::string &name, const Vec
writer.WriteElementCount64(name, count);
std::uint64_t index = 0;

constexpr std::uint64_t WORD_BITS = CHAR_BIT * sizeof(std::uint64_t);
using BlockType = std::uint64_t;
constexpr std::uint64_t BLOCK_BITS = CHAR_BIT * sizeof(BlockType);

// FIXME on old boost version the function_input_iterator does not work with lambdas
// so we need to wrap it in a function here.
const std::function<std::uint64_t()> encode_function = [&]() -> std::uint64_t {
auto write_size = std::min<std::size_t>(count - index, WORD_BITS);
auto packed = packBits<VectorT, std::uint64_t>(data, index, write_size);
index += WORD_BITS;
const std::function<BlockType()> encode_function = [&]() -> BlockType {
auto write_size = std::min<std::size_t>(count - index, BLOCK_BITS);
auto packed = packBits<VectorT, BlockType>(data, index, write_size);
index += BLOCK_BITS;
return packed;
};

std::uint64_t number_of_blocks = (count + WORD_BITS - 1) / WORD_BITS;
writer.WriteStreaming<std::uint64_t>(
std::uint64_t number_of_blocks = (count + BLOCK_BITS - 1) / BLOCK_BITS;
writer.WriteStreaming<BlockType>(
name,
boost::make_function_input_iterator(encode_function, boost::infinite()),
number_of_blocks);
@@ -266,9 +284,9 @@ template <typename K, typename V> void write(io::BufferWriter &writer, const std
}
}

inline void read(io::BufferReader &reader, DataLayout &layout) { read(reader, layout.blocks); }
inline void read(io::BufferReader &reader, BaseDataLayout &layout) { read(reader, layout.blocks); }

inline void write(io::BufferWriter &writer, const DataLayout &layout)
inline void write(io::BufferWriter &writer, const BaseDataLayout &layout)
{
write(writer, layout.blocks);
}

@@ -5,6 +5,7 @@

#include <boost/function_output_iterator.hpp>

#include <type_traits>
#include <unordered_map>

namespace osrm
@@ -19,8 +20,8 @@ class SharedDataIndex
public:
struct AllocatedRegion
{
char *memory_ptr;
DataLayout layout;
void *memory_ptr;
std::unique_ptr<BaseDataLayout> layout;
};

SharedDataIndex() = default;
@@ -29,7 +30,7 @@ class SharedDataIndex
// Build mapping from block name to region
for (auto index : util::irange<std::uint32_t>(0, regions.size()))
{
regions[index].layout.List("",
regions[index].layout->List("",
boost::make_function_output_iterator([&](const auto &name) {
block_to_region[name] = index;
}));
@@ -40,32 +41,44 @@ class SharedDataIndex
{
for (const auto &region : regions)
{
region.layout.List(name_prefix, out);
region.layout->List(name_prefix, out);
}
}

template <typename T> auto GetBlockPtr(const std::string &name) const
{
#if !defined(__GNUC__) || (__GNUC__ > 4)
// is_trivially_copyable only exists in GCC >=5
static_assert(std::is_trivially_copyable<T>::value,
"Block-based data must be a trivially copyable type");
static_assert(sizeof(T) % alignof(T) == 0, "aligned T* can't be used as an array pointer");
#endif
const auto &region = GetBlockRegion(name);
return region.layout.GetBlockPtr<T>(region.memory_ptr, name);
return reinterpret_cast<T *>(region.layout->GetBlockPtr(region.memory_ptr, name));
}

template <typename T> auto GetBlockPtr(const std::string &name)
{
#if !defined(__GNUC__) || (__GNUC__ > 4)
// is_trivially_copyable only exists in GCC >=5
static_assert(std::is_trivially_copyable<T>::value,
"Block-based data must be a trivially copyable type");
static_assert(sizeof(T) % alignof(T) == 0, "aligned T* can't be used as an array pointer");
#endif
const auto &region = GetBlockRegion(name);
return region.layout.GetBlockPtr<T>(region.memory_ptr, name);
return reinterpret_cast<T *>(region.layout->GetBlockPtr(region.memory_ptr, name));
}

std::size_t GetBlockEntries(const std::string &name) const
{
const auto &region = GetBlockRegion(name);
return region.layout.GetBlockEntries(name);
return region.layout->GetBlockEntries(name);
}

std::size_t GetBlockSize(const std::string &name) const
{
const auto &region = GetBlockRegion(name);
return region.layout.GetBlockSize(name);
return region.layout->GetBlockSize(name);
}

private:

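Callers of SharedDataIndex are unaffected by the virtual dispatch: the typed cast moved from the layout into the index, as the reinterpret_cast above shows. Typical use stays one line per block (the block name here is illustrative):

const EdgeWeight *weights = index.GetBlockPtr<EdgeWeight>("/mld/metrics/0/weights");
const std::size_t n = index.GetBlockEntries("/mld/metrics/0/weights");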
@@ -20,13 +20,13 @@ namespace osrm
namespace storage
{

class DataLayout;
class BaseDataLayout;
namespace serialization
{
inline void read(io::BufferReader &reader, DataLayout &layout);
inline void read(io::BufferReader &reader, BaseDataLayout &layout);

inline void write(io::BufferWriter &writer, const DataLayout &layout);
}
inline void write(io::BufferWriter &writer, const BaseDataLayout &layout);
} // namespace serialization

namespace detail
{
@@ -52,46 +52,30 @@ inline std::string trimName(const std::string &name_prefix, const std::string &n
return name;
}
}
}
} // namespace detail

class DataLayout
class BaseDataLayout
{
public:
DataLayout() : blocks{} {}
virtual ~BaseDataLayout() = default;

inline void SetBlock(const std::string &name, Block block) { blocks[name] = std::move(block); }

inline uint64_t GetBlockEntries(const std::string &name) const
inline std::uint64_t GetBlockEntries(const std::string &name) const
{
return GetBlock(name).num_entries;
}

inline uint64_t GetBlockSize(const std::string &name) const { return GetBlock(name).byte_size; }
inline std::uint64_t GetBlockSize(const std::string &name) const
{
return GetBlock(name).byte_size;
}

inline bool HasBlock(const std::string &name) const
{
return blocks.find(name) != blocks.end();
}

inline uint64_t GetSizeOfLayout() const
{
uint64_t result = 0;
for (const auto &name_and_block : blocks)
{
result += GetBlockSize(name_and_block.first) + BLOCK_ALIGNMENT;
}
return result;
}

template <typename T> inline T *GetBlockPtr(char *shared_memory, const std::string &name) const
{
static_assert(BLOCK_ALIGNMENT % std::alignment_of<T>::value == 0,
"Datatype does not fit alignment constraints.");

char *ptr = (char *)GetAlignedBlockPtr(shared_memory, name);
return (T *)ptr;
}

// Depending on the name prefix this function either lists all blocks with the same prefix
// or all entries in the sub-directory.
// '/ch/edge' -> '/ch/edge_filter/0/blocks', '/ch/edge_filter/1/blocks'
@@ -115,10 +99,10 @@ class DataLayout
}
}

private:
friend void serialization::read(io::BufferReader &reader, DataLayout &layout);
friend void serialization::write(io::BufferWriter &writer, const DataLayout &layout);
virtual inline void *GetBlockPtr(void *base_ptr, const std::string &name) const = 0;
virtual inline std::uint64_t GetSizeOfLayout() const = 0;

protected:
const Block &GetBlock(const std::string &name) const
{
auto iter = blocks.find(name);
@@ -130,10 +114,42 @@ class DataLayout
return iter->second;
}

friend void serialization::read(io::BufferReader &reader, BaseDataLayout &layout);
friend void serialization::write(io::BufferWriter &writer, const BaseDataLayout &layout);

std::map<std::string, Block> blocks;
};

class ContiguousDataLayout final : public BaseDataLayout
{
public:
inline std::uint64_t GetSizeOfLayout() const override final
{
std::uint64_t result = 0;
for (const auto &name_and_block : blocks)
{
result += GetBlockSize(name_and_block.first) + BLOCK_ALIGNMENT;
}
return result;
}

inline void *GetBlockPtr(void *base_ptr, const std::string &name) const override final
{
// TODO: re-enable this alignment checking somehow
// static_assert(BLOCK_ALIGNMENT % std::alignment_of<T>::value == 0,
// "Datatype does not fit alignment constraints.");

return GetAlignedBlockPtr(base_ptr, name);
}

private:
friend void serialization::read(io::BufferReader &reader, BaseDataLayout &layout);
friend void serialization::write(io::BufferWriter &writer, const BaseDataLayout &layout);

// Fit aligned storage in buffer to 64 bytes to conform with AVX 512 types
inline void *align(void *&ptr) const noexcept
{
const auto intptr = reinterpret_cast<uintptr_t>(ptr);
const auto intptr = reinterpret_cast<std::uintptr_t>(ptr);
const auto aligned = (intptr - 1u + BLOCK_ALIGNMENT) & -BLOCK_ALIGNMENT;
return ptr = reinterpret_cast<void *>(aligned);
}
@@ -157,7 +173,27 @@ class DataLayout
}

static constexpr std::size_t BLOCK_ALIGNMENT = 64;
std::map<std::string, Block> blocks;
};

class TarDataLayout final : public BaseDataLayout
{
public:
inline std::uint64_t GetSizeOfLayout() const override final
{
std::uint64_t result = 0;
for (const auto &name_and_block : blocks)
{
result += GetBlockSize(name_and_block.first);
}
return result;
}

inline void *GetBlockPtr(void *base_ptr, const std::string &name) const override final
{
auto offset = GetBlock(name).offset;
const auto offset_address = reinterpret_cast<std::uintptr_t>(base_ptr) + offset;
return reinterpret_cast<void *>(offset_address);
}
};

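The two concrete layouts differ only in how a block name maps to an address: ContiguousDataLayout packs aligned blocks back to back, while TarDataLayout trusts the per-block offset recorded from the archive. A sketch of picking one at runtime, with ownership through the base class as in AllocatedRegion above (use_mmap and num_nodes are illustrative):

// Sketch only: the index sees BaseDataLayout, so either strategy plugs in.
std::unique_ptr<BaseDataLayout> layout;
if (use_mmap) // e.g. the mmap_memory engine option earlier in this commit
    layout = std::make_unique<TarDataLayout>();        // blocks at tar offsets
else
    layout = std::make_unique<ContiguousDataLayout>(); // packed, 64-byte aligned
layout->SetBlock("/extractor/edge_based_node_distances",
                 make_block<EdgeDistance>(num_nodes));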
struct SharedRegion
@@ -165,7 +201,7 @@
static constexpr const int MAX_NAME_LENGTH = 254;

SharedRegion() : name{0}, timestamp{0} {}
SharedRegion(const std::string &name_, std::uint64_t timestamp, std::uint8_t shm_key)
SharedRegion(const std::string &name_, std::uint64_t timestamp, std::uint16_t shm_key)
: name{0}, timestamp{timestamp}, shm_key{shm_key}
{
std::copy_n(name_.begin(), std::min<std::size_t>(MAX_NAME_LENGTH, name_.size()), name);
@@ -175,14 +211,14 @@ struct SharedRegion

char name[MAX_NAME_LENGTH + 1];
std::uint64_t timestamp;
std::uint8_t shm_key;
std::uint16_t shm_key;
};

// Keeps a list of all shared regions in a fixed-sized struct
// for fast access and deserialization.
struct SharedRegionRegister
{
using RegionID = std::uint8_t;
using RegionID = std::uint16_t;
static constexpr const RegionID INVALID_REGION_ID = std::numeric_limits<RegionID>::max();
using ShmKey = decltype(SharedRegion::shm_key);

@@ -250,12 +286,11 @@ struct SharedRegionRegister

void ReleaseKey(ShmKey key) { shm_key_in_use[key] = false; }

static constexpr const std::uint8_t MAX_SHARED_REGIONS =
std::numeric_limits<RegionID>::max() - 1;
static constexpr const std::size_t MAX_SHARED_REGIONS = 512;
static_assert(MAX_SHARED_REGIONS < std::numeric_limits<RegionID>::max(),
"Number of shared memory regions needs to be less than the region id size.");

static constexpr const std::uint8_t MAX_SHM_KEYS = std::numeric_limits<std::uint8_t>::max() - 1;
static constexpr const std::size_t MAX_SHM_KEYS = MAX_SHARED_REGIONS * 2;

static constexpr const char *name = "osrm-region";

@@ -263,7 +298,7 @@ struct SharedRegionRegister
std::array<SharedRegion, MAX_SHARED_REGIONS> regions;
std::array<bool, MAX_SHM_KEYS> shm_key_in_use;
};
}
}
} // namespace storage
} // namespace osrm

#endif /* SHARED_DATA_TYPE_HPP */

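Widening RegionID and shm_key to 16 bits is what allows growing past the old 255-entry ceiling; 512 regions is then a deliberate cap rather than a type limit. Doubling it for MAX_SHM_KEYS presumably leaves one key for the live and one for a staged dataset per region; that reading is an assumption, not stated in the commit:

static_assert(SharedRegionRegister::MAX_SHM_KEYS ==
                  2 * SharedRegionRegister::MAX_SHARED_REGIONS,
              "two shm keys available per shared region");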
@@ -34,10 +34,10 @@ namespace storage

struct OSRMLockFile
{
boost::filesystem::path operator()()
template <typename IdentifierT> boost::filesystem::path operator()(const IdentifierT &id)
{
boost::filesystem::path temp_dir = boost::filesystem::temp_directory_path();
boost::filesystem::path lock_file = temp_dir / "osrm.lock";
boost::filesystem::path lock_file = temp_dir / ("osrm-" + std::to_string(id) + ".lock");
return lock_file;
}
};
@@ -93,7 +93,7 @@ class SharedMemory
try
{
OSRMLockFile lock_file;
boost::interprocess::xsi_key key(lock_file().string().c_str(), id);
boost::interprocess::xsi_key key(lock_file(id).string().c_str(), id);
result = RegionExists(key);
}
catch (...)
@@ -106,7 +106,7 @@ class SharedMemory
template <typename IdentifierT> static bool Remove(const IdentifierT id)
{
OSRMLockFile lock_file;
boost::interprocess::xsi_key key(lock_file().string().c_str(), id);
boost::interprocess::xsi_key key(lock_file(id).string().c_str(), id);
return Remove(key);
}

@@ -287,10 +287,11 @@ class SharedMemory
template <typename IdentifierT, typename LockFileT = OSRMLockFile>
std::unique_ptr<SharedMemory> makeSharedMemory(const IdentifierT &id, const uint64_t size = 0)
{
static_assert(sizeof(id) == sizeof(std::uint16_t), "Key type is not 16 bits");
try
{
LockFileT lock_file;
if (!boost::filesystem::exists(lock_file()))
if (!boost::filesystem::exists(lock_file(id)))
{
if (0 == size)
{
@@ -298,10 +299,10 @@ std::unique_ptr<SharedMemory> makeSharedMemory(const IdentifierT &id, const uint
}
else
{
boost::filesystem::ofstream ofs(lock_file());
boost::filesystem::ofstream ofs(lock_file(id));
}
}
return std::make_unique<SharedMemory>(lock_file(), id, size);
return std::make_unique<SharedMemory>(lock_file(id), id, size);
}
catch (const boost::interprocess::interprocess_exception &e)
{
@@ -310,7 +311,7 @@ std::unique_ptr<SharedMemory> makeSharedMemory(const IdentifierT &id, const uint
throw util::exception(e.what() + SOURCE_REF);
}
}
}
}
} // namespace storage
} // namespace osrm

#endif // SHARED_MEMORY_HPP

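Deriving the lock file from the identifier gives each region its own xsi_key source instead of one global osrm.lock serializing unrelated datasets. Roughly what the path construction yields (the temp directory varies by platform):

OSRMLockFile lock_file;
auto path = lock_file(std::uint16_t{3}); // e.g. /tmp/osrm-3.lock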
@@ -33,7 +33,7 @@ template <class Lock> class InvertedLock
InvertedLock(Lock &lock) : lock(lock) { lock.unlock(); }
~InvertedLock() { lock.lock(); }
};
}
} // namespace

// The shared monitor implementation based on a semaphore and mutex
template <typename Data> struct SharedMonitor
@@ -146,7 +146,9 @@ template <typename Data> struct SharedMonitor
// like two-turnstile reusable barrier or boost/interprocess/sync/spin/condition.hpp
// fail if a waiter is killed.

static constexpr int buffer_size = 256;
// Buffer size needs to be large enough to hold all the semaphores for every
// listener you want to support.
static constexpr int buffer_size = 4096 * 4;

struct InternalData
{
@@ -232,8 +234,8 @@ template <typename Data> struct SharedMonitor
bi::shared_memory_object shmem;
bi::mapped_region region;
};
}
}
} // namespace storage
} // namespace osrm

#undef USE_BOOST_INTERPROCESS_CONDITION


@@ -35,22 +35,28 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <boost/filesystem/path.hpp>

#include <string>
#include <vector>

namespace osrm
{
namespace storage
{

void populateLayoutFromFile(const boost::filesystem::path &path, storage::BaseDataLayout &layout);

class Storage
{
public:
Storage(StorageConfig config);

int Run(int max_wait, const std::string &name, bool only_metric);

void PopulateStaticLayout(DataLayout &layout);
void PopulateUpdatableLayout(DataLayout &layout);
void PopulateStaticData(const SharedDataIndex &index);
void PopulateUpdatableData(const SharedDataIndex &index);
void PopulateLayout(storage::BaseDataLayout &layout,
const std::vector<std::pair<bool, boost::filesystem::path>> &files);
std::string PopulateLayoutWithRTree(storage::BaseDataLayout &layout);
std::vector<std::pair<bool, boost::filesystem::path>> GetUpdatableFiles();
std::vector<std::pair<bool, boost::filesystem::path>> GetStaticFiles();

private:
StorageConfig config;

@@ -294,11 +294,14 @@ inline auto make_filtered_cell_metric_view(const SharedDataIndex &index,
auto prefix = name + "/exclude/" + std::to_string(exclude_index);
auto weights_block_id = prefix + "/weights";
auto durations_block_id = prefix + "/durations";
auto distances_block_id = prefix + "/distances";

auto weights = make_vector_view<EdgeWeight>(index, weights_block_id);
auto durations = make_vector_view<EdgeDuration>(index, durations_block_id);
auto distances = make_vector_view<EdgeDistance>(index, distances_block_id);

return customizer::CellMetricView{std::move(weights), std::move(durations)};
return customizer::CellMetricView{
std::move(weights), std::move(durations), std::move(distances)};
}

inline auto make_cell_metric_view(const SharedDataIndex &index, const std::string &name)
@@ -311,12 +314,14 @@ inline auto make_cell_metric_view(const SharedDataIndex &index, const std::strin
{
auto weights_block_id = prefix + "/weights";
auto durations_block_id = prefix + "/durations";
auto distances_block_id = prefix + "/distances";

auto weights = make_vector_view<EdgeWeight>(index, weights_block_id);
auto durations = make_vector_view<EdgeDuration>(index, durations_block_id);
auto distances = make_vector_view<EdgeDistance>(index, distances_block_id);

cell_metric_excludes.push_back(
customizer::CellMetricView{std::move(weights), std::move(durations)});
cell_metric_excludes.push_back(customizer::CellMetricView{
std::move(weights), std::move(durations), std::move(distances)});
}

return cell_metric_excludes;
@@ -332,6 +337,7 @@ inline auto make_multi_level_graph_view(const SharedDataIndex &index, const std:
index, name + "/node_to_edge_offset");
auto node_weights = make_vector_view<EdgeWeight>(index, name + "/node_weights");
auto node_durations = make_vector_view<EdgeDuration>(index, name + "/node_durations");
auto node_distances = make_vector_view<EdgeDistance>(index, name + "/node_distances");
auto is_forward_edge = make_vector_view<bool>(index, name + "/is_forward_edge");
auto is_backward_edge = make_vector_view<bool>(index, name + "/is_backward_edge");

@@ -340,6 +346,7 @@ inline auto make_multi_level_graph_view(const SharedDataIndex &index, const std:
std::move(node_to_offset),
std::move(node_weights),
std::move(node_durations),
std::move(node_distances),
std::move(is_forward_edge),
std::move(is_backward_edge));
}

@@ -22,10 +22,16 @@ class Updater
std::vector<EdgeWeight> &node_weights,
std::uint32_t &connectivity_checksum) const;

EdgeID
LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &edge_based_edge_list,
EdgeID LoadAndUpdateEdgeExpandedGraph(
std::vector<extractor::EdgeBasedEdge> &edge_based_edge_list,
std::vector<EdgeWeight> &node_weights,
std::vector<EdgeDuration> &node_durations, // TODO: to be deleted
std::vector<EdgeDuration> &node_durations, // TODO: remove when optional
std::uint32_t &connectivity_checksum) const;
EdgeID LoadAndUpdateEdgeExpandedGraph(
std::vector<extractor::EdgeBasedEdge> &edge_based_edge_list,
std::vector<EdgeWeight> &node_weights,
std::vector<EdgeDuration> &node_durations, // TODO: remove when optional
std::vector<EdgeDistance> &node_distances, // TODO: remove when optional
std::uint32_t &connectivity_checksum) const;

private:

@@ -1,9 +1,11 @@
#ifndef OSRM_UTIL_DEBUG_HPP_
#define OSRM_UTIL_DEBUG_HPP_

#include "extractor/edge_based_edge.hpp"
#include "extractor/node_data_container.hpp"
#include "extractor/query_node.hpp"
#include "guidance/intersection.hpp"
#include "guidance/turn_instruction.hpp"
#include "guidance/turn_lane_data.hpp"
#include "engine/guidance/route_step.hpp"
#include "util/node_based_graph.hpp"
@@ -186,6 +188,23 @@ inline std::ostream &operator<<(std::ostream &out, const LaneDataVector &turn_la
}
}
}

namespace extractor
{
inline std::ostream &operator<<(std::ostream &out, const EdgeBasedEdge &edge)
{
out << " EdgeBasedEdge {";
out << " source " << edge.source << ", target: " << edge.target;
out << " EdgeBasedEdgeData data {";
out << " turn_id: " << edge.data.turn_id << ", weight: " << edge.data.weight;
out << " distance: " << edge.data.distance << ", duration: " << edge.data.duration;
out << " forward: " << (edge.data.forward == 0 ? "false" : "true")
<< ", backward: " << (edge.data.backward == 0 ? "false" : "true");
out << " }";
out << "}";
return out;
}
}
}

#endif /*OSRM_ENGINE_GUIDANCE_DEBUG_HPP_*/

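The new stream operator makes edge-based edges printable in debugging sessions, e.g.:

// Prints: EdgeBasedEdge { source 1, target: 2 EdgeBasedEdgeData data { ... } }
std::cout << edge << '\n';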
517
include/util/ieee754.hpp
Normal file
517
include/util/ieee754.hpp
Normal file
@ -0,0 +1,517 @@
|
||||
#ifndef IEEE754_HPP
#define IEEE754_HPP
/**
Copyright (C) 2014 Milo Yip

Imported from:
https://github.com/miloyip/dtoa-benchmark/blob/c4020c62754950d38a1aaaed2975b05b441d1e7d/src/milo/dtoa_milo.h

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
**/

#include <assert.h>
#include <math.h>

#if defined(_MSC_VER)
#include "rapidjson/msinttypes/stdint.h"
#include <intrin.h>
#else
#include <stdint.h>
#endif

#define UINT64_C2(h, l) ((static_cast<uint64_t>(h) << 32) | static_cast<uint64_t>(l))

namespace osrm
{
namespace util
{
namespace ieee754
{

struct DiyFp
{
    DiyFp() {}

    DiyFp(uint64_t f, int e) : f(f), e(e) {}

    DiyFp(double d)
    {
        union {
            double d;
            uint64_t u64;
        } u = {d};

        int biased_e = (u.u64 & kDpExponentMask) >> kDpSignificandSize;
        uint64_t significand = (u.u64 & kDpSignificandMask);
        if (biased_e != 0)
        {
            f = significand + kDpHiddenBit;
            e = biased_e - kDpExponentBias;
        }
        else
        {
            f = significand;
            e = kDpMinExponent + 1;
        }
    }

    DiyFp operator-(const DiyFp &rhs) const
    {
        assert(e == rhs.e);
        assert(f >= rhs.f);
        return DiyFp(f - rhs.f, e);
    }

    DiyFp operator*(const DiyFp &rhs) const
    {
#if defined(_MSC_VER) && defined(_M_AMD64)
        uint64_t h;
        uint64_t l = _umul128(f, rhs.f, &h);
        if (l & (uint64_t(1) << 63)) // rounding
            h++;
        return DiyFp(h, e + rhs.e + 64);
#elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) && defined(__x86_64__)
        unsigned __int128 p =
            static_cast<unsigned __int128>(f) * static_cast<unsigned __int128>(rhs.f);
        uint64_t h = p >> 64;
        uint64_t l = static_cast<uint64_t>(p);
        if (l & (uint64_t(1) << 63)) // rounding
            h++;
        return DiyFp(h, e + rhs.e + 64);
#else
        const uint64_t M32 = 0xFFFFFFFF;
        const uint64_t a = f >> 32;
        const uint64_t b = f & M32;
        const uint64_t c = rhs.f >> 32;
        const uint64_t d = rhs.f & M32;
        const uint64_t ac = a * c;
        const uint64_t bc = b * c;
        const uint64_t ad = a * d;
        const uint64_t bd = b * d;
        uint64_t tmp = (bd >> 32) + (ad & M32) + (bc & M32);
        tmp += 1U << 31; /// mult_round
        return DiyFp(ac + (ad >> 32) + (bc >> 32) + (tmp >> 32), e + rhs.e + 64);
#endif
    }

    DiyFp Normalize() const
    {
#if defined(_MSC_VER) && defined(_M_AMD64)
        unsigned long index;
        _BitScanReverse64(&index, f);
        return DiyFp(f << (63 - index), e - (63 - index));
#elif defined(__GNUC__)
        int s = __builtin_clzll(f);
        return DiyFp(f << s, e - s);
#else
        DiyFp res = *this;
        while (!(res.f & kDpHiddenBit))
        {
            res.f <<= 1;
            res.e--;
        }
        res.f <<= (kDiySignificandSize - kDpSignificandSize - 1);
        res.e = res.e - (kDiySignificandSize - kDpSignificandSize - 1);
        return res;
#endif
    }

    DiyFp NormalizeBoundary() const
    {
#if defined(_MSC_VER) && defined(_M_AMD64)
        unsigned long index;
        _BitScanReverse64(&index, f);
        return DiyFp(f << (63 - index), e - (63 - index));
#else
        DiyFp res = *this;
        while (!(res.f & (kDpHiddenBit << 1)))
        {
            res.f <<= 1;
            res.e--;
        }
        res.f <<= (kDiySignificandSize - kDpSignificandSize - 2);
        res.e = res.e - (kDiySignificandSize - kDpSignificandSize - 2);
        return res;
#endif
    }

    void NormalizedBoundaries(DiyFp *minus, DiyFp *plus) const
    {
        DiyFp pl = DiyFp((f << 1) + 1, e - 1).NormalizeBoundary();
        DiyFp mi = (f == kDpHiddenBit) ? DiyFp((f << 2) - 1, e - 2) : DiyFp((f << 1) - 1, e - 1);
        mi.f <<= mi.e - pl.e;
        mi.e = pl.e;
        *plus = pl;
        *minus = mi;
    }

    static const int kDiySignificandSize = 64;
    static const int kDpSignificandSize = 52;
    static const int kDpExponentBias = 0x3FF + kDpSignificandSize;
    static const int kDpMinExponent = -kDpExponentBias;
    static const uint64_t kDpExponentMask = UINT64_C2(0x7FF00000, 0x00000000);
    static const uint64_t kDpSignificandMask = UINT64_C2(0x000FFFFF, 0xFFFFFFFF);
    static const uint64_t kDpHiddenBit = UINT64_C2(0x00100000, 0x00000000);

    uint64_t f;
    int e;
};

inline DiyFp GetCachedPower(int e, int *K)
{
    // 10^-348, 10^-340, ..., 10^340
    static const uint64_t kCachedPowers_F[] = {
        UINT64_C2(0xfa8fd5a0, 0x081c0288), UINT64_C2(0xbaaee17f, 0xa23ebf76),
        UINT64_C2(0x8b16fb20, 0x3055ac76), UINT64_C2(0xcf42894a, 0x5dce35ea),
        UINT64_C2(0x9a6bb0aa, 0x55653b2d), UINT64_C2(0xe61acf03, 0x3d1a45df),
        UINT64_C2(0xab70fe17, 0xc79ac6ca), UINT64_C2(0xff77b1fc, 0xbebcdc4f),
        UINT64_C2(0xbe5691ef, 0x416bd60c), UINT64_C2(0x8dd01fad, 0x907ffc3c),
        UINT64_C2(0xd3515c28, 0x31559a83), UINT64_C2(0x9d71ac8f, 0xada6c9b5),
        UINT64_C2(0xea9c2277, 0x23ee8bcb), UINT64_C2(0xaecc4991, 0x4078536d),
        UINT64_C2(0x823c1279, 0x5db6ce57), UINT64_C2(0xc2109436, 0x4dfb5637),
        UINT64_C2(0x9096ea6f, 0x3848984f), UINT64_C2(0xd77485cb, 0x25823ac7),
        UINT64_C2(0xa086cfcd, 0x97bf97f4), UINT64_C2(0xef340a98, 0x172aace5),
        UINT64_C2(0xb23867fb, 0x2a35b28e), UINT64_C2(0x84c8d4df, 0xd2c63f3b),
        UINT64_C2(0xc5dd4427, 0x1ad3cdba), UINT64_C2(0x936b9fce, 0xbb25c996),
        UINT64_C2(0xdbac6c24, 0x7d62a584), UINT64_C2(0xa3ab6658, 0x0d5fdaf6),
        UINT64_C2(0xf3e2f893, 0xdec3f126), UINT64_C2(0xb5b5ada8, 0xaaff80b8),
        UINT64_C2(0x87625f05, 0x6c7c4a8b), UINT64_C2(0xc9bcff60, 0x34c13053),
        UINT64_C2(0x964e858c, 0x91ba2655), UINT64_C2(0xdff97724, 0x70297ebd),
        UINT64_C2(0xa6dfbd9f, 0xb8e5b88f), UINT64_C2(0xf8a95fcf, 0x88747d94),
        UINT64_C2(0xb9447093, 0x8fa89bcf), UINT64_C2(0x8a08f0f8, 0xbf0f156b),
        UINT64_C2(0xcdb02555, 0x653131b6), UINT64_C2(0x993fe2c6, 0xd07b7fac),
        UINT64_C2(0xe45c10c4, 0x2a2b3b06), UINT64_C2(0xaa242499, 0x697392d3),
        UINT64_C2(0xfd87b5f2, 0x8300ca0e), UINT64_C2(0xbce50864, 0x92111aeb),
        UINT64_C2(0x8cbccc09, 0x6f5088cc), UINT64_C2(0xd1b71758, 0xe219652c),
        UINT64_C2(0x9c400000, 0x00000000), UINT64_C2(0xe8d4a510, 0x00000000),
        UINT64_C2(0xad78ebc5, 0xac620000), UINT64_C2(0x813f3978, 0xf8940984),
        UINT64_C2(0xc097ce7b, 0xc90715b3), UINT64_C2(0x8f7e32ce, 0x7bea5c70),
        UINT64_C2(0xd5d238a4, 0xabe98068), UINT64_C2(0x9f4f2726, 0x179a2245),
        UINT64_C2(0xed63a231, 0xd4c4fb27), UINT64_C2(0xb0de6538, 0x8cc8ada8),
        UINT64_C2(0x83c7088e, 0x1aab65db), UINT64_C2(0xc45d1df9, 0x42711d9a),
        UINT64_C2(0x924d692c, 0xa61be758), UINT64_C2(0xda01ee64, 0x1a708dea),
        UINT64_C2(0xa26da399, 0x9aef774a), UINT64_C2(0xf209787b, 0xb47d6b85),
        UINT64_C2(0xb454e4a1, 0x79dd1877), UINT64_C2(0x865b8692, 0x5b9bc5c2),
        UINT64_C2(0xc83553c5, 0xc8965d3d), UINT64_C2(0x952ab45c, 0xfa97a0b3),
        UINT64_C2(0xde469fbd, 0x99a05fe3), UINT64_C2(0xa59bc234, 0xdb398c25),
        UINT64_C2(0xf6c69a72, 0xa3989f5c), UINT64_C2(0xb7dcbf53, 0x54e9bece),
        UINT64_C2(0x88fcf317, 0xf22241e2), UINT64_C2(0xcc20ce9b, 0xd35c78a5),
        UINT64_C2(0x98165af3, 0x7b2153df), UINT64_C2(0xe2a0b5dc, 0x971f303a),
        UINT64_C2(0xa8d9d153, 0x5ce3b396), UINT64_C2(0xfb9b7cd9, 0xa4a7443c),
        UINT64_C2(0xbb764c4c, 0xa7a44410), UINT64_C2(0x8bab8eef, 0xb6409c1a),
        UINT64_C2(0xd01fef10, 0xa657842c), UINT64_C2(0x9b10a4e5, 0xe9913129),
        UINT64_C2(0xe7109bfb, 0xa19c0c9d), UINT64_C2(0xac2820d9, 0x623bf429),
        UINT64_C2(0x80444b5e, 0x7aa7cf85), UINT64_C2(0xbf21e440, 0x03acdd2d),
        UINT64_C2(0x8e679c2f, 0x5e44ff8f), UINT64_C2(0xd433179d, 0x9c8cb841),
        UINT64_C2(0x9e19db92, 0xb4e31ba9), UINT64_C2(0xeb96bf6e, 0xbadf77d9),
        UINT64_C2(0xaf87023b, 0x9bf0ee6b)};
    static const int16_t kCachedPowers_E[] = {
        -1220, -1193, -1166, -1140, -1113, -1087, -1060, -1034, -1007, -980, -954, -927, -901,
        -874, -847, -821, -794, -768, -741, -715, -688, -661, -635, -608, -582, -555,
        -529, -502, -475, -449, -422, -396, -369, -343, -316, -289, -263, -236, -210,
        -183, -157, -130, -103, -77, -50, -24, 3, 30, 56, 83, 109, 136,
        162, 189, 216, 242, 269, 295, 322, 348, 375, 402, 428, 455, 481,
        508, 534, 561, 588, 614, 641, 667, 694, 720, 747, 774, 800, 827,
        853, 880, 907, 933, 960, 986, 1013, 1039, 1066};

    // int k = static_cast<int>(ceil((-61 - e) * 0.30102999566398114)) + 374;
    double dk =
        (-61 - e) * 0.30102999566398114 + 347; // dk must be positive, so can do ceiling in positive
    int k = static_cast<int>(dk);
    if (k != dk)
        k++;

    unsigned index = static_cast<unsigned>((k >> 3) + 1);
    *K = -(-348 + static_cast<int>(index << 3)); // decimal exponent no need lookup table

    assert(index < sizeof(kCachedPowers_F) / sizeof(kCachedPowers_F[0]));
    return DiyFp(kCachedPowers_F[index], kCachedPowers_E[index]);
}

inline void
GrisuRound(char *buffer, int len, uint64_t delta, uint64_t rest, uint64_t ten_kappa, uint64_t wp_w)
{
    while (rest < wp_w && delta - rest >= ten_kappa && (rest + ten_kappa < wp_w || /// closer
                                                        wp_w - rest > rest + ten_kappa - wp_w))
    {
        buffer[len - 1]--;
        rest += ten_kappa;
    }
}

inline unsigned CountDecimalDigit32(uint32_t n)
{
    // Simple pure C++ implementation was faster than __builtin_clz version in this situation.
    if (n < 10)
        return 1;
    if (n < 100)
        return 2;
    if (n < 1000)
        return 3;
    if (n < 10000)
        return 4;
    if (n < 100000)
        return 5;
    if (n < 1000000)
        return 6;
    if (n < 10000000)
        return 7;
    if (n < 100000000)
        return 8;
    if (n < 1000000000)
        return 9;
    return 10;
}

inline void
DigitGen(const DiyFp &W, const DiyFp &Mp, uint64_t delta, char *buffer, int *len, int *K)
{
    static const uint32_t kPow10[] = {
        1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000};
    const DiyFp one(uint64_t(1) << -Mp.e, Mp.e);
    const DiyFp wp_w = Mp - W;
    uint32_t p1 = static_cast<uint32_t>(Mp.f >> -one.e);
    uint64_t p2 = Mp.f & (one.f - 1);
    int kappa = static_cast<int>(CountDecimalDigit32(p1));
    *len = 0;

    while (kappa > 0)
    {
        uint32_t d;
        switch (kappa)
        {
        case 10:
            d = p1 / 1000000000;
            p1 %= 1000000000;
            break;
        case 9:
            d = p1 / 100000000;
            p1 %= 100000000;
            break;
        case 8:
            d = p1 / 10000000;
            p1 %= 10000000;
            break;
        case 7:
            d = p1 / 1000000;
            p1 %= 1000000;
            break;
        case 6:
            d = p1 / 100000;
            p1 %= 100000;
            break;
        case 5:
            d = p1 / 10000;
            p1 %= 10000;
            break;
        case 4:
            d = p1 / 1000;
            p1 %= 1000;
            break;
        case 3:
            d = p1 / 100;
            p1 %= 100;
            break;
        case 2:
            d = p1 / 10;
            p1 %= 10;
            break;
        case 1:
            d = p1;
            p1 = 0;
            break;
        default:
#if defined(_MSC_VER)
            __assume(0);
#elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
            __builtin_unreachable();
#else
            d = 0;
#endif
        }
        if (d || *len)
            buffer[(*len)++] = '0' + static_cast<char>(d);
        kappa--;
        uint64_t tmp = (static_cast<uint64_t>(p1) << -one.e) + p2;
        if (tmp <= delta)
        {
            *K += kappa;
            GrisuRound(
                buffer, *len, delta, tmp, static_cast<uint64_t>(kPow10[kappa]) << -one.e, wp_w.f);
            return;
        }
    }

    // kappa = 0
    for (;;)
    {
        p2 *= 10;
        delta *= 10;
        char d = static_cast<char>(p2 >> -one.e);
        if (d || *len)
            buffer[(*len)++] = '0' + d;
        p2 &= one.f - 1;
        kappa--;
        if (p2 < delta)
        {
            *K += kappa;
            GrisuRound(buffer, *len, delta, p2, one.f, wp_w.f * kPow10[-kappa]);
            return;
        }
    }
}

inline void Grisu2(double value, char *buffer, int *length, int *K)
{
    const DiyFp v(value);
    DiyFp w_m, w_p;
    v.NormalizedBoundaries(&w_m, &w_p);

    const DiyFp c_mk = GetCachedPower(w_p.e, K);
    const DiyFp W = v.Normalize() * c_mk;
    DiyFp Wp = w_p * c_mk;
    DiyFp Wm = w_m * c_mk;
    Wm.f++;
    Wp.f--;
    DigitGen(W, Wp, Wp.f - Wm.f, buffer, length, K);
}

inline const char *GetDigitsLut()
{
    static const char cDigitsLut[200] = {
        '0', '0', '0', '1', '0', '2', '0', '3', '0', '4', '0', '5', '0', '6', '0', '7', '0',
        '8', '0', '9', '1', '0', '1', '1', '1', '2', '1', '3', '1', '4', '1', '5', '1', '6',
        '1', '7', '1', '8', '1', '9', '2', '0', '2', '1', '2', '2', '2', '3', '2', '4', '2',
        '5', '2', '6', '2', '7', '2', '8', '2', '9', '3', '0', '3', '1', '3', '2', '3', '3',
        '3', '4', '3', '5', '3', '6', '3', '7', '3', '8', '3', '9', '4', '0', '4', '1', '4',
        '2', '4', '3', '4', '4', '4', '5', '4', '6', '4', '7', '4', '8', '4', '9', '5', '0',
        '5', '1', '5', '2', '5', '3', '5', '4', '5', '5', '5', '6', '5', '7', '5', '8', '5',
        '9', '6', '0', '6', '1', '6', '2', '6', '3', '6', '4', '6', '5', '6', '6', '6', '7',
        '6', '8', '6', '9', '7', '0', '7', '1', '7', '2', '7', '3', '7', '4', '7', '5', '7',
        '6', '7', '7', '7', '8', '7', '9', '8', '0', '8', '1', '8', '2', '8', '3', '8', '4',
        '8', '5', '8', '6', '8', '7', '8', '8', '8', '9', '9', '0', '9', '1', '9', '2', '9',
        '3', '9', '4', '9', '5', '9', '6', '9', '7', '9', '8', '9', '9'};
    return cDigitsLut;
}

inline void WriteExponent(int K, char *buffer)
{
    if (K < 0)
    {
        *buffer++ = '-';
        K = -K;
    }

    if (K >= 100)
    {
        *buffer++ = '0' + static_cast<char>(K / 100);
        K %= 100;
        const char *d = GetDigitsLut() + K * 2;
        *buffer++ = d[0];
        *buffer++ = d[1];
    }
    else if (K >= 10)
    {
        const char *d = GetDigitsLut() + K * 2;
        *buffer++ = d[0];
        *buffer++ = d[1];
    }
    else
        *buffer++ = '0' + static_cast<char>(K);

    *buffer = '\0';
}

inline void Prettify(char *buffer, int length, int k)
{
    const int kk = length + k; // 10^(kk-1) <= v < 10^kk

    if (length <= kk && kk <= 21)
    {
        // 1234e7 -> 12340000000
        for (int i = length; i < kk; i++)
            buffer[i] = '0';
        buffer[kk] = '.';
        buffer[kk + 1] = '0';
        buffer[kk + 2] = '\0';
    }
    else if (0 < kk && kk <= 21)
    {
        // 1234e-2 -> 12.34
        memmove(&buffer[kk + 1], &buffer[kk], length - kk);
        buffer[kk] = '.';
        buffer[length + 1] = '\0';
    }
    else if (-6 < kk && kk <= 0)
    {
        // 1234e-6 -> 0.001234
        const int offset = 2 - kk;
        memmove(&buffer[offset], &buffer[0], length);
        buffer[0] = '0';
        buffer[1] = '.';
        for (int i = 2; i < offset; i++)
            buffer[i] = '0';
        buffer[length + offset] = '\0';
    }
    else if (length == 1)
    {
        // 1e30
        buffer[1] = 'e';
        WriteExponent(kk - 1, &buffer[2]);
    }
    else
    {
        // 1234e30 -> 1.234e33
        memmove(&buffer[2], &buffer[1], length - 1);
        buffer[1] = '.';
        buffer[length + 1] = 'e';
        WriteExponent(kk - 1, &buffer[0 + length + 2]);
    }
}

inline void dtoa_milo(double value, char *buffer)
{
    // Not handling NaN and inf
    assert(!isnan(value));
    assert(!isinf(value));

    if (value == 0)
    {
        buffer[0] = '0';
        buffer[1] = '.';
        buffer[2] = '0';
        buffer[3] = '\0';
    }
    else
    {
        if (value < 0)
        {
            *buffer++ = '-';
            value = -value;
        }
        int length, K;
        Grisu2(value, buffer, &length, &K);
        Prettify(buffer, length, K);
    }
}
} // namespace ieee754
} // namespace util
} // namespace osrm

#endif // IEEE754_HPP

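A minimal usage sketch for the header above (the buffer size is an assumption; the output needs room for the digits, sign, exponent, and terminator):

#include "util/ieee754.hpp"
#include <cstdio>

int main()
{
    char buffer[32];
    osrm::util::ieee754::dtoa_milo(3.141592653589793, buffer);
    std::puts(buffer); // e.g. "3.141592653589793"
    return 0;
}
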
@ -5,6 +5,7 @@
#define JSON_RENDERER_HPP

#include "util/cast.hpp"
#include "util/ieee754.hpp"
#include "util/string_util.hpp"

#include "osrm/json_container.hpp"
@ -21,6 +22,11 @@ namespace util
namespace json
{

namespace
{
constexpr int MAX_FLOAT_STRING_LENGTH = 256;
}

struct Renderer
{
    explicit Renderer(std::ostream &_out) : out(_out) {}
@ -34,8 +40,31 @@ struct Renderer

    void operator()(const Number &number) const
    {
        out.precision(10);
        out << number.value;
        char buffer[MAX_FLOAT_STRING_LENGTH] = {'\0'};
        ieee754::dtoa_milo(number.value, buffer);

        // Truncate to 10 decimal places
        int pos = 0;
        int decimalpos = 0;
        while (decimalpos == 0 && pos < MAX_FLOAT_STRING_LENGTH && buffer[pos] != 0)
        {
            if (buffer[pos] == '.')
            {
                decimalpos = pos;
                break;
            }
            ++pos;
        }
        while (pos < MAX_FLOAT_STRING_LENGTH && buffer[pos] != 0)
        {
            if (pos - decimalpos == 10)
            {
                buffer[pos] = '\0';
                break;
            }
            ++pos;
        }
        out << buffer;
    }

    void operator()(const Object &object) const

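An illustrative trace of the truncation above (input value invented): dtoa_milo(0.123456789012345, buffer) writes "0.123456789012345"; the first loop finds the '.' at index 1, the second loop nulls buffer[11] once pos - decimalpos == 10, and "0.123456789" is what gets streamed.
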
@ -15,14 +15,14 @@ namespace util

namespace detail
{
template <typename T, typename RegionT>
util::vector_view<T> mmapFile(const boost::filesystem::path &file, RegionT &region)
template <typename T, typename MmapContainerT>
util::vector_view<T> mmapFile(const boost::filesystem::path &file, MmapContainerT &mmap_container)
{
    try
    {
        region.open(file);
        std::size_t num_objects = region.size() / sizeof(T);
        auto data_ptr = region.data();
        mmap_container.open(file);
        std::size_t num_objects = mmap_container.size() / sizeof(T);
        auto data_ptr = mmap_container.data();
        BOOST_ASSERT(reinterpret_cast<uintptr_t>(data_ptr) % alignof(T) == 0);
        return util::vector_view<T>(reinterpret_cast<T *>(data_ptr), num_objects);
    }
@ -34,9 +34,10 @@ util::vector_view<T> mmapFile(const boost::filesystem::path &file, RegionT &regi
}
}

template <typename T, typename RegionT>
util::vector_view<T>
mmapFile(const boost::filesystem::path &file, RegionT &region, const std::size_t size)
template <typename T, typename MmapContainerT>
util::vector_view<T> mmapFile(const boost::filesystem::path &file,
                              MmapContainerT &mmap_container,
                              const std::size_t size)
{
    try
    {
@ -45,10 +46,10 @@ mmapFile(const boost::filesystem::path &file, RegionT &region, const std::size_t
        params.path = file.string();
        params.flags = boost::iostreams::mapped_file::readwrite;
        params.new_file_size = size;
        region.open(params);
        mmap_container.open(params);

        std::size_t num_objects = size / sizeof(T);
        auto data_ptr = region.data();
        auto data_ptr = mmap_container.data();
        BOOST_ASSERT(reinterpret_cast<uintptr_t>(data_ptr) % alignof(T) == 0);
        return util::vector_view<T>(reinterpret_cast<T *>(data_ptr), num_objects);
    }
@ -63,24 +64,24 @@ mmapFile(const boost::filesystem::path &file, RegionT &region, const std::size_t

template <typename T>
util::vector_view<const T> mmapFile(const boost::filesystem::path &file,
                                    boost::iostreams::mapped_file_source &region)
                                    boost::iostreams::mapped_file_source &mmap_container)
{
    return detail::mmapFile<const T>(file, region);
    return detail::mmapFile<const T>(file, mmap_container);
}

template <typename T>
util::vector_view<T> mmapFile(const boost::filesystem::path &file,
                              boost::iostreams::mapped_file &region)
                              boost::iostreams::mapped_file &mmap_container)
{
    return detail::mmapFile<T>(file, region);
    return detail::mmapFile<T>(file, mmap_container);
}

template <typename T>
util::vector_view<T> mmapFile(const boost::filesystem::path &file,
                              boost::iostreams::mapped_file &region,
                              boost::iostreams::mapped_file &mmap_container,
                              std::size_t size)
{
    return detail::mmapFile<T>(file, region, size);
    return detail::mmapFile<T>(file, mmap_container, size);
}
}
}

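A hedged usage sketch for the renamed overloads (the file path is invented; the mapping object must outlive the returned view, which only wraps the mapped memory):

#include "util/mmap_file.hpp"
#include <boost/iostreams/device/mapped_file.hpp>

void read_example()
{
    boost::iostreams::mapped_file_source mmap_container;
    // Read-only mapping: yields a vector_view<const char> over the file bytes.
    auto view = osrm::util::mmapFile<char>("/tmp/example.bin", mmap_container);
    // view.size() is the file size in bytes; view[i] reads byte i in place.
}
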
@ -9,6 +9,7 @@

#include <tbb/parallel_sort.h>

#include <iostream>
#include <memory>
#include <utility>

@ -20,24 +21,27 @@ namespace util
struct NodeBasedEdgeData
{
    NodeBasedEdgeData()
        : weight(INVALID_EDGE_WEIGHT), duration(INVALID_EDGE_WEIGHT), geometry_id({0, false}),
          reversed(false), annotation_data(-1)
        : weight(INVALID_EDGE_WEIGHT), duration(INVALID_EDGE_WEIGHT),
          distance(INVALID_EDGE_DISTANCE), geometry_id({0, false}), reversed(false),
          annotation_data(-1)
    {
    }

    NodeBasedEdgeData(EdgeWeight weight,
                      EdgeWeight duration,
                      EdgeDistance distance,
                      GeometryID geometry_id,
                      bool reversed,
                      extractor::NodeBasedEdgeClassification flags,
                      AnnotationID annotation_data)
        : weight(weight), duration(duration), geometry_id(geometry_id), reversed(reversed),
          flags(flags), annotation_data(annotation_data)
        : weight(weight), duration(duration), distance(distance), geometry_id(geometry_id),
          reversed(reversed), flags(flags), annotation_data(annotation_data)
    {
    }

    EdgeWeight weight;
    EdgeWeight duration;
    EdgeDistance distance;
    GeometryID geometry_id;
    bool reversed : 1;
    extractor::NodeBasedEdgeClassification flags;
@ -80,11 +84,13 @@ NodeBasedDynamicGraphFromEdges(NodeID number_of_nodes,
                       const extractor::NodeBasedEdge &input_edge) {
        output_edge.data.weight = input_edge.weight;
        output_edge.data.duration = input_edge.duration;
        output_edge.data.distance = input_edge.distance;
        output_edge.data.flags = input_edge.flags;
        output_edge.data.annotation_data = input_edge.annotation_data;

        BOOST_ASSERT(output_edge.data.weight > 0);
        BOOST_ASSERT(output_edge.data.duration > 0);
        BOOST_ASSERT(output_edge.data.distance >= 0);
    });

    tbb::parallel_sort(edges_list.begin(), edges_list.end());

@ -113,8 +113,10 @@ static const SegmentWeight MAX_SEGMENT_WEIGHT = INVALID_SEGMENT_WEIGHT - 1;
static const SegmentDuration MAX_SEGMENT_DURATION = INVALID_SEGMENT_DURATION - 1;
static const EdgeWeight INVALID_EDGE_WEIGHT = std::numeric_limits<EdgeWeight>::max();
static const EdgeDuration MAXIMAL_EDGE_DURATION = std::numeric_limits<EdgeDuration>::max();
static const EdgeDistance MAXIMAL_EDGE_DISTANCE = std::numeric_limits<EdgeDistance>::max();
static const TurnPenalty INVALID_TURN_PENALTY = std::numeric_limits<TurnPenalty>::max();
static const EdgeDistance INVALID_EDGE_DISTANCE = std::numeric_limits<EdgeDistance>::max();
static const EdgeDistance INVALID_FALLBACK_SPEED = std::numeric_limits<double>::max();

// FIXME the bitfields we use require a reduced maximal duration, this should be kept consistent
// within the code base. For now we have to ensure that we don't cast 30 bit to -1 and break any

@ -195,7 +195,10 @@ template <> class vector_view<bool>
    {
        BOOST_ASSERT_MSG(index < m_size, "invalid size");
        const std::size_t bucket = index / WORD_BITS;
        // Note: ordering of bits here should match packBits in storage/serialization.hpp
        // so that directly mmap-ing data is possible
        const auto offset = index % WORD_BITS;
        BOOST_ASSERT(WORD_BITS > offset);
        return m_ptr[bucket] & (static_cast<Word>(1) << offset);
    }

@ -224,11 +227,23 @@ template <> class vector_view<bool>
    {
        BOOST_ASSERT(index < m_size);
        const auto bucket = index / WORD_BITS;
        // Note: ordering of bits here should match packBits in storage/serialization.hpp
        // so that directly mmap-ing data is possible
        const auto offset = index % WORD_BITS;
        BOOST_ASSERT(WORD_BITS > offset);
        return reference{m_ptr + bucket, static_cast<Word>(1) << offset};
    }

    template <typename T> friend void swap(vector_view<T> &, vector_view<T> &) noexcept;

    friend std::ostream &operator<<(std::ostream &os, const vector_view<bool> &rhs)
    {
        for (std::size_t i = 0; i < rhs.size(); ++i)
        {
            os << (i > 0 ? " " : "") << rhs.at(i);
        }
        return os;
    }
};

// Both vector_view<T> and the vector_view<bool> specializations share this impl.

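A minimal sketch of the bit layout the two comments above pin down (Word and WORD_BITS are assumed to be 64-bit here, matching the vector_view implementation):

#include <cstddef>
#include <cstdint>

using Word = std::uint64_t;
constexpr std::size_t WORD_BITS = sizeof(Word) * 8;

// Bit i lives in word i / WORD_BITS at offset i % WORD_BITS, lowest bit first;
// packBits in storage/serialization.hpp must emit the same layout so the
// serialized words can be mmap-ed and read without any conversion step.
inline bool test_bit(const Word *words, std::size_t index)
{
    return (words[index / WORD_BITS] >> (index % WORD_BITS)) & Word{1};
}
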
9569 package-lock.json generated
File diff suppressed because it is too large
14 package.json
@ -1,13 +1,13 @@
{
  "name": "osrm",
  "version": "5.18.0-latest.1",
  "version": "5.20.0-latest.1",
  "private": false,
  "description": "The Open Source Routing Machine is a high performance routing engine written in C++14 designed to run on OpenStreetMap data.",
  "dependencies": {
    "mkdirp": "^0.5.1",
    "nan": "^2.6.2",
    "nan": "^2.11.1",
    "node-cmake": "^2.3.2",
    "node-pre-gyp": "^0.6.36",
    "node-pre-gyp": "^0.12.0",
    "rimraf": "^2.5.4"
  },
  "browserify": {
@ -18,7 +18,7 @@
  },
  "scripts": {
    "lint": "node ./node_modules/eslint/bin/eslint.js -c ./.eslintrc features/step_definitions/ features/support/",
    "test": "npm run lint && node ./node_modules/cucumber/bin/cucumber.js features/ -p verify && node ./node_modules/cucumber/bin/cucumber.js features/ -p mld",
    "test": "npm run lint && node ./node_modules/cucumber/bin/cucumber.js features/ -p verify && node ./node_modules/cucumber/bin/cucumber.js features/ -p verify -m mmap && node ./node_modules/cucumber/bin/cucumber.js features/ -p mld && node ./node_modules/cucumber/bin/cucumber.js features/ -p mld -m mmap",
    "clean": "rm -rf test/cache",
    "docs": "./scripts/build_api_docs.sh",
    "install": "node-pre-gyp install --fallback-to-build=false || ./scripts/node_install.sh",
@ -47,14 +47,14 @@
    "csv-stringify": "^3.0.0",
    "cucumber": "^1.2.1",
    "d3-queue": "^2.0.3",
    "docbox": "^1.0.6",
    "docbox": "^1.0.11",
    "documentation": "^4.0.0-rc.1",
    "eslint": "^2.4.0",
    "eslint": "^5.10.0",
    "faucet": "^0.0.1",
    "jsonpath": "^1.0.0",
    "node-timeout": "0.0.4",
    "polyline": "^0.2.0",
    "request": "^2.69.0",
    "request": "^2.88.0",
    "tape": "^4.7.0",
    "turf": "^3.0.14",
    "xmlbuilder": "^4.2.1"

@ -269,6 +269,7 @@ function setup()
        ["at:rural"] = 100,
        ["at:trunk"] = 100,
        ["be:motorway"] = 120,
        ["be-vlg:rural"] = 70,
        ["by:urban"] = 60,
        ["by:motorway"] = 110,
        ["ch:rural"] = 80,

@ -432,7 +432,7 @@ end

-- maxspeed and advisory maxspeed
function WayHandlers.maxspeed(profile,way,result,data)
  local keys = Sequence { 'maxspeed:advisory', 'maxspeed' }
  local keys = Sequence { 'maxspeed:advisory', 'maxspeed', 'source:maxspeed', 'maxspeed:type' }
  local forward, backward = Tags.get_forward_backward_by_set(way,data,keys)
  forward = WayHandlers.parse_maxspeed(forward,profile)
  backward = WayHandlers.parse_maxspeed(backward,profile)

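In profile terms the extended Sequence means ways carrying only source:maxspeed or maxspeed:type now contribute a speed; a hedged Lua sketch (tag values invented):

-- Illustrative only: get_forward_backward_by_set walks the keys in order,
-- so an explicit maxspeed tag still wins over the type-style tags.
-- A way tagged { ["source:maxspeed"] = "DE:urban" } and nothing else
-- previously fell through; with the extended key set it is resolved via
-- parse_maxspeed just like a plain maxspeed value would be.
local keys = Sequence { 'maxspeed:advisory', 'maxspeed', 'source:maxspeed', 'maxspeed:type' }
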
@ -6,8 +6,12 @@ var fs = require('fs');
var name = process.argv[2];
var cmd = process.argv.slice(3).join(' ');
var start = Date.now();
exec(cmd, (err) => {
    if (err) return console.log(err);
exec(cmd, (err, stdout, stderr) => {
    if (err) {
        console.log(stdout);
        console.log(stderr);
        return process.exit(err.code);
    }
    var stop = +new Date();
    var time = (stop - start) / 1000.;
    fs.appendFileSync('/tmp/osrm.timings', `${name}\t${time}`, 'utf-8');

@ -215,6 +215,7 @@ void ContractNode(ContractorThreadData *data,
                        target,
                        path_weight,
                        in_data.duration + out_data.duration,
                        in_data.distance + out_data.distance,
                        out_data.originalEdges + in_data.originalEdges,
                        node,
                        SHORTCUT_ARC,
@ -225,6 +226,7 @@ void ContractNode(ContractorThreadData *data,
                        source,
                        path_weight,
                        in_data.duration + out_data.duration,
                        in_data.distance + out_data.distance,
                        out_data.originalEdges + in_data.originalEdges,
                        node,
                        SHORTCUT_ARC,
@ -280,6 +282,7 @@ void ContractNode(ContractorThreadData *data,
                        target,
                        path_weight,
                        in_data.duration + out_data.duration,
                        in_data.distance + out_data.distance,
                        out_data.originalEdges + in_data.originalEdges,
                        node,
                        SHORTCUT_ARC,
@ -290,6 +293,7 @@ void ContractNode(ContractorThreadData *data,
                        source,
                        path_weight,
                        in_data.duration + out_data.duration,
                        in_data.distance + out_data.distance,
                        out_data.originalEdges + in_data.originalEdges,
                        node,
                        SHORTCUT_ARC,

@ -76,6 +76,7 @@ auto LoadAndUpdateEdgeExpandedGraph(const CustomizationConfig &config,
                                    const partitioner::MultiLevelPartition &mlp,
                                    std::vector<EdgeWeight> &node_weights,
                                    std::vector<EdgeDuration> &node_durations,
                                    std::vector<EdgeDistance> &node_distances,
                                    std::uint32_t &connectivity_checksum)
{
    updater::Updater updater(config.updater_config);
@ -84,6 +85,8 @@ auto LoadAndUpdateEdgeExpandedGraph(const CustomizationConfig &config,
    EdgeID num_nodes = updater.LoadAndUpdateEdgeExpandedGraph(
        edge_based_edge_list, node_weights, node_durations, connectivity_checksum);

    extractor::files::readEdgeBasedNodeDistances(config.GetPath(".osrm.enw"), node_distances);

    auto directed = partitioner::splitBidirectionalEdges(edge_based_edge_list);

    auto tidied = partitioner::prepareEdgesForUsageInGraph<
@ -124,10 +127,11 @@ int Customizer::Run(const CustomizationConfig &config)
    partitioner::files::readPartition(config.GetPath(".osrm.partition"), mlp);

    std::vector<EdgeWeight> node_weights;
    std::vector<EdgeDuration> node_durations; // TODO: to be removed later
    std::vector<EdgeDuration> node_durations; // TODO: remove when durations are optional
    std::vector<EdgeDistance> node_distances; // TODO: remove when distances are optional
    std::uint32_t connectivity_checksum = 0;
    auto graph = LoadAndUpdateEdgeExpandedGraph(
        config, mlp, node_weights, node_durations, connectivity_checksum);
        config, mlp, node_weights, node_durations, node_distances, connectivity_checksum);
    BOOST_ASSERT(graph.GetNumberOfNodes() == node_weights.size());
    std::for_each(node_weights.begin(), node_weights.end(), [](auto &w) { w &= 0x7fffffff; });
    util::Log() << "Loaded edge based graph: " << graph.GetNumberOfEdges() << " edges, "
@ -166,8 +170,10 @@ int Customizer::Run(const CustomizationConfig &config)
    util::Log() << "MLD customization writing took " << TIMER_SEC(writing_mld_data) << " seconds";

    TIMER_START(writing_graph);
    MultiLevelEdgeBasedGraph shaved_graph{
        std::move(graph), std::move(node_weights), std::move(node_durations)};
    MultiLevelEdgeBasedGraph shaved_graph{std::move(graph),
                                          std::move(node_weights),
                                          std::move(node_durations),
                                          std::move(node_distances)};
    customizer::files::writeGraph(
        config.GetPath(".osrm.mldgr"), shaved_graph, connectivity_checksum);
    TIMER_STOP(writing_graph);

@ -94,7 +94,7 @@ std::string waypointTypeToString(const guidance::WaypointType waypoint_type)
    return waypoint_type_names[static_cast<std::size_t>(waypoint_type)];
}

util::json::Array coordinateToLonLat(const util::Coordinate coordinate)
util::json::Array coordinateToLonLat(const util::Coordinate &coordinate)
{
    util::json::Array array;
    array.values.push_back(static_cast<double>(util::toFloating(coordinate.lon)));
@ -240,17 +240,22 @@ util::json::Object makeRoute(const guidance::Route &route,
    return json_route;
}

util::json::Object makeWaypoint(const util::Coordinate location, std::string name)
util::json::Object
makeWaypoint(const util::Coordinate &location, const double &distance, std::string name)
{
    util::json::Object waypoint;
    waypoint.values["location"] = detail::coordinateToLonLat(location);
    waypoint.values["name"] = std::move(name);
    waypoint.values["distance"] = distance;
    return waypoint;
}

util::json::Object makeWaypoint(const util::Coordinate location, std::string name, const Hint &hint)
util::json::Object makeWaypoint(const util::Coordinate &location,
                                const double &distance,
                                std::string name,
                                const Hint &hint)
{
    auto waypoint = makeWaypoint(location, name);
    auto waypoint = makeWaypoint(location, distance, name);
    waypoint.values["hint"] = hint.ToBase64();
    return waypoint;
}

@ -1,5 +1,6 @@
#include "engine/datafacade/mmap_memory_allocator.hpp"

#include "storage/block.hpp"
#include "storage/io.hpp"
#include "storage/serialization.hpp"
#include "storage/storage.hpp"
@ -7,7 +8,7 @@
#include "util/log.hpp"
#include "util/mmap_file.hpp"

#include "boost/assert.hpp"
#include <boost/assert.hpp>

namespace osrm
{
@ -16,48 +17,52 @@ namespace engine
namespace datafacade
{

MMapMemoryAllocator::MMapMemoryAllocator(const storage::StorageConfig &config,
                                         const boost::filesystem::path &memory_file)
MMapMemoryAllocator::MMapMemoryAllocator(const storage::StorageConfig &config)
{
    storage::Storage storage(config);
    std::vector<storage::SharedDataIndex::AllocatedRegion> allocated_regions;

    if (!boost::filesystem::exists(memory_file))
    {
        storage::DataLayout initial_layout;
        storage.PopulateStaticLayout(initial_layout);
        storage.PopulateUpdatableLayout(initial_layout);
        std::unique_ptr<storage::BaseDataLayout> fake_layout =
            std::make_unique<storage::TarDataLayout>();

        auto data_size = initial_layout.GetSizeOfLayout();
        // Convert the boost::filesystem::path object into a plain string
        // that's stored as a member of this allocator object
        rtree_filename = storage.PopulateLayoutWithRTree(*fake_layout);

        storage::io::BufferWriter writer;
        storage::serialization::write(writer, initial_layout);
        auto encoded_layout = writer.GetBuffer();

        auto total_size = data_size + encoded_layout.size();

        mapped_memory = util::mmapFile<char>(memory_file, mapped_memory_file, total_size);

        std::copy(encoded_layout.begin(), encoded_layout.end(), mapped_memory.data());

        index = storage::SharedDataIndex(
            {{mapped_memory.data() + encoded_layout.size(), std::move(initial_layout)}});

        storage.PopulateStaticData(index);
        storage.PopulateUpdatableData(index);
        // Now, we add one more AllocatedRegion, with its start address as the start
        // of the rtree_filename string we've saved. In the fake_layout, we've
        // stated that the data is at offset 0, which is where the string starts
        // at its own memory address.
        // The syntax &(rtree_filename[0]) gets the memory address of the first char.
        // We can't use the convenient `.data()` or `.c_str()` methods, because
        // prior to C++17 (which we're not using), those return a `const char *`,
        // which isn't compatible with the `char *` that AllocatedRegion expects
        // for its memory_ptr
        allocated_regions.push_back({&(rtree_filename[0]), std::move(fake_layout)});
    }
    else

    auto files = storage.GetStaticFiles();
    auto updatable_files = storage.GetUpdatableFiles();
    files.insert(files.end(), updatable_files.begin(), updatable_files.end());

    for (const auto &file : files)
    {
        mapped_memory = util::mmapFile<char>(memory_file, mapped_memory_file);

        storage::DataLayout layout;
        storage::io::BufferReader reader(mapped_memory.data(), mapped_memory.size());
        storage::serialization::read(reader, layout);
        auto layout_size = reader.GetPosition();

        index = storage::SharedDataIndex({{mapped_memory.data() + layout_size, std::move(layout)}});
        if (boost::filesystem::exists(file.second))
        {
            std::unique_ptr<storage::BaseDataLayout> layout =
                std::make_unique<storage::TarDataLayout>();
            boost::iostreams::mapped_file mapped_memory_file;
            util::mmapFile<char>(file.second, mapped_memory_file);
            mapped_memory_files.push_back(std::move(mapped_memory_file));
            storage::populateLayoutFromFile(file.second, *layout);
            allocated_regions.push_back({mapped_memory_file.data(), std::move(layout)});
        }
    }

    index = storage::SharedDataIndex{std::move(allocated_regions)};
}

MMapMemoryAllocator::~MMapMemoryAllocator() {}

const storage::SharedDataIndex &MMapMemoryAllocator::GetIndex() { return index; }

@ -15,14 +15,20 @@ ProcessMemoryAllocator::ProcessMemoryAllocator(const storage::StorageConfig &con
    storage::Storage storage(config);

    // Calculate the layout/size of the memory block
    storage::DataLayout layout;
    storage.PopulateStaticLayout(layout);
    storage.PopulateUpdatableLayout(layout);
    auto static_files = storage.GetStaticFiles();
    auto updatable_files = storage.GetUpdatableFiles();
    std::unique_ptr<storage::BaseDataLayout> layout =
        std::make_unique<storage::ContiguousDataLayout>();
    storage.PopulateLayoutWithRTree(*layout);
    storage.PopulateLayout(*layout, static_files);
    storage.PopulateLayout(*layout, updatable_files);

    // Allocate the memory block, then load data from files into it
    internal_memory = std::make_unique<char[]>(layout.GetSizeOfLayout());
    internal_memory = std::make_unique<char[]>(layout->GetSizeOfLayout());

    index = storage::SharedDataIndex({{internal_memory.get(), std::move(layout)}});
    std::vector<storage::SharedDataIndex::AllocatedRegion> regions;
    regions.push_back({internal_memory.get(), std::move(layout)});
    index = {std::move(regions)};

    storage.PopulateStaticData(index);
    storage.PopulateUpdatableData(index);

@ -25,8 +25,9 @@ SharedMemoryAllocator::SharedMemoryAllocator(
    auto mem = storage::makeSharedMemory(shm_key);

    storage::io::BufferReader reader(reinterpret_cast<char *>(mem->Ptr()), mem->Size());
    storage::DataLayout layout;
    storage::serialization::read(reader, layout);
    std::unique_ptr<storage::BaseDataLayout> layout =
        std::make_unique<storage::ContiguousDataLayout>();
    storage::serialization::read(reader, *layout);
    auto layout_size = reader.GetPosition();

    regions.push_back({reinterpret_cast<char *>(mem->Ptr()) + layout_size, std::move(layout)});

@ -23,7 +23,9 @@ bool EngineConfig::IsValid() const
                        unlimited_or_more_than(max_results_nearest, 0) &&
                        max_alternatives >= 0;

    return ((use_shared_memory && all_path_are_empty) || storage_config.IsValid()) && limits_valid;
    return ((use_shared_memory && all_path_are_empty) || (use_mmap && storage_config.IsValid()) ||
            storage_config.IsValid()) &&
           limits_valid;
}
}
}

@ -213,7 +213,8 @@ Status MatchPlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
        });
    }

    auto candidates_lists = GetPhantomNodesInRange(facade, tidied.parameters, search_radiuses);
    auto candidates_lists =
        GetPhantomNodesInRange(facade, tidied.parameters, search_radiuses, true);

    filterCandidates(tidied.parameters.coordinates, candidates_lists);
    if (std::all_of(candidates_lists.begin(),

@ -4,6 +4,7 @@
#include "engine/api/table_parameters.hpp"
#include "engine/routing_algorithms/many_to_many.hpp"
#include "engine/search_engine_data.hpp"
#include "util/coordinate_calculation.hpp"
#include "util/json_container.hpp"
#include "util/string_util.hpp"

@ -86,7 +87,7 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
    bool request_duration = params.annotations & api::TableParameters::AnnotationsType::Duration;

    auto result_tables_pair = algorithms.ManyToManySearch(
        snapped_phantoms, params.sources, params.destinations, request_distance, request_duration);
        snapped_phantoms, params.sources, params.destinations, request_distance);

    if ((request_duration && result_tables_pair.first.empty()) ||
        (request_distance && result_tables_pair.second.empty()))
@ -94,8 +95,66 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
        return Error("NoTable", "No table found", result);
    }

    std::vector<api::TableAPI::TableCellRef> estimated_pairs;

    // Scan table for null results - if any exist, replace with distance estimates
    if (params.fallback_speed != INVALID_FALLBACK_SPEED || params.scale_factor != 1)
    {
        for (std::size_t row = 0; row < num_sources; row++)
        {
            for (std::size_t column = 0; column < num_destinations; column++)
            {
                const auto &table_index = row * num_destinations + column;
                BOOST_ASSERT(table_index < result_tables_pair.first.size());
                if (params.fallback_speed != INVALID_FALLBACK_SPEED && params.fallback_speed > 0 &&
                    result_tables_pair.first[table_index] == MAXIMAL_EDGE_DURATION)
                {
                    const auto &source =
                        snapped_phantoms[params.sources.empty() ? row : params.sources[row]];
                    const auto &destination =
                        snapped_phantoms[params.destinations.empty() ? column
                                                                     : params.destinations[column]];

                    auto distance_estimate =
                        params.fallback_coordinate_type ==
                                api::TableParameters::FallbackCoordinateType::Input
                            ? util::coordinate_calculation::fccApproximateDistance(
                                  source.input_location, destination.input_location)
                            : util::coordinate_calculation::fccApproximateDistance(
                                  source.location, destination.location);

                    result_tables_pair.first[table_index] =
                        distance_estimate / (double)params.fallback_speed;
                    if (!result_tables_pair.second.empty())
                    {
                        result_tables_pair.second[table_index] = distance_estimate;
                    }

                    estimated_pairs.emplace_back(row, column);
                }
                if (params.scale_factor > 0 && params.scale_factor != 1 &&
                    result_tables_pair.first[table_index] != MAXIMAL_EDGE_DURATION &&
                    result_tables_pair.first[table_index] != 0)
                {
                    EdgeDuration diff =
                        MAXIMAL_EDGE_DURATION / result_tables_pair.first[table_index];

                    if (params.scale_factor >= diff)
                    {
                        result_tables_pair.first[table_index] = MAXIMAL_EDGE_DURATION - 1;
                    }
                    else
                    {
                        result_tables_pair.first[table_index] = std::lround(
                            result_tables_pair.first[table_index] * params.scale_factor);
                    }
                }
            }
        }
    }

    api::TableAPI table_api{facade, params};
    table_api.MakeResponse(result_tables_pair, snapped_phantoms, result);
    table_api.MakeResponse(result_tables_pair, snapped_phantoms, estimated_pairs, result);

    return Status::Ok;
}

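The arithmetic above in one worked example (numbers invented): with fallback_speed = 10 and a great-circle estimate of 1500 m between the snapped coordinates, an unreachable cell becomes duration 1500 / 10 = 150 and, when distances were requested, distance 1500; independently, a reachable 80 s cell with scale_factor = 1.5 becomes lround(80 * 1.5) = 120, and the clamp to MAXIMAL_EDGE_DURATION - 1 only fires when the scaled value would overflow.
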
@ -294,6 +294,7 @@ struct SpeedLayer : public vtzero::layer_builder
    vtzero::index_value key_duration;
    vtzero::index_value key_name;
    vtzero::index_value key_rate;
    vtzero::index_value key_is_startpoint;

    SpeedLayer(vtzero::tile_builder &tile)
        : layer_builder(tile, "speeds"), uint_index(*this), double_index(*this),
@ -302,7 +303,8 @@ struct SpeedLayer : public vtzero::layer_builder
          key_datasource(add_key_without_dup_check("datasource")),
          key_weight(add_key_without_dup_check("weight")),
          key_duration(add_key_without_dup_check("duration")),
          key_name(add_key_without_dup_check("name")), key_rate(add_key_without_dup_check("rate"))
          key_name(add_key_without_dup_check("name")), key_rate(add_key_without_dup_check("rate")),
          key_is_startpoint(add_key_without_dup_check("is_startpoint"))
    {
    }

@ -349,6 +351,11 @@ class SpeedLayerFeatureBuilder : public vtzero::linestring_feature_builder

    void set_rate(double value) { add_property(m_layer.key_rate, m_layer.double_index(value)); }

    void set_is_startpoint(bool value)
    {
        add_property(m_layer.key_is_startpoint, m_layer.bool_index(value));
    }

}; // class SpeedLayerFeatureBuilder

struct TurnsLayer : public vtzero::layer_builder
@ -485,6 +492,8 @@ void encodeVectorTile(const DataFacadeBase &facade,
                    const auto reverse_datasource_idx = reverse_datasource_range(
                        reverse_datasource_range.size() - edge.fwd_segment_position - 1);

                    const auto is_startpoint = edge.is_startpoint;

                    const auto component_id = facade.GetComponentID(edge.forward_segment_id.id);
                    const auto name_id = facade.GetNameIndex(edge.forward_segment_id.id);
                    auto name = facade.GetNameForID(name_id);
@ -516,6 +525,7 @@ void encodeVectorTile(const DataFacadeBase &facade,
                        fbuilder.set_duration(forward_duration / 10.0);
                        fbuilder.set_name(name);
                        fbuilder.set_rate(forward_rate / 10.0);
                        fbuilder.set_is_startpoint(is_startpoint);

                        fbuilder.commit();
                    }
@ -549,6 +559,7 @@ void encodeVectorTile(const DataFacadeBase &facade,
                        fbuilder.set_duration(reverse_duration / 10.0);
                        fbuilder.set_name(name);
                        fbuilder.set_rate(reverse_rate / 10.0);
                        fbuilder.set_is_startpoint(is_startpoint);

                        fbuilder.commit();
                    }

@ -217,10 +217,7 @@ Status TripPlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,

    // compute the duration table of all phantom nodes
    auto result_duration_table = util::DistTableWrapper<EdgeWeight>(
        algorithms
            .ManyToManySearch(
                snapped_phantoms, {}, {}, /*requestDistance*/ false, /*requestDuration*/ true)
            .first,
        algorithms.ManyToManySearch(snapped_phantoms, {}, {}, /*requestDistance*/ false).first,
        number_of_locations);

    if (result_duration_table.size() == 0)

@ -90,7 +90,7 @@ void alternativeRoutingStep(const DataFacade<Algorithm> &facade,
    else
    {
        // check whether there is a loop present at the node
        const auto loop_weight = getLoopWeight<false>(facade, node);
        const auto loop_weight = std::get<0>(getLoopWeight<false>(facade, node));
        const EdgeWeight new_weight_with_loop = new_weight + loop_weight;
        if (loop_weight != INVALID_EDGE_WEIGHT &&
            new_weight_with_loop <= *upper_bound_to_shortest_path_weight)

@ -21,18 +21,21 @@ namespace ch
inline bool addLoopWeight(const DataFacade<ch::Algorithm> &facade,
                          const NodeID node,
                          EdgeWeight &weight,
                          EdgeDuration &duration)
                          EdgeDuration &duration,
                          EdgeDistance &distance)
{ // Special case for CH when contractor creates a loop edge node->node
    BOOST_ASSERT(weight < 0);

    const auto loop_weight = ch::getLoopWeight<false>(facade, node);
    if (loop_weight != INVALID_EDGE_WEIGHT)
    if (std::get<0>(loop_weight) != INVALID_EDGE_WEIGHT)
    {
        const auto new_weight_with_loop = weight + loop_weight;
        const auto new_weight_with_loop = weight + std::get<0>(loop_weight);
        if (new_weight_with_loop >= 0)
        {
            weight = new_weight_with_loop;
            duration += ch::getLoopWeight<true>(facade, node);
            auto result = ch::getLoopWeight<true>(facade, node);
            duration += std::get<0>(result);
            distance += std::get<1>(result);
            return true;
        }
    }
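The std::get calls above imply getLoopWeight now returns a tuple-like result instead of a scalar; a hedged sketch of unpacking it once at a call site (the exact tuple type is assumed):

// Assumed shape: (duration, distance) when the template flag requests durations.
const auto loop = ch::getLoopWeight<true>(facade, node);
duration += std::get<0>(loop);
distance += std::get<1>(loop);
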
@ -46,6 +49,7 @@ void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
|
||||
const NodeID node,
|
||||
const EdgeWeight weight,
|
||||
const EdgeDuration duration,
|
||||
const EdgeDistance distance,
|
||||
typename SearchEngineData<Algorithm>::ManyToManyQueryHeap &query_heap,
|
||||
const PhantomNode &)
|
||||
{
|
||||
@ -63,21 +67,23 @@ void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
|
||||
const auto edge_weight = data.weight;
|
||||
|
||||
const auto edge_duration = data.duration;
|
||||
const auto edge_distance = data.distance;
|
||||
|
||||
BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
|
||||
const auto to_weight = weight + edge_weight;
|
||||
const auto to_duration = duration + edge_duration;
|
||||
const auto to_distance = distance + edge_distance;
|
||||
|
||||
// New Node discovered -> Add to Heap + Node Info Storage
|
||||
if (!query_heap.WasInserted(to))
|
||||
{
|
||||
query_heap.Insert(to, to_weight, {node, to_duration});
|
||||
query_heap.Insert(to, to_weight, {node, to_duration, to_distance});
|
||||
}
|
||||
// Found a shorter Path -> Update weight and set new parent
|
||||
else if (std::tie(to_weight, to_duration) <
|
||||
std::tie(query_heap.GetKey(to), query_heap.GetData(to).duration))
|
||||
{
|
||||
query_heap.GetData(to) = {node, to_duration};
|
||||
query_heap.GetData(to) = {node, to_duration, to_distance};
|
||||
query_heap.DecreaseKey(to, to_weight);
|
||||
}
|
||||
}
|
||||
@ -91,12 +97,14 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
const std::vector<NodeBucket> &search_space_with_buckets,
|
||||
std::vector<EdgeWeight> &weights_table,
|
||||
std::vector<EdgeDuration> &durations_table,
|
||||
std::vector<EdgeDistance> &distances_table,
|
||||
std::vector<NodeID> &middle_nodes_table,
|
||||
const PhantomNode &phantom_node)
|
||||
{
|
||||
const auto node = query_heap.DeleteMin();
|
||||
const auto source_weight = query_heap.GetKey(node);
|
||||
const auto source_duration = query_heap.GetData(node).duration;
|
||||
const auto source_distance = query_heap.GetData(node).distance;
|
||||
|
||||
// Check if each encountered node has an entry
|
||||
const auto &bucket_list = std::equal_range(search_space_with_buckets.begin(),
|
||||
@ -109,20 +117,29 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
const auto column_index = current_bucket.column_index;
|
||||
const auto target_weight = current_bucket.weight;
|
||||
const auto target_duration = current_bucket.duration;
|
||||
const auto target_distance = current_bucket.distance;
|
||||
|
||||
auto ¤t_weight = weights_table[row_index * number_of_targets + column_index];
|
||||
|
||||
EdgeDistance nulldistance = 0;
|
||||
|
||||
auto ¤t_duration = durations_table[row_index * number_of_targets + column_index];
|
||||
auto ¤t_distance =
|
||||
distances_table.empty() ? nulldistance
|
||||
: distances_table[row_index * number_of_targets + column_index];
|
||||
|
||||
// Check if new weight is better
|
||||
auto new_weight = source_weight + target_weight;
|
||||
auto new_duration = source_duration + target_duration;
|
||||
auto new_distance = source_distance + target_distance;
|
||||
|
||||
if (new_weight < 0)
|
||||
{
|
||||
if (addLoopWeight(facade, node, new_weight, new_duration))
|
||||
if (addLoopWeight(facade, node, new_weight, new_duration, new_distance))
|
||||
{
|
||||
current_weight = std::min(current_weight, new_weight);
|
||||
current_duration = std::min(current_duration, new_duration);
|
||||
current_distance = std::min(current_distance, new_distance);
|
||||
middle_nodes_table[row_index * number_of_targets + column_index] = node;
|
||||
}
|
||||
}
|
||||
@ -130,12 +147,13 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
{
|
||||
current_weight = new_weight;
|
||||
current_duration = new_duration;
|
||||
current_distance = new_distance;
|
||||
middle_nodes_table[row_index * number_of_targets + column_index] = node;
|
||||
}
|
||||
}
|
||||
|
||||
relaxOutgoingEdges<FORWARD_DIRECTION>(
|
||||
facade, node, source_weight, source_duration, query_heap, phantom_node);
|
||||
facade, node, source_weight, source_duration, source_distance, query_heap, phantom_node);
|
||||
}
|
||||
|
||||
void backwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
@ -147,172 +165,19 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
const auto node = query_heap.DeleteMin();
|
||||
const auto target_weight = query_heap.GetKey(node);
|
||||
const auto target_duration = query_heap.GetData(node).duration;
|
||||
const auto target_distance = query_heap.GetData(node).distance;
|
||||
const auto parent = query_heap.GetData(node).parent;
|
||||
|
||||
// Store settled nodes in search space bucket
|
||||
search_space_with_buckets.emplace_back(
|
||||
node, parent, column_index, target_weight, target_duration);
|
||||
node, parent, column_index, target_weight, target_duration, target_distance);
|
||||
|
||||
relaxOutgoingEdges<REVERSE_DIRECTION>(
|
||||
facade, node, target_weight, target_duration, query_heap, phantom_node);
|
||||
facade, node, target_weight, target_duration, target_distance, query_heap, phantom_node);
|
||||
}
|
||||
|
||||
} // namespace ch
|
||||
|
||||
void retrievePackedPathFromSearchSpace(const NodeID middle_node_id,
|
||||
const unsigned column_index,
|
||||
const std::vector<NodeBucket> &search_space_with_buckets,
|
||||
std::vector<NodeID> &packed_leg)
|
||||
{
|
||||
auto bucket_list = std::equal_range(search_space_with_buckets.begin(),
|
||||
search_space_with_buckets.end(),
|
||||
middle_node_id,
|
||||
NodeBucket::ColumnCompare(column_index));
|
||||
|
||||
NodeID current_node_id = middle_node_id;
|
||||
|
||||
BOOST_ASSERT_MSG(std::distance(bucket_list.first, bucket_list.second) == 1,
|
||||
"The pointers are not pointing to the same element.");
|
||||
|
||||
while (bucket_list.first->parent_node != current_node_id &&
|
||||
bucket_list.first != search_space_with_buckets.end())
|
||||
{
|
||||
current_node_id = bucket_list.first->parent_node;
|
||||
|
||||
packed_leg.emplace_back(current_node_id);
|
||||
|
||||
bucket_list = std::equal_range(search_space_with_buckets.begin(),
|
||||
search_space_with_buckets.end(),
|
||||
current_node_id,
|
||||
NodeBucket::ColumnCompare(column_index));
|
||||
}
|
||||
}
|
||||

void calculateDistances(typename SearchEngineData<ch::Algorithm>::ManyToManyQueryHeap &query_heap,
                        const DataFacade<ch::Algorithm> &facade,
                        const std::vector<PhantomNode> &phantom_nodes,
                        const std::vector<std::size_t> &target_indices,
                        const std::size_t row_index,
                        const std::size_t source_index,
                        const PhantomNode &source_phantom,
                        const std::size_t number_of_targets,
                        const std::vector<NodeBucket> &search_space_with_buckets,
                        std::vector<EdgeDistance> &distances_table,
                        const std::vector<NodeID> &middle_nodes_table)
{
    std::vector<NodeID> packed_leg;

    for (auto column_index : util::irange<std::size_t>(0, number_of_targets))
    {
        const auto target_index = target_indices[column_index];
        const auto &target_phantom = phantom_nodes[target_index];

        if (source_index == target_index)
        {
            distances_table[row_index * number_of_targets + column_index] = 0.0;
            continue;
        }

        NodeID middle_node_id = middle_nodes_table[row_index * number_of_targets + column_index];

        if (middle_node_id == SPECIAL_NODEID) // takes care of one-ways
        {
            distances_table[row_index * number_of_targets + column_index] = INVALID_EDGE_DISTANCE;
            continue;
        }

        // Step 1: Find path from source to middle node
        ch::retrievePackedPathFromSingleManyToManyHeap(query_heap, middle_node_id, packed_leg);
        std::reverse(packed_leg.begin(), packed_leg.end());

        packed_leg.push_back(middle_node_id);

        // Step 2: Find path from middle to target node
        retrievePackedPathFromSearchSpace(
            middle_node_id, column_index, search_space_with_buckets, packed_leg);

        if (packed_leg.size() == 1 && (needsLoopForward(source_phantom, target_phantom) ||
                                       needsLoopBackwards(source_phantom, target_phantom)))
        {
            auto weight = ch::getLoopWeight<false>(facade, packed_leg.front());
            if (weight != INVALID_EDGE_WEIGHT)
                packed_leg.push_back(packed_leg.front());
        }
        if (!packed_leg.empty())
        {
            auto annotation =
                ch::calculateEBGNodeAnnotations(facade, packed_leg.begin(), packed_leg.end());

            distances_table[row_index * number_of_targets + column_index] = annotation;

            // check the direction of travel to figure out how to calculate the offset to/from
            // the source/target
            if (source_phantom.forward_segment_id.id == packed_leg.front())
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //       -->s                    <-- subtract offset to start at source
                //          .........            <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                EdgeDistance offset = source_phantom.GetForwardDistance();
                distances_table[row_index * number_of_targets + column_index] -= offset;
            }
            else if (source_phantom.reverse_segment_id.id == packed_leg.front())
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //          s<-------            <-- subtract offset to start at source
                //       ...                     <-- want this distance
                // entry 0---1---2---3           <-- 3 is exit node
                EdgeDistance offset = source_phantom.GetReverseDistance();
                distances_table[row_index * number_of_targets + column_index] -= offset;
            }
            if (target_phantom.forward_segment_id.id == packed_leg.back())
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //                   ++>t        <-- add offset to get to target
                //       ................        <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                EdgeDistance offset = target_phantom.GetForwardDistance();
                distances_table[row_index * number_of_targets + column_index] += offset;
            }
            else if (target_phantom.reverse_segment_id.id == packed_leg.back())
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //                   <++t        <-- add offset to get from target
                //       ................        <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                EdgeDistance offset = target_phantom.GetReverseDistance();
                distances_table[row_index * number_of_targets + column_index] += offset;
            }
        }
        else
        {
            // there is no shortcut to unpack. source and target are on the same EBG node.
            // if the offset of the target is greater than the offset of the source, subtract it
            if (target_phantom.GetForwardDistance() > source_phantom.GetForwardDistance())
            {
                //       --------->t             <-- offsets
                //       ->s                     <-- subtract source offset from target offset
                //         .......               <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                EdgeDistance offset =
                    target_phantom.GetForwardDistance() - source_phantom.GetForwardDistance();
                distances_table[row_index * number_of_targets + column_index] = offset;
            }
            else
            {
                //          s<---                <-- offsets
                //       t<---------             <-- subtract source offset from target offset
                //        ......                 <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                EdgeDistance offset =
                    target_phantom.GetReverseDistance() - source_phantom.GetReverseDistance();
                distances_table[row_index * number_of_targets + column_index] = offset;
            }
        }
        packed_leg.clear();
    }
}
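All of the offset corrections in calculateDistances reduce to one formula: the distance summed over the unpacked edge-based nodes, minus the source phantom's offset into its first segment, plus the target phantom's offset into its last segment. A worked toy example of that arithmetic (numbers invented for illustration):

#include <cassert>

int main()
{
    const double annotation = 300.0;    // summed length of the traversed edge-based nodes
    const double source_offset = 120.0; // distance already behind us at the source phantom
    const double target_offset = 80.0;  // distance from the last entry point to the target phantom

    const double result = -source_offset + annotation + target_offset;
    assert(result == 260.0); // meters along the actual source-to-target path
}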

template <>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
@@ -320,18 +185,16 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
                 const std::vector<PhantomNode> &phantom_nodes,
                 const std::vector<std::size_t> &source_indices,
                 const std::vector<std::size_t> &target_indices,
                 const bool calculate_distance,
                 const bool calculate_duration)
                 const bool calculate_distance)
{
    (void)calculate_duration; // TODO: stub to use when computing durations becomes optional

    const auto number_of_sources = source_indices.size();
    const auto number_of_targets = target_indices.size();
    const auto number_of_entries = number_of_sources * number_of_targets;

    std::vector<EdgeWeight> weights_table(number_of_entries, INVALID_EDGE_WEIGHT);
    std::vector<EdgeDuration> durations_table(number_of_entries, MAXIMAL_EDGE_DURATION);
    std::vector<EdgeDistance> distances_table;
    std::vector<EdgeDistance> distances_table(calculate_distance ? number_of_entries : 0,
                                              MAXIMAL_EDGE_DISTANCE);
    std::vector<NodeID> middle_nodes_table(number_of_entries, SPECIAL_NODEID);

    std::vector<NodeBucket> search_space_with_buckets;
@@ -380,25 +243,10 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
                          search_space_with_buckets,
                          weights_table,
                          durations_table,
                          distances_table,
                          middle_nodes_table,
                          source_phantom);
    }

    if (calculate_distance)
    {
        distances_table.resize(number_of_entries, INVALID_EDGE_DISTANCE);
        calculateDistances(query_heap,
                           facade,
                           phantom_nodes,
                           target_indices,
                           row_index,
                           source_index,
                           source_phantom,
                           number_of_targets,
                           search_space_with_buckets,
                           distances_table,
                           middle_nodes_table);
    }
    }

    return std::make_pair(durations_table, distances_table);
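Sizing distances_table to zero elements when distances are not requested lets one code path serve both modes: an empty vector doubles as the "distances disabled" flag that later code checks with empty(). The idiom in isolation (a sketch, not the project's code):

#include <cstddef>
#include <vector>

std::vector<double> make_table(bool calculate_distance, std::size_t number_of_entries)
{
    // Empty vector == feature disabled; fully sized vector == collect results.
    return std::vector<double>(calculate_distance ? number_of_entries : 0, -1.0);
}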
@@ -41,6 +41,7 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
                        const NodeID node,
                        const EdgeWeight weight,
                        const EdgeDuration duration,
                        const EdgeDistance distance,
                        typename SearchEngineData<mld::Algorithm>::ManyToManyQueryHeap &query_heap,
                        Args... args)
{
@@ -65,65 +66,77 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
        { // Shortcuts in forward direction
            auto destination = cell.GetDestinationNodes().begin();
            auto shortcut_durations = cell.GetOutDuration(node);
            auto shortcut_distances = cell.GetOutDistance(node);
            for (auto shortcut_weight : cell.GetOutWeight(node))
            {
                BOOST_ASSERT(destination != cell.GetDestinationNodes().end());
                BOOST_ASSERT(!shortcut_durations.empty());
                BOOST_ASSERT(!shortcut_distances.empty());
                const NodeID to = *destination;

                if (shortcut_weight != INVALID_EDGE_WEIGHT && node != to)
                {
                    const auto to_weight = weight + shortcut_weight;
                    const auto to_duration = duration + shortcut_durations.front();
                    const auto to_distance = distance + shortcut_distances.front();
                    if (!query_heap.WasInserted(to))
                    {
                        query_heap.Insert(to, to_weight, {node, true, to_duration});
                        query_heap.Insert(to, to_weight, {node, true, to_duration, to_distance});
                    }
                    else if (std::tie(to_weight, to_duration, node) <
                    else if (std::tie(to_weight, to_duration, to_distance, node) <
                             std::tie(query_heap.GetKey(to),
                                      query_heap.GetData(to).duration,
                                      query_heap.GetData(to).distance,
                                      query_heap.GetData(to).parent))
                    {
                        query_heap.GetData(to) = {node, true, to_duration};
                        query_heap.GetData(to) = {node, true, to_duration, to_distance};
                        query_heap.DecreaseKey(to, to_weight);
                    }
                }
                ++destination;
                shortcut_durations.advance_begin(1);
                shortcut_distances.advance_begin(1);
            }
            BOOST_ASSERT(shortcut_durations.empty());
            BOOST_ASSERT(shortcut_distances.empty());
        }
        else
        { // Shortcuts in backward direction
            auto source = cell.GetSourceNodes().begin();
            auto shortcut_durations = cell.GetInDuration(node);
            auto shortcut_distances = cell.GetInDistance(node);
            for (auto shortcut_weight : cell.GetInWeight(node))
            {
                BOOST_ASSERT(source != cell.GetSourceNodes().end());
                BOOST_ASSERT(!shortcut_durations.empty());
                BOOST_ASSERT(!shortcut_distances.empty());
                const NodeID to = *source;

                if (shortcut_weight != INVALID_EDGE_WEIGHT && node != to)
                {
                    const auto to_weight = weight + shortcut_weight;
                    const auto to_duration = duration + shortcut_durations.front();
                    const auto to_distance = distance + shortcut_distances.front();
                    if (!query_heap.WasInserted(to))
                    {
                        query_heap.Insert(to, to_weight, {node, true, to_duration});
                        query_heap.Insert(to, to_weight, {node, true, to_duration, to_distance});
                    }
                    else if (std::tie(to_weight, to_duration, node) <
                    else if (std::tie(to_weight, to_duration, to_distance, node) <
                             std::tie(query_heap.GetKey(to),
                                      query_heap.GetData(to).duration,
                                      query_heap.GetData(to).distance,
                                      query_heap.GetData(to).parent))
                    {
                        query_heap.GetData(to) = {node, true, to_duration};
                        query_heap.GetData(to) = {node, true, to_duration, to_distance};
                        query_heap.DecreaseKey(to, to_weight);
                    }
                }
                ++source;
                shortcut_durations.advance_begin(1);
                shortcut_distances.advance_begin(1);
            }
            BOOST_ASSERT(shortcut_durations.empty());
            BOOST_ASSERT(shortcut_distances.empty());
        }
    }

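The std::tie comparisons above implement a strict lexicographic update rule: weight decides first, then duration, then the newly added distance, with the parent id as the final tie-breaker. A compilable illustration (values arbitrary):

#include <cassert>
#include <tuple>

int main()
{
    int new_weight = 10, new_duration = 5, new_distance = 3, new_parent = 1;
    int old_weight = 10, old_duration = 5, old_distance = 4, old_parent = 0;

    // Weight and duration tie, so the shorter distance wins before parent id is even consulted.
    assert(std::tie(new_weight, new_duration, new_distance, new_parent) <
           std::tie(old_weight, old_duration, old_distance, old_parent));
}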
@@ -143,25 +156,28 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
        const auto node_id = DIRECTION == FORWARD_DIRECTION ? node : facade.GetTarget(edge);
        const auto node_weight = facade.GetNodeWeight(node_id);
        const auto node_duration = facade.GetNodeDuration(node_id);
        const auto node_distance = facade.GetNodeDistance(node_id);
        const auto turn_weight = node_weight + facade.GetWeightPenaltyForEdgeID(turn_id);
        const auto turn_duration = node_duration + facade.GetDurationPenaltyForEdgeID(turn_id);

        BOOST_ASSERT_MSG(node_weight + turn_weight > 0, "edge weight is invalid");
        const auto to_weight = weight + turn_weight;
        const auto to_duration = duration + turn_duration;
        const auto to_distance = distance + node_distance;

        // New Node discovered -> Add to Heap + Node Info Storage
        if (!query_heap.WasInserted(to))
        {
            query_heap.Insert(to, to_weight, {node, false, to_duration});
            query_heap.Insert(to, to_weight, {node, false, to_duration, to_distance});
        }
        // Found a shorter Path -> Update weight and set new parent
        else if (std::tie(to_weight, to_duration, node) <
        else if (std::tie(to_weight, to_duration, to_distance, node) <
                 std::tie(query_heap.GetKey(to),
                          query_heap.GetData(to).duration,
                          query_heap.GetData(to).distance,
                          query_heap.GetData(to).parent))
        {
            query_heap.GetData(to) = {node, false, to_duration};
            query_heap.GetData(to) = {node, false, to_duration, to_distance};
            query_heap.DecreaseKey(to, to_weight);
        }
    }
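Each heap entry now carries a distance next to the duration. A hedged sketch of what such a payload looks like; the field names mirror the diff, but the struct itself is illustrative rather than the project's actual heap-data definition:

using NodeID = unsigned;
using EdgeDuration = int;
using EdgeDistance = double;

struct ManyToManyHeapData
{
    NodeID parent;         // predecessor used for path retrieval
    bool from_clique_arc;  // MLD: whether the node was reached via a cell shortcut
    EdgeDuration duration;
    EdgeDistance distance; // added alongside duration by this change
};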
@@ -182,11 +198,12 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
{
    std::vector<EdgeWeight> weights(phantom_indices.size(), INVALID_EDGE_WEIGHT);
    std::vector<EdgeDuration> durations(phantom_indices.size(), MAXIMAL_EDGE_DURATION);
    std::vector<EdgeDistance> distances_table;
    std::vector<EdgeDistance> distances_table(calculate_distance ? phantom_indices.size() : 0,
                                              MAXIMAL_EDGE_DISTANCE);
    std::vector<NodeID> middle_nodes_table(phantom_indices.size(), SPECIAL_NODEID);

    // Collect destination (source) nodes into a map
    std::unordered_multimap<NodeID, std::tuple<std::size_t, EdgeWeight, EdgeDuration>>
    std::unordered_multimap<NodeID, std::tuple<std::size_t, EdgeWeight, EdgeDuration, EdgeDistance>>
        target_nodes_index;
    target_nodes_index.reserve(phantom_indices.size());
    for (std::size_t index = 0; index < phantom_indices.size(); ++index)
@@ -201,13 +218,15 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
                {phantom_node.forward_segment_id.id,
                 std::make_tuple(index,
                                 phantom_node.GetForwardWeightPlusOffset(),
                                 phantom_node.GetForwardDuration())});
                                 phantom_node.GetForwardDuration(),
                                 phantom_node.GetForwardDistance())});
            if (phantom_node.IsValidReverseTarget())
                target_nodes_index.insert(
                    {phantom_node.reverse_segment_id.id,
                     std::make_tuple(index,
                                     phantom_node.GetReverseWeightPlusOffset(),
                                     phantom_node.GetReverseDuration())});
                                     phantom_node.GetReverseDuration(),
                                     phantom_node.GetReverseDistance())});
        }
        else if (DIRECTION == REVERSE_DIRECTION)
        {
@@ -216,13 +235,15 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
                {phantom_node.forward_segment_id.id,
                 std::make_tuple(index,
                                 -phantom_node.GetForwardWeightPlusOffset(),
                                 -phantom_node.GetForwardDuration())});
                                 -phantom_node.GetForwardDuration(),
                                 -phantom_node.GetForwardDistance())});
            if (phantom_node.IsValidReverseSource())
                target_nodes_index.insert(
                    {phantom_node.reverse_segment_id.id,
                     std::make_tuple(index,
                                     -phantom_node.GetReverseWeightPlusOffset(),
                                     -phantom_node.GetReverseDuration())});
                                     -phantom_node.GetReverseDuration(),
                                     -phantom_node.GetReverseDistance())});
        }
    }

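Several phantom nodes can snap to the same graph node, which is why target_nodes_index is a multimap: equal_range later yields every (index, weight, duration, distance) entry registered for a settled node. A small self-contained usage sketch under those assumptions:

#include <cstddef>
#include <tuple>
#include <unordered_map>

int main()
{
    using Entry = std::tuple<std::size_t, int, int, double>; // index, weight, duration, distance
    std::unordered_multimap<unsigned, Entry> targets;
    targets.emplace(7u, std::make_tuple(std::size_t{0}, 10, 12, 5.0));
    targets.emplace(7u, std::make_tuple(std::size_t{3}, 11, 13, 6.0)); // same node, other phantom

    auto candidates = targets.equal_range(7u);
    for (auto it = candidates.first; it != candidates.second; ++it)
    {
        std::size_t index;
        int weight, duration;
        double distance;
        std::tie(index, weight, duration, distance) = it->second;
        // ... compare against the best-known values for this index ...
        (void)index; (void)weight; (void)duration; (void)distance;
    }
}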
@@ -232,25 +253,33 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
    auto &query_heap = *(engine_working_data.many_to_many_heap);

    // Check if node is in the destinations list and update weights/durations
    auto update_values = [&](NodeID node, EdgeWeight weight, EdgeDuration duration) {
    auto update_values =
        [&](NodeID node, EdgeWeight weight, EdgeDuration duration, EdgeDistance distance) {
            auto candidates = target_nodes_index.equal_range(node);
            for (auto it = candidates.first; it != candidates.second;)
            {
                std::size_t index;
                EdgeWeight target_weight;
                EdgeDuration target_duration;
                std::tie(index, target_weight, target_duration) = it->second;
                EdgeDistance target_distance;
                std::tie(index, target_weight, target_duration, target_distance) = it->second;

                const auto path_weight = weight + target_weight;
                if (path_weight >= 0)
                {
                    const auto path_duration = duration + target_duration;
                    const auto path_distance = distance + target_distance;

                    if (std::tie(path_weight, path_duration) <
                        std::tie(weights[index], durations[index]))
                    EdgeDistance nulldistance = 0;
                    auto &current_distance =
                        distances_table.empty() ? nulldistance : distances_table[index];

                    if (std::tie(path_weight, path_duration, path_distance) <
                        std::tie(weights[index], durations[index], current_distance))
                    {
                        weights[index] = path_weight;
                        durations[index] = path_duration;
                        current_distance = path_distance;
                        middle_nodes_table[index] = node;
                    }

@@ -264,12 +293,15 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
            }
        };
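The nulldistance local gives the comparison a writable dummy slot whenever distances are disabled (distances_table is empty), so one update path serves both modes. The trick in isolation (a sketch, not the project's code):

#include <cassert>
#include <cstddef>
#include <vector>

int main()
{
    std::vector<double> distances; // empty: distance output disabled
    double nulldistance = 0;       // writable sink used in its place
    std::size_t index = 0;

    // Only the chosen operand of ?: is evaluated, so distances[index] is never touched here.
    auto &current_distance = distances.empty() ? nulldistance : distances[index];
    current_distance = 42.0; // harmless when disabled
    assert(distances.empty() && nulldistance == 42.0);
}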
    auto insert_node = [&](NodeID node, EdgeWeight initial_weight, EdgeDuration initial_duration) {
    auto insert_node = [&](NodeID node,
                           EdgeWeight initial_weight,
                           EdgeDuration initial_duration,
                           EdgeDistance initial_distance) {

        // Update single node paths
        update_values(node, initial_weight, initial_duration);
        update_values(node, initial_weight, initial_duration, initial_distance);

        query_heap.Insert(node, initial_weight, {node, initial_duration});
        query_heap.Insert(node, initial_weight, {node, initial_duration, initial_distance});

        // Place adjacent nodes into heap
        for (auto edge : facade.GetAdjacentEdgeRange(node))
@@ -292,8 +324,9 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
                                         facade.GetWeightPenaltyForEdgeID(turn_id);
                const auto edge_duration = initial_duration + facade.GetNodeDuration(node_id) +
                                           facade.GetDurationPenaltyForEdgeID(turn_id);
                const auto edge_distance = initial_distance + facade.GetNodeDistance(node_id);

                query_heap.Insert(to, edge_weight, {node, edge_duration});
                query_heap.Insert(to, edge_weight, {node, edge_duration, edge_distance});
            }
        }
    };
@@ -307,14 +340,16 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
        {
            insert_node(phantom_node.forward_segment_id.id,
                        -phantom_node.GetForwardWeightPlusOffset(),
                        -phantom_node.GetForwardDuration());
                        -phantom_node.GetForwardDuration(),
                        -phantom_node.GetForwardDistance());
        }

        if (phantom_node.IsValidReverseSource())
        {
            insert_node(phantom_node.reverse_segment_id.id,
                        -phantom_node.GetReverseWeightPlusOffset(),
                        -phantom_node.GetReverseDuration());
                        -phantom_node.GetReverseDuration(),
                        -phantom_node.GetReverseDistance());
        }
    }
    else if (DIRECTION == REVERSE_DIRECTION)
@@ -323,14 +358,16 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
        {
            insert_node(phantom_node.forward_segment_id.id,
                        phantom_node.GetForwardWeightPlusOffset(),
                        phantom_node.GetForwardDuration());
                        phantom_node.GetForwardDuration(),
                        phantom_node.GetForwardDistance());
        }

        if (phantom_node.IsValidReverseTarget())
        {
            insert_node(phantom_node.reverse_segment_id.id,
                        phantom_node.GetReverseWeightPlusOffset(),
                        phantom_node.GetReverseDuration());
                        phantom_node.GetReverseDuration(),
                        phantom_node.GetReverseDistance());
        }
    }
}
@@ -341,141 +378,23 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
        const auto node = query_heap.DeleteMin();
        const auto weight = query_heap.GetKey(node);
        const auto duration = query_heap.GetData(node).duration;
        const auto distance = query_heap.GetData(node).distance;

        // Update values
        update_values(node, weight, duration);
        update_values(node, weight, duration, distance);

        // Relax outgoing edges
        relaxOutgoingEdges<DIRECTION>(facade,
                                      node,
                                      weight,
                                      duration,
                                      distance,
                                      query_heap,
                                      phantom_nodes,
                                      phantom_index,
                                      phantom_indices);
    }

    if (calculate_distance)
    {
        // Initialize unpacking heaps
        engine_working_data.InitializeOrClearFirstThreadLocalStorage(
            facade.GetNumberOfNodes(), facade.GetMaxBorderNodeID() + 1);

        distances_table.resize(phantom_indices.size(), INVALID_EDGE_DISTANCE);

        for (unsigned location = 0; location < phantom_indices.size(); ++location)
        {
            // Get the "middle" node that is the last node of a path
            const NodeID middle_node_id = middle_nodes_table[location];
            if (middle_node_id == SPECIAL_NODEID) // takes care of one-ways
            {
                continue;
            }

            // Retrieve the packed path from the heap
            PackedPath packed_path = mld::retrievePackedPathFromSingleManyToManyHeap<DIRECTION>(
                query_heap, middle_node_id);

            // ... and reverse it to have packed edges in the correct order,
            if (DIRECTION == FORWARD_DIRECTION)
            {
                std::reverse(packed_path.begin(), packed_path.end());
            }

            // ... unpack path
            auto &forward_heap = *engine_working_data.forward_heap_1;
            auto &reverse_heap = *engine_working_data.reverse_heap_1;
            EdgeWeight weight = INVALID_EDGE_WEIGHT;
            std::vector<NodeID> unpacked_nodes;
            std::vector<EdgeID> unpacked_edges;

            std::tie(weight, unpacked_nodes, unpacked_edges) =
                unpackPathAndCalculateDistance(engine_working_data,
                                               facade,
                                               forward_heap,
                                               reverse_heap,
                                               DO_NOT_FORCE_LOOPS,
                                               DO_NOT_FORCE_LOOPS,
                                               INVALID_EDGE_WEIGHT,
                                               packed_path,
                                               middle_node_id,
                                               phantom_nodes,
                                               phantom_index,
                                               phantom_indices);

            // Accumulate the path length without the last node
            auto annotation = 0.0;

            BOOST_ASSERT(!unpacked_nodes.empty());
            for (auto node = unpacked_nodes.begin(), last_node = std::prev(unpacked_nodes.end());
                 node != last_node;
                 ++node)
            {
                annotation += computeEdgeDistance(facade, *node);
            }

            // ... and add negative source and positive target offsets
            // ⚠ for REVERSE_DIRECTION original source and target phantom nodes are swapped
            // Get source and target phantom nodes
            // * 1-to-N: source is a single index, target is the corresponding one from the indices list
            // * N-to-1: source is the corresponding one from the indices list, target is a single index
            auto source_phantom_index = phantom_index;
            auto target_phantom_index = phantom_indices[location];
            if (DIRECTION == REVERSE_DIRECTION)
            {
                std::swap(source_phantom_index, target_phantom_index);
            }
            const auto &source_phantom = phantom_nodes[source_phantom_index];
            const auto &target_phantom = phantom_nodes[target_phantom_index];
            const NodeID source_node = unpacked_nodes.front();
            const NodeID target_node = unpacked_nodes.back();

            EdgeDistance source_offset = 0., target_offset = 0.;
            if (source_phantom.IsValidForwardSource() &&
                source_phantom.forward_segment_id.id == source_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //       -->s                    <-- subtract offset to start at source
                //          .........            <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                source_offset = source_phantom.GetForwardDistance();
            }
            else if (source_phantom.IsValidReverseSource() &&
                     source_phantom.reverse_segment_id.id == source_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //          s<-------            <-- subtract offset to start at source
                //       ...                     <-- want this distance
                // entry 0---1---2---3           <-- 3 is exit node
                source_offset = source_phantom.GetReverseDistance();
            }
            if (target_phantom.IsValidForwardTarget() &&
                target_phantom.forward_segment_id.id == target_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //                   ++>t        <-- add offset to get to target
                //       ................        <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                target_offset = target_phantom.GetForwardDistance();
            }
            else if (target_phantom.IsValidReverseTarget() &&
                     target_phantom.reverse_segment_id.id == target_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //                   <++t        <-- add offset to get from target
                //       ................        <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                target_offset = target_phantom.GetReverseDistance();
            }

            distances_table[location] = -source_offset + annotation + target_offset;
        }
    }

    return std::make_pair(durations, distances_table);
}
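The annotation loops above accumulate over every unpacked node except the last, because each edge-based node contributes the length of the segment leaving it and the final node's segment is not traversed. The summation pattern on its own (edge_distance is a hypothetical stand-in for computeEdgeDistance):

#include <cassert>
#include <iterator>
#include <vector>

double edge_distance(unsigned /*node*/) { return 10.0; } // stand-in: constant segment length

int main()
{
    std::vector<unsigned> unpacked_nodes = {4, 8, 15, 16};
    double annotation = 0.0;
    for (auto node = unpacked_nodes.begin(), last_node = std::prev(unpacked_nodes.end());
         node != last_node;
         ++node)
    {
        annotation += edge_distance(*node); // three segments summed, the last node excluded
    }
    assert(annotation == 30.0);
}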
@@ -491,12 +410,14 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
                        const std::vector<NodeBucket> &search_space_with_buckets,
                        std::vector<EdgeWeight> &weights_table,
                        std::vector<EdgeDuration> &durations_table,
                        std::vector<EdgeDistance> &distances_table,
                        std::vector<NodeID> &middle_nodes_table,
                        const PhantomNode &phantom_node)
{
    const auto node = query_heap.DeleteMin();
    const auto source_weight = query_heap.GetKey(node);
    const auto source_duration = query_heap.GetData(node).duration;
    const auto source_distance = query_heap.GetData(node).distance;

    // Check if each encountered node has an entry
    const auto &bucket_list = std::equal_range(search_space_with_buckets.begin(),
@@ -509,6 +430,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
        const auto column_idx = current_bucket.column_index;
        const auto target_weight = current_bucket.weight;
        const auto target_duration = current_bucket.duration;
        const auto target_distance = current_bucket.distance;

        // Get the value location in the results tables:
        //  * row-major direct (row_idx, column_idx) index for forward direction
@@ -519,21 +441,27 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
        auto &current_weight = weights_table[location];
        auto &current_duration = durations_table[location];

        EdgeDistance nulldistance = 0;
        auto &current_distance = distances_table.empty() ? nulldistance : distances_table[location];

        // Check if new weight is better
        auto new_weight = source_weight + target_weight;
        auto new_duration = source_duration + target_duration;
        auto new_distance = source_distance + target_distance;

        if (new_weight >= 0 &&
            std::tie(new_weight, new_duration) < std::tie(current_weight, current_duration))
            std::tie(new_weight, new_duration, new_distance) <
                std::tie(current_weight, current_duration, current_distance))
        {
            current_weight = new_weight;
            current_duration = new_duration;
            current_distance = new_distance;
            middle_nodes_table[location] = node;
        }
    }

    relaxOutgoingEdges<DIRECTION>(
        facade, node, source_weight, source_duration, query_heap, phantom_node);
        facade, node, source_weight, source_duration, source_distance, query_heap, phantom_node);
}

template <bool DIRECTION>
@@ -546,18 +474,25 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
    const auto node = query_heap.DeleteMin();
    const auto target_weight = query_heap.GetKey(node);
    const auto target_duration = query_heap.GetData(node).duration;
    const auto target_distance = query_heap.GetData(node).distance;
    const auto parent = query_heap.GetData(node).parent;
    const auto from_clique_arc = query_heap.GetData(node).from_clique_arc;

    // Store settled nodes in search space bucket
    search_space_with_buckets.emplace_back(
        node, parent, from_clique_arc, column_idx, target_weight, target_duration);
        node, parent, from_clique_arc, column_idx, target_weight, target_duration, target_distance);

    const auto &partition = facade.GetMultiLevelPartition();
    const auto maximal_level = partition.GetNumberOfLevels() - 1;

    relaxOutgoingEdges<!DIRECTION>(
        facade, node, target_weight, target_duration, query_heap, phantom_node, maximal_level);
    relaxOutgoingEdges<!DIRECTION>(facade,
                                   node,
                                   target_weight,
                                   target_duration,
                                   target_distance,
                                   query_heap,
                                   phantom_node,
                                   maximal_level);
}

template <bool DIRECTION>
@@ -596,182 +531,6 @@ void retrievePackedPathFromSearchSpace(NodeID middle_node_id,
    }
}

template <bool DIRECTION>
void calculateDistances(typename SearchEngineData<mld::Algorithm>::ManyToManyQueryHeap &query_heap,
                        const DataFacade<mld::Algorithm> &facade,
                        const std::vector<PhantomNode> &phantom_nodes,
                        const std::vector<std::size_t> &target_indices,
                        const unsigned row_idx,
                        const std::size_t source_index,
                        const unsigned number_of_sources,
                        const unsigned number_of_targets,
                        const std::vector<NodeBucket> &search_space_with_buckets,
                        std::vector<EdgeDistance> &distances_table,
                        const std::vector<NodeID> &middle_nodes_table,
                        SearchEngineData<mld::Algorithm> &engine_working_data)
{
    engine_working_data.InitializeOrClearFirstThreadLocalStorage(facade.GetNumberOfNodes(),
                                                                 facade.GetMaxBorderNodeID() + 1);

    for (unsigned column_idx = 0; column_idx < number_of_targets; ++column_idx)
    {
        // Step 1: Get source and target phantom nodes that were used in the bucketed search
        auto source_phantom_index = source_index;
        auto target_phantom_index = target_indices[column_idx];
        const auto &source_phantom = phantom_nodes[source_phantom_index];
        const auto &target_phantom = phantom_nodes[target_phantom_index];

        const auto location = DIRECTION == FORWARD_DIRECTION
                                  ? row_idx * number_of_targets + column_idx
                                  : row_idx + column_idx * number_of_sources;

        if (source_phantom_index == target_phantom_index)
        {
            distances_table[location] = 0.0;
            continue;
        }

        NodeID middle_node_id = middle_nodes_table[location];

        if (middle_node_id == SPECIAL_NODEID) // takes care of one-ways
        {
            distances_table[location] = INVALID_EDGE_DISTANCE;
            continue;
        }

        // Step 2: Find path from source to middle node
        PackedPath packed_path =
            mld::retrievePackedPathFromSingleManyToManyHeap<DIRECTION>(query_heap, middle_node_id);

        if (DIRECTION == FORWARD_DIRECTION)
        {
            std::reverse(packed_path.begin(), packed_path.end());
        }

        auto &forward_heap = *engine_working_data.forward_heap_1;
        auto &reverse_heap = *engine_working_data.reverse_heap_1;
        EdgeWeight weight = INVALID_EDGE_WEIGHT;
        std::vector<NodeID> unpacked_nodes_from_source;
        std::vector<EdgeID> unpacked_edges;
        std::tie(weight, unpacked_nodes_from_source, unpacked_edges) =
            unpackPathAndCalculateDistance(engine_working_data,
                                           facade,
                                           forward_heap,
                                           reverse_heap,
                                           DO_NOT_FORCE_LOOPS,
                                           DO_NOT_FORCE_LOOPS,
                                           INVALID_EDGE_WEIGHT,
                                           packed_path,
                                           middle_node_id,
                                           source_phantom);

        // Step 3: Find path from middle to target node
        packed_path.clear();
        retrievePackedPathFromSearchSpace<DIRECTION>(
            middle_node_id, column_idx, search_space_with_buckets, packed_path);

        if (DIRECTION == REVERSE_DIRECTION)
        {
            std::reverse(packed_path.begin(), packed_path.end());
        }

        std::vector<NodeID> unpacked_nodes_to_target;
        std::tie(weight, unpacked_nodes_to_target, unpacked_edges) =
            unpackPathAndCalculateDistance(engine_working_data,
                                           facade,
                                           forward_heap,
                                           reverse_heap,
                                           DO_NOT_FORCE_LOOPS,
                                           DO_NOT_FORCE_LOOPS,
                                           INVALID_EDGE_WEIGHT,
                                           packed_path,
                                           middle_node_id,
                                           target_phantom);

        if (DIRECTION == REVERSE_DIRECTION)
        {
            std::swap(unpacked_nodes_to_target, unpacked_nodes_from_source);
        }

        // Step 4: Compute annotation value along the path nodes without the target node
        auto annotation = 0.0;

        for (auto node = unpacked_nodes_from_source.begin(),
                  last_node = std::prev(unpacked_nodes_from_source.end());
             node != last_node;
             ++node)
        {
            annotation += computeEdgeDistance(facade, *node);
        }

        for (auto node = unpacked_nodes_to_target.begin(),
                  last_node = std::prev(unpacked_nodes_to_target.end());
             node != last_node;
             ++node)
        {
            annotation += computeEdgeDistance(facade, *node);
        }

        // Step 5: Get phantom node offsets and compute the annotation value
        EdgeDistance source_offset = 0., target_offset = 0.;
        {
            // ⚠ for REVERSE_DIRECTION original source and target phantom nodes are swapped
            if (DIRECTION == REVERSE_DIRECTION)
            {
                std::swap(source_phantom_index, target_phantom_index);
            }
            const auto &source_phantom = phantom_nodes[source_phantom_index];
            const auto &target_phantom = phantom_nodes[target_phantom_index];

            NodeID source_node = unpacked_nodes_from_source.front();
            NodeID target_node = unpacked_nodes_to_target.back();

            if (source_phantom.IsValidForwardSource() &&
                source_phantom.forward_segment_id.id == source_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //       -->s                    <-- subtract offset to start at source
                //          .........            <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                source_offset = source_phantom.GetForwardDistance();
            }
            else if (source_phantom.IsValidReverseSource() &&
                     source_phantom.reverse_segment_id.id == source_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //          s<-------            <-- subtract offset to start at source
                //       ...                     <-- want this distance
                // entry 0---1---2---3           <-- 3 is exit node
                source_offset = source_phantom.GetReverseDistance();
            }

            if (target_phantom.IsValidForwardTarget() &&
                target_phantom.forward_segment_id.id == target_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //                   ++>t        <-- add offset to get to target
                //       ................        <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                target_offset = target_phantom.GetForwardDistance();
            }
            else if (target_phantom.IsValidReverseTarget() &&
                     target_phantom.reverse_segment_id.id == target_node)
            {
                //       ............            <-- calculateEBGNodeAnnotations returns distance from 0 to 3
                //                   <++t        <-- add offset to get from target
                //       ................        <-- want this distance as result
                // entry 0---1---2---3---        <-- 3 is exit node
                target_offset = target_phantom.GetReverseDistance();
            }
        }

        distances_table[location] = -source_offset + annotation + target_offset;
    }
}
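For the reverse direction the result matrix is addressed transposed: row_idx + column_idx * number_of_sources instead of row_idx * number_of_targets + column_idx. A compact check of the two addressing modes (toy dimensions):

#include <cassert>
#include <cstddef>

int main()
{
    const std::size_t number_of_sources = 2, number_of_targets = 3;
    const std::size_t row_idx = 0, column_idx = 1;

    const std::size_t forward = row_idx * number_of_targets + column_idx; // row-major
    const std::size_t reverse = row_idx + column_idx * number_of_sources; // transposed
    assert(forward == 1 && reverse == 2); // same logical cell, two physical layouts
}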
template <bool DIRECTION>
std::pair<std::vector<EdgeDuration>, std::vector<EdgeDistance>>
manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
@@ -787,7 +546,8 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,

    std::vector<EdgeWeight> weights_table(number_of_entries, INVALID_EDGE_WEIGHT);
    std::vector<EdgeDuration> durations_table(number_of_entries, MAXIMAL_EDGE_DURATION);
    std::vector<EdgeDistance> distances_table;
    std::vector<EdgeDistance> distances_table(calculate_distance ? number_of_entries : 0,
                                              INVALID_EDGE_DISTANCE);
    std::vector<NodeID> middle_nodes_table(number_of_entries, SPECIAL_NODEID);

    std::vector<NodeBucket> search_space_with_buckets;
@@ -846,25 +606,9 @@ manyToManySearch(SearchEngineData<Algorithm> &engine_working_data,
                                      search_space_with_buckets,
                                      weights_table,
                                      durations_table,
                                      middle_nodes_table,
                                      source_phantom);
    }

    if (calculate_distance)
    {
        distances_table.resize(number_of_entries, INVALID_EDGE_DISTANCE);
        calculateDistances<DIRECTION>(query_heap,
                                      facade,
                                      phantom_nodes,
                                      target_indices, // source_indices
                                      row_idx,
                                      source_index,
                                      number_of_sources,
                                      number_of_targets,
                                      search_space_with_buckets,
                                      distances_table,
                                      middle_nodes_table,
                                      engine_working_data);
                                      source_phantom);
    }
}

@@ -892,12 +636,8 @@ manyToManySearch(SearchEngineData<mld::Algorithm> &engine_working_data,
                 const std::vector<PhantomNode> &phantom_nodes,
                 const std::vector<std::size_t> &source_indices,
                 const std::vector<std::size_t> &target_indices,
                 const bool calculate_distance,
                 const bool calculate_duration)
                 const bool calculate_distance)
{
    (void)calculate_duration; // flag stub to use for calculating distances in the MLD matrix
                              // in the future

    if (source_indices.size() == 1)
    { // TODO: check if target_indices.size() == 1 and do a bi-directional search
        return mld::oneToManySearch<FORWARD_DIRECTION>(engine_working_data,
@@ -95,12 +95,6 @@ void EdgeBasedGraphFactory::GetEdgeBasedNodeSegments(std::vector<EdgeBasedNodeSe
    swap(nodes, m_edge_based_node_segments);
}

void EdgeBasedGraphFactory::GetStartPointMarkers(std::vector<bool> &node_is_startpoint)
{
    using std::swap; // Koenig swap
    swap(m_edge_based_node_is_startpoint, node_is_startpoint);
}

void EdgeBasedGraphFactory::GetEdgeBasedNodeWeights(std::vector<EdgeWeight> &output_node_weights)
{
    using std::swap; // Koenig swap
@@ -114,6 +108,13 @@ void EdgeBasedGraphFactory::GetEdgeBasedNodeDurations(
    swap(m_edge_based_node_durations, output_node_durations);
}

void EdgeBasedGraphFactory::GetEdgeBasedNodeDistances(
    std::vector<EdgeDistance> &output_node_distances)
{
    using std::swap; // Koenig swap
    swap(m_edge_based_node_distances, output_node_distances);
}

std::uint32_t EdgeBasedGraphFactory::GetConnectivityChecksum() const
{
    return m_connectivity_checksum;
@@ -222,10 +223,9 @@ NBGToEBG EdgeBasedGraphFactory::InsertEdgeBasedNode(const NodeID node_u, const N
                               edge_id_to_segment_id(nbe_to_ebn_mapping[edge_id_2]),
                               current_edge_source_coordinate_id,
                               current_edge_target_coordinate_id,
                               i);
                               i,
                               forward_data.flags.startpoint || reverse_data.flags.startpoint);

        m_edge_based_node_is_startpoint.push_back(forward_data.flags.startpoint ||
                                                  reverse_data.flags.startpoint);
        current_edge_source_coordinate_id = current_edge_target_coordinate_id;
    }

@@ -291,8 +291,12 @@ unsigned EdgeBasedGraphFactory::LabelEdgeBasedNodes()
    {
        // heuristic: node-based graph node is a simple intersection with four edges
        // (edge-based nodes)
        m_edge_based_node_weights.reserve(4 * m_node_based_graph.GetNumberOfNodes());
        m_edge_based_node_durations.reserve(4 * m_node_based_graph.GetNumberOfNodes());
        constexpr std::size_t ESTIMATED_EDGE_COUNT = 4;
        m_edge_based_node_weights.reserve(ESTIMATED_EDGE_COUNT * m_node_based_graph.GetNumberOfNodes());
        m_edge_based_node_durations.reserve(ESTIMATED_EDGE_COUNT *
                                            m_node_based_graph.GetNumberOfNodes());
        m_edge_based_node_distances.reserve(ESTIMATED_EDGE_COUNT *
                                            m_node_based_graph.GetNumberOfNodes());
        nbe_to_ebn_mapping.resize(m_node_based_graph.GetEdgeCapacity(), SPECIAL_NODEID);

        // renumber edge based node of outgoing edges
@@ -310,6 +314,7 @@ unsigned EdgeBasedGraphFactory::LabelEdgeBasedNodes()

        m_edge_based_node_weights.push_back(edge_data.weight);
        m_edge_based_node_durations.push_back(edge_data.duration);
        m_edge_based_node_distances.push_back(edge_data.distance);

        BOOST_ASSERT(numbered_edges_count < m_node_based_graph.GetNumberOfEdges());
        nbe_to_ebn_mapping[current_edge] = numbered_edges_count;
@@ -407,15 +412,17 @@ EdgeBasedGraphFactory::GenerateEdgeExpandedNodes(const WayRestrictionMap &way_re
            m_edge_based_node_weights.push_back(ebn_weight);
            m_edge_based_node_durations.push_back(
                m_edge_based_node_durations[nbe_to_ebn_mapping[eid]]);
            m_edge_based_node_distances.push_back(
                m_edge_based_node_distances[nbe_to_ebn_mapping[eid]]);

            edge_based_node_id++;
            progress.PrintStatus(progress_counter++);
        }
    }

    BOOST_ASSERT(m_edge_based_node_segments.size() == m_edge_based_node_is_startpoint.size());
    BOOST_ASSERT(m_number_of_edge_based_nodes == m_edge_based_node_weights.size());
    BOOST_ASSERT(m_number_of_edge_based_nodes == m_edge_based_node_durations.size());
    BOOST_ASSERT(m_number_of_edge_based_nodes == m_edge_based_node_distances.size());

    util::Log() << "Generated " << m_number_of_edge_based_nodes << " nodes ("
                << way_restriction_map.NumberOfDuplicatedNodes()
@@ -652,14 +659,15 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
                // auto turn_id = m_edge_based_edge_list.size();
                auto weight = boost::numeric_cast<EdgeWeight>(edge_data1.weight + weight_penalty);
                auto duration = boost::numeric_cast<EdgeWeight>(edge_data1.duration + duration_penalty);
                auto distance = boost::numeric_cast<EdgeDistance>(edge_data1.distance);

                EdgeBasedEdge edge_based_edge = {
                    edge_based_node_from,
                EdgeBasedEdge edge_based_edge = {edge_based_node_from,
                                                 edge_based_node_to,
                                                 SPECIAL_NODEID, // This will be updated once the main loop
                                                                 // completes!
                                                 SPECIAL_NODEID, // This will be updated once the main
                                                                 // loop completes!
                                                 weight,
                                                 duration,
                                                 distance,
                                                 true,
                                                 false};

@@ -387,12 +387,16 @@ void ExtractionContainers::PrepareEdges(ScriptingEnvironment &scripting_environm
        const auto weight = edge_iterator->weight_data(distance);
        const auto duration = edge_iterator->duration_data(distance);

        const auto accurate_distance =
            util::coordinate_calculation::fccApproximateDistance(source_coord, target_coord);

        ExtractionSegment segment(source_coord, target_coord, distance, weight, duration);
        scripting_environment.ProcessSegment(segment);

        auto &edge = edge_iterator->result;
        edge.weight = std::max<EdgeWeight>(1, std::round(segment.weight * weight_multiplier));
        edge.duration = std::max<EdgeWeight>(1, std::round(segment.duration * 10.));
        edge.distance = accurate_distance;

        // assign new node id
        const auto node_id = mapExternalToInternalNodeID(
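The accurate_distance above comes from a coordinate-based great-circle approximation (fccApproximateDistance) rather than from weight or duration data. For intuition only, here is a generic haversine distance; it is not OSRM's formula, just a common approximation of the same quantity:

#include <cmath>

// Great-circle distance in meters between two WGS84 coordinates (haversine form).
double haversine(double lat1, double lon1, double lat2, double lon2)
{
    constexpr double kEarthRadius = 6372797.560856; // meters
    constexpr double kPi = 3.14159265358979323846;
    constexpr double kDegToRad = kPi / 180.0;
    const double dlat = (lat2 - lat1) * kDegToRad;
    const double dlon = (lon2 - lon1) * kDegToRad;
    const double a = std::sin(dlat / 2) * std::sin(dlat / 2) +
                     std::cos(lat1 * kDegToRad) * std::cos(lat2 * kDegToRad) *
                         std::sin(dlon / 2) * std::sin(dlon / 2);
    return 2 * kEarthRadius * std::asin(std::sqrt(a));
}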
@@ -239,9 +239,9 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)
    EdgeBasedNodeDataContainer edge_based_nodes_container;
    std::vector<EdgeBasedNodeSegment> edge_based_node_segments;
    util::DeallocatingVector<EdgeBasedEdge> edge_based_edge_list;
    std::vector<bool> node_is_startpoint;
    std::vector<EdgeWeight> edge_based_node_weights;
    std::vector<EdgeDuration> edge_based_node_durations;
    std::vector<EdgeDistance> edge_based_node_distances;
    std::uint32_t ebg_connectivity_checksum = 0;

    // Create a node-based graph from the OSRM file
@@ -319,9 +319,9 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)
                                scripting_environment,
                                edge_based_nodes_container,
                                edge_based_node_segments,
                                node_is_startpoint,
                                edge_based_node_weights,
                                edge_based_node_durations,
                                edge_based_node_distances,
                                edge_based_edge_list,
                                ebg_connectivity_checksum);

@@ -345,8 +345,10 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)

    util::Log() << "Saving edge-based node weights to file.";
    TIMER_START(timer_write_node_weights);
    extractor::files::writeEdgeBasedNodeWeightsDurations(
        config.GetPath(".osrm.enw"), edge_based_node_weights, edge_based_node_durations);
    extractor::files::writeEdgeBasedNodeWeightsDurationsDistances(config.GetPath(".osrm.enw"),
                                                                  edge_based_node_weights,
                                                                  edge_based_node_durations,
                                                                  edge_based_node_distances);
    TIMER_STOP(timer_write_node_weights);
    util::Log() << "Done writing. (" << TIMER_SEC(timer_write_node_weights) << ")";

@@ -358,7 +360,7 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)

    util::Log() << "Building r-tree ...";
    TIMER_START(rtree);
    BuildRTree(std::move(edge_based_node_segments), std::move(node_is_startpoint), coordinates);
    BuildRTree(std::move(edge_based_node_segments), coordinates);

    TIMER_STOP(rtree);

@@ -733,9 +735,9 @@ EdgeID Extractor::BuildEdgeExpandedGraph(
    // output data
    EdgeBasedNodeDataContainer &edge_based_nodes_container,
    std::vector<EdgeBasedNodeSegment> &edge_based_node_segments,
    std::vector<bool> &node_is_startpoint,
    std::vector<EdgeWeight> &edge_based_node_weights,
    std::vector<EdgeDuration> &edge_based_node_durations,
    std::vector<EdgeDistance> &edge_based_node_distances,
    util::DeallocatingVector<EdgeBasedEdge> &edge_based_edge_list,
    std::uint32_t &connectivity_checksum)
{
@@ -783,9 +785,9 @@ EdgeID Extractor::BuildEdgeExpandedGraph(

    edge_based_graph_factory.GetEdgeBasedEdges(edge_based_edge_list);
    edge_based_graph_factory.GetEdgeBasedNodeSegments(edge_based_node_segments);
    edge_based_graph_factory.GetStartPointMarkers(node_is_startpoint);
    edge_based_graph_factory.GetEdgeBasedNodeWeights(edge_based_node_weights);
    edge_based_graph_factory.GetEdgeBasedNodeDurations(edge_based_node_durations);
    edge_based_graph_factory.GetEdgeBasedNodeDistances(edge_based_node_distances);
    connectivity_checksum = edge_based_graph_factory.GetConnectivityChecksum();

    return number_of_edge_based_nodes;
@@ -797,35 +799,24 @@ EdgeID Extractor::BuildEdgeExpandedGraph(
   Saves tree into '.ramIndex' and leaves into '.fileIndex'.
 */
void Extractor::BuildRTree(std::vector<EdgeBasedNodeSegment> edge_based_node_segments,
                           std::vector<bool> node_is_startpoint,
                           const std::vector<util::Coordinate> &coordinates)
{
    util::Log() << "Constructing r-tree of " << edge_based_node_segments.size()
                << " segments built on top of " << coordinates.size() << " coordinates";

    BOOST_ASSERT(node_is_startpoint.size() == edge_based_node_segments.size());

    // Filter node based edges based on startpoint
    auto out_iter = edge_based_node_segments.begin();
    auto in_iter = edge_based_node_segments.begin();
    for (auto index : util::irange<std::size_t>(0UL, node_is_startpoint.size()))
    {
        BOOST_ASSERT(in_iter != edge_based_node_segments.end());
        if (node_is_startpoint[index])
        {
            *out_iter = *in_iter;
            out_iter++;
        }
        in_iter++;
    }
    auto new_size = out_iter - edge_based_node_segments.begin();
    if (new_size == 0)
    auto start_point_count = std::accumulate(edge_based_node_segments.begin(),
                                             edge_based_node_segments.end(),
                                             0,
                                             [](const size_t so_far, const auto &segment) {
                                                 return so_far + (segment.is_startpoint ? 1 : 0);
                                             });
    if (start_point_count == 0)
    {
        throw util::exception("There are no snappable edges left after processing. Are you "
                              "setting travel modes correctly in the profile? Cannot continue." +
                              SOURCE_REF);
    }
    edge_based_node_segments.resize(new_size);

    TIMER_START(construction);
    util::StaticRTree<EdgeBasedNodeSegment> rtree(
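Counting startpoints with std::accumulate replaces the old in-place filtering pass. The counting idiom on its own, with a hypothetical segment type:

#include <cassert>
#include <cstddef>
#include <numeric>
#include <vector>

struct Segment
{
    bool is_startpoint;
};

int main()
{
    std::vector<Segment> segments = {{true}, {false}, {true}};
    auto start_point_count =
        std::accumulate(segments.begin(), segments.end(), std::size_t{0},
                        [](std::size_t so_far, const Segment &segment) {
                            return so_far + (segment.is_startpoint ? 1 : 0);
                        });
    assert(start_point_count == 2);
}

One detail worth noting: std::accumulate deduces its accumulator type from the initial value, so a plain 0 initializer (an int, as in the diff) silently narrows the size_t returned by the lambda on very large inputs; std::size_t{0} sidesteps that.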
@@ -415,6 +415,7 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti
                             OSMNodeID{static_cast<std::uint64_t>(last_node.ref())},
                             0,  // weight
                             0,  // duration
                             0,  // distance
                             {}, // geometry id
                             static_cast<AnnotationID>(annotation_data_id),
                             {true,
@@ -450,6 +451,7 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti
                             OSMNodeID{static_cast<std::uint64_t>(last_node.ref())},
                             0,  // weight
                             0,  // duration
                             0,  // distance
                             {}, // geometry id
                             static_cast<AnnotationID>(annotation_data_id),
                             {false,

@@ -259,6 +259,7 @@ void GraphCompressor::Compress(
                const auto forward_weight2 = fwd_edge_data2.weight;
                const auto forward_duration1 = fwd_edge_data1.duration;
                const auto forward_duration2 = fwd_edge_data2.duration;
                const auto forward_distance2 = fwd_edge_data2.distance;

                BOOST_ASSERT(0 != forward_weight1);
                BOOST_ASSERT(0 != forward_weight2);
@@ -267,6 +268,17 @@ void GraphCompressor::Compress(
                const auto reverse_weight2 = rev_edge_data2.weight;
                const auto reverse_duration1 = rev_edge_data1.duration;
                const auto reverse_duration2 = rev_edge_data2.duration;
                const auto reverse_distance2 = rev_edge_data2.distance;

#ifndef NDEBUG
                // Because distances are symmetrical, we only need one
                // per edge - here we double-check that they match
                // their mirrors.
                const auto reverse_distance1 = rev_edge_data1.distance;
                const auto forward_distance1 = fwd_edge_data1.distance;
                BOOST_ASSERT(forward_distance1 == reverse_distance2);
                BOOST_ASSERT(forward_distance2 == reverse_distance1);
#endif

                BOOST_ASSERT(0 != reverse_weight1);
                BOOST_ASSERT(0 != reverse_weight2);
@@ -279,6 +291,10 @@ void GraphCompressor::Compress(
                graph.GetEdgeData(forward_e1).duration += forward_duration2;
                graph.GetEdgeData(reverse_e1).duration += reverse_duration2;

                // add distance of e2's to e1
                graph.GetEdgeData(forward_e1).distance += forward_distance2;
                graph.GetEdgeData(reverse_e1).distance += reverse_distance2;

                if (node_weight_penalty != INVALID_EDGE_WEIGHT &&
                    node_duration_penalty != MAXIMAL_EDGE_DURATION)
                {
@@ -286,6 +302,7 @@ void GraphCompressor::Compress(
                    graph.GetEdgeData(reverse_e1).weight += node_weight_penalty;
                    graph.GetEdgeData(forward_e1).duration += node_duration_penalty;
                    graph.GetEdgeData(reverse_e1).duration += node_duration_penalty;
                    // Note: no penalties for distances
                }

                // extend e1's to targets of e2's
Some files were not shown because too many files have changed in this diff.