Merge branch 'master' into sf-cpp-20

Siarhei Fedartsou 2024-05-21 21:33:37 +02:00
commit 41f90ab462
27 changed files with 681 additions and 272 deletions

View File

@ -151,7 +151,7 @@ jobs:
- name: gcc-13-debug-cov
continue-on-error: false
node: 20
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: gcc-13
@ -248,12 +248,11 @@ jobs:
- name: gcc-13-release
continue-on-error: false
node: 20
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-13
CXXCOMPILER: g++-13
ENABLE_BENCHMARKS: ON
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-12-release
@ -264,7 +263,6 @@ jobs:
BUILD_TYPE: Release
CCOMPILER: gcc-12
CXXCOMPILER: g++-12
ENABLE_BENCHMARKS: ON
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-11-release
@ -275,7 +273,6 @@ jobs:
BUILD_TYPE: Release
CCOMPILER: gcc-11
CXXCOMPILER: g++-11
ENABLE_BENCHMARKS: ON
- name: conan-linux-release-node
build_node_package: true
@ -533,17 +530,6 @@ jobs:
fi
popd
npm test
- name: Run benchmarks
if: ${{ matrix.ENABLE_BENCHMARKS == 'ON' }}
run: |
pushd ${OSRM_BUILD_DIR}
make --jobs=${JOBS} benchmarks
./src/benchmarks/alias-bench
./src/benchmarks/json-render-bench ../src/benchmarks/portugal_to_korea.json
./src/benchmarks/match-bench ../test/data/ch/monaco.osrm
./src/benchmarks/packedvector-bench
./src/benchmarks/rtree-bench ../test/data/monaco.osrm.ramIndex ../test/data/monaco.osrm.fileIndex ../test/data/monaco.osrm.nbg_nodes
popd
- name: Use Node 18
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
@ -616,8 +602,72 @@ jobs:
replacesArtifacts: true
token: ${{ secrets.GITHUB_TOKEN }}
benchmarks:
if: github.event_name == 'pull_request'
needs: [format-taginfo-docs]
runs-on: ubuntu-22.04
env:
CCOMPILER: clang-13
CXXCOMPILER: clang++-13
CC: clang-13
CXX: clang++-13
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
GITHUB_REPOSITORY: ${{ github.repository }}
steps:
- name: Enable compiler cache
uses: actions/cache@v3
with:
path: ~/.ccache
key: v1-ccache-benchmarks-${{ github.sha }}
restore-keys: |
v1-ccache-benchmarks-
- name: Enable Conan cache
uses: actions/cache@v3
with:
path: ~/.conan
key: v1-conan-benchmarks-${{ github.sha }}
restore-keys: |
v1-conan-benchmarks-
- name: Checkout PR Branch
uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}
path: pr
- run: python3 -m pip install "conan<2.0.0" "requests==2.31.0"
- name: Build PR Branch
run: |
mkdir -p pr/build
cd pr/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release ..
make -j$(nproc)
make -j$(nproc) benchmarks
cd ..
make -C test/data
- name: Checkout Base Branch
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.base.ref }}
path: base
- name: Build Base Branch
run: |
mkdir base/build
cd base/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release ..
make -j$(nproc)
make -j$(nproc) benchmarks
cd ..
make -C test/data
- name: Run Benchmarks
run: |
./pr/scripts/ci/run_benchmarks.sh base pr
- name: Post Benchmark Results
run: |
python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
ci-complete:
runs-on: ubuntu-22.04
needs: [build-test-publish, docker-image, windows-release-node]
needs: [build-test-publish, docker-image, windows-release-node, benchmarks]
steps:
- run: echo "CI complete"

View File

@ -46,6 +46,10 @@
- FIXED: Correctly handle compressed traffic signals. [#6724](https://github.com/Project-OSRM/osrm-backend/pull/6724)
- FIXED: Fix bug when searching for maneuver overrides [#6739](https://github.com/Project-OSRM/osrm-backend/pull/6739)
- FIXED: Remove force-loop checks for routes with u-turns [#6858](https://github.com/Project-OSRM/osrm-backend/pull/6858)
- FIXED: Correctly check runtime search conditions for forcing routing steps [#6866](https://github.com/Project-OSRM/osrm-backend/pull/6866)
- Map Matching:
- CHANGED: Optimise path distance calculation in MLD map matching. [#6876](https://github.com/Project-OSRM/osrm-backend/pull/6876)
- CHANGED: Optimise R-tree queries in the case of map matching. [#6881](https://github.com/Project-OSRM/osrm-backend/pull/6881)
- Debug tiles:
- FIXED: Ensure speed layer features have unique ids. [#6726](https://github.com/Project-OSRM/osrm-backend/pull/6726)

View File

@ -15,7 +15,7 @@ The following services are available via HTTP API, C++ library interface and Nod
To quickly try OSRM use our [demo server](http://map.project-osrm.org) which comes with both the backend and a frontend on top.
For a quick introduction about how the road network is represented in OpenStreetMap and how to map specific road network features have a look at [this guide about mapping for navigation](https://www.mapbox.com/mapping/mapping-for-navigation/).
For a quick introduction about how the road network is represented in OpenStreetMap and how to map specific road network features have a look at [the OSM wiki on routing](https://wiki.openstreetmap.org/wiki/Routing) or [this guide about mapping for navigation](https://web.archive.org/web/20221206013651/https://labs.mapbox.com/mapping/mapping-for-navigation/).
Related [Project-OSRM](https://github.com/Project-OSRM) repositories:
- [osrm-frontend](https://github.com/Project-OSRM/osrm-frontend) - User-facing frontend with map. The demo server runs this on top of the backend

View File

@ -0,0 +1,101 @@
@routing @testbot @via
Feature: Force routing steps
Background:
Given the profile "testbot"
Scenario: Direct routes with waypoints on same edge
Given the node map
"""
1 2
a-------b
| |
d-------c
| |
e-------f
3 4
"""
And the ways
| nodes | oneway |
| ab | no |
| ad | no |
| bc | no |
| cf | no |
| dc | no |
| de | no |
| ef | yes |
When I route I should get
| waypoints | approaches | weight | route |
| 1,2 | | 20 | ab,ab |
| 1,2 | curb curb | 100 | ab,ad,dc,bc,ab |
| 2,1 | | 20 | ab,ab |
| 2,1 | opposite opposite | 100 | ab,bc,dc,ad,ab |
| 3,4 | | 20 | ef,ef |
| 4,3 | | 100 | ef,cf,dc,de,ef |
Scenario: Via routes with waypoints on same edge
Given the node map
"""
1 2
a-------b
| |
d-5-----c
| |
e-------f
3 4
"""
And the ways
| nodes | oneway |
| ab | no |
| ad | no |
| bc | no |
| cf | no |
| dc | no |
| de | no |
| ef | yes |
When I route I should get
| waypoints | approaches | weight | route |
| 5,1,2 | | 59.8 | dc,ad,ab,ab,ab |
| 5,1,2 | unrestricted curb curb | 180.2 | dc,bc,ab,ab,ab,ad,dc,bc,ab |
| 5,2,1 | | 80.2 | dc,bc,ab,ab,ab |
| 5,2,1 | unrestricted opposite opposite | 159.8 | dc,ad,ab,ab,ab,bc,dc,ad,ab |
| 5,3,4 | | 59.8 | dc,de,ef,ef,ef |
| 5,4,3 | | 159.8 | dc,de,ef,ef,ef,cf,dc,de,ef |
Scenario: [U-turns allowed] Via routes with waypoints on same edge
Given the node map
"""
1 2
a-------b
| |
d-5-----c
| |
e-------f
3 4
"""
And the ways
| nodes | oneway |
| ab | no |
| ad | no |
| bc | no |
| cf | no |
| dc | no |
| de | no |
| ef | yes |
And the query options
| continue_straight | false |
When I route I should get
| waypoints | approaches | weight | route |
| 5,1,2 | | 59.8 | dc,ad,ab,ab,ab |
| 5,1,2 | unrestricted curb curb | 180.2 | dc,bc,ab,ab,ab,ad,dc,bc,ab |
| 5,2,1 | | 79.8 | dc,ad,ab,ab,ab,ab |
| 5,2,1 | unrestricted opposite opposite | 159.8 | dc,ad,ab,ab,ab,bc,dc,ad,ab |
| 5,3,4 | | 59.8 | dc,de,ef,ef,ef |
| 5,4,3 | | 159.8 | dc,de,ef,ef,ef,cf,dc,de,ef |

View File

@ -375,7 +375,7 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
BOOST_ASSERT(m_geospatial_query.get());
return m_geospatial_query->NearestPhantomNodes(
input_coordinate, approach, boost::none, max_distance, bearing, use_all_edges);
input_coordinate, approach, max_distance, bearing, use_all_edges);
}
std::vector<PhantomNodeWithDistance>

View File

@ -47,12 +47,42 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
return rtree.SearchInBox(bbox);
}
std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const Approach approach,
const double max_distance,
const boost::optional<Bearing> bearing_with_range,
const boost::optional<bool> use_all_edges) const
{
auto results = rtree.SearchInRange(
input_coordinate,
max_distance,
[this, approach, &input_coordinate, &bearing_with_range, &use_all_edges, max_distance](
const CandidateSegment &segment)
{
auto invalidDistance =
CheckSegmentDistance(input_coordinate, segment, max_distance);
if (invalidDistance)
{
return std::make_pair(false, false);
}
auto valid = CheckSegmentExclude(segment) &&
CheckApproach(input_coordinate, segment, approach) &&
(use_all_edges ? HasValidEdge(segment, *use_all_edges)
: HasValidEdge(segment)) &&
(bearing_with_range ? CheckSegmentBearing(segment, *bearing_with_range)
: std::make_pair(true, true));
return valid;
});
return MakePhantomNodes(input_coordinate, results);
}
// Returns max_results nearest PhantomNodes that are valid within the provided parameters.
// Does not filter by small/big component!
std::vector<PhantomNodeWithDistance>
NearestPhantomNodes(const util::Coordinate input_coordinate,
const Approach approach,
const boost::optional<size_t> max_results,
const size_t max_results,
const boost::optional<double> max_distance,
const boost::optional<Bearing> bearing_with_range,
const boost::optional<bool> use_all_edges) const
@ -70,10 +100,10 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
: std::make_pair(true, true));
return valid;
},
[this, &max_distance, &max_results, input_coordinate](const std::size_t num_results,
const CandidateSegment &segment)
[this, &max_distance, max_results, input_coordinate](const std::size_t num_results,
const CandidateSegment &segment)
{
return (max_results && num_results >= *max_results) ||
return (num_results >= max_results) ||
(max_distance && max_distance != -1.0 &&
CheckSegmentDistance(input_coordinate, segment, *max_distance));
});
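For orientation, a hedged usage sketch contrasting the new radius-only overload with the existing count-limited one (assumes a GeospatialQuery instance named geospatial_query plus boost::optional bearing and use_all_edges values already in scope; the numeric arguments are illustrative only):
// Radius-only search: every valid phantom node within max_distance metres.
auto by_radius = geospatial_query.NearestPhantomNodes(
    input_coordinate, approach, /*max_distance=*/100.0, bearing, use_all_edges);
// Count-limited search: at most max_results phantom nodes, optionally distance-capped.
auto by_count = geospatial_query.NearestPhantomNodes(
    input_coordinate, approach, /*max_results=*/5, /*max_distance=*/boost::none,
    bearing, use_all_edges);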

View File

@ -71,24 +71,27 @@ void insertTargetInReverseHeap(Heap &reverse_heap, const PhantomNode &target)
static constexpr bool FORWARD_DIRECTION = true;
static constexpr bool REVERSE_DIRECTION = false;
// Identify nodes in the forward(reverse) search direction that will require loop forcing
// Identify nodes in the forward(reverse) search direction that will require step forcing
// e.g. if source and destination nodes are on the same segment.
std::vector<NodeID> getForwardLoopNodes(const PhantomEndpointCandidates &candidates);
std::vector<NodeID> getForwardLoopNodes(const PhantomCandidatesToTarget &candidates);
std::vector<NodeID> getBackwardLoopNodes(const PhantomEndpointCandidates &candidates);
std::vector<NodeID> getBackwardLoopNodes(const PhantomCandidatesToTarget &candidates);
std::vector<NodeID> getForwardForceNodes(const PhantomEndpointCandidates &candidates);
std::vector<NodeID> getForwardForceNodes(const PhantomCandidatesToTarget &candidates);
std::vector<NodeID> getBackwardForceNodes(const PhantomEndpointCandidates &candidates);
std::vector<NodeID> getBackwardForceNodes(const PhantomCandidatesToTarget &candidates);
// Find the specific phantom node endpoints for a given path from a list of candidates.
PhantomEndpoints endpointsFromCandidates(const PhantomEndpointCandidates &candidates,
const std::vector<NodeID> &path);
template <typename HeapNodeT>
inline bool force_loop(const std::vector<NodeID> &force_nodes, const HeapNodeT &heap_node)
inline bool shouldForceStep(const std::vector<NodeID> &force_nodes,
const HeapNodeT &forward_heap_node,
const HeapNodeT &reverse_heap_node)
{
// if loops are forced, they are so at the source
return !force_nodes.empty() &&
std::find(force_nodes.begin(), force_nodes.end(), heap_node.node) != force_nodes.end() &&
heap_node.data.parent == heap_node.node;
// routing steps are forced when the node is a source of both forward and reverse search heaps.
return forward_heap_node.data.parent == forward_heap_node.node &&
reverse_heap_node.data.parent == reverse_heap_node.node &&
std::find(force_nodes.begin(), force_nodes.end(), forward_heap_node.node) !=
force_nodes.end();
}
template <typename Heap>
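To make the semantics of shouldForceStep concrete, here is a minimal self-contained sketch using a toy heap-node struct that stands in for OSRM's real QueryHeap node type (the node ids and force list are made up for illustration, not taken from this commit):
// Illustrative only: a step is forced when the meeting node is a search origin
// in *both* heaps (each heap node is its own parent) and appears in force_nodes.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>
using NodeID = std::uint32_t;
struct ToyHeapNode
{
    NodeID node;
    struct
    {
        NodeID parent;
    } data;
};
template <typename HeapNodeT>
bool shouldForceStep(const std::vector<NodeID> &force_nodes,
                     const HeapNodeT &forward_heap_node,
                     const HeapNodeT &reverse_heap_node)
{
    return forward_heap_node.data.parent == forward_heap_node.node &&
           reverse_heap_node.data.parent == reverse_heap_node.node &&
           std::find(force_nodes.begin(), force_nodes.end(), forward_heap_node.node) !=
               force_nodes.end();
}
int main()
{
    const std::vector<NodeID> force_nodes{42};
    const ToyHeapNode origin{42, {42}};  // its own parent: a search origin
    const ToyHeapNode settled{42, {7}};  // reached via node 7: not an origin
    std::cout << std::boolalpha;
    std::cout << shouldForceStep(force_nodes, origin, origin) << '\n';  // true
    std::cout << shouldForceStep(force_nodes, origin, settled) << '\n'; // false
    return 0;
}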

View File

@ -112,8 +112,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
NodeID &middle_node_id,
EdgeWeight &upper_bound,
EdgeWeight min_edge_offset,
const std::vector<NodeID> &force_loop_forward_nodes,
const std::vector<NodeID> &force_loop_reverse_nodes)
const std::vector<NodeID> &force_step_nodes)
{
auto heapNode = forward_heap.DeleteMinGetHeapNode();
const auto reverseHeapNode = reverse_heap.GetHeapNodeIfWasInserted(heapNode.node);
@ -123,13 +122,13 @@ void routingStep(const DataFacade<Algorithm> &facade,
const EdgeWeight new_weight = reverseHeapNode->weight + heapNode.weight;
if (new_weight < upper_bound)
{
if (force_loop(force_loop_forward_nodes, heapNode) ||
force_loop(force_loop_reverse_nodes, heapNode) ||
if (shouldForceStep(force_step_nodes, heapNode, reverseHeapNode.get()) ||
// in this case we are looking at a bi-directional way where the source
// and target phantom are on the same edge based node
new_weight < EdgeWeight{0})
{
// check whether there is a loop present at the node
// Before forcing a step, check whether there is a loop present at the node.
// We may find a valid weight path by following the loop.
for (const auto edge : facade.GetAdjacentEdgeRange(heapNode.node))
{
const auto &data = facade.GetEdgeData(edge);
@ -421,23 +420,22 @@ void retrievePackedPathFromSingleManyToManyHeap(
// assumes that heaps are already setup correctly.
// ATTENTION: This only works if no additional offset is supplied next to the Phantom Node
// Offsets.
// In case additional offsets are supplied, you might have to force a loop first.
// A forced loop might be necessary, if source and target are on the same segment.
// In case additional offsets are supplied, you might have to force a routing step first.
// A forced step might be necessary, if source and target are on the same segment.
// If this is the case and the offsets of the respective direction are larger for the source
// than the target
// then a force loop is required (e.g. source_phantom.forward_segment_id ==
// then a force step is required (e.g. source_phantom.forward_segment_id ==
// target_phantom.forward_segment_id
// && source_phantom.GetForwardWeightPlusOffset() > target_phantom.GetForwardWeightPlusOffset())
// requires
// a force loop, if the heaps have been initialized with positive offsets.
// a force step, if the heaps have been initialized with positive offsets.
void search(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
SearchEngineData<Algorithm>::QueryHeap &forward_heap,
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
EdgeWeight &weight,
std::vector<NodeID> &packed_leg,
const std::vector<NodeID> &force_loop_forward_node,
const std::vector<NodeID> &force_loop_reverse_node,
const std::vector<NodeID> &force_step_nodes,
const EdgeWeight duration_upper_bound = INVALID_EDGE_WEIGHT);
template <typename PhantomEndpointT>
@ -447,8 +445,7 @@ void search(SearchEngineData<Algorithm> &engine_working_data,
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
EdgeWeight &weight,
std::vector<NodeID> &packed_leg,
const std::vector<NodeID> &force_loop_forward_node,
const std::vector<NodeID> &force_loop_reverse_node,
const std::vector<NodeID> &force_step_nodes,
const PhantomEndpointT & /*endpoints*/,
const EdgeWeight duration_upper_bound = INVALID_EDGE_WEIGHT)
{
@ -459,14 +456,13 @@ void search(SearchEngineData<Algorithm> &engine_working_data,
reverse_heap,
weight,
packed_leg,
force_loop_forward_node,
force_loop_reverse_node,
force_step_nodes,
duration_upper_bound);
}
// Requires the heaps for be empty
// If heaps should be adjusted to be initialized outside of this function,
// the addition of force_loop parameters might be required
// the addition of force_step parameters might be required
double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<ch::Algorithm> &facade,
SearchEngineData<Algorithm>::QueryHeap &forward_heap,

View File

@ -389,8 +389,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
NodeID &middle_node,
EdgeWeight &path_upper_bound,
const std::vector<NodeID> &force_loop_forward_nodes,
const std::vector<NodeID> &force_loop_reverse_nodes,
const std::vector<NodeID> &force_step_nodes,
const Args &...args)
{
const auto heapNode = forward_heap.DeleteMinGetHeapNode();
@ -409,11 +408,8 @@ void routingStep(const DataFacade<Algorithm> &facade,
auto reverse_weight = reverseHeapNode->weight;
auto path_weight = weight + reverse_weight;
// MLD uses loops forcing only to prune single node paths in forward and/or
// backward direction (there is no need to force loops in MLD but in CH)
if (!force_loop(force_loop_forward_nodes, heapNode) &&
!force_loop(force_loop_reverse_nodes, heapNode) && (path_weight >= EdgeWeight{0}) &&
(path_weight < path_upper_bound))
if (!shouldForceStep(force_step_nodes, heapNode, reverseHeapNode.get()) &&
(path_weight >= EdgeWeight{0}) && (path_weight < path_upper_bound))
{
middle_node = heapNode.node;
path_upper_bound = path_weight;
@ -438,8 +434,7 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
typename SearchEngineData<Algorithm>::QueryHeap &forward_heap,
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
const std::vector<NodeID> &force_loop_forward_nodes,
const std::vector<NodeID> &force_loop_reverse_nodes,
const std::vector<NodeID> &force_step_nodes,
EdgeWeight weight_upper_bound,
const Args &...args)
{
@ -463,27 +458,15 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
{
if (!forward_heap.Empty())
{
routingStep<FORWARD_DIRECTION>(facade,
forward_heap,
reverse_heap,
middle,
weight,
force_loop_forward_nodes,
force_loop_reverse_nodes,
args...);
routingStep<FORWARD_DIRECTION>(
facade, forward_heap, reverse_heap, middle, weight, force_step_nodes, args...);
if (!forward_heap.Empty())
forward_heap_min = forward_heap.MinKey();
}
if (!reverse_heap.Empty())
{
routingStep<REVERSE_DIRECTION>(facade,
reverse_heap,
forward_heap,
middle,
weight,
force_loop_reverse_nodes,
force_loop_forward_nodes,
args...);
routingStep<REVERSE_DIRECTION>(
facade, reverse_heap, forward_heap, middle, weight, force_step_nodes, args...);
if (!reverse_heap.Empty())
reverse_heap_min = reverse_heap.MinKey();
}
@ -512,9 +495,7 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
for (auto const &packed_edge : packed_path)
{
NodeID source, target;
bool overlay_edge;
std::tie(source, target, overlay_edge) = packed_edge;
auto [source, target, overlay_edge] = packed_edge;
if (!overlay_edge)
{ // a base graph edge
unpacked_nodes.push_back(target);
@ -534,21 +515,14 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
forward_heap.Insert(source, {0}, {source});
reverse_heap.Insert(target, {0}, {target});
// TODO: when structured bindings will be allowed change to
// auto [subpath_weight, subpath_source, subpath_target, subpath] = ...
EdgeWeight subpath_weight;
std::vector<NodeID> subpath_nodes;
std::vector<EdgeID> subpath_edges;
std::tie(subpath_weight, subpath_nodes, subpath_edges) =
search(engine_working_data,
facade,
forward_heap,
reverse_heap,
force_loop_forward_nodes,
force_loop_reverse_nodes,
INVALID_EDGE_WEIGHT,
sublevel,
parent_cell_id);
auto [subpath_weight, subpath_nodes, subpath_edges] = search(engine_working_data,
facade,
forward_heap,
reverse_heap,
force_step_nodes,
INVALID_EDGE_WEIGHT,
sublevel,
parent_cell_id);
BOOST_ASSERT(!subpath_edges.empty());
BOOST_ASSERT(subpath_nodes.size() > 1);
BOOST_ASSERT(subpath_nodes.front() == source);
@ -570,8 +544,7 @@ inline void search(SearchEngineData<Algorithm> &engine_working_data,
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
EdgeWeight &weight,
std::vector<NodeID> &unpacked_nodes,
const std::vector<NodeID> &force_loop_forward_node,
const std::vector<NodeID> &force_loop_reverse_node,
const std::vector<NodeID> &force_step_nodes,
const PhantomEndpointT &endpoints,
const EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT)
{
@ -580,8 +553,7 @@ inline void search(SearchEngineData<Algorithm> &engine_working_data,
facade,
forward_heap,
reverse_heap,
force_loop_forward_node,
force_loop_reverse_node,
force_step_nodes,
weight_upper_bound,
endpoints);
}
@ -633,28 +605,46 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
const PhantomEndpoints endpoints{source_phantom, target_phantom};
insertNodesInHeaps(forward_heap, reverse_heap, endpoints);
EdgeWeight weight = INVALID_EDGE_WEIGHT;
std::vector<NodeID> unpacked_nodes;
std::vector<EdgeID> unpacked_edges;
std::tie(weight, unpacked_nodes, unpacked_edges) = search(engine_working_data,
facade,
forward_heap,
reverse_heap,
{},
{},
weight_upper_bound,
endpoints);
auto [weight, unpacked_nodes, unpacked_edges] = search(
engine_working_data, facade, forward_heap, reverse_heap, {}, weight_upper_bound, endpoints);
if (weight == INVALID_EDGE_WEIGHT)
{
return std::numeric_limits<double>::max();
}
std::vector<PathData> unpacked_path;
BOOST_ASSERT(unpacked_nodes.size() >= 1);
annotatePath(facade, endpoints, unpacked_nodes, unpacked_edges, unpacked_path);
EdgeDistance distance = {0.0};
return getPathDistance(facade, unpacked_path, source_phantom, target_phantom);
if (source_phantom.forward_segment_id.id == unpacked_nodes.front())
{
BOOST_ASSERT(source_phantom.forward_segment_id.enabled);
distance = EdgeDistance{0} - source_phantom.GetForwardDistance();
}
else if (source_phantom.reverse_segment_id.id == unpacked_nodes.front())
{
BOOST_ASSERT(source_phantom.reverse_segment_id.enabled);
distance = EdgeDistance{0} - source_phantom.GetReverseDistance();
}
for (size_t index = 0; index < unpacked_nodes.size() - 1; ++index)
{
distance += facade.GetNodeDistance(unpacked_nodes[index]);
}
if (target_phantom.forward_segment_id.id == unpacked_nodes.back())
{
BOOST_ASSERT(target_phantom.forward_segment_id.enabled);
distance += target_phantom.GetForwardDistance();
}
else if (target_phantom.reverse_segment_id.id == unpacked_nodes.back())
{
BOOST_ASSERT(target_phantom.reverse_segment_id.enabled);
distance += target_phantom.GetReverseDistance();
}
return from_alias<double>(distance);
}
} // namespace osrm::engine::routing_algorithms::mld
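Stated as a formula, the distance now accumulated inline in getNetworkDistance above (taking the case where the forward phantom directions match; the reverse case mirrors it) is:
    distance = -source.GetForwardDistance() + Σ_{i=0..n-2} facade.GetNodeDistance(u_i) + target.GetForwardDistance()
where u_0 … u_{n-1} are the unpacked nodes; the negative source term and the positive target term correct for the partial first and last segments.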

View File

@ -64,7 +64,6 @@ void searchWithUTurn(SearchEngineData<Algorithm> &engine_working_data,
leg_weight,
leg_packed_path,
{},
{},
candidates);
}
@ -124,8 +123,7 @@ void search(SearchEngineData<Algorithm> &engine_working_data,
reverse_heap,
new_total_weight_to_forward,
leg_packed_path_forward,
getForwardLoopNodes(candidates),
{},
getForwardForceNodes(candidates),
candidates);
}
@ -164,8 +162,7 @@ void search(SearchEngineData<Algorithm> &engine_working_data,
reverse_heap,
new_total_weight_to_reverse,
leg_packed_path_reverse,
{},
getBackwardLoopNodes(candidates),
getBackwardForceNodes(candidates),
candidates);
}
}

View File

@ -36,6 +36,13 @@ inline double radToDeg(const double radian)
}
} // namespace detail
const constexpr static double METERS_PER_DEGREE_LAT = 110567.0;
inline double metersPerLngDegree(const FixedLatitude lat)
{
return std::cos(detail::degToRad(static_cast<double>(toFloating(lat)))) * METERS_PER_DEGREE_LAT;
}
//! Takes the squared euclidean distance of the input coordinates. Does not return meters!
std::uint64_t squaredEuclideanDistance(const Coordinate lhs, const Coordinate rhs);
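As a quick sanity check of metersPerLngDegree: at latitude 45°, cos(45°) ≈ 0.7071, so one degree of longitude spans roughly 0.7071 × 110567 ≈ 78,180 m, versus 110,567 m at the equator (cos(0°) = 1).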

View File

@ -3,7 +3,6 @@
#include "util/coordinate.hpp"
#include "util/coordinate_calculation.hpp"
#include <boost/assert.hpp>
#include <limits>
@ -168,6 +167,18 @@ struct RectangleInt2D
min_lat != FixedLatitude{std::numeric_limits<std::int32_t>::max()} &&
max_lat != FixedLatitude{std::numeric_limits<std::int32_t>::min()};
}
static RectangleInt2D ExpandMeters(const Coordinate &coordinate, const double meters)
{
const double lat_offset = meters / coordinate_calculation::METERS_PER_DEGREE_LAT;
const double lon_offset =
meters / coordinate_calculation::metersPerLngDegree(coordinate.lat);
return RectangleInt2D{coordinate.lon - toFixed(FloatLongitude{lon_offset}),
coordinate.lon + toFixed(FloatLongitude{lon_offset}),
coordinate.lat - toFixed(FloatLatitude{lat_offset}),
coordinate.lat + toFixed(FloatLatitude{lat_offset})};
}
};
} // namespace osrm::util
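Continuing that example for ExpandMeters: expanding a coordinate at latitude 45° by 30 m gives lat_offset = 30 / 110567 ≈ 2.7 × 10⁻⁴° and lon_offset ≈ 30 / 78180 ≈ 3.8 × 10⁻⁴°, i.e. a bounding box extending roughly 30 m in each cardinal direction; the flat-earth approximation is more than adequate at radii of this magnitude.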

View File

@ -2,7 +2,6 @@
#define STATIC_RTREE_HPP
#include "storage/tar_fwd.hpp"
#include "util/bearing.hpp"
#include "util/coordinate_calculation.hpp"
#include "util/deallocating_vector.hpp"
@ -11,6 +10,7 @@
#include "util/integer_range.hpp"
#include "util/mmap_file.hpp"
#include "util/rectangle.hpp"
#include "util/timing_util.hpp"
#include "util/typedefs.hpp"
#include "util/vector_view.hpp"
#include "util/web_mercator.hpp"
@ -487,70 +487,9 @@ class StaticRTree
Rectangle needs to be projected!*/
std::vector<EdgeDataT> SearchInBox(const Rectangle &search_rectangle) const
{
const Rectangle projected_rectangle{
search_rectangle.min_lon,
search_rectangle.max_lon,
toFixed(FloatLatitude{
web_mercator::latToY(toFloating(FixedLatitude(search_rectangle.min_lat)))}),
toFixed(FloatLatitude{
web_mercator::latToY(toFloating(FixedLatitude(search_rectangle.max_lat)))})};
std::vector<EdgeDataT> results;
std::queue<TreeIndex> traversal_queue;
traversal_queue.push(TreeIndex{});
while (!traversal_queue.empty())
{
auto const current_tree_index = traversal_queue.front();
traversal_queue.pop();
// If we're at the bottom of the tree, we need to explore the
// element array
if (is_leaf(current_tree_index))
{
// Note: irange is [start,finish), so we need to +1 to make sure we visit the
// last
for (const auto current_child_index : child_indexes(current_tree_index))
{
const auto &current_edge = m_objects[current_child_index];
// we don't need to project the coordinates here,
// because we use the unprojected rectangle to test against
const Rectangle bbox{std::min(m_coordinate_list[current_edge.u].lon,
m_coordinate_list[current_edge.v].lon),
std::max(m_coordinate_list[current_edge.u].lon,
m_coordinate_list[current_edge.v].lon),
std::min(m_coordinate_list[current_edge.u].lat,
m_coordinate_list[current_edge.v].lat),
std::max(m_coordinate_list[current_edge.u].lat,
m_coordinate_list[current_edge.v].lat)};
// use the _unprojected_ input rectangle here
if (bbox.Intersects(search_rectangle))
{
results.push_back(current_edge);
}
}
}
else
{
BOOST_ASSERT(current_tree_index.level + 1 < m_tree_level_starts.size());
for (const auto child_index : child_indexes(current_tree_index))
{
const auto &child_rectangle =
m_search_tree[child_index].minimum_bounding_rectangle;
if (child_rectangle.Intersects(projected_rectangle))
{
traversal_queue.push(TreeIndex(
current_tree_index.level + 1,
child_index - m_tree_level_starts[current_tree_index.level + 1]));
}
}
}
}
SearchInBox(search_rectangle,
[&results](const auto &edge_data) { results.push_back(edge_data); });
return results;
}
@ -565,6 +504,45 @@ class StaticRTree
{ return num_results >= max_results; });
}
// NB 1: results are not guaranteed to be sorted by distance
// NB 2: maxDistanceMeters is not a hard limit, it's just a way to reduce the number of edges
// returned
template <typename FilterT>
std::vector<CandidateSegment> SearchInRange(const Coordinate input_coordinate,
double maxDistanceMeters,
const FilterT filter) const
{
auto projected_coordinate = web_mercator::fromWGS84(input_coordinate);
Coordinate fixed_projected_coordinate{projected_coordinate};
auto bbox = Rectangle::ExpandMeters(input_coordinate, maxDistanceMeters);
std::vector<CandidateSegment> results;
SearchInBox(
bbox,
[&results, &filter, fixed_projected_coordinate, this](const EdgeDataT &current_edge)
{
const auto projected_u = web_mercator::fromWGS84(m_coordinate_list[current_edge.u]);
const auto projected_v = web_mercator::fromWGS84(m_coordinate_list[current_edge.v]);
auto [_, projected_nearest] = coordinate_calculation::projectPointOnSegment(
projected_u, projected_v, fixed_projected_coordinate);
CandidateSegment current_candidate{projected_nearest, current_edge};
auto use_segment = filter(current_candidate);
if (!use_segment.first && !use_segment.second)
{
return;
}
current_candidate.data.forward_segment_id.enabled &= use_segment.first;
current_candidate.data.reverse_segment_id.enabled &= use_segment.second;
results.push_back(current_candidate);
});
return results;
}
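A hedged usage sketch of SearchInRange (the rtree instance, input_coordinate, and the CandidateSegment type are assumed from the surrounding OSRM context; the radius and filter are illustrative):
// Collect every candidate segment within roughly 50 m of input_coordinate,
// keeping both travel directions. The filter returns {use_forward, use_reverse};
// returning {false, false} drops the segment entirely.
auto candidates = rtree.SearchInRange(input_coordinate,
                                      /*maxDistanceMeters=*/50.0,
                                      [](const CandidateSegment & /*segment*/)
                                      { return std::make_pair(true, true); });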
// Return edges in distance order with the coordinate of the closest point on the edge.
template <typename FilterT, typename TerminationT>
std::vector<CandidateSegment> Nearest(const Coordinate input_coordinate,
@ -572,8 +550,10 @@ class StaticRTree
const TerminationT terminate) const
{
std::vector<CandidateSegment> results;
auto projected_coordinate = web_mercator::fromWGS84(input_coordinate);
Coordinate fixed_projected_coordinate{projected_coordinate};
// initialize queue with root element
std::priority_queue<QueryCandidate> traversal_queue;
traversal_queue.push(QueryCandidate{0, TreeIndex{}});
@ -631,6 +611,73 @@ class StaticRTree
}
private:
template <typename Callback>
void SearchInBox(const Rectangle &search_rectangle, Callback &&callback) const
{
const Rectangle projected_rectangle{
search_rectangle.min_lon,
search_rectangle.max_lon,
toFixed(FloatLatitude{
web_mercator::latToY(toFloating(FixedLatitude(search_rectangle.min_lat)))}),
toFixed(FloatLatitude{
web_mercator::latToY(toFloating(FixedLatitude(search_rectangle.max_lat)))})};
std::queue<TreeIndex> traversal_queue;
traversal_queue.push(TreeIndex{});
while (!traversal_queue.empty())
{
auto const current_tree_index = traversal_queue.front();
traversal_queue.pop();
// If we're at the bottom of the tree, we need to explore the
// element array
if (is_leaf(current_tree_index))
{
// Note: irange is [start,finish), so we need to +1 to make sure we visit the
// last
for (const auto current_child_index : child_indexes(current_tree_index))
{
const auto &current_edge = m_objects[current_child_index];
// we don't need to project the coordinates here,
// because we use the unprojected rectangle to test against
const Rectangle bbox{std::min(m_coordinate_list[current_edge.u].lon,
m_coordinate_list[current_edge.v].lon),
std::max(m_coordinate_list[current_edge.u].lon,
m_coordinate_list[current_edge.v].lon),
std::min(m_coordinate_list[current_edge.u].lat,
m_coordinate_list[current_edge.v].lat),
std::max(m_coordinate_list[current_edge.u].lat,
m_coordinate_list[current_edge.v].lat)};
// use the _unprojected_ input rectangle here
if (bbox.Intersects(search_rectangle))
{
callback(current_edge);
}
}
}
else
{
BOOST_ASSERT(current_tree_index.level + 1 < m_tree_level_starts.size());
for (const auto child_index : child_indexes(current_tree_index))
{
const auto &child_rectangle =
m_search_tree[child_index].minimum_bounding_rectangle;
if (child_rectangle.Intersects(projected_rectangle))
{
traversal_queue.push(TreeIndex(
current_tree_index.level + 1,
child_index - m_tree_level_starts[current_tree_index.level + 1]));
}
}
}
}
}
/**
* Iterates over all the objects in a leaf node and inserts them into our
* search priority queue. The speed of this function is very much governed

View File

@ -0,0 +1,96 @@
import requests
import os
import re
import sys
import json
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
REPO = os.getenv('GITHUB_REPOSITORY')
PR_NUMBER = os.getenv('PR_NUMBER')
REPO_OWNER, REPO_NAME = REPO.split('/')
def create_markdown_table(results):
results = sorted(results, key=lambda x: x['name'])
header = "| Benchmark | Base | PR |\n|-----------|------|----|"
rows = []
for result in results:
name = result['name']
base = result['base'].replace('\n', '<br/>')
pr = result['pr'].replace('\n', '<br/>')
row = f"| {name} | {base} | {pr} |"
rows.append(row)
return f"{header}\n" + "\n".join(rows)
def get_pr_details(repo_owner, repo_name, pr_number):
url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/pulls/{pr_number}"
headers = {'Authorization': f'token {GITHUB_TOKEN}'}
response = requests.get(url, headers=headers)
response.raise_for_status()
return response.json()
def update_pr_description(repo_owner, repo_name, pr_number, body):
url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/pulls/{pr_number}"
headers = {'Authorization': f'token {GITHUB_TOKEN}'}
data = {'body': body}
response = requests.patch(url, headers=headers, json=data)
response.raise_for_status()
return response.json()
def collect_benchmark_results(base_folder, pr_folder):
results = []
results_index = {}
for file in os.listdir(base_folder):
if not file.endswith('.bench'): continue
with open(f"{base_folder}/{file}") as f:
result = f.read().strip()
results.append({'base': result, 'pr': None, 'name': os.path.splitext(file)[0]})
results_index[file] = len(results) - 1
for file in os.listdir(pr_folder):
if not file.endswith('.bench'): continue
with open(f"{pr_folder}/{file}") as f:
result = f.read().strip()
if file in results_index:
results[results_index[file]]['pr'] = result
else:
results.append({'base': None, 'pr': result, 'name': os.path.splitext(file)[0]})
return results
def main():
if len(sys.argv) != 3:
print("Usage: python post_benchmark_results.py <base_folder> <pr_folder>")
exit(1)
base_folder = sys.argv[1]
pr_folder = sys.argv[2]
benchmark_results = collect_benchmark_results(base_folder, pr_folder)
pr_details = get_pr_details(REPO_OWNER, REPO_NAME, PR_NUMBER)
# in both cases (no PR body, or PR body is None) fall back to an empty string
pr_body = pr_details.get('body', '') or ''
markdown_table = create_markdown_table(benchmark_results)
new_benchmark_section = f"<!-- BENCHMARK_RESULTS_START -->\n## Benchmark Results\n{markdown_table}\n<!-- BENCHMARK_RESULTS_END -->"
if re.search(r'<!-- BENCHMARK_RESULTS_START -->.*<!-- BENCHMARK_RESULTS_END -->', pr_body, re.DOTALL):
updated_body = re.sub(
r'<!-- BENCHMARK_RESULTS_START -->.*<!-- BENCHMARK_RESULTS_END -->',
new_benchmark_section,
pr_body,
flags=re.DOTALL
)
else:
updated_body = f"{pr_body}\n\n{new_benchmark_section}" if len(pr_body) > 0 else new_benchmark_section
update_pr_description(REPO_OWNER, REPO_NAME, PR_NUMBER, updated_body)
print("PR description updated successfully.")
if __name__ == "__main__":
main()

scripts/ci/run_benchmarks.sh (new executable file, 24 lines added)
View File

@ -0,0 +1,24 @@
#!/bin/bash
set -eou pipefail
function run_benchmarks_for_folder {
echo "Running benchmarks for $1"
FOLDER=$1
RESULTS_FOLDER=$2
mkdir -p $RESULTS_FOLDER
BENCHMARKS_FOLDER="$FOLDER/build/src/benchmarks"
./$BENCHMARKS_FOLDER/match-bench "./$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench"
./$BENCHMARKS_FOLDER/match-bench "./$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench"
./$BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench"
./$BENCHMARKS_FOLDER/json-render-bench "./$FOLDER/src/benchmarks/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench"
./$BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench"
./$BENCHMARKS_FOLDER/rtree-bench "./$FOLDER/test/data/monaco.osrm.ramIndex" "./$FOLDER/test/data/monaco.osrm.fileIndex" "./$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench"
}
run_benchmarks_for_folder $1 "${1}_results"
run_benchmarks_for_folder $2 "${2}_results"

View File

@ -64,7 +64,7 @@ int main(int, char **)
return EXIT_FAILURE;
}
TIMER_STOP(aliased_u32);
util::Log() << "aliased u32: " << TIMER_MSEC(aliased_u32);
std::cout << "aliased u32: " << TIMER_MSEC(aliased_u32) << std::endl;
TIMER_START(plain_u32);
for (auto round : util::irange(0, num_rounds))
@ -83,7 +83,7 @@ int main(int, char **)
return EXIT_FAILURE;
}
TIMER_STOP(plain_u32);
util::Log() << "plain u32: " << TIMER_MSEC(plain_u32);
std::cout << "plain u32: " << TIMER_MSEC(plain_u32) << std::endl;
TIMER_START(aliased_double);
for (auto round : util::irange(0, num_rounds))
@ -103,7 +103,7 @@ int main(int, char **)
return EXIT_FAILURE;
}
TIMER_STOP(aliased_double);
util::Log() << "aliased double: " << TIMER_MSEC(aliased_double);
std::cout << "aliased double: " << TIMER_MSEC(aliased_double) << std::endl;
TIMER_START(plain_double);
for (auto round : util::irange(0, num_rounds))
@ -123,5 +123,5 @@ int main(int, char **)
return EXIT_FAILURE;
}
TIMER_STOP(plain_double);
util::Log() << "plain double: " << TIMER_MSEC(plain_double);
std::cout << "plain double: " << TIMER_MSEC(plain_double) << std::endl;
}

View File

@ -1,3 +1,4 @@
#include "engine/engine_config.hpp"
#include "util/timing_util.hpp"
#include "osrm/match_parameters.hpp"
@ -11,13 +12,14 @@
#include <boost/assert.hpp>
#include <cstdlib>
#include <exception>
#include <iostream>
#include <optional>
#include <stdexcept>
#include <string>
#include <utility>
#include <cstdlib>
int main(int argc, const char *argv[])
try
{
@ -32,6 +34,8 @@ try
// Configure based on a .osrm base path, and no datasets in shared mem from osrm-datastore
EngineConfig config;
config.storage_config = {argv[1]};
config.algorithm = (argc > 2 && std::string{argv[2]} == "mld") ? EngineConfig::Algorithm::MLD
: EngineConfig::Algorithm::CH;
config.use_shared_memory = false;
// Routing machine with several services (such as Route, Table, Nearest, Trip, Match)
@ -211,24 +215,49 @@ try
params.coordinates.push_back(
FloatCoordinate{FloatLongitude{7.415342330932617}, FloatLatitude{43.733251335381205}});
TIMER_START(routes);
auto NUM = 100;
for (int i = 0; i < NUM; ++i)
auto run_benchmark = [&](std::optional<double> radiusInMeters)
{
engine::api::ResultT result = json::Object();
const auto rc = osrm.Match(params, result);
auto &json_result = result.get<json::Object>();
if (rc != Status::Ok ||
json_result.values.at("matchings").get<json::Array>().values.size() != 1)
params.radiuses = {};
if (radiusInMeters)
{
return EXIT_FAILURE;
for (size_t index = 0; index < params.coordinates.size(); ++index)
{
params.radiuses.emplace_back(*radiusInMeters);
}
}
TIMER_START(routes);
auto NUM = 100;
for (int i = 0; i < NUM; ++i)
{
engine::api::ResultT result = json::Object();
const auto rc = osrm.Match(params, result);
auto &json_result = result.get<json::Object>();
if (rc != Status::Ok ||
json_result.values.at("matchings").get<json::Array>().values.size() != 1)
{
throw std::runtime_error{"Couldn't match"};
}
}
TIMER_STOP(routes);
if (radiusInMeters)
{
std::cout << "Radius " << *radiusInMeters << "m: " << std::endl;
}
else
{
std::cout << "Default radius: " << std::endl;
}
std::cout << (TIMER_MSEC(routes) / NUM) << "ms/req at " << params.coordinates.size()
<< " coordinate" << std::endl;
std::cout << (TIMER_MSEC(routes) / NUM / params.coordinates.size()) << "ms/coordinate"
<< std::endl;
};
for (auto radius : std::vector<std::optional<double>>{std::nullopt, 5.0, 10.0, 15.0, 30.0})
{
run_benchmark(radius);
}
TIMER_STOP(routes);
std::cout << (TIMER_MSEC(routes) / NUM) << "ms/req at " << params.coordinates.size()
<< " coordinate" << std::endl;
std::cout << (TIMER_MSEC(routes) / NUM / params.coordinates.size()) << "ms/coordinate"
<< std::endl;
return EXIT_SUCCESS;
}
@ -236,4 +265,4 @@ catch (const std::exception &e)
{
std::cerr << "Error: " << e.what() << std::endl;
return EXIT_FAILURE;
}
}

View File

@ -72,10 +72,10 @@ int main(int, char **)
auto write_slowdown = result_packed.random_write_ms / result_plain.random_write_ms;
auto read_slowdown = result_packed.random_read_ms / result_plain.random_read_ms;
util::Log() << "random write: std::vector " << result_plain.random_write_ms
<< " ms, util::packed_vector " << result_packed.random_write_ms << " ms. "
<< write_slowdown;
util::Log() << "random read: std::vector " << result_plain.random_read_ms
<< " ms, util::packed_vector " << result_packed.random_read_ms << " ms. "
<< read_slowdown;
std::cout << "random write:\nstd::vector " << result_plain.random_write_ms
<< " ms\nutil::packed_vector " << result_packed.random_write_ms << " ms\n"
<< "slowdown: " << write_slowdown << std::endl;
std::cout << "random read:\nstd::vector " << result_plain.random_read_ms
<< " ms\nutil::packed_vector " << result_packed.random_read_ms << " ms\n"
<< "slowdown: " << read_slowdown << std::endl;
}

View File

@ -36,8 +36,6 @@ void benchmarkQuery(const std::vector<util::Coordinate> &queries,
const std::string &name,
QueryT query)
{
std::cout << "Running " << name << " with " << queries.size() << " coordinates: " << std::flush;
TIMER_START(query);
for (const auto &q : queries)
{
@ -46,11 +44,9 @@ void benchmarkQuery(const std::vector<util::Coordinate> &queries,
}
TIMER_STOP(query);
std::cout << "Took " << TIMER_SEC(query) << " seconds "
<< "(" << TIMER_MSEC(query) << "ms"
<< ") -> " << TIMER_MSEC(query) / queries.size() << " ms/query "
<< "(" << TIMER_MSEC(query) << "ms"
<< ")" << std::endl;
std::cout << name << ":\n"
<< TIMER_MSEC(query) << "ms"
<< " -> " << TIMER_MSEC(query) / queries.size() << " ms/query" << std::endl;
}
void benchmark(BenchStaticRTree &rtree, unsigned num_queries)
@ -65,11 +61,10 @@ void benchmark(BenchStaticRTree &rtree, unsigned num_queries)
util::FixedLatitude{lat_udist(mt_rand)});
}
benchmarkQuery(
queries, "1 result", [&rtree](const util::Coordinate &q) { return rtree.Nearest(q, 1); });
benchmarkQuery(queries,
"raw RTree queries (1 result)",
[&rtree](const util::Coordinate &q) { return rtree.Nearest(q, 1); });
benchmarkQuery(queries,
"raw RTree queries (10 results)",
"10 results",
[&rtree](const util::Coordinate &q) { return rtree.Nearest(q, 10); });
}
} // namespace osrm::benchmarks

View File

@ -187,7 +187,6 @@ void computeWeightAndSharingOfViaPath(SearchEngineData<Algorithm> &engine_workin
s_v_middle,
upper_bound_s_v_path_weight,
min_edge_offset,
{},
{});
}
// compute path <v,..,t> by reusing backward search from node t
@ -202,7 +201,6 @@ void computeWeightAndSharingOfViaPath(SearchEngineData<Algorithm> &engine_workin
v_t_middle,
upper_bound_of_v_t_path_weight,
min_edge_offset,
{},
{});
}
*real_weight_of_via_path = upper_bound_s_v_path_weight + upper_bound_of_v_t_path_weight;
@ -348,7 +346,6 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
*s_v_middle,
upper_bound_s_v_path_weight,
min_edge_offset,
{},
{});
}
@ -369,7 +366,6 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
*v_t_middle,
upper_bound_of_v_t_path_weight,
min_edge_offset,
{},
{});
}
@ -538,12 +534,12 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
if (!forward_heap3.Empty())
{
routingStep<FORWARD_DIRECTION>(
facade, forward_heap3, reverse_heap3, middle, upper_bound, min_edge_offset, {}, {});
facade, forward_heap3, reverse_heap3, middle, upper_bound, min_edge_offset, {});
}
if (!reverse_heap3.Empty())
{
routingStep<REVERSE_DIRECTION>(
facade, reverse_heap3, forward_heap3, middle, upper_bound, min_edge_offset, {}, {});
facade, reverse_heap3, forward_heap3, middle, upper_bound, min_edge_offset, {});
}
}
return (upper_bound <= t_test_path_weight);

View File

@ -631,7 +631,6 @@ void unpackPackedPaths(InputIt first,
forward_heap,
reverse_heap,
{},
{},
INVALID_EDGE_WEIGHT,
sublevel,
parent_cell_id);
@ -720,7 +719,6 @@ makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
overlap_via,
overlap_weight,
{},
{},
endpoint_candidates);
if (!forward_heap.Empty())
@ -746,7 +744,6 @@ makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
overlap_via,
overlap_weight,
{},
{},
endpoint_candidates);
if (!reverse_heap.Empty())

View File

@ -34,7 +34,6 @@ InternalRouteResult directShortestPathSearch(SearchEngineData<ch::Algorithm> &en
weight,
packed_leg,
{},
{},
endpoint_candidates);
std::vector<NodeID> unpacked_nodes;
@ -81,7 +80,6 @@ InternalRouteResult directShortestPathSearch(SearchEngineData<mld::Algorithm> &e
forward_heap,
reverse_heap,
{},
{},
INVALID_EDGE_WEIGHT,
endpoint_candidates);

View File

@ -3,21 +3,29 @@
namespace osrm::engine::routing_algorithms
{
bool requiresForwardLoop(const PhantomNode &source, const PhantomNode &target)
bool requiresForwardForce(const PhantomNode &source, const PhantomNode &target)
{
// Conditions to force a routing step:
// - Valid source and target.
// - Source and target on same segment.
// - Source is "downstream" of target in the direction of the edge.
return source.IsValidForwardSource() && target.IsValidForwardTarget() &&
source.forward_segment_id.id == target.forward_segment_id.id &&
source.GetForwardWeightPlusOffset() > target.GetForwardWeightPlusOffset();
}
bool requiresBackwardLoop(const PhantomNode &source, const PhantomNode &target)
bool requiresBackwardForce(const PhantomNode &source, const PhantomNode &target)
{
// Conditions to force a routing step:
// - Valid source and target.
// - Source and target on same segment.
// - Source is "downstream" of target in the direction of the edge.
return source.IsValidReverseSource() && target.IsValidReverseTarget() &&
source.reverse_segment_id.id == target.reverse_segment_id.id &&
source.GetReverseWeightPlusOffset() > target.GetReverseWeightPlusOffset();
}
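A concrete illustration of the forward condition (numbers hypothetical): if source and target phantoms both snap to forward segment 17 with source.GetForwardWeightPlusOffset() = 70 and target.GetForwardWeightPlusOffset() = 30, the source sits past the target along the edge direction, so an ordinary forward search starting at the source can never settle the target on that segment; requiresForwardForce returns true and the segment's node is added to the force list. With the offsets swapped it returns false and no forcing is needed.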
std::vector<NodeID> getForwardLoopNodes(const PhantomEndpointCandidates &endpoint_candidates)
std::vector<NodeID> getForwardForceNodes(const PhantomEndpointCandidates &endpoint_candidates)
{
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
@ -26,7 +34,7 @@ std::vector<NodeID> getForwardLoopNodes(const PhantomEndpointCandidates &endpoin
std::any_of(endpoint_candidates.target_phantoms.begin(),
endpoint_candidates.target_phantoms.end(),
[&](const auto &target_phantom)
{ return requiresForwardLoop(source_phantom, target_phantom); });
{ return requiresForwardForce(source_phantom, target_phantom); });
if (requires_loop)
{
res.push_back(source_phantom.forward_segment_id.id);
@ -35,12 +43,12 @@ std::vector<NodeID> getForwardLoopNodes(const PhantomEndpointCandidates &endpoin
return res;
}
std::vector<NodeID> getForwardLoopNodes(const PhantomCandidatesToTarget &endpoint_candidates)
std::vector<NodeID> getForwardForceNodes(const PhantomCandidatesToTarget &endpoint_candidates)
{
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
{
if (requiresForwardLoop(source_phantom, endpoint_candidates.target_phantom))
if (requiresForwardForce(source_phantom, endpoint_candidates.target_phantom))
{
res.push_back(source_phantom.forward_segment_id.id);
}
@ -48,7 +56,7 @@ std::vector<NodeID> getForwardLoopNodes(const PhantomCandidatesToTarget &endpoin
return res;
}
std::vector<NodeID> getBackwardLoopNodes(const PhantomEndpointCandidates &endpoint_candidates)
std::vector<NodeID> getBackwardForceNodes(const PhantomEndpointCandidates &endpoint_candidates)
{
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
@ -57,7 +65,7 @@ std::vector<NodeID> getBackwardLoopNodes(const PhantomEndpointCandidates &endpoi
std::any_of(endpoint_candidates.target_phantoms.begin(),
endpoint_candidates.target_phantoms.end(),
[&](const auto &target_phantom)
{ return requiresBackwardLoop(source_phantom, target_phantom); });
{ return requiresBackwardForce(source_phantom, target_phantom); });
if (requires_loop)
{
res.push_back(source_phantom.reverse_segment_id.id);
@ -66,12 +74,12 @@ std::vector<NodeID> getBackwardLoopNodes(const PhantomEndpointCandidates &endpoi
return res;
}
std::vector<NodeID> getBackwardLoopNodes(const PhantomCandidatesToTarget &endpoint_candidates)
std::vector<NodeID> getBackwardForceNodes(const PhantomCandidatesToTarget &endpoint_candidates)
{
std::vector<NodeID> res;
for (const auto &source_phantom : endpoint_candidates.source_phantoms)
{
if (requiresBackwardLoop(source_phantom, endpoint_candidates.target_phantom))
if (requiresBackwardForce(source_phantom, endpoint_candidates.target_phantom))
{
res.push_back(source_phantom.reverse_segment_id.id);
}

View File

@ -73,23 +73,22 @@ void retrievePackedPathFromSingleManyToManyHeap(
// assumes that heaps are already setup correctly.
// ATTENTION: This only works if no additional offset is supplied next to the Phantom Node
// Offsets.
// In case additional offsets are supplied, you might have to force a loop first.
// A forced loop might be necessary, if source and target are on the same segment.
// In case additional offsets are supplied, you might have to force a routing step first.
// A forced step might be necessary, if source and target are on the same segment.
// If this is the case and the offsets of the respective direction are larger for the source
// than the target
// then a force loop is required (e.g. source_phantom.forward_segment_id ==
// then a force step is required (e.g. source_phantom.forward_segment_id ==
// target_phantom.forward_segment_id
// && source_phantom.GetForwardWeightPlusOffset() > target_phantom.GetForwardWeightPlusOffset())
// requires
// a force loop, if the heaps have been initialized with positive offsets.
// a force step, if the heaps have been initialized with positive offsets.
void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
const DataFacade<Algorithm> &facade,
SearchEngineData<Algorithm>::QueryHeap &forward_heap,
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
EdgeWeight &weight,
std::vector<NodeID> &packed_leg,
const std::vector<NodeID> &force_loop_forward_nodes,
const std::vector<NodeID> &force_loop_reverse_nodes,
const std::vector<NodeID> &force_step_nodes,
const EdgeWeight weight_upper_bound)
{
if (forward_heap.Empty() || reverse_heap.Empty())
@ -118,8 +117,7 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
middle,
weight,
min_edge_offset,
force_loop_forward_nodes,
force_loop_reverse_nodes);
force_step_nodes);
}
if (!reverse_heap.Empty())
{
@ -129,8 +127,7 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
middle,
weight,
min_edge_offset,
force_loop_reverse_nodes,
force_loop_forward_nodes);
force_step_nodes);
}
}
@ -159,7 +156,7 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
// Requires the heaps for be empty
// If heaps should be adjusted to be initialized outside of this function,
// the addition of force_loop parameters might be required
// the addition of force_step parameters might be required
double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
SearchEngineData<Algorithm>::QueryHeap &forward_heap,
@ -183,7 +180,6 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
weight,
packed_path,
{},
{},
endpoints,
weight_upper_bound);

View File

@ -33,7 +33,7 @@ mld/$(DATA_NAME).osrm: $(DATA_NAME).osrm
cp $(DATA_NAME).osrm.* mld/
$(DATA_NAME).osrm: $(DATA_NAME).osm.pbf $(DATA_NAME).poly $(PROFILE) $(OSRM_EXTRACT)
@echo "Verifiyng data file integrity..."
@echo "Verifying data file integrity..."
$(MD5SUM) -c data.md5sum
@echo "Running osrm-extract..."
$(TIMER) "osrm-extract\t$@" $(OSRM_EXTRACT) $< -p $(PROFILE)

View File

@ -334,8 +334,7 @@ inline void search(SearchEngineData<Algorithm> &engine_working_data,
typename SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
EdgeWeight &weight,
std::vector<NodeID> &packed_leg,
const std::vector<NodeID> &forward_loop_nodes,
const std::vector<NodeID> &reverse_loop_nodes,
const std::vector<NodeID> &loop_nodes,
const PhantomT &endpoints,
const EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT)
{
@ -345,8 +344,7 @@ inline void search(SearchEngineData<Algorithm> &engine_working_data,
reverse_heap,
weight,
packed_leg,
forward_loop_nodes,
reverse_loop_nodes,
loop_nodes,
endpoints,
weight_upper_bound);
}

View File

@ -348,7 +348,13 @@ BOOST_AUTO_TEST_CASE(radius_regression_test)
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 0.01, boost::none, true);
input, osrm::engine::Approach::UNRESTRICTED, 0.01, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 0);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 1, 0.01, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 0);
}
}
@ -374,13 +380,25 @@ BOOST_AUTO_TEST_CASE(permissive_edge_snapping)
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 1000, boost::none, false);
input, osrm::engine::Approach::UNRESTRICTED, 1000, boost::none, false);
BOOST_CHECK_EQUAL(results.size(), 1);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 1000, boost::none, true);
input, osrm::engine::Approach::UNRESTRICTED, 1000, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 2);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 10, 1000, boost::none, false);
BOOST_CHECK_EQUAL(results.size(), 1);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 10, 1000, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 2);
}
}
@ -442,27 +460,45 @@ BOOST_AUTO_TEST_CASE(bearing_tests)
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, boost::none, 11000, boost::none, true);
input, osrm::engine::Approach::UNRESTRICTED, 11000, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 2);
}
{
auto results = query.NearestPhantomNodes(input,
osrm::engine::Approach::UNRESTRICTED,
boost::none,
11000,
engine::Bearing{270, 10},
true);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 10, 11000, boost::none, true);
BOOST_CHECK_EQUAL(results.size(), 2);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 11000, engine::Bearing{270, 10}, true);
BOOST_CHECK_EQUAL(results.size(), 0);
}
{
auto results = query.NearestPhantomNodes(input,
osrm::engine::Approach::UNRESTRICTED,
boost::none,
11000,
engine::Bearing{45, 10},
true);
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 10, 11000, engine::Bearing{270, 10}, true);
BOOST_CHECK_EQUAL(results.size(), 0);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 11000, engine::Bearing{45, 10}, true);
BOOST_CHECK_EQUAL(results.size(), 2);
BOOST_CHECK(results[0].phantom_node.forward_segment_id.enabled);
BOOST_CHECK(!results[0].phantom_node.reverse_segment_id.enabled);
BOOST_CHECK_EQUAL(results[0].phantom_node.forward_segment_id.id, 1);
BOOST_CHECK(!results[1].phantom_node.forward_segment_id.enabled);
BOOST_CHECK(results[1].phantom_node.reverse_segment_id.enabled);
BOOST_CHECK_EQUAL(results[1].phantom_node.reverse_segment_id.id, 1);
}
{
auto results = query.NearestPhantomNodes(
input, osrm::engine::Approach::UNRESTRICTED, 10, 11000, engine::Bearing{45, 10}, true);
BOOST_CHECK_EQUAL(results.size(), 2);
BOOST_CHECK(results[0].phantom_node.forward_segment_id.enabled);