Implement distance pruning for MLD as well
parent da474a16a9
commit a88fef2937
@@ -186,6 +186,7 @@ search(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm> &facade,
        SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
        const bool force_loop_forward,
        const bool force_loop_reverse,
+       EdgeWeight weight_upper_bound,
        Args... args)
 {
 
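One consequence of where the new parameter sits: because the variadic `Args... args` comes right after it, a default value for `weight_upper_bound` would never apply when trailing arguments are supplied, since the first trailing argument would bind to the bound instead. That is presumably why the recursive call further down passes `INVALID_EDGE_WEIGHT` explicitly. A minimal stand-alone illustration with invented names, not the actual template:

#include <iostream>

// Hypothetical stand-in for the variadic search template above.
template <typename... Args> void bounded_call(int weight_upper_bound, Args... /*args*/)
{
    std::cout << "bound = " << weight_upper_bound << '\n';
}

int main()
{
    // The bound occupies the first position, so it must be supplied explicitly
    // before any trailing arguments such as a sub-level or cell id.
    bounded_call(150, "sublevel", "parent_cell_id"); // prints: bound = 150
}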
@@ -196,7 +197,7 @@ search(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm> &facade,
 
     // run two-Target Dijkstra routing step.
     NodeID middle = SPECIAL_NODEID;
-    EdgeWeight weight = INVALID_EDGE_WEIGHT;
+    EdgeWeight weight = weight_upper_bound;
     EdgeWeight forward_heap_min = forward_heap.MinKey();
     EdgeWeight reverse_heap_min = reverse_heap.MinKey();
     while (forward_heap.Size() + reverse_heap.Size() > 0 &&
@@ -231,7 +232,7 @@ search(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm> &facade,
     };
 
     // No path found for both target nodes?
-    if (weight == INVALID_EDGE_WEIGHT || SPECIAL_NODEID == middle)
+    if (weight >= weight_upper_bound || SPECIAL_NODEID == middle)
     {
         return std::make_tuple(
             INVALID_EDGE_WEIGHT, SPECIAL_NODEID, SPECIAL_NODEID, std::vector<EdgeID>());
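Seeding `weight` with the caller's bound is what makes the pruning fall out of the existing logic: only candidates strictly below the current best are recorded, and the surrounding loop compares the heap minima against `weight`, so paths at or above `weight_upper_bound` are neither returned nor fully explored. With the default bound of `INVALID_EDGE_WEIGHT` the new `weight >= weight_upper_bound` test degenerates to the old `weight == INVALID_EDGE_WEIGHT` test, so unbounded callers behave as before. A toy stand-in for that candidate handling (a sketch, not the actual MLD routing step):

#include <cstdint>
#include <iostream>
#include <limits>
#include <vector>

using EdgeWeight = std::int32_t;
const EdgeWeight INVALID_EDGE_WEIGHT = std::numeric_limits<EdgeWeight>::max();

// The best weight starts at the caller's bound; only strictly better candidates
// replace it, so anything at or above the bound is pruned implicitly.
EdgeWeight best_within_bound(const std::vector<EdgeWeight> &candidate_weights,
                             const EdgeWeight weight_upper_bound)
{
    EdgeWeight weight = weight_upper_bound; // was INVALID_EDGE_WEIGHT before this commit
    for (const auto candidate : candidate_weights)
        if (candidate < weight)
            weight = candidate;
    return weight;
}

int main()
{
    const std::vector<EdgeWeight> candidates = {900, 750, 1200};

    // Unbounded: the sentinel accepts every finite candidate -> prints 750
    std::cout << best_within_bound(candidates, INVALID_EDGE_WEIGHT) << '\n';

    // Bounded to 700: nothing qualifies, and the changed check reports "no path"
    const EdgeWeight bound = 700;
    const EdgeWeight weight = best_within_bound(candidates, bound);
    std::cout << (weight >= bound ? "no path within bound" : "path found") << '\n';
}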
@@ -297,6 +298,7 @@ search(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm> &facade,
                                reverse_heap,
                                force_loop_forward,
                                force_loop_reverse,
+                               INVALID_EDGE_WEIGHT,
                                sublevel,
                                parent_cell_id);
             BOOST_ASSERT(!subpath.empty());
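The recursive call shown here, which descends into a sub-level cell to unpack an edge of the already-found path, stays unbounded by passing `INVALID_EDGE_WEIGHT`: the sub-path is expected to exist (note the `BOOST_ASSERT(!subpath.empty())`), so applying the caller's bound at this level could only turn a valid unpacking step into a failed assertion.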
@@ -320,14 +322,17 @@ inline void search(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm>
                    const bool force_loop_forward,
                    const bool force_loop_reverse,
                    const PhantomNodes &phantom_nodes,
-                   const int duration_upper_bound = INVALID_EDGE_WEIGHT)
+                   const EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT)
 {
-    (void)duration_upper_bound; // TODO: limiting search radius is not implemented for MLD
-
     NodeID source_node, target_node;
     std::vector<EdgeID> unpacked_edges;
-    std::tie(weight, source_node, target_node, unpacked_edges) = mld::search(
-        facade, forward_heap, reverse_heap, force_loop_forward, force_loop_reverse, phantom_nodes);
+    std::tie(weight, source_node, target_node, unpacked_edges) = mld::search(facade,
+                                                                             forward_heap,
+                                                                             reverse_heap,
+                                                                             force_loop_forward,
+                                                                             force_loop_reverse,
+                                                                             weight_upper_bound,
+                                                                             phantom_nodes);
 
     if (weight != INVALID_EDGE_WEIGHT)
     {
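With this change the `(void)duration_upper_bound; // TODO ...` placeholder disappears: the MLD wrapper now forwards a real bound instead of ignoring it. Because the parameter defaults to `INVALID_EDGE_WEIGHT`, call sites that never cared about pruning keep compiling and, as noted above, keep behaving exactly as before.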
@@ -374,7 +379,7 @@ getNetworkDistance(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm>
                    SearchEngineData<Algorithm>::QueryHeap & /*reverse_core_heap*/,
                    const PhantomNode &source_phantom,
                    const PhantomNode &target_phantom,
-                   int /*duration_upper_bound*/)
+                   EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT)
 {
     forward_heap.Clear();
     reverse_heap.Clear();
@@ -385,8 +390,13 @@ getNetworkDistance(const datafacade::ContiguousInternalMemoryDataFacade<Algorithm>
     EdgeWeight weight;
     NodeID source_node, target_node;
     std::vector<EdgeID> unpacked_edges;
-    std::tie(weight, source_node, target_node, unpacked_edges) = search(
-        facade, forward_heap, reverse_heap, DO_NOT_FORCE_LOOPS, DO_NOT_FORCE_LOOPS, phantom_nodes);
+    std::tie(weight, source_node, target_node, unpacked_edges) = search(facade,
+                                                                        forward_heap,
+                                                                        reverse_heap,
+                                                                        DO_NOT_FORCE_LOOPS,
+                                                                        DO_NOT_FORCE_LOOPS,
+                                                                        weight_upper_bound,
+                                                                        phantom_nodes);
 
     if (weight == INVALID_EDGE_WEIGHT)
         return std::numeric_limits<double>::max();
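Nothing needs to change in how `getNetworkDistance` reports failure: when the bounded search finds nothing below the bound it still hands back `INVALID_EDGE_WEIGHT` (see the early-return hunk above), which this function already maps to `std::numeric_limits<double>::max()`. Callers that treat that sentinel as "unreachable" therefore get distance pruning for free. A toy rendering of that tail end (simplified, not the real function body):

#include <cstdint>
#include <iostream>
#include <limits>

using EdgeWeight = std::int32_t;
const EdgeWeight INVALID_EDGE_WEIGHT = std::numeric_limits<EdgeWeight>::max();

// A pruned search reports "nothing within the bound" with the same sentinel as
// "no path at all", so the existing distance mapping keeps working unchanged.
double to_network_distance(const EdgeWeight weight, const double unpacked_distance)
{
    if (weight == INVALID_EDGE_WEIGHT)
        return std::numeric_limits<double>::max();
    return unpacked_distance;
}

int main()
{
    std::cout << to_network_distance(1500, 612.3) << '\n';              // prints 612.3
    std::cout << to_network_distance(INVALID_EDGE_WEIGHT, 0.0) << '\n'; // prints double max
}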
@@ -140,8 +140,13 @@ InternalRouteResult directShortestPathSearch(
     EdgeWeight weight;
     NodeID source_node, target_node;
     std::vector<EdgeID> unpacked_edges;
-    std::tie(weight, source_node, target_node, unpacked_edges) = mld::search(
-        facade, forward_heap, reverse_heap, DO_NOT_FORCE_LOOPS, DO_NOT_FORCE_LOOPS, phantom_nodes);
+    std::tie(weight, source_node, target_node, unpacked_edges) = mld::search(facade,
+                                                                             forward_heap,
+                                                                             reverse_heap,
+                                                                             DO_NOT_FORCE_LOOPS,
+                                                                             DO_NOT_FORCE_LOOPS,
+                                                                             INVALID_EDGE_WEIGHT,
+                                                                             phantom_nodes);
 
     return extractRoute(facade, weight, source_node, target_node, unpacked_edges, phantom_nodes);
 }
@@ -198,9 +198,9 @@ SubMatchingList mapMatching(SearchEngineData<Algorithm> &engine_working_data,
 
         const auto haversine_distance = util::coordinate_calculation::haversineDistance(
             prev_coordinate, current_coordinate);
-        // assumes minumum of 0.1 m/s
-        const int duration_upper_bound =
-            ((haversine_distance + max_distance_delta) * 0.25) * 10;
+        // assumes minimum of 4 m/s
+        const EdgeWeight weight_upper_bound =
+            ((haversine_distance + max_distance_delta) / 4.) * facade.GetWeightMultiplier();
 
         // compute d_t for this timestamp and the next one
         for (const auto s : util::irange<std::size_t>(0UL, prev_viterbi.size()))
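The bound fed into the distance query is now expressed in weight units rather than hard-coded deciseconds: metres divided by an assumed 4 m/s floor gives seconds, and the facade's weight multiplier converts that into the units the search compares against. The old `* 0.25 * 10` appears to encode the same 4 m/s floor despite the previous comment's 0.1 m/s, but it baked the factor of 10 in instead of asking the facade. A worked example with made-up numbers; the multiplier of 10 corresponds to default duration-based weights in deciseconds and is an assumption of this example:

#include <iostream>

int main()
{
    // Example inputs, not taken from the commit.
    const double haversine_distance = 500.0; // metres between consecutive GPS samples
    const double max_distance_delta = 100.0; // map-matching tolerance, metres
    const double weight_multiplier = 10.0;   // assumed default: duration weights in deciseconds

    // (500 m + 100 m) / 4 m/s = 150 s; 150 s * 10 = 1500 weight units.
    const double weight_upper_bound =
        ((haversine_distance + max_distance_delta) / 4.) * weight_multiplier;
    std::cout << weight_upper_bound << '\n'; // prints 1500
}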
@@ -227,7 +227,7 @@ SubMatchingList mapMatching(SearchEngineData<Algorithm> &engine_working_data,
                                                    reverse_core_heap,
                                                    prev_unbroken_timestamps_list[s].phantom_node,
                                                    current_timestamps_list[s_prime].phantom_node,
-                                                   duration_upper_bound);
+                                                   weight_upper_bound);
 
                 // get distance diff between loc1/2 and locs/s_prime
                 const auto d_t = std::abs(network_distance - haversine_distance);