osrm-backend/include/engine/routing_algorithms/routing_base.hpp

#ifndef ROUTING_BASE_HPP
#define ROUTING_BASE_HPP
#include "extractor/guidance/turn_instruction.hpp"
#include "engine/edge_unpacker.hpp"
#include "engine/internal_route_result.hpp"
#include "engine/search_engine_data.hpp"
#include "util/coordinate_calculation.hpp"
#include "util/guidance/turn_bearing.hpp"
#include "util/typedefs.hpp"
#include <boost/assert.hpp>

#include <algorithm>
#include <array>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <limits>
#include <numeric>
#include <stack>
#include <tuple>
#include <utility>
#include <vector>
namespace osrm
{
namespace engine
{
namespace routing_algorithms
{
template <class DataFacadeT, class Derived> class BasicRoutingInterface
{
private:
using EdgeData = typename DataFacadeT::EdgeData;
public:
/*
min_edge_offset is needed in case we use multiple
nodes as start/target nodes with different (even negative) offsets.
In that case the termination criterion is not correct
anymore.
Example:
forward heap: a(-100), b(0),
reverse heap: c(0), d(100)
a --- d
\ /
/ \
b --- c
This is equivalent to running a bi-directional Dijkstra on the following graph:
a --- d
/ \ / \
y x z
\ / \ /
b --- c
The graph is constructed by inserting nodes y and z that are connected to the initial nodes
via the edges (y, a) with weight -100, (y, b) with weight 0,
(d, z) with weight 100, and (c, z) with weight 0, respectively.
Since we are dealing with a graph that contains _negative_ edges,
we need to add an offset to the termination criterion.
*/
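// Concretely, the step below may only stop once weight + min_edge_offset exceeds the
// current upper bound, so that a negative start offset can never cause premature
// termination.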
void RoutingStep(const DataFacadeT &facade,
SearchEngineData::QueryHeap &forward_heap,
SearchEngineData::QueryHeap &reverse_heap,
NodeID &middle_node_id,
std::int32_t &upper_bound,
std::int32_t min_edge_offset,
const bool forward_direction,
const bool stalling,
const bool force_loop_forward,
const bool force_loop_reverse) const
{
const NodeID node = forward_heap.DeleteMin();
const std::int32_t weight = forward_heap.GetKey(node);
if (reverse_heap.WasInserted(node))
{
const std::int32_t new_weight = reverse_heap.GetKey(node) + weight;
if (new_weight < upper_bound)
{
// if loops are forced, they are so at the source
if ((force_loop_forward && forward_heap.GetData(node).parent == node) ||
(force_loop_reverse && reverse_heap.GetData(node).parent == node) ||
// in this case we are looking at a bi-directional way where the source
// and target phantom are on the same edge based node
new_weight < 0)
{
// check whether there is a loop present at the node
for (const auto edge : facade.GetAdjacentEdgeRange(node))
{
const EdgeData &data = facade.GetEdgeData(edge);
const bool forward_direction_flag =
(forward_direction ? data.forward : data.backward);
if (forward_direction_flag)
{
const NodeID to = facade.GetTarget(edge);
if (to == node)
{
const EdgeWeight edge_weight = data.weight;
const std::int32_t loop_weight = new_weight + edge_weight;
if (loop_weight >= 0 && loop_weight < upper_bound)
{
middle_node_id = node;
upper_bound = loop_weight;
}
}
}
}
}
else
{
BOOST_ASSERT(new_weight >= 0);
middle_node_id = node;
upper_bound = new_weight;
}
}
}
// make sure we don't terminate too early if we initialize the weight
// for the nodes in the forward heap with the forward/reverse offset
BOOST_ASSERT(min_edge_offset <= 0);
if (weight + min_edge_offset > upper_bound)
{
forward_heap.DeleteAll();
return;
}
// Stalling
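// (stall-on-demand): if the current node could be reached more cheaply via an edge usable
// in the opposite search direction, it cannot lie on a shortest path, so its outgoing
// edges are not relaxed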
if (stalling)
{
for (const auto edge : facade.GetAdjacentEdgeRange(node))
{
const EdgeData &data = facade.GetEdgeData(edge);
const bool reverse_flag = ((!forward_direction) ? data.forward : data.backward);
if (reverse_flag)
{
const NodeID to = facade.GetTarget(edge);
const EdgeWeight edge_weight = data.weight;
BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
if (forward_heap.WasInserted(to))
{
if (forward_heap.GetKey(to) + edge_weight < weight)
{
return;
}
}
}
}
}
for (const auto edge : facade.GetAdjacentEdgeRange(node))
{
const EdgeData &data = facade.GetEdgeData(edge);
const bool forward_direction_flag = (forward_direction ? data.forward : data.backward);
if (forward_direction_flag)
{
const NodeID to = facade.GetTarget(edge);
const EdgeWeight edge_weight = data.weight;
BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
const int to_weight = weight + edge_weight;
// New Node discovered -> Add to Heap + Node Info Storage
if (!forward_heap.WasInserted(to))
{
forward_heap.Insert(to, to_weight, node);
}
// Found a shorter Path -> Update weight
else if (to_weight < forward_heap.GetKey(to))
{
// new parent
forward_heap.GetData(to).parent = node;
forward_heap.DecreaseKey(to, to_weight);
}
}
}
}
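// Returns the weight of the cheapest self-loop edge at `node`, or INVALID_EDGE_WEIGHT if
// there is none. Used when a route has to leave an edge-based node and return to it,
// e.g. when source and target phantoms map onto the same node.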
inline EdgeWeight GetLoopWeight(const DataFacadeT &facade, NodeID node) const
{
EdgeWeight loop_weight = INVALID_EDGE_WEIGHT;
for (auto edge : facade.GetAdjacentEdgeRange(node))
{
const auto &data = facade.GetEdgeData(edge);
if (data.forward)
{
const NodeID to = facade.GetTarget(edge);
if (to == node)
{
loop_weight = std::min(loop_weight, data.weight);
}
}
}
return loop_weight;
}
template <typename RandomIter>
void UnpackPath(const DataFacadeT &facade,
RandomIter packed_path_begin,
RandomIter packed_path_end,
const PhantomNodes &phantom_node_pair,
std::vector<PathData> &unpacked_path) const
{
BOOST_ASSERT(std::distance(packed_path_begin, packed_path_end) > 0);
const bool start_traversed_in_reverse =
(*packed_path_begin != phantom_node_pair.source_phantom.forward_segment_id.id);
const bool target_traversed_in_reverse =
(*std::prev(packed_path_end) != phantom_node_pair.target_phantom.forward_segment_id.id);
BOOST_ASSERT(*packed_path_begin == phantom_node_pair.source_phantom.forward_segment_id.id ||
*packed_path_begin == phantom_node_pair.source_phantom.reverse_segment_id.id);
BOOST_ASSERT(
*std::prev(packed_path_end) == phantom_node_pair.target_phantom.forward_segment_id.id ||
*std::prev(packed_path_end) == phantom_node_pair.target_phantom.reverse_segment_id.id);
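// Unpack the CH shortcuts along the packed path. For every original (non-shortcut) edge
// the callback below appends one PathData entry per uncompressed geometry segment.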
UnpackCHPath(
facade,
packed_path_begin,
packed_path_end,
[this,
&facade,
&unpacked_path,
&phantom_node_pair,
&start_traversed_in_reverse,
&target_traversed_in_reverse](std::pair<NodeID, NodeID> & /* edge */,
const EdgeData &edge_data) {
BOOST_ASSERT_MSG(!edge_data.shortcut, "original edge flagged as shortcut");
const auto name_index = facade.GetNameIndexFromEdgeID(edge_data.id);
const auto turn_instruction = facade.GetTurnInstructionForEdgeID(edge_data.id);
const extractor::TravelMode travel_mode =
(unpacked_path.empty() && start_traversed_in_reverse)
? phantom_node_pair.source_phantom.backward_travel_mode
: facade.GetTravelModeForEdgeID(edge_data.id);
const auto geometry_index = facade.GetGeometryIndexForEdgeID(edge_data.id);
std::vector<NodeID> id_vector;
std::vector<EdgeWeight> weight_vector;
std::vector<DatasourceID> datasource_vector;
if (geometry_index.forward)
{
id_vector = facade.GetUncompressedForwardGeometry(geometry_index.id);
weight_vector = facade.GetUncompressedForwardWeights(geometry_index.id);
datasource_vector = facade.GetUncompressedForwardDatasources(geometry_index.id);
}
else
{
id_vector = facade.GetUncompressedReverseGeometry(geometry_index.id);
weight_vector = facade.GetUncompressedReverseWeights(geometry_index.id);
datasource_vector = facade.GetUncompressedReverseDatasources(geometry_index.id);
}
BOOST_ASSERT(id_vector.size() > 0);
BOOST_ASSERT(weight_vector.size() > 0);
BOOST_ASSERT(datasource_vector.size() > 0);
const auto total_weight =
std::accumulate(weight_vector.begin(), weight_vector.end(), 0);
BOOST_ASSERT(weight_vector.size() == id_vector.size() - 1);
const bool is_first_segment = unpacked_path.empty();
const std::size_t start_index =
(is_first_segment
? ((start_traversed_in_reverse)
? weight_vector.size() -
phantom_node_pair.source_phantom.fwd_segment_position - 1
: phantom_node_pair.source_phantom.fwd_segment_position)
: 0);
const std::size_t end_index = weight_vector.size();
BOOST_ASSERT(start_index >= 0);
BOOST_ASSERT(start_index < end_index);
for (std::size_t segment_idx = start_index; segment_idx < end_index; ++segment_idx)
{
unpacked_path.push_back(
PathData{id_vector[segment_idx + 1],
name_index,
weight_vector[segment_idx],
extractor::guidance::TurnInstruction::NO_TURN(),
{{0, INVALID_LANEID}, INVALID_LANE_DESCRIPTIONID},
travel_mode,
INVALID_ENTRY_CLASSID,
datasource_vector[segment_idx],
util::guidance::TurnBearing(0),
util::guidance::TurnBearing(0)});
}
BOOST_ASSERT(unpacked_path.size() > 0);
if (facade.hasLaneData(edge_data.id))
unpacked_path.back().lane_data = facade.GetLaneData(edge_data.id);
unpacked_path.back().entry_classid = facade.GetEntryClassID(edge_data.id);
unpacked_path.back().turn_instruction = turn_instruction;
unpacked_path.back().duration_until_turn += (edge_data.weight - total_weight);
unpacked_path.back().pre_turn_bearing = facade.PreTurnBearing(edge_data.id);
unpacked_path.back().post_turn_bearing = facade.PostTurnBearing(edge_data.id);
});
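// The callback above only emits segments for the edges between the nodes of the packed
// path. The geometry of the last edge-based node, up to the target phantom, still has to
// be appended below.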
std::size_t start_index = 0, end_index = 0;
std::vector<unsigned> id_vector;
std::vector<EdgeWeight> weight_vector;
std::vector<DatasourceID> datasource_vector;
const bool is_local_path = (phantom_node_pair.source_phantom.packed_geometry_id ==
phantom_node_pair.target_phantom.packed_geometry_id) &&
unpacked_path.empty();
if (target_traversed_in_reverse)
{
id_vector = facade.GetUncompressedReverseGeometry(
phantom_node_pair.target_phantom.packed_geometry_id);
weight_vector = facade.GetUncompressedReverseWeights(
phantom_node_pair.target_phantom.packed_geometry_id);
datasource_vector = facade.GetUncompressedReverseDatasources(
phantom_node_pair.target_phantom.packed_geometry_id);
if (is_local_path)
{
start_index = weight_vector.size() -
phantom_node_pair.source_phantom.fwd_segment_position - 1;
}
end_index =
weight_vector.size() - phantom_node_pair.target_phantom.fwd_segment_position - 1;
}
else
{
if (is_local_path)
{
start_index = phantom_node_pair.source_phantom.fwd_segment_position;
}
end_index = phantom_node_pair.target_phantom.fwd_segment_position;
id_vector = facade.GetUncompressedForwardGeometry(
phantom_node_pair.target_phantom.packed_geometry_id);
weight_vector = facade.GetUncompressedForwardWeights(
phantom_node_pair.target_phantom.packed_geometry_id);
datasource_vector = facade.GetUncompressedForwardDatasources(
phantom_node_pair.target_phantom.packed_geometry_id);
}
// Given the following compressed geometry:
// U---v---w---x---y---Z
//   s           t
// s: fwd_segment 0
// t: fwd_segment 3
// -> (U, v), (v, w), (w, x)
// note that (x, t) is _not_ included but needs to be added later.
for (std::size_t segment_idx = start_index; segment_idx != end_index;
(start_index < end_index ? ++segment_idx : --segment_idx))
{
BOOST_ASSERT(segment_idx < id_vector.size() - 1);
BOOST_ASSERT(phantom_node_pair.target_phantom.forward_travel_mode > 0);
unpacked_path.push_back(PathData{
id_vector[start_index < end_index ? segment_idx + 1 : segment_idx - 1],
phantom_node_pair.target_phantom.name_id,
weight_vector[segment_idx],
extractor::guidance::TurnInstruction::NO_TURN(),
{{0, INVALID_LANEID}, INVALID_LANE_DESCRIPTIONID},
target_traversed_in_reverse ? phantom_node_pair.target_phantom.backward_travel_mode
: phantom_node_pair.target_phantom.forward_travel_mode,
INVALID_ENTRY_CLASSID,
datasource_vector[segment_idx],
util::guidance::TurnBearing(0),
util::guidance::TurnBearing(0)});
}
if (unpacked_path.size() > 0)
{
const auto source_weight = start_traversed_in_reverse
? phantom_node_pair.source_phantom.reverse_weight
: phantom_node_pair.source_phantom.forward_weight;
// The above code will create segments for (v, w), (w, x), (x, y) and (y, Z).
// However, the duration of the first segment needs to be adjusted to account for the
// source phantom sitting in the middle of that segment. We do this by subtracting v--s
// from the duration.
// Since it's possible duration_until_turn can be less than source_weight here if
// a negative enough turn penalty is used to modify this edge weight during
// osrm-contract, we clamp to 0 here so as not to return a negative duration
// for this segment.
// TODO this creates a scenario where it's possible the duration from a phantom
// node to the first turn would be the same as from end to end of a segment,
// which is obviously incorrect and not ideal...
unpacked_path.front().duration_until_turn =
std::max(unpacked_path.front().duration_until_turn - source_weight, 0);
}
// There is no equivalent to a node-based node in an edge-expanded graph.
// Two equivalent routes may start (or end) at different node-based edges, since they are
// inserted with an offset describing how much "weight" of the edge has already been
// traversed. Depending on the offset, the last node needs to be removed.
if (unpacked_path.size() > 1)
{
const std::size_t last_index = unpacked_path.size() - 1;
const std::size_t second_to_last_index = last_index - 1;
if (unpacked_path[last_index].turn_via_node ==
unpacked_path[second_to_last_index].turn_via_node)
{
unpacked_path.pop_back();
}
BOOST_ASSERT(!unpacked_path.empty());
}
}
/**
* Unpacks a single edge (NodeID->NodeID) from the CH graph down to its original non-shortcut
* route.
* @param from the node the CH edge starts at
* @param to the node the CH edge finishes at
* @param unpacked_path the sequence of original NodeIDs that make up the expanded CH edge
*/
void UnpackEdge(const DataFacadeT &facade,
const NodeID from,
const NodeID to,
std::vector<NodeID> &unpacked_path) const
{
std::array<NodeID, 2> path{{from, to}};
UnpackCHPath(
facade,
path.begin(),
path.end(),
[&unpacked_path](const std::pair<NodeID, NodeID> &edge, const EdgeData & /* data */) {
unpacked_path.emplace_back(edge.first);
});
unpacked_path.emplace_back(to);
}
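// Reconstructs the packed path by following the parent pointers from the meeting node in
// both heaps; the result runs from the source node via middle_node_id to the target node.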
void RetrievePackedPathFromHeap(const SearchEngineData::QueryHeap &forward_heap,
const SearchEngineData::QueryHeap &reverse_heap,
const NodeID middle_node_id,
std::vector<NodeID> &packed_path) const
{
RetrievePackedPathFromSingleHeap(forward_heap, middle_node_id, packed_path);
std::reverse(packed_path.begin(), packed_path.end());
packed_path.emplace_back(middle_node_id);
RetrievePackedPathFromSingleHeap(reverse_heap, middle_node_id, packed_path);
}
void RetrievePackedPathFromSingleHeap(const SearchEngineData::QueryHeap &search_heap,
const NodeID middle_node_id,
std::vector<NodeID> &packed_path) const
{
NodeID current_node_id = middle_node_id;
// All initial nodes have themselves as parent, or a node not in the heap in the case of
// a core search heap. We need to distinguish core entry nodes from start nodes, since
// otherwise start-node-specific code that assumes node == node.parent (e.g. the loop
// code) might get activated.
while (current_node_id != search_heap.GetData(current_node_id).parent &&
search_heap.WasInserted(search_heap.GetData(current_node_id).parent))
{
current_node_id = search_heap.GetData(current_node_id).parent;
packed_path.emplace_back(current_node_id);
}
}
// Assumes that the heaps are already set up correctly.
// ATTENTION: This only works if no additional offset is supplied next to the phantom node
// offsets. If additional offsets are supplied, you might have to force a loop first.
// A forced loop might be necessary if source and target are on the same segment: if the
// offset in the respective direction is larger for the source than for the target
// (e.g. source_phantom.forward_segment_id == target_phantom.forward_segment_id &&
// source_phantom.GetForwardWeightPlusOffset() >
// target_phantom.GetForwardWeightPlusOffset()),
// a forced loop is required whenever the heaps have been initialized with positive
// offsets.
void Search(const DataFacadeT &facade,
SearchEngineData::QueryHeap &forward_heap,
SearchEngineData::QueryHeap &reverse_heap,
std::int32_t &weight,
std::vector<NodeID> &packed_leg,
const bool force_loop_forward,
const bool force_loop_reverse,
const int duration_upper_bound = INVALID_EDGE_WEIGHT) const
{
NodeID middle = SPECIAL_NODEID;
weight = duration_upper_bound;
// get offset to account for offsets on phantom nodes on compressed edges
const auto min_edge_offset = std::min(0, forward_heap.MinKey());
BOOST_ASSERT(min_edge_offset <= 0);
// we only ever insert negative offsets for nodes in the forward heap
BOOST_ASSERT(reverse_heap.MinKey() >= 0);
// run a two-target Dijkstra routing step.
const constexpr bool STALLING_ENABLED = true;
while (0 < (forward_heap.Size() + reverse_heap.Size()))
{
if (!forward_heap.Empty())
{
RoutingStep(facade,
forward_heap,
reverse_heap,
middle,
weight,
min_edge_offset,
true,
STALLING_ENABLED,
force_loop_forward,
force_loop_reverse);
}
if (!reverse_heap.Empty())
{
RoutingStep(facade,
reverse_heap,
forward_heap,
middle,
weight,
min_edge_offset,
false,
STALLING_ENABLED,
force_loop_reverse,
force_loop_forward);
}
}
// No path found for both target nodes?
if (duration_upper_bound <= weight || SPECIAL_NODEID == middle)
{
weight = INVALID_EDGE_WEIGHT;
return;
}
// Was a path over one of the forward/reverse nodes not found?
BOOST_ASSERT_MSG((SPECIAL_NODEID != middle && INVALID_EDGE_WEIGHT != weight),
"no path found");
// make sure to correctly unpack loops
if (weight != forward_heap.GetKey(middle) + reverse_heap.GetKey(middle))
{
// self loop makes up the full path
packed_leg.push_back(middle);
packed_leg.push_back(middle);
}
else
{
RetrievePackedPathFromHeap(forward_heap, reverse_heap, middle, packed_leg);
}
}
// Assumes that the heaps are already set up correctly.
// A forced loop might be necessary if source and target are on the same segment: if the
// offset in the respective direction is larger for the source than for the target
// (e.g. source_phantom.forward_segment_id == target_phantom.forward_segment_id &&
// source_phantom.GetForwardWeightPlusOffset() >
// target_phantom.GetForwardWeightPlusOffset()),
// a forced loop is required whenever the heaps have been initialized with positive
// offsets.
void SearchWithCore(const DataFacadeT &facade,
SearchEngineData::QueryHeap &forward_heap,
SearchEngineData::QueryHeap &reverse_heap,
SearchEngineData::QueryHeap &forward_core_heap,
SearchEngineData::QueryHeap &reverse_core_heap,
int &weight,
std::vector<NodeID> &packed_leg,
const bool force_loop_forward,
const bool force_loop_reverse,
int duration_upper_bound = INVALID_EDGE_WEIGHT) const
{
NodeID middle = SPECIAL_NODEID;
weight = duration_upper_bound;
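// Nodes settled on the core are not expanded in this phase. They are buffered as
// (node, key, parent) entry points and re-inserted into the dedicated core heaps once the
// non-core search has drained.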
using CoreEntryPoint = std::tuple<NodeID, EdgeWeight, NodeID>;
std::vector<CoreEntryPoint> forward_entry_points;
std::vector<CoreEntryPoint> reverse_entry_points;
// get offset to account for offsets on phantom nodes on compressed edges
const auto min_edge_offset = std::min(0, forward_heap.MinKey());
// we only ever insert negative offsets for nodes in the forward heap
BOOST_ASSERT(reverse_heap.MinKey() >= 0);
const constexpr bool STALLING_ENABLED = true;
// run a two-target Dijkstra routing step.
while (0 < (forward_heap.Size() + reverse_heap.Size()))
{
if (!forward_heap.Empty())
{
if (facade.IsCoreNode(forward_heap.Min()))
{
const NodeID node = forward_heap.DeleteMin();
const int key = forward_heap.GetKey(node);
forward_entry_points.emplace_back(node, key, forward_heap.GetData(node).parent);
}
else
{
RoutingStep(facade,
forward_heap,
reverse_heap,
middle,
weight,
min_edge_offset,
true,
STALLING_ENABLED,
force_loop_forward,
force_loop_reverse);
}
}
if (!reverse_heap.Empty())
{
if (facade.IsCoreNode(reverse_heap.Min()))
{
const NodeID node = reverse_heap.DeleteMin();
const int key = reverse_heap.GetKey(node);
reverse_entry_points.emplace_back(node, key, reverse_heap.GetData(node).parent);
}
else
{
RoutingStep(facade,
reverse_heap,
forward_heap,
middle,
weight,
min_edge_offset,
false,
STALLING_ENABLED,
force_loop_reverse,
force_loop_forward);
}
}
}
const auto insertInCoreHeap = [](const CoreEntryPoint &p,
SearchEngineData::QueryHeap &core_heap) {
NodeID id;
EdgeWeight weight;
NodeID parent;
// TODO this should use std::apply when we get c++17 support
std::tie(id, weight, parent) = p;
core_heap.Insert(id, weight, parent);
};
forward_core_heap.Clear();
for (const auto &p : forward_entry_points)
{
insertInCoreHeap(p, forward_core_heap);
}
reverse_core_heap.Clear();
for (const auto &p : reverse_entry_points)
{
insertInCoreHeap(p, reverse_core_heap);
}
// get offset to account for offsets on phantom nodes on compressed edges
int min_core_edge_offset = 0;
if (forward_core_heap.Size() > 0)
{
min_core_edge_offset = std::min(min_core_edge_offset, forward_core_heap.MinKey());
}
if (reverse_core_heap.Size() > 0 && reverse_core_heap.MinKey() < 0)
{
min_core_edge_offset = std::min(min_core_edge_offset, reverse_core_heap.MinKey());
}
BOOST_ASSERT(min_core_edge_offset <= 0);
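// Stall-on-demand only applies to the contracted part of the graph, so it is disabled for
// the core phase. The loop below terminates once the sum of both heap minima can no
// longer improve on the current best weight.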
// run two-target Dijkstra routing step on core with termination criterion
const constexpr bool STALLING_DISABLED = false;
while (0 < forward_core_heap.Size() && 0 < reverse_core_heap.Size() &&
weight > (forward_core_heap.MinKey() + reverse_core_heap.MinKey()))
{
RoutingStep(facade,
forward_core_heap,
reverse_core_heap,
middle,
weight,
min_core_edge_offset,
true,
STALLING_DISABLED,
force_loop_forward,
force_loop_reverse);
RoutingStep(facade,
reverse_core_heap,
forward_core_heap,
middle,
weight,
min_core_edge_offset,
false,
STALLING_DISABLED,
force_loop_reverse,
force_loop_forward);
}
// No path found for both target nodes?
if (duration_upper_bound <= weight || SPECIAL_NODEID == middle)
{
weight = INVALID_EDGE_WEIGHT;
return;
}
// Was a path over one of the forward/reverse nodes not found?
BOOST_ASSERT_MSG((SPECIAL_NODEID != middle && INVALID_EDGE_WEIGHT != weight),
"no path found");
// we need to unpack sub path from core heaps
if (facade.IsCoreNode(middle))
{
if (weight != forward_core_heap.GetKey(middle) + reverse_core_heap.GetKey(middle))
{
// self loop
BOOST_ASSERT(forward_core_heap.GetData(middle).parent == middle &&
reverse_core_heap.GetData(middle).parent == middle);
packed_leg.push_back(middle);
packed_leg.push_back(middle);
}
else
{
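// the meeting node lies in the core: stitch together the outer forward path down to the
// core entry point, the packed core leg, and the outer reverse path from the core exit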
std::vector<NodeID> packed_core_leg;
RetrievePackedPathFromHeap(
forward_core_heap, reverse_core_heap, middle, packed_core_leg);
BOOST_ASSERT(packed_core_leg.size() > 0);
RetrievePackedPathFromSingleHeap(forward_heap, packed_core_leg.front(), packed_leg);
std::reverse(packed_leg.begin(), packed_leg.end());
packed_leg.insert(packed_leg.end(), packed_core_leg.begin(), packed_core_leg.end());
RetrievePackedPathFromSingleHeap(reverse_heap, packed_core_leg.back(), packed_leg);
}
}
else
{
if (weight != forward_heap.GetKey(middle) + reverse_heap.GetKey(middle))
{
// self loop
BOOST_ASSERT(forward_heap.GetData(middle).parent == middle &&
reverse_heap.GetData(middle).parent == middle);
packed_leg.push_back(middle);
packed_leg.push_back(middle);
}
else
{
RetrievePackedPathFromHeap(forward_heap, reverse_heap, middle, packed_leg);
}
}
}
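// Returns true if source and target phantom sit on the same edge-based node but the
// target cannot be reached from the source by simply continuing along that node, so the
// route has to leave the node and return to it via a self-loop.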
bool NeedsLoopForward(const PhantomNode &source_phantom,
const PhantomNode &target_phantom) const
{
return source_phantom.forward_segment_id.enabled &&
target_phantom.forward_segment_id.enabled &&
source_phantom.forward_segment_id.id == target_phantom.forward_segment_id.id &&
source_phantom.GetForwardWeightPlusOffset() >
target_phantom.GetForwardWeightPlusOffset();
}
bool NeedsLoopBackwards(const PhantomNode &source_phantom,
const PhantomNode &target_phantom) const
{
return source_phantom.reverse_segment_id.enabled &&
target_phantom.reverse_segment_id.enabled &&
source_phantom.reverse_segment_id.id == target_phantom.reverse_segment_id.id &&
source_phantom.GetReverseWeightPlusOffset() >
target_phantom.GetReverseWeightPlusOffset();
}
double GetPathDistance(const DataFacadeT &facade,
const std::vector<NodeID> &packed_path,
const PhantomNode &source_phantom,
const PhantomNode &target_phantom) const
{
std::vector<PathData> unpacked_path;
PhantomNodes nodes;
nodes.source_phantom = source_phantom;
nodes.target_phantom = target_phantom;
UnpackPath(facade, packed_path.begin(), packed_path.end(), nodes, unpacked_path);
using util::coordinate_calculation::detail::DEGREE_TO_RAD;
using util::coordinate_calculation::detail::EARTH_RADIUS;
double distance = 0;
double prev_lat =
static_cast<double>(toFloating(source_phantom.location.lat)) * DEGREE_TO_RAD;
double prev_lon =
static_cast<double>(toFloating(source_phantom.location.lon)) * DEGREE_TO_RAD;
double prev_cos = std::cos(prev_lat);
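// accumulate the great-circle (haversine) distance between consecutive coordinates of the
// unpacked path, from the source phantom location to the target phantom location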
for (const auto &p : unpacked_path)
{
const auto current_coordinate = facade.GetCoordinateOfNode(p.turn_via_node);
const double current_lat =
static_cast<double>(toFloating(current_coordinate.lat)) * DEGREE_TO_RAD;
const double current_lon =
static_cast<double>(toFloating(current_coordinate.lon)) * DEGREE_TO_RAD;
const double current_cos = std::cos(current_lat);
const double sin_dlon = std::sin((prev_lon - current_lon) / 2.0);
const double sin_dlat = std::sin((prev_lat - current_lat) / 2.0);
const double aharv = sin_dlat * sin_dlat + prev_cos * current_cos * sin_dlon * sin_dlon;
const double charv = 2. * std::atan2(std::sqrt(aharv), std::sqrt(1.0 - aharv));
distance += EARTH_RADIUS * charv;
prev_lat = current_lat;
prev_lon = current_lon;
prev_cos = current_cos;
}
const double current_lat =
static_cast<double>(toFloating(target_phantom.location.lat)) * DEGREE_TO_RAD;
const double current_lon =
static_cast<double>(toFloating(target_phantom.location.lon)) * DEGREE_TO_RAD;
const double current_cos = std::cos(current_lat);
const double sin_dlon = std::sin((prev_lon - current_lon) / 2.0);
const double sin_dlat = std::sin((prev_lat - current_lat) / 2.0);
const double aharv = sin_dlat * sin_dlat + prev_cos * current_cos * sin_dlon * sin_dlon;
const double charv = 2. * std::atan2(std::sqrt(aharv), std::sqrt(1.0 - aharv));
distance += EARTH_RADIUS * charv;
return distance;
}
// Requires the heaps to be empty.
// If the heaps are to be initialized outside of this function instead, force_loop
// parameters might have to be added.
double GetNetworkDistanceWithCore(const DataFacadeT &facade,
SearchEngineData::QueryHeap &forward_heap,
SearchEngineData::QueryHeap &reverse_heap,
SearchEngineData::QueryHeap &forward_core_heap,
SearchEngineData::QueryHeap &reverse_core_heap,
const PhantomNode &source_phantom,
const PhantomNode &target_phantom,
int duration_upper_bound = INVALID_EDGE_WEIGHT) const
{
BOOST_ASSERT(forward_heap.Empty());
BOOST_ASSERT(reverse_heap.Empty());
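// seed the forward heap with the source phantom's segments using negated weights (cf. the
// min_edge_offset comment above) and the reverse heap with the target phantom's segments
// using positive weights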
if (source_phantom.forward_segment_id.enabled)
{
forward_heap.Insert(source_phantom.forward_segment_id.id,
-source_phantom.GetForwardWeightPlusOffset(),
source_phantom.forward_segment_id.id);
}
if (source_phantom.reverse_segment_id.enabled)
{
forward_heap.Insert(source_phantom.reverse_segment_id.id,
-source_phantom.GetReverseWeightPlusOffset(),
source_phantom.reverse_segment_id.id);
}
if (target_phantom.forward_segment_id.enabled)
{
reverse_heap.Insert(target_phantom.forward_segment_id.id,
target_phantom.GetForwardWeightPlusOffset(),
target_phantom.forward_segment_id.id);
}
if (target_phantom.reverse_segment_id.enabled)
{
reverse_heap.Insert(target_phantom.reverse_segment_id.id,
target_phantom.GetReverseWeightPlusOffset(),
target_phantom.reverse_segment_id.id);
}
const bool constexpr DO_NOT_FORCE_LOOPS =
false; // prevents forcing of loops, since offsets are set correctly
int duration = INVALID_EDGE_WEIGHT;
std::vector<NodeID> packed_path;
SearchWithCore(facade,
forward_heap,
reverse_heap,
forward_core_heap,
reverse_core_heap,
duration,
packed_path,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
duration_upper_bound);
if (duration == INVALID_EDGE_WEIGHT)
{
return std::numeric_limits<double>::max();
}
return GetPathDistance(facade, packed_path, source_phantom, target_phantom);
}
// Requires the heaps to be empty.
// If the heaps are to be initialized outside of this function instead, force_loop
// parameters might have to be added.
double GetNetworkDistance(const DataFacadeT &facade,
SearchEngineData::QueryHeap &forward_heap,
SearchEngineData::QueryHeap &reverse_heap,
const PhantomNode &source_phantom,
const PhantomNode &target_phantom,
int duration_upper_bound = INVALID_EDGE_WEIGHT) const
{
BOOST_ASSERT(forward_heap.Empty());
BOOST_ASSERT(reverse_heap.Empty());
if (source_phantom.forward_segment_id.enabled)
{
forward_heap.Insert(source_phantom.forward_segment_id.id,
-source_phantom.GetForwardWeightPlusOffset(),
source_phantom.forward_segment_id.id);
}
if (source_phantom.reverse_segment_id.enabled)
{
forward_heap.Insert(source_phantom.reverse_segment_id.id,
-source_phantom.GetReverseWeightPlusOffset(),
source_phantom.reverse_segment_id.id);
}
if (target_phantom.forward_segment_id.enabled)
{
reverse_heap.Insert(target_phantom.forward_segment_id.id,
target_phantom.GetForwardWeightPlusOffset(),
target_phantom.forward_segment_id.id);
}
if (target_phantom.reverse_segment_id.enabled)
{
reverse_heap.Insert(target_phantom.reverse_segment_id.id,
target_phantom.GetReverseWeightPlusOffset(),
target_phantom.reverse_segment_id.id);
}
const bool constexpr DO_NOT_FORCE_LOOPS =
false; // prevents forcing of loops, since offsets are set correctly
int duration = INVALID_EDGE_WEIGHT;
std::vector<NodeID> packed_path;
Search(facade,
forward_heap,
reverse_heap,
duration,
packed_path,
DO_NOT_FORCE_LOOPS,
DO_NOT_FORCE_LOOPS,
duration_upper_bound);
if (duration == INVALID_EDGE_WEIGHT)
{
return std::numeric_limits<double>::max();
}
return GetPathDistance(facade, packed_path, source_phantom, target_phantom);
}
};
} // namespace routing_algorithms
} // namespace engine
} // namespace osrm
#endif // ROUTING_BASE_HPP