Store edge distances to improve matrix distance calculation.
This commit is contained in:
parent 8ba516c17e
commit 2cc32dcc88
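The hunks below all follow one pattern: an EdgeDistance member is added next to the existing weight/duration members (ContractorEdgeData, QueryEdge::EdgeData, NodeBucket, the many-to-many heap data, EdgeBasedEdge, NodeBasedEdge), and that distance is then threaded through constructors, heap insertions and edge relaxation so the distance of the weight-optimal path can be read off directly in the many-to-many (matrix) search. As a rough illustration of the idea only -- this is not code from the commit, and the names (Edge, Label, searchWithDistance) are invented for the sketch -- a plain Dijkstra that keys on weight but carries a distance along with each label looks like this:

#include <cstdint>
#include <functional>
#include <iostream>
#include <queue>
#include <unordered_map>
#include <utility>
#include <vector>

// Hypothetical, simplified stand-ins for OSRM's EdgeWeight/EdgeDistance/NodeID.
using Weight = std::int32_t;
using Distance = double;
using NodeId = std::uint32_t;

struct Edge
{
    NodeId target;
    Weight weight;
    Distance distance; // stored on the edge, as the commit does for the edge data structs
};

struct Label
{
    Weight weight;
    Distance distance; // carried alongside the weight, analogous to ManyToManyHeapData::distance
};

// One-to-all search ordered by weight; the distance of the chosen path is accumulated as a side value.
std::unordered_map<NodeId, Label> searchWithDistance(const std::vector<std::vector<Edge>> &graph,
                                                     NodeId source)
{
    using QueueEntry = std::pair<Weight, NodeId>;
    std::priority_queue<QueueEntry, std::vector<QueueEntry>, std::greater<>> heap;
    std::unordered_map<NodeId, Label> labels;

    labels[source] = {0, 0.0};
    heap.push({0, source});

    while (!heap.empty())
    {
        const auto [weight, node] = heap.top();
        heap.pop();
        if (weight > labels[node].weight)
            continue; // stale heap entry

        for (const auto &edge : graph[node])
        {
            const Weight to_weight = weight + edge.weight;
            const Distance to_distance = labels[node].distance + edge.distance;
            const auto it = labels.find(edge.target);
            if (it == labels.end() || to_weight < it->second.weight)
            {
                // Distance is never the search criterion; it just rides along with the winning weight.
                labels[edge.target] = {to_weight, to_distance};
                heap.push({to_weight, edge.target});
            }
        }
    }
    return labels;
}

int main()
{
    // Tiny graph: 0 -> 1 (weight 2, 100 m), 1 -> 2 (weight 3, 250 m), 0 -> 2 (weight 6, 200 m).
    std::vector<std::vector<Edge>> graph(3);
    graph[0] = {{1, 2, 100.0}, {2, 6, 200.0}};
    graph[1] = {{2, 3, 250.0}};

    const auto labels = searchWithDistance(graph, 0);
    // The weight-optimal path 0 -> 1 -> 2 wins (weight 5), so the reported distance is 350 m,
    // even though the direct edge 0 -> 2 is shorter in metres.
    std::cout << "weight to 2: " << labels.at(2).weight
              << ", distance: " << labels.at(2).distance << "\n";
    return 0;
}

The diff applies the same idea to OSRM's CH and MLD many-to-many searches: comparisons still happen on weight (and duration as a tie value), while the distance is simply propagated with whichever label wins.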
@@ -12,23 +12,26 @@ namespace contractor
struct ContractorEdgeData
{
ContractorEdgeData()
: weight(0), duration(0), id(0), originalEdges(0), shortcut(0), forward(0), backward(0)
: weight(0), duration(0), distance(0), id(0), originalEdges(0), shortcut(0), forward(0),
backward(0)
{
}
ContractorEdgeData(EdgeWeight weight,
EdgeWeight duration,
EdgeDistance distance,
unsigned original_edges,
unsigned id,
bool shortcut,
bool forward,
bool backward)
: weight(weight), duration(duration), id(id),
: weight(weight), duration(duration), distance(distance), id(id),
originalEdges(std::min((1u << 29) - 1u, original_edges)), shortcut(shortcut),
forward(forward), backward(backward)
{
}
EdgeWeight weight;
EdgeWeight duration;
EdgeDistance distance;
unsigned id;
unsigned originalEdges : 29;
bool shortcut : 1;
@@ -41,6 +41,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
input_edge.target,
std::max(input_edge.data.weight, 1),
input_edge.data.duration,
input_edge.data.distance,
1,
input_edge.data.turn_id,
false,
@@ -51,6 +52,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
input_edge.source,
std::max(input_edge.data.weight, 1),
input_edge.data.duration,
input_edge.data.distance,
1,
input_edge.data.turn_id,
false,
@@ -82,6 +84,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
forward_edge.data.originalEdges = reverse_edge.data.originalEdges = 1;
forward_edge.data.weight = reverse_edge.data.weight = INVALID_EDGE_WEIGHT;
forward_edge.data.duration = reverse_edge.data.duration = MAXIMAL_EDGE_DURATION;
forward_edge.data.distance = reverse_edge.data.distance = MAXIMAL_EDGE_DISTANCE;
// remove parallel edges
while (i < edges.size() && edges[i].source == source && edges[i].target == target)
{
@@ -90,12 +93,16 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
forward_edge.data.weight = std::min(edges[i].data.weight, forward_edge.data.weight);
forward_edge.data.duration =
std::min(edges[i].data.duration, forward_edge.data.duration);
forward_edge.data.distance =
std::min(edges[i].data.distance, forward_edge.data.distance);
}
if (edges[i].data.backward)
{
reverse_edge.data.weight = std::min(edges[i].data.weight, reverse_edge.data.weight);
reverse_edge.data.duration =
std::min(edges[i].data.duration, reverse_edge.data.duration);
reverse_edge.data.distance =
std::min(edges[i].data.distance, reverse_edge.data.distance);
}
++i;
}
@@ -151,6 +158,7 @@ template <class Edge, typename GraphT> inline std::vector<Edge> toEdges(GraphT g
BOOST_ASSERT_MSG(SPECIAL_NODEID != new_edge.target, "Target id invalid");
new_edge.data.weight = data.weight;
new_edge.data.duration = data.duration;
new_edge.data.distance = data.distance;
new_edge.data.shortcut = data.shortcut;
new_edge.data.turn_id = data.id;
BOOST_ASSERT_MSG(new_edge.data.turn_id != INT_MAX, // 2^31
@@ -17,7 +17,8 @@ struct QueryEdge
struct EdgeData
{
explicit EdgeData()
: turn_id(0), shortcut(false), weight(0), duration(0), forward(false), backward(false)
: turn_id(0), shortcut(false), weight(0), duration(0), forward(false), backward(false),
distance(0)
{
}
@@ -25,10 +26,11 @@ struct QueryEdge
const bool shortcut,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDistance distance,
const bool forward,
const bool backward)
: turn_id(turn_id), shortcut(shortcut), weight(weight), duration(duration),
forward(forward), backward(backward)
forward(forward), backward(backward), distance(distance)
{
}
@@ -40,6 +42,7 @@ struct QueryEdge
turn_id = other.id;
forward = other.forward;
backward = other.backward;
distance = other.distance;
}
// this ID is either the middle node of the shortcut, or the ID of the edge based node (node
// based edge) storing the appropriate data. If `shortcut` is set to true, we get the middle
@@ -50,6 +53,7 @@ struct QueryEdge
EdgeWeight duration : 30;
std::uint32_t forward : 1;
std::uint32_t backward : 1;
EdgeDistance distance;
} data;

QueryEdge() : source(SPECIAL_NODEID), target(SPECIAL_NODEID) {}
@@ -69,10 +73,11 @@ struct QueryEdge
return (source == right.source && target == right.target &&
data.weight == right.data.weight && data.duration == right.data.duration &&
data.shortcut == right.data.shortcut && data.forward == right.data.forward &&
data.backward == right.data.backward && data.turn_id == right.data.turn_id);
data.backward == right.data.backward && data.turn_id == right.data.turn_id &&
data.distance == right.data.distance);
}
};
}
}
} // namespace contractor
} // namespace osrm

#endif // QUERYEDGE_HPP
@@ -34,6 +34,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "util/coordinate.hpp"
#include "util/typedefs.hpp"

#include <iostream>

#include <boost/assert.hpp>

namespace osrm
@@ -44,14 +46,15 @@ namespace engine
struct PhantomNode
{
PhantomNode()
: forward_segment_id{SPECIAL_SEGMENTID, false},
reverse_segment_id{SPECIAL_SEGMENTID, false}, forward_weight(INVALID_EDGE_WEIGHT),
reverse_weight(INVALID_EDGE_WEIGHT), forward_weight_offset(0), reverse_weight_offset(0),
: forward_segment_id{SPECIAL_SEGMENTID, false}, reverse_segment_id{SPECIAL_SEGMENTID,
false},
forward_weight(INVALID_EDGE_WEIGHT), reverse_weight(INVALID_EDGE_WEIGHT),
forward_weight_offset(0), reverse_weight_offset(0),
forward_distance(INVALID_EDGE_DISTANCE), reverse_distance(INVALID_EDGE_DISTANCE),
forward_distance_offset(0), reverse_distance_offset(0),
forward_duration(MAXIMAL_EDGE_DURATION), reverse_duration(MAXIMAL_EDGE_DURATION),
forward_duration_offset(0), reverse_duration_offset(0), fwd_segment_position(0),
is_valid_forward_source{false}, is_valid_forward_target{false},
forward_duration_offset(0), reverse_duration_offset(0),
fwd_segment_position(0), is_valid_forward_source{false}, is_valid_forward_target{false},
is_valid_reverse_source{false}, is_valid_reverse_target{false}, bearing(0)

{
@@ -91,6 +94,9 @@ struct PhantomNode
// x <-- this is PhantomNode.location
// 0----1----2----3----4 <-- EdgeBasedGraph Node segments
BOOST_ASSERT(forward_segment_id.enabled);
std::cout << "forward_distance: " << forward_distance;
std::cout << " forward_distance_offset: " << forward_distance_offset;
std::cout << std::endl;
return forward_distance + forward_distance_offset;
}

@@ -102,6 +108,9 @@ struct PhantomNode
// x <-- this is PhantomNode.location
// 0----1----2----3----4 <-- EdgeBasedGraph Node segments
BOOST_ASSERT(reverse_segment_id.enabled);
std::cout << "reverse_distance: " << reverse_distance;
std::cout << " reverse_distance_offset: " << reverse_distance_offset;
std::cout << std::endl;
return reverse_distance + reverse_distance_offset;
}

@@ -109,8 +118,9 @@ struct PhantomNode

bool IsValid(const unsigned number_of_nodes) const
{
return location.IsValid() && ((forward_segment_id.id < number_of_nodes) ||
(reverse_segment_id.id < number_of_nodes)) &&
return location.IsValid() &&
((forward_segment_id.id < number_of_nodes) ||
(reverse_segment_id.id < number_of_nodes)) &&
((forward_weight != INVALID_EDGE_WEIGHT) ||
(reverse_weight != INVALID_EDGE_WEIGHT)) &&
((forward_duration != MAXIMAL_EDGE_DURATION) ||
@@ -234,7 +244,7 @@ struct PhantomNodes
PhantomNode source_phantom;
PhantomNode target_phantom;
};
}
}
} // namespace engine
} // namespace osrm

#endif // PHANTOM_NODES_H
@@ -25,15 +25,17 @@ struct NodeBucket
unsigned from_clique_arc : 1;
EdgeWeight weight;
EdgeDuration duration;
EdgeDistance distance;

NodeBucket(NodeID middle_node,
NodeID parent_node,
bool from_clique_arc,
unsigned column_index,
EdgeWeight weight,
EdgeDuration duration)
EdgeDuration duration,
EdgeDistance distance)
: middle_node(middle_node), parent_node(parent_node), column_index(column_index),
from_clique_arc(from_clique_arc), weight(weight), duration(duration)
from_clique_arc(from_clique_arc), weight(weight), duration(duration), distance(distance)
{
}

@@ -41,9 +43,10 @@ struct NodeBucket
NodeID parent_node,
unsigned column_index,
EdgeWeight weight,
EdgeDuration duration)
EdgeDuration duration,
EdgeDistance distance)
: middle_node(middle_node), parent_node(parent_node), column_index(column_index),
from_clique_arc(false), weight(weight), duration(duration)
from_clique_arc(false), weight(weight), duration(duration), distance(distance)
{
}
@@ -85,13 +85,17 @@ void insertSourceInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_no
{
heap.Insert(phantom_node.forward_segment_id.id,
-phantom_node.GetForwardWeightPlusOffset(),
{phantom_node.forward_segment_id.id, -phantom_node.GetForwardDuration()});
{phantom_node.forward_segment_id.id,
-phantom_node.GetForwardDuration(),
-phantom_node.GetForwardDistance()});
}
if (phantom_node.IsValidReverseSource())
{
heap.Insert(phantom_node.reverse_segment_id.id,
-phantom_node.GetReverseWeightPlusOffset(),
{phantom_node.reverse_segment_id.id, -phantom_node.GetReverseDuration()});
{phantom_node.reverse_segment_id.id,
-phantom_node.GetReverseDuration(),
-phantom_node.GetReverseDistance()});
}
}

@@ -102,13 +106,17 @@ void insertTargetInHeap(ManyToManyQueryHeap &heap, const PhantomNode &phantom_no
{
heap.Insert(phantom_node.forward_segment_id.id,
phantom_node.GetForwardWeightPlusOffset(),
{phantom_node.forward_segment_id.id, phantom_node.GetForwardDuration()});
{phantom_node.forward_segment_id.id,
phantom_node.GetForwardDuration(),
phantom_node.GetForwardDistance()});
}
if (phantom_node.IsValidReverseTarget())
{
heap.Insert(phantom_node.reverse_segment_id.id,
phantom_node.GetReverseWeightPlusOffset(),
{phantom_node.reverse_segment_id.id, phantom_node.GetReverseDuration()});
{phantom_node.reverse_segment_id.id,
phantom_node.GetReverseDuration(),
phantom_node.GetReverseDistance()});
}
}
@@ -186,9 +186,10 @@ void routingStep(const DataFacade<Algorithm> &facade,
}

template <bool UseDuration>
EdgeWeight getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
std::tuple<EdgeWeight, EdgeDistance> getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
{
EdgeWeight loop_weight = UseDuration ? MAXIMAL_EDGE_DURATION : INVALID_EDGE_WEIGHT;
EdgeDistance loop_distance = MAXIMAL_EDGE_DISTANCE;
for (auto edge : facade.GetAdjacentEdgeRange(node))
{
const auto &data = facade.GetEdgeData(edge);
@@ -198,11 +199,15 @@ EdgeWeight getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
if (to == node)
{
const auto value = UseDuration ? data.duration : data.weight;
loop_weight = std::min(loop_weight, value);
if (value < loop_weight)
{
loop_weight = value;
loop_distance = data.distance;
}
}
}
}
return loop_weight;
return std::make_tuple(loop_weight, loop_distance);
}

/**
@@ -30,7 +30,11 @@ struct HeapData
struct ManyToManyHeapData : HeapData
{
EdgeWeight duration;
ManyToManyHeapData(NodeID p, EdgeWeight duration) : HeapData(p), duration(duration) {}
EdgeDistance distance;
ManyToManyHeapData(NodeID p, EdgeWeight duration, EdgeDistance distance)
: HeapData(p), duration(duration), distance(distance)
{
}
};

template <> struct SearchEngineData<routing_algorithms::ch::Algorithm>
@@ -75,12 +79,16 @@ struct MultiLayerDijkstraHeapData
struct ManyToManyMultiLayerDijkstraHeapData : MultiLayerDijkstraHeapData
{
EdgeWeight duration;
ManyToManyMultiLayerDijkstraHeapData(NodeID p, EdgeWeight duration)
: MultiLayerDijkstraHeapData(p), duration(duration)
EdgeDistance distance;
ManyToManyMultiLayerDijkstraHeapData(NodeID p, EdgeWeight duration, EdgeDistance distance)
: MultiLayerDijkstraHeapData(p), duration(duration), distance(distance)
{
}
ManyToManyMultiLayerDijkstraHeapData(NodeID p, bool from, EdgeWeight duration)
: MultiLayerDijkstraHeapData(p, from), duration(duration)
ManyToManyMultiLayerDijkstraHeapData(NodeID p,
bool from,
EdgeWeight duration,
EdgeDistance distance)
: MultiLayerDijkstraHeapData(p, from), duration(duration), distance(distance)
{
}
};
@@ -112,7 +120,7 @@ template <> struct SearchEngineData<routing_algorithms::mld::Algorithm>
void InitializeOrClearManyToManyThreadLocalStorage(unsigned number_of_nodes,
unsigned number_of_boundary_nodes);
};
}
}
} // namespace engine
} // namespace osrm

#endif // SEARCH_ENGINE_DATA_HPP
@@ -82,7 +82,7 @@ class CompressedEdgeContainer
std::unordered_map<EdgeID, unsigned> m_reverse_edge_id_to_zipped_index_map;
std::unique_ptr<SegmentDataContainer> segment_data;
};
}
}
} // namespace extractor
} // namespace osrm

#endif // GEOMETRY_COMPRESSOR_HPP_
@@ -15,20 +15,25 @@ struct EdgeBasedEdge
public:
struct EdgeData
{
EdgeData() : turn_id(0), weight(0), duration(0), forward(false), backward(false) {}
EdgeData()
: turn_id(0), weight(0), distance(0), duration(0), forward(false), backward(false)
{
}

EdgeData(const NodeID turn_id,
const EdgeWeight weight,
const EdgeDistance distance,
const EdgeWeight duration,
const bool forward,
const bool backward)
: turn_id(turn_id), weight(weight), duration(duration), forward(forward),
backward(backward)
: turn_id(turn_id), weight(weight), distance(distance), duration(duration),
forward(forward), backward(backward)
{
}

NodeID turn_id; // ID of the edge based node (node based edge)
EdgeWeight weight;
EdgeDistance distance;
EdgeWeight duration : 30;
std::uint32_t forward : 1;
std::uint32_t backward : 1;
@@ -43,6 +48,7 @@ struct EdgeBasedEdge
const NodeID edge_id,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDistance distance,
const bool forward,
const bool backward);
EdgeBasedEdge(const NodeID source, const NodeID target, const EdgeBasedEdge::EdgeData &data);
@@ -53,7 +59,7 @@ struct EdgeBasedEdge
NodeID target;
EdgeData data;
};
static_assert(sizeof(extractor::EdgeBasedEdge) == 20,
static_assert(sizeof(extractor::EdgeBasedEdge) == 24,
"Size of extractor::EdgeBasedEdge type is "
"bigger than expected. This will influence "
"memory consumption.");
@@ -67,9 +73,10 @@ inline EdgeBasedEdge::EdgeBasedEdge(const NodeID source,
const NodeID turn_id,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDistance distance,
const bool forward,
const bool backward)
: source(source), target(target), data{turn_id, weight, duration, forward, backward}
: source(source), target(target), data{turn_id, weight, distance, duration, forward, backward}
{
}

@@ -89,7 +96,7 @@ inline bool EdgeBasedEdge::operator<(const EdgeBasedEdge &other) const
return std::tie(source, target, data.weight, unidirectional) <
std::tie(other.source, other.target, other.data.weight, other_is_unidirectional);
}
} // ns extractor
} // ns osrm
} // namespace extractor
} // namespace osrm

#endif /* EDGE_BASED_EDGE_HPP */
@@ -49,7 +49,7 @@ struct ByEdgeOrByMeterValue
using value_type = float;
value_type value;
};
}
} // namespace detail

struct InternalExtractorEdge
{
@@ -63,7 +63,7 @@ struct InternalExtractorEdge
WeightData weight_data,
DurationData duration_data,
util::Coordinate source_coordinate)
: result(source, target, 0, 0, {}, -1, {}), weight_data(std::move(weight_data)),
: result(source, target, 0, 0, 0, {}, -1, {}), weight_data(std::move(weight_data)),
duration_data(std::move(duration_data)), source_coordinate(std::move(source_coordinate))
{
}
@@ -113,7 +113,7 @@ struct InternalExtractorEdge
return v;
}
};
}
}
} // namespace extractor
} // namespace osrm

#endif // INTERNAL_EXTRACTOR_EDGE_HPP
@@ -97,6 +97,7 @@ struct NodeBasedEdge
NodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags);
@@ -107,6 +108,7 @@ struct NodeBasedEdge
NodeID target; // 32 4
EdgeWeight weight; // 32 4
EdgeDuration duration; // 32 4
EdgeDistance distance; // 32 4
GeometryID geometry_id; // 32 4
AnnotationID annotation_data; // 32 4
NodeBasedEdgeClassification flags; // 32 4
@@ -120,6 +122,7 @@ struct NodeBasedEdgeWithOSM : NodeBasedEdge
OSMNodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags);
@@ -137,7 +140,8 @@ inline NodeBasedEdgeClassification::NodeBasedEdgeClassification()
}

inline NodeBasedEdge::NodeBasedEdge()
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), weight(0), duration(0), annotation_data(-1)
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), weight(0), duration(0), distance(0),
annotation_data(-1)
{
}

@@ -145,11 +149,12 @@ inline NodeBasedEdge::NodeBasedEdge(NodeID source,
NodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags)
: source(source), target(target), weight(weight), duration(duration), geometry_id(geometry_id),
annotation_data(annotation_data), flags(flags)
: source(source), target(target), weight(weight), duration(duration), distance(distance),
geometry_id(geometry_id), annotation_data(annotation_data), flags(flags)
{
}

@@ -175,11 +180,18 @@ inline NodeBasedEdgeWithOSM::NodeBasedEdgeWithOSM(OSMNodeID source,
OSMNodeID target,
EdgeWeight weight,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
AnnotationID annotation_data,
NodeBasedEdgeClassification flags)
: NodeBasedEdge(
SPECIAL_NODEID, SPECIAL_NODEID, weight, duration, geometry_id, annotation_data, flags),
: NodeBasedEdge(SPECIAL_NODEID,
SPECIAL_NODEID,
weight,
duration,
distance,
geometry_id,
annotation_data,
flags),
osm_source_id(std::move(source)), osm_target_id(std::move(target))
{
}
@@ -189,12 +201,12 @@ inline NodeBasedEdgeWithOSM::NodeBasedEdgeWithOSM()
{
}

static_assert(sizeof(extractor::NodeBasedEdge) == 28,
static_assert(sizeof(extractor::NodeBasedEdge) == 32,
"Size of extractor::NodeBasedEdge type is "
"bigger than expected. This will influence "
"memory consumption.");

} // ns extractor
} // ns osrm
} // namespace extractor
} // namespace osrm

#endif /* NODE_BASED_EDGE_HPP */
@@ -14,6 +14,8 @@
#include <tbb/parallel_for.h>
#include <tbb/parallel_reduce.h>

#include <iostream>

#include <cstdint>

#include <algorithm>
@@ -38,11 +40,22 @@ splitBidirectionalEdges(const std::vector<extractor::EdgeBasedEdge> &edges)
if (edge.data.weight == INVALID_EDGE_WEIGHT)
continue;

std::cout << " EdgeBasedEdge {";
std::cout << " source " << edge.source << ", target: " << edge.target;
std::cout << " EdgeBasedEdgeData data {";
std::cout << " turn_id: " << edge.data.turn_id << ", weight: " << edge.data.weight;
std::cout << " distance: " << edge.data.distance << ", duration: " << edge.data.duration;
std::cout << " forward: " << (edge.data.forward == 0 ? "false" : "true")
<< ", backward: " << (edge.data.backward == 0 ? "false" : "true");
std::cout << " }";
std::cout << "}" << std::endl;

directed.emplace_back(edge.source,
edge.target,
edge.data.turn_id,
std::max(edge.data.weight, 1),
edge.data.duration,
edge.data.distance,
edge.data.forward,
edge.data.backward);

@@ -51,10 +64,26 @@ splitBidirectionalEdges(const std::vector<extractor::EdgeBasedEdge> &edges)
edge.data.turn_id,
std::max(edge.data.weight, 1),
edge.data.duration,
edge.data.distance,
edge.data.backward,
edge.data.forward);
}

std::cout << "Directed edges" << std::endl;
for (const auto &edge : directed)
{
std::cout << " EdgeBasedEdge {";
std::cout << " source " << edge.source << ", target: " << edge.target;
std::cout << " EdgeBasedEdgeData data {";
std::cout << " turn_id: " << edge.data.turn_id << ", weight: " << edge.data.weight;
std::cout << " distance: " << edge.data.distance << ", duration: " << edge.data.duration;
std::cout << " forward: " << (edge.data.forward == 0 ? "false" : "true")
<< ", backward: " << (edge.data.backward == 0 ? "false" : "true");
std::cout << " }";
std::cout << "}" << std::endl;
}
std::cout << "Done directed edges" << std::endl;

return directed;
}

@@ -69,6 +98,21 @@ std::vector<OutputEdgeT> prepareEdgesForUsageInGraph(std::vector<extractor::Edge
std::tie(rhs.source, rhs.target, lhs.data.forward, rhs.data.weight);
});

std::cout << "Directed edges after sorting" << std::endl;
for (const auto &edge : edges)
{
std::cout << " EdgeBasedEdge {";
std::cout << " source " << edge.source << ", target: " << edge.target;
std::cout << " EdgeBasedEdgeData data {";
std::cout << " turn_id: " << edge.data.turn_id << ", weight: " << edge.data.weight;
std::cout << " distance: " << edge.data.distance << ", duration: " << edge.data.duration;
std::cout << " forward: " << (edge.data.forward == 0 ? "false" : "true")
<< ", backward: " << (edge.data.backward == 0 ? "false" : "true");
std::cout << " }";
std::cout << "}" << std::endl;
}
std::cout << "Done sorted directed edges" << std::endl;

std::vector<OutputEdgeT> output_edges;
output_edges.reserve(edges.size());

@@ -90,6 +134,18 @@ std::vector<OutputEdgeT> prepareEdgesForUsageInGraph(std::vector<extractor::Edge
continue;
}

std::cout << " EdgeBasedEdge2 {";
std::cout << " source " << begin_interval->source << ", target: " << begin_interval->target;
std::cout << " EdgeBasedEdgeData data {";
std::cout << " turn_id: " << begin_interval->data.turn_id
<< ", weight: " << begin_interval->data.weight;
std::cout << " distance: " << begin_interval->data.distance
<< ", duration: " << begin_interval->data.duration;
std::cout << " forward: " << (begin_interval->data.forward == 0 ? "false" : "true")
<< ", backward: " << (begin_interval->data.backward == 0 ? "false" : "true");
std::cout << " }";
std::cout << "}" << std::endl;

BOOST_ASSERT_MSG(begin_interval->data.forward != begin_interval->data.backward,
"The forward and backward flag need to be mutally exclusive");

@@ -196,7 +252,7 @@ inline DynamicEdgeBasedGraph LoadEdgeBasedGraph(const boost::filesystem::path &p
return DynamicEdgeBasedGraph(number_of_edge_based_nodes, std::move(tidied), checksum);
}

} // ns partition
} // ns osrm
} // namespace partitioner
} // namespace osrm

#endif
@@ -1,9 +1,11 @@
#ifndef OSRM_UTIL_DEBUG_HPP_
#define OSRM_UTIL_DEBUG_HPP_

#include "extractor/edge_based_edge.hpp"
#include "extractor/node_data_container.hpp"
#include "extractor/query_node.hpp"
#include "guidance/intersection.hpp"
#include "guidance/turn_instruction.hpp"
#include "guidance/turn_lane_data.hpp"
#include "engine/guidance/route_step.hpp"
#include "util/node_based_graph.hpp"
@@ -25,7 +27,7 @@ inline std::ostream &operator<<(std::ostream &out, const Coordinate &coordinate)
<< toFloating(coordinate.lat) << "}";
return out;
}
}
} // namespace util

namespace engine
{
@@ -60,8 +62,8 @@ inline std::ostream &operator<<(std::ostream &out, const RouteStep &step)

return out;
}
}
}
} // namespace guidance
} // namespace engine

namespace guidance
{
@@ -74,7 +76,7 @@ inline std::ostream &operator<<(std::ostream &out, const ConnectedRoad &road)
<< static_cast<std::int32_t>(road.lane_data_id) << "}";
return out;
}
}
} // namespace guidance

namespace extractor
{
@@ -93,7 +95,7 @@ inline std::ostream &operator<<(std::ostream &out, const IntersectionViewData &v
<< " angle: " << view.angle << " bearing: " << view.perceived_bearing << "}";
return out;
}
}
} // namespace intersection

namespace TurnLaneType
{
@@ -123,9 +125,9 @@ inline std::ostream &operator<<(std::ostream &out, const Mask lane_type)

return out;
}
}
}
}
} // namespace TurnLaneType
} // namespace extractor
} // namespace osrm

namespace std
{
@@ -145,7 +147,7 @@ inline std::ostream &operator<<(std::ostream &out,

return out;
}
}
} // namespace std

namespace osrm
{
@@ -184,8 +186,26 @@ inline std::ostream &operator<<(std::ostream &out, const LaneDataVector &turn_la

return out;
}
}
}
} // namespace lanes
} // namespace guidance

namespace extractor
{
inline std::ostream &operator<<(std::ostream &out, const EdgeBasedEdge &edge)
{
out << " EdgeBasedEdge {";
out << " source " << edge.source << ", target: " << edge.target;
out << " EdgeBasedEdgeData data {";
out << " turn_id: " << edge.data.turn_id << ", weight: " << edge.data.weight;
out << " distance: " << edge.data.distance << ", duration: " << edge.data.duration;
out << " forward: " << (edge.data.forward == 0 ? "false" : "true")
<< ", backward: " << (edge.data.backward == 0 ? "false" : "true");
out << " }";
out << "}";
return out;
}

} // namespace extractor
} // namespace osrm

#endif /*OSRM_ENGINE_GUIDANCE_DEBUG_HPP_*/
@@ -20,24 +20,27 @@ namespace util
struct NodeBasedEdgeData
{
NodeBasedEdgeData()
: weight(INVALID_EDGE_WEIGHT), duration(INVALID_EDGE_WEIGHT), geometry_id({0, false}),
reversed(false), annotation_data(-1)
: weight(INVALID_EDGE_WEIGHT), duration(INVALID_EDGE_WEIGHT),
distance(INVALID_EDGE_DISTANCE), geometry_id({0, false}), reversed(false),
annotation_data(-1)
{
}

NodeBasedEdgeData(EdgeWeight weight,
EdgeWeight duration,
EdgeDistance distance,
GeometryID geometry_id,
bool reversed,
extractor::NodeBasedEdgeClassification flags,
AnnotationID annotation_data)
: weight(weight), duration(duration), geometry_id(geometry_id), reversed(reversed),
flags(flags), annotation_data(annotation_data)
: weight(weight), duration(duration), distance(distance), geometry_id(geometry_id),
reversed(reversed), flags(flags), annotation_data(annotation_data)
{
}

EdgeWeight weight;
EdgeWeight duration;
EdgeDistance distance;
GeometryID geometry_id;
bool reversed : 1;
extractor::NodeBasedEdgeClassification flags;
@@ -80,18 +83,20 @@ NodeBasedDynamicGraphFromEdges(NodeID number_of_nodes,
const extractor::NodeBasedEdge &input_edge) {
output_edge.data.weight = input_edge.weight;
output_edge.data.duration = input_edge.duration;
output_edge.data.distance = input_edge.distance;
output_edge.data.flags = input_edge.flags;
output_edge.data.annotation_data = input_edge.annotation_data;

BOOST_ASSERT(output_edge.data.weight > 0);
BOOST_ASSERT(output_edge.data.duration > 0);
BOOST_ASSERT(output_edge.data.distance > 0);
});

tbb::parallel_sort(edges_list.begin(), edges_list.end());

return NodeBasedDynamicGraph(number_of_nodes, edges_list);
}
}
}
} // namespace util
} // namespace osrm

#endif // NODE_BASED_GRAPH_HPP
@@ -48,7 +48,7 @@ struct osm_way_id
struct duplicated_node
{
};
}
} // namespace tag
using OSMNodeID = osrm::Alias<std::uint64_t, tag::osm_node_id>;
static_assert(std::is_pod<OSMNodeID>(), "OSMNodeID is not a valid alias");
using OSMWayID = osrm::Alias<std::uint64_t, tag::osm_way_id>;
@@ -113,6 +113,7 @@ static const SegmentWeight MAX_SEGMENT_WEIGHT = INVALID_SEGMENT_WEIGHT - 1;
static const SegmentDuration MAX_SEGMENT_DURATION = INVALID_SEGMENT_DURATION - 1;
static const EdgeWeight INVALID_EDGE_WEIGHT = std::numeric_limits<EdgeWeight>::max();
static const EdgeDuration MAXIMAL_EDGE_DURATION = std::numeric_limits<EdgeDuration>::max();
static const EdgeDistance MAXIMAL_EDGE_DISTANCE = std::numeric_limits<EdgeDistance>::max();
static const TurnPenalty INVALID_TURN_PENALTY = std::numeric_limits<TurnPenalty>::max();
static const EdgeDistance INVALID_EDGE_DISTANCE = std::numeric_limits<EdgeDistance>::max();
@@ -102,6 +102,21 @@ int Contractor::Run()
QueryGraph query_graph;
std::vector<std::vector<bool>> edge_filters;
std::vector<std::vector<bool>> cores;

std::cout << "Pre contractor edge-based-graph" << std::endl;
for (const auto &edge : edge_based_edge_list)
{
std::cout << " EdgeBasedEdge {";
std::cout << " source " << edge.source << ", target: " << edge.target;
std::cout << " EdgeBasedEdgeData data {";
std::cout << " turn_id: " << edge.data.turn_id << ", weight: " << edge.data.weight;
std::cout << " distance: " << edge.data.distance << ", duration: " << edge.data.duration;
std::cout << " forward: " << (edge.data.forward == 0 ? "false" : "true")
<< ", backward: " << (edge.data.backward == 0 ? "false" : "true");
std::cout << " }";
std::cout << "}" << std::endl;
}

std::tie(query_graph, edge_filters) = contractExcludableGraph(
toContractorGraph(number_of_edge_based_nodes, std::move(edge_based_edge_list)),
std::move(node_weights),
@@ -207,6 +207,8 @@ void ContractNode(ContractorThreadData *data,
}
else
{
std::cout << "Contractor: " << in_data.distance << "," << out_data.distance
<< std::endl;
// CAREFUL: This only works due to the independent node-setting. This
// guarantees that source is not connected to another node that is
// contracted
@@ -215,6 +217,7 @@ void ContractNode(ContractorThreadData *data,
target,
path_weight,
in_data.duration + out_data.duration,
in_data.distance + out_data.distance,
out_data.originalEdges + in_data.originalEdges,
node,
SHORTCUT_ARC,
@@ -225,6 +228,7 @@ void ContractNode(ContractorThreadData *data,
source,
path_weight,
in_data.duration + out_data.duration,
in_data.distance + out_data.distance,
out_data.originalEdges + in_data.originalEdges,
node,
SHORTCUT_ARC,
@@ -276,10 +280,13 @@ void ContractNode(ContractorThreadData *data,
}
else
{
std::cout << "Contractor: " << in_data.distance << "," << out_data.distance
<< std::endl;
inserted_edges.emplace_back(source,
target,
path_weight,
in_data.duration + out_data.duration,
in_data.distance + out_data.distance,
out_data.originalEdges + in_data.originalEdges,
node,
SHORTCUT_ARC,
@@ -290,6 +297,7 @@ void ContractNode(ContractorThreadData *data,
source,
path_weight,
in_data.duration + out_data.duration,
in_data.distance + out_data.distance,
out_data.originalEdges + in_data.originalEdges,
node,
SHORTCUT_ARC,
@@ -556,7 +564,7 @@ bool IsNodeIndependent(const util::XORFastHash<> &hash,
}
return true;
}
}
} // namespace

std::vector<bool> contractGraph(ContractorGraph &graph,
std::vector<bool> node_is_uncontracted_,
@@ -90,7 +90,7 @@ void alternativeRoutingStep(const DataFacade<Algorithm> &facade,
else
{
// check whether there is a loop present at the node
const auto loop_weight = getLoopWeight<false>(facade, node);
const auto loop_weight = std::get<0>(getLoopWeight<false>(facade, node));
const EdgeWeight new_weight_with_loop = new_weight + loop_weight;
if (loop_weight != INVALID_EDGE_WEIGHT &&
new_weight_with_loop <= *upper_bound_to_shortest_path_weight)
@@ -558,7 +558,7 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
}
return (upper_bound <= t_test_path_weight);
}
} // anon. namespace
} // namespace

InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engine_working_data,
const DataFacade<Algorithm> &facade,
@@ -853,4 +853,4 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi

} // namespace routing_algorithms
} // namespace engine
} // namespace osrm}
} // namespace osrm
@@ -21,18 +21,21 @@ namespace ch
inline bool addLoopWeight(const DataFacade<ch::Algorithm> &facade,
const NodeID node,
EdgeWeight &weight,
EdgeDuration &duration)
EdgeDuration &duration,
EdgeDistance &distance)
{ // Special case for CH when contractor creates a loop edge node->node
BOOST_ASSERT(weight < 0);

const auto loop_weight = ch::getLoopWeight<false>(facade, node);
if (loop_weight != INVALID_EDGE_WEIGHT)
if (std::get<0>(loop_weight) != INVALID_EDGE_WEIGHT)
{
const auto new_weight_with_loop = weight + loop_weight;
const auto new_weight_with_loop = weight + std::get<0>(loop_weight);
if (new_weight_with_loop >= 0)
{
weight = new_weight_with_loop;
duration += ch::getLoopWeight<true>(facade, node);
auto result = ch::getLoopWeight<true>(facade, node);
duration += std::get<0>(result);
distance += std::get<1>(result);
return true;
}
}
@@ -46,6 +49,7 @@ void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
const NodeID node,
const EdgeWeight weight,
const EdgeDuration duration,
const EdgeDistance distance,
typename SearchEngineData<Algorithm>::ManyToManyQueryHeap &query_heap,
const PhantomNode &)
{
@@ -63,21 +67,24 @@ void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
const auto edge_weight = data.weight;

const auto edge_duration = data.duration;
const auto edge_distance = data.distance;
std::cout << "edge_distance " << data.distance << std::endl;

BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
const auto to_weight = weight + edge_weight;
const auto to_duration = duration + edge_duration;
const auto to_distance = distance + edge_distance;

// New Node discovered -> Add to Heap + Node Info Storage
if (!query_heap.WasInserted(to))
{
query_heap.Insert(to, to_weight, {node, to_duration});
query_heap.Insert(to, to_weight, {node, to_duration, to_distance});
}
// Found a shorter Path -> Update weight and set new parent
else if (std::tie(to_weight, to_duration) <
std::tie(query_heap.GetKey(to), query_heap.GetData(to).duration))
{
query_heap.GetData(to) = {node, to_duration};
query_heap.GetData(to) = {node, to_duration, to_distance};
query_heap.DecreaseKey(to, to_weight);
}
}
@@ -91,12 +98,14 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
const std::vector<NodeBucket> &search_space_with_buckets,
std::vector<EdgeWeight> &weights_table,
std::vector<EdgeDuration> &durations_table,
std::vector<EdgeDistance> &distances_table,
std::vector<NodeID> &middle_nodes_table,
const PhantomNode &phantom_node)
{
const auto node = query_heap.DeleteMin();
const auto source_weight = query_heap.GetKey(node);
const auto source_duration = query_heap.GetData(node).duration;
const auto source_distance = query_heap.GetData(node).distance;

// Check if each encountered node has an entry
const auto &bucket_list = std::equal_range(search_space_with_buckets.begin(),
@@ -109,20 +118,24 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
const auto column_index = current_bucket.column_index;
const auto target_weight = current_bucket.weight;
const auto target_duration = current_bucket.duration;
const auto target_distance = current_bucket.distance;

auto &current_weight = weights_table[row_index * number_of_targets + column_index];
auto &current_duration = durations_table[row_index * number_of_targets + column_index];
auto &current_distance = distances_table[row_index * number_of_targets + column_index];

// Check if new weight is better
auto new_weight = source_weight + target_weight;
auto new_duration = source_duration + target_duration;
auto new_distance = source_distance + target_distance;

if (new_weight < 0)
{
if (addLoopWeight(facade, node, new_weight, new_duration))
if (addLoopWeight(facade, node, new_weight, new_duration, new_distance))
{
current_weight = std::min(current_weight, new_weight);
current_duration = std::min(current_duration, new_duration);
current_distance = std::min(current_distance, new_distance);
middle_nodes_table[row_index * number_of_targets + column_index] = node;
}
}
@@ -130,12 +143,13 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
{
current_weight = new_weight;
current_duration = new_duration;
current_distance = new_distance;
middle_nodes_table[row_index * number_of_targets + column_index] = node;
}
}

relaxOutgoingEdges<FORWARD_DIRECTION>(
facade, node, source_weight, source_duration, query_heap, phantom_node);
facade, node, source_weight, source_duration, source_distance, query_heap, phantom_node);
}

void backwardRoutingStep(const DataFacade<Algorithm> &facade,
@@ -147,14 +161,15 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
const auto node = query_heap.DeleteMin();
const auto target_weight = query_heap.GetKey(node);
const auto target_duration = query_heap.GetData(node).duration;
const auto target_distance = query_heap.GetData(node).distance;
const auto parent = query_heap.GetData(node).parent;

// Store settled nodes in search space bucket
search_space_with_buckets.emplace_back(
node, parent, column_index, target_weight, target_duration);
node, parent, column_index, target_weight, target_duration, target_distance);

relaxOutgoingEdges<REVERSE_DIRECTION>(
facade, node, target_weight, target_duration, query_heap, phantom_node);
facade, node, target_weight, target_duration, target_distance, query_heap, phantom_node);
}

} // namespace ch
@@ -235,7 +250,7 @@ void calculateDistances(typename SearchEngineData<ch::Algorithm>::ManyToManyQuer
needsLoopBackwards(source_phantom, target_phantom)))
{
auto weight = ch::getLoopWeight<false>(facade, packed_leg.front());
if (weight != INVALID_EDGE_WEIGHT)
if (std::get<0>(weight) != INVALID_EDGE_WEIGHT)
packed_leg.push_back(packed_leg.front());
}
if (!packed_leg.empty())
@@ -331,7 +346,7 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,

std::vector<EdgeWeight> weights_table(number_of_entries, INVALID_EDGE_WEIGHT);
std::vector<EdgeDuration> durations_table(number_of_entries, MAXIMAL_EDGE_DURATION);
std::vector<EdgeDistance> distances_table;
std::vector<EdgeDistance> distances_table(number_of_entries, MAXIMAL_EDGE_DISTANCE);
std::vector<NodeID> middle_nodes_table(number_of_entries, SPECIAL_NODEID);

std::vector<NodeBucket> search_space_with_buckets;
@@ -380,6 +395,7 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
search_space_with_buckets,
weights_table,
durations_table,
distances_table,
middle_nodes_table,
source_phantom);
}
@@ -387,17 +403,21 @@ manyToManySearch(SearchEngineData<ch::Algorithm> &engine_working_data,
if (calculate_distance)
{
distances_table.resize(number_of_entries, INVALID_EDGE_DISTANCE);
calculateDistances(query_heap,
facade,
phantom_nodes,
target_indices,
row_index,
source_index,
source_phantom,
number_of_targets,
search_space_with_buckets,
distances_table,
middle_nodes_table);
// TODO: this is a hack to work around stuff
if (number_of_entries == 0)
{
calculateDistances(query_heap,
facade,
phantom_nodes,
target_indices,
row_index,
source_index,
source_phantom,
number_of_targets,
search_space_with_buckets,
distances_table,
middle_nodes_table);
}
}
}
@@ -41,6 +41,7 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
const NodeID node,
const EdgeWeight weight,
const EdgeDuration duration,
const EdgeDistance /* distance TODO use this */,
typename SearchEngineData<mld::Algorithm>::ManyToManyQueryHeap &query_heap,
Args... args)
{
@@ -77,14 +78,14 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
const auto to_duration = duration + shortcut_durations.front();
if (!query_heap.WasInserted(to))
{
query_heap.Insert(to, to_weight, {node, true, to_duration});
query_heap.Insert(to, to_weight, {node, true, to_duration, 0});
}
else if (std::tie(to_weight, to_duration, node) <
std::tie(query_heap.GetKey(to),
query_heap.GetData(to).duration,
query_heap.GetData(to).parent))
{
query_heap.GetData(to) = {node, true, to_duration};
query_heap.GetData(to) = {node, true, to_duration, 0};
query_heap.DecreaseKey(to, to_weight);
}
}
@@ -109,14 +110,14 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
const auto to_duration = duration + shortcut_durations.front();
if (!query_heap.WasInserted(to))
{
query_heap.Insert(to, to_weight, {node, true, to_duration});
query_heap.Insert(to, to_weight, {node, true, to_duration, 0});
}
else if (std::tie(to_weight, to_duration, node) <
std::tie(query_heap.GetKey(to),
query_heap.GetData(to).duration,
query_heap.GetData(to).parent))
{
query_heap.GetData(to) = {node, true, to_duration};
query_heap.GetData(to) = {node, true, to_duration, 0};
query_heap.DecreaseKey(to, to_weight);
}
}
@@ -153,7 +154,7 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
// New Node discovered -> Add to Heap + Node Info Storage
if (!query_heap.WasInserted(to))
{
query_heap.Insert(to, to_weight, {node, false, to_duration});
query_heap.Insert(to, to_weight, {node, false, to_duration, 0});
}
// Found a shorter Path -> Update weight and set new parent
else if (std::tie(to_weight, to_duration, node) <
@@ -161,7 +162,7 @@ void relaxOutgoingEdges(const DataFacade<mld::Algorithm> &facade,
query_heap.GetData(to).duration,
query_heap.GetData(to).parent))
{
query_heap.GetData(to) = {node, false, to_duration};
query_heap.GetData(to) = {node, false, to_duration, 0};
query_heap.DecreaseKey(to, to_weight);
}
}
@@ -178,15 +179,15 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
const std::vector<PhantomNode> &phantom_nodes,
std::size_t phantom_index,
const std::vector<std::size_t> &phantom_indices,
const bool calculate_distance)
const bool /* calculate_distance */)
{
std::vector<EdgeWeight> weights(phantom_indices.size(), INVALID_EDGE_WEIGHT);
std::vector<EdgeDuration> durations(phantom_indices.size(), MAXIMAL_EDGE_DURATION);
std::vector<EdgeDistance> distances_table;
std::vector<EdgeDistance> distances_table(phantom_indices.size(), MAXIMAL_EDGE_DISTANCE);
std::vector<NodeID> middle_nodes_table(phantom_indices.size(), SPECIAL_NODEID);

// Collect destination (source) nodes into a map
std::unordered_multimap<NodeID, std::tuple<std::size_t, EdgeWeight, EdgeDuration>>
std::unordered_multimap<NodeID, std::tuple<std::size_t, EdgeWeight, EdgeDuration, EdgeDistance>>
target_nodes_index;
target_nodes_index.reserve(phantom_indices.size());
for (std::size_t index = 0; index < phantom_indices.size(); ++index)
@@ -201,13 +202,15 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
{phantom_node.forward_segment_id.id,
std::make_tuple(index,
phantom_node.GetForwardWeightPlusOffset(),
phantom_node.GetForwardDuration())});
phantom_node.GetForwardDuration(),
phantom_node.GetForwardDistance())});
if (phantom_node.IsValidReverseTarget())
target_nodes_index.insert(
{phantom_node.reverse_segment_id.id,
std::make_tuple(index,
phantom_node.GetReverseWeightPlusOffset(),
phantom_node.GetReverseDuration())});
phantom_node.GetReverseDuration(),
phantom_node.GetReverseDistance())});
}
else if (DIRECTION == REVERSE_DIRECTION)
{
@@ -216,13 +219,15 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
{phantom_node.forward_segment_id.id,
std::make_tuple(index,
-phantom_node.GetForwardWeightPlusOffset(),
-phantom_node.GetForwardDuration())});
-phantom_node.GetForwardDuration(),
-phantom_node.GetForwardDistance())});
if (phantom_node.IsValidReverseSource())
target_nodes_index.insert(
{phantom_node.reverse_segment_id.id,
std::make_tuple(index,
-phantom_node.GetReverseWeightPlusOffset(),
-phantom_node.GetReverseDuration())});
-phantom_node.GetReverseDuration(),
-phantom_node.GetReverseDistance())});
}
}
@@ -232,44 +237,50 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
auto &query_heap = *(engine_working_data.many_to_many_heap);

// Check if node is in the destinations list and update weights/durations
auto update_values = [&](NodeID node, EdgeWeight weight, EdgeDuration duration) {
auto candidates = target_nodes_index.equal_range(node);
for (auto it = candidates.first; it != candidates.second;)
{
std::size_t index;
EdgeWeight target_weight;
EdgeDuration target_duration;
std::tie(index, target_weight, target_duration) = it->second;

const auto path_weight = weight + target_weight;
if (path_weight >= 0)
auto update_values =
[&](NodeID node, EdgeWeight weight, EdgeDuration duration, EdgeDistance distance) {
auto candidates = target_nodes_index.equal_range(node);
for (auto it = candidates.first; it != candidates.second;)
{
const auto path_duration = duration + target_duration;
std::size_t index;
EdgeWeight target_weight;
EdgeDuration target_duration;
EdgeDuration target_distance;
std::tie(index, target_weight, target_duration, target_distance) = it->second;

if (std::tie(path_weight, path_duration) <
std::tie(weights[index], durations[index]))
const auto path_weight = weight + target_weight;
if (path_weight >= 0)
{
weights[index] = path_weight;
durations[index] = path_duration;
middle_nodes_table[index] = node;
const auto path_duration = duration + target_duration;
const auto path_distance = distance + target_distance;

if (std::tie(path_weight, path_duration) <
std::tie(weights[index], durations[index]))
{
weights[index] = path_weight;
durations[index] = path_duration;
distances_table[index] = path_distance;
middle_nodes_table[index] = node;
}

// Remove node from destinations list
it = target_nodes_index.erase(it);
}
else
{
++it;
}

// Remove node from destinations list
it = target_nodes_index.erase(it);
}
else
{
++it;
}
}
};

auto insert_node = [&](NodeID node, EdgeWeight initial_weight, EdgeDuration initial_duration) {
};

auto insert_node = [&](NodeID node,
EdgeWeight initial_weight,
EdgeDuration initial_duration,
EdgeDistance initial_distance) {
// Update single node paths
update_values(node, initial_weight, initial_duration);
update_values(node, initial_weight, initial_duration, initial_distance);

query_heap.Insert(node, initial_weight, {node, initial_duration});
query_heap.Insert(node, initial_weight, {node, initial_duration, initial_distance});

// Place adjacent nodes into heap
for (auto edge : facade.GetAdjacentEdgeRange(node))
@@ -292,8 +303,9 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
facade.GetWeightPenaltyForEdgeID(turn_id);
const auto edge_duration = initial_duration + facade.GetNodeDuration(node_id) +
facade.GetDurationPenaltyForEdgeID(turn_id);
const auto edge_distance = initial_distance;

query_heap.Insert(to, edge_weight, {node, edge_duration});
query_heap.Insert(to, edge_weight, {node, edge_duration, edge_distance});
}
}
};
@@ -307,14 +319,16 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
{
insert_node(phantom_node.forward_segment_id.id,
-phantom_node.GetForwardWeightPlusOffset(),
-phantom_node.GetForwardDuration());
-phantom_node.GetForwardDuration(),
-phantom_node.GetForwardDistance());
}

if (phantom_node.IsValidReverseSource())
{
insert_node(phantom_node.reverse_segment_id.id,
-phantom_node.GetReverseWeightPlusOffset(),
-phantom_node.GetReverseDuration());
-phantom_node.GetReverseDuration(),
-phantom_node.GetReverseDistance());
}
}
else if (DIRECTION == REVERSE_DIRECTION)
@@ -323,14 +337,16 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
{
insert_node(phantom_node.forward_segment_id.id,
phantom_node.GetForwardWeightPlusOffset(),
phantom_node.GetForwardDuration());
phantom_node.GetForwardDuration(),
phantom_node.GetForwardDistance());
}

if (phantom_node.IsValidReverseTarget())
{
insert_node(phantom_node.reverse_segment_id.id,
phantom_node.GetReverseWeightPlusOffset(),
phantom_node.GetReverseDuration());
phantom_node.GetReverseDuration(),
phantom_node.GetReverseDistance());
}
}
}
@ -341,22 +357,26 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const auto node = query_heap.DeleteMin();
|
||||
const auto weight = query_heap.GetKey(node);
|
||||
const auto duration = query_heap.GetData(node).duration;
|
||||
const auto distance = query_heap.GetData(node).distance;
|
||||
|
||||
// Update values
|
||||
update_values(node, weight, duration);
|
||||
update_values(node, weight, duration, distance);
|
||||
|
||||
// Relax outgoing edges
|
||||
relaxOutgoingEdges<DIRECTION>(facade,
|
||||
node,
|
||||
weight,
|
||||
duration,
|
||||
distance,
|
||||
query_heap,
|
||||
phantom_nodes,
|
||||
phantom_index,
|
||||
phantom_indices);
|
||||
}
|
||||
|
||||
if (calculate_distance)
|
||||
// TODO: re-enable this if we need to fallback
|
||||
// if (calculate_distance)
|
||||
if (false)
|
||||
{
|
||||
// Initialize unpacking heaps
|
||||
engine_working_data.InitializeOrClearFirstThreadLocalStorage(
|
||||
@ -497,6 +517,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
const auto node = query_heap.DeleteMin();
|
||||
const auto source_weight = query_heap.GetKey(node);
|
||||
const auto source_duration = query_heap.GetData(node).duration;
|
||||
const auto source_distance = query_heap.GetData(node).distance;
|
||||
|
||||
// Check if each encountered node has an entry
|
||||
const auto &bucket_list = std::equal_range(search_space_with_buckets.begin(),
|
||||
@ -533,7 +554,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
}
|
||||
|
||||
relaxOutgoingEdges<DIRECTION>(
|
||||
facade, node, source_weight, source_duration, query_heap, phantom_node);
|
||||
facade, node, source_weight, source_duration, source_distance, query_heap, phantom_node);
|
||||
}
|
||||
|
||||
template <bool DIRECTION>
|
||||
@ -546,6 +567,7 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
const auto node = query_heap.DeleteMin();
|
||||
const auto target_weight = query_heap.GetKey(node);
|
||||
const auto target_duration = query_heap.GetData(node).duration;
|
||||
const auto target_distance = query_heap.GetData(node).distance;
|
||||
const auto parent = query_heap.GetData(node).parent;
|
||||
const auto from_clique_arc = query_heap.GetData(node).from_clique_arc;
|
||||
|
||||
@ -556,8 +578,14 @@ void backwardRoutingStep(const DataFacade<Algorithm> &facade,
|
||||
const auto &partition = facade.GetMultiLevelPartition();
|
||||
const auto maximal_level = partition.GetNumberOfLevels() - 1;
|
||||
|
||||
relaxOutgoingEdges<!DIRECTION>(
|
||||
facade, node, target_weight, target_duration, query_heap, phantom_node, maximal_level);
|
||||
relaxOutgoingEdges<!DIRECTION>(facade,
|
||||
node,
|
||||
target_weight,
|
||||
target_duration,
|
||||
target_distance,
|
||||
query_heap,
|
||||
phantom_node,
|
||||
maximal_level);
|
||||
}
|
||||
|
||||
template <bool DIRECTION>
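
Taken together, the forward and backward steps now both pull the distance out of the heap and hand it to relaxOutgoingEdges, so each search-space bucket can remember how far a node is from its source or target. A hedged, self-contained sketch of how a table entry could then be assembled from the two halves (all names are placeholders, not the commit's actual bucket types):

#include <cstdint>

using EdgeWeight = std::int32_t;
using EdgeDuration = std::int32_t;
using EdgeDistance = double;

// Illustrative bucket entry written by the backward step for one target column.
struct BucketEntry
{
    unsigned column_index;
    EdgeWeight weight;
    EdgeDuration duration;
    EdgeDistance distance;
};

// Combine a forward-search node (source_*) with a matching bucket entry; keep the
// duration and distance belonging to the smallest combined weight, mirroring how
// durations were already handled, with distance simply riding along.
inline void combine(EdgeWeight source_weight, EdgeDuration source_duration,
                    EdgeDistance source_distance, const BucketEntry &target,
                    EdgeWeight &best_weight, EdgeDuration &best_duration,
                    EdgeDistance &best_distance)
{
    const EdgeWeight total_weight = source_weight + target.weight;
    if (total_weight < best_weight)
    {
        best_weight = total_weight;
        best_duration = source_duration + target.duration;
        best_distance = source_distance + target.distance;
    }
}
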

@ -396,5 +396,5 @@ std::unique_ptr<SegmentDataContainer> CompressedEdgeContainer::ToSegmentData()

return std::move(segment_data);
}
}
}
} // namespace extractor
} // namespace osrm

@ -51,7 +51,7 @@ template <> struct hash<std::pair<NodeID, NodeID>>
return seed;
}
};
}
} // namespace std

// Buffer size of turn_indexes_write_buffer to reduce number of write(v) syscals
const constexpr int TURN_INDEX_WRITE_BUFFER_SIZE = 1000;
@ -555,22 +555,21 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
&scripting_environment,
weight_multiplier,
&conditional_restriction_map](
// what nodes will be used? In most cases this will be the id
// stored in the edge_data. In case of duplicated nodes (e.g.
// due to via-way restrictions), one/both of these might
// refer to a newly added edge based node
const auto edge_based_node_from,
const auto edge_based_node_to,
// the situation of the turn
const auto node_along_road_entering,
const auto node_based_edge_from,
const auto intersection_node,
const auto node_based_edge_to,
const auto &turn_angle,
const auto &road_legs_on_the_right,
const auto &road_legs_on_the_left,
const auto &edge_geometries) {

// what nodes will be used? In most cases this will be the id
// stored in the edge_data. In case of duplicated nodes (e.g.
// due to via-way restrictions), one/both of these might
// refer to a newly added edge based node
const auto edge_based_node_from,
const auto edge_based_node_to,
// the situation of the turn
const auto node_along_road_entering,
const auto node_based_edge_from,
const auto intersection_node,
const auto node_based_edge_to,
const auto &turn_angle,
const auto &road_legs_on_the_right,
const auto &road_legs_on_the_left,
const auto &edge_geometries) {
const auto node_restricted =
isRestricted(node_along_road_entering,
intersection_node,
@ -652,16 +651,19 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
// auto turn_id = m_edge_based_edge_list.size();
auto weight = boost::numeric_cast<EdgeWeight>(edge_data1.weight + weight_penalty);
auto duration = boost::numeric_cast<EdgeWeight>(edge_data1.duration + duration_penalty);
auto distance = boost::numeric_cast<EdgeDistance>(edge_data1.distance);

EdgeBasedEdge edge_based_edge = {
edge_based_node_from,
edge_based_node_to,
SPECIAL_NODEID, // This will be updated once the main loop
// completes!
weight,
duration,
true,
false};
std::cout << "EBGF distance: " << distance << std::endl;

EdgeBasedEdge edge_based_edge = {edge_based_node_from,
edge_based_node_to,
SPECIAL_NODEID, // This will be updated once the main
// loop completes!
weight,
duration,
distance,
true,
false};

// We write out the mapping between the edge-expanded edges and the original nodes.
// Since each edge represents a possible maneuver, external programs can use this to
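
One detail worth noting in this hunk: weight and duration absorb the turn penalties, while the new distance member is carried over from the incoming edge untouched, since a turn costs time but adds no length. A small sketch of that split, using placeholder names rather than the factory's actual locals:

#include <cstdint>

using EdgeWeight = std::int32_t;
using EdgeDuration = std::int32_t;
using EdgeDistance = double;

struct TurnCost
{
    EdgeWeight weight;
    EdgeDuration duration;
    EdgeDistance distance;
};

// Assemble the cost of traversing an edge and then taking a turn: the penalty terms
// apply to weight and duration only; distance stays the plain geometric length.
inline TurnCost applyTurnPenalty(EdgeWeight edge_weight, EdgeDuration edge_duration,
                                 EdgeDistance edge_distance, EdgeWeight weight_penalty,
                                 EdgeDuration duration_penalty)
{
    return {edge_weight + weight_penalty, edge_duration + duration_penalty, edge_distance};
}
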
@ -693,7 +695,6 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
//
tbb::filter_t<tbb::blocked_range<NodeID>, EdgesPipelineBufferPtr> processor_stage(
tbb::filter::parallel, [&](const tbb::blocked_range<NodeID> &intersection_node_range) {

auto buffer = std::make_shared<EdgesPipelineBuffer>();
buffer->nodes_processed = intersection_node_range.size();

@ -891,7 +892,7 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
// TODO: this loop is not optimized - once we have a few
// overrides available, we should index this for faster
// lookups
for (auto & override : unresolved_maneuver_overrides)
for (auto &override : unresolved_maneuver_overrides)
{
for (auto &turn : override.turn_sequence)
{
@ -1032,7 +1033,6 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
std::vector<EdgeWithData> delayed_data;
tbb::filter_t<EdgesPipelineBufferPtr, void> output_stage(
tbb::filter::serial_in_order, [&](auto buffer) {

routing_progress.PrintAddition(buffer->nodes_processed);

m_connectivity_checksum = buffer->checksum.update_checksum(m_connectivity_checksum);

@ -99,7 +99,7 @@ inline NodeID mapExternalToInternalNodeID(Iter first, Iter last, const OSMNodeID
return (it == last || value < *it) ? SPECIAL_NODEID
: static_cast<NodeID>(std::distance(first, it));
}
}
} // namespace

namespace osrm
{
@ -277,8 +277,8 @@ void ExtractionContainers::PrepareEdges(ScriptingEnvironment &scripting_environm
{
if (edge_iterator->result.osm_source_id < node_iterator->node_id)
{
util::Log(logDEBUG) << "Found invalid node reference "
<< edge_iterator->result.source;
util::Log(logDEBUG)
<< "Found invalid node reference " << edge_iterator->result.source;
edge_iterator->result.source = SPECIAL_NODEID;
++edge_iterator;
continue;
@ -387,12 +387,16 @@ void ExtractionContainers::PrepareEdges(ScriptingEnvironment &scripting_environm
const auto weight = edge_iterator->weight_data(distance);
const auto duration = edge_iterator->duration_data(distance);

const auto accurate_distance =
util::coordinate_calculation::fccApproximateDistance(source_coord, target_coord);

ExtractionSegment segment(source_coord, target_coord, distance, weight, duration);
scripting_environment.ProcessSegment(segment);

auto &edge = edge_iterator->result;
edge.weight = std::max<EdgeWeight>(1, std::round(segment.weight * weight_multiplier));
edge.duration = std::max<EdgeWeight>(1, std::round(segment.duration * 10.));
edge.distance = accurate_distance;

// assign new node id
const auto node_id = mapExternalToInternalNodeID(
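
The distance stored per segment here is simply the geodesic length between the two node coordinates. As a self-contained illustration, a haversine computation yields the same kind of value; this is a stand-in for exposition, not the cheaper flattening-based approximation that fccApproximateDistance implements, and the radius constant is an assumption.

#include <cmath>

// Great-circle distance in meters between two WGS84 coordinates (haversine formula).
inline double greatCircleMeters(double lat1, double lon1, double lat2, double lon2)
{
    constexpr double kPi = 3.14159265358979323846;
    constexpr double kDegToRad = kPi / 180.0;
    constexpr double kEarthRadiusMeters = 6372797.560856; // assumed mean radius
    const double dlat = (lat2 - lat1) * kDegToRad;
    const double dlon = (lon2 - lon1) * kDegToRad;
    const double a = std::sin(dlat / 2) * std::sin(dlat / 2) +
                     std::cos(lat1 * kDegToRad) * std::cos(lat2 * kDegToRad) *
                         std::sin(dlon / 2) * std::sin(dlon / 2);
    return 2.0 * kEarthRadiusMeters * std::atan2(std::sqrt(a), std::sqrt(1.0 - a));
}
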
@ -740,7 +744,6 @@ void ExtractionContainers::PrepareManeuverOverrides()
**/
auto const find_turn_from_way_tofrom_nodes = [&](auto const &from_segment,
auto const &to_segment) {

if (from_segment.first_segment_source_id == to_segment.first_segment_source_id)
{
return NodeBasedTurn{osm_node_to_internal_nbn(from_segment.first_segment_target_id),
@ -857,7 +860,6 @@ void ExtractionContainers::PrepareManeuverOverrides()
// Later, the UnresolvedManeuverOverride will be converted into a final ManeuverOverride
// once the edge-based-node IDs are generated by the edge-based-graph-factory
const auto transform = [&](const auto &external, auto &internal) {

// Create a stub override
auto maneuver_override =
UnresolvedManeuverOverride{{},
@ -1036,8 +1038,9 @@ void ExtractionContainers::PrepareRestrictions()
// translate the turn from one segment onto another into a node restriction (the ways can
// only
// be connected at a single location)
auto const get_node_restriction_from_OSM_ids = [&](
auto const from_id, auto const to_id, const OSMNodeID via_node) {
auto const get_node_restriction_from_OSM_ids = [&](auto const from_id,
auto const to_id,
const OSMNodeID via_node) {
auto const from_segment_itr = referenced_ways.find(from_id);
if (from_segment_itr->second.way_id != from_id)
{

@ -75,7 +75,7 @@ void ExtractorCallbacks::ProcessRestriction(const InputConditionalTurnRestrictio
// util::Log() << restriction.toString();
}

void ExtractorCallbacks::ProcessManeuverOverride(const InputManeuverOverride & override)
void ExtractorCallbacks::ProcessManeuverOverride(const InputManeuverOverride &override)
{
external_memory.external_maneuver_overrides_list.push_back(override);
}
@ -415,6 +415,7 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti
OSMNodeID{static_cast<std::uint64_t>(last_node.ref())},
0, // weight
0, // duration
0, // distance
{}, // geometry id
static_cast<AnnotationID>(annotation_data_id),
{true,
@ -450,6 +451,7 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti
OSMNodeID{static_cast<std::uint64_t>(last_node.ref())},
0, // weight
0, // duration
0, // distance
{}, // geometry id
static_cast<AnnotationID>(annotation_data_id),
{false,

@ -185,17 +185,18 @@ void GraphCompressor::Compress(
* just
* like a barrier.
*/
const auto selectAnnotation = [&node_data_container](
const AnnotationID front_annotation, const AnnotationID back_annotation) {
// A lane has tags: u - (front) - v - (back) - w
// During contraction, we keep only one of the tags. Usually the one closer
// to the intersection is preferred. If its empty, however, we keep the
// non-empty one
if (node_data_container[back_annotation].lane_description_id ==
INVALID_LANE_DESCRIPTIONID)
return front_annotation;
return back_annotation;
};
const auto selectAnnotation =
[&node_data_container](const AnnotationID front_annotation,
const AnnotationID back_annotation) {
// A lane has tags: u - (front) - v - (back) - w
// During contraction, we keep only one of the tags. Usually the one closer
// to the intersection is preferred. If its empty, however, we keep the
// non-empty one
if (node_data_container[back_annotation].lane_description_id ==
INVALID_LANE_DESCRIPTIONID)
return front_annotation;
return back_annotation;
};

graph.GetEdgeData(forward_e1).annotation_data = selectAnnotation(
fwd_edge_data1.annotation_data, fwd_edge_data2.annotation_data);
@ -259,6 +260,8 @@ void GraphCompressor::Compress(
const auto forward_weight2 = fwd_edge_data2.weight;
const auto forward_duration1 = fwd_edge_data1.duration;
const auto forward_duration2 = fwd_edge_data2.duration;
// const auto forward_distance1 = fwd_edge_data1.distance;
const auto forward_distance2 = fwd_edge_data2.distance;

BOOST_ASSERT(0 != forward_weight1);
BOOST_ASSERT(0 != forward_weight2);
@ -267,6 +270,8 @@ void GraphCompressor::Compress(
const auto reverse_weight2 = rev_edge_data2.weight;
const auto reverse_duration1 = rev_edge_data1.duration;
const auto reverse_duration2 = rev_edge_data2.duration;
// const auto reverse_distance1 = rev_edge_data1.distance;
const auto reverse_distance2 = rev_edge_data2.distance;

BOOST_ASSERT(0 != reverse_weight1);
BOOST_ASSERT(0 != reverse_weight2);
@ -279,6 +284,10 @@ void GraphCompressor::Compress(
graph.GetEdgeData(forward_e1).duration += forward_duration2;
graph.GetEdgeData(reverse_e1).duration += reverse_duration2;

// add duration of e2's to e1
graph.GetEdgeData(forward_e1).distance += forward_distance2;
graph.GetEdgeData(reverse_e1).distance += reverse_distance2;

if (node_weight_penalty != INVALID_EDGE_WEIGHT &&
node_duration_penalty != MAXIMAL_EDGE_DURATION)
{
@ -286,6 +295,7 @@ void GraphCompressor::Compress(
graph.GetEdgeData(reverse_e1).weight += node_weight_penalty;
graph.GetEdgeData(forward_e1).duration += node_duration_penalty;
graph.GetEdgeData(reverse_e1).duration += node_duration_penalty;
// Note: no penalties for distances
}

// extend e1's to targets of e2's
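
The net effect of these hunks: when graph compression merges edge e2 (v -> w) into e1 (u -> v), the surviving edge's distance becomes the sum of the two segment distances, while the penalty attached to the removed node contributes weight and duration only. A minimal standalone sketch of that merge, with assumed type and field names:

#include <cstdint>

using EdgeWeight = std::int32_t;
using EdgeDuration = std::int32_t;
using EdgeDistance = double;

struct CompressedEdgeData
{
    EdgeWeight weight;
    EdgeDuration duration;
    EdgeDistance distance;
};

// Fold e2 (v -> w) into e1 (u -> v) so that e1 represents the compressed edge u -> w.
// node_*_penalty models a cost at the removed node v (e.g. a traffic signal); in the
// real code it is applied only when such a penalty actually exists.
inline void mergeIntoFirst(CompressedEdgeData &e1, const CompressedEdgeData &e2,
                           EdgeWeight node_weight_penalty, EdgeDuration node_duration_penalty)
{
    e1.weight += e2.weight + node_weight_penalty;
    e1.duration += e2.duration + node_duration_penalty;
    e1.distance += e2.distance; // lengths add up; passing through v adds no distance
}
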
@ -359,5 +369,5 @@ void GraphCompressor::PrintStatistics(unsigned original_number_of_nodes,
util::Log() << "Node compression ratio: " << new_node_count / (double)original_number_of_nodes;
util::Log() << "Edge compression ratio: " << new_edge_count / (double)original_number_of_edges;
}
}
}
} // namespace extractor
} // namespace osrm