Merge branch 'master' into sf-stop-sign
commit eb72ca4941
@@ -6,6 +6,8 @@
- FIXED: Handle the snapping parameter for all plugins in the NodeJS bindings, not only for Route. [#6417](https://github.com/Project-OSRM/osrm-backend/pull/6417)
- FIXED: Fix annotations=true handling in NodeJS bindings & libosrm. [#6415](https://github.com/Project-OSRM/osrm-backend/pull/6415/)
- FIXED: Fix bindings compilation issue on the latest Node. Update NAN to 2.17.0. [#6416](https://github.com/Project-OSRM/osrm-backend/pull/6416)
- CHANGED: Make edge metrics strongly typed [#6420](https://github.com/Project-OSRM/osrm-backend/pull/6420)
- FIXED: Typo in file name src/util/timed_historgram.cpp -> src/util/timed_histogram.cpp [#6428](https://github.com/Project-OSRM/osrm-backend/issues/6428)
- Routing:
  - FIXED: Fix adding traffic signal penalties during compression [#6419](https://github.com/Project-OSRM/osrm-backend/pull/6419)

# 5.27.1
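Most of the C++ hunks below follow from the "Make edge metrics strongly typed" change ([#6420]): `EdgeWeight`, `EdgeDuration`, and `EdgeDistance` become distinct alias types, and conversions go through helpers such as `from_alias`, `to_alias`, and `alias_cast`. The following is a minimal, hedged sketch of that pattern; the `Alias` wrapper and conversion helpers here are simplified stand-ins, not OSRM's actual implementation.

```cpp
#include <cstdint>
#include <iostream>

// Simplified stand-in for a strong-alias wrapper: each metric gets its own
// type, so weights and durations can no longer be mixed up silently.
struct edge_weight_tag {};
struct edge_duration_tag {};

template <typename T, typename Tag> struct Alias
{
    using value_type = T;
    T value;
};

using EdgeWeight = Alias<std::int32_t, edge_weight_tag>;
using EdgeDuration = Alias<std::int32_t, edge_duration_tag>;

// Explicit conversions, mirroring the from_alias/to_alias calls in the diff below.
template <typename T, typename A> T from_alias(const A alias) { return static_cast<T>(alias.value); }
template <typename A, typename T> A to_alias(const T value)
{
    return A{static_cast<typename A::value_type>(value)};
}

int main()
{
    EdgeDuration duration = to_alias<EdgeDuration>(250); // deciseconds
    EdgeWeight weight{100};
    // weight = duration;  // would not compile: distinct alias types
    std::cout << from_alias<double>(duration) / 10. << " s\n"; // 25 s
    std::cout << weight.value << "\n";
    return 0;
}
```

The payoff, visible throughout the diff, is that every weight/duration mix-up now has to be spelled out as an explicit conversion.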
@@ -1,75 +0,0 @@
{
  "AWSTemplateFormatVersion": "2010-09-09",
  "Description": "user for publishing to s3://mapbox-node-binary/osrm",
  "Resources": {
    "User": {
      "Type": "AWS::IAM::User",
      "Properties": {
        "Policies": [
          {
            "PolicyName": "list",
            "PolicyDocument": {
              "Statement": [
                {
                  "Action": [
                    "s3:ListBucket"
                  ],
                  "Effect": "Allow",
                  "Resource": "arn:aws:s3:::mapbox-node-binary",
                  "Condition": {
                    "StringLike": {
                      "s3:prefix": [
                        "osrm/*"
                      ]
                    }
                  }
                }
              ]
            }
          },
          {
            "PolicyName": "publish",
            "PolicyDocument": {
              "Statement": [
                {
                  "Action": [
                    "s3:DeleteObject",
                    "s3:GetObject",
                    "s3:GetObjectAcl",
                    "s3:PutObject",
                    "s3:PutObjectAcl"
                  ],
                  "Effect": "Allow",
                  "Resource": "arn:aws:s3:::mapbox-node-binary/osrm/*"
                }
              ]
            }
          }
        ]
      }
    },
    "AccessKey": {
      "Type": "AWS::IAM::AccessKey",
      "Properties": {
        "UserName": {
          "Ref": "User"
        }
      }
    }
  },
  "Outputs": {
    "AccessKeyId": {
      "Value": {
        "Ref": "AccessKey"
      }
    },
    "SecretAccessKey": {
      "Value": {
        "Fn::GetAtt": [
          "AccessKey",
          "SecretAccessKey"
        ]
      }
    }
  }
}
@@ -1,59 +0,0 @@
var cf = require('@mapbox/cloudfriend');
var package_json = require('../package.json')

module.exports = {
  AWSTemplateFormatVersion: '2010-09-09',
  Description: 'user for publishing to s3://mapbox-node-binary/' + package_json.name,
  Resources: {
    User: {
      Type: 'AWS::IAM::User',
      Properties: {
        Policies: [
          {
            PolicyName: 'list',
            PolicyDocument: {
              Statement: [
                {
                  Action: ['s3:ListBucket'],
                  Effect: 'Allow',
                  Resource: 'arn:aws:s3:::mapbox-node-binary',
                  Condition : {
                    StringLike : {
                      "s3:prefix": [ package_json.name + "/*"]
                    }
                  }
                }
              ]
            }
          },
          {
            PolicyName: 'publish',
            PolicyDocument: {
              Statement: [
                {
                  Action: ['s3:DeleteObject', 's3:GetObject', 's3:GetObjectAcl', 's3:PutObject', 's3:PutObjectAcl'],
                  Effect: 'Allow',
                  Resource: 'arn:aws:s3:::mapbox-node-binary/' + package_json.name + '/*'
                }
              ]
            }
          }
        ]
      }
    },
    AccessKey: {
      Type: 'AWS::IAM::AccessKey',
      Properties: {
        UserName: cf.ref('User')
      }
    }
  },
  Outputs: {
    AccessKeyId: {
      Value: cf.ref('AccessKey')
    },
    SecretAccessKey: {
      Value: cf.getAtt('AccessKey', 'SecretAccessKey')
    }
  }
};
@ -12,12 +12,12 @@ namespace contractor
|
||||
struct ContractorEdgeData
|
||||
{
|
||||
ContractorEdgeData()
|
||||
: weight(0), duration(0), distance(0), id(0), originalEdges(0), shortcut(0), forward(0),
|
||||
: weight{0}, duration{0}, distance{0}, id(0), originalEdges(0), shortcut(0), forward(0),
|
||||
backward(0)
|
||||
{
|
||||
}
|
||||
ContractorEdgeData(EdgeWeight weight,
|
||||
EdgeWeight duration,
|
||||
EdgeDuration duration,
|
||||
EdgeDistance distance,
|
||||
unsigned original_edges,
|
||||
unsigned id,
|
||||
@ -30,7 +30,7 @@ struct ContractorEdgeData
|
||||
{
|
||||
}
|
||||
EdgeWeight weight;
|
||||
EdgeWeight duration;
|
||||
EdgeDuration duration;
|
||||
EdgeDistance distance;
|
||||
unsigned id;
|
||||
unsigned originalEdges : 29;
|
||||
|
@ -29,18 +29,20 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
|
||||
|
||||
#ifndef NDEBUG
|
||||
const unsigned int constexpr DAY_IN_DECI_SECONDS = 24 * 60 * 60 * 10;
|
||||
if (static_cast<unsigned int>(std::max(input_edge.data.weight, 1)) > DAY_IN_DECI_SECONDS)
|
||||
if (from_alias<unsigned int>(std::max(input_edge.data.weight, EdgeWeight{1})) >
|
||||
DAY_IN_DECI_SECONDS)
|
||||
{
|
||||
util::Log(logWARNING) << "Edge weight large -> "
|
||||
<< static_cast<unsigned int>(std::max(input_edge.data.weight, 1))
|
||||
<< from_alias<unsigned int>(
|
||||
std::max(input_edge.data.weight, EdgeWeight{1}))
|
||||
<< " : " << static_cast<unsigned int>(input_edge.source) << " -> "
|
||||
<< static_cast<unsigned int>(input_edge.target);
|
||||
}
|
||||
#endif
|
||||
edges.emplace_back(input_edge.source,
|
||||
input_edge.target,
|
||||
std::max(input_edge.data.weight, 1),
|
||||
input_edge.data.duration,
|
||||
std::max(input_edge.data.weight, {1}),
|
||||
to_alias<EdgeDuration>(input_edge.data.duration),
|
||||
input_edge.data.distance,
|
||||
1,
|
||||
input_edge.data.turn_id,
|
||||
@ -50,8 +52,8 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
|
||||
|
||||
edges.emplace_back(input_edge.target,
|
||||
input_edge.source,
|
||||
std::max(input_edge.data.weight, 1),
|
||||
input_edge.data.duration,
|
||||
std::max(input_edge.data.weight, {1}),
|
||||
to_alias<EdgeDuration>(input_edge.data.duration),
|
||||
input_edge.data.distance,
|
||||
1,
|
||||
input_edge.data.turn_id,
|
||||
@ -109,7 +111,7 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
|
||||
// merge edges (s,t) and (t,s) into bidirectional edge
|
||||
if (forward_edge.data.weight == reverse_edge.data.weight)
|
||||
{
|
||||
if ((int)forward_edge.data.weight != INVALID_EDGE_WEIGHT)
|
||||
if (forward_edge.data.weight != INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
forward_edge.data.backward = true;
|
||||
edges[edge++] = forward_edge;
|
||||
@ -117,11 +119,11 @@ ContractorGraph toContractorGraph(NodeID number_of_nodes, InputEdgeContainer inp
|
||||
}
|
||||
else
|
||||
{ // insert separate edges
|
||||
if (((int)forward_edge.data.weight) != INVALID_EDGE_WEIGHT)
|
||||
if (forward_edge.data.weight != INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
edges[edge++] = forward_edge;
|
||||
}
|
||||
if ((int)reverse_edge.data.weight != INVALID_EDGE_WEIGHT)
|
||||
if (reverse_edge.data.weight != INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
edges[edge++] = reverse_edge;
|
||||
}
|
||||
@ -157,7 +159,7 @@ template <class Edge, typename GraphT> inline std::vector<Edge> toEdges(GraphT g
|
||||
new_edge.target = target;
|
||||
BOOST_ASSERT_MSG(SPECIAL_NODEID != new_edge.target, "Target id invalid");
|
||||
new_edge.data.weight = data.weight;
|
||||
new_edge.data.duration = data.duration;
|
||||
new_edge.data.duration = from_alias<EdgeDuration::value_type>(data.duration);
|
||||
new_edge.data.distance = data.distance;
|
||||
new_edge.data.shortcut = data.shortcut;
|
||||
new_edge.data.turn_id = data.id;
|
||||
|
@ -17,15 +17,15 @@ struct QueryEdge
|
||||
struct EdgeData
|
||||
{
|
||||
explicit EdgeData()
|
||||
: turn_id(0), shortcut(false), weight(0), duration(0), forward(false), backward(false),
|
||||
distance(0)
|
||||
: turn_id(0), shortcut(false), weight{0}, duration(0), forward(false),
|
||||
backward(false), distance{0}
|
||||
{
|
||||
}
|
||||
|
||||
EdgeData(const NodeID turn_id,
|
||||
const bool shortcut,
|
||||
const EdgeWeight weight,
|
||||
const EdgeWeight duration,
|
||||
const EdgeDuration duration,
|
||||
const EdgeDistance distance,
|
||||
const bool forward,
|
||||
const bool backward)
|
||||
@ -50,7 +50,7 @@ struct QueryEdge
|
||||
NodeID turn_id : 31;
|
||||
bool shortcut : 1;
|
||||
EdgeWeight weight;
|
||||
EdgeWeight duration : 30;
|
||||
EdgeDuration::value_type duration : 30;
|
||||
std::uint32_t forward : 1;
|
||||
std::uint32_t backward : 1;
|
||||
EdgeDistance distance;
|
||||
|
@ -61,7 +61,7 @@ class CellCustomizer
|
||||
}
|
||||
}
|
||||
heap.Clear();
|
||||
heap.Insert(source, 0, {false, 0, 0});
|
||||
heap.Insert(source, {0}, {false, {0}, {0}});
|
||||
|
||||
// explore search space
|
||||
while (!heap.Empty() && !destinations_set.empty())
|
||||
@ -216,12 +216,11 @@ class CellCustomizer
|
||||
partition.GetCell(level - 1, to)))
|
||||
{
|
||||
const EdgeWeight to_weight = weight + data.weight;
|
||||
const EdgeDuration to_duration = duration + data.duration;
|
||||
const EdgeDuration to_duration = duration + to_alias<EdgeDuration>(data.duration);
|
||||
const EdgeDistance to_distance = distance + data.distance;
|
||||
if (!heap.WasInserted(to))
|
||||
{
|
||||
heap.Insert(
|
||||
to, to_weight, {false, duration + data.duration, distance + data.distance});
|
||||
heap.Insert(to, to_weight, {false, to_duration, to_distance});
|
||||
}
|
||||
else if (std::tie(to_weight, to_duration, to_distance) <
|
||||
std::tie(
|
||||
|
@ -97,7 +97,7 @@ class MultiLevelGraph : public partitioner::MultiLevelGraph<EdgeDataT, Ownership
|
||||
|
||||
EdgeWeight GetNodeWeight(NodeID node) const { return node_weights[node]; }
|
||||
|
||||
EdgeWeight GetNodeDuration(NodeID node) const { return node_durations[node]; }
|
||||
EdgeDuration GetNodeDuration(NodeID node) const { return node_durations[node]; }
|
||||
|
||||
EdgeDistance GetNodeDistance(NodeID node) const { return node_distances[node]; }
|
||||
|
||||
|
@ -133,7 +133,8 @@ class TableAPI final : public BaseAPI
|
||||
}
|
||||
|
||||
bool have_speed_cells =
|
||||
parameters.fallback_speed != INVALID_FALLBACK_SPEED && parameters.fallback_speed > 0;
|
||||
parameters.fallback_speed != from_alias<double>(INVALID_FALLBACK_SPEED) &&
|
||||
parameters.fallback_speed > 0;
|
||||
flatbuffers::Offset<flatbuffers::Vector<uint32_t>> speed_cells;
|
||||
if (have_speed_cells)
|
||||
{
|
||||
@ -223,7 +224,8 @@ class TableAPI final : public BaseAPI
|
||||
MakeDistanceTable(tables.second, number_of_sources, number_of_destinations);
|
||||
}
|
||||
|
||||
if (parameters.fallback_speed != INVALID_FALLBACK_SPEED && parameters.fallback_speed > 0)
|
||||
if (parameters.fallback_speed != from_alias<double>(INVALID_FALLBACK_SPEED) &&
|
||||
parameters.fallback_speed > 0)
|
||||
{
|
||||
response.values["fallback_speed_cells"] = MakeEstimatesTable(fallback_speed_cells);
|
||||
}
|
||||
@ -272,17 +274,17 @@ class TableAPI final : public BaseAPI
|
||||
|
||||
virtual flatbuffers::Offset<flatbuffers::Vector<float>>
|
||||
MakeDurationTable(flatbuffers::FlatBufferBuilder &builder,
|
||||
const std::vector<EdgeWeight> &values) const
|
||||
const std::vector<EdgeDuration> &values) const
|
||||
{
|
||||
std::vector<float> distance_table;
|
||||
distance_table.resize(values.size());
|
||||
std::transform(
|
||||
values.begin(), values.end(), distance_table.begin(), [](const EdgeWeight duration) {
|
||||
values.begin(), values.end(), distance_table.begin(), [](const EdgeDuration duration) {
|
||||
if (duration == MAXIMAL_EDGE_DURATION)
|
||||
{
|
||||
return 0.;
|
||||
}
|
||||
return duration / 10.;
|
||||
return from_alias<double>(duration) / 10.;
|
||||
});
|
||||
return builder.CreateVector(distance_table);
|
||||
}
|
||||
@ -299,7 +301,7 @@ class TableAPI final : public BaseAPI
|
||||
{
|
||||
return 0.;
|
||||
}
|
||||
return std::round(distance * 10) / 10.;
|
||||
return std::round(from_alias<double>(distance) * 10) / 10.;
|
||||
});
|
||||
return builder.CreateVector(duration_table);
|
||||
}
|
||||
@ -347,7 +349,7 @@ class TableAPI final : public BaseAPI
|
||||
return json_waypoints;
|
||||
}
|
||||
|
||||
virtual util::json::Array MakeDurationTable(const std::vector<EdgeWeight> &values,
|
||||
virtual util::json::Array MakeDurationTable(const std::vector<EdgeDuration> &values,
|
||||
std::size_t number_of_rows,
|
||||
std::size_t number_of_columns) const
|
||||
{
|
||||
@ -361,13 +363,14 @@ class TableAPI final : public BaseAPI
|
||||
std::transform(row_begin_iterator,
|
||||
row_end_iterator,
|
||||
json_row.values.begin(),
|
||||
[](const EdgeWeight duration) {
|
||||
[](const EdgeDuration duration) {
|
||||
if (duration == MAXIMAL_EDGE_DURATION)
|
||||
{
|
||||
return util::json::Value(util::json::Null());
|
||||
}
|
||||
// division by 10 because the duration is in deciseconds (10s)
|
||||
return util::json::Value(util::json::Number(duration / 10.));
|
||||
return util::json::Value(
|
||||
util::json::Number(from_alias<double>(duration) / 10.));
|
||||
});
|
||||
json_table.values.push_back(std::move(json_row));
|
||||
}
|
||||
@ -394,8 +397,8 @@ class TableAPI final : public BaseAPI
|
||||
return util::json::Value(util::json::Null());
|
||||
}
|
||||
// round to single decimal place
|
||||
return util::json::Value(
|
||||
util::json::Number(std::round(distance * 10) / 10.));
|
||||
return util::json::Value(util::json::Number(
|
||||
std::round(from_alias<double>(distance) * 10) / 10.));
|
||||
});
|
||||
json_table.values.push_back(std::move(json_row));
|
||||
}
|
||||
|
@ -59,7 +59,7 @@ struct TableParameters : public BaseParameters
|
||||
{
|
||||
std::vector<std::size_t> sources;
|
||||
std::vector<std::size_t> destinations;
|
||||
double fallback_speed = INVALID_FALLBACK_SPEED;
|
||||
double fallback_speed = from_alias<double>(INVALID_FALLBACK_SPEED);
|
||||
|
||||
enum class FallbackCoordinateType
|
||||
{
|
||||
|
@ -83,7 +83,7 @@ template <> class AlgorithmDataFacade<MLD>
|
||||
|
||||
virtual EdgeWeight GetNodeWeight(const NodeID edge_based_node_id) const = 0;
|
||||
|
||||
virtual EdgeWeight
|
||||
virtual EdgeDuration
|
||||
GetNodeDuration(const NodeID edge_based_node_id) const = 0; // TODO: to be removed
|
||||
|
||||
virtual EdgeDistance GetNodeDistance(const NodeID edge_based_node_id) const = 0;
|
||||
|
@ -320,75 +320,84 @@ template <typename RTreeT, typename DataFacadeT> class GeospatialQuery
|
||||
|
||||
const auto forward_weight_offset =
|
||||
// NOLINTNEXTLINE(bugprone-fold-init-type)
|
||||
alias_cast<EdgeWeight>(
|
||||
std::accumulate(forward_weights.begin(),
|
||||
forward_weights.begin() + data.fwd_segment_position,
|
||||
EdgeWeight{0});
|
||||
SegmentWeight{0}));
|
||||
|
||||
const auto forward_duration_offset =
|
||||
// NOLINTNEXTLINE(bugprone-fold-init-type)
|
||||
alias_cast<EdgeDuration>(
|
||||
std::accumulate(forward_durations.begin(),
|
||||
forward_durations.begin() + data.fwd_segment_position,
|
||||
EdgeDuration{0});
|
||||
SegmentDuration{0}));
|
||||
|
||||
EdgeDistance forward_distance_offset = 0;
|
||||
EdgeDistance forward_distance_offset = {0};
|
||||
// Sum up the distance from the start to the fwd_segment_position
|
||||
for (auto current = forward_geometry.begin();
|
||||
current < forward_geometry.begin() + data.fwd_segment_position;
|
||||
++current)
|
||||
{
|
||||
forward_distance_offset += util::coordinate_calculation::greatCircleDistance(
|
||||
forward_distance_offset +=
|
||||
to_alias<EdgeDistance>(util::coordinate_calculation::greatCircleDistance(
|
||||
datafacade.GetCoordinateOfNode(*current),
|
||||
datafacade.GetCoordinateOfNode(*std::next(current)));
|
||||
datafacade.GetCoordinateOfNode(*std::next(current))));
|
||||
}
|
||||
|
||||
BOOST_ASSERT(data.fwd_segment_position <
|
||||
std::distance(forward_durations.begin(), forward_durations.end()));
|
||||
|
||||
EdgeWeight forward_weight = forward_weights[data.fwd_segment_position];
|
||||
EdgeDuration forward_duration = forward_durations[data.fwd_segment_position];
|
||||
EdgeDistance forward_distance = util::coordinate_calculation::greatCircleDistance(
|
||||
EdgeWeight forward_weight =
|
||||
alias_cast<EdgeWeight>(forward_weights[data.fwd_segment_position]);
|
||||
EdgeDuration forward_duration =
|
||||
alias_cast<EdgeDuration>(forward_durations[data.fwd_segment_position]);
|
||||
EdgeDistance forward_distance =
|
||||
to_alias<EdgeDistance>(util::coordinate_calculation::greatCircleDistance(
|
||||
datafacade.GetCoordinateOfNode(forward_geometry(data.fwd_segment_position)),
|
||||
point_on_segment);
|
||||
point_on_segment));
|
||||
|
||||
const auto reverse_weight_offset =
|
||||
const auto reverse_weight_offset = alias_cast<EdgeWeight>(
|
||||
std::accumulate(reverse_weights.begin(),
|
||||
reverse_weights.end() - data.fwd_segment_position - 1,
|
||||
EdgeWeight{0});
|
||||
SegmentWeight{0}));
|
||||
|
||||
const auto reverse_duration_offset =
|
||||
const auto reverse_duration_offset = alias_cast<EdgeDuration>(
|
||||
std::accumulate(reverse_durations.begin(),
|
||||
reverse_durations.end() - data.fwd_segment_position - 1,
|
||||
EdgeDuration{0});
|
||||
SegmentDuration{0}));
|
||||
|
||||
EdgeDistance reverse_distance_offset = 0;
|
||||
EdgeDistance reverse_distance_offset = {0};
|
||||
// Sum up the distance from just after the fwd_segment_position to the end
|
||||
for (auto current = forward_geometry.begin() + data.fwd_segment_position + 1;
|
||||
current != std::prev(forward_geometry.end());
|
||||
++current)
|
||||
{
|
||||
reverse_distance_offset += util::coordinate_calculation::greatCircleDistance(
|
||||
reverse_distance_offset +=
|
||||
to_alias<EdgeDistance>(util::coordinate_calculation::greatCircleDistance(
|
||||
datafacade.GetCoordinateOfNode(*current),
|
||||
datafacade.GetCoordinateOfNode(*std::next(current)));
|
||||
datafacade.GetCoordinateOfNode(*std::next(current))));
|
||||
}
|
||||
|
||||
EdgeWeight reverse_weight =
|
||||
reverse_weights[reverse_weights.size() - data.fwd_segment_position - 1];
|
||||
EdgeDuration reverse_duration =
|
||||
reverse_durations[reverse_durations.size() - data.fwd_segment_position - 1];
|
||||
EdgeDistance reverse_distance = util::coordinate_calculation::greatCircleDistance(
|
||||
EdgeWeight reverse_weight = alias_cast<EdgeWeight>(
|
||||
reverse_weights[reverse_weights.size() - data.fwd_segment_position - 1]);
|
||||
EdgeDuration reverse_duration = alias_cast<EdgeDuration>(
|
||||
reverse_durations[reverse_durations.size() - data.fwd_segment_position - 1]);
|
||||
EdgeDistance reverse_distance =
|
||||
to_alias<EdgeDistance>(util::coordinate_calculation::greatCircleDistance(
|
||||
point_on_segment,
|
||||
datafacade.GetCoordinateOfNode(forward_geometry(data.fwd_segment_position + 1)));
|
||||
datafacade.GetCoordinateOfNode(forward_geometry(data.fwd_segment_position + 1))));
|
||||
|
||||
ratio = std::min(1.0, std::max(0.0, ratio));
|
||||
if (data.forward_segment_id.id != SPECIAL_SEGMENTID)
|
||||
{
|
||||
forward_weight = static_cast<EdgeWeight>(forward_weight * ratio);
|
||||
forward_duration = static_cast<EdgeDuration>(forward_duration * ratio);
|
||||
forward_weight = to_alias<EdgeWeight>(from_alias<double>(forward_weight) * ratio);
|
||||
forward_duration = to_alias<EdgeDuration>(from_alias<double>(forward_duration) * ratio);
|
||||
}
|
||||
if (data.reverse_segment_id.id != SPECIAL_SEGMENTID)
|
||||
{
|
||||
reverse_weight -= static_cast<EdgeWeight>(reverse_weight * ratio);
|
||||
reverse_duration -= static_cast<EdgeDuration>(reverse_duration * ratio);
|
||||
reverse_weight -= to_alias<EdgeWeight>(from_alias<double>(reverse_weight) * ratio);
|
||||
reverse_duration -=
|
||||
to_alias<EdgeDuration>(from_alias<double>(reverse_duration) * ratio);
|
||||
}
|
||||
|
||||
// check phantom node segments validity
|
||||
|
@ -95,8 +95,9 @@ inline LegGeometry assembleGeometry(const datafacade::BaseDataFacade &facade,
|
||||
// the duration_of_turn/weight_of_turn value, which is 0 for
|
||||
// non-preceding-turn segments, but contains the turn value
|
||||
// for segments before a turn.
|
||||
(path_point.duration_until_turn - path_point.duration_of_turn) / 10.,
|
||||
(path_point.weight_until_turn - path_point.weight_of_turn) /
|
||||
from_alias<double>(path_point.duration_until_turn - path_point.duration_of_turn) /
|
||||
10.,
|
||||
from_alias<double>(path_point.weight_until_turn - path_point.weight_of_turn) /
|
||||
facade.GetWeightMultiplier(),
|
||||
path_point.datasource_id});
|
||||
geometry.locations.push_back(coordinate);
|
||||
@ -121,14 +122,15 @@ inline LegGeometry assembleGeometry(const datafacade::BaseDataFacade &facade,
|
||||
if (geometry.annotations.empty())
|
||||
{
|
||||
auto duration =
|
||||
std::abs(
|
||||
std::abs(from_alias<EdgeDuration::value_type>(
|
||||
(reversed_target ? target_node.reverse_duration : target_node.forward_duration) -
|
||||
(reversed_source ? source_node.reverse_duration : source_node.forward_duration)) /
|
||||
(reversed_source ? source_node.reverse_duration : source_node.forward_duration))) /
|
||||
10.;
|
||||
BOOST_ASSERT(duration >= 0);
|
||||
auto weight =
|
||||
std::abs((reversed_target ? target_node.reverse_weight : target_node.forward_weight) -
|
||||
(reversed_source ? source_node.reverse_weight : source_node.forward_weight)) /
|
||||
std::abs(from_alias<EdgeWeight::value_type>(
|
||||
(reversed_target ? target_node.reverse_weight : target_node.forward_weight) -
|
||||
(reversed_source ? source_node.reverse_weight : source_node.forward_weight))) /
|
||||
facade.GetWeightMultiplier();
|
||||
BOOST_ASSERT(weight >= 0);
|
||||
|
||||
@ -142,8 +144,11 @@ inline LegGeometry assembleGeometry(const datafacade::BaseDataFacade &facade,
|
||||
{
|
||||
geometry.annotations.emplace_back(LegGeometry::Annotation{
|
||||
current_distance,
|
||||
(reversed_target ? target_node.reverse_duration : target_node.forward_duration) / 10.,
|
||||
(reversed_target ? target_node.reverse_weight : target_node.forward_weight) /
|
||||
from_alias<double>(reversed_target ? target_node.reverse_duration
|
||||
: target_node.forward_duration) /
|
||||
10.,
|
||||
from_alias<double>(reversed_target ? target_node.reverse_weight
|
||||
: target_node.forward_weight) /
|
||||
facade.GetWeightMultiplier(),
|
||||
forward_datasources(target_node.fwd_segment_position)});
|
||||
}
|
||||
|
@ -34,7 +34,7 @@ namespace detail
|
||||
const constexpr std::size_t MAX_USED_SEGMENTS = 2;
|
||||
struct NamedSegment
|
||||
{
|
||||
EdgeWeight duration;
|
||||
EdgeDuration duration;
|
||||
std::uint32_t position;
|
||||
std::uint32_t name_id;
|
||||
};
|
||||
@ -88,7 +88,7 @@ std::array<std::uint32_t, SegmentNumber> summarizeRoute(const datafacade::BaseDa
|
||||
target_traversed_in_reverse ? target_node.reverse_duration : target_node.forward_duration;
|
||||
const auto target_node_id = target_traversed_in_reverse ? target_node.reverse_segment_id.id
|
||||
: target_node.forward_segment_id.id;
|
||||
if (target_duration > 1)
|
||||
if (target_duration > EdgeDuration{1})
|
||||
segments.push_back({target_duration, index++, facade.GetNameIndex(target_node_id)});
|
||||
// this makes sure that the segment with the lowest position comes first
|
||||
std::sort(
|
||||
@ -184,11 +184,11 @@ inline RouteLeg assembleLeg(const datafacade::BaseDataFacade &facade,
|
||||
|
||||
auto duration = std::accumulate(
|
||||
route_data.begin(), route_data.end(), 0, [](const double sum, const PathData &data) {
|
||||
return sum + data.duration_until_turn;
|
||||
return sum + from_alias<double>(data.duration_until_turn);
|
||||
});
|
||||
auto weight = std::accumulate(
|
||||
route_data.begin(), route_data.end(), 0, [](const double sum, const PathData &data) {
|
||||
return sum + data.weight_until_turn;
|
||||
return sum + from_alias<double>(data.weight_until_turn);
|
||||
});
|
||||
|
||||
// s
|
||||
@ -212,13 +212,13 @@ inline RouteLeg assembleLeg(const datafacade::BaseDataFacade &facade,
|
||||
// captured by the phantom node. So we need to add the target duration here.
|
||||
// On local segments, the target duration is already part of the duration, however.
|
||||
|
||||
duration = duration + target_duration;
|
||||
weight = weight + target_weight;
|
||||
duration = duration + from_alias<double>(target_duration);
|
||||
weight = weight + from_alias<double>(target_weight);
|
||||
if (route_data.empty())
|
||||
{
|
||||
weight -=
|
||||
(target_traversed_in_reverse ? source_node.reverse_weight : source_node.forward_weight);
|
||||
duration -= (target_traversed_in_reverse ? source_node.reverse_duration
|
||||
weight -= from_alias<double>(target_traversed_in_reverse ? source_node.reverse_weight
|
||||
: source_node.forward_weight);
|
||||
duration -= from_alias<double>(target_traversed_in_reverse ? source_node.reverse_duration
|
||||
: source_node.forward_duration);
|
||||
// use rectified linear unit function to avoid negative duration values
|
||||
// due to flooring errors in phantom snapping
|
||||
|
@ -52,7 +52,7 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
const constexpr char *NO_ROTARY_NAME = "";
|
||||
const EdgeWeight source_weight =
|
||||
source_traversed_in_reverse ? source_node.reverse_weight : source_node.forward_weight;
|
||||
const EdgeWeight source_duration =
|
||||
const EdgeDuration source_duration =
|
||||
source_traversed_in_reverse ? source_node.reverse_duration : source_node.forward_duration;
|
||||
const auto source_node_id = source_traversed_in_reverse ? source_node.reverse_segment_id.id
|
||||
: source_node.forward_segment_id.id;
|
||||
@ -61,7 +61,7 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
const auto source_mode = facade.GetTravelMode(source_node_id);
|
||||
auto source_classes = facade.GetClasses(facade.GetClassData(source_node_id));
|
||||
|
||||
const EdgeWeight target_duration =
|
||||
const EdgeDuration target_duration =
|
||||
target_traversed_in_reverse ? target_node.reverse_duration : target_node.forward_duration;
|
||||
const EdgeWeight target_weight =
|
||||
target_traversed_in_reverse ? target_node.reverse_weight : target_node.forward_weight;
|
||||
@ -103,8 +103,8 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
// but a RouteStep is with regard to the segment after the turn.
|
||||
// We need to skip the first segment because it is already covered by the
|
||||
// initial start of a route
|
||||
EdgeWeight segment_duration = 0;
|
||||
EdgeWeight segment_weight = 0;
|
||||
EdgeDuration segment_duration = {0};
|
||||
EdgeWeight segment_weight = {0};
|
||||
|
||||
// some name changes are not announced in our processing. For these, we have to keep the
|
||||
// first name on the segment
|
||||
@ -121,7 +121,7 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
: osrm::guidance::TurnInstruction::NO_TURN();
|
||||
if (turn_instruction.type != osrm::guidance::TurnType::NoTurn)
|
||||
{
|
||||
BOOST_ASSERT(segment_weight >= 0);
|
||||
BOOST_ASSERT(segment_weight >= EdgeWeight{0});
|
||||
const auto name = facade.GetNameForID(step_name_id);
|
||||
const auto ref = facade.GetRefForID(step_name_id);
|
||||
const auto pronunciation = facade.GetPronunciationForID(step_name_id);
|
||||
@ -147,9 +147,9 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
exits.to_string(),
|
||||
NO_ROTARY_NAME,
|
||||
NO_ROTARY_NAME,
|
||||
segment_duration / 10.,
|
||||
from_alias<double>(segment_duration) / 10.,
|
||||
distance,
|
||||
segment_weight / weight_multiplier,
|
||||
from_alias<double>(segment_weight) / weight_multiplier,
|
||||
travel_mode,
|
||||
maneuver,
|
||||
leg_geometry.FrontIndex(segment_index),
|
||||
@ -228,16 +228,16 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
WaypointType::None,
|
||||
0};
|
||||
segment_index++;
|
||||
segment_duration = 0;
|
||||
segment_weight = 0;
|
||||
segment_duration = {0};
|
||||
segment_weight = {0};
|
||||
}
|
||||
}
|
||||
const auto distance = leg_geometry.segment_distances[segment_index];
|
||||
const EdgeWeight duration = segment_duration + target_duration;
|
||||
const EdgeDuration duration = segment_duration + target_duration;
|
||||
const EdgeWeight weight = segment_weight + target_weight;
|
||||
// intersections contain the classes of exiting road
|
||||
intersection.classes = facade.GetClasses(facade.GetClassData(target_node_id));
|
||||
BOOST_ASSERT(duration >= 0);
|
||||
BOOST_ASSERT(duration >= EdgeDuration{0});
|
||||
steps.push_back(RouteStep{leg_data[leg_data.size() - 1].from_edge_based_node,
|
||||
step_name_id,
|
||||
is_segregated,
|
||||
@ -248,9 +248,9 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
facade.GetExitsForID(step_name_id).to_string(),
|
||||
NO_ROTARY_NAME,
|
||||
NO_ROTARY_NAME,
|
||||
duration / 10.,
|
||||
from_alias<double>(duration) / 10.,
|
||||
distance,
|
||||
weight / weight_multiplier,
|
||||
from_alias<double>(weight) / weight_multiplier,
|
||||
target_mode,
|
||||
maneuver,
|
||||
leg_geometry.FrontIndex(segment_index),
|
||||
@ -280,8 +280,9 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
|
||||
// use rectified linear unit function to avoid negative duration values
|
||||
// due to flooring errors in phantom snapping
|
||||
BOOST_ASSERT(target_duration >= source_duration || weight == 0);
|
||||
const EdgeWeight duration = std::max(0, target_duration - source_duration);
|
||||
BOOST_ASSERT(target_duration >= source_duration || weight == EdgeWeight{0});
|
||||
const EdgeDuration duration =
|
||||
std::max<EdgeDuration>({0}, target_duration - source_duration);
|
||||
|
||||
steps.push_back(RouteStep{source_node_id,
|
||||
source_name_id,
|
||||
@ -293,9 +294,9 @@ inline std::vector<RouteStep> assembleSteps(const datafacade::BaseDataFacade &fa
|
||||
facade.GetExitsForID(source_name_id).to_string(),
|
||||
NO_ROTARY_NAME,
|
||||
NO_ROTARY_NAME,
|
||||
duration / 10.,
|
||||
from_alias<double>(duration) / 10.,
|
||||
leg_geometry.segment_distances[segment_index],
|
||||
weight / weight_multiplier,
|
||||
from_alias<double>(weight) / weight_multiplier,
|
||||
source_mode,
|
||||
maneuver,
|
||||
leg_geometry.FrontIndex(segment_index),
|
||||
|
@ -37,10 +37,10 @@ struct PathData
|
||||
EdgeWeight weight_of_turn;
|
||||
// duration that is traveled on the segment until the turn is reached,
|
||||
// including a turn if the segment precedes one.
|
||||
EdgeWeight duration_until_turn;
|
||||
EdgeDuration duration_until_turn;
|
||||
// If this segment immediately precedes a turn, then duration_of_turn
|
||||
// will contain the duration of the turn. Otherwise it will be 0.
|
||||
EdgeWeight duration_of_turn;
|
||||
EdgeDuration duration_of_turn;
|
||||
// Source of the speed value on this road segment
|
||||
DatasourceID datasource_id;
|
||||
// If segment precedes a turn, ID of the turn itself
|
||||
@ -63,9 +63,9 @@ struct InternalRouteResult
|
||||
}
|
||||
|
||||
// Note: includes duration for turns, except for at start and end node.
|
||||
EdgeWeight duration() const
|
||||
EdgeDuration duration() const
|
||||
{
|
||||
EdgeWeight ret{0};
|
||||
EdgeDuration ret{0};
|
||||
|
||||
for (const auto &leg : unpacked_path_segments)
|
||||
for (const auto &segment : leg)
|
||||
|
@ -48,12 +48,12 @@ struct PhantomNode
|
||||
PhantomNode()
|
||||
: forward_segment_id{SPECIAL_SEGMENTID, false}, reverse_segment_id{SPECIAL_SEGMENTID,
|
||||
false},
|
||||
forward_weight(INVALID_EDGE_WEIGHT), reverse_weight(INVALID_EDGE_WEIGHT),
|
||||
forward_weight_offset(0), reverse_weight_offset(0),
|
||||
forward_weight(INVALID_EDGE_WEIGHT),
|
||||
reverse_weight(INVALID_EDGE_WEIGHT), forward_weight_offset{0}, reverse_weight_offset{0},
|
||||
forward_distance(INVALID_EDGE_DISTANCE), reverse_distance(INVALID_EDGE_DISTANCE),
|
||||
forward_distance_offset(0), reverse_distance_offset(0),
|
||||
forward_distance_offset{0}, reverse_distance_offset{0},
|
||||
forward_duration(MAXIMAL_EDGE_DURATION), reverse_duration(MAXIMAL_EDGE_DURATION),
|
||||
forward_duration_offset(0), reverse_duration_offset(0),
|
||||
forward_duration_offset{0}, reverse_duration_offset{0},
|
||||
component({INVALID_COMPONENTID, 0}),
|
||||
fwd_segment_position(0), is_valid_forward_source{false}, is_valid_forward_target{false},
|
||||
is_valid_reverse_source{false}, is_valid_reverse_target{false}, bearing(0)
|
||||
@ -73,13 +73,13 @@ struct PhantomNode
|
||||
return reverse_weight_offset + reverse_weight;
|
||||
}
|
||||
|
||||
EdgeWeight GetForwardDuration() const
|
||||
EdgeDuration GetForwardDuration() const
|
||||
{
|
||||
BOOST_ASSERT(forward_segment_id.enabled);
|
||||
return forward_duration + forward_duration_offset;
|
||||
}
|
||||
|
||||
EdgeWeight GetReverseDuration() const
|
||||
EdgeDuration GetReverseDuration() const
|
||||
{
|
||||
BOOST_ASSERT(reverse_segment_id.enabled);
|
||||
return reverse_duration + reverse_duration_offset;
|
||||
@ -168,10 +168,10 @@ struct PhantomNode
|
||||
EdgeDistance reverse_distance,
|
||||
EdgeDistance forward_distance_offset,
|
||||
EdgeDistance reverse_distance_offset,
|
||||
EdgeWeight forward_duration,
|
||||
EdgeWeight reverse_duration,
|
||||
EdgeWeight forward_duration_offset,
|
||||
EdgeWeight reverse_duration_offset,
|
||||
EdgeDuration forward_duration,
|
||||
EdgeDuration reverse_duration,
|
||||
EdgeDuration forward_duration_offset,
|
||||
EdgeDuration reverse_duration_offset,
|
||||
bool is_valid_forward_source,
|
||||
bool is_valid_forward_target,
|
||||
bool is_valid_reverse_source,
|
||||
@ -206,10 +206,10 @@ struct PhantomNode
|
||||
EdgeDistance reverse_distance;
|
||||
EdgeDistance forward_distance_offset; // TODO: try to remove -> requires path unpacking changes
|
||||
EdgeDistance reverse_distance_offset; // TODO: try to remove -> requires path unpacking changes
|
||||
EdgeWeight forward_duration;
|
||||
EdgeWeight reverse_duration;
|
||||
EdgeWeight forward_duration_offset; // TODO: try to remove -> requires path unpacking changes
|
||||
EdgeWeight reverse_duration_offset; // TODO: try to remove -> requires path unpacking changes
|
||||
EdgeDuration forward_duration;
|
||||
EdgeDuration reverse_duration;
|
||||
EdgeDuration forward_duration_offset; // TODO: try to remove -> requires path unpacking changes
|
||||
EdgeDuration reverse_duration_offset; // TODO: try to remove -> requires path unpacking changes
|
||||
ComponentID component;
|
||||
|
||||
util::Coordinate location; // this is the coordinate of x
|
||||
|
@ -43,14 +43,14 @@ void insertSourceInForwardHeap(Heap &forward_heap, const PhantomNode &source)
|
||||
if (source.IsValidForwardSource())
|
||||
{
|
||||
forward_heap.Insert(source.forward_segment_id.id,
|
||||
-source.GetForwardWeightPlusOffset(),
|
||||
EdgeWeight{0} - source.GetForwardWeightPlusOffset(),
|
||||
source.forward_segment_id.id);
|
||||
}
|
||||
|
||||
if (source.IsValidReverseSource())
|
||||
{
|
||||
forward_heap.Insert(source.reverse_segment_id.id,
|
||||
-source.GetReverseWeightPlusOffset(),
|
||||
EdgeWeight{0} - source.GetReverseWeightPlusOffset(),
|
||||
source.reverse_segment_id.id);
|
||||
}
|
||||
}
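The rewrites above from `-source.GetForwardWeightPlusOffset()` to `EdgeWeight{0} - source.GetForwardWeightPlusOffset()` suggest the strong alias type exposes binary arithmetic but no unary negation. A minimal, hypothetical sketch of that trade-off (toy `EdgeWeight`, not OSRM's real alias):

```cpp
#include <cstdint>
#include <iostream>

// Toy strong alias with binary minus but deliberately no unary operator-().
struct EdgeWeight
{
    std::int32_t value;
    EdgeWeight operator-(const EdgeWeight other) const
    {
        return EdgeWeight{value - other.value};
    }
};

int main()
{
    EdgeWeight offset{37};
    // EdgeWeight negated = -offset;             // would not compile: no unary minus
    EdgeWeight negated = EdgeWeight{0} - offset; // the pattern used in the diff
    std::cout << negated.value << "\n";          // prints -37
    return 0;
}
```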
|
||||
@ -127,18 +127,18 @@ void insertSourceInHeap(ManyToManyQueryHeap &heap, const PhantomNodeCandidates &
|
||||
if (phantom_node.IsValidForwardSource())
|
||||
{
|
||||
heap.Insert(phantom_node.forward_segment_id.id,
|
||||
-phantom_node.GetForwardWeightPlusOffset(),
|
||||
EdgeWeight{0} - phantom_node.GetForwardWeightPlusOffset(),
|
||||
{phantom_node.forward_segment_id.id,
|
||||
-phantom_node.GetForwardDuration(),
|
||||
-phantom_node.GetForwardDistance()});
|
||||
EdgeDuration{0} - phantom_node.GetForwardDuration(),
|
||||
EdgeDistance{0} - phantom_node.GetForwardDistance()});
|
||||
}
|
||||
if (phantom_node.IsValidReverseSource())
|
||||
{
|
||||
heap.Insert(phantom_node.reverse_segment_id.id,
|
||||
-phantom_node.GetReverseWeightPlusOffset(),
|
||||
EdgeWeight{0} - phantom_node.GetReverseWeightPlusOffset(),
|
||||
{phantom_node.reverse_segment_id.id,
|
||||
-phantom_node.GetReverseDuration(),
|
||||
-phantom_node.GetReverseDistance()});
|
||||
EdgeDuration{0} - phantom_node.GetReverseDuration(),
|
||||
EdgeDistance{0} - phantom_node.GetReverseDistance()});
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -251,13 +251,12 @@ void annotatePath(const FacadeT &facade,
|
||||
BOOST_ASSERT(start_index < end_index);
|
||||
for (std::size_t segment_idx = start_index; segment_idx < end_index; ++segment_idx)
|
||||
{
|
||||
unpacked_path.push_back(
|
||||
PathData{node_id,
|
||||
unpacked_path.push_back(PathData{node_id,
|
||||
id_vector[segment_idx + 1],
|
||||
static_cast<EdgeWeight>(weight_vector[segment_idx]),
|
||||
0,
|
||||
static_cast<EdgeDuration>(duration_vector[segment_idx]),
|
||||
0,
|
||||
alias_cast<EdgeWeight>(weight_vector[segment_idx]),
|
||||
{0},
|
||||
alias_cast<EdgeDuration>(duration_vector[segment_idx]),
|
||||
{0},
|
||||
datasource_vector[segment_idx],
|
||||
boost::none});
|
||||
}
|
||||
@ -266,10 +265,10 @@ void annotatePath(const FacadeT &facade,
|
||||
const auto turn_duration = facade.GetDurationPenaltyForEdgeID(turn_id);
|
||||
const auto turn_weight = facade.GetWeightPenaltyForEdgeID(turn_id);
|
||||
|
||||
unpacked_path.back().duration_until_turn += turn_duration;
|
||||
unpacked_path.back().duration_of_turn = turn_duration;
|
||||
unpacked_path.back().weight_until_turn += turn_weight;
|
||||
unpacked_path.back().weight_of_turn = turn_weight;
|
||||
unpacked_path.back().duration_until_turn += alias_cast<EdgeDuration>(turn_duration);
|
||||
unpacked_path.back().duration_of_turn = alias_cast<EdgeDuration>(turn_duration);
|
||||
unpacked_path.back().weight_until_turn += alias_cast<EdgeWeight>(turn_weight);
|
||||
unpacked_path.back().weight_of_turn = alias_cast<EdgeWeight>(turn_weight);
|
||||
unpacked_path.back().turn_edge = turn_id;
|
||||
}
|
||||
|
||||
@ -311,10 +310,10 @@ void annotatePath(const FacadeT &facade,
|
||||
unpacked_path.push_back(
|
||||
PathData{target_node_id,
|
||||
id_vector[start_index < end_index ? segment_idx + 1 : segment_idx - 1],
|
||||
static_cast<EdgeWeight>(weight_vector[segment_idx]),
|
||||
0,
|
||||
static_cast<EdgeDuration>(duration_vector[segment_idx]),
|
||||
0,
|
||||
alias_cast<EdgeWeight>(weight_vector[segment_idx]),
|
||||
{0},
|
||||
alias_cast<EdgeDuration>(duration_vector[segment_idx]),
|
||||
{0},
|
||||
datasource_vector[segment_idx],
|
||||
boost::none});
|
||||
}
|
||||
@ -341,9 +340,9 @@ void annotatePath(const FacadeT &facade,
|
||||
// node to the first turn would be the same as from end to end of a segment,
|
||||
// which is obviously incorrect and not ideal...
|
||||
unpacked_path.front().weight_until_turn =
|
||||
std::max(unpacked_path.front().weight_until_turn - source_weight, 0);
|
||||
std::max(unpacked_path.front().weight_until_turn - source_weight, {0});
|
||||
unpacked_path.front().duration_until_turn =
|
||||
std::max(unpacked_path.front().duration_until_turn - source_duration, 0);
|
||||
std::max(unpacked_path.front().duration_until_turn - source_duration, {0});
|
||||
}
|
||||
}
|
||||
|
||||
@ -410,7 +409,7 @@ template <typename FacadeT> EdgeDistance computeEdgeDistance(const FacadeT &faca
|
||||
{
|
||||
const auto geometry_index = facade.GetGeometryIndex(node_id);
|
||||
|
||||
EdgeDistance total_distance = 0.0;
|
||||
EdgeDistance total_distance = {0};
|
||||
|
||||
auto geometry_range = facade.GetUncompressedForwardGeometry(geometry_index.id);
|
||||
for (auto current = geometry_range.begin(); current < geometry_range.end() - 1; ++current)
|
||||
|
@ -34,7 +34,7 @@ bool stallAtNode(const DataFacade<Algorithm> &facade,
|
||||
{
|
||||
const NodeID to = facade.GetTarget(edge);
|
||||
const EdgeWeight edge_weight = data.weight;
|
||||
BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
|
||||
BOOST_ASSERT_MSG(edge_weight > EdgeWeight{0}, "edge_weight invalid");
|
||||
const auto toHeapNode = query_heap.GetHeapNodeIfWasInserted(to);
|
||||
if (toHeapNode)
|
||||
{
|
||||
@ -61,7 +61,7 @@ void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
|
||||
const NodeID to = facade.GetTarget(edge);
|
||||
const EdgeWeight edge_weight = data.weight;
|
||||
|
||||
BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
|
||||
BOOST_ASSERT_MSG(edge_weight > EdgeWeight{0}, "edge_weight invalid");
|
||||
const EdgeWeight to_weight = heapNode.weight + edge_weight;
|
||||
|
||||
const auto toHeapNode = heap.GetHeapNodeIfWasInserted(to);
|
||||
@ -135,7 +135,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
force_loop(force_loop_reverse_nodes, heapNode) ||
|
||||
// in this case we are looking at a bi-directional way where the source
|
||||
// and target phantom are on the same edge based node
|
||||
new_weight < 0)
|
||||
new_weight < EdgeWeight{0})
|
||||
{
|
||||
// check whether there is a loop present at the node
|
||||
for (const auto edge : facade.GetAdjacentEdgeRange(heapNode.node))
|
||||
@ -148,7 +148,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
{
|
||||
const EdgeWeight edge_weight = data.weight;
|
||||
const EdgeWeight loop_weight = new_weight + edge_weight;
|
||||
if (loop_weight >= 0 && loop_weight < upper_bound)
|
||||
if (loop_weight >= EdgeWeight{0} && loop_weight < upper_bound)
|
||||
{
|
||||
middle_node_id = heapNode.node;
|
||||
upper_bound = loop_weight;
|
||||
@ -159,7 +159,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
}
|
||||
else
|
||||
{
|
||||
BOOST_ASSERT(new_weight >= 0);
|
||||
BOOST_ASSERT(new_weight >= EdgeWeight{0});
|
||||
|
||||
middle_node_id = heapNode.node;
|
||||
upper_bound = new_weight;
|
||||
@ -169,7 +169,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
|
||||
// make sure we don't terminate too early if we initialize the weight
|
||||
// for the nodes in the forward heap with the forward/reverse offset
|
||||
BOOST_ASSERT(min_edge_offset <= 0);
|
||||
BOOST_ASSERT(min_edge_offset <= EdgeWeight{0});
|
||||
if (heapNode.weight + min_edge_offset > upper_bound)
|
||||
{
|
||||
forward_heap.DeleteAll();
|
||||
@ -185,31 +185,6 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
relaxOutgoingEdges<DIRECTION>(facade, heapNode, forward_heap);
|
||||
}
|
||||
|
||||
template <bool UseDuration>
|
||||
std::tuple<EdgeWeight, EdgeDistance> getLoopWeight(const DataFacade<Algorithm> &facade, NodeID node)
|
||||
{
|
||||
EdgeWeight loop_weight = UseDuration ? MAXIMAL_EDGE_DURATION : INVALID_EDGE_WEIGHT;
|
||||
EdgeDistance loop_distance = MAXIMAL_EDGE_DISTANCE;
|
||||
for (auto edge : facade.GetAdjacentEdgeRange(node))
|
||||
{
|
||||
const auto &data = facade.GetEdgeData(edge);
|
||||
if (data.forward)
|
||||
{
|
||||
const NodeID to = facade.GetTarget(edge);
|
||||
if (to == node)
|
||||
{
|
||||
const auto value = UseDuration ? data.duration : data.weight;
|
||||
if (value < loop_weight)
|
||||
{
|
||||
loop_weight = value;
|
||||
loop_distance = data.distance;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return std::make_tuple(loop_weight, loop_distance);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a sequence of connected `NodeID`s in the CH graph, performs a depth-first unpacking of
|
||||
* the shortcut
|
||||
@ -301,7 +276,7 @@ EdgeDistance calculateEBGNodeAnnotations(const DataFacade<Algorithm> &facade,
|
||||
// Make sure we have at least something to unpack
|
||||
if (packed_path_begin == packed_path_end ||
|
||||
std::distance(packed_path_begin, packed_path_end) <= 1)
|
||||
return 0;
|
||||
return {0};
|
||||
|
||||
std::stack<std::tuple<NodeID, NodeID, bool>> recursion_stack;
|
||||
std::stack<EdgeDistance> distance_stack;
|
||||
@ -383,7 +358,7 @@ EdgeDistance calculateEBGNodeAnnotations(const DataFacade<Algorithm> &facade,
|
||||
}
|
||||
}
|
||||
|
||||
EdgeDistance total_distance = 0;
|
||||
EdgeDistance total_distance = {0};
|
||||
while (!distance_stack.empty())
|
||||
{
|
||||
total_distance += distance_stack.top();
|
||||
@ -505,8 +480,48 @@ double getNetworkDistance(SearchEngineData<Algorithm> &engine_working_data,
|
||||
SearchEngineData<Algorithm>::QueryHeap &reverse_heap,
|
||||
const PhantomNode &source_phantom,
|
||||
const PhantomNode &target_phantom,
|
||||
int duration_upper_bound = INVALID_EDGE_WEIGHT);
|
||||
EdgeWeight duration_upper_bound = INVALID_EDGE_WEIGHT);
|
||||
|
||||
template <typename EdgeMetric>
|
||||
std::tuple<EdgeMetric, EdgeDistance> getLoopMetric(const DataFacade<Algorithm> &facade, NodeID node)
|
||||
{
|
||||
EdgeMetric loop_metric;
|
||||
if constexpr (std::is_same<EdgeMetric, EdgeDuration>::value)
|
||||
{
|
||||
loop_metric = INVALID_EDGE_DURATION;
|
||||
}
|
||||
else
|
||||
{
|
||||
loop_metric = INVALID_EDGE_WEIGHT;
|
||||
}
|
||||
EdgeDistance loop_distance = MAXIMAL_EDGE_DISTANCE;
|
||||
for (auto edge : facade.GetAdjacentEdgeRange(node))
|
||||
{
|
||||
const auto &data = facade.GetEdgeData(edge);
|
||||
if (data.forward)
|
||||
{
|
||||
const NodeID to = facade.GetTarget(edge);
|
||||
if (to == node)
|
||||
{
|
||||
EdgeMetric value;
|
||||
if constexpr (std::is_same<EdgeMetric, EdgeDuration>::value)
|
||||
{
|
||||
value = to_alias<EdgeDuration>(data.duration);
|
||||
}
|
||||
else
|
||||
{
|
||||
value = data.weight;
|
||||
}
|
||||
if (value < loop_metric)
|
||||
{
|
||||
loop_metric = value;
|
||||
loop_distance = data.distance;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return std::make_tuple(loop_metric, loop_distance);
|
||||
}
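The new `getLoopMetric` above selects the metric field and its invalid sentinel at compile time with `if constexpr`. A minimal, self-contained sketch of that dispatch pattern, using toy types rather than OSRM's facade API:

```cpp
#include <cstdint>
#include <iostream>
#include <type_traits>

// Toy stand-ins; OSRM's real EdgeWeight/EdgeDuration are strong aliases.
struct EdgeWeight { std::int32_t value; };
struct EdgeDuration { std::int32_t value; };

struct EdgeData
{
    std::int32_t weight;
    std::int32_t duration;
};

// Same shape as getLoopMetric: choose which field to read based on the
// requested metric type, resolved entirely at compile time.
template <typename EdgeMetric> EdgeMetric pickMetric(const EdgeData &data)
{
    if constexpr (std::is_same<EdgeMetric, EdgeDuration>::value)
    {
        return EdgeDuration{data.duration};
    }
    else
    {
        return EdgeWeight{data.weight};
    }
}

int main()
{
    const EdgeData data{42, 130};
    std::cout << pickMetric<EdgeWeight>(data).value << "\n";   // 42
    std::cout << pickMetric<EdgeDuration>(data).value << "\n"; // 130
    return 0;
}
```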
|
||||
} // namespace ch
|
||||
} // namespace routing_algorithms
|
||||
} // namespace engine
|
||||
|
@ -363,7 +363,8 @@ void relaxOutgoingEdges(const DataFacade<Algorithm> &facade,
|
||||
|
||||
// TODO: BOOST_ASSERT(edge_data.weight == node_weight + turn_penalty);
|
||||
|
||||
const EdgeWeight to_weight = heapNode.weight + node_weight + turn_penalty;
|
||||
const EdgeWeight to_weight =
|
||||
heapNode.weight + node_weight + alias_cast<EdgeWeight>(turn_penalty);
|
||||
|
||||
const auto toHeapNode = forward_heap.GetHeapNodeIfWasInserted(to);
|
||||
if (!toHeapNode)
|
||||
@ -410,7 +411,7 @@ void routingStep(const DataFacade<Algorithm> &facade,
|
||||
// MLD uses loops forcing only to prune single node paths in forward and/or
|
||||
// backward direction (there is no need to force loops in MLD but in CH)
|
||||
if (!force_loop(force_loop_forward_nodes, heapNode) &&
|
||||
!force_loop(force_loop_reverse_nodes, heapNode) && (path_weight >= 0) &&
|
||||
!force_loop(force_loop_reverse_nodes, heapNode) && (path_weight >= EdgeWeight{0}) &&
|
||||
(path_weight < path_upper_bound))
|
||||
{
|
||||
middle_node = heapNode.node;
|
||||
@ -529,8 +530,8 @@ UnpackedPath search(SearchEngineData<Algorithm> &engine_working_data,
|
||||
// Here heaps can be reused, let's go deeper!
|
||||
forward_heap.Clear();
|
||||
reverse_heap.Clear();
|
||||
forward_heap.Insert(source, 0, {source});
|
||||
reverse_heap.Insert(target, 0, {target});
|
||||
forward_heap.Insert(source, {0}, {source});
|
||||
reverse_heap.Insert(target, {0}, {target});
|
||||
|
||||
// TODO: when structured bindings will be allowed change to
|
||||
// auto [subpath_weight, subpath_source, subpath_target, subpath] = ...
|
||||
|
@ -292,7 +292,7 @@ shortestPathWithWaypointUTurns(SearchEngineData<Algorithm> &engine_working_data,
|
||||
const std::vector<PhantomNodeCandidates> &waypoint_candidates)
|
||||
{
|
||||
|
||||
EdgeWeight total_weight = 0;
|
||||
EdgeWeight total_weight = {0};
|
||||
std::vector<NodeID> total_packed_path;
|
||||
std::vector<std::size_t> packed_leg_begin;
|
||||
|
||||
@ -467,8 +467,8 @@ struct route_state
|
||||
route_state(const PhantomNodeCandidates &init_candidates)
|
||||
: current_leg(0), previous_leg_path_offset(0)
|
||||
{
|
||||
last.total_weight_to_forward.resize(init_candidates.size(), 0);
|
||||
last.total_weight_to_reverse.resize(init_candidates.size(), 0);
|
||||
last.total_weight_to_forward.resize(init_candidates.size(), {0});
|
||||
last.total_weight_to_reverse.resize(init_candidates.size(), {0});
|
||||
// Initialize routability from source validity.
|
||||
std::transform(
|
||||
init_candidates.begin(),
|
||||
|
@ -23,7 +23,7 @@ struct TurnData final
|
||||
const int in_angle;
|
||||
const int turn_angle;
|
||||
const EdgeWeight weight;
|
||||
const EdgeWeight duration;
|
||||
const EdgeDuration duration;
|
||||
const guidance::TurnInstruction turn_instruction;
|
||||
};
|
||||
|
||||
|
@ -29,9 +29,9 @@ struct HeapData
|
||||
|
||||
struct ManyToManyHeapData : HeapData
|
||||
{
|
||||
EdgeWeight duration;
|
||||
EdgeDuration duration;
|
||||
EdgeDistance distance;
|
||||
ManyToManyHeapData(NodeID p, EdgeWeight duration, EdgeDistance distance)
|
||||
ManyToManyHeapData(NodeID p, EdgeDuration duration, EdgeDistance distance)
|
||||
: HeapData(p), duration(duration), distance(distance)
|
||||
{
|
||||
}
|
||||
@ -78,15 +78,15 @@ struct MultiLayerDijkstraHeapData
|
||||
|
||||
struct ManyToManyMultiLayerDijkstraHeapData : MultiLayerDijkstraHeapData
|
||||
{
|
||||
EdgeWeight duration;
|
||||
EdgeDuration duration;
|
||||
EdgeDistance distance;
|
||||
ManyToManyMultiLayerDijkstraHeapData(NodeID p, EdgeWeight duration, EdgeDistance distance)
|
||||
ManyToManyMultiLayerDijkstraHeapData(NodeID p, EdgeDuration duration, EdgeDistance distance)
|
||||
: MultiLayerDijkstraHeapData(p), duration(duration), distance(distance)
|
||||
{
|
||||
}
|
||||
ManyToManyMultiLayerDijkstraHeapData(NodeID p,
|
||||
bool from,
|
||||
EdgeWeight duration,
|
||||
EdgeDuration duration,
|
||||
EdgeDistance distance)
|
||||
: MultiLayerDijkstraHeapData(p, from), duration(duration), distance(distance)
|
||||
{
|
||||
|
@ -23,12 +23,12 @@ namespace trip
|
||||
{
|
||||
|
||||
// computes the distance of a given permutation
|
||||
inline EdgeWeight ReturnDistance(const util::DistTableWrapper<EdgeWeight> &dist_table,
|
||||
inline EdgeDuration ReturnDistance(const util::DistTableWrapper<EdgeDuration> &dist_table,
|
||||
const std::vector<NodeID> &location_order,
|
||||
const EdgeWeight min_route_dist,
|
||||
const EdgeDuration min_route_dist,
|
||||
const std::size_t number_of_locations)
|
||||
{
|
||||
EdgeWeight route_dist = 0;
|
||||
EdgeDuration route_dist = {0};
|
||||
std::size_t current_index = 0;
|
||||
while (current_index < location_order.size() && (route_dist < min_route_dist))
|
||||
{
|
||||
@ -36,12 +36,13 @@ inline EdgeWeight ReturnDistance(const util::DistTableWrapper<EdgeWeight> &dist_
|
||||
std::size_t next_index = (current_index + 1) % number_of_locations;
|
||||
auto edge_weight = dist_table(location_order[current_index], location_order[next_index]);
|
||||
|
||||
// If the edge_weight is very large (INVALID_EDGE_WEIGHT) then the algorithm will not choose
|
||||
// this edge in final minimal path. So instead of computing all the permutations after this
|
||||
// large edge, discard this edge right here and don't consider the path after this edge.
|
||||
if (edge_weight == INVALID_EDGE_WEIGHT)
|
||||
// If the edge_weight is very large (INVALID_EDGE_DURATION) then the algorithm will not
|
||||
// choose this edge in final minimal path. So instead of computing all the permutations
|
||||
// after this large edge, discard this edge right here and don't consider the path after
|
||||
// this edge.
|
||||
if (edge_weight == INVALID_EDGE_DURATION)
|
||||
{
|
||||
return INVALID_EDGE_WEIGHT;
|
||||
return INVALID_EDGE_DURATION;
|
||||
}
|
||||
else
|
||||
{
|
||||
@ -50,7 +51,7 @@ inline EdgeWeight ReturnDistance(const util::DistTableWrapper<EdgeWeight> &dist_
|
||||
|
||||
// This boost assert should not be reached if TFSE table
|
||||
BOOST_ASSERT_MSG(dist_table(location_order[current_index], location_order[next_index]) !=
|
||||
INVALID_EDGE_WEIGHT,
|
||||
INVALID_EDGE_DURATION,
|
||||
"invalid route found");
|
||||
++current_index;
|
||||
}
|
||||
@ -60,14 +61,14 @@ inline EdgeWeight ReturnDistance(const util::DistTableWrapper<EdgeWeight> &dist_
|
||||
|
||||
// computes the route by computing all permutations and selecting the shortest
|
||||
inline std::vector<NodeID> BruteForceTrip(const std::size_t number_of_locations,
|
||||
const util::DistTableWrapper<EdgeWeight> &dist_table)
|
||||
const util::DistTableWrapper<EdgeDuration> &dist_table)
|
||||
{
|
||||
// set initial order in which nodes are visited to 0, 1, 2, 3, ...
|
||||
std::vector<NodeID> node_order(number_of_locations);
|
||||
std::iota(std::begin(node_order), std::end(node_order), 0);
|
||||
std::vector<NodeID> route = node_order;
|
||||
|
||||
EdgeWeight min_route_dist = INVALID_EDGE_WEIGHT;
|
||||
EdgeDuration min_route_dist = INVALID_EDGE_DURATION;
|
||||
|
||||
// check length of all possible permutation of the component ids
|
||||
BOOST_ASSERT_MSG(node_order.size() > 0, "no order permutation given");
|
||||
|
@ -23,15 +23,15 @@ namespace trip
|
||||
// given a route and a new location, find the best place of insertion and
|
||||
// check the distance of roundtrip when the new location is additionally visited
|
||||
using NodeIDIter = std::vector<NodeID>::iterator;
|
||||
inline std::pair<EdgeWeight, NodeIDIter>
|
||||
inline std::pair<EdgeDuration, NodeIDIter>
|
||||
GetShortestRoundTrip(const NodeID new_loc,
|
||||
const util::DistTableWrapper<EdgeWeight> &dist_table,
|
||||
const util::DistTableWrapper<EdgeDuration> &dist_table,
|
||||
const std::size_t number_of_locations,
|
||||
std::vector<NodeID> &route)
|
||||
{
|
||||
(void)number_of_locations; // unused
|
||||
|
||||
auto min_trip_distance = INVALID_EDGE_WEIGHT;
|
||||
auto min_trip_distance = INVALID_EDGE_DURATION;
|
||||
NodeIDIter next_insert_point_candidate;
|
||||
|
||||
// for all nodes in the current trip find the best insertion resulting in the shortest path
|
||||
@ -48,10 +48,11 @@ GetShortestRoundTrip(const NodeID new_loc,
|
||||
|
||||
const auto dist_from = dist_table(*from_node, new_loc);
|
||||
const auto dist_to = dist_table(new_loc, *to_node);
|
||||
// If the edge_weight is very large (INVALID_EDGE_WEIGHT) then the algorithm will not choose
|
||||
// this edge in final minimal path. So instead of computing all the permutations after this
|
||||
// large edge, discard this edge right here and don't consider the path after this edge.
|
||||
if (dist_from == INVALID_EDGE_WEIGHT || dist_to == INVALID_EDGE_WEIGHT)
|
||||
// If the edge_weight is very large (INVALID_EDGE_DURATION) then the algorithm will not
|
||||
// choose this edge in final minimal path. So instead of computing all the permutations
|
||||
// after this large edge, discard this edge right here and don't consider the path after
|
||||
// this edge.
|
||||
if (dist_from == INVALID_EDGE_DURATION || dist_to == INVALID_EDGE_DURATION)
|
||||
continue;
|
||||
|
||||
const auto trip_dist = dist_from + dist_to - dist_table(*from_node, *to_node);
|
||||
@ -71,14 +72,14 @@ GetShortestRoundTrip(const NodeID new_loc,
|
||||
next_insert_point_candidate = to_node;
|
||||
}
|
||||
}
|
||||
BOOST_ASSERT_MSG(min_trip_distance != INVALID_EDGE_WEIGHT, "trip has invalid edge weight");
|
||||
BOOST_ASSERT_MSG(min_trip_distance != INVALID_EDGE_DURATION, "trip has invalid edge weight");
|
||||
|
||||
return std::make_pair(min_trip_distance, next_insert_point_candidate);
|
||||
}
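`GetShortestRoundTrip` above scores each candidate insertion point with the classic insertion cost dist(from, new) + dist(new, to) - dist(from, to). A small illustration of that cost on a toy distance table (hypothetical values, plain integers instead of the EdgeDuration alias):

```cpp
#include <array>
#include <cstdint>
#include <iostream>
#include <utility>

int main()
{
    // Toy symmetric distance table for existing stops 0..2 and a new stop 3.
    constexpr std::array<std::array<std::int64_t, 4>, 4> dist{{
        {0, 10, 25, 12},
        {10, 0, 20, 9},
        {25, 20, 0, 30},
        {12, 9, 30, 0},
    }};

    // Insertion cost of placing node 3 between consecutive stops (from, to):
    // dist(from, 3) + dist(3, to) - dist(from, to), as in GetShortestRoundTrip.
    const std::array<std::pair<int, int>, 3> edges{{{0, 1}, {1, 2}, {2, 0}}};
    for (const auto &edge : edges)
    {
        const int from = edge.first;
        const int to = edge.second;
        const auto cost = dist[from][3] + dist[3][to] - dist[from][to];
        std::cout << "insert between " << from << " and " << to << ": +" << cost << "\n";
    }
    return 0;
}
```

The candidate with the smallest added cost wins, which is exactly the comparison the surrounding loop performs before returning `next_insert_point_candidate`.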
|
||||
|
||||
// given two initial start nodes, find a roundtrip route using the farthest insertion algorithm
|
||||
inline std::vector<NodeID> FindRoute(const std::size_t &number_of_locations,
|
||||
const util::DistTableWrapper<EdgeWeight> &dist_table,
|
||||
const util::DistTableWrapper<EdgeDuration> &dist_table,
|
||||
const NodeID &start1,
|
||||
const NodeID &start2)
|
||||
{
|
||||
@ -99,7 +100,7 @@ inline std::vector<NodeID> FindRoute(const std::size_t &number_of_locations,
|
||||
// two nodes are already in the initial start trip, so we need to add all other nodes
|
||||
for (std::size_t added_nodes = 2; added_nodes < number_of_locations; ++added_nodes)
|
||||
{
|
||||
auto farthest_distance = std::numeric_limits<int>::min();
|
||||
auto farthest_distance = EdgeDuration{std::numeric_limits<EdgeDuration::value_type>::min()};
|
||||
auto next_node = -1;
|
||||
NodeIDIter next_insert_point;
|
||||
|
||||
@ -112,7 +113,7 @@ inline std::vector<NodeID> FindRoute(const std::size_t &number_of_locations,
|
||||
const auto insert_candidate =
|
||||
GetShortestRoundTrip(id, dist_table, number_of_locations, route);
|
||||
|
||||
BOOST_ASSERT_MSG(insert_candidate.first != INVALID_EDGE_WEIGHT,
|
||||
BOOST_ASSERT_MSG(insert_candidate.first != INVALID_EDGE_DURATION,
|
||||
"shortest round trip is invalid");
|
||||
|
||||
// add the location to the current trip such that it results in the shortest total
|
||||
@ -137,7 +138,7 @@ inline std::vector<NodeID> FindRoute(const std::size_t &number_of_locations,
|
||||
|
||||
inline std::vector<NodeID>
|
||||
FarthestInsertionTrip(const std::size_t number_of_locations,
|
||||
const util::DistTableWrapper<EdgeWeight> &dist_table)
|
||||
const util::DistTableWrapper<EdgeDuration> &dist_table)
|
||||
{
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// START FARTHEST INSERTION HERE
|
||||
|
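For context, the insertion step above evaluates every existing edge (from, to) of the current round trip and measures how much the trip grows when the new location is spliced in between them, skipping table entries that are invalid. A minimal standalone sketch of that cost computation, using a hypothetical dense matrix type in place of OSRM's DistTableWrapper:

// Sketch only: `Matrix` and `kInvalid` are hypothetical stand-ins for
// util::DistTableWrapper<EdgeDuration> and INVALID_EDGE_DURATION.
#include <cstdint>
#include <limits>
#include <optional>
#include <vector>

using Duration = std::int64_t;
constexpr Duration kInvalid = std::numeric_limits<Duration>::max();

struct Matrix
{
    std::size_t n;
    std::vector<Duration> data;
    Duration operator()(std::size_t from, std::size_t to) const { return data[from * n + to]; }
};

// Cost of inserting `new_loc` between consecutive trip stops `from` and `to`:
// d(from, new) + d(new, to) - d(from, to). Invalid entries are skipped, mirroring
// the `continue` in GetShortestRoundTrip above.
std::optional<Duration>
insertionCost(const Matrix &table, std::size_t from, std::size_t to, std::size_t new_loc)
{
    const auto d_from = table(from, new_loc);
    const auto d_to = table(new_loc, to);
    if (d_from == kInvalid || d_to == kInvalid)
        return std::nullopt;
    return d_from + d_to - table(from, to);
}
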
@ -44,8 +44,8 @@ class CompressedEdgeContainer

void AddUncompressedEdge(const EdgeID edge_id,
const NodeID target_node,
const SegmentWeight weight,
const SegmentWeight duration);
const EdgeWeight weight,
const EdgeDuration duration);

void InitializeBothwayVector();
unsigned ZipEdges(const unsigned f_edge_pos, const unsigned r_edge_pos);
@ -67,8 +67,8 @@ class CompressedEdgeContainer
std::unique_ptr<SegmentDataContainer> ToSegmentData();

private:
SegmentWeight ClipWeight(const SegmentWeight weight);
SegmentDuration ClipDuration(const SegmentDuration duration);
SegmentWeight ClipWeight(const EdgeWeight weight);
SegmentDuration ClipDuration(const EdgeDuration duration);

int free_list_maximum = 0;
std::atomic_size_t clipped_weights{0};

@ -16,14 +16,14 @@ struct EdgeBasedEdge
struct EdgeData
{
EdgeData()
: turn_id(0), weight(0), distance(0), duration(0), forward(false), backward(false)
: turn_id(0), weight{0}, distance{0}, duration(0), forward(false), backward(false)
{
}

EdgeData(const NodeID turn_id,
const EdgeWeight weight,
const EdgeDistance distance,
const EdgeWeight duration,
const EdgeDuration duration,
const bool forward,
const bool backward)
: turn_id(turn_id), weight(weight), distance(distance), duration(duration),
@ -34,7 +34,7 @@ struct EdgeBasedEdge
NodeID turn_id; // ID of the edge based node (node based edge)
EdgeWeight weight;
EdgeDistance distance;
EdgeWeight duration : 30;
EdgeDuration::value_type duration : 30;
std::uint32_t forward : 1;
std::uint32_t backward : 1;

@ -47,7 +47,7 @@ struct EdgeBasedEdge
const NodeID target,
const NodeID edge_id,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDuration duration,
const EdgeDistance distance,
const bool forward,
const bool backward);
@ -72,7 +72,7 @@ inline EdgeBasedEdge::EdgeBasedEdge(const NodeID source,
const NodeID target,
const NodeID turn_id,
const EdgeWeight weight,
const EdgeWeight duration,
const EdgeDuration duration,
const EdgeDistance distance,
const bool forward,
const bool backward)

@ -93,7 +93,7 @@ class EdgeBasedGraphFactory
void GetEdgeBasedEdges(util::DeallocatingVector<EdgeBasedEdge> &edges);
void GetEdgeBasedNodeSegments(std::vector<EdgeBasedNodeSegment> &nodes);
void GetEdgeBasedNodeWeights(std::vector<EdgeWeight> &output_node_weights);
void GetEdgeBasedNodeDurations(std::vector<EdgeWeight> &output_node_durations);
void GetEdgeBasedNodeDurations(std::vector<EdgeDuration> &output_node_durations);
void GetEdgeBasedNodeDistances(std::vector<EdgeDistance> &output_node_distances);
std::uint32_t GetConnectivityChecksum() const;

@ -63,7 +63,7 @@ struct InternalExtractorEdge
WeightData weight_data,
DurationData duration_data,
util::Coordinate source_coordinate)
: result(source, target, 0, 0, 0, {}, -1, {}), weight_data(weight_data),
: result(source, target, {0}, {0}, {0}, {}, -1, {}), weight_data(weight_data),
duration_data(duration_data), source_coordinate(source_coordinate)
{
}

@ -141,7 +141,7 @@ inline NodeBasedEdgeClassification::NodeBasedEdgeClassification()
}

inline NodeBasedEdge::NodeBasedEdge()
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), weight(0), duration(0), distance(0),
: source(SPECIAL_NODEID), target(SPECIAL_NODEID), weight{0}, duration{0}, distance{0},
annotation_data(-1)
{
}

@ -6,8 +6,8 @@
#include "util/typedefs.hpp"

#include <boost/range/adaptor/filtered.hpp>
#include <boost/unordered_map.hpp>

#include <unordered_map>
#include <utility>
#include <vector>

@ -114,7 +114,7 @@ struct ProfileProperties

double GetMaxTurnWeight() const
{
return std::numeric_limits<TurnPenalty>::max() / GetWeightMultiplier();
return from_alias<double>(MAXIMAL_TURN_PENALTY) / GetWeightMultiplier();
}

//! penalty to cross a traffic light in deci-seconds

@ -2,11 +2,13 @@
#define OSRM_EXTRACTOR_RESTRICTION_GRAPH_HPP_

#include <boost/assert.hpp>
#include <boost/unordered_map.hpp>

#include "util/node_based_graph.hpp"
#include "util/std_hash.hpp"
#include "util/typedefs.hpp"

#include <unordered_map>

namespace osrm
{
namespace extractor
@ -112,10 +114,10 @@ struct RestrictionGraph
RestrictionRange GetRestrictions(RestrictionID id) const;

// A compressed node-based edge can only have one start node in the restriction graph.
boost::unordered_map<EdgeKey, RestrictionID> start_edge_to_node{};
std::unordered_map<EdgeKey, RestrictionID> start_edge_to_node{};
// A compressed node-based edge can have multiple via nodes in the restriction graph
// (as the compressed edge can appear in paths with different prefixes).
boost::unordered_multimap<EdgeKey, RestrictionID> via_edge_to_node{};
std::unordered_multimap<EdgeKey, RestrictionID> via_edge_to_node{};
std::vector<RestrictionNode> nodes;
// TODO: Investigate reusing DynamicGraph. Currently it requires specific attributes
// (e.g. reversed, weight) that would not make sense for restrictions.

@ -2,9 +2,9 @@
#define OSRM_EXTRACTOR_TRAFFIC_SIGNALS_HPP

#include "util/typedefs.hpp"
#include <unordered_set>

#include <boost/unordered_set.hpp>
#include <boost/functional/hash.hpp>
#include <unordered_set>

namespace osrm
{

@ -3,7 +3,7 @@

#include "util/typedefs.hpp"

#include <boost/unordered_map.hpp>
#include <unordered_map>
#include <vector>

namespace osrm
@ -43,9 +43,9 @@ class TurnPathCompressor
// via nodes are the same.
// Similarly, we do not compress the instruction via node in a maneuver override, as we need
// this to identify the location of the maneuver during routing path-processing.
boost::unordered_multimap<NodeID, TurnPath *> starts;
boost::unordered_multimap<NodeID, TurnPath *> vias;
boost::unordered_multimap<NodeID, TurnPath *> ends;
std::unordered_multimap<NodeID, TurnPath *> starts;
std::unordered_multimap<NodeID, TurnPath *> vias;
std::unordered_multimap<NodeID, TurnPath *> ends;
};

} // namespace extractor

@ -1,17 +1,16 @@
#ifndef OSRM_EXTRACTOR_WAY_RESTRICTION_MAP_HPP_
#define OSRM_EXTRACTOR_WAY_RESTRICTION_MAP_HPP_

#include <utility>
#include <vector>

// to access the turn restrictions
#include <boost/unordered_map.hpp>

#include "extractor/restriction.hpp"
#include "extractor/restriction_graph.hpp"
#include "util/integer_range.hpp"
#include "util/typedefs.hpp"

// to access the turn restrictions
#include <unordered_map>
#include <utility>
#include <vector>

namespace osrm
{
namespace extractor

@ -41,8 +41,8 @@ splitBidirectionalEdges(const std::vector<extractor::EdgeBasedEdge> &edges)
directed.emplace_back(edge.source,
edge.target,
edge.data.turn_id,
std::max(edge.data.weight, 1),
edge.data.duration,
std::max(edge.data.weight, {1}),
to_alias<EdgeDuration>(edge.data.duration),
edge.data.distance,
edge.data.forward,
edge.data.backward);
@ -50,8 +50,8 @@ splitBidirectionalEdges(const std::vector<extractor::EdgeBasedEdge> &edges)
directed.emplace_back(edge.target,
edge.source,
edge.data.turn_id,
std::max(edge.data.weight, 1),
edge.data.duration,
std::max(edge.data.weight, {1}),
to_alias<EdgeDuration>(edge.data.duration),
edge.data.distance,
edge.data.backward,
edge.data.forward);

@ -28,6 +28,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef OSRM_UTIL_ALIAS_HPP
#define OSRM_UTIL_ALIAS_HPP

#include <boost/numeric/conversion/cast.hpp>
#include <functional>
#include <iostream>
#include <type_traits>
@ -125,6 +126,40 @@ template <typename From, typename Tag> struct Alias final
}
};

template <typename ToAlias, typename FromAlias> inline ToAlias alias_cast(const FromAlias &from)
{
static_assert(std::is_arithmetic<typename FromAlias::value_type>::value,
"Alias From needs to be based on an arithmetic type");
static_assert(std::is_arithmetic<typename ToAlias::value_type>::value,
"Alias Other needs to be based on an arithmetic type");
return {static_cast<typename ToAlias::value_type>(
static_cast<const typename FromAlias::value_type>(from))};
}

template <typename ToNumeric, typename FromAlias> inline ToNumeric from_alias(const FromAlias &from)
{
static_assert(std::is_arithmetic<typename FromAlias::value_type>::value,
"Alias From needs to be based on an arithmetic type");
static_assert(std::is_arithmetic<ToNumeric>::value, "Numeric needs to be an arithmetic type");
return {static_cast<ToNumeric>(static_cast<const typename FromAlias::value_type>(from))};
}

template <typename ToAlias,
typename FromNumeric,
typename = std::enable_if_t<!std::is_same<ToAlias, FromNumeric>::value>>
inline ToAlias to_alias(const FromNumeric &from)
{
static_assert(std::is_arithmetic<FromNumeric>::value, "Numeric needs to be an arithmetic type");
static_assert(std::is_arithmetic<typename ToAlias::value_type>::value,
"Alias needs to be based on an arithmetic type");
return {static_cast<typename ToAlias::value_type>(from)};
}

// Sometimes metrics are stored either as bitfields or the alias itself.
// So we'll try to convert to alias without knowing which is the case.
// Therefore, we need this no-op overload, otherwise it will fail on the arithmetic requirement.
template <typename ToAlias> inline ToAlias to_alias(const ToAlias &from) { return from; }

template <typename From, typename Tag>
inline std::ostream &operator<<(std::ostream &stream, const Alias<From, Tag> &inst)
{

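Assuming the Alias template and the helpers added above are visible in scope, the intended conversions look roughly like the sketch below. The `seconds_tag`/`deciseconds_tag` tags and the `example` function are made up for illustration and are not part of the header:

// Illustrative sketch only; the tags below are hypothetical.
struct seconds_tag { };
struct deciseconds_tag { };
using Seconds = osrm::Alias<std::int32_t, seconds_tag>;
using Deciseconds = osrm::Alias<std::int32_t, deciseconds_tag>;

void example()
{
    const Seconds s{42};

    // Unwrap to the underlying arithmetic type for mixed-type math.
    const double as_double = osrm::from_alias<double>(s);

    // Wrap an arithmetic value back into a strongly typed alias.
    const Seconds doubled = osrm::to_alias<Seconds>(as_double * 2.0);

    // Move the raw value between two alias types when that is intentional.
    const Deciseconds reinterpret = osrm::alias_cast<Deciseconds>(doubled);
    (void)reinterpret;
}
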
@ -34,7 +34,7 @@ template <typename T> class DistTableWrapper

std::size_t size() const { return table_.size(); }

EdgeWeight operator()(NodeID from, NodeID to) const
T operator()(NodeID from, NodeID to) const
{
BOOST_ASSERT_MSG(from < number_of_nodes_, "from ID is out of bound");
BOOST_ASSERT_MSG(to < number_of_nodes_, "to ID is out of bound");
@ -46,7 +46,7 @@ template <typename T> class DistTableWrapper
return table_[index];
}

void SetValue(NodeID from, NodeID to, EdgeWeight value)
void SetValue(NodeID from, NodeID to, T value)
{
BOOST_ASSERT_MSG(from < number_of_nodes_, "from ID is out of bound");
BOOST_ASSERT_MSG(to < number_of_nodes_, "to ID is out of bound");

@ -21,14 +21,14 @@ namespace util
struct NodeBasedEdgeData
{
NodeBasedEdgeData()
: weight(INVALID_EDGE_WEIGHT), duration(INVALID_EDGE_WEIGHT),
: weight(INVALID_EDGE_WEIGHT), duration(INVALID_EDGE_DURATION),
distance(INVALID_EDGE_DISTANCE), geometry_id({0, false}), reversed(false),
annotation_data(-1)
{
}

NodeBasedEdgeData(EdgeWeight weight,
EdgeWeight duration,
EdgeDuration duration,
EdgeDistance distance,
GeometryID geometry_id,
bool reversed,
@ -40,7 +40,7 @@ struct NodeBasedEdgeData
}

EdgeWeight weight;
EdgeWeight duration;
EdgeDuration duration;
EdgeDistance distance;
GeometryID geometry_id;
bool reversed : 1;
@ -88,9 +88,9 @@ NodeBasedDynamicGraphFromEdges(NodeID number_of_nodes,
output_edge.data.flags = input_edge.flags;
output_edge.data.annotation_data = input_edge.annotation_data;

BOOST_ASSERT(output_edge.data.weight > 0);
BOOST_ASSERT(output_edge.data.duration > 0);
BOOST_ASSERT(output_edge.data.distance >= 0);
BOOST_ASSERT(output_edge.data.weight > EdgeWeight{0});
BOOST_ASSERT(output_edge.data.duration > EdgeDuration{0});
BOOST_ASSERT(output_edge.data.distance >= EdgeDistance{0});
});

tbb::parallel_sort(edges_list.begin(), edges_list.end());

@ -83,19 +83,45 @@ inline T get_upper_half_value(WordT word,
}

template <typename WordT, typename T>
inline WordT set_lower_value(WordT word, WordT mask, std::uint8_t offset, T value)
inline WordT set_lower_value(WordT word,
WordT mask,
std::uint8_t offset,
T value,
typename std::enable_if_t<std::is_integral<T>::value> * = nullptr)
{
static_assert(std::is_unsigned<WordT>::value, "Only unsigned word types supported for now.");
return (word & ~mask) | ((static_cast<WordT>(value) << offset) & mask);
}

template <typename WordT, typename T>
inline WordT set_upper_value(WordT word, WordT mask, std::uint8_t offset, T value)
inline WordT set_upper_value(WordT word,
WordT mask,
std::uint8_t offset,
T value,
typename std::enable_if_t<std::is_integral<T>::value> * = nullptr)
{
static_assert(std::is_unsigned<WordT>::value, "Only unsigned word types supported for now.");
return (word & ~mask) | ((static_cast<WordT>(value) >> offset) & mask);
}

template <typename WordT, typename T>
inline WordT set_lower_value(
WordT word, WordT mask, std::uint8_t offset, T value, typename T::value_type * = nullptr)
{
static_assert(std::is_unsigned<WordT>::value, "Only unsigned word types supported for now.");
return (word & ~mask) |
((static_cast<WordT>(static_cast<typename T::value_type>(value)) << offset) & mask);
}

template <typename WordT, typename T>
inline WordT set_upper_value(
WordT word, WordT mask, std::uint8_t offset, T value, typename T::value_type * = nullptr)
{
static_assert(std::is_unsigned<WordT>::value, "Only unsigned word types supported for now.");
return (word & ~mask) |
((static_cast<WordT>(static_cast<typename T::value_type>(value)) >> offset) & mask);
}

inline bool compare_and_swap(uint64_t *ptr, uint64_t old_value, uint64_t new_value)
{
#if defined(_MSC_VER)
@ -287,6 +313,12 @@ template <typename T, std::size_t Bits, storage::Ownership Ownership> class Pack
return &container == &other.container && internal_index == other.internal_index;
}

// FIXME: This is needed for tests on Boost ranges to correctly compare Alias values.
template <typename F, typename U> bool operator!=(const osrm::Alias<F, U> value) const
{
return container.get_value(internal_index) != value;
}

friend std::ostream &operator<<(std::ostream &os, const internal_reference &rhs)
{
return os << static_cast<T>(rhs);

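The two added overloads of set_lower_value/set_upper_value let the packed storage accept either raw integral values or Alias-wrapped metrics: the `std::enable_if_t<std::is_integral<T>::value>` parameter selects the first pair for plain integers, while the `typename T::value_type *` parameter is only well-formed for alias types and selects the second. A rough usage sketch, assuming the functions above are in scope (the mask and offset values here are arbitrary, not OSRM's layout):

// Hypothetical dispatch example; not taken from the codebase.
void dispatch_example()
{
    const std::uint64_t word = 0;
    const std::uint64_t mask = 0x3FFFFFull; // lower 22 bits, arbitrary choice
    const std::uint8_t offset = 0;

    // Integral value: resolves to the overload guarded by std::is_integral.
    const std::uint64_t w1 = set_lower_value(word, mask, offset, 1234u);

    // Alias value: resolves to the overload guarded by typename T::value_type *.
    const std::uint64_t w2 = set_lower_value(word, mask, offset, SegmentWeight{1234});

    (void)w1;
    (void)w2;
}
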
@ -48,7 +48,26 @@ struct osm_way_id
struct duplicated_node
{
};
struct edge_weight
{
};
struct edge_duration
{
};
struct edge_distance
{
};
struct segment_weight
{
};
struct segment_duration
{
};
struct turn_penalty
{
};
} // namespace tag

using OSMNodeID = osrm::Alias<std::uint64_t, tag::osm_node_id>;
// clang-tidy fires `bugprone-throw-keyword-missing` here for unknown reason
// NOLINTNEXTLINE(bugprone-throw-keyword-missing)
@ -77,12 +96,13 @@ using EdgeID = std::uint32_t;
using NameID = std::uint32_t;
using AnnotationID = std::uint32_t;
using PackedGeometryID = std::uint32_t;
using EdgeWeight = std::int32_t;
using EdgeDuration = std::int32_t;
using EdgeDistance = float;
using SegmentWeight = std::uint32_t;
using SegmentDuration = std::uint32_t;
using TurnPenalty = std::int16_t; // turn penalty in 100ms units

using EdgeWeight = osrm::Alias<std::int32_t, tag::edge_weight>;
using EdgeDuration = osrm::Alias<std::int32_t, tag::edge_duration>;
using EdgeDistance = osrm::Alias<float, tag::edge_distance>;
using SegmentWeight = osrm::Alias<std::uint32_t, tag::segment_weight>;
using SegmentDuration = osrm::Alias<std::uint32_t, tag::segment_duration>;
using TurnPenalty = osrm::Alias<std::int16_t, tag::turn_penalty>; // turn penalty in 100ms units

static const std::size_t INVALID_INDEX = std::numeric_limits<std::size_t>::max();

@ -109,16 +129,30 @@ static const NameID EMPTY_NAMEID = 0;
static const unsigned INVALID_COMPONENTID = 0;
static const std::size_t SEGMENT_WEIGHT_BITS = 22;
static const std::size_t SEGMENT_DURATION_BITS = 22;
static const SegmentWeight INVALID_SEGMENT_WEIGHT = (1u << SEGMENT_WEIGHT_BITS) - 1;
static const SegmentDuration INVALID_SEGMENT_DURATION = (1u << SEGMENT_DURATION_BITS) - 1;
static const SegmentWeight MAX_SEGMENT_WEIGHT = INVALID_SEGMENT_WEIGHT - 1;
static const SegmentDuration MAX_SEGMENT_DURATION = INVALID_SEGMENT_DURATION - 1;
static const EdgeWeight INVALID_EDGE_WEIGHT = std::numeric_limits<EdgeWeight>::max();
static const EdgeDuration MAXIMAL_EDGE_DURATION = std::numeric_limits<EdgeDuration>::max();
static const EdgeDistance MAXIMAL_EDGE_DISTANCE = std::numeric_limits<EdgeDistance>::max();
static const TurnPenalty INVALID_TURN_PENALTY = std::numeric_limits<TurnPenalty>::max();
static const EdgeDistance INVALID_EDGE_DISTANCE = std::numeric_limits<EdgeDistance>::max();
static const EdgeDistance INVALID_FALLBACK_SPEED = std::numeric_limits<EdgeDistance>::max();
static const SegmentWeight INVALID_SEGMENT_WEIGHT = SegmentWeight{(1u << SEGMENT_WEIGHT_BITS) - 1};
static const SegmentDuration INVALID_SEGMENT_DURATION =
SegmentDuration{(1u << SEGMENT_DURATION_BITS) - 1};
static const SegmentWeight MAX_SEGMENT_WEIGHT = INVALID_SEGMENT_WEIGHT - SegmentWeight{1};
static const SegmentDuration MAX_SEGMENT_DURATION = INVALID_SEGMENT_DURATION - SegmentDuration{1};
static const EdgeWeight INVALID_EDGE_WEIGHT =
EdgeWeight{std::numeric_limits<EdgeWeight::value_type>::max()};
static const EdgeDuration INVALID_EDGE_DURATION =
EdgeDuration{std::numeric_limits<EdgeDuration::value_type>::max()};
static const EdgeDistance INVALID_EDGE_DISTANCE =
EdgeDistance{std::numeric_limits<EdgeDistance::value_type>::max()};
static const TurnPenalty INVALID_TURN_PENALTY =
TurnPenalty{std::numeric_limits<TurnPenalty::value_type>::max()};
static const EdgeDistance INVALID_FALLBACK_SPEED =
EdgeDistance{std::numeric_limits<EdgeDistance::value_type>::max()};
// TODO: These are the same as the invalid values. Do we need both?
static const EdgeWeight MAXIMAL_EDGE_WEIGHT =
EdgeWeight{std::numeric_limits<EdgeWeight::value_type>::max()};
static const EdgeDuration MAXIMAL_EDGE_DURATION =
EdgeDuration{std::numeric_limits<EdgeDuration::value_type>::max()};
static const EdgeDistance MAXIMAL_EDGE_DISTANCE =
EdgeDistance{std::numeric_limits<EdgeDistance::value_type>::max()};
static const TurnPenalty MAXIMAL_TURN_PENALTY =
TurnPenalty{std::numeric_limits<TurnPenalty::value_type>::max()};

using DatasourceID = std::uint8_t;

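With the typedefs above, EdgeWeight, EdgeDuration and the other metrics are no longer interchangeable integers: passing one where the other is expected becomes a compile-time error, which is the point of the change. A tiny sketch of the effect (the functions are hypothetical, assuming same-type arithmetic on Alias as defined above):

// Sketch only: illustrates the compile-time separation of the metric aliases.
EdgeDuration addPenalty(EdgeDuration base, EdgeDuration penalty) { return base + penalty; }

void typed_metrics_example(EdgeWeight weight, EdgeDuration duration)
{
    addPenalty(duration, EdgeDuration{30});    // OK: both operands are durations
    // addPenalty(weight, EdgeDuration{30});   // would no longer compile: EdgeWeight is a distinct type
    (void)weight;
}
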
@ -78,7 +78,8 @@ int Contractor::Run()
// Convert node weights for oneway streets to INVALID_EDGE_WEIGHT
for (auto &weight : node_weights)
{
weight = (weight & 0x80000000) ? INVALID_EDGE_WEIGHT : weight;
weight = (from_alias<EdgeWeight::value_type>(weight) & 0x80000000) ? INVALID_EDGE_WEIGHT
: weight;
}

// Contracting the edge-expanded graph

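The masking above relies on the convention that the top bit of a node weight marks oneway streets: reading the raw value through from_alias exposes that bit, and the customizer later in this diff clears it again with `w &= EdgeWeight{0x7fffffff}`. A small sketch of the same bit trick on the underlying value type (names and helper functions are illustrative, not OSRM API):

// Sketch of the sign-bit convention used above.
constexpr std::uint32_t kOnewayBit = 0x80000000u;

bool isOnewayMarked(std::int32_t raw_weight)
{
    // Same test as `from_alias<EdgeWeight::value_type>(weight) & 0x80000000` above.
    return (static_cast<std::uint32_t>(raw_weight) & kOnewayBit) != 0;
}

std::int32_t clearOnewayBit(std::int32_t raw_weight)
{
    // Counterpart of the customizer's `w &= EdgeWeight{0x7fffffff}`.
    return static_cast<std::int32_t>(static_cast<std::uint32_t>(raw_weight) & ~kOnewayBit);
}
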
@ -170,8 +170,8 @@ void ContractNode(ContractorThreadData *data,
}

heap.Clear();
heap.Insert(source, 0, ContractorHeapData{});
EdgeWeight max_weight = 0;
heap.Insert(source, {0}, ContractorHeapData{});
EdgeWeight max_weight = {0};
unsigned number_of_targets = 0;

for (auto out_edge : graph.GetAdjacentEdgeRange(node))
@ -199,7 +199,7 @@ void ContractNode(ContractorThreadData *data,
// CAREFUL: This only works due to the independent node-setting. This
// guarantees that source is not connected to another node that is
// contracted
node_weights[source] = path_weight + 1;
node_weights[source] = path_weight + EdgeWeight{1};
BOOST_ASSERT(stats != nullptr);
stats->edges_added_count += 2;
stats->original_edges_added_count +=

@ -133,7 +133,8 @@ int Customizer::Run(const CustomizationConfig &config)
auto graph = LoadAndUpdateEdgeExpandedGraph(
config, mlp, node_weights, node_durations, node_distances, connectivity_checksum);
BOOST_ASSERT(graph.GetNumberOfNodes() == node_weights.size());
std::for_each(node_weights.begin(), node_weights.end(), [](auto &w) { w &= 0x7fffffff; });
std::for_each(
node_weights.begin(), node_weights.end(), [](auto &w) { w &= EdgeWeight{0x7fffffff}; });
util::Log() << "Loaded edge based graph: " << graph.GetNumberOfEdges() << " edges, "
<< graph.GetNumberOfNodes() << " nodes";

@ -3,11 +3,11 @@
#include "engine/datafacade/datafacade_base.hpp"

#include <boost/assert.hpp>
#include <boost/unordered_set.hpp>

#include <algorithm>
#include <iterator>
#include <tuple>
#include <unordered_set>

namespace osrm
{
@ -106,8 +106,8 @@ bool Hint::IsValid(const util::Coordinate new_input_coordinates,

// Check hints do not contain duplicate segment pairs
// We can't allow duplicates as search heaps do not support it.
boost::unordered_set<NodeID> forward_segments;
boost::unordered_set<NodeID> reverse_segments;
std::unordered_set<NodeID> forward_segments;
std::unordered_set<NodeID> reverse_segments;
for (const auto &seg_hint : segment_hints)
{
const auto forward_res = forward_segments.insert(seg_hint.phantom.forward_segment_id.id);

@ -90,7 +90,8 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
std::vector<api::TableAPI::TableCellRef> estimated_pairs;

// Scan table for null results - if any exist, replace with distance estimates
if (params.fallback_speed != INVALID_FALLBACK_SPEED || params.scale_factor != 1)
if (params.fallback_speed != from_alias<double>(INVALID_FALLBACK_SPEED) ||
params.scale_factor != 1)
{
for (std::size_t row = 0; row < num_sources; row++)
{
@ -98,7 +99,8 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
{
const auto &table_index = row * num_destinations + column;
BOOST_ASSERT(table_index < result_tables_pair.first.size());
if (params.fallback_speed != INVALID_FALLBACK_SPEED && params.fallback_speed > 0 &&
if (params.fallback_speed != from_alias<double>(INVALID_FALLBACK_SPEED) &&
params.fallback_speed > 0 &&
result_tables_pair.first[table_index] == MAXIMAL_EDGE_DURATION)
{
const auto &source =
@ -118,29 +120,32 @@ Status TablePlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
candidatesSnappedLocation(destination));

result_tables_pair.first[table_index] =
distance_estimate / (double)params.fallback_speed;
to_alias<EdgeDuration>(distance_estimate / params.fallback_speed);
if (!result_tables_pair.second.empty())
{
result_tables_pair.second[table_index] = distance_estimate;
result_tables_pair.second[table_index] =
to_alias<EdgeDistance>(distance_estimate);
}

estimated_pairs.emplace_back(row, column);
}
if (params.scale_factor > 0 && params.scale_factor != 1 &&
result_tables_pair.first[table_index] != MAXIMAL_EDGE_DURATION &&
result_tables_pair.first[table_index] != 0)
result_tables_pair.first[table_index] != EdgeDuration{0})
{
EdgeDuration diff =
MAXIMAL_EDGE_DURATION / result_tables_pair.first[table_index];

if (params.scale_factor >= diff)
if (params.scale_factor >= from_alias<double>(diff))
{
result_tables_pair.first[table_index] = MAXIMAL_EDGE_DURATION - 1;
result_tables_pair.first[table_index] =
MAXIMAL_EDGE_DURATION - EdgeDuration{1};
}
else
{
result_tables_pair.first[table_index] = std::lround(
result_tables_pair.first[table_index] * params.scale_factor);
result_tables_pair.first[table_index] = to_alias<EdgeDuration>(
std::lround(from_alias<double>(result_tables_pair.first[table_index]) *
params.scale_factor));
}
}
}

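The fallback path above fills missing table cells from a great-circle distance estimate: the duration becomes distance divided by the requested fallback speed, and both results are wrapped back into their alias types. A hedged sketch of that arithmetic, assuming to_alias is in scope (the helper name below is hypothetical):

// Sketch only: mirrors the shape of the fallback computation above.
EdgeDuration estimateFallbackDuration(double distance_estimate, double fallback_speed)
{
    // Same shape as `to_alias<EdgeDuration>(distance_estimate / params.fallback_speed)`.
    return to_alias<EdgeDuration>(distance_estimate / fallback_speed);
}
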
@ -497,17 +497,17 @@ void encodeVectorTile(const DataFacadeBase &facade,
auto name = facade.GetNameForID(name_id);

// If this is a valid forward edge, go ahead and add it to the tile
if (forward_duration != 0 && edge.forward_segment_id.enabled)
if (forward_duration != SegmentDuration{0} && edge.forward_segment_id.enabled)
{
// Calculate the speed for this line
std::uint32_t speed_kmh_idx =
static_cast<std::uint32_t>(round(length / forward_duration * 10 * 3.6));
std::uint32_t speed_kmh_idx = static_cast<std::uint32_t>(
round(length / from_alias<double>(forward_duration) * 10 * 3.6));

// Rate values are in meters per weight-unit - and similar to speeds, we
// present 1 decimal place of precision (these values are added as
// double/10) lower down
std::uint32_t forward_rate =
static_cast<std::uint32_t>(round(length / forward_weight * 10.));
std::uint32_t forward_rate = static_cast<std::uint32_t>(
round(length / from_alias<double>(forward_weight) * 10.));

auto tile_line = coordinatesToTileLine(a, b, tile_bbox);
if (!tile_line.empty())
@ -519,8 +519,8 @@ void encodeVectorTile(const DataFacadeBase &facade,
fbuilder.set_is_small(component_id.is_tiny);
fbuilder.set_datasource(
facade.GetDatasourceName(forward_datasource_idx).to_string());
fbuilder.set_weight(forward_weight / 10.0);
fbuilder.set_duration(forward_duration / 10.0);
fbuilder.set_weight(from_alias<double>(forward_weight) / 10.0);
fbuilder.set_duration(from_alias<double>(forward_duration) / 10.0);
fbuilder.set_name(name);
fbuilder.set_rate(forward_rate / 10.0);
fbuilder.set_is_startpoint(is_startpoint);
@ -531,17 +531,17 @@ void encodeVectorTile(const DataFacadeBase &facade,

// Repeat the above for the coordinates reversed and using the `reverse`
// properties
if (reverse_duration != 0 && edge.reverse_segment_id.enabled)
if (reverse_duration != SegmentDuration{0} && edge.reverse_segment_id.enabled)
{
// Calculate the speed for this line
std::uint32_t speed_kmh_idx =
static_cast<std::uint32_t>(round(length / reverse_duration * 10 * 3.6));
std::uint32_t speed_kmh_idx = static_cast<std::uint32_t>(
round(length / from_alias<double>(reverse_duration) * 10 * 3.6));

// Rate values are in meters per weight-unit - and similar to speeds, we
// present 1 decimal place of precision (these values are added as
// double/10) lower down
std::uint32_t reverse_rate =
static_cast<std::uint32_t>(round(length / reverse_weight * 10.));
std::uint32_t reverse_rate = static_cast<std::uint32_t>(
round(length / from_alias<double>(reverse_weight) * 10.));

auto tile_line = coordinatesToTileLine(b, a, tile_bbox);
if (!tile_line.empty())
@ -553,8 +553,8 @@ void encodeVectorTile(const DataFacadeBase &facade,
fbuilder.set_is_small(component_id.is_tiny);
fbuilder.set_datasource(
facade.GetDatasourceName(reverse_datasource_idx).to_string());
fbuilder.set_weight(reverse_weight / 10.0);
fbuilder.set_duration(reverse_duration / 10.0);
fbuilder.set_weight(from_alias<double>(reverse_weight) / 10.0);
fbuilder.set_duration(from_alias<double>(reverse_duration) / 10.0);
fbuilder.set_name(name);
fbuilder.set_rate(reverse_rate / 10.0);
fbuilder.set_is_startpoint(is_startpoint);
@ -582,8 +582,8 @@ void encodeVectorTile(const DataFacadeBase &facade,

fbuilder.set_bearing_in(turn_data.in_angle);
fbuilder.set_turn_angle(turn_data.turn_angle);
fbuilder.set_cost(turn_data.duration / 10.0);
fbuilder.set_weight(turn_data.weight / 10.0);
fbuilder.set_cost(from_alias<double>(turn_data.duration) / 10.0);
fbuilder.set_weight(from_alias<double>(turn_data.weight) / 10.0);
fbuilder.set_turn(turn_data.turn_instruction);

fbuilder.commit();

@ -20,9 +20,9 @@ namespace engine
namespace plugins
{

bool IsStronglyConnectedComponent(const util::DistTableWrapper<EdgeWeight> &result_table)
bool IsStronglyConnectedComponent(const util::DistTableWrapper<EdgeDuration> &result_table)
{
return std::find(std::begin(result_table), std::end(result_table), INVALID_EDGE_WEIGHT) ==
return std::find(std::begin(result_table), std::end(result_table), INVALID_EDGE_DURATION) ==
std::end(result_table);
}

@ -68,7 +68,7 @@ TripPlugin::ComputeRoute(const RoutingAlgorithmsInterface &algorithms,

void ManipulateTableForFSE(const std::size_t source_id,
const std::size_t destination_id,
util::DistTableWrapper<EdgeWeight> &result_table)
util::DistTableWrapper<EdgeDuration> &result_table)
{
// ****************** Change Table *************************
// The following code manipulates the table and produces the new table for
@ -94,7 +94,7 @@ void ManipulateTableForFSE(const std::size_t source_id,
{
if (i == source_id)
continue;
result_table.SetValue(i, source_id, INVALID_EDGE_WEIGHT);
result_table.SetValue(i, source_id, INVALID_EDGE_DURATION);
}

// change parameters.destination row
@ -104,22 +104,22 @@ void ManipulateTableForFSE(const std::size_t source_id,
{
if (i == destination_id)
continue;
result_table.SetValue(destination_id, i, INVALID_EDGE_WEIGHT);
result_table.SetValue(destination_id, i, INVALID_EDGE_DURATION);
}

// set destination->source to zero so roundtrip treats source and
// destination as one location
result_table.SetValue(destination_id, source_id, 0);
result_table.SetValue(destination_id, source_id, {0});

// set source->destination as very high number so algorithm is forced
// to find another path to get to destination
result_table.SetValue(source_id, destination_id, INVALID_EDGE_WEIGHT);
result_table.SetValue(source_id, destination_id, INVALID_EDGE_DURATION);

//********* End of changes to table *************************************
}

void ManipulateTableForNonRoundtripFS(const std::size_t source_id,
util::DistTableWrapper<EdgeWeight> &result_table)
util::DistTableWrapper<EdgeDuration> &result_table)
{
// We can use the round-trip calculation to simulate non-round-trip fixed start
// by making all paths to the source location zero. Effectively finding an 'optimal'
@ -127,12 +127,12 @@ void ManipulateTableForNonRoundtripFS(const std::size_t source_id,
// source.
for (const auto i : util::irange<size_t>(0, result_table.GetNumberOfNodes()))
{
result_table.SetValue(i, source_id, 0);
result_table.SetValue(i, source_id, {0});
}
}

void ManipulateTableForNonRoundtripFE(const std::size_t destination_id,
util::DistTableWrapper<EdgeWeight> &result_table)
util::DistTableWrapper<EdgeDuration> &result_table)
{
// We can use the round-trip calculation to simulate non-round-trip fixed end
// by making all paths from the destination to other locations zero.
@ -140,7 +140,7 @@ void ManipulateTableForNonRoundtripFE(const std::size_t destination_id,
// from the destination to any source.
for (const auto i : util::irange<size_t>(0, result_table.GetNumberOfNodes()))
{
result_table.SetValue(destination_id, i, 0);
result_table.SetValue(destination_id, i, {0});
}
}

@ -218,7 +218,7 @@ Status TripPlugin::HandleRequest(const RoutingAlgorithmsInterface &algorithms,
BOOST_ASSERT(snapped_phantoms.size() == number_of_locations);

// compute the duration table of all phantom nodes
auto result_duration_table = util::DistTableWrapper<EdgeWeight>(
auto result_duration_table = util::DistTableWrapper<EdgeDuration>(
algorithms.ManyToManySearch(snapped_phantoms, {}, {}, /*requestDistance*/ false).first,
number_of_locations);

@ -44,7 +44,7 @@ struct RankedCandidateNode

bool operator<(const RankedCandidateNode &other) const
{
return (2 * weight + sharing) < (2 * other.weight + other.sharing);
return (EdgeWeight{2} * weight + sharing) < (EdgeWeight{2} * other.weight + other.sharing);
}
};

@ -66,8 +66,8 @@ void alternativeRoutingStep(const DataFacade<Algorithm> &facade,
// toHeapNode is the same
const auto heapNode = forward_heap.DeleteMinGetHeapNode();

const auto scaled_weight =
static_cast<EdgeWeight>((heapNode.weight + min_edge_offset) / (1. + VIAPATH_EPSILON));
const auto scaled_weight = to_alias<EdgeWeight>(
from_alias<double>(heapNode.weight + min_edge_offset) / (1. + VIAPATH_EPSILON));
if ((INVALID_EDGE_WEIGHT != *upper_bound_to_shortest_path_weight) &&
(scaled_weight > *upper_bound_to_shortest_path_weight))
{
@ -84,7 +84,7 @@ void alternativeRoutingStep(const DataFacade<Algorithm> &facade,
const EdgeWeight new_weight = reverseHeapNode->weight + heapNode.weight;
if (new_weight < *upper_bound_to_shortest_path_weight)
{
if (new_weight >= 0)
if (new_weight >= EdgeWeight{0})
{
*middle_node = heapNode.node;
*upper_bound_to_shortest_path_weight = new_weight;
@ -92,7 +92,8 @@ void alternativeRoutingStep(const DataFacade<Algorithm> &facade,
else
{
// check whether there is a loop present at the node
const auto loop_weight = std::get<0>(getLoopWeight<false>(facade, heapNode.node));
const auto loop_weight =
std::get<0>(getLoopMetric<EdgeWeight>(facade, heapNode.node));
const EdgeWeight new_weight_with_loop = new_weight + loop_weight;
if (loop_weight != INVALID_EDGE_WEIGHT &&
new_weight_with_loop <= *upper_bound_to_shortest_path_weight)
@ -112,7 +113,7 @@ void alternativeRoutingStep(const DataFacade<Algorithm> &facade,
const NodeID to = facade.GetTarget(edge);
const EdgeWeight edge_weight = data.weight;

BOOST_ASSERT(edge_weight > 0);
BOOST_ASSERT(edge_weight > EdgeWeight{0});
const EdgeWeight to_weight = heapNode.weight + edge_weight;

const auto toHeapNode = forward_heap.GetHeapNodeIfWasInserted(to);
@ -180,7 +181,7 @@ void computeWeightAndSharingOfViaPath(SearchEngineData<Algorithm> &engine_workin

NodeID s_v_middle = SPECIAL_NODEID;
EdgeWeight upper_bound_s_v_path_weight = INVALID_EDGE_WEIGHT;
new_reverse_heap.Insert(via_node, 0, via_node);
new_reverse_heap.Insert(via_node, {0}, via_node);
// compute path <s,..,v> by reusing forward search from s
while (!new_reverse_heap.Empty())
{
@ -196,7 +197,7 @@ void computeWeightAndSharingOfViaPath(SearchEngineData<Algorithm> &engine_workin
// compute path <v,..,t> by reusing backward search from node t
NodeID v_t_middle = SPECIAL_NODEID;
EdgeWeight upper_bound_of_v_t_path_weight = INVALID_EDGE_WEIGHT;
new_forward_heap.Insert(via_node, 0, via_node);
new_forward_heap.Insert(via_node, {0}, via_node);
while (!new_forward_heap.Empty())
{
routingStep<FORWARD_DIRECTION>(facade,
@ -342,7 +343,7 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
*s_v_middle = SPECIAL_NODEID;
EdgeWeight upper_bound_s_v_path_weight = INVALID_EDGE_WEIGHT;
// compute path <s,..,v> by reusing forward search from s
new_reverse_heap.Insert(candidate.node, 0, candidate.node);
new_reverse_heap.Insert(candidate.node, {0}, candidate.node);
while (new_reverse_heap.Size() > 0)
{
routingStep<REVERSE_DIRECTION>(facade,
@ -363,7 +364,7 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
// compute path <v,..,t> by reusing backward search from t
*v_t_middle = SPECIAL_NODEID;
EdgeWeight upper_bound_of_v_t_path_weight = INVALID_EDGE_WEIGHT;
new_forward_heap.Insert(candidate.node, 0, candidate.node);
new_forward_heap.Insert(candidate.node, {0}, candidate.node);
while (new_forward_heap.Size() > 0)
{
routingStep<FORWARD_DIRECTION>(facade,
@ -400,8 +401,9 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
{
return false;
}
const EdgeWeight T_threshold = static_cast<EdgeWeight>(VIAPATH_ALPHA * weight_of_shortest_path);
EdgeWeight unpacked_until_weight = 0;
const EdgeWeight T_threshold =
to_alias<EdgeWeight>(VIAPATH_ALPHA * from_alias<double>(weight_of_shortest_path));
EdgeWeight unpacked_until_weight = {0};

std::stack<SearchSpaceEdge> unpack_stack;
// Traverse path s-->v
@ -463,7 +465,7 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
}

EdgeWeight t_test_path_weight = unpacked_until_weight;
unpacked_until_weight = 0;
unpacked_until_weight = {0};
// Traverse path s-->v
BOOST_ASSERT(!packed_v_t_path.empty());
for (unsigned i = 0, packed_path_length = static_cast<unsigned>(packed_v_t_path.size() - 1);
@ -532,8 +534,8 @@ bool viaNodeCandidatePassesTTest(SearchEngineData<Algorithm> &engine_working_dat
EdgeWeight upper_bound = INVALID_EDGE_WEIGHT;
NodeID middle = SPECIAL_NODEID;

forward_heap3.Insert(s_P, 0, s_P);
reverse_heap3.Insert(t_P, 0, t_P);
forward_heap3.Insert(s_P, {0}, s_P);
reverse_heap3.Insert(t_P, {0}, t_P);
// exploration from s and t until deletemin/(1+epsilon) > _lengt_oO_sShortest_path
while ((forward_heap3.Size() + reverse_heap3.Size()) > 0)
{
@ -580,10 +582,11 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi

insertNodesInHeaps(forward_heap1, reverse_heap1, endpoint_candidates);
// get offset to account for offsets on phantom nodes on compressed edges
EdgeWeight min_edge_offset = forward_heap1.Empty() ? 0 : std::min(0, forward_heap1.MinKey());
BOOST_ASSERT(min_edge_offset <= 0);
EdgeWeight min_edge_offset =
forward_heap1.Empty() ? EdgeWeight{0} : std::min<EdgeWeight>({0}, forward_heap1.MinKey());
BOOST_ASSERT(min_edge_offset <= EdgeWeight{0});
// we only every insert negative offsets for nodes in the forward heap
BOOST_ASSERT(reverse_heap1.Empty() || reverse_heap1.MinKey() >= 0);
BOOST_ASSERT(reverse_heap1.Empty() || reverse_heap1.MinKey() >= EdgeWeight{0});

// search from s and t till new_min/(1+epsilon) > weight_of_shortest_path
while (0 < (forward_heap1.Size() + reverse_heap1.Size()))
@ -701,22 +704,27 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi
if (node == middle_node)
continue;
const auto fwd_iterator = approximated_forward_sharing.find(node);
const EdgeWeight fwd_sharing =
(fwd_iterator != approximated_forward_sharing.end()) ? fwd_iterator->second : 0;
const EdgeWeight fwd_sharing = (fwd_iterator != approximated_forward_sharing.end())
? fwd_iterator->second
: EdgeWeight{0};
const auto rev_iterator = approximated_reverse_sharing.find(node);
const EdgeWeight rev_sharing =
(rev_iterator != approximated_reverse_sharing.end()) ? rev_iterator->second : 0;
const EdgeWeight rev_sharing = (rev_iterator != approximated_reverse_sharing.end())
? rev_iterator->second
: EdgeWeight{0};

const EdgeWeight approximated_sharing = fwd_sharing + rev_sharing;
const EdgeWeight approximated_weight =
forward_heap1.GetKey(node) + reverse_heap1.GetKey(node);
const bool weight_passes =
(approximated_weight < upper_bound_to_shortest_path_weight * (1 + VIAPATH_EPSILON));
(from_alias<double>(approximated_weight) <
from_alias<double>(upper_bound_to_shortest_path_weight) * (1 + VIAPATH_EPSILON));
const bool sharing_passes =
(approximated_sharing <= upper_bound_to_shortest_path_weight * VIAPATH_GAMMA);
(from_alias<double>(approximated_sharing) <=
from_alias<double>(upper_bound_to_shortest_path_weight) * VIAPATH_GAMMA);
const bool stretch_passes =
(approximated_weight - approximated_sharing) <
((1. + VIAPATH_EPSILON) * (upper_bound_to_shortest_path_weight - approximated_sharing));
from_alias<double>(approximated_weight - approximated_sharing) <
((1. + VIAPATH_EPSILON) *
from_alias<double>(upper_bound_to_shortest_path_weight - approximated_sharing));

if (weight_passes && sharing_passes && stretch_passes)
{
@ -737,7 +745,7 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi
// prioritizing via nodes for deep inspection
for (const NodeID node : preselected_node_list)
{
EdgeWeight weight_of_via_path = 0, sharing_of_via_path = 0;
EdgeWeight weight_of_via_path = {0}, sharing_of_via_path = {0};
computeWeightAndSharingOfViaPath(engine_working_data,
facade,
node,
@ -745,10 +753,11 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &engi
&sharing_of_via_path,
packed_shortest_path,
min_edge_offset);
const EdgeWeight maximum_allowed_sharing =
static_cast<EdgeWeight>(upper_bound_to_shortest_path_weight * VIAPATH_GAMMA);
const EdgeWeight maximum_allowed_sharing = to_alias<EdgeWeight>(
from_alias<double>(upper_bound_to_shortest_path_weight) * VIAPATH_GAMMA);
if (sharing_of_via_path <= maximum_allowed_sharing &&
weight_of_via_path <= upper_bound_to_shortest_path_weight * (1 + VIAPATH_EPSILON))
from_alias<double>(weight_of_via_path) <=
from_alias<double>(upper_bound_to_shortest_path_weight) * (1 + VIAPATH_EPSILON))
{
ranked_candidates_list.emplace_back(node, weight_of_via_path, sharing_of_via_path);
}

@ -85,9 +85,9 @@ struct WeightedViaNodeUnpackedPath
// Scale the maximum allowed weight increase based on its magnitude:
// - Shortest path 10 minutes, alternative 13 minutes => Factor of 0.30 ok
// - Shortest path 10 hours, alternative 13 hours => Factor of 0.30 unreasonable
double getLongerByFactorBasedOnDuration(const EdgeWeight duration)
double getLongerByFactorBasedOnDuration(const EdgeDuration duration)
{
BOOST_ASSERT(duration != INVALID_EDGE_WEIGHT);
BOOST_ASSERT(duration != INVALID_EDGE_DURATION);

// We only have generic weights here and no durations without unpacking.
// We also have restricted way penalties which are huge and will screw scaling here.
@ -118,19 +118,20 @@ double getLongerByFactorBasedOnDuration(const EdgeWeight duration)
const constexpr auto c = 2.45437877e+09;
const constexpr auto d = -2.07944571e+03;

if (duration < EdgeWeight(5 * 60))
if (duration < EdgeDuration{5 * 60})
{
return 1.0;
}
else if (duration > EdgeWeight(10 * 60 * 60))
else if (duration > EdgeDuration{10 * 60 * 60})
{
return 0.20;
}

// Bigger than 10 minutes but smaller than 10 hours
BOOST_ASSERT(duration >= 5 * 60 && duration <= 10 * 60 * 60);
BOOST_ASSERT(duration >= EdgeDuration{5 * 60} && duration <= EdgeDuration{10 * 60 * 60});

return a + b / (duration - d) + c / std::pow(duration - d, 3);
return a + b / (from_alias<double>(duration) - d) +
c / std::pow(from_alias<double>(duration) - d, 3);
}

Parameters parametersFromRequest(const PhantomEndpointCandidates &endpoint_candidates)
@ -223,10 +224,11 @@ RandIt filterViaCandidatesByStretch(RandIt first,
// Assumes weight roughly corresponds to duration-ish. If this is not the case e.g.
// because users are setting weight to be distance in the profiles, then we might
// either generate more candidates than we have to or not enough. But is okay.
const auto stretch_weight_limit = (1. + parameters.kAtMostLongerBy) * weight;
const auto stretch_weight_limit =
(1. + parameters.kAtMostLongerBy) * from_alias<double>(weight);

const auto over_weight_limit = [=](const auto via) {
return via.weight > stretch_weight_limit;
return from_alias<double>(via.weight) > stretch_weight_limit;
};

return std::remove_if(first, last, over_weight_limit);
@ -444,7 +446,8 @@ RandIt filterPackedPathsByLocalOptimality(const WeightedViaNodePackedPath &path,
const auto detour_length = forward_heap.GetKey(via) - forward_heap.GetKey(a) +
reverse_heap.GetKey(via) - reverse_heap.GetKey(b);

return plateaux_length < parameters.kAtLeastOptimalAroundViaBy * detour_length;
return from_alias<double>(plateaux_length) <
parameters.kAtLeastOptimalAroundViaBy * from_alias<double>(detour_length);
};

return std::remove_if(first, last, is_not_locally_optimal);
@ -482,8 +485,8 @@ RandIt filterUnpackedPathsBySharing(RandIt first,
return false;
}

EdgeWeight total_duration = 0;
const auto add_if_seen = [&](const EdgeWeight duration, const NodeID node) {
EdgeDuration total_duration = {0};
const auto add_if_seen = [&](const EdgeDuration duration, const NodeID node) {
auto node_duration = facade.GetNodeDuration(node);
total_duration += node_duration;
if (nodes.count(node) > 0)
@ -496,7 +499,7 @@ RandIt filterUnpackedPathsBySharing(RandIt first,
const auto shared_duration = std::accumulate(
begin(unpacked.nodes), end(unpacked.nodes), EdgeDuration{0}, add_if_seen);

unpacked.sharing = shared_duration / static_cast<double>(total_duration);
unpacked.sharing = from_alias<double>(shared_duration) / from_alias<double>(total_duration);
BOOST_ASSERT(unpacked.sharing >= 0.);
BOOST_ASSERT(unpacked.sharing <= 1.);

@ -531,10 +534,11 @@ RandIt filterAnnotatedRoutesByStretch(RandIt first,
BOOST_ASSERT(shortest_route.is_valid());

const auto shortest_route_duration = shortest_route.duration();
const auto stretch_duration_limit = (1. + parameters.kAtMostLongerBy) * shortest_route_duration;
const auto stretch_duration_limit =
(1. + parameters.kAtMostLongerBy) * from_alias<double>(shortest_route_duration);

const auto over_duration_limit = [=](const auto &route) {
return route.duration() > stretch_duration_limit;
return from_alias<double>(route.duration()) > stretch_duration_limit;
};

return std::remove_if(first, last, over_duration_limit);
@ -610,8 +614,8 @@ void unpackPackedPaths(InputIt first,
// Here heaps can be reused, let's go deeper!
forward_heap.Clear();
reverse_heap.Clear();
forward_heap.Insert(source, 0, {source});
reverse_heap.Insert(target, 0, {target});
forward_heap.Insert(source, {0}, {source});
reverse_heap.Insert(target, {0}, {target});

BOOST_ASSERT(!facade.ExcludeNode(source));
BOOST_ASSERT(!facade.ExcludeNode(target));
@ -694,7 +698,8 @@ makeCandidateVias(SearchEngineData<Algorithm> &search_engine_data,
while (forward_heap.Size() + reverse_heap.Size() > 0)
{
if (shortest_path_weight != INVALID_EDGE_WEIGHT)
overlap_weight = shortest_path_weight * parameters.kSearchSpaceOverlapFactor;
overlap_weight = to_alias<EdgeWeight>(from_alias<double>(shortest_path_weight) *
parameters.kSearchSpaceOverlapFactor);

// Termination criteria - when we have a shortest path this will guarantee for our overlap.
const bool keep_going = forward_heap_min + reverse_heap_min < overlap_weight;
@ -820,8 +825,10 @@ InternalManyRoutesResult alternativePathSearch(SearchEngineData<Algorithm> &sear
NodeID shortest_path_via = shortest_path_via_it->node;
EdgeWeight shortest_path_weight = shortest_path_via_it->weight;

const double duration_estimation = shortest_path_weight / facade.GetWeightMultiplier();
parameters.kAtMostLongerBy = getLongerByFactorBasedOnDuration(duration_estimation);
const double duration_estimation =
from_alias<double>(shortest_path_weight) / facade.GetWeightMultiplier();
parameters.kAtMostLongerBy =
getLongerByFactorBasedOnDuration(to_alias<EdgeDuration>(duration_estimation));

// Filters via candidate nodes with heuristics

@ -24,16 +24,16 @@ inline bool addLoopWeight(const DataFacade<ch::Algorithm> &facade,
EdgeDuration &duration,
EdgeDistance &distance)
{ // Special case for CH when contractor creates a loop edge node->node
BOOST_ASSERT(weight < 0);
BOOST_ASSERT(weight < EdgeWeight{0});

const auto loop_weight = ch::getLoopWeight<false>(facade, node);
const auto loop_weight = ch::getLoopMetric<EdgeWeight>(facade, node);
if (std::get<0>(loop_weight) != INVALID_EDGE_WEIGHT)
{
const auto new_weight_with_loop = weight + std::get<0>(loop_weight);
if (new_weight_with_loop >= 0)
if (new_weight_with_loop >= EdgeWeight{0})
{
weight = new_weight_with_loop;
auto result = ch::getLoopWeight<true>(facade, node);
auto result = ch::getLoopMetric<EdgeDuration>(facade, node);
duration += std::get<0>(result);
distance += std::get<1>(result);
return true;
@ -67,9 +67,9 @@ void relaxOutgoingEdges(
const auto edge_duration = data.duration;
const auto edge_distance = data.distance;

BOOST_ASSERT_MSG(edge_weight > 0, "edge_weight invalid");
BOOST_ASSERT_MSG(edge_weight > EdgeWeight{0}, "edge_weight invalid");
const auto to_weight = heapNode.weight + edge_weight;
const auto to_duration = heapNode.data.duration + edge_duration;
const auto to_duration = heapNode.data.duration + to_alias<EdgeDuration>(edge_duration);
const auto to_distance = heapNode.data.distance + edge_distance;

const auto toHeapNode = query_heap.GetHeapNodeIfWasInserted(to);
@ -120,7 +120,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,

auto &current_weight = weights_table[row_index * number_of_targets + column_index];

EdgeDistance nulldistance = 0;
EdgeDistance nulldistance = {0};

auto &current_duration = durations_table[row_index * number_of_targets + column_index];
auto &current_distance =
@ -132,7 +132,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
auto new_duration = heapNode.data.duration + target_duration;
auto new_distance = heapNode.data.distance + target_distance;

if (new_weight < 0)
if (new_weight < EdgeWeight{0})
{
if (addLoopWeight(facade, heapNode.node, new_weight, new_duration, new_distance))
{

@ -62,10 +62,13 @@ void relaxBorderEdges(const DataFacade<mld::Algorithm> &facade,
|
||||
const auto node_weight = facade.GetNodeWeight(node_id);
|
||||
const auto node_duration = facade.GetNodeDuration(node_id);
|
||||
const auto node_distance = facade.GetNodeDistance(node_id);
|
||||
const auto turn_weight = node_weight + facade.GetWeightPenaltyForEdgeID(turn_id);
|
||||
const auto turn_duration = node_duration + facade.GetDurationPenaltyForEdgeID(turn_id);
|
||||
const auto turn_weight =
|
||||
node_weight + alias_cast<EdgeWeight>(facade.GetWeightPenaltyForEdgeID(turn_id));
|
||||
const auto turn_duration =
|
||||
node_duration +
|
||||
alias_cast<EdgeDuration>(facade.GetDurationPenaltyForEdgeID(turn_id));
|
||||
|
||||
BOOST_ASSERT_MSG(node_weight + turn_weight > 0, "edge weight is invalid");
|
||||
BOOST_ASSERT_MSG(node_weight + turn_weight > EdgeWeight{0}, "edge weight is invalid");
|
||||
const auto to_weight = weight + turn_weight;
|
||||
const auto to_duration = duration + turn_duration;
|
||||
const auto to_distance = distance + node_distance;
|
||||
@ -259,17 +262,17 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
|
||||
target_nodes_index.insert(
|
||||
{phantom_node.forward_segment_id.id,
|
||||
std::make_tuple(index,
|
||||
-phantom_node.GetForwardWeightPlusOffset(),
|
||||
-phantom_node.GetForwardDuration(),
|
||||
-phantom_node.GetForwardDistance())});
|
||||
EdgeWeight{0} - phantom_node.GetForwardWeightPlusOffset(),
|
||||
EdgeDuration{0} - phantom_node.GetForwardDuration(),
|
||||
EdgeDistance{0} - phantom_node.GetForwardDistance())});
|
||||
|
||||
if (phantom_node.IsValidReverseSource())
|
||||
target_nodes_index.insert(
|
||||
{phantom_node.reverse_segment_id.id,
|
||||
std::make_tuple(index,
|
||||
-phantom_node.GetReverseWeightPlusOffset(),
|
||||
-phantom_node.GetReverseDuration(),
|
||||
-phantom_node.GetReverseDistance())});
|
||||
EdgeWeight{0} - phantom_node.GetReverseWeightPlusOffset(),
|
||||
EdgeDuration{0} - phantom_node.GetReverseDuration(),
|
||||
EdgeDistance{0} - phantom_node.GetReverseDistance())});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -292,12 +295,12 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
std::tie(index, target_weight, target_duration, target_distance) = it->second;

const auto path_weight = weight + target_weight;
if (path_weight >= 0)
if (path_weight >= EdgeWeight{0})
{
const auto path_duration = duration + target_duration;
const auto path_distance = distance + target_distance;

EdgeDistance nulldistance = 0;
EdgeDistance nulldistance = {0};
auto &current_distance =
    distances_table.empty() ? nulldistance : distances_table[index];
@@ -350,17 +353,17 @@ oneToManySearch(SearchEngineData<Algorithm> &engine_working_data,
if (phantom_node.IsValidForwardSource())
{
insert_node(phantom_node.forward_segment_id.id,
-phantom_node.GetForwardWeightPlusOffset(),
-phantom_node.GetForwardDuration(),
-phantom_node.GetForwardDistance());
EdgeWeight{0} - phantom_node.GetForwardWeightPlusOffset(),
EdgeDuration{0} - phantom_node.GetForwardDuration(),
EdgeDistance{0} - phantom_node.GetForwardDistance());
}

if (phantom_node.IsValidReverseSource())
{
insert_node(phantom_node.reverse_segment_id.id,
-phantom_node.GetReverseWeightPlusOffset(),
-phantom_node.GetReverseDuration(),
-phantom_node.GetReverseDistance());
EdgeWeight{0} - phantom_node.GetReverseWeightPlusOffset(),
EdgeDuration{0} - phantom_node.GetReverseDuration(),
EdgeDistance{0} - phantom_node.GetReverseDistance());
}
}
else if (DIRECTION == REVERSE_DIRECTION)
@@ -444,7 +447,7 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
auto &current_weight = weights_table[location];
auto &current_duration = durations_table[location];

EdgeDistance nulldistance = 0;
EdgeDistance nulldistance = {0};
auto &current_distance = distances_table.empty() ? nulldistance : distances_table[location];

// Check if new weight is better
@@ -452,7 +455,8 @@ void forwardRoutingStep(const DataFacade<Algorithm> &facade,
auto new_duration = heapNode.data.duration + target_duration;
auto new_distance = heapNode.data.distance + target_distance;

if (new_weight >= 0 && std::tie(new_weight, new_duration, new_distance) <
if (new_weight >= EdgeWeight{0} &&
    std::tie(new_weight, new_duration, new_distance) <
        std::tie(current_weight, current_duration, current_distance))
{
current_weight = new_weight;
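The update condition above keeps the existing lexicographic tie-breaking over (weight, duration, distance); only the zero literal changes to the strong type. For reference, std::tie comparison is plain standard C++ behaviour, sketched here with ordinary ints and made-up values.

```cpp
#include <iostream>
#include <tuple>

int main()
{
    int new_weight = 10, new_duration = 7, new_distance = 3;
    int current_weight = 10, current_duration = 9, current_distance = 1;

    // Lexicographic comparison: weights tie, so the durations decide (7 < 9).
    bool better = std::tie(new_weight, new_duration, new_distance) <
                  std::tie(current_weight, current_duration, current_distance);
    std::cout << std::boolalpha << better << '\n'; // true
}
```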
@@ -217,8 +217,8 @@ SubMatchingList mapMatching(SearchEngineData<Algorithm> &engine_working_data,
const auto haversine_distance = util::coordinate_calculation::greatCircleDistance(
    prev_coordinate, current_coordinate);
// assumes minumum of 4 m/s
const EdgeWeight weight_upper_bound =
    ((haversine_distance + max_distance_delta) / 4.) * facade.GetWeightMultiplier();
const EdgeWeight weight_upper_bound = to_alias<EdgeWeight>(
    ((haversine_distance + max_distance_delta) / 4.) * facade.GetWeightMultiplier());

// compute d_t for this timestamp and the next one
for (const auto s : util::irange<std::size_t>(0UL, prev_viterbi.size()))
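Here the upper bound is computed in double and then converted with to_alias<EdgeWeight>. The exact conversion semantics live in OSRM's alias helpers, which this diff does not show; the sketch below assumes to_alias simply constructs the alias from a value cast to its underlying type, and from_alias does the reverse. Names and values are illustrative only.

```cpp
#include <cstdint>
#include <iostream>

// Minimal stand-ins; OSRM's real Alias/to_alias/from_alias are not shown in this diff.
template <typename T, typename Tag> struct Alias { using value_type = T; T value; };

template <typename To, typename From> To to_alias(From raw)
{
    return To{static_cast<typename To::value_type>(raw)};
}

template <typename T, typename AliasT> T from_alias(AliasT alias)
{
    return static_cast<T>(alias.value);
}

using EdgeWeight = Alias<std::int32_t, struct EdgeWeightTag>;

int main()
{
    const double haversine_distance = 420.0, max_distance_delta = 80.0, weight_multiplier = 10.0;
    // mirrors the "minimum of 4 m/s" bound used above, with placeholder numbers
    const EdgeWeight weight_upper_bound =
        to_alias<EdgeWeight>(((haversine_distance + max_distance_delta) / 4.) * weight_multiplier);
    std::cout << from_alias<double>(weight_upper_bound) << '\n'; // 1250
}
```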
@@ -109,10 +109,10 @@ void search(SearchEngineData<Algorithm> & /*engine_working_data*/,
weight = weight_upper_bound;

// get offset to account for offsets on phantom nodes on compressed edges
const auto min_edge_offset = std::min(0, forward_heap.MinKey());
BOOST_ASSERT(min_edge_offset <= 0);
const auto min_edge_offset = std::min<EdgeWeight>({0}, forward_heap.MinKey());
BOOST_ASSERT(min_edge_offset <= EdgeWeight{0});
// we only every insert negative offsets for nodes in the forward heap
BOOST_ASSERT(reverse_heap.MinKey() >= 0);
BOOST_ASSERT(reverse_heap.MinKey() >= EdgeWeight{0});

// run two-Target Dijkstra routing step.
while (0 < (forward_heap.Size() + reverse_heap.Size()))
@@ -182,8 +182,8 @@ std::vector<TurnData> generateTurns(const datafacade &facade,
all_turn_data.push_back(TurnData{coord_via,
                                 angle_in,
                                 turn_angle,
                                 turn_weight,
                                 turn_duration,
                                 alias_cast<EdgeWeight>(turn_weight),
                                 alias_cast<EdgeDuration>(turn_duration),
                                 turn_instruction});
}
}
@@ -74,24 +74,26 @@ unsigned CompressedEdgeContainer::GetZippedPositionForReverseID(const EdgeID edg
return map_iterator->second;
}

SegmentWeight CompressedEdgeContainer::ClipWeight(const SegmentWeight weight)
SegmentWeight CompressedEdgeContainer::ClipWeight(const EdgeWeight weight)
{
if (weight >= INVALID_SEGMENT_WEIGHT)
SegmentWeight seg_weight = alias_cast<SegmentWeight>(weight);
if (seg_weight >= INVALID_SEGMENT_WEIGHT)
{
clipped_weights++;
return MAX_SEGMENT_WEIGHT;
}
return weight;
return seg_weight;
}

SegmentDuration CompressedEdgeContainer::ClipDuration(const SegmentDuration duration)
SegmentDuration CompressedEdgeContainer::ClipDuration(const EdgeDuration duration)
{
if (duration >= INVALID_SEGMENT_DURATION)
SegmentDuration seg_duration = alias_cast<SegmentDuration>(duration);
if (seg_duration >= INVALID_SEGMENT_DURATION)
{
clipped_weights++;
return MAX_SEGMENT_DURATION;
}
return duration;
return seg_duration;
}
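ClipWeight and ClipDuration now accept the wider edge-level types and narrow them to the segment-level aliases before the saturation check. Below is a stripped-down sketch of the same control flow using placeholder typedefs and constants; the real constants and type widths come from OSRM's headers and are not shown in this diff.

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical stand-ins for OSRM's types and limits.
using SegmentWeight = std::uint32_t;
using EdgeWeight = std::int32_t;

constexpr SegmentWeight INVALID_SEGMENT_WEIGHT = 1u << 22;
constexpr SegmentWeight MAX_SEGMENT_WEIGHT = INVALID_SEGMENT_WEIGHT - 1;

// Same shape as ClipWeight above: narrow the edge-level value, then saturate.
// (The counter bump for clipped values is omitted here.)
SegmentWeight ClipWeight(const EdgeWeight weight)
{
    const auto seg_weight = static_cast<SegmentWeight>(weight);
    if (seg_weight >= INVALID_SEGMENT_WEIGHT)
        return MAX_SEGMENT_WEIGHT;
    return seg_weight;
}

int main()
{
    std::cout << ClipWeight(100) << ' ' << ClipWeight(1 << 23) << '\n'; // 100 4194303
}
```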
// Adds info for a compressed edge to the container. edge_id_2
@@ -119,8 +121,8 @@ void CompressedEdgeContainer::CompressEdge(const EdgeID edge_id_1,
BOOST_ASSERT(SPECIAL_EDGEID != edge_id_2);
BOOST_ASSERT(SPECIAL_NODEID != via_node_id);
BOOST_ASSERT(SPECIAL_NODEID != target_node_id);
BOOST_ASSERT(INVALID_SEGMENT_WEIGHT != weight1);
BOOST_ASSERT(INVALID_SEGMENT_WEIGHT != weight2);
BOOST_ASSERT(INVALID_EDGE_WEIGHT != weight1);
BOOST_ASSERT(INVALID_EDGE_WEIGHT != weight2);

// append list of removed edge_id plus via node to surviving edge id:
// <surv_1, .. , surv_n, via_node_id, rem_1, .. rem_n
@@ -207,13 +209,14 @@ void CompressedEdgeContainer::CompressEdge(const EdgeID edge_id_1,

void CompressedEdgeContainer::AddUncompressedEdge(const EdgeID edge_id,
                                                  const NodeID target_node_id,
                                                  const SegmentWeight weight,
                                                  const SegmentDuration duration)
                                                  const EdgeWeight weight,
                                                  const EdgeDuration duration)
{
// remove super-trivial geometries
BOOST_ASSERT(SPECIAL_EDGEID != edge_id);
BOOST_ASSERT(SPECIAL_NODEID != target_node_id);
BOOST_ASSERT(INVALID_EDGE_WEIGHT != weight);
BOOST_ASSERT(INVALID_EDGE_DURATION != duration);

// Add via node id. List is created if it does not exist
if (!HasEntryForID(edge_id))
@@ -336,12 +339,12 @@ void CompressedEdgeContainer::PrintStatistics() const
if (clipped_weights > 0)
{
util::Log(logWARNING) << "Clipped " << clipped_weights << " segment weights to "
                      << (INVALID_SEGMENT_WEIGHT - 1);
                      << MAX_SEGMENT_WEIGHT;
}
if (clipped_durations > 0)
{
util::Log(logWARNING) << "Clipped " << clipped_durations << " segment durations to "
                      << (INVALID_SEGMENT_DURATION - 1);
                      << MAX_SEGMENT_DURATION;
}

util::Log() << "Geometry successfully removed:"
@@ -34,20 +34,6 @@
#include <tbb/parallel_for.h>
#include <tbb/parallel_pipeline.h>

namespace std
{
template <> struct hash<std::pair<NodeID, NodeID>>
{
    std::size_t operator()(const std::pair<NodeID, NodeID> &mk) const noexcept
    {
        std::size_t seed = 0;
        boost::hash_combine(seed, mk.first);
        boost::hash_combine(seed, mk.second);
        return seed;
    }
};
} // namespace std

namespace osrm
{
namespace extractor
@ -96,7 +82,7 @@ void EdgeBasedGraphFactory::GetEdgeBasedNodeWeights(std::vector<EdgeWeight> &out
|
||||
}
|
||||
|
||||
void EdgeBasedGraphFactory::GetEdgeBasedNodeDurations(
|
||||
std::vector<EdgeWeight> &output_node_durations)
|
||||
std::vector<EdgeDuration> &output_node_durations)
|
||||
{
|
||||
using std::swap; // Koenig swap
|
||||
swap(m_edge_based_node_durations, output_node_durations);
|
||||
@ -149,7 +135,8 @@ NBGToEBG EdgeBasedGraphFactory::InsertEdgeBasedNode(const NodeID node_u, const N
|
||||
// * in other cases node weights must be masked with 0x7fffffff to clear MSB
|
||||
if (nbe_to_ebn_mapping[edge_id_1] != SPECIAL_NODEID &&
|
||||
nbe_to_ebn_mapping[edge_id_2] == SPECIAL_NODEID)
|
||||
m_edge_based_node_weights[nbe_to_ebn_mapping[edge_id_1]] |= 0x80000000;
|
||||
m_edge_based_node_weights[nbe_to_ebn_mapping[edge_id_1]] |=
|
||||
EdgeWeight{static_cast<EdgeWeight::value_type>(0x80000000)};
|
||||
|
||||
BOOST_ASSERT(m_compressed_edge_container.HasEntryForID(edge_id_1) ==
|
||||
m_compressed_edge_container.HasEntryForID(edge_id_2));
|
||||
@ -402,7 +389,7 @@ EdgeBasedGraphFactory::GenerateEdgeExpandedNodes(const WayRestrictionMap &way_re
|
||||
segregated_edges.count(eid) > 0;
|
||||
|
||||
const auto ebn_weight = m_edge_based_node_weights[nbe_to_ebn_mapping[eid]];
|
||||
BOOST_ASSERT((ebn_weight & 0x7fffffff) == edge_data.weight);
|
||||
BOOST_ASSERT((ebn_weight & EdgeWeight{0x7fffffff}) == edge_data.weight);
|
||||
m_edge_based_node_weights.push_back(ebn_weight);
|
||||
m_edge_based_node_durations.push_back(
|
||||
m_edge_based_node_durations[nbe_to_ebn_mapping[eid]]);
|
||||
@ -670,7 +657,7 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
|
||||
edge_data1.flags.highway_turn_classification,
|
||||
edge_data1.flags.access_turn_classification,
|
||||
((double)intersection::findEdgeLength(edge_geometries, node_based_edge_from) /
|
||||
edge_data1.duration) *
|
||||
from_alias<double>(edge_data1.duration)) *
|
||||
36,
|
||||
edge_data1.flags.road_classification.GetPriority(),
|
||||
// target info
|
||||
@ -682,7 +669,7 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
|
||||
edge_data2.flags.highway_turn_classification,
|
||||
edge_data2.flags.access_turn_classification,
|
||||
((double)intersection::findEdgeLength(edge_geometries, node_based_edge_to) /
|
||||
edge_data2.duration) *
|
||||
from_alias<double>(edge_data2.duration)) *
|
||||
36,
|
||||
edge_data2.flags.road_classification.GetPriority(),
|
||||
// connected roads
|
||||
@ -693,17 +680,18 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
|
||||
|
||||
// turn penalties are limited to [-2^15, 2^15) which roughly translates to 54 minutes
|
||||
// and fits signed 16bit deci-seconds
|
||||
auto weight_penalty =
|
||||
boost::numeric_cast<TurnPenalty>(extracted_turn.weight * weight_multiplier);
|
||||
auto duration_penalty = boost::numeric_cast<TurnPenalty>(extracted_turn.duration * 10.);
|
||||
auto weight_penalty = TurnPenalty{boost::numeric_cast<TurnPenalty::value_type>(
|
||||
extracted_turn.weight * weight_multiplier)};
|
||||
auto duration_penalty = TurnPenalty{
|
||||
boost::numeric_cast<TurnPenalty::value_type>(extracted_turn.duration * 10.)};
|
||||
|
||||
BOOST_ASSERT(SPECIAL_NODEID != nbe_to_ebn_mapping[node_based_edge_from]);
|
||||
BOOST_ASSERT(SPECIAL_NODEID != nbe_to_ebn_mapping[node_based_edge_to]);
|
||||
|
||||
// auto turn_id = m_edge_based_edge_list.size();
|
||||
auto weight = boost::numeric_cast<EdgeWeight>(edge_data1.weight + weight_penalty);
|
||||
auto duration = boost::numeric_cast<EdgeWeight>(edge_data1.duration + duration_penalty);
|
||||
auto distance = boost::numeric_cast<EdgeDistance>(edge_data1.distance);
|
||||
auto weight = edge_data1.weight + alias_cast<EdgeWeight>(weight_penalty);
|
||||
auto duration = edge_data1.duration + alias_cast<EdgeDuration>(duration_penalty);
|
||||
auto distance = edge_data1.distance;
|
||||
|
||||
EdgeBasedEdge edge_based_edge = {edge_based_node_from,
|
||||
edge_based_node_to,
|
||||
@ -867,7 +855,7 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
|
||||
edge_data.flags.access_turn_classification,
|
||||
((double)intersection::findEdgeLength(edge_geometries,
|
||||
connected_edge.eid) /
|
||||
edge_data.duration) *
|
||||
from_alias<double>(edge_data.duration)) *
|
||||
36,
|
||||
edge_data.flags.road_classification.GetPriority(),
|
||||
is_incoming,
|
||||
@ -1288,7 +1276,7 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
|
||||
std::vector<ConditionalTurnPenalty>
|
||||
EdgeBasedGraphFactory::IndexConditionals(std::vector<Conditional> &&conditionals) const
|
||||
{
|
||||
boost::unordered_multimap<std::pair<NodeID, NodeID>, ConditionalTurnPenalty *> index;
|
||||
std::unordered_multimap<std::pair<NodeID, NodeID>, ConditionalTurnPenalty *> index;
|
||||
|
||||
// build and index of all conditional restrictions
|
||||
for (auto &conditional : conditionals)
|
||||
|
@ -718,9 +718,11 @@ void ExtractionContainers::PrepareEdges(ScriptingEnvironment &scripting_environm
|
||||
scripting_environment.ProcessSegment(segment);
|
||||
|
||||
auto &edge = edge_iterator->result;
|
||||
edge.weight = std::max<EdgeWeight>(1, std::round(segment.weight * weight_multiplier));
|
||||
edge.duration = std::max<EdgeWeight>(1, std::round(segment.duration * 10.));
|
||||
edge.distance = static_cast<float>(accurate_distance);
|
||||
edge.weight = std::max<EdgeWeight>(
|
||||
{1}, to_alias<EdgeWeight>(std::round(segment.weight * weight_multiplier)));
|
||||
edge.duration = std::max<EdgeDuration>(
|
||||
{1}, to_alias<EdgeDuration>(std::round(segment.duration * 10.)));
|
||||
edge.distance = to_alias<EdgeDistance>(accurate_distance);
|
||||
|
||||
// assign new node id
|
||||
const auto node_id = mapExternalToInternalNodeID(
|
||||
@ -785,10 +787,8 @@ void ExtractionContainers::PrepareEdges(ScriptingEnvironment &scripting_environm
|
||||
NodeID source = all_edges_list[i].result.source;
|
||||
NodeID target = all_edges_list[i].result.target;
|
||||
|
||||
auto min_forward = std::make_pair(std::numeric_limits<EdgeWeight>::max(),
|
||||
std::numeric_limits<EdgeWeight>::max());
|
||||
auto min_backward = std::make_pair(std::numeric_limits<EdgeWeight>::max(),
|
||||
std::numeric_limits<EdgeWeight>::max());
|
||||
auto min_forward = std::make_pair(MAXIMAL_EDGE_WEIGHT, MAXIMAL_EDGE_DURATION);
|
||||
auto min_backward = std::make_pair(MAXIMAL_EDGE_WEIGHT, MAXIMAL_EDGE_DURATION);
|
||||
std::size_t min_forward_idx = std::numeric_limits<std::size_t>::max();
|
||||
std::size_t min_backward_idx = std::numeric_limits<std::size_t>::max();
|
||||
|
||||
|
@ -674,7 +674,7 @@ void Extractor::FindComponents(unsigned number_of_edge_based_nodes,
|
||||
|
||||
for (const auto &edge : input_edge_list)
|
||||
{
|
||||
BOOST_ASSERT_MSG(static_cast<unsigned int>(std::max(edge.data.weight, 1)) > 0,
|
||||
BOOST_ASSERT_MSG((std::max(edge.data.weight, EdgeWeight{1})) > EdgeWeight{0},
|
||||
"edge distance < 1");
|
||||
BOOST_ASSERT(edge.source < number_of_edge_based_nodes);
|
||||
BOOST_ASSERT(edge.target < number_of_edge_based_nodes);
|
||||
|
@ -433,9 +433,9 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti
|
||||
NodeBasedEdgeWithOSM edge = {
|
||||
OSMNodeID{static_cast<std::uint64_t>(first_node.ref())},
|
||||
OSMNodeID{static_cast<std::uint64_t>(last_node.ref())},
|
||||
0, // weight
|
||||
0, // duration
|
||||
0, // distance
|
||||
{0}, // weight
|
||||
{0}, // duration
|
||||
{0}, // distance
|
||||
{}, // geometry id
|
||||
static_cast<AnnotationID>(annotation_data_id),
|
||||
{true,
|
||||
@ -467,9 +467,9 @@ void ExtractorCallbacks::ProcessWay(const osmium::Way &input_way, const Extracti
|
||||
NodeBasedEdgeWithOSM edge = {
|
||||
OSMNodeID{static_cast<std::uint64_t>(first_node.ref())},
|
||||
OSMNodeID{static_cast<std::uint64_t>(last_node.ref())},
|
||||
0, // weight
|
||||
0, // duration
|
||||
0, // distance
|
||||
{0}, // weight
|
||||
{0}, // duration
|
||||
{0}, // distance
|
||||
{}, // geometry id
|
||||
static_cast<AnnotationID>(annotation_data_id),
|
||||
{false,
|
||||
|
@ -271,8 +271,8 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
EdgeWeight &weight_penalty) {
|
||||
if (has_traffic_control_node)
|
||||
{
|
||||
duration_penalty = extraction_turn.duration * SECOND_TO_DECISECOND;
|
||||
weight_penalty = extraction_turn.weight * weight_multiplier;
|
||||
duration_penalty = to_alias<EdgeDuration>(extraction_turn.duration * SECOND_TO_DECISECOND);
|
||||
weight_penalty = to_alias<EdgeWeight>(extraction_turn.weight * weight_multiplier);
|
||||
}
|
||||
};
|
||||
|
||||
@ -290,8 +290,8 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
const auto forward_duration1 = fwd_edge_data1.duration;
|
||||
const auto forward_duration2 = fwd_edge_data2.duration;
|
||||
|
||||
BOOST_ASSERT(0 != forward_weight1);
|
||||
BOOST_ASSERT(0 != forward_weight2);
|
||||
BOOST_ASSERT(EdgeWeight{0} != forward_weight1);
|
||||
BOOST_ASSERT(EdgeWeight{0} != forward_weight2);
|
||||
|
||||
const auto reverse_weight1 = rev_edge_data1.weight;
|
||||
const auto reverse_weight2 = rev_edge_data2.weight;
|
||||
@ -310,8 +310,8 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
BOOST_ASSERT(forward_distance2 == reverse_distance1);
|
||||
#endif
|
||||
|
||||
BOOST_ASSERT(0 != reverse_weight1);
|
||||
BOOST_ASSERT(0 != reverse_weight2);
|
||||
BOOST_ASSERT(EdgeWeight{0} != reverse_weight1);
|
||||
BOOST_ASSERT(EdgeWeight{0} != reverse_weight2);
|
||||
|
||||
auto apply_e2_to_e1 = [&graph](EdgeID edge1,
|
||||
EdgeID edge2,
|
||||
@ -359,8 +359,8 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
if (weight_penalty == INVALID_EDGE_WEIGHT &&
|
||||
other_weight_penalty != INVALID_EDGE_WEIGHT)
|
||||
{
|
||||
weight_penalty = 0;
|
||||
duration_penalty = 0;
|
||||
weight_penalty = {0};
|
||||
duration_penalty = {0};
|
||||
}
|
||||
};
|
||||
set_dummy_penalty(forward_node_weight_penalty,
|
||||
|
@ -570,7 +570,7 @@ void Sol2ScriptingEnvironment::InitContext(LuaScriptingContext &context)
|
||||
"precision",
|
||||
COORDINATE_PRECISION,
|
||||
"max_turn_weight",
|
||||
std::numeric_limits<TurnPenalty>::max());
|
||||
std::numeric_limits<TurnPenalty::value_type>::max());
|
||||
|
||||
// call initialize function
|
||||
sol::function setup_function = function_table.value()["setup"];
|
||||
|
@ -84,7 +84,7 @@ inline SegmentDuration convertToDuration(double speed_in_kmh, double distance_in
|
||||
const auto speed_in_ms = speed_in_kmh / 3.6;
|
||||
const auto duration = distance_in_meters / speed_in_ms;
|
||||
auto segment_duration = std::max<SegmentDuration>(
|
||||
1, boost::numeric_cast<SegmentDuration>(std::round(duration * 10.)));
|
||||
{1}, {boost::numeric_cast<SegmentDuration::value_type>(std::round(duration * 10.))});
|
||||
if (segment_duration >= INVALID_SEGMENT_DURATION)
|
||||
{
|
||||
util::Log(logWARNING) << "Clamping segment duration " << segment_duration << " to "
|
||||
@ -114,7 +114,8 @@ void checkWeightsConsistency(
|
||||
{
|
||||
auto range = segment_data.GetForwardWeights(geometry_id.id);
|
||||
// NOLINTNEXTLINE(bugprone-fold-init-type)
|
||||
EdgeWeight weight = std::accumulate(range.begin(), range.end(), EdgeWeight{0});
|
||||
EdgeWeight weight = alias_cast<EdgeWeight>(
|
||||
std::accumulate(range.begin(), range.end(), SegmentWeight{0}));
|
||||
if (weight > edge.data.weight)
|
||||
{
|
||||
util::Log(logWARNING) << geometry_id.id << " vs " << edge.data.turn_id << ":"
|
||||
@ -125,7 +126,8 @@ void checkWeightsConsistency(
|
||||
{
|
||||
auto range = segment_data.GetReverseWeights(geometry_id.id);
|
||||
// NOLINTNEXTLINE(bugprone-fold-init-type)
|
||||
EdgeWeight weight = std::accumulate(range.begin(), range.end(), EdgeWeight{0});
|
||||
EdgeWeight weight = alias_cast<EdgeWeight>(
|
||||
std::accumulate(range.begin(), range.end(), SegmentWeight{0}));
|
||||
if (weight > edge.data.weight)
|
||||
{
|
||||
util::Log(logWARNING) << geometry_id.id << " vs " << edge.data.turn_id << ":"
|
||||
@ -185,8 +187,10 @@ updateSegmentData(const UpdaterConfig &config,
|
||||
|
||||
const auto weight_multiplier = profile_properties.GetWeightMultiplier();
|
||||
const auto weight = distance_in_meters / rate;
|
||||
auto segment_weight = std::max<SegmentWeight>(
|
||||
1, boost::numeric_cast<SegmentWeight>(std::round(weight * weight_multiplier)));
|
||||
auto segment_weight =
|
||||
std::max<SegmentWeight>({1},
|
||||
{boost::numeric_cast<SegmentWeight::value_type>(
|
||||
std::round(weight * weight_multiplier))});
|
||||
if (segment_weight >= INVALID_SEGMENT_WEIGHT)
|
||||
{
|
||||
util::Log(logWARNING) << "Clamping segment weight " << segment_weight << " to "
|
||||
@ -356,16 +360,19 @@ updateSegmentData(const UpdaterConfig &config,
|
||||
if (new_fwd_datasources_range[segment_offset] == LUA_SOURCE)
|
||||
continue;
|
||||
|
||||
if (old_fwd_durations_range[segment_offset] >=
|
||||
(new_fwd_durations_range[segment_offset] * config.log_edge_updates_factor))
|
||||
SegmentDuration old_fwd_duration = old_fwd_durations_range[segment_offset];
|
||||
SegmentDuration new_fwd_duration = new_fwd_durations_range[segment_offset];
|
||||
|
||||
if (old_fwd_duration >=
|
||||
to_alias<SegmentDuration>(from_alias<double>(new_fwd_duration) *
|
||||
config.log_edge_updates_factor))
|
||||
{
|
||||
auto from = osm_node_ids[nodes_range[segment_offset]];
|
||||
auto to = osm_node_ids[nodes_range[segment_offset + 1]];
|
||||
util::Log(logWARNING)
|
||||
<< "[weight updates] Edge weight update from "
|
||||
<< old_fwd_durations_range[segment_offset] / 10. << "s to "
|
||||
<< new_fwd_durations_range[segment_offset] / 10. << "s Segment: " << from
|
||||
<< "," << to << " based on "
|
||||
util::Log(logWARNING) << "[weight updates] Edge weight update from "
|
||||
<< from_alias<double>(old_fwd_duration) / 10. << "s to "
|
||||
<< from_alias<double>(new_fwd_duration) / 10.
|
||||
<< "s Segment: " << from << "," << to << " based on "
|
||||
<< config.segment_speed_lookup_paths
|
||||
[new_fwd_datasources_range[segment_offset] - 1];
|
||||
}
|
||||
@ -377,16 +384,19 @@ updateSegmentData(const UpdaterConfig &config,
|
||||
if (new_rev_datasources_range[segment_offset] == LUA_SOURCE)
|
||||
continue;
|
||||
|
||||
if (old_rev_durations_range[segment_offset] >=
|
||||
(new_rev_durations_range[segment_offset] * config.log_edge_updates_factor))
|
||||
SegmentDuration old_rev_duration = old_rev_durations_range[segment_offset];
|
||||
SegmentDuration new_rev_duration = new_rev_durations_range[segment_offset];
|
||||
|
||||
if (old_rev_duration >=
|
||||
to_alias<SegmentDuration>(from_alias<double>(new_rev_duration) *
|
||||
config.log_edge_updates_factor))
|
||||
{
|
||||
auto from = osm_node_ids[nodes_range[segment_offset + 1]];
|
||||
auto to = osm_node_ids[nodes_range[segment_offset]];
|
||||
util::Log(logWARNING)
|
||||
<< "[weight updates] Edge weight update from "
|
||||
<< old_rev_durations_range[segment_offset] / 10. << "s to "
|
||||
<< new_rev_durations_range[segment_offset] / 10. << "s Segment: " << from
|
||||
<< "," << to << " based on "
|
||||
util::Log(logWARNING) << "[weight updates] Edge weight update from "
|
||||
<< from_alias<double>(old_rev_duration) / 10. << "s to "
|
||||
<< from_alias<double>(new_rev_duration) / 10.
|
||||
<< "s Segment: " << from << "," << to << " based on "
|
||||
<< config.segment_speed_lookup_paths
|
||||
[new_rev_datasources_range[segment_offset] - 1];
|
||||
}
|
||||
@ -455,18 +465,20 @@ updateTurnPenalties(const UpdaterConfig &config,
|
||||
|
||||
if (auto value = turn_penalty_lookup(osm_turn))
|
||||
{
|
||||
turn_duration_penalty =
|
||||
boost::numeric_cast<TurnPenalty>(std::round(value->duration * 10.));
|
||||
turn_weight_penalty = boost::numeric_cast<TurnPenalty>(std::round(
|
||||
std::isfinite(value->weight) ? value->weight * weight_multiplier
|
||||
: turn_duration_penalty * weight_multiplier / 10.));
|
||||
turn_duration_penalty = {
|
||||
boost::numeric_cast<TurnPenalty::value_type>(std::round(value->duration * 10.))};
|
||||
turn_weight_penalty = {boost::numeric_cast<TurnPenalty::value_type>(
|
||||
std::round(std::isfinite(value->weight)
|
||||
? value->weight * weight_multiplier
|
||||
: from_alias<TurnPenalty::value_type>(turn_duration_penalty) *
|
||||
weight_multiplier / 10.))};
|
||||
|
||||
turn_duration_penalties[edge_index] = turn_duration_penalty;
|
||||
turn_weight_penalties[edge_index] = turn_weight_penalty;
|
||||
updated_turns.push_back(edge_index);
|
||||
}
|
||||
|
||||
if (turn_weight_penalty < 0)
|
||||
if (turn_weight_penalty < TurnPenalty{0})
|
||||
{
|
||||
util::Log(logWARNING) << "Negative turn penalty at " << osm_turn.from << ", "
|
||||
<< osm_turn.via << ", " << osm_turn.to << ": turn penalty "
|
||||
@ -674,42 +686,44 @@ Updater::LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &e
|
||||
return std::tie(lhs.id, lhs.forward) < std::tie(rhs.id, rhs.forward);
|
||||
});
|
||||
|
||||
using WeightAndDuration = std::tuple<EdgeWeight, EdgeWeight>;
|
||||
using WeightAndDuration = std::tuple<EdgeWeight, EdgeDuration>;
|
||||
const auto compute_new_weight_and_duration =
|
||||
[&](const GeometryID geometry_id) -> WeightAndDuration {
|
||||
EdgeWeight new_weight = 0;
|
||||
EdgeWeight new_duration = 0;
|
||||
EdgeWeight new_weight = {0};
|
||||
EdgeDuration new_duration = {0};
|
||||
if (geometry_id.forward)
|
||||
{
|
||||
const auto weights = segment_data.GetForwardWeights(geometry_id.id);
|
||||
for (const auto weight : weights)
|
||||
for (const SegmentWeight weight : weights)
|
||||
{
|
||||
if (weight == INVALID_SEGMENT_WEIGHT)
|
||||
{
|
||||
new_weight = INVALID_EDGE_WEIGHT;
|
||||
break;
|
||||
}
|
||||
new_weight += weight;
|
||||
new_weight += alias_cast<EdgeWeight>(weight);
|
||||
}
|
||||
const auto durations = segment_data.GetForwardDurations(geometry_id.id);
|
||||
// NOLINTNEXTLINE(bugprone-fold-init-type)
|
||||
new_duration = std::accumulate(durations.begin(), durations.end(), EdgeWeight{0});
|
||||
new_duration = alias_cast<EdgeDuration>(
|
||||
std::accumulate(durations.begin(), durations.end(), SegmentDuration{0}));
|
||||
}
|
||||
else
|
||||
{
|
||||
const auto weights = segment_data.GetReverseWeights(geometry_id.id);
|
||||
for (const auto weight : weights)
|
||||
for (const SegmentWeight weight : weights)
|
||||
{
|
||||
if (weight == INVALID_SEGMENT_WEIGHT)
|
||||
{
|
||||
new_weight = INVALID_EDGE_WEIGHT;
|
||||
break;
|
||||
}
|
||||
new_weight += weight;
|
||||
new_weight += alias_cast<EdgeWeight>(SegmentWeight(weight));
|
||||
}
|
||||
const auto durations = segment_data.GetReverseDurations(geometry_id.id);
|
||||
// NOLINTNEXTLINE(bugprone-fold-init-type)
|
||||
new_duration = std::accumulate(durations.begin(), durations.end(), EdgeWeight{0});
|
||||
new_duration = alias_cast<EdgeDuration>(
|
||||
std::accumulate(durations.begin(), durations.end(), SegmentDuration{0}));
|
||||
}
|
||||
return std::make_tuple(new_weight, new_duration);
|
||||
};
|
||||
@ -740,7 +754,7 @@ Updater::LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &e
|
||||
// Find a segment with zero speed and simultaneously compute the new edge
|
||||
// weight
|
||||
EdgeWeight new_weight;
|
||||
EdgeWeight new_duration;
|
||||
EdgeDuration new_duration;
|
||||
std::tie(new_weight, new_duration) =
|
||||
accumulated_segment_data[updated_iter - updated_segments.begin()];
|
||||
|
||||
@ -749,7 +763,9 @@ Updater::LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &e
|
||||
// but we should always assign the same value here.
|
||||
BOOST_ASSERT(edge.source < node_weights.size());
|
||||
node_weights[edge.source] =
|
||||
node_weights[edge.source] & 0x80000000 ? new_weight | 0x80000000 : new_weight;
|
||||
from_alias<EdgeWeight::value_type>(node_weights[edge.source]) & 0x80000000
|
||||
? new_weight | EdgeWeight{static_cast<EdgeWeight::value_type>(0x80000000)}
|
||||
: new_weight;
|
||||
node_durations[edge.source] = new_duration;
|
||||
|
||||
// We found a zero-speed edge, so we'll skip this whole edge-based-edge
|
||||
@ -765,15 +781,15 @@ Updater::LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &e
|
||||
auto turn_weight_penalty = turn_weight_penalties[edge.data.turn_id];
|
||||
auto turn_duration_penalty = turn_duration_penalties[edge.data.turn_id];
|
||||
const auto num_nodes = segment_data.GetForwardGeometry(geometry_id.id).size();
|
||||
const auto weight_min_value = static_cast<EdgeWeight>(num_nodes);
|
||||
if (turn_weight_penalty + new_weight < weight_min_value)
|
||||
const auto weight_min_value = to_alias<EdgeWeight>(num_nodes);
|
||||
if (alias_cast<EdgeWeight>(turn_weight_penalty) + new_weight < weight_min_value)
|
||||
{
|
||||
if (turn_weight_penalty < 0)
|
||||
if (turn_weight_penalty < TurnPenalty{0})
|
||||
{
|
||||
util::Log(logWARNING)
|
||||
<< "turn penalty " << turn_weight_penalty
|
||||
<< " is too negative: clamping turn weight to " << weight_min_value;
|
||||
turn_weight_penalty = weight_min_value - new_weight;
|
||||
turn_weight_penalty = alias_cast<TurnPenalty>(weight_min_value - new_weight);
|
||||
turn_weight_penalties[edge.data.turn_id] = turn_weight_penalty;
|
||||
}
|
||||
else
|
||||
@ -783,8 +799,9 @@ Updater::LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &e
|
||||
}
|
||||
|
||||
// Update edge weight
|
||||
edge.data.weight = new_weight + turn_weight_penalty;
|
||||
edge.data.duration = new_duration + turn_duration_penalty;
|
||||
edge.data.weight = new_weight + alias_cast<EdgeWeight>(turn_weight_penalty);
|
||||
edge.data.duration = from_alias<EdgeDuration::value_type>(
|
||||
new_duration + alias_cast<EdgeDuration>(turn_duration_penalty));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -37,31 +37,31 @@ BOOST_AUTO_TEST_CASE(merge_edge_of_multiple_graph)
|
||||
ContractedEdgeContainer container;
|
||||
|
||||
std::vector<QueryEdge> edges;
|
||||
edges.push_back(QueryEdge{0, 1, {1, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{1, 2, {2, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{2, 0, {3, false, 3, 3, 6, false, true}});
|
||||
edges.push_back(QueryEdge{2, 1, {4, false, 3, 3, 6, false, true}});
|
||||
edges.push_back(QueryEdge{0, 1, {1, false, {3}, {3}, {6}, true, false}});
|
||||
edges.push_back(QueryEdge{1, 2, {2, false, {3}, {3}, {6}, true, false}});
|
||||
edges.push_back(QueryEdge{2, 0, {3, false, {3}, {3}, {6}, false, true}});
|
||||
edges.push_back(QueryEdge{2, 1, {4, false, {3}, {3}, {6}, false, true}});
|
||||
container.Insert(edges);
|
||||
|
||||
edges.clear();
|
||||
edges.push_back(QueryEdge{0, 1, {1, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{1, 2, {2, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{2, 0, {3, false, 12, 12, 24, false, true}});
|
||||
edges.push_back(QueryEdge{2, 1, {4, false, 12, 12, 24, false, true}});
|
||||
edges.push_back(QueryEdge{0, 1, {1, false, {3}, {3}, {6}, true, false}});
|
||||
edges.push_back(QueryEdge{1, 2, {2, false, {3}, {3}, {6}, true, false}});
|
||||
edges.push_back(QueryEdge{2, 0, {3, false, {12}, {12}, {24}, false, true}});
|
||||
edges.push_back(QueryEdge{2, 1, {4, false, {12}, {12}, {24}, false, true}});
|
||||
container.Merge(edges);
|
||||
|
||||
edges.clear();
|
||||
edges.push_back(QueryEdge{1, 4, {5, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{1, 4, {5, false, {3}, {3}, {6}, true, false}});
|
||||
container.Merge(edges);
|
||||
|
||||
std::vector<QueryEdge> reference_edges;
|
||||
reference_edges.push_back(QueryEdge{0, 1, {1, false, 3, 3, 6, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 2, {2, false, 3, 3, 6, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 4, {5, false, 3, 3, 6, true, false}});
|
||||
reference_edges.push_back(QueryEdge{2, 0, {3, false, 3, 3, 6, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 0, {3, false, 12, 12, 24, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 1, {4, false, 3, 3, 6, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 1, {4, false, 12, 12, 24, false, true}});
|
||||
reference_edges.push_back(QueryEdge{0, 1, {1, false, {3}, {3}, {6}, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 2, {2, false, {3}, {3}, {6}, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 4, {5, false, {3}, {3}, {6}, true, false}});
|
||||
reference_edges.push_back(QueryEdge{2, 0, {3, false, {3}, {3}, {6}, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 0, {3, false, {12}, {12}, {24}, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 1, {4, false, {3}, {3}, {6}, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 1, {4, false, {12}, {12}, {24}, false, true}});
|
||||
CHECK_EQUAL_COLLECTIONS(container.edges, reference_edges);
|
||||
|
||||
auto filters = container.MakeEdgeFilters();
|
||||
@ -79,22 +79,22 @@ BOOST_AUTO_TEST_CASE(merge_edge_of_multiple_disjoint_graph)
|
||||
ContractedEdgeContainer container;
|
||||
|
||||
std::vector<QueryEdge> edges;
|
||||
edges.push_back(QueryEdge{0, 1, {1, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{1, 2, {2, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{2, 0, {3, false, 12, 12, 24, false, true}});
|
||||
edges.push_back(QueryEdge{2, 1, {4, false, 12, 12, 24, false, true}});
|
||||
edges.push_back(QueryEdge{0, 1, {1, false, {3}, {3}, {6}, true, false}});
|
||||
edges.push_back(QueryEdge{1, 2, {2, false, {3}, {3}, {6}, true, false}});
|
||||
edges.push_back(QueryEdge{2, 0, {3, false, {12}, {12}, {24}, false, true}});
|
||||
edges.push_back(QueryEdge{2, 1, {4, false, {12}, {12}, {24}, false, true}});
|
||||
container.Merge(edges);
|
||||
|
||||
edges.clear();
|
||||
edges.push_back(QueryEdge{1, 4, {5, false, 3, 3, 6, true, false}});
|
||||
edges.push_back(QueryEdge{1, 4, {5, false, {3}, {3}, {6}, true, false}});
|
||||
container.Merge(edges);
|
||||
|
||||
std::vector<QueryEdge> reference_edges;
|
||||
reference_edges.push_back(QueryEdge{0, 1, {1, false, 3, 3, 6, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 2, {2, false, 3, 3, 6, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 4, {5, false, 3, 3, 6, true, false}});
|
||||
reference_edges.push_back(QueryEdge{2, 0, {3, false, 12, 12, 24, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 1, {4, false, 12, 12, 24, false, true}});
|
||||
reference_edges.push_back(QueryEdge{0, 1, {1, false, {3}, {3}, {6}, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 2, {2, false, {3}, {3}, {6}, true, false}});
|
||||
reference_edges.push_back(QueryEdge{1, 4, {5, false, {3}, {3}, {6}, true, false}});
|
||||
reference_edges.push_back(QueryEdge{2, 0, {3, false, {12}, {12}, {24}, false, true}});
|
||||
reference_edges.push_back(QueryEdge{2, 1, {4, false, {12}, {12}, {24}, false, true}});
|
||||
CHECK_EQUAL_COLLECTIONS(container.edges, reference_edges);
|
||||
|
||||
auto filters = container.MakeEdgeFilters();
|
||||
|
@ -34,7 +34,7 @@ BOOST_AUTO_TEST_CASE(contract_graph)
|
||||
auto reference_graph = makeGraph(edges);
|
||||
|
||||
auto contracted_graph = reference_graph;
|
||||
std::vector<bool> core = contractGraph(contracted_graph, {1, 1, 1, 1, 1, 1});
|
||||
std::vector<bool> core = contractGraph(contracted_graph, {{1}, {1}, {1}, {1}, {1}, {1}});
|
||||
|
||||
// This contraction order is dependent on the priority caculation in the contractor
|
||||
// but deterministic for the same graph.
|
||||
@ -87,7 +87,7 @@ BOOST_AUTO_TEST_CASE(contract_graph)
|
||||
reference_graph.DeleteEdgesTo(1, 3);
|
||||
reference_graph.DeleteEdgesTo(4, 3);
|
||||
// Insert shortcut
|
||||
reference_graph.InsertEdge(4, 1, {2, 4, 1.0, 3, 0, true, true, false});
|
||||
reference_graph.InsertEdge(4, 1, {{2}, {4}, {1.0}, 3, 0, true, true, false});
|
||||
|
||||
/* After contracting 4:
|
||||
*
|
||||
|
@ -27,12 +27,12 @@ inline contractor::ContractorGraph makeGraph(const std::vector<TestEdge> &edges)
|
||||
start,
|
||||
target,
|
||||
contractor::ContractorEdgeData{
|
||||
weight, duration, distance, id++, 0, false, true, false}});
|
||||
{weight}, {duration}, {distance}, id++, 0, false, true, false}});
|
||||
input_edges.push_back(contractor::ContractorEdge{
|
||||
target,
|
||||
start,
|
||||
contractor::ContractorEdgeData{
|
||||
weight, duration, distance, id++, 0, false, false, true}});
|
||||
{weight}, {duration}, {distance}, id++, 0, false, false, true}});
|
||||
}
|
||||
std::sort(input_edges.begin(), input_edges.end());
|
||||
|
||||
|
@ -40,15 +40,15 @@ auto makeGraph(const MultiLevelPartition &mlp, const std::vector<MockEdge> &mock
|
||||
edges.push_back(Edge{m.start,
|
||||
m.target,
|
||||
m.weight,
|
||||
2 * m.weight,
|
||||
static_cast<EdgeDistance>(1.0),
|
||||
EdgeDuration{2} * alias_cast<EdgeDuration>(m.weight),
|
||||
EdgeDistance{1.0},
|
||||
true,
|
||||
false});
|
||||
edges.push_back(Edge{m.target,
|
||||
m.start,
|
||||
m.weight,
|
||||
2 * m.weight,
|
||||
static_cast<EdgeDistance>(1.0),
|
||||
EdgeDuration{2} * alias_cast<EdgeDuration>(m.weight),
|
||||
EdgeDistance{1.0},
|
||||
false,
|
||||
true});
|
||||
}
|
||||
@ -71,7 +71,7 @@ BOOST_AUTO_TEST_CASE(two_level_test)
|
||||
|
||||
BOOST_REQUIRE_EQUAL(mlp.GetNumberOfLevels(), 2);
|
||||
|
||||
std::vector<MockEdge> edges = {{0, 1, 1}, {0, 2, 1}, {2, 3, 1}, {3, 1, 1}, {3, 2, 1}};
|
||||
std::vector<MockEdge> edges = {{0, 1, {1}}, {0, 2, {1}}, {2, 3, {1}}, {3, 1, {1}}, {3, 2, {1}}};
|
||||
|
||||
auto graph = makeGraph(mlp, edges);
|
||||
std::vector<bool> node_filter(graph.GetNumberOfNodes(), true);
|
||||
@ -106,17 +106,17 @@ BOOST_AUTO_TEST_CASE(two_level_test)
|
||||
|
||||
// cell 0
|
||||
// check row source -> destination
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(0), 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(0), EdgeWeight{1});
|
||||
// check column destination -> source
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(1), 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(1), EdgeWeight{1});
|
||||
|
||||
// cell 1
|
||||
// check row source -> destination
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(2), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(3), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(2), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(3), EdgeWeight{1}, EdgeWeight{0});
|
||||
// check column destination -> source
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(2), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(3), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(2), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(3), EdgeWeight{1}, EdgeWeight{0});
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(four_levels_test)
|
||||
@ -130,27 +130,27 @@ BOOST_AUTO_TEST_CASE(four_levels_test)
|
||||
BOOST_REQUIRE_EQUAL(mlp.GetNumberOfLevels(), 4);
|
||||
|
||||
std::vector<MockEdge> edges = {
|
||||
{0, 1, 1}, // cell (0, 0, 0)
|
||||
{0, 2, 1}, {3, 1, 1}, {3, 2, 1},
|
||||
{0, 1, {1}}, // cell (0, 0, 0)
|
||||
{0, 2, {1}}, {3, 1, {1}}, {3, 2, {1}},
|
||||
|
||||
{4, 5, 1}, // cell (1, 0, 0)
|
||||
{4, 6, 1}, {4, 7, 1}, {5, 4, 1}, {5, 6, 1}, {5, 7, 1}, {6, 4, 1},
|
||||
{6, 5, 1}, {6, 7, 1}, {7, 4, 1}, {7, 5, 1}, {7, 6, 1},
|
||||
{4, 5, {1}}, // cell (1, 0, 0)
|
||||
{4, 6, {1}}, {4, 7, {1}}, {5, 4, {1}}, {5, 6, {1}}, {5, 7, {1}}, {6, 4, {1}},
|
||||
{6, 5, {1}}, {6, 7, {1}}, {7, 4, {1}}, {7, 5, {1}}, {7, 6, {1}},
|
||||
|
||||
{9, 11, 1}, // cell (2, 1, 0)
|
||||
{10, 8, 1}, {11, 10, 1},
|
||||
{9, 11, {1}}, // cell (2, 1, 0)
|
||||
{10, 8, {1}}, {11, 10, {1}},
|
||||
|
||||
{13, 12, 10}, // cell (3, 1, 0)
|
||||
{15, 14, 1},
|
||||
{13, 12, {10}}, // cell (3, 1, 0)
|
||||
{15, 14, {1}},
|
||||
|
||||
{2, 4, 1}, // edge between cells (0, 0, 0) -> (1, 0, 0)
|
||||
{5, 12, 1}, // edge between cells (1, 0, 0) -> (3, 1, 0)
|
||||
{8, 3, 1}, // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{9, 3, 1}, // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{12, 5, 1}, // edge between cells (3, 1, 0) -> (1, 0, 0)
|
||||
{13, 7, 1}, // edge between cells (3, 1, 0) -> (1, 0, 0)
|
||||
{14, 9, 1}, // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{14, 11, 1} // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{2, 4, {1}}, // edge between cells (0, 0, 0) -> (1, 0, 0)
|
||||
{5, 12, {1}}, // edge between cells (1, 0, 0) -> (3, 1, 0)
|
||||
{8, 3, {1}}, // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{9, 3, {1}}, // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{12, 5, {1}}, // edge between cells (3, 1, 0) -> (1, 0, 0)
|
||||
{13, 7, {1}}, // edge between cells (3, 1, 0) -> (1, 0, 0)
|
||||
{14, 9, {1}}, // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
{14, 11, {1}} // edge between cells (2, 1, 0) -> (0, 0, 0)
|
||||
};
|
||||
|
||||
auto graph = makeGraph(mlp, edges);
|
||||
@ -238,48 +238,53 @@ BOOST_AUTO_TEST_CASE(four_levels_test)
|
||||
|
||||
// level 1
|
||||
// cell 0
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(3), 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(2), 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(3), EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(2), EdgeWeight{1});
|
||||
|
||||
// cell 1
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(4), 0, 1, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(5), 1, 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(7), 1, 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(4), 0, 1, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(5), 1, 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(7), 1, 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(4), EdgeWeight{0}, EdgeWeight{1}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(5), EdgeWeight{1}, EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(7), EdgeWeight{1}, EdgeWeight{1}, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(4), EdgeWeight{0}, EdgeWeight{1}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(5), EdgeWeight{1}, EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(7), EdgeWeight{1}, EdgeWeight{1}, EdgeWeight{0});
|
||||
|
||||
// cell 2
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(9), 3, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(11), 2, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(8), 3, 2);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(11), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(9), EdgeWeight{3}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(11), EdgeWeight{2}, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(8), EdgeWeight{3}, EdgeWeight{2});
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(11), EdgeWeight{1}, EdgeWeight{0});
|
||||
|
||||
// cell 3
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetOutWeight(13), 10, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetInWeight(12), 10);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetOutWeight(13), EdgeWeight{10}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetInWeight(12), EdgeWeight{10});
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetInWeight(14), INVALID_EDGE_WEIGHT);
|
||||
|
||||
// level 2
|
||||
// cell 0
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(3), 3, 3);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(5), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(7), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(5), 3, 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(7), 3, 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(3), EdgeWeight{3}, EdgeWeight{3});
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(5), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(7), EdgeWeight{1}, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(5), EdgeWeight{3}, EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(7), EdgeWeight{3}, EdgeWeight{1}, EdgeWeight{0});
|
||||
|
||||
// cell 1
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(9), 3, 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(13), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT, 10);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(8), 3, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(9), 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(12), INVALID_EDGE_WEIGHT, 10);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(9), EdgeWeight{3}, EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(
|
||||
cell_2_1.GetOutWeight(13), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT, EdgeWeight{10});
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(8), EdgeWeight{3}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(9), EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(12), INVALID_EDGE_WEIGHT, EdgeWeight{10});
|
||||
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutDuration(9), 6, 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutDuration(13), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT, 20);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInDuration(8), 6, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInDuration(9), 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInDuration(12), INVALID_EDGE_WEIGHT, 20);
|
||||
CHECK_EQUAL_RANGE(
|
||||
cell_2_1.GetOutDuration(9), EdgeDuration{6}, EdgeDuration{0}, INVALID_EDGE_DURATION);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutDuration(13),
|
||||
INVALID_EDGE_DURATION,
|
||||
INVALID_EDGE_DURATION,
|
||||
EdgeDuration{20});
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInDuration(8), EdgeDuration{6}, INVALID_EDGE_DURATION);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInDuration(9), EdgeDuration{0}, INVALID_EDGE_DURATION);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInDuration(12), INVALID_EDGE_DURATION, EdgeDuration{20});
|
||||
|
||||
CellStorage storage_rec(mlp, graph);
|
||||
auto metric_rec = storage_rec.MakeMetric();
|
||||
@ -304,9 +309,10 @@ BOOST_AUTO_TEST_CASE(exclude_test)
|
||||
// 2 ----3 --- 4 --- 7
|
||||
// \__________/
|
||||
std::vector<MockEdge> edges = {
|
||||
{0, 1, 1}, {0, 2, 1}, {1, 0, 1}, {1, 2, 10}, {1, 3, 1}, {1, 5, 1}, {2, 0, 1}, {2, 1, 10},
|
||||
{2, 3, 1}, {2, 4, 1}, {3, 1, 1}, {3, 2, 1}, {3, 4, 1}, {4, 2, 1}, {4, 3, 1}, {4, 5, 1},
|
||||
{4, 7, 1}, {5, 1, 1}, {5, 4, 1}, {5, 6, 1}, {6, 5, 1}, {6, 7, 1}, {7, 4, 1}, {7, 6, 1},
|
||||
{0, 1, {1}}, {0, 2, {1}}, {1, 0, {1}}, {1, 2, {10}}, {1, 3, {1}}, {1, 5, {1}},
|
||||
{2, 0, {1}}, {2, 1, {10}}, {2, 3, {1}}, {2, 4, {1}}, {3, 1, {1}}, {3, 2, {1}},
|
||||
{3, 4, {1}}, {4, 2, {1}}, {4, 3, {1}}, {4, 5, {1}}, {4, 7, {1}}, {5, 1, {1}},
|
||||
{5, 4, {1}}, {5, 6, {1}}, {6, 5, {1}}, {6, 7, {1}}, {7, 4, {1}}, {7, 6, {1}},
|
||||
};
|
||||
|
||||
// node: 0 1 2 3 4 5 6 7
|
||||
@ -367,20 +373,20 @@ BOOST_AUTO_TEST_CASE(exclude_test)
|
||||
REQUIRE_SIZE_RANGE(cell_1_3.GetInWeight(7), 2);
|
||||
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(0), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(1), INVALID_EDGE_WEIGHT, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetOutWeight(1), INVALID_EDGE_WEIGHT, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(0), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(1), INVALID_EDGE_WEIGHT, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(2), 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_0.GetInWeight(1), INVALID_EDGE_WEIGHT, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(2), EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetOutWeight(3), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(2), 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(2), EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_1.GetInWeight(3), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(5), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(6), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(5), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(6), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetOutWeight(4), 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(5), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetOutWeight(6), EdgeWeight{1}, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(5), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_1_2.GetInWeight(6), EdgeWeight{1}, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetOutWeight(4), EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetOutWeight(7), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetInWeight(4), 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetInWeight(4), EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_1_3.GetInWeight(7), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
|
||||
auto cell_2_0 = storage.GetCell(metric, 2, 0);
|
||||
@ -407,19 +413,19 @@ BOOST_AUTO_TEST_CASE(exclude_test)
|
||||
REQUIRE_SIZE_RANGE(cell_2_1.GetInWeight(4), 2);
|
||||
REQUIRE_SIZE_RANGE(cell_2_1.GetInWeight(5), 2);
|
||||
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(1), 0, 10, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(2), 10, 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(1), EdgeWeight{0}, EdgeWeight{10}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetOutWeight(2), EdgeWeight{10}, EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(
|
||||
cell_2_0.GetOutWeight(3), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(1), 0, 10, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(2), 10, 0, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(1), EdgeWeight{0}, EdgeWeight{10}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(cell_2_0.GetInWeight(2), EdgeWeight{10}, EdgeWeight{0}, INVALID_EDGE_WEIGHT);
|
||||
CHECK_EQUAL_RANGE(
|
||||
cell_2_0.GetInWeight(3), INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT, INVALID_EDGE_WEIGHT);
|
||||
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(4), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(5), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(4), 0, 1);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(5), 1, 0);
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(4), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetOutWeight(5), EdgeWeight{1}, EdgeWeight{0});
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(4), EdgeWeight{0}, EdgeWeight{1});
|
||||
CHECK_EQUAL_RANGE(cell_2_1.GetInWeight(5), EdgeWeight{1}, EdgeWeight{0});
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_SUITE_END()
|
||||
|
@ -19,14 +19,14 @@ BOOST_AUTO_TEST_CASE(unchanged_collapse_route_result)
|
||||
PhantomNode target;
|
||||
source.forward_segment_id = {1, true};
|
||||
target.forward_segment_id = {6, true};
|
||||
PathData pathy{0, 2, 2, 3, 4, 5, 2, boost::none};
|
||||
PathData kathy{0, 1, 1, 2, 3, 4, 1, boost::none};
|
||||
PathData pathy{0, 2, {2}, {3}, {4}, {5}, 2, boost::none};
|
||||
PathData kathy{0, 1, {1}, {2}, {3}, {4}, 1, boost::none};
|
||||
InternalRouteResult one_leg_result;
|
||||
one_leg_result.unpacked_path_segments = {{pathy, kathy}};
|
||||
one_leg_result.leg_endpoints = {PhantomEndpoints{source, target}};
|
||||
one_leg_result.source_traversed_in_reverse = {true};
|
||||
one_leg_result.target_traversed_in_reverse = {true};
|
||||
one_leg_result.shortest_path_weight = 50;
|
||||
one_leg_result.shortest_path_weight = {50};
|
||||
|
||||
auto collapsed = CollapseInternalRouteResult(one_leg_result, {true, true});
|
||||
BOOST_CHECK_EQUAL(one_leg_result.unpacked_path_segments[0].front().turn_via_node,
|
||||
@ -39,22 +39,26 @@ BOOST_AUTO_TEST_CASE(two_legs_to_one_leg)
|
||||
{
|
||||
// from_edge_based_node, turn_via_node, weight_until_turn, weight_of_turn,
|
||||
// duration_until_turn, duration_of_turn, datasource_id, turn_edge
|
||||
PathData pathy{0, 2, 2, 3, 4, 5, 2, boost::none};
|
||||
PathData kathy{0, 1, 1, 2, 3, 4, 1, boost::none};
|
||||
PathData cathy{0, 3, 1, 2, 3, 4, 1, boost::none};
|
||||
PathData pathy{0, 2, {2}, {3}, {4}, {5}, 2, boost::none};
|
||||
PathData kathy{0, 1, {1}, {2}, {3}, {4}, 1, boost::none};
|
||||
PathData cathy{0, 3, {1}, {2}, {3}, {4}, 1, boost::none};
|
||||
PhantomNode node_1;
|
||||
PhantomNode node_2;
|
||||
PhantomNode node_3;
|
||||
node_1.forward_segment_id = {1, true};
|
||||
node_2.forward_segment_id = {6, true};
|
||||
node_3.forward_segment_id = {12, true};
|
||||
node_1.forward_weight = node_2.forward_weight = node_3.forward_weight = {1};
|
||||
node_1.forward_duration = node_2.forward_duration = node_3.forward_duration = {1};
|
||||
node_1.forward_distance = node_2.forward_distance = node_3.forward_distance = {1};
|
||||
|
||||
InternalRouteResult two_leg_result;
|
||||
two_leg_result.unpacked_path_segments = {{pathy, kathy}, {kathy, cathy}};
|
||||
two_leg_result.leg_endpoints = {PhantomEndpoints{node_1, node_2},
|
||||
PhantomEndpoints{node_2, node_3}};
|
||||
two_leg_result.source_traversed_in_reverse = {true, false};
|
||||
two_leg_result.target_traversed_in_reverse = {true, false};
|
||||
two_leg_result.shortest_path_weight = 80;
|
||||
two_leg_result.shortest_path_weight = {80};
|
||||
|
||||
auto collapsed = CollapseInternalRouteResult(two_leg_result, {true, false, true, true});
|
||||
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments.size(), 1);
|
||||
@ -70,11 +74,11 @@ BOOST_AUTO_TEST_CASE(two_legs_to_one_leg)

BOOST_AUTO_TEST_CASE(three_legs_to_two_legs)
{
PathData pathy{0, 2, 2, 3, 4, 5, 2, boost::none};
PathData kathy{0, 1, 1, 2, 3, 4, 1, boost::none};
PathData qathy{0, 5, 1, 2, 3, 4, 1, boost::none};
PathData cathy{0, 3, 1, 2, 3, 4, 1, boost::none};
PathData mathy{0, 4, 8, 9, 13, 4, 2, boost::none};
PathData pathy{0, 2, {2}, {3}, {4}, {5}, 2, boost::none};
PathData kathy{0, 1, {1}, {2}, {3}, {4}, 1, boost::none};
PathData qathy{0, 5, {1}, {2}, {3}, {4}, 1, boost::none};
PathData cathy{0, 3, {1}, {2}, {3}, {4}, 1, boost::none};
PathData mathy{0, 4, {8}, {9}, {13}, {4}, 2, boost::none};
PhantomNode node_1;
PhantomNode node_2;
PhantomNode node_3;
@ -83,6 +87,19 @@ BOOST_AUTO_TEST_CASE(three_legs_to_two_legs)
node_2.forward_segment_id = {6, true};
node_3.forward_segment_id = {12, true};
node_4.forward_segment_id = {18, true};
node_1.forward_weight = node_2.forward_weight = node_3.forward_weight =
node_4.forward_weight = {1};
node_1.forward_duration = node_2.forward_duration = node_3.forward_duration =
node_4.forward_duration = {1};
node_1.forward_distance = node_2.forward_distance = node_3.forward_distance =
node_4.forward_distance = {1};
node_1.reverse_weight = node_2.reverse_weight = node_3.reverse_weight =
node_4.reverse_weight = {1};
node_1.reverse_duration = node_2.reverse_duration = node_3.reverse_duration =
node_4.reverse_duration = {1};
node_1.reverse_distance = node_2.reverse_distance = node_3.reverse_distance =
node_4.reverse_distance = {1};

InternalRouteResult three_leg_result;
three_leg_result.unpacked_path_segments = {std::vector<PathData>{pathy, kathy},
std::vector<PathData>{kathy, qathy, cathy},
@ -92,7 +109,7 @@ BOOST_AUTO_TEST_CASE(three_legs_to_two_legs)
PhantomEndpoints{node_3, node_4}};
three_leg_result.source_traversed_in_reverse = {true, false, true},
three_leg_result.target_traversed_in_reverse = {true, false, true},
three_leg_result.shortest_path_weight = 140;
three_leg_result.shortest_path_weight = {140};

auto collapsed = CollapseInternalRouteResult(three_leg_result, {true, true, false, true});
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments.size(), 2);
@ -114,9 +131,9 @@ BOOST_AUTO_TEST_CASE(three_legs_to_two_legs)

BOOST_AUTO_TEST_CASE(two_legs_to_two_legs)
{
PathData pathy{0, 2, 2, 3, 4, 5, 2, boost::none};
PathData kathy{0, 1, 1, 2, 3, 4, 1, boost::none};
PathData cathy{0, 3, 1, 2, 3, 4, 1, boost::none};
PathData pathy{0, 2, {2}, {3}, {4}, {5}, 2, boost::none};
PathData kathy{0, 1, {1}, {2}, {3}, {4}, 1, boost::none};
PathData cathy{0, 3, {1}, {2}, {3}, {4}, 1, boost::none};
PhantomNode node_1;
PhantomNode node_2;
PhantomNode node_3;
@ -129,7 +146,7 @@ BOOST_AUTO_TEST_CASE(two_legs_to_two_legs)
PhantomEndpoints{node_2, node_3}};
two_leg_result.source_traversed_in_reverse = {true, false};
two_leg_result.target_traversed_in_reverse = {true, false};
two_leg_result.shortest_path_weight = 80;
two_leg_result.shortest_path_weight = {80};

auto collapsed = CollapseInternalRouteResult(two_leg_result, {true, true, true});
BOOST_CHECK_EQUAL(collapsed.unpacked_path_segments.size(), 2);

@ -87,14 +87,11 @@ struct ExternalCellStorage
return boost::make_iterator_range((EdgeWeight *)0, (EdgeWeight *)0);
}

auto GetSourceNodes() const
{
return boost::make_iterator_range((EdgeWeight *)0, (EdgeWeight *)0);
}
auto GetSourceNodes() const { return boost::make_iterator_range((NodeID *)0, (NodeID *)0); }

auto GetDestinationNodes() const
{
return boost::make_iterator_range((EdgeWeight *)0, (EdgeWeight *)0);
return boost::make_iterator_range((NodeID *)0, (NodeID *)0);
}
};

@ -265,7 +262,7 @@ class ContiguousInternalMemoryDataFacade<routing_algorithms::offline::Algorithm>
return {};
}

EdgeWeight GetNodeWeight(const NodeID /*node*/) const { return 0; }
EdgeWeight GetNodeWeight(const NodeID /*node*/) const { return {0}; }

bool IsForwardEdge(const NodeID /*edge*/) const { return true; }

@ -15,7 +15,7 @@ BOOST_AUTO_TEST_CASE(long_road_test)
CompressedEdgeContainer container;

// compress 0---1---2 to 0---2
container.CompressEdge(0, 1, 1, 2, 1, 1, 11, 11);
container.CompressEdge(0, 1, 1, 2, {1}, {1}, {11}, {11});
BOOST_CHECK(container.HasEntryForID(0));
BOOST_CHECK(!container.HasEntryForID(1));
BOOST_CHECK(!container.HasEntryForID(2));
@ -24,7 +24,7 @@ BOOST_AUTO_TEST_CASE(long_road_test)
BOOST_CHECK_EQUAL(container.GetLastEdgeSourceID(0), 1);

// compress 2---3---4 to 2---4
container.CompressEdge(2, 3, 3, 4, 1, 1, 11, 11);
container.CompressEdge(2, 3, 3, 4, {1}, {1}, {11}, {11});
BOOST_CHECK(container.HasEntryForID(0));
BOOST_CHECK(!container.HasEntryForID(1));
BOOST_CHECK(container.HasEntryForID(2));
@ -33,7 +33,7 @@ BOOST_AUTO_TEST_CASE(long_road_test)
BOOST_CHECK_EQUAL(container.GetLastEdgeSourceID(2), 3);

// compress 0---2---4 to 0---4
container.CompressEdge(0, 2, 2, 4, 2, 2, 22, 22);
container.CompressEdge(0, 2, 2, 4, {2}, {2}, {22}, {22});
BOOST_CHECK(container.HasEntryForID(0));
BOOST_CHECK(!container.HasEntryForID(1));
BOOST_CHECK(!container.HasEntryForID(2));
@ -53,7 +53,7 @@ BOOST_AUTO_TEST_CASE(t_crossing)
CompressedEdgeContainer container;

// compress 0---1---2 to 0---2
container.CompressEdge(0, 1, 1, 2, 1, 1, 11, 11);
container.CompressEdge(0, 1, 1, 2, {1}, {1}, {11}, {11});
BOOST_CHECK(container.HasEntryForID(0));
BOOST_CHECK(!container.HasEntryForID(1));
BOOST_CHECK(!container.HasEntryForID(2));
@ -64,7 +64,7 @@ BOOST_AUTO_TEST_CASE(t_crossing)
BOOST_CHECK_EQUAL(container.GetLastEdgeSourceID(0), 1);

// compress 2---5---6 to 2---6
container.CompressEdge(4, 5, 5, 6, 1, 1, 11, 11);
container.CompressEdge(4, 5, 5, 6, {1}, {1}, {11}, {11});
BOOST_CHECK(container.HasEntryForID(0));
BOOST_CHECK(!container.HasEntryForID(1));
BOOST_CHECK(!container.HasEntryForID(2));
@ -75,7 +75,7 @@ BOOST_AUTO_TEST_CASE(t_crossing)
BOOST_CHECK_EQUAL(container.GetLastEdgeSourceID(4), 5);

// compress 2---3---4 to 2---4
container.CompressEdge(2, 3, 3, 4, 1, 1, 11, 11);
container.CompressEdge(2, 3, 3, 4, {1}, {1}, {11}, {11});
BOOST_CHECK(container.HasEntryForID(0));
BOOST_CHECK(!container.HasEntryForID(1));
BOOST_CHECK(container.HasEntryForID(2));

@ -27,9 +27,9 @@ inline InputEdge MakeUnitEdge(const NodeID from, const NodeID to)
{
return {from, // source
to, // target
1, // weight
1, // duration
1, // distance
EdgeWeight{1}, // weight
EdgeDuration{1}, // duration
EdgeDistance{1}, // distance
GeometryID{0, false}, // geometry_id
false, // reversed
NodeBasedEdgeClassification(), // default flags

@ -43,9 +43,9 @@ BOOST_AUTO_TEST_CASE(simple_intersection_connectivity)
[](const NodeID from, const NodeID to, bool allowed, AnnotationID annotation) {
return InputEdge{from,
to,
1,
1,
1,
EdgeWeight{1},
EdgeDuration{1},
EdgeDistance{1},
GeometryID{0, false},
!allowed,
NodeBasedEdgeClassification(),
@ -170,9 +170,9 @@ BOOST_AUTO_TEST_CASE(roundabout_intersection_connectivity)
const auto unit_edge = [](const NodeID from, const NodeID to, bool allowed, bool roundabout) {
return InputEdge{from,
to,
1,
1,
1,
EdgeWeight{1},
EdgeDuration{1},
EdgeDistance{1},
GeometryID{0, false},
!allowed,
NodeBasedEdgeClassification{
@ -275,8 +275,15 @@ BOOST_AUTO_TEST_CASE(skip_degree_two_nodes)
// 6 8 ↔ 9
//
const auto unit_edge = [](const NodeID from, const NodeID to, bool allowed) {
return InputEdge{
from, to, 1, 1, 1, GeometryID{0, false}, !allowed, NodeBasedEdgeClassification{}, 0};
return InputEdge{from,
to,
EdgeWeight{1},
EdgeDuration{1},
EdgeDistance{1},
GeometryID{0, false},
!allowed,
NodeBasedEdgeClassification{},
0};
};
std::vector<InputEdge> edges = {unit_edge(0, 1, true), // 0
unit_edge(1, 0, true),

@ -47,11 +47,11 @@ class MockBaseDataFacade : public engine::datafacade::BaseDataFacade
}
TurnPenalty GetWeightPenaltyForEdgeID(const unsigned /* id */) const override final
{
return 0;
return {0};
}
TurnPenalty GetDurationPenaltyForEdgeID(const unsigned /* id */) const override final
{
return 0;
return {0};
}
std::string GetTimestamp() const override { return ""; }
NodeForwardRange GetUncompressedForwardGeometry(const EdgeID /* id */) const override

@ -99,14 +99,14 @@ BOOST_AUTO_TEST_CASE(mutable_cell_storage)
auto in_range_1_5_11 = cell_1_5.GetInWeight(11);

fill_range(out_range_1_0_0, {});
fill_range(out_range_1_2_4, {1});
fill_range(out_range_1_3_6, {2});
fill_range(out_range_1_5_11, {3});
fill_range(out_range_1_2_4, {EdgeWeight{1}});
fill_range(out_range_1_3_6, {EdgeWeight{2}});
fill_range(out_range_1_5_11, {EdgeWeight{3}});

CHECK_EQUAL_COLLECTIONS(in_range_1_1_3, std::vector<EdgeWeight>{});
CHECK_EQUAL_RANGE(in_range_1_2_5, 1);
CHECK_EQUAL_RANGE(in_range_1_3_7, 2);
CHECK_EQUAL_RANGE(in_range_1_5_11, 3);
CHECK_EQUAL_RANGE(in_range_1_2_5, EdgeWeight{1});
CHECK_EQUAL_RANGE(in_range_1_3_7, EdgeWeight{2});
CHECK_EQUAL_RANGE(in_range_1_5_11, EdgeWeight{3});

// Level 2
auto cell_2_0 = storage.GetCell(metric, 2, 0);
@ -125,14 +125,14 @@ BOOST_AUTO_TEST_CASE(mutable_cell_storage)
auto in_range_2_1_7 = cell_2_1.GetInWeight(7);
auto in_range_2_3_11 = cell_2_3.GetInWeight(11);

fill_range(out_range_2_0_0, {1});
fill_range(out_range_2_1_4, {2, 3});
fill_range(out_range_2_3_11, {4});
fill_range(out_range_2_0_0, {EdgeWeight{1}});
fill_range(out_range_2_1_4, {EdgeWeight{2}, EdgeWeight{3}});
fill_range(out_range_2_3_11, {EdgeWeight{4}});

CHECK_EQUAL_RANGE(in_range_2_0_3, 1);
CHECK_EQUAL_RANGE(in_range_2_1_4, 2);
CHECK_EQUAL_RANGE(in_range_2_1_7, 3);
CHECK_EQUAL_RANGE(in_range_2_3_11, 4);
CHECK_EQUAL_RANGE(in_range_2_0_3, EdgeWeight{1});
CHECK_EQUAL_RANGE(in_range_2_1_4, EdgeWeight{2});
CHECK_EQUAL_RANGE(in_range_2_1_7, EdgeWeight{3});
CHECK_EQUAL_RANGE(in_range_2_3_11, EdgeWeight{4});

// Level 3
auto cell_3_0 = storage.GetCell(metric, 3, 0);
@ -146,13 +146,13 @@ BOOST_AUTO_TEST_CASE(mutable_cell_storage)
auto in_range_3_1_4 = cell_3_1.GetInWeight(4);
auto in_range_3_1_7 = cell_3_1.GetInWeight(7);

fill_range(out_range_3_0_0, {1});
fill_range(out_range_3_1_4, {2, 3});
fill_range(out_range_3_1_7, {4, 5});
fill_range(out_range_3_0_0, {EdgeWeight{1}});
fill_range(out_range_3_1_4, {EdgeWeight{2}, EdgeWeight{3}});
fill_range(out_range_3_1_7, {EdgeWeight{4}, EdgeWeight{5}});

CHECK_EQUAL_RANGE(in_range_3_0_3, 1);
CHECK_EQUAL_RANGE(in_range_3_1_4, 2, 4);
CHECK_EQUAL_RANGE(in_range_3_1_7, 3, 5);
CHECK_EQUAL_RANGE(in_range_3_0_3, EdgeWeight{1});
CHECK_EQUAL_RANGE(in_range_3_1_4, EdgeWeight{2}, EdgeWeight{4});
CHECK_EQUAL_RANGE(in_range_3_1_7, EdgeWeight{3}, EdgeWeight{5});
}

BOOST_AUTO_TEST_CASE(immutable_cell_storage)
@ -230,17 +230,17 @@ BOOST_AUTO_TEST_CASE(immutable_cell_storage)
auto const_cell_1_5 = const_storage.GetCell(metric, 1, 5);

CHECK_EQUAL_RANGE(const_cell_1_0.GetSourceNodes(), 0);
CHECK_EQUAL_COLLECTIONS(const_cell_1_1.GetSourceNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_1_1.GetSourceNodes(), std::vector<NodeID>{});
CHECK_EQUAL_RANGE(const_cell_1_2.GetSourceNodes(), 4);
CHECK_EQUAL_RANGE(const_cell_1_3.GetSourceNodes(), 6);
CHECK_EQUAL_COLLECTIONS(const_cell_1_4.GetSourceNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_1_4.GetSourceNodes(), std::vector<NodeID>{});
CHECK_EQUAL_RANGE(const_cell_1_5.GetSourceNodes(), 11);

CHECK_EQUAL_COLLECTIONS(const_cell_1_0.GetDestinationNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_1_0.GetDestinationNodes(), std::vector<NodeID>{});
CHECK_EQUAL_RANGE(const_cell_1_1.GetDestinationNodes(), 3);
CHECK_EQUAL_RANGE(const_cell_1_2.GetDestinationNodes(), 5);
CHECK_EQUAL_RANGE(const_cell_1_3.GetDestinationNodes(), 7);
CHECK_EQUAL_COLLECTIONS(const_cell_1_4.GetDestinationNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_1_4.GetDestinationNodes(), std::vector<NodeID>{});
CHECK_EQUAL_RANGE(const_cell_1_5.GetDestinationNodes(), 11);

auto out_const_range_1_0_0 = const_cell_1_0.GetOutWeight(0);
@ -271,12 +271,12 @@ BOOST_AUTO_TEST_CASE(immutable_cell_storage)

CHECK_EQUAL_RANGE(const_cell_2_0.GetSourceNodes(), 0);
CHECK_EQUAL_RANGE(const_cell_2_1.GetSourceNodes(), 4);
CHECK_EQUAL_COLLECTIONS(const_cell_2_2.GetSourceNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_2_2.GetSourceNodes(), std::vector<NodeID>{});
CHECK_EQUAL_RANGE(const_cell_2_3.GetSourceNodes(), 11);

CHECK_EQUAL_RANGE(const_cell_2_0.GetDestinationNodes(), 3);
CHECK_EQUAL_RANGE(const_cell_2_1.GetDestinationNodes(), 4, 7);
CHECK_EQUAL_COLLECTIONS(const_cell_2_2.GetDestinationNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_2_2.GetDestinationNodes(), std::vector<NodeID>{});
CHECK_EQUAL_RANGE(const_cell_2_3.GetDestinationNodes(), 11);

auto out_const_range_2_0_0 = const_cell_2_0.GetOutWeight(0);
@ -325,8 +325,8 @@ BOOST_AUTO_TEST_CASE(immutable_cell_storage)

// Level 4
auto const_cell_4_0 = const_storage.GetCell(metric, 4, 0);
CHECK_EQUAL_COLLECTIONS(const_cell_4_0.GetSourceNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_4_0.GetDestinationNodes(), std::vector<EdgeWeight>{});
CHECK_EQUAL_COLLECTIONS(const_cell_4_0.GetSourceNodes(), std::vector<NodeID>{});
CHECK_EQUAL_COLLECTIONS(const_cell_4_0.GetDestinationNodes(), std::vector<NodeID>{});
}

BOOST_AUTO_TEST_SUITE_END()
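
Note on the checks in the cell-storage tests above: CHECK_EQUAL_RANGE and BOOST_CHECK_EQUAL compare elements and, on failure, try to print them, so once the element type becomes a strong EdgeWeight or NodeID alias it generally needs equality comparison and stream insertion to keep these tests compiling with readable diagnostics. The sketch below shows the kind of operators assumed; ToyWeight is an illustrative stand-in, not the repository's EdgeWeight.

#include <cstdint>
#include <ostream>

// Illustrative stand-in for a strongly typed weight; not the repository's EdgeWeight.
struct ToyWeight
{
    std::int32_t value;
};

// Equality is what BOOST_CHECK_EQUAL and range comparisons need to compare two weights.
inline bool operator==(ToyWeight lhs, ToyWeight rhs) { return lhs.value == rhs.value; }
inline bool operator!=(ToyWeight lhs, ToyWeight rhs) { return !(lhs == rhs); }

// Stream insertion is what lets the test framework print a useful message on failure.
inline std::ostream &operator<<(std::ostream &out, ToyWeight w) { return out << w.value; }
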
@ -32,9 +32,9 @@ auto makeGraph(const std::vector<MockEdge> &mock_edges)
max_id = std::max<std::size_t>(max_id, std::max(m.start, m.target));

edges.push_back(InputEdge{
m.start, m.target, EdgeBasedGraphEdgeData{SPECIAL_NODEID, 1, 1, 1, true, false}});
m.start, m.target, EdgeBasedGraphEdgeData{SPECIAL_NODEID, {1}, {1}, {1}, true, false}});
edges.push_back(InputEdge{
m.target, m.start, EdgeBasedGraphEdgeData{SPECIAL_NODEID, 1, 1, 1, false, true}});
m.target, m.start, EdgeBasedGraphEdgeData{SPECIAL_NODEID, {1}, {1}, {1}, false, true}});
}
std::sort(edges.begin(), edges.end());
return DynamicEdgeBasedGraph(max_id + 1, edges);

@ -76,13 +76,13 @@ BOOST_AUTO_TEST_CASE(packed_vector_iterator_test)

BOOST_CHECK(std::is_sorted(packed_vec.begin(), packed_vec.end()));

auto idx = 0;
auto vec_idx = 0;
for (auto value : packed_vec)
{
BOOST_CHECK_EQUAL(packed_vec[idx], value);
idx++;
BOOST_CHECK_EQUAL(packed_vec[vec_idx], value);
vec_idx++;
}
BOOST_CHECK_EQUAL(idx, packed_vec.size());
BOOST_CHECK_EQUAL(vec_idx, packed_vec.size());

auto range = boost::make_iterator_range(packed_vec.cbegin(), packed_vec.cend());
BOOST_CHECK_EQUAL(range.size(), packed_vec.size());
@ -215,18 +215,38 @@ BOOST_AUTO_TEST_CASE(packed_weights_container_with_type_erasure)

PackedVector<SegmentWeight, SEGMENT_WEIGHT_BITS> vector(7);

std::iota(vector.begin(), vector.end(), 0);
std::iota(vector.begin(), vector.end(), SegmentWeight{0});

auto forward = boost::make_iterator_range(vector.begin() + 1, vector.begin() + 6);
auto forward_any = WeightsAnyRange(forward.begin(), forward.end());

CHECK_EQUAL_RANGE(forward, 1, 2, 3, 4, 5);
CHECK_EQUAL_RANGE(forward_any, 1, 2, 3, 4, 5);
CHECK_EQUAL_RANGE(forward,
SegmentWeight{1},
SegmentWeight{2},
SegmentWeight{3},
SegmentWeight{4},
SegmentWeight{5});
CHECK_EQUAL_RANGE(forward_any,
SegmentWeight{1},
SegmentWeight{2},
SegmentWeight{3},
SegmentWeight{4},
SegmentWeight{5});

auto reverse = boost::adaptors::reverse(forward);
auto reverse_any = WeightsAnyRange(reverse);
CHECK_EQUAL_RANGE(reverse, 5, 4, 3, 2, 1);
CHECK_EQUAL_RANGE(reverse_any, 5, 4, 3, 2, 1);
CHECK_EQUAL_RANGE(reverse,
SegmentWeight{5},
SegmentWeight{4},
SegmentWeight{3},
SegmentWeight{2},
SegmentWeight{1});
CHECK_EQUAL_RANGE(reverse_any,
SegmentWeight{5},
SegmentWeight{4},
SegmentWeight{3},
SegmentWeight{2},
SegmentWeight{1});
}

BOOST_AUTO_TEST_CASE(packed_weights_view_with_type_erasure)
@ -244,14 +264,14 @@ BOOST_AUTO_TEST_CASE(packed_weights_view_with_type_erasure)
auto forward = boost::make_iterator_range(view.begin() + 1, view.begin() + 4);
auto forward_any = WeightsAnyRange(forward.begin(), forward.end());

CHECK_EQUAL_RANGE(forward, 1, 2, 3);
CHECK_EQUAL_RANGE(forward_any, 1, 2, 3);
CHECK_EQUAL_RANGE(forward, SegmentWeight{1}, SegmentWeight{2}, SegmentWeight{3});
CHECK_EQUAL_RANGE(forward_any, SegmentWeight{1}, SegmentWeight{2}, SegmentWeight{3});

auto reverse = boost::adaptors::reverse(forward);
auto reverse_any = WeightsAnyRange(reverse);

CHECK_EQUAL_RANGE(reverse, 3, 2, 1);
CHECK_EQUAL_RANGE(reverse_any, 3, 2, 1);
CHECK_EQUAL_RANGE(reverse, SegmentWeight{3}, SegmentWeight{2}, SegmentWeight{1});
CHECK_EQUAL_RANGE(reverse_any, SegmentWeight{3}, SegmentWeight{2}, SegmentWeight{1});
}

BOOST_AUTO_TEST_SUITE_END()
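
Taken together, the hunks above apply one pattern: metric values that used to be raw integers (weight, duration, distance) are now constructed explicitly, e.g. EdgeWeight{1}, EdgeDuration{1}, EdgeDistance{1}, so different metrics can no longer be mixed up silently at call sites. A minimal sketch of a strong-typedef wrapper that behaves this way is below; StrongAlias, ToyWeight and ToyDuration are illustrative assumptions, not the types actually used in the repository.

#include <cstdint>
#include <iostream>

// Minimal tag-based strong typedef: same storage, but distinct, non-convertible types.
template <typename T, typename Tag> struct StrongAlias
{
    T value;

    friend StrongAlias operator+(StrongAlias lhs, StrongAlias rhs) { return {lhs.value + rhs.value}; }
    friend bool operator==(StrongAlias lhs, StrongAlias rhs) { return lhs.value == rhs.value; }
};

struct ToyWeightTag {};
struct ToyDurationTag {};

using ToyWeight = StrongAlias<std::int32_t, ToyWeightTag>;
using ToyDuration = StrongAlias<std::int32_t, ToyDurationTag>;

int main()
{
    ToyWeight w{1};   // explicit construction, mirroring EdgeWeight{1} in the hunks above
    ToyDuration d{1}; // distinct type with the same underlying representation
    // ToyWeight oops = d;      // would not compile: no conversion between the aliases
    // ToyWeight oops2 = w + d; // would not compile either: operands must be the same alias
    std::cout << (w + ToyWeight{2} == ToyWeight{3}) << '\n'; // prints 1
    (void)d;
    return 0;
}

The payoff in the tests is exactly what the diff shows: each call site spells out which metric it means (EdgeWeight{1} vs. EdgeDuration{1}) instead of passing bare 1s that the compiler would accept in the wrong position.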