Fix adding traffic signal penalties during compression (#6419)
Weight and duration penalties are flipped in the lambda function that applies penalties from traffic signals. Duration is in deciseconds, whilst weight is multiplied by 10^weight_precision, with weight_precision being 1 by default. Therefore, for the default routability profile, the penalties end up being the same, which is why no tests picked this up. If the distance weight is used, however, it will incorrectly apply an additional penalty to the weight, and not add the traffic signal delay to the duration in the routing graph. To confuse things further, in some API responses the values are correct because they use geometry data instead, but it's still possible that a sub-optimal route was selected. However, given that the distance weight is in meters, and the additional penalty per traffic light would be 20, it's unlikely this would have changed the routing results. In any case, we correct the function to apply the arguments correctly.
This commit is contained in:
parent
c1d2c15995
commit
9ad432c5b2
@ -4,6 +4,8 @@
|
||||
- FIXED: Handle snapping parameter for all plugins in NodeJs bindings, not just for Route. [#6417](https://github.com/Project-OSRM/osrm-backend/pull/6417)
|
||||
- FIXED: Fix annotations=true handling in NodeJS bindings & libosrm. [#6415](https://github.com/Project-OSRM/osrm-backend/pull/6415/)
|
||||
- FIXED: Fix bindings compilation issue on the latest Node. Update NAN to 2.17.0. [#6416](https://github.com/Project-OSRM/osrm-backend/pull/6416)
|
||||
- Routing:
|
||||
- FIXED: Fix adding traffic signal penalties during compression [#6419](https://github.com/Project-OSRM/osrm-backend/pull/6419)
|
||||
# 5.27.1
|
||||
- Changes from 5.27.0
|
||||
- Misc:
|
||||
|
@ -66,15 +66,50 @@ Feature: Car - Handle traffic lights
|
||||
| k | traffic_signals | backward |
|
||||
|
||||
When I route I should get
|
||||
| from | to | time | # |
|
||||
| 1 | 2 | 11.1s | no turn with no traffic light |
|
||||
| 2 | 1 | 11.1s | no turn with no traffic light |
|
||||
| 3 | 4 | 13.1s | no turn with traffic light |
|
||||
| 4 | 3 | 13.1s | no turn with traffic light |
|
||||
| 5 | 6 | 13.1s | no turn with traffic light |
|
||||
| 6 | 5 | 11.1s | no turn with no traffic light |
|
||||
| 7 | 8 | 11.1s | no turn with no traffic light |
|
||||
| 8 | 7 | 13.1s | no turn with traffic light |
|
||||
| from | to | time | weight | # |
|
||||
| 1 | 2 | 11.1s | 11.1 | no turn with no traffic light |
|
||||
| 2 | 1 | 11.1s | 11.1 | no turn with no traffic light |
|
||||
| 3 | 4 | 13.1s | 13.1 | no turn with traffic light |
|
||||
| 4 | 3 | 13.1s | 13.1 | no turn with traffic light |
|
||||
| 5 | 6 | 13.1s | 13.1 | no turn with traffic light |
|
||||
| 6 | 5 | 11.1s | 11.1 | no turn with no traffic light |
|
||||
| 7 | 8 | 11.1s | 11.1 | no turn with no traffic light |
|
||||
| 8 | 7 | 13.1s | 13.1 | no turn with traffic light |
|
||||
|
||||
|
||||
Scenario: Car - Traffic signal direction with distance weight
|
||||
Given the profile file "car" initialized with
|
||||
"""
|
||||
profile.properties.weight_name = 'distance'
|
||||
profile.properties.traffic_light_penalty = 100000
|
||||
"""
|
||||
|
||||
Given the node map
|
||||
"""
|
||||
a---b---c
|
||||
1 2
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
d-------f
|
||||
|
||||
"""
|
||||
|
||||
And the ways
|
||||
| nodes | highway |
|
||||
| abc | primary |
|
||||
| adfc | primary |
|
||||
|
||||
And the nodes
|
||||
| node | highway |
|
||||
| b | traffic_signals |
|
||||
|
||||
When I route I should get
|
||||
| from | to | time | distances | weight | # |
|
||||
| 1 | 2 | 100033.2s | 599.9m,0m | 599.8 | goes via the expensive traffic signal |
|
||||
|
||||
|
||||
|
||||
Scenario: Car - Encounters a traffic light
|
||||
|
@ -276,7 +276,6 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
const auto forward_weight2 = fwd_edge_data2.weight;
|
||||
const auto forward_duration1 = fwd_edge_data1.duration;
|
||||
const auto forward_duration2 = fwd_edge_data2.duration;
|
||||
const auto forward_distance2 = fwd_edge_data2.distance;
|
||||
|
||||
BOOST_ASSERT(0 != forward_weight1);
|
||||
BOOST_ASSERT(0 != forward_weight2);
|
||||
@ -285,7 +284,6 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
const auto reverse_weight2 = rev_edge_data2.weight;
|
||||
const auto reverse_duration1 = rev_edge_data1.duration;
|
||||
const auto reverse_duration2 = rev_edge_data2.duration;
|
||||
const auto reverse_distance2 = rev_edge_data2.distance;
|
||||
|
||||
#ifndef NDEBUG
|
||||
// Because distances are symmetrical, we only need one
|
||||
@ -293,6 +291,8 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
// their mirrors.
|
||||
const auto reverse_distance1 = rev_edge_data1.distance;
|
||||
const auto forward_distance1 = fwd_edge_data1.distance;
|
||||
const auto forward_distance2 = fwd_edge_data2.distance;
|
||||
const auto reverse_distance2 = rev_edge_data2.distance;
|
||||
BOOST_ASSERT(forward_distance1 == reverse_distance2);
|
||||
BOOST_ASSERT(forward_distance2 == reverse_distance1);
|
||||
#endif
|
||||
@ -300,35 +300,30 @@ void GraphCompressor::Compress(const std::unordered_set<NodeID> &barrier_nodes,
|
||||
BOOST_ASSERT(0 != reverse_weight1);
|
||||
BOOST_ASSERT(0 != reverse_weight2);
|
||||
|
||||
auto apply_e2_to_e1 = [&graph](EdgeID edge,
|
||||
EdgeWeight weight,
|
||||
EdgeDuration duration,
|
||||
EdgeDistance distance,
|
||||
EdgeDuration &duration_penalty,
|
||||
EdgeWeight &weight_penalty) {
|
||||
auto &edge_data = graph.GetEdgeData(edge);
|
||||
edge_data.weight += weight;
|
||||
edge_data.duration += duration;
|
||||
edge_data.distance += distance;
|
||||
auto apply_e2_to_e1 = [&graph](EdgeID edge1,
|
||||
EdgeID edge2,
|
||||
EdgeWeight &weight_penalty,
|
||||
EdgeDuration &duration_penalty) {
|
||||
auto &edge1_data = graph.GetEdgeData(edge1);
|
||||
const auto &edge2_data = graph.GetEdgeData(edge2);
|
||||
edge1_data.weight += edge2_data.weight;
|
||||
edge1_data.duration += edge2_data.duration;
|
||||
edge1_data.distance += edge2_data.distance;
|
||||
if (weight_penalty != INVALID_EDGE_WEIGHT &&
|
||||
duration_penalty != MAXIMAL_EDGE_DURATION)
|
||||
{
|
||||
edge_data.weight += weight_penalty;
|
||||
edge_data.duration += duration_penalty;
|
||||
edge1_data.weight += weight_penalty;
|
||||
edge1_data.duration += duration_penalty;
|
||||
// Note: no penalties for distances
|
||||
}
|
||||
};
|
||||
|
||||
apply_e2_to_e1(forward_e1,
|
||||
forward_weight2,
|
||||
forward_duration2,
|
||||
forward_distance2,
|
||||
forward_e2,
|
||||
forward_node_weight_penalty,
|
||||
forward_node_duration_penalty);
|
||||
apply_e2_to_e1(reverse_e1,
|
||||
reverse_weight2,
|
||||
reverse_duration2,
|
||||
reverse_distance2,
|
||||
reverse_e2,
|
||||
reverse_node_weight_penalty,
|
||||
reverse_node_duration_penalty);
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user