added max_speed to the profiles (#3089)

Kajari Ghosh authored on 2016-11-01 17:13:10 -04:00, committed by GitHub
parent 5da63998d6
commit c30f43b148
15 changed files with 266 additions and 180 deletions
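On the profile side the commit boils down to one new writable property: each Lua profile can declare the fastest plausible travel speed to use during map matching, and the matcher reads that value through the data facade (GetMapMatchingMaxSpeed) instead of a compile-time constant. A minimal sketch of the profile-side usage, assuming only the standard OSRM profile environment where the properties table is predefined; the 180/3.6 value is the one used by one of the profiles below:

-- sketch: opt a profile into a map-matching speed ceiling, in m/s.
-- Profiles that never set this fall back to DEFAULT_MAX_SPEED (180/3.6)
-- defined in profile_properties.hpp.
properties.max_speed_for_map_matching = 180 / 3.6 -- 180 km/h -> 50 m/s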


@@ -1,5 +1,6 @@
 'use strict';
 var util = require('util');
+var d3 = require('d3-queue');
 var polyline = require('polyline');
 
 module.exports = function () {
@@ -44,14 +45,25 @@ module.exports = function () {
             if (res.statusCode === 200) {
                 if (headers.has('matchings')) {
                     subMatchings = [];
-                    var sub = [json.tracepoints[0].location];
-                    for(var i = 1; i < json.tracepoints.length; i++){
-                        if(json.tracepoints[i-1].matchings_index === json.tracepoints[i].matchings_index) {
-                            sub.push(json.tracepoints[i].location);
-                        } else {
-                            subMatchings.push(sub);
-                            sub = [json.tracepoints[i].location];
+
+                    // find the first matched
+                    let start_index = 0;
+                    while (start_index < json.tracepoints.length && json.tracepoints[start_index] === null) start_index++;
+
+                    var sub = [];
+                    let prev_index = null;
+                    for(var i = start_index; i < json.tracepoints.length; i++){
+                        if (json.tracepoints[i] === null) continue;
+
+                        let current_index = json.tracepoints[i].matchings_index;
+
+                        if(prev_index !== current_index) {
+                            if (sub.length > 0) subMatchings.push(sub);
+                            sub = [];
+                            prev_index = current_index;
                         }
+                        sub.push(json.tracepoints[i].location);
                     }
                     subMatchings.push(sub);
                 }
@@ -82,7 +94,7 @@ module.exports = function () {
                 }
                 if (headers.has('OSM IDs')) {
-                    if (json.matchings.length != 1) throw new Error('*** CHecking annotation only supported for matchings with one subtrace');
+                    if (json.matchings.length != 1) throw new Error('*** Checking annotation only supported for matchings with one subtrace');
                     OSMIDs = this.OSMIDList(json.matchings[0]);
                 }
             }
@@ -118,59 +130,53 @@ module.exports = function () {
                 var encodedResult = '',
                     extendedTarget = '';
 
-                var q = d3.queue();
-
-                var testSubMatching = (sub, si, scb) => {
-                    if (si >= subMatchings.length) {
-                        ok = false;
-                        q.abort();
-                        scb();
-                    } else {
-                        var sq = d3.queue();
-                        var testSubNode = (ni, ncb) => {
-                            var node = this.findNodeByName(sub[ni]),
-                                outNode = subMatchings[si][ni];
-                            if (this.FuzzyMatch.matchLocation(outNode, node)) {
-                                encodedResult += sub[ni];
-                                extendedTarget += sub[ni];
-                            } else {
-                                encodedResult += util.format('? [%s,%s]', outNode[0], outNode[1]);
-                                extendedTarget += util.format('%s [%d,%d]', node.lat, node.lon);
-                                ok = false;
-                            }
-                            ncb();
-                        };
-                        for (var i=0; i<sub.length; i++) {
-                            sq.defer(testSubNode, i);
-                        }
-                        sq.awaitAll(scb);
-                    }
-                };
+                var testSubMatching = (sub, si) => {
+                    var testSubNode = (ni) => {
+                        var node = this.findNodeByName(sub[ni]),
+                            outNode = subMatchings[si][ni];
+                        if (this.FuzzyMatch.matchLocation(outNode, node)) {
+                            encodedResult += sub[ni];
+                            extendedTarget += sub[ni];
+                        } else {
+                            if (outNode != null) {
+                                encodedResult += util.format('? [%s,%s]', outNode[0], outNode[1]);
+                            } else {
+                                encodedResult += '?';
+                            }
+                            extendedTarget += util.format('%s [%d,%d]', node.lat, node.lon);
+                            ok = false;
+                        }
+                    };
+                    for (var i=0; i<sub.length; i++) {
+                        testSubNode(i);
+                    }
+                };
+
+                if (subMatchings.length != row.matchings.split(',').length) {
+                    ok = false;
+                    cb(new Error('*** table matchings and api response are not the same'));
+                }
 
                 row.matchings.split(',').forEach((sub, si) => {
-                    q.defer(testSubMatching, sub, si);
+                    testSubMatching(sub, si);
                 });
 
-                q.awaitAll(() => {
-                    if (ok) {
-                        if (headers.has('matchings')) {
-                            got.matchings = row.matchings;
-                        }
-                        if (headers.has('timestamps')) {
-                            got.timestamps = row.timestamps;
-                        }
-                    } else {
-                        got.matchings = encodedResult;
-                        row.matchings = extendedTarget;
+                if (ok) {
+                    if (headers.has('matchings')) {
+                        got.matchings = row.matchings;
+                    }
+                    if (headers.has('timestamps')) {
+                        got.timestamps = row.timestamps;
                    }
-                    cb(null, got);
-                });
+                } else {
+                    got.matchings = encodedResult;
+                    row.matchings = extendedTarget;
+                }
+                cb(null, got);
             };
 
             if (row.request) {


@@ -107,6 +107,7 @@ module.exports = {
     }
 
     matchLocation (got, want) {
+        if (got == null || want == null) return false;
         return this.match(got[0], util.format('%d ~0.0025%', want.lon)) &&
             this.match(got[1], util.format('%d ~0.0025%', want.lat));
     }


@@ -9,10 +9,11 @@ Feature: Basic Map Matching
             | geometries | geojson |
 
     Scenario: Testbot - Map matching with outlier that has no candidate
-        Given a grid size of 10 meters
+        Given a grid size of 100 meters
         Given the node map
             """
             a b c d
                 1
             """
@@ -22,7 +23,7 @@ Feature: Basic Map Matching
         When I match I should get
             | trace | timestamps | matchings |
-            | ab1d  | 0 1 2 3    | abcd      |
+            | ab1d  | 0 1 2 3    | ad        |
 
     Scenario: Testbot - Map matching with trace splitting
         Given the node map
@@ -169,3 +170,42 @@ Feature: Basic Map Matching
         When I match I should get
             | trace | matchings | geometry                                   |
             | abd   | abd       | 1,1,1.000089,1,1.000089,1,1.000089,0.99991 |
+
+    Scenario: Testbot - Speed greater than speed threshhold, should split -- returns trace as abcd but should be split into ab,cd
+        Given a grid size of 10 meters
+        Given the query options
+            | geometries | geojson |
+        Given the node map
+            """
+            a b ---- x
+                     |
+                     |
+            y --- c d
+            """
+        And the ways
+            | nodes  | oneway |
+            | abxycd | no     |
+        When I match I should get
+            | trace | timestamps | matchings |
+            | abcd  | 0 1 2 3    | ab,cd     |
+
+    Scenario: Testbot - Speed less than speed threshhold, should not split
+        Given a grid size of 10 meters
+        Given the query options
+            | geometries | geojson |
+        Given the node map
+            """
+            a b c d
+            """
+        And the ways
+            | nodes | oneway |
+            | abcd  | no     |
+        When I match I should get
+            | trace | timestamps | matchings |
+            | abcd  | 0 1 2 3    | abcd      |
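For reference, the split criterion these two scenarios exercise is the one wired into the matcher further down: when timestamps are present, the allowed slack is the median sample interval multiplied by the profile's max_speed_for_map_matching, and a candidate transition is pruned when the routed distance and the straight-line distance differ by at least that much. A rough illustration of the arithmetic in Lua (illustration only; the 30/3.6 ceiling is the value the last profile hunk below sets, assumed here to be the testbot profile, and the distances are made-up round numbers):

-- illustration of the pruning rule used by the C++ matcher below
local max_speed_for_map_matching = 30 / 3.6  -- m/s, from the profile
local median_sample_time = 1                 -- trace timestamps are 0 1 2 3
local max_distance_delta = median_sample_time * max_speed_for_map_matching -- ~8.3 m

-- a transition is pruned (forcing a split) when the detour is too large:
local function is_pruned(network_distance, haversine_distance)
    return math.abs(network_distance - haversine_distance) >= max_distance_delta
end

print(is_pruned(70, 20)) -- detour via x and y: true  -> matchings "ab,cd"
print(is_pruned(10, 10)) -- straight road:      false -> matchings "abcd"

In the first scenario the only way from b to c runs out through x and back through y, so the routed distance far exceeds the straight-line distance and the trace is split; in the second scenario the road is straight, the two distances agree, and no split occurs.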


@@ -173,6 +173,8 @@ class BaseDataFacade
     virtual bool GetContinueStraightDefault() const = 0;
 
+    virtual double GetMapMatchingMaxSpeed() const = 0;
+
     virtual BearingClassID GetBearingClassID(const NodeID id) const = 0;
 
     virtual util::guidance::TurnBearing PreTurnBearing(const EdgeID eid) const = 0;


@@ -899,6 +899,11 @@ class InternalDataFacade final : public BaseDataFacade
         return m_profile_properties.continue_straight_at_waypoint;
     }
 
+    double GetMapMatchingMaxSpeed() const override final
+    {
+        return m_profile_properties.max_speed_for_map_matching;
+    }
+
     BearingClassID GetBearingClassID(const NodeID nid) const override final
     {
         return m_bearing_class_id_table.at(nid);


@@ -931,6 +931,11 @@ class SharedDataFacade final : public BaseDataFacade
         return m_profile_properties->continue_straight_at_waypoint;
     }
 
+    double GetMapMatchingMaxSpeed() const override final
+    {
+        return m_profile_properties->max_speed_for_map_matching;
+    }
+
     BearingClassID GetBearingClassID(const NodeID id) const override final
     {
         return m_bearing_class_id_table.at(id);


@@ -7,6 +7,7 @@
 #include "engine/map_matching/matching_confidence.hpp"
 #include "engine/map_matching/sub_matching.hpp"
 
+#include "extractor/profile_properties.hpp"
 #include "util/coordinate_calculation.hpp"
 #include "util/for_each_pair.hpp"
@@ -32,7 +33,6 @@ using HMM = map_matching::HiddenMarkovModel<CandidateLists>;
 using SubMatchingList = std::vector<map_matching::SubMatching>;
 
 constexpr static const unsigned MAX_BROKEN_STATES = 10;
-constexpr static const double MAX_SPEED = 180 / 3.6; // 180km -> m/s
 
 static const constexpr double MATCHING_BETA = 10;
 constexpr static const double MAX_DISTANCE_DELTA = 2000.;
@@ -46,6 +46,7 @@ class MapMatching final : public BasicRoutingInterface<DataFacadeT, MapMatching<
     map_matching::EmissionLogProbability default_emission_log_probability;
     map_matching::TransitionLogProbability transition_log_probability;
     map_matching::MatchingConfidence confidence;
+    extractor::ProfileProperties m_profile_properties;
 
     unsigned GetMedianSampleTime(const std::vector<unsigned> &timestamps) const
     {
@@ -98,7 +99,7 @@ class MapMatching final : public BasicRoutingInterface<DataFacadeT, MapMatching<
         const auto max_distance_delta = [&] {
             if (use_timestamps)
             {
-                return median_sample_time * MAX_SPEED;
+                return median_sample_time * facade.GetMapMatchingMaxSpeed();
             }
             else
             {
@@ -172,24 +173,135 @@ class MapMatching final : public BasicRoutingInterface<DataFacadeT, MapMatching<
         prev_unbroken_timestamps.push_back(initial_timestamp);
         for (auto t = initial_timestamp + 1; t < candidates_list.size(); ++t)
         {
+            const bool gap_in_trace = [&, use_timestamps]() {
+                // use temporal information if available to determine a split
+                if (use_timestamps)
+                {
+                    return trace_timestamps[t] - trace_timestamps[prev_unbroken_timestamps.back()] >
+                           max_broken_time;
+                }
+                else
+                {
+                    return t - prev_unbroken_timestamps.back() > MAX_BROKEN_STATES;
+                }
+            }();
+
+            if (!gap_in_trace)
+            {
+                BOOST_ASSERT(!prev_unbroken_timestamps.empty());
+                const std::size_t prev_unbroken_timestamp = prev_unbroken_timestamps.back();
+
+                const auto &prev_viterbi = model.viterbi[prev_unbroken_timestamp];
+                const auto &prev_pruned = model.pruned[prev_unbroken_timestamp];
+                const auto &prev_unbroken_timestamps_list =
+                    candidates_list[prev_unbroken_timestamp];
+                const auto &prev_coordinate = trace_coordinates[prev_unbroken_timestamp];
+
+                auto &current_viterbi = model.viterbi[t];
+                auto &current_pruned = model.pruned[t];
+                auto &current_parents = model.parents[t];
+                auto &current_lengths = model.path_distances[t];
+
+                const auto &current_timestamps_list = candidates_list[t];
+                const auto &current_coordinate = trace_coordinates[t];
+
+                const auto haversine_distance = util::coordinate_calculation::haversineDistance(
+                    prev_coordinate, current_coordinate);
+                // assumes minumum of 0.1 m/s
+                const int duration_upper_bound =
+                    ((haversine_distance + max_distance_delta) * 0.25) * 10;
+
+                // compute d_t for this timestamp and the next one
+                for (const auto s : util::irange<std::size_t>(0UL, prev_viterbi.size()))
+                {
+                    if (prev_pruned[s])
+                    {
+                        continue;
+                    }
+
+                    for (const auto s_prime :
+                         util::irange<std::size_t>(0UL, current_viterbi.size()))
+                    {
+                        const double emission_pr = emission_log_probabilities[t][s_prime];
+                        double new_value = prev_viterbi[s] + emission_pr;
+                        if (current_viterbi[s_prime] > new_value)
+                        {
+                            continue;
+                        }
+
+                        forward_heap.Clear();
+                        reverse_heap.Clear();
+
+                        double network_distance;
+                        if (facade.GetCoreSize() > 0)
+                        {
+                            forward_core_heap.Clear();
+                            reverse_core_heap.Clear();
+                            network_distance = super::GetNetworkDistanceWithCore(
+                                facade,
+                                forward_heap,
+                                reverse_heap,
+                                forward_core_heap,
+                                reverse_core_heap,
+                                prev_unbroken_timestamps_list[s].phantom_node,
+                                current_timestamps_list[s_prime].phantom_node,
+                                duration_upper_bound);
+                        }
+                        else
+                        {
+                            network_distance = super::GetNetworkDistance(
+                                facade,
+                                forward_heap,
+                                reverse_heap,
+                                prev_unbroken_timestamps_list[s].phantom_node,
+                                current_timestamps_list[s_prime].phantom_node);
+                        }
+
+                        // get distance diff between loc1/2 and locs/s_prime
+                        const auto d_t = std::abs(network_distance - haversine_distance);
+
+                        // very low probability transition -> prune
+                        if (d_t >= max_distance_delta)
+                        {
+                            continue;
+                        }
+
+                        const double transition_pr = transition_log_probability(d_t);
+                        new_value += transition_pr;
+
+                        if (new_value > current_viterbi[s_prime])
+                        {
+                            current_viterbi[s_prime] = new_value;
+                            current_parents[s_prime] = std::make_pair(prev_unbroken_timestamp, s);
+                            current_lengths[s_prime] = network_distance;
+                            current_pruned[s_prime] = false;
+                            model.breakage[t] = false;
+                        }
+                    }
+                }
+
+                if (model.breakage[t])
+                {
+                    // save start of breakage -> we need this as split point
+                    if (t < breakage_begin)
+                    {
+                        breakage_begin = t;
+                    }
+
+                    BOOST_ASSERT(prev_unbroken_timestamps.size() > 0);
+                    // remove both ends of the breakage
+                    prev_unbroken_timestamps.pop_back();
+                }
+                else
+                {
+                    prev_unbroken_timestamps.push_back(t);
+                }
+            }
+
             // breakage recover has removed all previous good points
-            bool trace_split = prev_unbroken_timestamps.empty();
+            const bool trace_split = prev_unbroken_timestamps.empty();
 
-            // use temporal information if available to determine a split
-            if (use_timestamps)
-            {
-                trace_split =
-                    trace_split ||
-                    (trace_timestamps[t] - trace_timestamps[prev_unbroken_timestamps.back()] >
-                     max_broken_time);
-            }
-            else
-            {
-                trace_split =
-                    trace_split || (t - prev_unbroken_timestamps.back() > MAX_BROKEN_STATES);
-            }
-
-            if (trace_split)
+            if (trace_split || gap_in_trace)
             {
                 std::size_t split_index = t;
                 if (breakage_begin != map_matching::INVALID_STATE)
@@ -213,114 +325,9 @@ class MapMatching final : public BasicRoutingInterface<DataFacadeT, MapMatching<
                 // Important: We potentially go back here!
                 // However since t > new_start >= breakge_begin
                 // we can only reset trace_coordindates.size() times.
-                t = new_start + 1;
+                t = new_start;
+                // note: the head of the loop will call ++t, hence the next
+                // iteration will actually be on new_start+1
             }
-
-            BOOST_ASSERT(!prev_unbroken_timestamps.empty());
-            const std::size_t prev_unbroken_timestamp = prev_unbroken_timestamps.back();
-
-            const auto &prev_viterbi = model.viterbi[prev_unbroken_timestamp];
-            const auto &prev_pruned = model.pruned[prev_unbroken_timestamp];
-            const auto &prev_unbroken_timestamps_list = candidates_list[prev_unbroken_timestamp];
-            const auto &prev_coordinate = trace_coordinates[prev_unbroken_timestamp];
-
-            auto &current_viterbi = model.viterbi[t];
-            auto &current_pruned = model.pruned[t];
-            auto &current_parents = model.parents[t];
-            auto &current_lengths = model.path_distances[t];
-
-            const auto &current_timestamps_list = candidates_list[t];
-            const auto &current_coordinate = trace_coordinates[t];
-
-            const auto haversine_distance = util::coordinate_calculation::haversineDistance(
-                prev_coordinate, current_coordinate);
-            // assumes minumum of 0.1 m/s
-            const int duration_uppder_bound =
-                ((haversine_distance + max_distance_delta) * 0.25) * 10;
-
-            // compute d_t for this timestamp and the next one
-            for (const auto s : util::irange<std::size_t>(0UL, prev_viterbi.size()))
-            {
-                if (prev_pruned[s])
-                {
-                    continue;
-                }
-
-                for (const auto s_prime : util::irange<std::size_t>(0UL, current_viterbi.size()))
-                {
-                    const double emission_pr = emission_log_probabilities[t][s_prime];
-                    double new_value = prev_viterbi[s] + emission_pr;
-                    if (current_viterbi[s_prime] > new_value)
-                    {
-                        continue;
-                    }
-
-                    forward_heap.Clear();
-                    reverse_heap.Clear();
-
-                    double network_distance;
-                    if (facade.GetCoreSize() > 0)
-                    {
-                        forward_core_heap.Clear();
-                        reverse_core_heap.Clear();
-                        network_distance = super::GetNetworkDistanceWithCore(
-                            facade,
-                            forward_heap,
-                            reverse_heap,
-                            forward_core_heap,
-                            reverse_core_heap,
-                            prev_unbroken_timestamps_list[s].phantom_node,
-                            current_timestamps_list[s_prime].phantom_node,
-                            duration_uppder_bound);
-                    }
-                    else
-                    {
-                        network_distance = super::GetNetworkDistance(
-                            facade,
-                            forward_heap,
-                            reverse_heap,
-                            prev_unbroken_timestamps_list[s].phantom_node,
-                            current_timestamps_list[s_prime].phantom_node);
-                    }
-
-                    // get distance diff between loc1/2 and locs/s_prime
-                    const auto d_t = std::abs(network_distance - haversine_distance);
-
-                    // very low probability transition -> prune
-                    if (d_t >= max_distance_delta)
-                    {
-                        continue;
-                    }
-
-                    const double transition_pr = transition_log_probability(d_t);
-                    new_value += transition_pr;
-
-                    if (new_value > current_viterbi[s_prime])
-                    {
-                        current_viterbi[s_prime] = new_value;
-                        current_parents[s_prime] = std::make_pair(prev_unbroken_timestamp, s);
-                        current_lengths[s_prime] = network_distance;
-                        current_pruned[s_prime] = false;
-                        model.breakage[t] = false;
-                    }
-                }
-            }
-
-            if (model.breakage[t])
-            {
-                // save start of breakage -> we need this as split point
-                if (t < breakage_begin)
-                {
-                    breakage_begin = t;
-                }
-
-                BOOST_ASSERT(prev_unbroken_timestamps.size() > 0);
-                // remove both ends of the breakage
-                prev_unbroken_timestamps.pop_back();
-            }
-            else
-            {
-                prev_unbroken_timestamps.push_back(t);
-            }
         }


@@ -8,11 +8,14 @@ namespace osrm
 namespace extractor
 {
 
+const constexpr auto DEFAULT_MAX_SPEED = 180 / 3.6; // 180kmph -> m/s
+
 struct ProfileProperties
 {
     ProfileProperties()
-        : traffic_signal_penalty(0), u_turn_penalty(0), continue_straight_at_waypoint(true),
-          use_turn_restrictions(false), left_hand_driving(false)
+        : traffic_signal_penalty(0), u_turn_penalty(0),
+          max_speed_for_map_matching(DEFAULT_MAX_SPEED), continue_straight_at_waypoint(true),
+          use_turn_restrictions(false), left_hand_driving(false)
     {
     }
@@ -30,10 +33,18 @@ struct ProfileProperties
         traffic_signal_penalty = boost::numeric_cast<int>(traffic_signal_penalty_ * 10.);
     }
 
+    double GetMaxSpeedForMapMatching() const { return max_speed_for_map_matching; }
+
+    void SetMaxSpeedForMapMatching(const double max_speed_for_map_matching_)
+    {
+        max_speed_for_map_matching = max_speed_for_map_matching_;
+    }
+
     //! penalty to cross a traffic light in deci-seconds
     int traffic_signal_penalty;
     //! penalty to do a uturn in deci-seconds
     int u_turn_penalty;
+    double max_speed_for_map_matching;
     bool continue_straight_at_waypoint;
     bool use_turn_restrictions;
     bool left_hand_driving;


@@ -93,8 +93,9 @@ surface_speeds = {
 -- these need to be global because they are accesed externaly
 properties.traffic_signal_penalty = 2
-properties.use_turn_restrictions = false
 properties.u_turn_penalty = 20
+properties.max_speed_for_map_matching = 110/3.6 -- kmph -> m/s
+properties.use_turn_restrictions = false
 properties.continue_straight_at_waypoint = false
 
 local obey_oneway = true


@@ -147,6 +147,7 @@ maxspeed_table = {
 -- set profile properties
 properties.u_turn_penalty = 20
 properties.traffic_signal_penalty = 2
+properties.max_speed_for_map_matching = 180/3.6 -- 180kmph -> m/s
 properties.use_turn_restrictions = true
 properties.continue_straight_at_waypoint = true
 properties.left_hand_driving = false


@@ -66,6 +66,7 @@ leisure_speeds = {
 properties.traffic_signal_penalty = 2
 properties.u_turn_penalty = 2
+properties.max_speed_for_map_matching = 40/3.6 -- kmph -> m/s
 properties.use_turn_restrictions = false
 properties.continue_straight_at_waypoint = false


@@ -20,6 +20,7 @@ properties.continue_straight_at_waypoint = true
 properties.use_turn_restrictions = true
 properties.traffic_signal_penalty = 7 -- seconds
 properties.u_turn_penalty = 20
+properties.max_speed_for_map_matching = 30/3.6 --km -> m/s
 
 function limit_speed(speed, limits)
   -- don't use ipairs(), since it stops at the first nil value


@@ -4,6 +4,7 @@
 #include "engine/api/match_api.hpp"
 #include "engine/api/match_parameters.hpp"
 #include "engine/map_matching/bayes_classifier.hpp"
+#include "engine/map_matching/sub_matching.hpp"
 #include "util/coordinate_calculation.hpp"
 #include "util/integer_range.hpp"
 #include "util/json_util.hpp"


@@ -131,6 +131,9 @@ void LuaScriptingEnvironment::InitContext(LuaScriptingContext &context)
             .property("u_turn_penalty",
                       &ProfileProperties::GetUturnPenalty,
                       &ProfileProperties::SetUturnPenalty)
+            .property("max_speed_for_map_matching",
+                      &ProfileProperties::GetMaxSpeedForMapMatching,
+                      &ProfileProperties::SetMaxSpeedForMapMatching)
             .def_readwrite("use_turn_restrictions", &ProfileProperties::use_turn_restrictions)
             .def_readwrite("continue_straight_at_waypoint",
                            &ProfileProperties::continue_straight_at_waypoint)


@@ -206,6 +206,7 @@ class MockDataFacade final : public engine::datafacade::BaseDataFacade
     std::size_t GetCoreSize() const override { return 0; }
     std::string GetTimestamp() const override { return ""; }
     bool GetContinueStraightDefault() const override { return true; }
+    double GetMapMatchingMaxSpeed() const override { return 180 / 3.6; }
     BearingClassID GetBearingClassID(const NodeID /*id*/) const override { return 0; }
     EntryClassID GetEntryClassID(const EdgeID /*id*/) const override { return 0; }