Implement arbitrary turn penalty file IO and integration (#2306)

Closes #1830
Lauren Budorick 2016-04-29 00:48:13 -07:00
parent cf17bd38eb
commit b8f7569e93
17 changed files with 346 additions and 60 deletions

.gitignore
View File

@@ -80,6 +80,7 @@ stxxl.errlog
 /test/profile.lua
 /test/cache
 /test/speeds.csv
+/test/penalties.csv
 /test/data/monaco.*
 node_modules

View File

@@ -31,7 +31,7 @@ Feature: Traffic - speeds
     4,1,27
     """

-    Scenario: Weighting not based on raster sources
+    Scenario: Weighting based on speed file
     Given the profile "testbot"
     Given the extract extra arguments "--generate-edge-lookup"
     Given the contract extra arguments "--segment-speed-file speeds.csv"

View File

@@ -0,0 +1,97 @@
@routing @speed @traffic
Feature: Traffic - turn penalties
Background: Evenly spaced grid with multiple intersections
Given the node map
| | a | | b | |
| c | d | e | f | g |
| | h | | i | |
| j | k | l | m | n |
| | o | | p | |
And the ways
| nodes | highway |
| ad | primary |
| cd | primary |
| de | primary |
| dhk | primary |
| bf | primary |
| ef | primary |
| fg | primary |
| fim | primary |
| jk | primary |
| kl | primary |
| ko | primary |
| lm | primary |
| mn | primary |
| mp | primary |
And the profile "car"
And the extract extra arguments "--generate-edge-lookup"
Scenario: Weighting not based on turn penalty file
When I route I should get
| from | to | route | speed | time |
| a | h | ad,dhk,dhk | 63 km/h | 11.5s +-1 |
# straight
| i | g | fim,fg,fg | 59 km/h | 12s +-1 |
# right
| a | e | ad,de,de | 57 km/h | 12.5s +-1 |
# left
| c | g | cd,de,ef,fg,fg | 63 km/h | 23s +-1 |
# double straight
| p | g | mp,fim,fg,fg | 61 km/h | 23.5s +-1 |
# straight-right
| a | l | ad,dhk,kl,kl | 60 km/h | 24s +-1 |
# straight-left
| l | e | kl,dhk,de,de | 59 km/h | 24.5s +-1 |
# double right
| g | n | fg,fim,mn,mn | 57 km/h | 25s +-1 |
# double left
Scenario: Weighting based on turn penalty file
Given the turn penalty file
"""
9,6,7,1.8
9,13,14,24.5
8,4,3,26
12,11,8,9
8,11,12,13
1,4,5,-0.2
"""
And the contract extra arguments "--turn-penalty-file penalties.csv"
When I route I should get
| from | to | route | speed | time |
| a | h | ad,dhk,dhk | 63 km/h | 11.5s +-1 |
# straight
| i | g | fim,fg,fg | 55 km/h | 13s +-1 |
# right - ifg penalty
| a | e | ad,de,de | 64 km/h | 11s +-1 |
# left - faster because of negative ade penalty
| c | g | cd,de,ef,fg,fg | 63 km/h | 23s +-1 |
# double straight
| p | g | mp,fim,fg,fg | 59 km/h | 24.5s +-1 |
# straight-right - ifg penalty
| a | l | ad,de,ef,fim,lm,lm | 61 km/h | 35.5s +-1 |
# was straight-left - forced around by hkl penalty
| l | e | lm,fim,ef,ef | 57 km/h | 25s +-1 |
# double right - forced left by lkh penalty
| g | n | fg,fim,mn,mn | 30 km/h | 47.5s +-1 |
# double left - imn penalty
| j | c | jk,kl,lm,fim,ef,de,cd,cd | 60 km/h | 48s +-1 |
# double left - hdc penalty ever so slightly higher than imn; forces all the way around
Scenario: Too-negative penalty clamps, but does not fail
Given the contract extra arguments "--turn-penalty-file penalties.csv"
And the profile "testbot"
And the turn penalty file
"""
1,4,5,-10
"""
When I route I should get
| from | to | route | time |
| a | d | ad,ad | 10s +-1 |
| a | e | ad,de,de | 10s +-1 |
| b | f | bf,bf | 10s +-1 |
| b | g | bf,fg,fg | 20s +-1 |
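A note on the format exercised above: each line of the turn penalty file is from_osm_node_id,via_osm_node_id,to_osm_node_id,penalty, where the three IDs name the OSM node just before the intersection, the intersection node itself, and the OSM node just after it, and the penalty is in seconds (negative values make a turn cheaper, as the "negative ade penalty" case shows). The following minimal sketch parses such a file the same way osrm-contract does further below, using the same fast-cpp-csv-parser io::CSVReader API; the "csv.h" include path and the file name are assumptions for illustration, not taken from this commit.

// Hedged sketch: read penalties.csv the way osrm-contract does further below.
#include "csv.h" // fast-cpp-csv-parser header; the include path may differ in the osrm tree
#include <cstdint>
#include <iostream>

int main()
{
    io::CSVReader<4> csv_in("penalties.csv"); // e.g. the file from the scenario above
    csv_in.set_header("from_node", "via_node", "to_node", "penalty");
    std::uint64_t from_node_id{};
    std::uint64_t via_node_id{};
    std::uint64_t to_node_id{};
    double penalty{}; // seconds; osrm-contract later multiplies by 10 to get deciseconds
    while (csv_in.read_row(from_node_id, via_node_id, to_node_id, penalty))
    {
        std::cout << from_node_id << " -> " << via_node_id << " -> " << to_node_id
                  << ": " << penalty << "s\n";
    }
}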

View File

@@ -10,13 +10,11 @@ module.exports = function () {
     });

     this.Given(/^the extract extra arguments "(.*?)"$/, (args, callback) => {
-        this.setExtractArgs(args);
-        callback();
+        this.setExtractArgs(args, callback);
     });

     this.Given(/^the contract extra arguments "(.*?)"$/, (args, callback) => {
-        this.setContractArgs(args);
-        callback();
+        this.setContractArgs(args, callback);
     });

     this.Given(/^a grid size of (\d+) meters$/, (meters, callback) => {

@@ -228,6 +226,10 @@ module.exports = function () {
         fs.writeFile(path.resolve(this.TEST_FOLDER, 'speeds.csv'), data, callback);
     });

+    this.Given(/^the turn penalty file$/, (data, callback) => {
+        fs.writeFile(path.resolve(this.TEST_FOLDER, 'penalties.csv'), data, callback);
+    });
+
     this.Given(/^the data has been saved to disk$/, (callback) => {
         try {
             this.reprocess(callback);

View File

@@ -105,11 +105,16 @@ module.exports = function () {
         } else cb();
     };

-    this.setExtractArgs = (args) => {
+    this.setExtractArgs = (args, callback) => {
         this.extractArgs = args;
+        this.forceExtract = true;
+        this.forceContract = true;
+        callback();
     };

-    this.setContractArgs = (args) => {
+    this.setContractArgs = (args, callback) => {
         this.contractArgs = args;
+        this.forceContract = true;
+        callback();
     };
 };

View File

@@ -295,9 +295,10 @@ module.exports = function () {
         this.writeAndExtract((e) => {
             if (e) return callback(e);
             this.isContracted((isContracted) => {
-                var contractFn = isContracted ? noop : this.contractData;
+                var contractFn = (isContracted && !this.forceContract) ? noop : this.contractData;
                 if (isContracted) this.log('Already contracted ' + this.osmData.contractedFile, 'preprocess');
                 contractFn((e) => {
+                    this.forceContract = false;
                     if (e) return callback(e);
                     this.logPreprocessDone();
                     callback();

@@ -311,9 +312,10 @@ module.exports = function () {
         this.writeInputData((e) => {
             if (e) return callback(e);
             this.isExtracted((isExtracted) => {
-                var extractFn = isExtracted ? noop : this.extractData;
+                var extractFn = (isExtracted && !this.forceExtract) ? noop : this.extractData;
                 if (isExtracted) this.log('Already extracted ' + this.osmData.extractedFile, 'preprocess');
                 extractFn((e) => {
+                    this.forceExtract = false;
                     callback(e);
                 });
             });

View File

@@ -24,10 +24,12 @@ module.exports = function () {
     });

     this.After((scenario, callback) => {
-        this.setExtractArgs('');
-        this.setContractArgs('');
+        this.setExtractArgs('', () => {
+            this.setContractArgs('', () => {
         if (this.loadMethod === 'directly' && !!this.OSRMLoader.loader) this.OSRMLoader.shutdown(callback);
         else callback();
+            });
+        });
     });

     this.Around('@stress', (scenario, callback) => {

View File

@@ -0,0 +1,37 @@
@routing @speed @traffic
Feature: Traffic - turn penalties applied to turn onto which a phantom node snaps
Background: Simple map with phantom nodes
Given the node map
| | 1 | | 2 | | 3 | |
| a | | b | | c | | d |
| | | | | | | |
| | | e | | f | | g |
And the ways
| nodes | highway |
| ab | primary |
| bc | primary |
| cd | primary |
| be | primary |
| cf | primary |
| dg | primary |
And the profile "testbot"
# Since testbot doesn't have turn penalties, a penalty from file of 0 should produce a neutral effect
And the extract extra arguments "--generate-edge-lookup"
Scenario: Weighting based on turn penalty file, with an extreme negative value -- clamps and does not fail
Given the turn penalty file
"""
1,2,5,0
3,4,7,-20
"""
And the contract extra arguments "--turn-penalty-file penalties.csv"
When I route I should get
| from | to | route | speed | time |
| a | e | ab,be,be | 36 km/h | 40s +-1 |
| 1 | e | ab,be,be | 36 km/h | 30s +-1 |
| b | f | bc,cf,cf | 36 km/h | 40s +-1 |
| 2 | f | bc,cf,cf | 36 km/h | 30s +-1 |
| c | g | cd,dg,dg | 71 km/h | 20s +-1 |
| 3 | g | cd,dg,dg | 54 km/h | 20s +-1 |

View File

@@ -84,6 +84,7 @@ class Contractor
                    const std::string &edge_segment_lookup_path,
                    const std::string &edge_penalty_path,
                    const std::vector<std::string> &segment_speed_path,
+                   const std::vector<std::string> &turn_penalty_path,
                    const std::string &nodes_filename,
                    const std::string &geometry_filename,
                    const std::string &datasource_names_filename,

View File

@@ -81,6 +81,7 @@ struct ContractorConfig
     double core_factor;

     std::vector<std::string> segment_speed_lookup_paths;
+    std::vector<std::string> turn_penalty_lookup_paths;
     std::string datasource_indexes_path;
     std::string datasource_names_path;
 };

View File

@@ -301,8 +301,6 @@ template <class DataFacadeT, class Derived> class BasicRoutingInterface
             auto total_weight = std::accumulate(weight_vector.begin(), weight_vector.end(), 0);

             BOOST_ASSERT(weight_vector.size() == id_vector.size());
-            // ed.distance should be total_weight + penalties (turn, stop, etc)
-            BOOST_ASSERT(ed.distance >= total_weight);

             const bool is_first_segment = unpacked_path.empty();
             const std::size_t start_index =

@@ -350,7 +348,8 @@ template <class DataFacadeT, class Derived> class BasicRoutingInterface
                 start_index =
                     id_vector.size() - phantom_node_pair.source_phantom.fwd_segment_position - 1;
             }
-            end_index = id_vector.size() - phantom_node_pair.target_phantom.fwd_segment_position - 1;
+            end_index =
+                id_vector.size() - phantom_node_pair.target_phantom.fwd_segment_position - 1;
         }
         else
         {

@@ -396,8 +395,17 @@ template <class DataFacadeT, class Derived> class BasicRoutingInterface
             // However the first segment duration needs to be adjusted to the fact that the source
             // phantom is in the middle of the segment. We do this by subtracting v--s from the
             // duration.
-            BOOST_ASSERT(unpacked_path.front().duration_until_turn >= source_weight);
-            unpacked_path.front().duration_until_turn -= source_weight;
+
+            // Since it's possible duration_until_turn can be less than source_weight here if
+            // a negative enough turn penalty is used to modify this edge weight during
+            // osrm-contract, we clamp to 1 here so as not to return a negative duration
+            // for this segment.
+            // TODO this creates a scenario where it's possible the duration from a phantom
+            // node to the first turn would be the same as from end to end of a segment,
+            // which is obviously incorrect and not ideal...
+            unpacked_path.front().duration_until_turn =
+                std::max(unpacked_path.front().duration_until_turn - source_weight, 0);
         }

         // there is no equivalent to a node-based node in an edge-expanded graph.
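For intuition, a tiny standalone sketch of the change above, contrasting the removed assertion with the new clamp; the numbers are invented for illustration and are not taken from the commit.

// Illustrative only: shows why the old BOOST_ASSERT had to go once negative
// turn penalties could shrink an edge weight during osrm-contract.
#include <algorithm>
#include <cassert>

int main()
{
    int duration_until_turn = 40; // first unpacked segment, up to its first turn (deciseconds)
    const int source_weight = 55; // portion of that segment before the snapped source phantom
    // Old code: BOOST_ASSERT(duration_until_turn >= source_weight) -- would fire for these values.
    // New code: clamp instead of returning a negative duration.
    duration_until_turn = std::max(duration_until_turn - source_weight, 0);
    assert(duration_until_turn == 0);
    return 0;
}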

View File

@@ -40,7 +40,9 @@ class CompressedEdgeContainer
     void SerializeInternalVector(const std::string &path) const;
     unsigned GetPositionForID(const EdgeID edge_id) const;
     const EdgeBucket &GetBucketReference(const EdgeID edge_id) const;
+    bool IsTrivial(const EdgeID edge_id) const;
     NodeID GetFirstEdgeTargetID(const EdgeID edge_id) const;
+    NodeID GetLastEdgeTargetID(const EdgeID edge_id) const;
     NodeID GetLastEdgeSourceID(const EdgeID edge_id) const;

   private:

View File

@@ -103,7 +103,7 @@ getCoordinateFromCompressedRange(util::Coordinate current_coordinate,
     }
 }
 } // namespace detail

-// Finds a (potentially inteprolated) coordinate that is DESIRED_SEGMENT_LENGTH away
+// Finds a (potentially interpolated) coordinate that is DESIRED_SEGMENT_LENGTH away
 // from the start of an edge
 inline util::Coordinate
 getRepresentativeCoordinate(const NodeID from_node,

View File

@@ -20,6 +20,7 @@
 #include <boost/assert.hpp>
 #include <boost/filesystem/fstream.hpp>
+#include <boost/functional/hash.hpp>

 #include <tbb/parallel_sort.h>

@@ -29,6 +30,7 @@
 #include <memory>
 #include <thread>
 #include <iterator>
+#include <tuple>

 namespace std
 {
@@ -40,6 +42,18 @@ template <> struct hash<std::pair<OSMNodeID, OSMNodeID>>
         return static_cast<uint64_t>(k.first) ^ (static_cast<uint64_t>(k.second) << 12);
     }
 };
+
+template <> struct hash<std::tuple<OSMNodeID, OSMNodeID, OSMNodeID>>
+{
+    std::size_t operator()(const std::tuple<OSMNodeID, OSMNodeID, OSMNodeID> &k) const
+    {
+        std::size_t seed = 0;
+        boost::hash_combine(seed, static_cast<uint64_t>(std::get<0>(k)));
+        boost::hash_combine(seed, static_cast<uint64_t>(std::get<1>(k)));
+        boost::hash_combine(seed, static_cast<uint64_t>(std::get<2>(k)));
+        return seed;
+    }
+};
 }

 namespace osrm
@@ -71,9 +85,9 @@ int Contractor::Run()
     std::size_t max_edge_id = LoadEdgeExpandedGraph(
         config.edge_based_graph_path, edge_based_edge_list, config.edge_segment_lookup_path,
-        config.edge_penalty_path, config.segment_speed_lookup_paths, config.node_based_graph_path,
-        config.geometry_path, config.datasource_names_path, config.datasource_indexes_path,
-        config.rtree_leaf_path);
+        config.edge_penalty_path, config.segment_speed_lookup_paths,
+        config.turn_penalty_lookup_paths, config.node_based_graph_path, config.geometry_path,
+        config.datasource_names_path, config.datasource_indexes_path, config.rtree_leaf_path);

     // Contracting the edge-expanded graph
@@ -129,6 +143,7 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
     const std::string &edge_segment_lookup_filename,
     const std::string &edge_penalty_filename,
     const std::vector<std::string> &segment_speed_filenames,
+    const std::vector<std::string> &turn_penalty_filenames,
     const std::string &nodes_filename,
     const std::string &geometry_filename,
     const std::string &datasource_names_filename,
@@ -139,11 +154,12 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
     boost::filesystem::ifstream input_stream(edge_based_graph_filename, std::ios::binary);

     const bool update_edge_weights = !segment_speed_filenames.empty();
+    const bool update_turn_penalties = !turn_penalty_filenames.empty();

     boost::filesystem::ifstream edge_segment_input_stream;
     boost::filesystem::ifstream edge_fixed_penalties_input_stream;

-    if (update_edge_weights)
+    if (update_edge_weights || update_turn_penalties)
     {
         edge_segment_input_stream.open(edge_segment_lookup_filename, std::ios::binary);
         edge_fixed_penalties_input_stream.open(edge_penalty_filename, std::ios::binary);
@@ -172,37 +188,74 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
     std::unordered_map<std::pair<OSMNodeID, OSMNodeID>, std::pair<unsigned, uint8_t>>
         segment_speed_lookup;
+    std::unordered_map<std::tuple<OSMNodeID, OSMNodeID, OSMNodeID>, std::pair<double, uint8_t>>
+        turn_penalty_lookup;

     // If we update the edge weights, this file will hold the datasource information
     // for each segment
     std::vector<uint8_t> m_geometry_datasource;

-    if (update_edge_weights)
+    if (update_edge_weights || update_turn_penalties)
     {
-        uint8_t file_id = 1;
-        for (auto segment_speed_filename : segment_speed_filenames)
-        {
-            util::SimpleLogger().Write()
-                << "Segment speed data supplied, will update edge weights from "
-                << segment_speed_filename;
-
-            io::CSVReader<3> csv_in(segment_speed_filename);
-            csv_in.set_header("from_node", "to_node", "speed");
-            uint64_t from_node_id{};
-            uint64_t to_node_id{};
-            unsigned speed{};
-            while (csv_in.read_row(from_node_id, to_node_id, speed))
-            {
-                segment_speed_lookup[std::make_pair(OSMNodeID(from_node_id),
-                                                    OSMNodeID(to_node_id))] =
-                    std::make_pair(speed, file_id);
-            }
-            ++file_id;
-
-            // Check for overflow
-            if (file_id == 0)
-            {
-                throw util::exception(
-                    "Sorry, there's a limit of 254 segment speed files, you supplied too many");
-            }
-        }
+        std::uint8_t segment_file_id = 1;
+        std::uint8_t turn_file_id = 1;
+
+        if (update_edge_weights)
+        {
+            for (auto segment_speed_filename : segment_speed_filenames)
+            {
+                util::SimpleLogger().Write()
+                    << "Segment speed data supplied, will update edge weights from "
+                    << segment_speed_filename;
+
+                io::CSVReader<3> csv_in(segment_speed_filename);
+                csv_in.set_header("from_node", "to_node", "speed");
+                std::uint64_t from_node_id{};
+                std::uint64_t to_node_id{};
+                unsigned speed{};
+                while (csv_in.read_row(from_node_id, to_node_id, speed))
+                {
+                    segment_speed_lookup[std::make_pair(OSMNodeID(from_node_id),
+                                                        OSMNodeID(to_node_id))] =
+                        std::make_pair(speed, segment_file_id);
+                }
+                ++segment_file_id;
+
+                // Check for overflow
+                if (segment_file_id == 0)
+                {
+                    throw util::exception(
+                        "Sorry, there's a limit of 255 segment speed files; you supplied too many");
+                }
+            }
+        }
+
+        if (update_turn_penalties)
+        {
+            for (auto turn_penalty_filename : turn_penalty_filenames)
+            {
+                util::SimpleLogger().Write()
+                    << "Turn penalty data supplied, will update turn penalties from "
+                    << turn_penalty_filename;
+
+                io::CSVReader<4> csv_in(turn_penalty_filename);
+                csv_in.set_header("from_node", "via_node", "to_node", "penalty");
+                uint64_t from_node_id{};
+                uint64_t via_node_id{};
+                uint64_t to_node_id{};
+                double penalty{};
+                while (csv_in.read_row(from_node_id, via_node_id, to_node_id, penalty))
+                {
+                    turn_penalty_lookup[std::make_tuple(
+                        OSMNodeID(from_node_id), OSMNodeID(via_node_id), OSMNodeID(to_node_id))] =
+                        std::make_pair(penalty, turn_file_id);
+                }
+                ++turn_file_id;
+
+                // Check for overflow
+                if (turn_file_id == 0)
+                {
+                    throw util::exception(
+                        "Sorry, there's a limit of 255 turn penalty files; you supplied too many");
+                }
+            }
+        }
     }
@@ -358,10 +411,9 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
                 u = &(internal_to_external_node_map
                           [m_geometry_list[reverse_begin + rev_segment_position - 1]
                               .node_id]);
-                v = &(
-                    internal_to_external_node_map[m_geometry_list[reverse_begin +
-                                                                  rev_segment_position]
-                                                      .node_id]);
+                v = &(internal_to_external_node_map
+                          [m_geometry_list[reverse_begin + rev_segment_position]
+                              .node_id]);
             }
             const double segment_length =
                 util::coordinate_calculation::greatCircleDistance(
@@ -412,8 +464,7 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
         std::ofstream datasource_stream(datasource_indexes_filename, std::ios::binary);
         if (!datasource_stream)
         {
-            throw util::exception("Failed to open " + datasource_indexes_filename +
-                                  " for writing");
+            throw util::exception("Failed to open " + datasource_indexes_filename + " for writing");
         }
         auto number_of_datasource_entries = m_geometry_datasource.size();
         datasource_stream.write(reinterpret_cast<const char *>(&number_of_datasource_entries),
@@ -429,8 +480,7 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
         std::ofstream datasource_stream(datasource_names_filename, std::ios::binary);
         if (!datasource_stream)
         {
-            throw util::exception("Failed to open " + datasource_names_filename +
-                                  " for writing");
+            throw util::exception("Failed to open " + datasource_names_filename + " for writing");
         }
         datasource_stream << "lua profile" << std::endl;
         for (auto const &name : segment_speed_filenames)
@@ -445,7 +495,7 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
     {
         extractor::EdgeBasedEdge inbuffer;
         input_stream.read((char *)&inbuffer, sizeof(extractor::EdgeBasedEdge));
-        if (update_edge_weights)
+        if (update_edge_weights || update_turn_penalties)
         {
             // Processing-time edge updates
             unsigned fixed_penalty;

@@ -463,6 +513,7 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
             OSMNodeID this_osm_node_id;
             double segment_length;
             int segment_weight;
+            int compressed_edge_nodes = static_cast<int>(num_osm_nodes);
             --num_osm_nodes;
             for (; num_osm_nodes != 0; --num_osm_nodes)
             {

@@ -494,7 +545,34 @@ std::size_t Contractor::LoadEdgeExpandedGraph(
                 previous_osm_node_id = this_osm_node_id;
             }

-            inbuffer.weight = fixed_penalty + new_weight;
+            OSMNodeID from_id;
+            OSMNodeID via_id;
+            OSMNodeID to_id;
+            edge_fixed_penalties_input_stream.read(reinterpret_cast<char *>(&from_id),
+                                                   sizeof(from_id));
+            edge_fixed_penalties_input_stream.read(reinterpret_cast<char *>(&via_id),
+                                                   sizeof(via_id));
+            edge_fixed_penalties_input_stream.read(reinterpret_cast<char *>(&to_id), sizeof(to_id));
+
+            auto turn_iter = turn_penalty_lookup.find(std::make_tuple(from_id, via_id, to_id));
+            if (turn_iter != turn_penalty_lookup.end())
+            {
+                int new_turn_weight = static_cast<int>(turn_iter->second.first * 10);
+
+                if (new_turn_weight + new_weight < compressed_edge_nodes)
+                {
+                    util::SimpleLogger().Write(logWARNING)
+                        << "turn penalty " << turn_iter->second.first << " for turn " << from_id
+                        << ", " << via_id << ", " << to_id
+                        << " is too negative: clamping turn weight to " << compressed_edge_nodes;
+                }
+
+                inbuffer.weight = std::max(new_turn_weight + new_weight, compressed_edge_nodes);
+            }
+            else
+            {
+                inbuffer.weight = fixed_penalty + new_weight;
+            }
         }

         edge_based_edge_list.emplace_back(std::move(inbuffer));
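To make the weight update above concrete: the CSV penalty is given in seconds, multiplied by 10 to match OSRM's decisecond edge weights, and the resulting edge weight is clamped so it never drops below the number of nodes in the edge's compressed geometry. A minimal standalone sketch with invented values:

// Illustrative values only; mirrors the update rule in the loop above.
#include <algorithm>
#include <iostream>

int main()
{
    const double penalty_from_csv = -10.0; // seconds, as read from penalties.csv
    const int new_weight = 57;             // sum of the edge's segment weights (deciseconds)
    const int compressed_edge_nodes = 3;   // nodes in this edge's compressed geometry

    const int new_turn_weight = static_cast<int>(penalty_from_csv * 10); // -100
    const int weight = std::max(new_turn_weight + new_weight, compressed_edge_nodes);
    std::cout << weight << '\n'; // prints 3: clamped instead of going negative
}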

View File

@@ -250,16 +250,32 @@ CompressedEdgeContainer::GetBucketReference(const EdgeID edge_id) const
     return m_compressed_geometries.at(index);
 }

+// Since all edges are technically in the compressed geometry container,
+// regardless of whether a compressed edge actually contains multiple
+// original segments, we use 'Trivial' here to describe compressed edges
+// that only contain one original segment
+bool CompressedEdgeContainer::IsTrivial(const EdgeID edge_id) const
+{
+    const auto &bucket = GetBucketReference(edge_id);
+    return bucket.size() == 1;
+}
+
 NodeID CompressedEdgeContainer::GetFirstEdgeTargetID(const EdgeID edge_id) const
 {
     const auto &bucket = GetBucketReference(edge_id);
-    BOOST_ASSERT(bucket.size() >= 2);
+    BOOST_ASSERT(bucket.size() >= 1);
     return bucket.front().node_id;
 }
+
+NodeID CompressedEdgeContainer::GetLastEdgeTargetID(const EdgeID edge_id) const
+{
+    const auto &bucket = GetBucketReference(edge_id);
+    BOOST_ASSERT(bucket.size() >= 1);
+    return bucket.back().node_id;
+}
+
 NodeID CompressedEdgeContainer::GetLastEdgeSourceID(const EdgeID edge_id) const
 {
     const auto &bucket = GetBucketReference(edge_id);
-    BOOST_ASSERT(bucket.size() >= 2);
+    BOOST_ASSERT(bucket.size() >= 1);
     return bucket[bucket.size() - 2].node_id;
 }
 }
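For orientation, the bucket queried above stores, in order, the target node of every original (pre-compression) segment of the edge, so the accessors can be read as follows. A simplified sketch, with the bucket modeled as a plain vector (the real EdgeBucket also carries per-segment weights; plain integers stand in for NodeIDs):

// Simplified model of an EdgeBucket as just the ordered segment targets.
#include <cassert>
#include <vector>

using NodeID = unsigned;

int main()
{
    // Compressed edge u -> x -> y -> v: the bucket holds {x, y, v}.
    const std::vector<NodeID> bucket = {7, 8, 9};

    const bool is_trivial = (bucket.size() == 1);          // IsTrivial: one original segment only
    const NodeID first_target = bucket.front();            // GetFirstEdgeTargetID -> x
    const NodeID last_target = bucket.back();              // GetLastEdgeTargetID  -> v (the via node of a turn)
    const NodeID last_source = bucket[bucket.size() - 2];  // GetLastEdgeSourceID  -> y (node just before v)

    assert(!is_trivial && first_target == 7 && last_target == 9 && last_source == 8);
    return 0;
}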

View File

@@ -123,7 +123,8 @@ void EdgeBasedGraphFactory::InsertEdgeBasedNode(const NodeID node_u, const NodeI
     NodeID current_edge_source_coordinate_id = node_u;

-    const auto edge_id_to_segment_id = [](const NodeID edge_based_node_id) {
+    const auto edge_id_to_segment_id = [](const NodeID edge_based_node_id)
+    {
         if (edge_based_node_id == SPECIAL_NODEID)
         {
             return SegmentID{SPECIAL_SEGMENTID, false};

@@ -339,7 +340,6 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
             auto possible_turns = turn_analysis.getTurns(node_u, edge_from_u);

             const NodeID node_v = m_node_based_graph->GetTarget(edge_from_u);
-
             for (const auto turn : possible_turns)
             {
                 const double turn_angle = turn.angle;

@@ -434,6 +434,36 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
                                                     sizeof(target_node.weight));
                         previous = target_node.node_id;
                     }
+
+                    // We also now write out the mapping between the edge-expanded edges and the
+                    // original nodes. Since each edge represents a possible maneuver, external
+                    // programs can use this to quickly perform updates to edge weights in order
+                    // to penalize certain turns.
+
+                    // If this edge is 'trivial' -- where the compressed edge corresponds
+                    // exactly to an original OSM segment -- we can pull the turn's preceding
+                    // node ID directly with `node_u`; otherwise, we need to look up the node
+                    // immediately preceding the turn from the compressed edge container.
+                    const bool isTrivial = m_compressed_edge_container.IsTrivial(edge_from_u);
+
+                    const auto &from_node =
+                        isTrivial
+                            ? m_node_info_list[node_u]
+                            : m_node_info_list[m_compressed_edge_container.GetLastEdgeSourceID(
+                                  edge_from_u)];
+                    const auto &via_node =
+                        m_node_info_list[m_compressed_edge_container.GetLastEdgeTargetID(
+                            edge_from_u)];
+                    const auto &to_node =
+                        m_node_info_list[m_compressed_edge_container.GetFirstEdgeTargetID(
+                            turn.eid)];
+
+                    edge_penalty_file.write(reinterpret_cast<const char *>(&from_node.node_id),
+                                            sizeof(from_node.node_id));
+                    edge_penalty_file.write(reinterpret_cast<const char *>(&via_node.node_id),
+                                            sizeof(via_node.node_id));
+                    edge_penalty_file.write(reinterpret_cast<const char *>(&to_node.node_id),
+                                            sizeof(to_node.node_id));
                 }
             }
         }

View File

@@ -43,6 +43,10 @@ return_code parseArguments(int argc, char *argv[], contractor::ContractorConfig
         &contractor_config.segment_speed_lookup_paths)
         ->composing(),
     "Lookup files containing nodeA, nodeB, speed data to adjust edge weights")(
+    "turn-penalty-file", boost::program_options::value<std::vector<std::string>>(
+                             &contractor_config.turn_penalty_lookup_paths)
+                             ->composing(),
+    "Lookup files containing from_, via_, to_nodes, and turn penalties to adjust turn weights")(
     "level-cache,o", boost::program_options::value<bool>(&contractor_config.use_cached_priority)
                          ->default_value(false),
     "Use .level file to retain the contraction level for each node from the last run.");