Merge pull request #3669 from Project-OSRM/seemoritz-minor_fixes

Port Seemoritz minor fixes to master
Patrick Niklaus 2017-02-08 09:34:36 +00:00 committed by GitHub
commit 768dc8d817
9 changed files with 41 additions and 30 deletions

View File

@@ -24,7 +24,7 @@ struct ContractorEdgeData
bool forward,
bool backward)
: weight(weight), duration(duration), id(id),
- originalEdges(std::min((unsigned)1 << 28, original_edges)), shortcut(shortcut),
+ originalEdges(std::min((1u << 28) - 1u, original_edges)), shortcut(shortcut),
forward(forward), backward(backward), is_original_via_node_ID(false)
{
}
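
The new clamp caps originalEdges at (1 << 28) - 1 instead of 1 << 28. The clamp suggests the value lands in a 28-bit field; that width is an assumption here (the field declaration is not part of this hunk). A minimal sketch of why the old upper bound itself cannot be stored in such a field:

#include <iostream>

// Hypothetical 28-bit field standing in for originalEdges; the real struct
// layout is not shown in the hunk, so the width is an assumption.
struct Packed
{
    unsigned original_edges : 28;
};

int main()
{
    Packed a{}, b{};
    a.original_edges = 1u << 28;        // out of range: stored modulo 2^28, becomes 0
    b.original_edges = (1u << 28) - 1u; // largest value that fits
    std::cout << a.original_edges << " " << b.original_edges << "\n"; // prints: 0 268435455
}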

View File

@@ -175,11 +175,11 @@ class GraphContractor
auto &dijkstra = data->dijkstra;
std::size_t inserted_edges_size = data->inserted_edges.size();
std::vector<ContractorEdge> &inserted_edges = data->inserted_edges;
- const constexpr bool SHORTCUT_ARC = true;
- const constexpr bool FORWARD_DIRECTION_ENABLED = true;
- const constexpr bool FORWARD_DIRECTION_DISABLED = false;
- const constexpr bool REVERSE_DIRECTION_ENABLED = true;
- const constexpr bool REVERSE_DIRECTION_DISABLED = false;
+ constexpr bool SHORTCUT_ARC = true;
+ constexpr bool FORWARD_DIRECTION_ENABLED = true;
+ constexpr bool FORWARD_DIRECTION_DISABLED = false;
+ constexpr bool REVERSE_DIRECTION_ENABLED = true;
+ constexpr bool REVERSE_DIRECTION_DISABLED = false;
for (auto in_edge : contractor_graph->GetAdjacentEdgeRange(node))
{
@@ -213,7 +213,9 @@ class GraphContractor
}
const NodeID target = contractor_graph->GetTarget(out_edge);
if (node == target)
+ {
continue;
+ }
const EdgeWeight path_weight = in_data.weight + out_data.weight;
if (target == source)
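
Background on the first hunk: constexpr on a variable already implies const, which is why the duplicated const can be dropped without changing meaning. A small stand-alone check (the name is reused from the hunk, otherwise this is illustration only):

#include <type_traits>

int main()
{
    constexpr bool FORWARD_DIRECTION_ENABLED = true;
    // constexpr variables are implicitly const, so decltype already carries const
    static_assert(std::is_const_v<decltype(FORWARD_DIRECTION_ENABLED)>,
                  "constexpr implies const");
    static_assert(FORWARD_DIRECTION_ENABLED, "still usable in constant expressions");
}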

View File

@@ -859,18 +859,15 @@ class ContiguousInternalMemoryDataFacade : public BaseDataFacade
const unsigned end = m_geometry_indices.at(id + 1);
std::vector<DatasourceID> result_datasources;
- result_datasources.resize(end - begin);
// If there was no datasource info, return an array of 0's.
if (m_datasource_list.empty())
{
- for (unsigned i = 0; i < end - begin; ++i)
- {
- result_datasources.push_back(0);
- }
+ result_datasources.resize(end - begin, 0);
}
else
{
+ result_datasources.resize(end - begin);
std::copy(m_datasource_list.begin() + begin,
m_datasource_list.begin() + end,
result_datasources.begin());
@@ -902,13 +899,11 @@ class ContiguousInternalMemoryDataFacade : public BaseDataFacade
// If there was no datasource info, return an array of 0's.
if (m_datasource_list.empty())
{
- for (unsigned i = 0; i < end - begin; ++i)
- {
- result_datasources.push_back(0);
- }
+ result_datasources.resize(end - begin, 0);
}
else
{
+ result_datasources.resize(end - begin);
std::copy(m_datasource_list.rbegin() + (m_datasource_list.size() - end),
m_datasource_list.rbegin() + (m_datasource_list.size() - begin),
result_datasources.begin());
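
Both hunks fix the same pattern: the old code resized the vector to end - begin and then push_back'ed another end - begin zeros, returning a result twice as long as intended. A minimal reproduction of the difference:

#include <cassert>
#include <vector>

int main()
{
    const unsigned begin = 0, end = 4;

    // old pattern: resize() already creates end - begin elements,
    // so the push_back loop doubles the size
    std::vector<int> buggy;
    buggy.resize(end - begin);
    for (unsigned i = 0; i < end - begin; ++i)
    {
        buggy.push_back(0);
    }
    assert(buggy.size() == 2 * (end - begin));

    // fixed pattern: one resize with a fill value yields exactly end - begin zeros
    std::vector<int> fixed;
    fixed.resize(end - begin, 0);
    assert(fixed.size() == end - begin);
}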

View File

@@ -91,7 +91,7 @@ class FileReader
const auto &result = input_stream.read(reinterpret_cast<char *>(dest), count * sizeof(T));
const std::size_t bytes_read = input_stream.gcount();
- if (bytes_read != count && !result)
+ if (bytes_read != count * sizeof(T) && !result)
{
if (result.eof())
{
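
The corrected condition compares like with like: std::istream::gcount() reports bytes, while count is a number of T elements, so a short read was previously only detected when sizeof(T) == 1. A self-contained illustration of the unit mismatch, with a std::stringstream standing in for the file stream:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <sstream>

int main()
{
    std::uint32_t values[4] = {1, 2, 3, 4};
    std::stringstream stream;
    stream.write(reinterpret_cast<const char *>(values), sizeof(values));

    std::uint32_t dest[4];
    const std::size_t count = 4;
    stream.read(reinterpret_cast<char *>(dest), count * sizeof(std::uint32_t));

    // gcount() is in bytes: comparing it against count alone would
    // wrongly report a short read for any T wider than one byte
    assert(static_cast<std::size_t>(stream.gcount()) == count * sizeof(std::uint32_t));
    assert(static_cast<std::size_t>(stream.gcount()) != count);
}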

View File

@@ -356,8 +356,8 @@ class DeallocatingVector
ElementT &back() const
{
- const std::size_t _bucket = current_size / ELEMENTS_PER_BLOCK;
- const std::size_t _index = current_size % ELEMENTS_PER_BLOCK;
+ const std::size_t _bucket = (current_size - 1) / ELEMENTS_PER_BLOCK;
+ const std::size_t _index = (current_size - 1) % ELEMENTS_PER_BLOCK;
return (bucket_list[_bucket][_index]);
}
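
back() must address element current_size - 1; the old formula addressed the slot one past it, which for a full block points into a bucket that may not exist yet. The arithmetic, with an assumed block size of 4 for illustration:

#include <cassert>
#include <cstddef>

int main()
{
    constexpr std::size_t ELEMENTS_PER_BLOCK = 4; // assumed for illustration
    const std::size_t current_size = 4;           // exactly one full block

    // old formula: bucket 1, index 0 -- one past the last element,
    // in a bucket that may not have been allocated
    assert(current_size / ELEMENTS_PER_BLOCK == 1);
    assert(current_size % ELEMENTS_PER_BLOCK == 0);

    // fixed formula: bucket 0, index 3 -- the actual last element
    assert((current_size - 1) / ELEMENTS_PER_BLOCK == 0);
    assert((current_size - 1) % ELEMENTS_PER_BLOCK == 3);
}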

View File

@@ -161,9 +161,13 @@ template <typename EdgeDataT> class DynamicGraph
EdgeIterator InsertEdge(const NodeIterator from, const NodeIterator to, const EdgeDataT &data)
{
Node &node = node_array[from];
- EdgeIterator newFirstEdge = node.edges + node.first_edge;
- if (newFirstEdge >= edge_list.size() || !isDummy(newFirstEdge))
+ EdgeIterator one_beyond_last_of_node = node.edges + node.first_edge;
+ // if we can't write at the end of this nodes edges
+ // that is: the end is the end of the edge_list,
+ // or the beginning of the next nodes edges
+ if (one_beyond_last_of_node == edge_list.size() || !isDummy(one_beyond_last_of_node))
{
+ // can we write before this nodes edges?
if (node.first_edge != 0 && isDummy(node.first_edge - 1))
{
node.first_edge--;
@@ -171,20 +175,24 @@ template <typename EdgeDataT> class DynamicGraph
}
else
{
+ // we have to move this nodes edges to the end of the edge_list
EdgeIterator newFirstEdge = (EdgeIterator)edge_list.size();
unsigned newSize = node.edges * 1.1 + 2;
EdgeIterator requiredCapacity = newSize + edge_list.size();
EdgeIterator oldCapacity = edge_list.capacity();
+ // make sure there is enough space at the end
if (requiredCapacity >= oldCapacity)
{
edge_list.reserve(requiredCapacity * 1.1);
}
edge_list.resize(edge_list.size() + newSize);
+ // move the edges over and invalidate the old ones
for (const auto i : irange(0u, node.edges))
{
edge_list[newFirstEdge + i] = edge_list[node.first_edge + i];
makeDummy(node.first_edge + i);
}
+ // invalidate until the end of edge_list
for (const auto i : irange(node.edges + 1, newSize))
{
makeDummy(newFirstEdge + i);
@@ -192,6 +200,8 @@ template <typename EdgeDataT> class DynamicGraph
node.first_edge = newFirstEdge;
}
}
+ // get the position for the edge that is to be inserted
+ // and write it
Edge &edge = edge_list[node.first_edge + node.edges];
edge.target = to;
edge.data = data;
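
To make the renamed variable and the new comments easier to follow: each node owns a contiguous slice of edge_list, free slots are dummies, and InsertEdge tries to append after the slice, then before it, and only otherwise relocates the slice to the end. A compact sketch of that strategy with plain ints (not OSRM's types, a dummy value of -1, and without the 1.1 growth factor):

#include <cassert>
#include <cstddef>
#include <vector>

constexpr int DUMMY = -1;

struct Node
{
    std::size_t first_edge = 0;
    std::size_t edges = 0;
};

void insert_edge(Node &node, std::vector<int> &edge_list, int target)
{
    const std::size_t one_beyond_last = node.first_edge + node.edges;
    // can we append directly after this node's slice?
    if (one_beyond_last == edge_list.size() || edge_list[one_beyond_last] != DUMMY)
    {
        // no: can we grow the slice to the left instead?
        if (node.first_edge != 0 && edge_list[node.first_edge - 1] == DUMMY)
        {
            node.first_edge--;
            // move the last edge into the freed slot; its old slot takes the new edge
            edge_list[node.first_edge] = edge_list[node.first_edge + node.edges];
        }
        else
        {
            // relocate the whole slice to the end, leaving spare dummy slots
            const std::size_t new_first = edge_list.size();
            const std::size_t new_size = node.edges + 2;
            edge_list.resize(edge_list.size() + new_size, DUMMY);
            for (std::size_t i = 0; i < node.edges; ++i)
            {
                edge_list[new_first + i] = edge_list[node.first_edge + i];
                edge_list[node.first_edge + i] = DUMMY; // invalidate the old slot
            }
            node.first_edge = new_first;
        }
    }
    // write the new edge at the end of the (possibly moved) slice
    edge_list[node.first_edge + node.edges] = target;
    node.edges++;
}

int main()
{
    std::vector<int> edge_list;
    Node node;
    insert_edge(node, edge_list, 7); // relocates the (empty) slice, then appends
    insert_edge(node, edge_list, 8); // fills the spare dummy slot
    assert(edge_list[node.first_edge] == 7 && edge_list[node.first_edge + 1] == 8);
}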

View File

@@ -237,7 +237,7 @@ void GraphContractor::Run(double core_factor)
unsigned current_level = 0;
bool flushed_contractor = false;
- while (number_of_nodes > 2 &&
+ while (remaining_nodes.size() > 1 &&
number_of_contracted_nodes < static_cast<NodeID>(number_of_nodes * core_factor))
{
if (!flushed_contractor && (number_of_contracted_nodes >
@@ -341,16 +341,19 @@ void GraphContractor::Run(double core_factor)
for (const ContractorEdge &edge : data->inserted_edges)
{
const EdgeID current_edge_ID = contractor_graph->FindEdge(edge.source, edge.target);
- if (current_edge_ID < contractor_graph->EndEdges(edge.source))
+ if (current_edge_ID != SPECIAL_EDGEID)
{
ContractorGraph::EdgeData &current_data =
contractor_graph->GetEdgeData(current_edge_ID);
if (current_data.shortcut && edge.data.forward == current_data.forward &&
- edge.data.backward == current_data.backward &&
- edge.data.weight < current_data.weight)
+ edge.data.backward == current_data.backward)
{
// found a duplicate edge with smaller weight, update it.
- current_data = edge.data;
+ if (edge.data.weight < current_data.weight)
+ {
+ current_data = edge.data;
+ }
// don't insert duplicates
continue;
}
}
@@ -387,7 +390,7 @@ void GraphContractor::Run(double core_factor)
if (remaining_nodes.size() > 2)
{
- if (orig_node_id_from_new_node_id_map.size() > 0)
+ if (flushed_contractor)
{
tbb::parallel_for(tbb::blocked_range<NodeID>(0, remaining_nodes.size(), InitGrainSize),
[this, &remaining_nodes](const tbb::blocked_range<NodeID> &range) {
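
The middle hunk changes the duplicate-shortcut policy: a matching edge is now never inserted a second time, and its stored weight is only overwritten when the new shortcut is cheaper. A sketch of that policy, with a std::map standing in for the contractor graph:

#include <cassert>
#include <map>
#include <utility>

int main()
{
    std::map<std::pair<int, int>, int> weights; // (source, target) -> weight

    auto insert = [&weights](int source, int target, int weight) {
        auto found = weights.find({source, target});
        if (found != weights.end())
        {
            // found a duplicate edge: update it only if the new weight is smaller
            if (weight < found->second)
            {
                found->second = weight;
            }
            // don't insert duplicates
            return;
        }
        weights[{source, target}] = weight;
    };

    insert(0, 1, 10);
    insert(0, 1, 7);  // cheaper duplicate replaces the stored weight
    insert(0, 1, 12); // more expensive duplicate is ignored
    assert(weights.at({0, 1}) == 7);
}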

View File

@@ -174,10 +174,11 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)
// setup restriction parser
const RestrictionParser restriction_parser(scripting_environment);
- while (const osmium::memory::Buffer buffer = reader.read())
+ // create a vector of iterators into the buffer
+ for (std::vector<osmium::memory::Buffer::const_iterator> osm_elements;
+ const osmium::memory::Buffer buffer = reader.read();
+ osm_elements.clear())
{
- // create a vector of iterators into the buffer
- std::vector<osmium::memory::Buffer::const_iterator> osm_elements;
for (auto iter = std::begin(buffer), end = std::end(buffer); iter != end; ++iter)
{
osm_elements.push_back(iter);
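
The rewritten loop keeps the original semantics (read buffers until the reader is exhausted) while hoisting osm_elements out of the per-iteration scope so its allocation is reused, with clear() running as the loop's increment. A self-contained sketch of the same loop shape, with a std::optional-returning stub standing in for the osmium reader:

#include <cstddef>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Stand-in for the osmium reader: a few non-empty "buffers", then an empty
// optional, which ends the loop like an exhausted reader would.
std::optional<std::string> read_next()
{
    static int calls = 0;
    if (calls == 3)
    {
        return std::nullopt;
    }
    return std::string(4 + calls++, 'x');
}

int main()
{
    // same shape as the loop above: the scratch vector lives in the for-init so
    // its capacity is reused, the per-iteration buffer is declared in the
    // condition, and clear() runs as the increment before the next read
    for (std::vector<std::size_t> offsets; const auto buffer = read_next(); offsets.clear())
    {
        for (std::size_t i = 0; i < buffer->size(); ++i)
        {
            offsets.push_back(i);
        }
        std::cout << "buffer of " << buffer->size() << " bytes, collected "
                  << offsets.size() << " offsets\n";
    }
}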

View File

@@ -36,7 +36,7 @@ bool StorageConfig::IsValid() const
timestamp_path,
turn_weight_penalties_path,
turn_duration_penalties_path,
- datasource_indexes_path,
+ datasource_names_path,
datasource_indexes_path,
names_data_path,
properties_path,