Fix formatting
This commit is contained in:
parent
993f5badf1
commit
c0dd5d7c76
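
The hunks below apply three recurring rewrites: #include blocks are sorted alphabetically, reference qualifiers are spaced as "const std::string &name" rather than "const std::string& name", and calls that overflow the column limit are re-wrapped onto continuation lines. This is consistent with re-running the project's code formatter (an assumption; the formatter invocation and style file are not part of this commit). A minimal sketch of the re-wrapping pattern, taken from the first hunk:

// before: single call past the column limit
storage::serialization::read(reader, "/ch/edge_filter/" + std::to_string(index), edge_filter[index]);
// after: broken after the opening parenthesis, arguments on one indented continuation line
storage::serialization::read(
    reader, "/ch/edge_filter/" + std::to_string(index), edge_filter[index]);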
@@ -5,8 +5,8 @@
#include "util/serialization.hpp"
#include "storage/tar.hpp"
#include "storage/serialization.hpp"
#include "storage/tar.hpp"
namespace osrm
{

@@ -39,10 +39,11 @@ inline void readGraph(const boost::filesystem::path &path,
edge_filter.resize(count);
for (const auto index : util::irange<std::size_t>(0, count))
{
storage::serialization::read(reader, "/ch/edge_filter/" + std::to_string(index), edge_filter[index]);
storage::serialization::read(
reader, "/ch/edge_filter/" + std::to_string(index), edge_filter[index]);
}
reader.ReadInto("/ch/connectivity_checksum", connectivity_checksum);
reader.ReadInto("/ch/connectivity_checksum", connectivity_checksum);
}
// writes .osrm.hsgr file

@@ -69,7 +70,8 @@ inline void writeGraph(const boost::filesystem::path &path,
writer.WriteElementCount64("/ch/edge_filter", edge_filter.size());
for (const auto index : util::irange<std::size_t>(0, edge_filter.size()))
{
storage::serialization::write(writer, "/ch/edge_filter/" + std::to_string(index), edge_filter[index]);
storage::serialization::write(
writer, "/ch/edge_filter/" + std::to_string(index), edge_filter[index]);
}
writer.WriteElementCount64("/ch/connectivity_checksum", 1);
@@ -938,7 +938,7 @@ class ContiguousInternalMemoryDataFacadeBase : public BaseDataFacade
auto found_range = std::equal_range(
m_maneuver_overrides.begin(), m_maneuver_overrides.end(), edge_based_node_id, Comp{});
std::for_each(found_range.first, found_range.second, [&](const auto &override) {
std::for_each(found_range.first, found_range.second, [&](const auto & override) {
std::vector<NodeID> sequence(
m_maneuver_override_node_sequences.begin() + override.node_sequence_offset_begin,
m_maneuver_override_node_sequences.begin() + override.node_sequence_offset_end);

@@ -14,7 +14,6 @@ struct CompressedNodeBasedGraphEdge
NodeID source;
NodeID target;
};
}
}

@@ -128,8 +128,7 @@ inline void readNodes(const boost::filesystem::path &path,
// reads only coordinates from .osrm.nbg_nodes
template <typename CoordinatesT>
inline void readNodeCoordinates(const boost::filesystem::path &path,
CoordinatesT &coordinates)
inline void readNodeCoordinates(const boost::filesystem::path &path, CoordinatesT &coordinates)
{
static_assert(std::is_same<typename CoordinatesT::value_type, util::Coordinate>::value, "");

@@ -482,8 +481,7 @@ void writeRamIndex(const boost::filesystem::path &path, const RTreeT &rtree)
util::serialization::write(writer, "/common/rtree", rtree);
}
template <typename RTreeT>
void readRamIndex(const boost::filesystem::path &path, RTreeT &rtree)
template <typename RTreeT> void readRamIndex(const boost::filesystem::path &path, RTreeT &rtree)
{
const auto fingerprint = storage::tar::FileReader::VerifyFingerprint;
storage::tar::FileReader reader{path, fingerprint};
@@ -4,9 +4,9 @@
#include "extractor/compressed_edge_container.hpp"
#include "extractor/intersection/coordinate_extractor.hpp"
#include "extractor/intersection/have_identical_names.hpp"
#include "extractor/name_table.hpp"
#include "extractor/restriction_index.hpp"
#include "extractor/turn_lane_types.hpp"
#include "extractor/name_table.hpp"
#include "guidance/intersection.hpp"

@@ -23,12 +23,12 @@ namespace serialization
{
template <storage::Ownership Ownership>
void read(storage::tar::FileReader &reader,
const std::string& name,
const std::string &name,
detail::IntersectionBearingsContainer<Ownership> &turn_data);
template <storage::Ownership Ownership>
void write(storage::tar::FileWriter &writer,
const std::string& name,
const std::string &name,
const detail::IntersectionBearingsContainer<Ownership> &turn_data);
}

@@ -107,6 +107,7 @@ template <storage::Ownership Ownership> class NameTableImpl
friend void serialization::write<Ownership>(storage::tar::FileWriter &writer,
const std::string &name,
const NameTableImpl &index_data);
private:
IndexedData indexed_data;
};

@@ -217,7 +217,7 @@ inline void write(storage::tar::FileWriter &writer,
template <storage::Ownership Ownership>
inline void read(storage::tar::FileReader &reader,
const std::string &name,
detail::NameTableImpl<Ownership> &name_table)
detail::NameTableImpl<Ownership> &name_table)
{
std::string buffer;
util::serialization::read(reader, name, name_table.indexed_data);
@@ -4,11 +4,11 @@
#include "guidance/turn_data_container.hpp"
#include "extractor/compressed_edge_container.hpp"
#include "extractor/name_table.hpp"
#include "extractor/node_data_container.hpp"
#include "extractor/suffix_table.hpp"
#include "extractor/turn_lane_types.hpp"
#include "extractor/way_restriction_map.hpp"
#include "extractor/name_table.hpp"
#include "util/coordinate.hpp"
#include "util/guidance/bearing_class.hpp"

@@ -3,8 +3,8 @@
#include "extractor/intersection/intersection_analysis.hpp"
#include "extractor/intersection/node_based_graph_walker.hpp"
#include "extractor/suffix_table.hpp"
#include "extractor/name_table.hpp"
#include "extractor/suffix_table.hpp"
#include "guidance/constants.hpp"
#include "guidance/intersection.hpp"

@@ -7,8 +7,8 @@
#include "extractor/node_data_container.hpp"
#include "extractor/suffix_table.hpp"
#include "util/node_based_graph.hpp"
#include "util/bearing.hpp"
#include "util/node_based_graph.hpp"
#include "util/guidance/name_announcements.hpp"

@@ -3,8 +3,8 @@
#include "extractor/compressed_edge_container.hpp"
#include "extractor/intersection/coordinate_extractor.hpp"
#include "extractor/query_node.hpp"
#include "extractor/name_table.hpp"
#include "extractor/query_node.hpp"
#include "guidance/intersection.hpp"
#include "guidance/intersection_handler.hpp"

@@ -3,8 +3,8 @@
#include "guidance/turn_data_container.hpp"
#include "storage/tar.hpp"
#include "storage/serialization.hpp"
#include "storage/tar.hpp"
#include <boost/assert.hpp>

@@ -3,9 +3,9 @@
#include "extractor/compressed_edge_container.hpp"
#include "extractor/intersection/intersection_view.hpp"
#include "extractor/name_table.hpp"
#include "extractor/restriction_index.hpp"
#include "extractor/suffix_table.hpp"
#include "extractor/name_table.hpp"
#include "guidance/driveway_handler.hpp"
#include "guidance/intersection.hpp"
@@ -1,8 +1,8 @@
#ifndef OSRM_GUIDANCE_TURN_HANDLER_HPP_
#define OSRM_GUIDANCE_TURN_HANDLER_HPP_
#include "extractor/query_node.hpp"
#include "extractor/name_table.hpp"
#include "extractor/query_node.hpp"
#include "guidance/intersection.hpp"
#include "guidance/intersection_handler.hpp"

@@ -95,9 +95,10 @@ template <storage::Ownership Ownership> class CellStorageImpl
WeightValueT,
boost::random_access_traversal_tag>
{
typedef boost::
iterator_facade<ColumnIterator, WeightValueT, boost::random_access_traversal_tag>
base_t;
typedef boost::iterator_facade<ColumnIterator,
WeightValueT,
boost::random_access_traversal_tag>
base_t;
public:
typedef typename base_t::value_type value_type;

@@ -182,8 +183,8 @@ template <storage::Ownership Ownership> class CellStorageImpl
const NodeID *const all_destinations)
: num_source_nodes{data.num_source_nodes},
num_destination_nodes{data.num_destination_nodes},
weights{all_weights + data.value_offset}, durations{all_durations +
data.value_offset},
weights{all_weights + data.value_offset},
durations{all_durations + data.value_offset},
source_boundary{all_sources + data.source_boundary_offset},
destination_boundary{all_destinations + data.destination_boundary_offset}
{
@@ -3,8 +3,8 @@
#include "partitioner/multi_level_partition.hpp"
#include "storage/tar_fwd.hpp"
#include "storage/shared_memory_ownership.hpp"
#include "storage/tar_fwd.hpp"
#include "util/static_graph.hpp"
#include "util/vector_view.hpp"

@@ -25,13 +25,13 @@ namespace serialization
{
template <typename EdgeDataT, storage::Ownership Ownership>
void read(storage::tar::FileReader &reader,
const std::string& name,
const std::string &name,
MultiLevelGraph<EdgeDataT, Ownership> &graph,
std::uint32_t &connectivity_checksum);
template <typename EdgeDataT, storage::Ownership Ownership>
void write(storage::tar::FileWriter &writer,
const std::string& name,
const std::string &name,
const MultiLevelGraph<EdgeDataT, Ownership> &graph,
const std::uint32_t connectivity_checksum);
}

@@ -205,12 +205,12 @@ class MultiLevelGraph : public util::StaticGraph<EdgeDataT, Ownership>
friend void
serialization::read<EdgeDataT, Ownership>(storage::tar::FileReader &reader,
const std::string& name,
const std::string &name,
MultiLevelGraph<EdgeDataT, Ownership> &graph,
std::uint32_t &connectivity_checksum);
friend void
serialization::write<EdgeDataT, Ownership>(storage::tar::FileWriter &writer,
const std::string& name,
const std::string &name,
const MultiLevelGraph<EdgeDataT, Ownership> &graph,
const std::uint32_t connectivity_checksum);

@@ -7,8 +7,8 @@
#include "util/typedefs.hpp"
#include "util/vector_view.hpp"
#include "storage/tar_fwd.hpp"
#include "storage/shared_memory_ownership.hpp"
#include "storage/tar_fwd.hpp"
#include <algorithm>
#include <array>

@@ -34,9 +34,13 @@ using MultiLevelPartitionView = detail::MultiLevelPartitionImpl<storage::Ownersh
namespace serialization
{
template <storage::Ownership Ownership>
void read(storage::tar::FileReader &reader, const std::string& name, detail::MultiLevelPartitionImpl<Ownership> &mlp);
void read(storage::tar::FileReader &reader,
const std::string &name,
detail::MultiLevelPartitionImpl<Ownership> &mlp);
template <storage::Ownership Ownership>
void write(storage::tar::FileWriter &writer, const std::string& name, const detail::MultiLevelPartitionImpl<Ownership> &mlp);
void write(storage::tar::FileWriter &writer,
const std::string &name,
const detail::MultiLevelPartitionImpl<Ownership> &mlp);
}
namespace detail

@@ -137,10 +141,10 @@ template <storage::Ownership Ownership> class MultiLevelPartitionImpl final
}
friend void serialization::read<Ownership>(storage::tar::FileReader &reader,
const std::string& name,
const std::string &name,
MultiLevelPartitionImpl &mlp);
friend void serialization::write<Ownership>(storage::tar::FileWriter &writer,
const std::string& name,
const std::string &name,
const MultiLevelPartitionImpl &mlp);
private:
@@ -33,21 +33,25 @@ namespace serialization
*/
template <typename T>
inline void read(storage::tar::FileReader &reader, const std::string& name, util::DeallocatingVector<T> &vec)
inline void
read(storage::tar::FileReader &reader, const std::string &name, util::DeallocatingVector<T> &vec)
{
vec.resize(reader.ReadElementCount64(name));
reader.ReadStreaming<T>(name, vec.begin(), vec.size());
}
template <typename T>
inline void write(storage::tar::FileWriter &writer, const std::string& name, const util::DeallocatingVector<T> &vec)
inline void write(storage::tar::FileWriter &writer,
const std::string &name,
const util::DeallocatingVector<T> &vec)
{
writer.WriteElementCount64(name, vec.size());
writer.WriteStreaming<T>(name, vec.begin(), vec.size());
}
#if USE_STXXL_LIBRARY
template <typename T> inline void read(storage::tar::FileReader &reader, const std::string& name, stxxl::vector<T> &vec)
template <typename T>
inline void read(storage::tar::FileReader &reader, const std::string &name, stxxl::vector<T> &vec)
{
auto size = reader.ReadElementCount64(name);
vec.reserve(size);

@@ -55,23 +59,22 @@ template <typename T> inline void read(storage::tar::FileReader &reader, const s
}
template <typename T>
inline void write(storage::tar::FileWriter &writer, const std::string& name, const stxxl::vector<T> &vec)
inline void
write(storage::tar::FileWriter &writer, const std::string &name, const stxxl::vector<T> &vec)
{
writer.WriteElementCount64(name, vec.size());
writer.WriteStreaming<T>(name, vec.begin(), vec.size());
}
#endif
template <typename T>
void read(io::BufferReader &reader, std::vector<T> &data)
template <typename T> void read(io::BufferReader &reader, std::vector<T> &data)
{
const auto count = reader.ReadElementCount64();
data.resize(count);
reader.ReadInto(data.data(), count);
}
template <typename T>
void write(io::BufferWriter &writer, const std::vector<T> &data)
template <typename T> void write(io::BufferWriter &writer, const std::vector<T> &data)
{
const auto count = data.size();
writer.WriteElementCount64(count);

@@ -89,11 +92,11 @@ inline void read(tar::FileReader &reader, const std::string &name, std::string &
{
const auto count = reader.ReadElementCount64(name);
data.resize(count);
reader.ReadInto(name, const_cast<char*>(data.data()), count);
reader.ReadInto(name, const_cast<char *>(data.data()), count);
}
template <typename T>
inline void read(tar::FileReader &reader, const std::string& name, std::vector<T> &data)
inline void read(tar::FileReader &reader, const std::string &name, std::vector<T> &data)
{
const auto count = reader.ReadElementCount64(name);
data.resize(count);

@@ -179,7 +182,9 @@ void writeBoolVector(tar::FileWriter &writer, const std::string &name, const Vec
std::uint64_t number_of_blocks = std::ceil((double)count / CHAR_BIT);
writer.WriteStreaming<unsigned char>(
name, boost::make_function_input_iterator(encode_function, boost::infinite()), number_of_blocks);
name,
boost::make_function_input_iterator(encode_function, boost::infinite()),
number_of_blocks);
}
}
@@ -214,7 +214,8 @@ template <int N, typename T = std::string> struct FixedGroupBlock
template <typename OffsetIterator, typename OutByteIter>
OutByteIter WriteBlockPrefix(OffsetIterator first, OffsetIterator last, OutByteIter out) const
{
constexpr std::size_t MAX_LENGTH = std::numeric_limits<std::make_unsigned_t<ValueType>>::max();
constexpr std::size_t MAX_LENGTH =
std::numeric_limits<std::make_unsigned_t<ValueType>>::max();
auto index = 0;
std::array<ValueType, BLOCK_SIZE> prefix;

@@ -223,12 +224,13 @@ template <int N, typename T = std::string> struct FixedGroupBlock
{
const std::uint32_t data_length = *next - *curr;
if (data_length > MAX_LENGTH)
throw util::exception(boost::format("too large data length %1% > %2%") % data_length % MAX_LENGTH);
throw util::exception(boost::format("too large data length %1% > %2%") %
data_length % MAX_LENGTH);
prefix[index++] = data_length;
}
out = std::copy_n((const char *)prefix.data(), sizeof(ValueType)*BLOCK_SIZE, out);
out = std::copy_n((const char *)prefix.data(), sizeof(ValueType) * BLOCK_SIZE, out);
return out;
}

@@ -2,10 +2,10 @@
#define OSMR_UTIL_SERIALIZATION_HPP
#include "util/dynamic_graph.hpp"
#include "util/indexed_data.hpp"
#include "util/packed_vector.hpp"
#include "util/range_table.hpp"
#include "util/static_graph.hpp"
#include "util/indexed_data.hpp"
#include "util/static_rtree.hpp"
#include "storage/io.hpp"
@@ -101,8 +101,9 @@ inline void write(storage::tar::FileWriter &writer,
}
template <typename BlockPolicy, storage::Ownership Ownership>
inline void
read(storage::tar::FileReader &reader, const std::string &name, detail::IndexedDataImpl<BlockPolicy, Ownership> &index_data)
inline void read(storage::tar::FileReader &reader,
const std::string &name,
detail::IndexedDataImpl<BlockPolicy, Ownership> &index_data)
{
storage::serialization::read(reader, name + "/blocks", index_data.blocks);
storage::serialization::read(reader, name + "/values", index_data.values);

@@ -110,7 +111,8 @@ read(storage::tar::FileReader &reader, const std::string &name, detail::IndexedD
template <typename BlockPolicy, storage::Ownership Ownership>
inline void write(storage::tar::FileWriter &writer,
const std::string &name, const detail::IndexedDataImpl<BlockPolicy, Ownership> &index_data)
const std::string &name,
const detail::IndexedDataImpl<BlockPolicy, Ownership> &index_data)
{
storage::serialization::write(writer, name + "/blocks", index_data.blocks);
storage::serialization::write(writer, name + "/values", index_data.values);

@@ -120,21 +122,27 @@ template <class EdgeDataT,
storage::Ownership Ownership,
std::uint32_t BRANCHING_FACTOR,
std::uint32_t LEAF_PAGE_SIZE>
void read(storage::tar::FileReader &reader, const std::string& name, util::StaticRTree<EdgeDataT, Ownership, BRANCHING_FACTOR, LEAF_PAGE_SIZE> &rtree)
void read(storage::tar::FileReader &reader,
const std::string &name,
util::StaticRTree<EdgeDataT, Ownership, BRANCHING_FACTOR, LEAF_PAGE_SIZE> &rtree)
{
storage::serialization::read(reader, name + "/search_tree", rtree.m_search_tree);
storage::serialization::read(reader, name + "/search_tree_level_starts", rtree.m_tree_level_starts);
storage::serialization::read(
reader, name + "/search_tree_level_starts", rtree.m_tree_level_starts);
}
template <class EdgeDataT,
storage::Ownership Ownership,
std::uint32_t BRANCHING_FACTOR,
std::uint32_t LEAF_PAGE_SIZE>
void write(storage::tar::FileWriter &writer, const std::string& name, const util::StaticRTree<EdgeDataT, Ownership, BRANCHING_FACTOR, LEAF_PAGE_SIZE> &rtree){
void write(storage::tar::FileWriter &writer,
const std::string &name,
const util::StaticRTree<EdgeDataT, Ownership, BRANCHING_FACTOR, LEAF_PAGE_SIZE> &rtree)
{
storage::serialization::write(writer, name + "/search_tree", rtree.m_search_tree);
storage::serialization::write(writer, name + "/search_tree_level_starts", rtree.m_tree_level_starts);
storage::serialization::write(
writer, name + "/search_tree_level_starts", rtree.m_tree_level_starts);
}
}
}
}
|
@ -273,10 +273,9 @@ class StaticRTree
|
||||
explicit StaticRTree(const std::vector<EdgeDataT> &input_data_vector,
|
||||
const Vector<Coordinate> &coordinate_list,
|
||||
const boost::filesystem::path &on_disk_file_name)
|
||||
: m_coordinate_list(coordinate_list), m_objects{mmapFile<EdgeDataT>(
|
||||
on_disk_file_name,
|
||||
m_objects_region,
|
||||
input_data_vector.size() * sizeof(EdgeDataT))}
|
||||
: m_coordinate_list(coordinate_list),
|
||||
m_objects{mmapFile<EdgeDataT>(
|
||||
on_disk_file_name, m_objects_region, input_data_vector.size() * sizeof(EdgeDataT))}
|
||||
{
|
||||
const auto element_count = input_data_vector.size();
|
||||
std::vector<WrappedInputElement> input_wrapper_vector(element_count);
|
||||
@ -758,9 +757,7 @@ class StaticRTree
|
||||
}
|
||||
|
||||
friend void serialization::read<EdgeDataT, Ownership, BRANCHING_FACTOR, LEAF_PAGE_SIZE>(
|
||||
storage::tar::FileReader &reader,
|
||||
const std::string &name,
|
||||
StaticRTree &rtree);
|
||||
storage::tar::FileReader &reader, const std::string &name, StaticRTree &rtree);
|
||||
|
||||
friend void serialization::write<EdgeDataT, Ownership, BRANCHING_FACTOR, LEAF_PAGE_SIZE>(
|
||||
storage::tar::FileWriter &writer, const std::string &name, const StaticRTree &rtree);
|
||||
|
@@ -108,7 +108,8 @@ template <typename DataT> class vector_view
std::size_t size() const { return m_size; }
void resize(const size_t size) {
void resize(const size_t size)
{
if (size > m_size)
{
throw util::exception("Trying to resize a view to a larger size.");

@@ -116,7 +117,6 @@ template <typename DataT> class vector_view
m_size = size;
}
bool empty() const { return 0 == size(); }
DataT &operator[](const unsigned index)

@@ -194,12 +194,14 @@ template <> class vector_view<bool>
return m_ptr[bucket] & (1u << offset);
}
void reset(unsigned * ptr, std::size_t size) {
void reset(unsigned *ptr, std::size_t size)
{
m_ptr = ptr;
m_size = size;
}
void resize(const size_t size) {
void resize(const size_t size)
{
if (size > m_size)
{
throw util::exception("Trying to resize a view to a larger size.");
@@ -1,8 +1,8 @@
#include "util/static_rtree.hpp"
#include "extractor/edge_based_node_segment.hpp"
#include "extractor/query_node.hpp"
#include "extractor/files.hpp"
#include "extractor/packed_osm_ids.hpp"
#include "extractor/query_node.hpp"
#include "mocks/mock_datafacade.hpp"
#include "storage/io.hpp"

@@ -536,21 +536,21 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
&scripting_environment,
weight_multiplier,
&conditional_restriction_map](
// what nodes will be used? In most cases this will be the id
// stored in the edge_data. In case of duplicated nodes (e.g.
// due to via-way restrictions), one/both of these might
// refer to a newly added edge based node
const auto edge_based_node_from,
const auto edge_based_node_to,
// the situation of the turn
const auto node_along_road_entering,
const auto node_based_edge_from,
const auto intersection_node,
const auto node_based_edge_to,
const auto &turn_angle,
const auto &road_legs_on_the_right,
const auto &road_legs_on_the_left,
const auto &edge_geometries) {
// what nodes will be used? In most cases this will be the id
// stored in the edge_data. In case of duplicated nodes (e.g.
// due to via-way restrictions), one/both of these might
// refer to a newly added edge based node
const auto edge_based_node_from,
const auto edge_based_node_to,
// the situation of the turn
const auto node_along_road_entering,
const auto node_based_edge_from,
const auto intersection_node,
const auto node_based_edge_to,
const auto &turn_angle,
const auto &road_legs_on_the_right,
const auto &road_legs_on_the_left,
const auto &edge_geometries) {
const auto node_restricted =
isRestricted(node_along_road_entering,

@@ -872,7 +872,7 @@ void EdgeBasedGraphFactory::GenerateEdgeExpandedEdges(
// TODO: this loop is not optimized - once we have a few
// overrides available, we should index this for faster
// lookups
for (auto &override : unresolved_maneuver_overrides)
for (auto & override : unresolved_maneuver_overrides)
{
for (auto &turn : override.turn_sequence)
{
@@ -1,10 +1,10 @@
#include "extractor/extraction_containers.hpp"
#include "extractor/extraction_segment.hpp"
#include "extractor/extraction_way.hpp"
#include "extractor/files.hpp"
#include "extractor/name_table.hpp"
#include "extractor/restriction.hpp"
#include "extractor/serialization.hpp"
#include "extractor/name_table.hpp"
#include "extractor/files.hpp"
#include "util/coordinate_calculation.hpp"

@@ -165,7 +165,9 @@ void ExtractionContainers::WriteCharData(const std::string &file_name)
log << "writing street name index ... ";
TIMER_START(write_index);
files::writeNames(file_name, NameTable {NameTable::IndexedData (name_offsets.begin(), name_offsets.end(), name_char_data.begin())});
files::writeNames(file_name,
NameTable{NameTable::IndexedData(
name_offsets.begin(), name_offsets.end(), name_char_data.begin())});
TIMER_STOP(write_index);
log << "ok, after " << TIMER_SEC(write_index) << "s";

@@ -275,8 +277,8 @@ void ExtractionContainers::PrepareEdges(ScriptingEnvironment &scripting_environm
{
if (edge_iterator->result.osm_source_id < node_iterator->node_id)
{
util::Log(logDEBUG)
<< "Found invalid node reference " << edge_iterator->result.source;
util::Log(logDEBUG) << "Found invalid node reference "
<< edge_iterator->result.source;
edge_iterator->result.source = SPECIAL_NODEID;
++edge_iterator;
continue;

@@ -1031,9 +1033,8 @@ void ExtractionContainers::PrepareRestrictions()
// translate the turn from one segment onto another into a node restriction (the ways can
// only
// be connected at a single location)
auto const get_node_restriction_from_OSM_ids = [&](auto const from_id,
auto const to_id,
const OSMNodeID via_node) {
auto const get_node_restriction_from_OSM_ids = [&](
auto const from_id, auto const to_id, const OSMNodeID via_node) {
auto const from_segment_itr = referenced_ways.find(from_id);
if (from_segment_itr->second.way_id != from_id)
{
@@ -1,5 +1,6 @@
#include "extractor/extractor.hpp"
#include "extractor/compressed_node_based_graph_edge.hpp"
#include "extractor/edge_based_edge.hpp"
#include "extractor/extraction_containers.hpp"
#include "extractor/extraction_node.hpp"

@@ -8,13 +9,12 @@
#include "extractor/extractor_callbacks.hpp"
#include "extractor/files.hpp"
#include "extractor/maneuver_override_relation_parser.hpp"
#include "extractor/name_table.hpp"
#include "extractor/node_based_graph_factory.hpp"
#include "extractor/raster_source.hpp"
#include "extractor/restriction_filter.hpp"
#include "extractor/restriction_parser.hpp"
#include "extractor/scripting_environment.hpp"
#include "extractor/name_table.hpp"
#include "extractor/compressed_node_based_graph_edge.hpp"
#include "guidance/files.hpp"
#include "guidance/guidance_processing.hpp"

@@ -170,7 +170,7 @@ void SetExcludableClasses(const ExtractorCallbacks::ClassesMap &classes_map,
}
}
std::vector<CompressedNodeBasedGraphEdge> toEdgeList(const util::NodeBasedDynamicGraph& graph)
std::vector<CompressedNodeBasedGraphEdge> toEdgeList(const util::NodeBasedDynamicGraph &graph)
{
std::vector<CompressedNodeBasedGraphEdge> edges;
edges.reserve(graph.GetNumberOfEdges());

@@ -188,7 +188,6 @@ std::vector<CompressedNodeBasedGraphEdge> toEdgeList(const util::NodeBasedDynami
return edges;
}
}
/**

@@ -288,7 +287,8 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)
compressed_node_based_graph_writing.wait();
};
files::writeCompressedNodeBasedGraph(config.GetPath(".osrm.cnbg").string(), toEdgeList(node_based_graph));
files::writeCompressedNodeBasedGraph(config.GetPath(".osrm.cnbg").string(),
toEdgeList(node_based_graph));
node_based_graph_factory.GetCompressedEdges().PrintStatistics();

@@ -343,7 +343,8 @@ int Extractor::run(ScriptingEnvironment &scripting_environment)
util::Log() << "Saving edge-based node weights to file.";
TIMER_START(timer_write_node_weights);
extractor::files::writeEdgeBasedNodeWeights(config.GetPath(".osrm.enw"), edge_based_node_weights);
extractor::files::writeEdgeBasedNodeWeights(config.GetPath(".osrm.enw"),
edge_based_node_weights);
TIMER_STOP(timer_write_node_weights);
util::Log() << "Done writing. (" << TIMER_SEC(timer_write_node_weights) << ")";

@@ -558,9 +559,9 @@ Extractor::ParseOSMData(ScriptingEnvironment &scripting_environment,
const auto &rel = static_cast<const osmium::Relation &>(*entity);
const char *rel_type = rel.get_value_by_key("type");
if (!rel_type || !std::binary_search(relation_types.begin(),
relation_types.end(),
std::string(rel_type)))
if (!rel_type ||
!std::binary_search(
relation_types.begin(), relation_types.end(), std::string(rel_type)))
continue;
ExtractionRelation extracted_rel({rel.id(), osmium::item_type::relation});

@@ -823,8 +824,8 @@ void Extractor::BuildRTree(std::vector<EdgeBasedNodeSegment> edge_based_node_seg
edge_based_node_segments.resize(new_size);
TIMER_START(construction);
util::StaticRTree<EdgeBasedNodeSegment> rtree(edge_based_node_segments, coordinates,
config.GetPath(".osrm.fileIndex"));
util::StaticRTree<EdgeBasedNodeSegment> rtree(
edge_based_node_segments, coordinates, config.GetPath(".osrm.fileIndex"));
files::writeRamIndex(config.GetPath(".osrm.ramIndex"), rtree);
@@ -1,9 +1,9 @@
#include "extractor/intersection/mergable_road_detector.hpp"
#include "extractor/intersection/intersection_analysis.hpp"
#include "extractor/intersection/node_based_graph_walker.hpp"
#include "extractor/name_table.hpp"
#include "extractor/query_node.hpp"
#include "extractor/suffix_table.hpp"
#include "extractor/name_table.hpp"
#include "guidance/constants.hpp"
#include "util/bearing.hpp"

@@ -1,6 +1,6 @@
#include "extractor/node_based_graph_factory.hpp"
#include "extractor/graph_compressor.hpp"
#include "extractor/files.hpp"
#include "extractor/graph_compressor.hpp"
#include "storage/io.hpp"
#include "util/log.hpp"

@@ -38,7 +38,13 @@ void NodeBasedGraphFactory::LoadDataFromFile(const boost::filesystem::path &inpu
auto traffic_signals_iter = inserter(traffic_signals, end(traffic_signals));
std::vector<NodeBasedEdge> edge_list;
files::readRawNBGraph(input_file, barriers_iter, traffic_signals_iter, coordinates, osm_node_ids, edge_list, annotation_data);
files::readRawNBGraph(input_file,
barriers_iter,
traffic_signals_iter,
coordinates,
osm_node_ids,
edge_list,
annotation_data);
const auto number_of_node_based_nodes = coordinates.size();
if (edge_list.empty())
@@ -162,10 +162,8 @@ updateSegmentData(const UpdaterConfig &config,
// closure to convert SpeedSource value to weight and count fallbacks to durations
std::atomic<std::uint32_t> fallbacks_to_duration{0};
auto convertToWeight = [&profile_properties,
&fallbacks_to_duration](const SegmentWeight &existing_weight,
const SpeedSource &value,
double distance_in_meters) {
auto convertToWeight = [&profile_properties, &fallbacks_to_duration](
const SegmentWeight &existing_weight, const SpeedSource &value, double distance_in_meters) {
double rate = std::numeric_limits<double>::quiet_NaN();

@@ -440,8 +438,8 @@ updateTurnPenalties(const UpdaterConfig &config,
{
auto map =
util::mmapTarFile(config.GetPath(".osrm.turn_penalties_index"), turn_index_region);
turn_index_blocks =
reinterpret_cast<const extractor::lookup::TurnIndexBlock *>(map["/extractor/turn_index"].first);
turn_index_blocks = reinterpret_cast<const extractor::lookup::TurnIndexBlock *>(
map["/extractor/turn_index"].first);
}
// Get the turn penalty and update to the new value if required

@@ -770,9 +768,9 @@ Updater::LoadAndUpdateEdgeExpandedGraph(std::vector<extractor::EdgeBasedEdge> &e
{
if (turn_weight_penalty < 0)
{
util::Log(logWARNING)
<< "turn penalty " << turn_weight_penalty
<< " is too negative: clamping turn weight to " << weight_min_value;
util::Log(logWARNING) << "turn penalty " << turn_weight_penalty
<< " is too negative: clamping turn weight to "
<< weight_min_value;
turn_weight_penalty = weight_min_value - new_weight;
turn_weight_penalties[edge.data.turn_id] = turn_weight_penalty;
}

@@ -6,7 +6,7 @@
struct TemporaryFile
{
TemporaryFile() : path(boost::filesystem::unique_path()) {}
TemporaryFile(const std::string& path) : path(path){}
TemporaryFile(const std::string &path) : path(path) {}
~TemporaryFile() { boost::filesystem::remove(path); }
@@ -7,7 +7,6 @@
#include <boost/test/unit_test.hpp>
BOOST_AUTO_TEST_SUITE(tar)
using namespace osrm;

@@ -25,7 +24,7 @@ BOOST_AUTO_TEST_CASE(read_write_hsgr)
TestEdge{3, 1, 1},
TestEdge{4, 3, 1},
TestEdge{5, 1, 1}};
auto reference_graph = QueryGraph {6, toEdges<QueryEdge>(makeGraph(edges))};
auto reference_graph = QueryGraph{6, toEdges<QueryEdge>(makeGraph(edges))};
std::vector<std::vector<bool>> reference_filters = {
{false, false, true, true, false, false, true},
{true, false, true, false, true, false, true},

@@ -33,8 +32,12 @@ BOOST_AUTO_TEST_CASE(read_write_hsgr)
{true, true, true, true, true, true, true},
};
TemporaryFile tmp {TEST_DATA_DIR "/read_write_hsgr_test.osrm.hsgr"};
contractor::files::writeGraph(tmp.path, reference_checksum, reference_graph, reference_filters, reference_connectivity_checksum);
TemporaryFile tmp{TEST_DATA_DIR "/read_write_hsgr_test.osrm.hsgr"};
contractor::files::writeGraph(tmp.path,
reference_checksum,
reference_graph,
reference_filters,
reference_connectivity_checksum);
unsigned checksum;
unsigned connectivity_checksum;
@@ -22,17 +22,19 @@ inline contractor::ContractorGraph makeGraph(const std::vector<TestEdge> &edges)
std::tie(start, target, weight) = edge;
max_id = std::max(std::max(start, target), max_id);
input_edges.push_back(contractor::ContractorEdge{
start, target, contractor::ContractorEdgeData{weight, weight * 2, id++, 0, false, true, false}});
start,
target,
contractor::ContractorEdgeData{weight, weight * 2, id++, 0, false, true, false}});
input_edges.push_back(contractor::ContractorEdge{
target, start, contractor::ContractorEdgeData{weight, weight * 2, id++, 0, false, false, true}});
target,
start,
contractor::ContractorEdgeData{weight, weight * 2, id++, 0, false, false, true}});
}
std::sort(input_edges.begin(), input_edges.end());
return contractor::ContractorGraph{max_id + 1, std::move(input_edges)};
}
}
}
#endif
@@ -70,7 +70,7 @@ BOOST_AUTO_TEST_CASE(check_name_table_fill)
"X", "Y", "Z", "", "", "", "", "", "", "", "0", ""};
auto data = PrapareNameTableData(expected_names, true);
NameTable name_table {data};
NameTable name_table{data};
for (std::size_t index = 0; index < expected_names.size(); ++index)
{

@@ -91,7 +91,7 @@ BOOST_AUTO_TEST_CASE(check_name_table_nofill)
"X", "Y", "Z", "", "", "", "", "", "", "", "0", ""};
auto data = PrapareNameTableData(expected_names, false);
NameTable name_table {data};
NameTable name_table{data};
// CALLGRIND_START_INSTRUMENTATION;
for (std::size_t index = 0; index < expected_names.size(); ++index)
@@ -56,8 +56,11 @@ BOOST_AUTO_TEST_CASE(tar_serialize_int_vector)
{
TemporaryFile tmp;
{
std::vector<std::vector<int>> data = {
{}, {0}, {1, -2, 3}, {4, -5, 6, -7, 8, -9, 10, -11}, {-12, 13, -14, 15, -16, 17, -18, 19, -20}};
std::vector<std::vector<int>> data = {{},
{0},
{1, -2, 3},
{4, -5, 6, -7, 8, -9, 10, -11},
{-12, 13, -14, 15, -16, 17, -18, 19, -20}};
for (const auto &v : data)
{
{
@@ -97,13 +97,76 @@ BOOST_AUTO_TEST_CASE(continue_write_tar_file)
TemporaryFile tmp{TEST_DATA_DIR "/tar_continue_write_test.tar"};
// more than 64 values to ensure we fill up more than one tar block of 512 bytes
std::vector<std::uint64_t> vector_64bit = {
0, 1, 2, 3, 4, 1ULL << 62, 0, 1 << 22, 0xFFFFFFFFFFFFFFFF, 0xFF00FF0000FF00FF, 11, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
65, 66, 67, 68, 69, 70
};
std::vector<std::uint64_t> vector_64bit = {0,
1,
2,
3,
4,
1ULL << 62,
0,
1 << 22,
0xFFFFFFFFFFFFFFFF,
0xFF00FF0000FF00FF,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70};
{
storage::tar::FileWriter writer(tmp.path, storage::tar::FileWriter::GenerateFingerprint);

@@ -111,7 +174,8 @@ BOOST_AUTO_TEST_CASE(continue_write_tar_file)
writer.WriteFrom("baz/bla/64bit_vector", vector_64bit.data(), 12);
writer.ContinueFrom("baz/bla/64bit_vector", vector_64bit.data() + 12, 30);
writer.ContinueFrom("baz/bla/64bit_vector", vector_64bit.data() + 42, 10);
writer.ContinueFrom("baz/bla/64bit_vector", vector_64bit.data() + 52, vector_64bit.size() - 52);
writer.ContinueFrom(
"baz/bla/64bit_vector", vector_64bit.data() + 52, vector_64bit.size() - 52);
}
storage::tar::FileReader reader(tmp.path, storage::tar::FileReader::VerifyFingerprint);
@@ -32,8 +32,8 @@ BOOST_AUTO_TEST_CASE(io_data)
{
osrm::storage::io::FileWriter outfile(IO_TMP_FILE,
osrm::storage::io::FileWriter::GenerateFingerprint);
outfile.WriteElementCount64(data_in.size());
outfile.WriteFrom(data_in.data(), data_in.size());
outfile.WriteElementCount64(data_in.size());
outfile.WriteFrom(data_in.data(), data_in.size());
}
osrm::storage::io::FileReader infile(IO_TMP_FILE,
@@ -86,17 +86,15 @@ BOOST_AUTO_TEST_CASE(tar_serialize_variable_indexed_data)
std::vector<std::vector<unsigned>> offset_data = {
{5, 8, 8},
{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
};
{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}};
std::vector<std::string> char_data = {
"HalloFoo",
"ABCDEFGHIJKLMNOPQR",
"ABCDEFGHIJKLMNOP",
"HalloFoo", "ABCDEFGHIJKLMNOPQR", "ABCDEFGHIJKLMNOP",
};
for (const auto i : util::irange<std::size_t>(0, offset_data.size()))
{
TestIndexedData indexed {offset_data[i].begin(), offset_data[i].end(), char_data[i].begin()};
TestIndexedData indexed{
offset_data[i].begin(), offset_data[i].end(), char_data[i].begin()};
{
storage::tar::FileWriter writer(tmp.path,
storage::tar::FileWriter::GenerateFingerprint);

@@ -107,7 +105,7 @@ BOOST_AUTO_TEST_CASE(tar_serialize_variable_indexed_data)
storage::tar::FileReader reader(tmp.path, storage::tar::FileReader::VerifyFingerprint);
util::serialization::read(reader, "my_indexed_data", result);
for(auto j : util::irange<std::size_t>(0, offset_data[i].size() - 1))
for (auto j : util::irange<std::size_t>(0, offset_data[i].size() - 1))
{
BOOST_CHECK_EQUAL(indexed.at(j), result.at(j));
}

@@ -7,8 +7,8 @@
#include "util/rectangle.hpp"
#include "util/typedefs.hpp"
#include "mocks/mock_datafacade.hpp"
#include "../common/temporary_file.hpp"
#include "mocks/mock_datafacade.hpp"
#include <boost/functional/hash.hpp>
#include <boost/test/auto_unit_test.hpp>