Add table API

parent ca6c1d7bb1
commit fda8f6ecef

include/engine/api/table_api.hpp (new file, +117)
@@ -0,0 +1,117 @@
#ifndef ENGINE_API_TABLE_HPP
#define ENGINE_API_TABLE_HPP

#include "engine/api/base_api.hpp"
#include "engine/api/table_parameters.hpp"
#include "engine/api/json_factory.hpp"

#include "engine/datafacade/datafacade_base.hpp"

#include "engine/guidance/assemble_leg.hpp"
#include "engine/guidance/assemble_route.hpp"
#include "engine/guidance/assemble_geometry.hpp"
#include "engine/guidance/assemble_overview.hpp"
#include "engine/guidance/assemble_steps.hpp"

#include "engine/internal_route_result.hpp"

#include "util/integer_range.hpp"

namespace osrm
{
namespace engine
{
namespace api
{

namespace detail
{
template <typename ChildT> class TableAPI_ : public BaseAPI_<TableAPI_<ChildT>>
{
    using BaseT = BaseAPI_<TableAPI_<ChildT>>;

  public:
    TableAPI_(const datafacade::BaseDataFacade &facade_, const TableParameters &parameters_)
        : BaseT(facade_, parameters_), parameters(parameters_)
    {
    }

    virtual void MakeResponse(const std::vector<EdgeWeight> &durations,
                              const std::vector<PhantomNode> &phantoms,
                              util::json::Object &response) const
    {
        // symmetric case
        if (parameters.sources.empty())
        {
            BOOST_ASSERT(parameters.destinations.empty());
            response.values["sources"] = MakeWaypoints(phantoms);
            response.values["destinations"] = MakeWaypoints(phantoms);
            response.values["durations"] = MakeTable(durations, phantoms.size(), phantoms.size());
        }
        else
        {
            response.values["sources"] = MakeWaypoints(phantoms, parameters.sources);
            response.values["destinations"] = MakeWaypoints(phantoms, parameters.destinations);
            response.values["durations"] =
                MakeTable(durations, parameters.sources.size(), parameters.destinations.size());
        }
        response.values["code"] = "ok";
    }

  protected:
    virtual util::json::Array MakeWaypoints(const std::vector<PhantomNode> &phantoms) const
    {
        util::json::Array json_waypoints;
        json_waypoints.values.reserve(phantoms.size());
        BOOST_ASSERT(phantoms.size() == parameters.coordinates.size());
        auto phantom_iter = phantoms.begin();
        auto coordinate_iter = parameters.coordinates.begin();
        for (; phantom_iter != phantoms.end() && coordinate_iter != parameters.coordinates.end();
             ++phantom_iter, ++coordinate_iter)
        {
            json_waypoints.values.push_back(BaseT::MakeWaypoint(*coordinate_iter, *phantom_iter));
        }
        return json_waypoints;
    }

    virtual util::json::Array MakeWaypoints(const std::vector<PhantomNode> &phantoms,
                                            const std::vector<std::size_t> &indices) const
    {
        util::json::Array json_waypoints;
        json_waypoints.values.reserve(indices.size());
        for (auto idx : indices)
        {
            BOOST_ASSERT(idx < phantoms.size() && idx < parameters.coordinates.size());
            json_waypoints.values.push_back(
                BaseT::MakeWaypoint(parameters.coordinates[idx], phantoms[idx]));
        }
        return json_waypoints;
    }

    virtual util::json::Array MakeTable(const std::vector<EdgeWeight> &values,
                                        std::size_t number_of_rows,
                                        std::size_t number_of_columns) const
    {
        util::json::Array json_table;
        for (const auto row : util::irange<std::size_t>(0, number_of_rows))
        {
            util::json::Array json_row;
            auto row_begin_iterator = values.begin() + (row * number_of_columns);
            auto row_end_iterator = values.begin() + ((row + 1) * number_of_columns);
            json_row.values.insert(json_row.values.end(), row_begin_iterator, row_end_iterator);
            json_table.values.push_back(std::move(json_row));
        }
        return json_table;
    }

    const TableParameters &parameters;
};
}

// Expose non-templated version
using TableAPI = detail::TableAPI_<std::true_type>;
}
}
}

#endif
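As an aside on the layout MakeTable assumes: the durations arrive as one flat, row-major vector, and each JSON row is cut out with plain offset arithmetic. A minimal standalone sketch of that slicing, not part of the commit, with plain ints standing in for EdgeWeight:

#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
    // Flat row-major matrix: entry (row, col) lives at row * number_of_columns + col,
    // exactly the layout MakeTable expects for its input vector.
    const std::size_t number_of_rows = 2, number_of_columns = 3;
    const std::vector<int> durations = {10, 20, 30, 40, 50, 60};

    for (std::size_t row = 0; row < number_of_rows; ++row)
    {
        // Same iterator arithmetic as MakeTable's row_begin/row_end iterators.
        const auto row_begin = durations.begin() + row * number_of_columns;
        const auto row_end = durations.begin() + (row + 1) * number_of_columns;
        for (auto it = row_begin; it != row_end; ++it)
            std::cout << *it << ' ';
        std::cout << '\n';
    }
}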
@@ -36,6 +36,8 @@ class ManyToManyRouting final
        {
        }
    };

    // FIXME This should be replaced by an std::unordered_multimap, though this needs benchmarking
    using SearchSpaceWithBuckets = std::unordered_map<NodeID, std::vector<NodeBucket>>;

  public:
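For context on SearchSpaceWithBuckets: in the many-to-many search, backward searches from each target deposit a bucket at every node they settle, and the forward search from a source combines its own distance to a node with every bucket stored there. A rough sketch of that lookup pattern, with assumed field names rather than the engine's actual NodeBucket layout:

#include <cstdint>
#include <unordered_map>
#include <vector>

// Hypothetical stand-ins for the engine types.
using NodeID = std::uint32_t;
struct NodeBucket
{
    unsigned target_id; // column of the result table this bucket belongs to
    int distance;       // backward-search distance from that target to the node
};

using SearchSpaceWithBuckets = std::unordered_map<NodeID, std::vector<NodeBucket>>;

int main()
{
    SearchSpaceWithBuckets buckets;
    // Backward searches append one bucket per (node, target) pair; the vector
    // per key is what makes the map act like a multimap.
    buckets[42].push_back({0, 120});
    buckets[42].push_back({1, 80});

    // A forward search settling node 42 at distance 100 fills two table cells.
    std::vector<int> table(2);
    const auto it = buckets.find(42);
    if (it != buckets.end())
        for (const auto &bucket : it->second)
            table[bucket.target_id] = 100 + bucket.distance;
}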
@@ -1,6 +1,7 @@
#include "engine/plugins/table.hpp"

#include "engine/api/table_parameters.hpp"
#include "engine/api/table_api.hpp"
#include "engine/object_encoder.hpp"
#include "engine/routing_algorithms/many_to_many.hpp"
#include "engine/search_engine_data.hpp"
@@ -75,66 +76,9 @@ Status TablePlugin::HandleRequest(const api::TableParameters &params, util::json
        return Error("no-table", "No table found", result);
    }

    util::json::Array matrix_json_array;
    for (const auto row : util::irange<std::size_t>(0, params.sources.size()))
    {
        util::json::Array json_row;
        auto row_begin_iterator = result_table.begin() + (row * params.destinations.size());
        auto row_end_iterator = result_table.begin() + ((row + 1) * params.destinations.size());
        json_row.values.insert(json_row.values.end(), row_begin_iterator, row_end_iterator);
        matrix_json_array.values.push_back(std::move(json_row));
    }
    result.values["distance_table"] = matrix_json_array;
    api::TableAPI table_api{facade, params};
    table_api.MakeResponse(result_table, snapped_phantoms, result);

    // symmetric case
    if (params.sources.empty())
    {
        BOOST_ASSERT(params.destinations.empty());
        util::json::Array target_coord_json_array;
        for (const auto &phantom : snapped_phantoms)
        {
            util::json::Array json_coord;
            json_coord.values.push_back(phantom.location.lat / COORDINATE_PRECISION);
            json_coord.values.push_back(phantom.location.lon / COORDINATE_PRECISION);
            target_coord_json_array.values.push_back(std::move(json_coord));
        }
        result.values["destination_coordinates"] = std::move(target_coord_json_array);
        util::json::Array source_coord_json_array;
        for (const auto &phantom : snapped_phantoms)
        {
            util::json::Array json_coord;
            json_coord.values.push_back(phantom.location.lat / COORDINATE_PRECISION);
            json_coord.values.push_back(phantom.location.lon / COORDINATE_PRECISION);
            source_coord_json_array.values.push_back(std::move(json_coord));
        }
        result.values["source_coordinates"] = std::move(source_coord_json_array);
    }
    // asymmetric case
    else
    {
        BOOST_ASSERT(!params.destinations.empty());

        util::json::Array target_coord_json_array;
        for (const auto index : params.sources)
        {
            const auto &phantom = snapped_phantoms[index];
            util::json::Array json_coord;
            json_coord.values.push_back(phantom.location.lat / COORDINATE_PRECISION);
            json_coord.values.push_back(phantom.location.lon / COORDINATE_PRECISION);
            target_coord_json_array.values.push_back(std::move(json_coord));
        }
        result.values["destination_coordinates"] = std::move(target_coord_json_array);
        util::json::Array source_coord_json_array;
        for (const auto index : params.sources)
        {
            const auto &phantom = snapped_phantoms[index];
            util::json::Array json_coord;
            json_coord.values.push_back(phantom.location.lat / COORDINATE_PRECISION);
            json_coord.values.push_back(phantom.location.lon / COORDINATE_PRECISION);
            source_coord_json_array.values.push_back(std::move(json_coord));
        }
        result.values["source_coordinates"] = std::move(source_coord_json_array);
    }
    return Status::Ok;
}
}
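The net effect of this hunk: the hand-rolled distance_table, source_coordinates, and destination_coordinates assembly moves into api::TableAPI::MakeResponse, which also picks the table dimensions from the parameters. A standalone sketch of that dispatch, with illustrative names rather than the engine's:

#include <cassert>
#include <cstddef>
#include <vector>

// With no explicit sources/destinations every coordinate acts as both,
// giving a symmetric n x n table; otherwise the table is
// |sources| x |destinations|.
struct TableDims
{
    std::size_t rows, cols;
};

TableDims dimensions(const std::vector<std::size_t> &sources,
                     const std::vector<std::size_t> &destinations,
                     std::size_t number_of_coordinates)
{
    if (sources.empty())
    {
        assert(destinations.empty()); // mirrors the BOOST_ASSERT in MakeResponse
        return {number_of_coordinates, number_of_coordinates};
    }
    return {sources.size(), destinations.size()};
}

int main()
{
    const auto symmetric = dimensions({}, {}, 4);          // 4 x 4
    const auto asymmetric = dimensions({0, 1}, {2, 3}, 4); // 2 x 2
    assert(symmetric.rows == 4 && asymmetric.cols == 2);
}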