diff --git a/.gitignore b/.gitignore index b8ca61d98..2e6107ad3 100644 --- a/.gitignore +++ b/.gitignore @@ -33,11 +33,9 @@ ehthumbs.db Icon? Thumbs.db -# SCons related files # +# build related files # ####################### -SconsBuilder* -.scon* -.build +/build/ # Eclipse related files # ######################### diff --git a/Algorithms/IteratorBasedCRC32.h b/Algorithms/IteratorBasedCRC32.h index 4999754c9..5d4415cb5 100644 --- a/Algorithms/IteratorBasedCRC32.h +++ b/Algorithms/IteratorBasedCRC32.h @@ -30,7 +30,6 @@ class IteratorbasedCRC32 { private: typedef typename ContainerT::iterator ContainerT_iterator; unsigned crc; - unsigned slowcrc_table[1<<8]; typedef boost::crc_optimal<32, 0x1EDC6F41, 0x0, 0x0, true, true> my_crc_32_type; typedef unsigned (IteratorbasedCRC32::*CRC32CFunctionPtr)(char *str, unsigned len, unsigned crc); diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..3de4c4f13 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,111 @@ +cmake_minimum_required(VERSION 2.6) +project(OSRM) +include(FindPackageHandleStandardArgs) +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake) +set(BOOST_COMPONENTS filesystem regex system thread) + +file(GLOB ExtractorGlob Extractor/*.cpp) +set(ExtractorSources extractor.cpp ${ExtractorGlob}) +add_executable(osrm-extract ${ExtractorSources}) + +file(GLOB PrepareGlob Contractor/*.cpp) +set(PrepareSources createHierarchy.cpp ${PrepareGlob}) +add_executable(osrm-prepare ${PrepareSources}) + +file(GLOB RoutedGlob Server/DataStructures/*.cpp Descriptors/*.cpp DataStructures/SearchEngine*.cpp) +set(RoutedSources routed.cpp ${RoutedGlob}) +add_executable(osrm-routed ${RoutedSources}) +set_target_properties(osrm-routed PROPERTIES COMPILE_FLAGS -DROUTED) + +# Check the release mode +if(NOT CMAKE_BUILD_TYPE MATCHES Debug) + set(CMAKE_BUILD_TYPE Release) +endif(NOT CMAKE_BUILD_TYPE MATCHES Debug) +if(CMAKE_BUILD_TYPE MATCHES Debug) + message(STATUS "Configuring OSRM in debug 
mode") +endif(CMAKE_BUILD_TYPE MATCHES Debug) +if(CMAKE_BUILD_TYPE MATCHES Release) + message(STATUS "Configuring OSRM in release mode") +endif(CMAKE_BUILD_TYPE MATCHES Release) + +#Configuring compilers +if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") + # using Clang + set(CMAKE_CXX_FLAGS "-Wall -Wno-unknown-pragmas -Wno-unneeded-internal-declaration") + message(STATUS "OpenMP parallelization not available using clang++") +elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + # using GCC + set(CMAKE_CXX_FLAGS "-Wall -fopenmp -pedantic") +elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel") + # using Intel C++ + set(CMAKE_CXX_FLAGS "-static-intel -wd10237 -Wall -openmp -ipo") +elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") + # using Visual Studio C++ +endif() + +if(APPLE) + SET(CMAKE_OSX_ARCHITECTURES "x86_64") + message("Set Architecture to x64 on OS X") +endif() + +#Check Boost +set(BOOST_MIN_VERSION "1.44.0") +find_package( Boost ${BOOST_MIN_VERSION} COMPONENTS ${BOOST_COMPONENTS} REQUIRED ) +if (NOT Boost_FOUND) + message(FATAL_ERROR "Fatal error: Boost (version >= 1.44.0) required.\n") +endif (NOT Boost_FOUND) +include_directories(${Boost_INCLUDE_DIRS}) +target_link_libraries( osrm-extract ${Boost_LIBRARIES} ) +target_link_libraries( osrm-prepare ${Boost_LIBRARIES} ) +target_link_libraries( osrm-routed ${Boost_LIBRARIES} ) + +find_package ( BZip2 REQUIRED ) +include_directories(${BZIP_INCLUDE_DIRS}) +target_link_libraries (osrm-extract ${BZIP2_LIBRARIES}) + +find_package( ZLIB REQUIRED ) +target_link_libraries (osrm-extract ${ZLIB_LIBRARY}) +target_link_libraries (osrm-routed ${ZLIB_LIBRARY}) + +find_package( Threads REQUIRED ) +target_link_libraries (osrm-extract ${Threads_LIBRARY}) + +find_package( Lua51 REQUIRED ) +include_directories(${LUA_INCLUDE_DIR}) +target_link_libraries( osrm-extract ${LUA_LIBRARY} ) +target_link_libraries( osrm-prepare ${LUA_LIBRARY} ) + +find_package( LibXml2 REQUIRED ) +include_directories(${LIBXML2_INCLUDE_DIR}) 
+target_link_libraries (osrm-extract ${LIBXML2_LIBRARIES}) + +find_package( Luabind REQUIRED ) +include_directories(${LUABIND_INCLUDE_DIR}) +target_link_libraries (osrm-extract ${LUABIND_LIBRARY}) +target_link_libraries (osrm-prepare ${LUABIND_LIBRARY}) + +find_package( Protobuf REQUIRED ) +include_directories(${PROTOBUF_INCLUDE_DIRS}) +target_link_libraries (osrm-extract ${PROTOBUF_LIBRARY}) +target_link_libraries (osrm-prepare ${PROTOBUF_LIBRARY}) + +find_package( STXXL REQUIRED ) +include_directories(${STXXL_INCLUDE_DIR}) +target_link_libraries (osrm-extract ${STXXL_LIBRARY}) +target_link_libraries (osrm-prepare ${STXXL_LIBRARY}) + +find_package( OSMPBF REQUIRED ) +include_directories(${OSMPBF_INCLUDE_DIR}) +target_link_libraries (osrm-extract ${OSMPBF_LIBRARY}) +target_link_libraries (osrm-prepare ${OSMPBF_LIBRARY}) + +if(WITH_TOOLS) + message("-- Activating OSRM internal tools") + find_package( GDAL ) + if(GDAL_FOUND) + add_executable(osrm-components Tools/componentAnalysis.cpp) + include_directories(${GDAL_INCLUDE_DIR}) + target_link_libraries( osrm-components ${GDAL_LIBRARIES} ) + target_link_libraries( osrm-components ${Boost_LIBRARIES} ) + endif(GDAL_FOUND) +endif(WITH_TOOLS) diff --git a/Contractor/Contractor.h b/Contractor/Contractor.h index b9059ca62..35d2aa29e 100644 --- a/Contractor/Contractor.h +++ b/Contractor/Contractor.h @@ -20,17 +20,6 @@ or see http://www.gnu.org/licenses/agpl.txt. #ifndef CONTRACTOR_H_INCLUDED #define CONTRACTOR_H_INCLUDED -#include -#include -#include - -#include -#include - -#include -#include -#include -#include #include "TemporaryStorage.h" #include "../DataStructures/BinaryHeap.h" @@ -42,6 +31,19 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#include "../Util/OpenMPWrapper.h" #include "../Util/StringUtil.h" +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include + class Contractor { private: @@ -119,7 +121,7 @@ public: newEdge.target = diter->target(); newEdge.data = _ContractorEdgeData( (std::max)((int)diter->weight(), 1 ), 1, diter->id(), false, diter->isForward(), diter->isBackward()); - assert( newEdge.data.distance > 0 ); + BOOST_ASSERT_MSG( newEdge.data.distance > 0, "edge distance < 1" ); #ifndef NDEBUG if ( newEdge.data.distance > 24 * 60 * 60 * 10 ) { WARN("Edge weight large -> " << newEdge.data.distance); @@ -232,8 +234,9 @@ public: //initialize the variables #pragma omp parallel for schedule ( guided ) - for ( int x = 0; x < ( int ) numberOfNodes; ++x ) + for ( int x = 0; x < ( int ) numberOfNodes; ++x ) { remainingNodes[x].id = x; + } std::cout << "initializing elimination PQ ..." << std::flush; #pragma omp parallel @@ -247,7 +250,7 @@ public: std::cout << "ok" << std::endl << "preprocessing " << numberOfNodes << " nodes ..." 
<< std::flush; bool flushedContractor = false; - while ( numberOfContractedNodes < numberOfNodes ) { + while ( numberOfNodes > 2 && numberOfContractedNodes < numberOfNodes ) { if(!flushedContractor && (numberOfContractedNodes > (numberOfNodes*0.65) ) ){ DeallocatingVector<_ContractorEdge> newSetOfEdges; //this one is not explicitely cleared since it goes out of scope anywa std::cout << " [flush " << numberOfContractedNodes << " nodes] " << std::flush; @@ -282,7 +285,6 @@ public: //walk over all nodes for(unsigned i = 0; i < _graph->GetNumberOfNodes(); ++i) { - //INFO("Restructuring node " << i << "|" << _graph->GetNumberOfNodes()); const NodeID start = i; for(_DynamicGraph::EdgeIterator currentEdge = _graph->BeginEdges(start); currentEdge < _graph->EndEdges(start); ++currentEdge) { _DynamicGraph::EdgeData & data = _graph->GetEdgeData(currentEdge); @@ -301,8 +303,14 @@ public: newEdge.target = newNodeIDFromOldNodeIDMap[target]; newEdge.data = data; newEdge.data.originalViaNodeID = true; - assert(UINT_MAX != newNodeIDFromOldNodeIDMap[start] ); - assert(UINT_MAX != newNodeIDFromOldNodeIDMap[target]); + BOOST_ASSERT_MSG( + UINT_MAX != newNodeIDFromOldNodeIDMap[start], + "new start id not resolveable" + ); + BOOST_ASSERT_MSG( + UINT_MAX != newNodeIDFromOldNodeIDMap[target], + "new target id not resolveable" + ); newSetOfEdges.push_back(newEdge); } } @@ -311,8 +319,6 @@ public: tempStorage.seek(temporaryStorageSlotID, initialFilePosition); tempStorage.writeToSlot(temporaryStorageSlotID, (char*)&numberOfTemporaryEdges, sizeof(unsigned)); - // INFO("Flushed " << numberOfTemporaryEdges << " edges to disk"); - //Delete map from old NodeIDs to new ones. 
std::vector().swap(newNodeIDFromOldNodeIDMap); @@ -438,38 +444,48 @@ public: Percent p (_graph->GetNumberOfNodes()); INFO("Getting edges of minimized graph"); NodeID numberOfNodes = _graph->GetNumberOfNodes(); - if(oldNodeIDFromNewNodeIDMap.size()) { + if(_graph->GetNumberOfNodes()) { for ( NodeID node = 0; node < numberOfNodes; ++node ) { p.printStatus(node); for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ); edge < endEdges; ++edge ) { const NodeID target = _graph->GetTarget( edge ); const _DynamicGraph::EdgeData& data = _graph->GetEdgeData( edge ); Edge newEdge; - newEdge.source = oldNodeIDFromNewNodeIDMap[node]; - newEdge.target = oldNodeIDFromNewNodeIDMap[target]; - assert(UINT_MAX != newEdge.source); - assert(UINT_MAX != newEdge.target); - + if(0 != oldNodeIDFromNewNodeIDMap.size()) { + newEdge.source = oldNodeIDFromNewNodeIDMap[node]; + newEdge.target = oldNodeIDFromNewNodeIDMap[target]; + } else { + newEdge.source = node; + newEdge.target = target; + } + BOOST_ASSERT_MSG( + UINT_MAX != newEdge.source, + "Source id invalid" + ); + BOOST_ASSERT_MSG( + UINT_MAX != newEdge.target, + "Target id invalid" + ); newEdge.data.distance = data.distance; newEdge.data.shortcut = data.shortcut; - if(!data.originalViaNodeID) + if(!data.originalViaNodeID && oldNodeIDFromNewNodeIDMap.size()) { newEdge.data.id = oldNodeIDFromNewNodeIDMap[data.id]; - else + } else { newEdge.data.id = data.id; - - assert(newEdge.data.id != UINT_MAX); + } + BOOST_ASSERT_MSG( + newEdge.data.id <= INT_MAX, //2^31 + "edge id invalid" + ); newEdge.data.forward = data.forward; newEdge.data.backward = data.backward; edges.push_back( newEdge ); } } } - INFO("Renumbered edges of minimized graph, freeing space"); _graph.reset(); std::vector().swap(oldNodeIDFromNewNodeIDMap); - INFO("Loading temporary edges"); - // std::ifstream temporaryEdgeStorage(temporaryEdgeStorageFilename.c_str(), std::ios::binary); TemporaryStorage & tempStorage = 
TemporaryStorage::GetInstance(); //Also get the edges from temporary storage unsigned numberOfTemporaryEdges = 0; @@ -494,7 +510,6 @@ public: edges.push_back( newEdge ); } tempStorage.deallocateSlot(temporaryStorageSlotID); - INFO("Hierarchy has " << edges.size() << " edges"); } private: @@ -517,24 +532,27 @@ private: if ( heap.GetData( node ).target ) { ++targetsFound; - if ( targetsFound >= numTargets ) + if ( targetsFound >= numTargets ) { return; + } } //iterate over all edges of node for ( _DynamicGraph::EdgeIterator edge = _graph->BeginEdges( node ), endEdges = _graph->EndEdges( node ); edge != endEdges; ++edge ) { const _ContractorEdgeData& data = _graph->GetEdgeData( edge ); - if ( !data.forward ) + if ( !data.forward ){ continue; + } const NodeID to = _graph->GetTarget( edge ); - if(middleNode == to) + if(middleNode == to) { continue; + } const int toDistance = distance + data.distance; //New Node discovered -> Add to Heap + Node Info Storage - if ( !heap.WasInserted( to ) ) + if ( !heap.WasInserted( to ) ) { heap.Insert( to, toDistance, _HeapData(currentHop, false) ); - + } //Found a shorter Path -> Update distance else if ( toDistance < heap.GetKey( to ) ) { heap.DecreaseKey( to, toDistance ); @@ -584,8 +602,9 @@ private: for ( _DynamicGraph::EdgeIterator outEdge = _graph->BeginEdges( node ), endOutEdges = _graph->EndEdges( node ); outEdge != endOutEdges; ++outEdge ) { const _ContractorEdgeData& outData = _graph->GetEdgeData( outEdge ); - if ( !outData.forward ) + if ( !outData.forward ) { continue; + } const NodeID target = _graph->GetTarget( outEdge ); const int pathDistance = inData.distance + outData.distance; maxDistance = std::max( maxDistance, pathDistance ); @@ -595,15 +614,16 @@ private: } } - if( Simulate ) + if( Simulate ) { _Dijkstra( maxDistance, numTargets, 1000, data, node ); - else + } else { _Dijkstra( maxDistance, numTargets, 2000, data, node ); - + } for ( _DynamicGraph::EdgeIterator outEdge = _graph->BeginEdges( node ), endOutEdges = 
_graph->EndEdges( node ); outEdge != endOutEdges; ++outEdge ) { const _ContractorEdgeData& outData = _graph->GetEdgeData( outEdge ); - if ( !outData.forward ) + if ( !outData.forward ) { continue; + } const NodeID target = _graph->GetTarget( outEdge ); const int pathDistance = inData.distance + outData.distance; const int distance = heap.GetKey( target ); @@ -643,8 +663,9 @@ private: found = true; break; } - if ( !found ) + if ( !found ) { insertedEdges[insertedEdgesSize++] = insertedEdges[i]; + } } insertedEdges.resize( insertedEdgesSize ); } diff --git a/Contractor/EdgeBasedGraphFactory.cpp b/Contractor/EdgeBasedGraphFactory.cpp index fce247d31..13f49ecb7 100644 --- a/Contractor/EdgeBasedGraphFactory.cpp +++ b/Contractor/EdgeBasedGraphFactory.cpp @@ -89,11 +89,14 @@ EdgeBasedGraphFactory::EdgeBasedGraphFactory(int nodes, std::vector& outputEdgeList ) { - GUARANTEE(0 == outputEdgeList.size(), "Vector passed to EdgeBasedGraphFactory::GetEdgeBasedEdges(..) is not empty"); + BOOST_ASSERT_MSG( + 0 == outputEdgeList.size(), + "Vector is not empty" + ); edgeBasedEdges.swap(outputEdgeList); } -void EdgeBasedGraphFactory::GetEdgeBasedNodes( DeallocatingVector< EdgeBasedNode> & nodes) { +void EdgeBasedGraphFactory::GetEdgeBasedNodes( std::vector & nodes) { #ifndef NDEBUG BOOST_FOREACH(EdgeBasedNode & node, edgeBasedNodes){ assert(node.lat1 != INT_MAX); assert(node.lon1 != INT_MAX); @@ -282,7 +285,7 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename, lua_State // turnInstruction |= TurnInstructions.AccessRestrictionFlag; // } distance += penalty; - + //distance += heightPenalty; //distance += ComputeTurnPenalty(u, v, w); @@ -328,7 +331,7 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename, lua_State TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w, unsigned& penalty, lua_State *myLuaState) const { const double angle = GetAngleBetweenTwoEdges(inputNodeInfoList[u], 
inputNodeInfoList[v], inputNodeInfoList[w]); - + if( speedProfile.has_turn_penalty_function ) { try { //call lua profile to compute turn penalty @@ -340,7 +343,7 @@ TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(const NodeID u, const NodeID } else { penalty = 0; } - + if(u == w) { return TurnInstructions.UTurn; } diff --git a/Contractor/EdgeBasedGraphFactory.h b/Contractor/EdgeBasedGraphFactory.h index e751a7169..c51e4d2ea 100644 --- a/Contractor/EdgeBasedGraphFactory.h +++ b/Contractor/EdgeBasedGraphFactory.h @@ -25,20 +25,6 @@ #ifndef EDGEBASEDGRAPHFACTORY_H_ #define EDGEBASEDGRAPHFACTORY_H_ -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - #include "../typedefs.h" #include "../DataStructures/DeallocatingVector.h" #include "../DataStructures/DynamicGraph.h" @@ -50,14 +36,22 @@ #include "../DataStructures/Percent.h" #include "../DataStructures/TurnInstructions.h" #include "../Util/BaseConfiguration.h" +#include "../Util/LuaUtil.h" -extern "C" { -#include -#include -#include -} -#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include class EdgeBasedGraphFactory : boost::noncopyable { public: @@ -65,9 +59,23 @@ public: bool operator<(const EdgeBasedNode & other) const { return other.id < id; } + bool operator==(const EdgeBasedNode & other) const { return id == other.id; } + + inline _Coordinate Centroid() const { + _Coordinate centroid; + //The coordinates of the midpoint are given by: + //x = (x1 + x2) /2 and y = (y1 + y2) /2. 
+ centroid.lon = (std::min(lon1, lon2) + std::max(lon1, lon2))/2; + centroid.lat = (std::min(lat1, lat2) + std::max(lat1, lat2))/2; + return centroid; + } + + inline bool isIgnored() const { + return ignoreInGrid; + } NodeID id; int lat1; int lat2; @@ -127,7 +135,7 @@ private: RestrictionMap _restrictionMap; DeallocatingVector edgeBasedEdges; - DeallocatingVector edgeBasedNodes; + std::vector edgeBasedNodes; NodeID CheckForEmanatingIsOnlyTurn(const NodeID u, const NodeID v) const; bool CheckIfTurnIsRestricted(const NodeID u, const NodeID v, const NodeID w) const; @@ -145,7 +153,7 @@ public: void Run(const char * originalEdgeDataFilename, lua_State *myLuaState); void GetEdgeBasedEdges( DeallocatingVector< EdgeBasedEdge >& edges ); - void GetEdgeBasedNodes( DeallocatingVector< EdgeBasedNode> & nodes); + void GetEdgeBasedNodes( std::vector< EdgeBasedNode> & nodes); void GetOriginalEdgeData( std::vector< OriginalEdgeData> & originalEdgeData); TurnInstruction AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w, unsigned& penalty, lua_State *myLuaState) const; unsigned GetNumberOfNodes() const; diff --git a/Contractor/TemporaryStorage.h b/Contractor/TemporaryStorage.h index 69b35e701..e1899817b 100644 --- a/Contractor/TemporaryStorage.h +++ b/Contractor/TemporaryStorage.h @@ -33,8 +33,8 @@ //This is one big workaround for latest boost renaming woes. -#ifndef BOOST_FILESYSTEM_VERSION -#warning Boost Installation with Filesystem3 (>=1.44) is required, activating workaround +#if BOOST_FILESYSTEM_VERSION < 3 +#warning Boost Installation with Filesystem3 missing, activating workaround #include namespace boost { namespace filesystem { @@ -54,8 +54,9 @@ inline path unique_path(const path&) { #endif +#ifndef BOOST_FILESYSTEM_VERSION #define BOOST_FILESYSTEM_VERSION 3 - +#endif /** * This class implements a singleton file storage for temporary data. 
* temporary slots can be accessed by other objects through an int @@ -89,8 +90,6 @@ private: } void abort(boost::filesystem::filesystem_error& e); - ; - struct StreamData { bool writeMode; boost::filesystem::path pathToTemporaryFile; diff --git a/DataStructures/Coordinate.h b/DataStructures/Coordinate.h index c1d9c4167..bc6328991 100644 --- a/DataStructures/Coordinate.h +++ b/DataStructures/Coordinate.h @@ -21,7 +21,10 @@ or see http://www.gnu.org/licenses/agpl.txt. #ifndef COORDINATE_H_ #define COORDINATE_H_ +#include +#include #include + #include struct _Coordinate { @@ -102,5 +105,4 @@ inline double ApproximateDistanceByEuclid(const _Coordinate &c1, const _Coordina return d; } - #endif /* COORDINATE_H_ */ diff --git a/DataStructures/DynamicGraph.h b/DataStructures/DynamicGraph.h index 6730babdf..67db0650a 100644 --- a/DataStructures/DynamicGraph.h +++ b/DataStructures/DynamicGraph.h @@ -21,18 +21,21 @@ or see http://www.gnu.org/licenses/agpl.txt. #ifndef DYNAMICGRAPH_H_INCLUDED #define DYNAMICGRAPH_H_INCLUDED -#include +#include "../DataStructures/DeallocatingVector.h" + +#include +#include + #include #include - -#include "../DataStructures/DeallocatingVector.h" +#include template< typename EdgeDataT> class DynamicGraph { public: typedef EdgeDataT EdgeData; - typedef unsigned NodeIterator; - typedef unsigned EdgeIterator; + typedef uint32_t NodeIterator; + typedef uint32_t EdgeIterator; class InputEdge { public: @@ -47,16 +50,16 @@ class DynamicGraph { }; //Constructs an empty graph with a given number of nodes. 
- DynamicGraph( int nodes ) : m_numNodes(nodes), m_numEdges(0) { + DynamicGraph( int32_t nodes ) : m_numNodes(nodes), m_numEdges(0) { m_nodes.reserve( m_numNodes ); m_nodes.resize( m_numNodes ); m_edges.reserve( m_numNodes * 1.1 ); m_edges.resize( m_numNodes ); } + template - DynamicGraph( const int nodes, const ContainerT &graph ) - { + DynamicGraph( const int32_t nodes, const ContainerT &graph ) { m_numNodes = nodes; m_numEdges = ( EdgeIterator ) graph.size(); m_nodes.reserve( m_numNodes +1); @@ -80,7 +83,10 @@ class DynamicGraph { for ( EdgeIterator i = m_nodes[node].firstEdge, e = m_nodes[node].firstEdge + m_nodes[node].edges; i != e; ++i ) { m_edges[i].target = graph[edge].target; m_edges[i].data = graph[edge].data; - GUARANTEE(graph[edge].data.distance > 0, "edge: " << edge << "(" << graph[edge].source << "," << graph[edge].target << ")=" << graph[edge].data.distance); + BOOST_ASSERT_MSG( + graph[edge].data.distance > 0, + "edge distance invalid" + ); ++edge; } } @@ -88,17 +94,15 @@ class DynamicGraph { ~DynamicGraph(){ } - unsigned GetNumberOfNodes() const - { + uint32_t GetNumberOfNodes() const { return m_numNodes; } - unsigned GetNumberOfEdges() const - { + uint32_t GetNumberOfEdges() const { return m_numEdges; } - unsigned GetOutDegree( const NodeIterator n ) const { + uint32_t GetOutDegree( const NodeIterator n ) const { return m_nodes[n].edges; } @@ -115,7 +119,6 @@ class DynamicGraph { } EdgeIterator BeginEdges( const NodeIterator n ) const { - //assert( EndEdges( n ) - EdgeIterator( _nodes[n].firstEdge ) <= 100 ); return EdgeIterator( m_nodes[n].firstEdge ); } @@ -133,7 +136,7 @@ class DynamicGraph { m_edges[node.firstEdge] = m_edges[node.firstEdge + node.edges]; } else { EdgeIterator newFirstEdge = ( EdgeIterator ) m_edges.size(); - unsigned newSize = node.edges * 1.1 + 2; + uint32_t newSize = node.edges * 1.1 + 2; EdgeIterator requiredCapacity = newSize + m_edges.size(); EdgeIterator oldCapacity = m_edges.capacity(); if ( requiredCapacity >= 
oldCapacity ) { @@ -162,15 +165,15 @@ class DynamicGraph { Node &node = m_nodes[source]; --m_numEdges; --node.edges; - const unsigned last = node.firstEdge + node.edges; + const uint32_t last = node.firstEdge + node.edges; //swap with last edge m_edges[e] = m_edges[last]; makeDummy( last ); } //removes all edges (source,target) - int DeleteEdgesTo( const NodeIterator source, const NodeIterator target ) { - int deleted = 0; + int32_t DeleteEdgesTo( const NodeIterator source, const NodeIterator target ) { + int32_t deleted = 0; for ( EdgeIterator i = BeginEdges( source ), iend = EndEdges( source ); i < iend - deleted; ++i ) { if ( m_edges[i].target == target ) { do { @@ -200,11 +203,11 @@ class DynamicGraph { protected: - bool isDummy( EdgeIterator edge ) const { + bool isDummy( const EdgeIterator edge ) const { return m_edges[edge].target == (std::numeric_limits< NodeIterator >::max)(); } - void makeDummy( EdgeIterator edge ) { + void makeDummy( const EdgeIterator edge ) { m_edges[edge].target = (std::numeric_limits< NodeIterator >::max)(); } @@ -212,7 +215,7 @@ class DynamicGraph { //index of the first edge EdgeIterator firstEdge; //amount of edges - unsigned edges; + uint32_t edges; }; struct Edge { diff --git a/DataStructures/HilbertValue.h b/DataStructures/HilbertValue.h new file mode 100644 index 000000000..05e2bb15f --- /dev/null +++ b/DataStructures/HilbertValue.h @@ -0,0 +1,87 @@ +/* + open source routing machine + Copyright (C) Dennis Luxen, others 2010 + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU AFFERO General Public License as published by +the Free Software Foundation; either version 3 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU Affero General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +or see http://www.gnu.org/licenses/agpl.txt. + */ + +#ifndef HILBERTVALUE_H_ +#define HILBERTVALUE_H_ + +#include +#include + +// computes a 64 bit value that corresponds to the hilbert space filling curve + +class HilbertCode : boost::noncopyable { +public: + static uint64_t GetHilbertNumberForCoordinate( + const _Coordinate & current_coordinate) { + unsigned location[2]; + location[0] = current_coordinate.lat+( 90*100000); + location[1] = current_coordinate.lon+(180*100000); + + TransposeCoordinate(location); + const uint64_t result = BitInterleaving(location[0], location[1]); + return result; + } +private: + static inline uint64_t BitInterleaving(const uint32_t a, const uint32_t b) { + uint64_t result = 0; + for(int8_t index = 31; index >= 0; --index){ + result |= (a >> index) & 1; + result <<= 1; + result |= (b >> index) & 1; + if(0 != index){ + result <<= 1; + } + } + return result; + } + + static inline void TransposeCoordinate( uint32_t * X) { + uint32_t M = 1 << (32-1), P, Q, t; + int i; + // Inverse undo + for( Q = M; Q > 1; Q >>= 1 ) { + P=Q-1; + for( i = 0; i < 2; ++i ) { + if( X[i] & Q ) { + X[0] ^= P; // invert + } else { + t = (X[0]^X[i]) & P; + X[0] ^= t; + X[i] ^= t; + } + } // exchange + } + // Gray encode + for( i = 1; i < 2; ++i ) { + X[i] ^= X[i-1]; + } + t=0; + for( Q = M; Q > 1; Q >>= 1 ) { + if( X[2-1] & Q ) { + t ^= Q-1; + } + } //check if this for loop is wrong + for( i = 0; i < 2; ++i ) { + X[i] ^= t; + } + } +}; + +#endif /* HILBERTVALUE_H_ */ diff --git a/DataStructures/NodeInformationHelpDesk.h b/DataStructures/NodeInformationHelpDesk.h index 029d30def..f13c9ad34 100644 --- a/DataStructures/NodeInformationHelpDesk.h +++ b/DataStructures/NodeInformationHelpDesk.h @@ -21,34 +21,49 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#ifndef NODEINFORMATIONHELPDESK_H_ #define NODEINFORMATIONHELPDESK_H_ +#include "NodeCoords.h" +#include "PhantomNodes.h" +#include "QueryEdge.h" +#include "StaticRTree.h" +#include "../Contractor/EdgeBasedGraphFactory.h" +#include "../typedefs.h" + +#include +#include + #include + #include #include -#include - -#include "../typedefs.h" -#include "../DataStructures/QueryEdge.h" -#include "NNGrid.h" -#include "PhantomNodes.h" -#include "NodeCoords.h" +typedef EdgeBasedGraphFactory::EdgeBasedNode RTreeLeaf; class NodeInformationHelpDesk : boost::noncopyable{ public: - NodeInformationHelpDesk(const char* ramIndexInput, const char* fileIndexInput, const unsigned _numberOfNodes, const unsigned crc) : numberOfNodes(_numberOfNodes), checkSum(crc) { - readOnlyGrid = new ReadOnlyGrid(ramIndexInput,fileIndexInput); - assert(0 == coordinateVector.size()); + NodeInformationHelpDesk( + const char* ramIndexInput, + const char* fileIndexInput, + const unsigned number_of_nodes, + const unsigned crc) : number_of_nodes(number_of_nodes), checkSum(crc) { + read_only_rtree = new StaticRTree( + ramIndexInput, + fileIndexInput + ); + BOOST_ASSERT_MSG( + 0 == coordinateVector.size(), + "Coordinate vector not empty" + ); } //Todo: Shared memory mechanism -// NodeInformationHelpDesk(const char* ramIndexInput, const char* fileIndexInput, const unsigned crc) : checkSum(crc) { -// readOnlyGrid = new ReadOnlyGrid(ramIndexInput,fileIndexInput); -// } - ~NodeInformationHelpDesk() { - delete readOnlyGrid; + delete read_only_rtree; } - void initNNGrid(std::ifstream& nodesInstream, std::ifstream& edgesInStream) { + + void initNNGrid( + std::ifstream& nodesInstream, + std::ifstream& edgesInStream + ) { DEBUG("Loading node data"); NodeInfo b; while(!nodesInstream.eof()) { @@ -68,20 +83,15 @@ public: OriginalEdgeData deserialized_originalEdgeData; for(unsigned i = 0; i < numberOfOrigEdges; ++i) { edgesInStream.read((char*)&(deserialized_originalEdgeData), sizeof(OriginalEdgeData)); - 
origEdgeData_viaNode[i] = deserialized_originalEdgeData.viaNode; - origEdgeData_nameID[i] = deserialized_originalEdgeData.nameID; + origEdgeData_viaNode[i] = deserialized_originalEdgeData.viaNode; + origEdgeData_nameID[i] = deserialized_originalEdgeData.nameID; origEdgeData_turnInstruction[i] = deserialized_originalEdgeData.turnInstruction; } edgesInStream.close(); DEBUG("Loaded " << numberOfOrigEdges << " orig edges"); DEBUG("Opening NN indices"); - readOnlyGrid->OpenIndexFiles(); } -// void initNNGrid() { -// readOnlyGrid->OpenIndexFiles(); -// } - inline int getLatitudeOfNode(const unsigned id) const { const NodeID node = origEdgeData_viaNode.at(id); return coordinateVector.at(node).lat; @@ -100,24 +110,36 @@ public: return origEdgeData_turnInstruction.at(id); } - inline NodeID getNumberOfNodes() const { return numberOfNodes; } - inline NodeID getNumberOfNodes2() const { return coordinateVector.size(); } + inline NodeID getNumberOfNodes() const { + return number_of_nodes; + } - inline bool FindNearestNodeCoordForLatLon(const _Coordinate& coord, _Coordinate& result) const { - return readOnlyGrid->FindNearestCoordinateOnEdgeInNodeBasedGraph(coord, result); - } + inline NodeID getNumberOfNodes2() const { + return coordinateVector.size(); + } - inline bool FindPhantomNodeForCoordinate( const _Coordinate & location, PhantomNode & resultNode, const unsigned zoomLevel) { - return readOnlyGrid->FindPhantomNodeForCoordinate(location, resultNode, zoomLevel); - } + inline bool FindNearestNodeCoordForLatLon( + const _Coordinate& input_coordinate, + _Coordinate& result, + const unsigned zoom_level = 18 + ) const { + PhantomNode resulting_phantom_node; + bool foundNode = FindPhantomNodeForCoordinate(input_coordinate, resulting_phantom_node, zoom_level); + result = resulting_phantom_node.location; + return foundNode; + } - inline void FindRoutingStarts(const _Coordinate &start, const _Coordinate &target, PhantomNodes & phantomNodes, const unsigned zoomLevel) const { - 
readOnlyGrid->FindRoutingStarts(start, target, phantomNodes, zoomLevel); - } - - inline void FindNearestPointOnEdge(const _Coordinate & input, _Coordinate& output){ - readOnlyGrid->FindNearestPointOnEdge(input, output); - } + inline bool FindPhantomNodeForCoordinate( + const _Coordinate & input_coordinate, + PhantomNode & resulting_phantom_node, + const unsigned zoom_level + ) const { + return read_only_rtree->FindPhantomNodeForCoordinate( + input_coordinate, + resulting_phantom_node, + zoom_level + ); + } inline unsigned GetCheckSum() const { return checkSum; @@ -129,8 +151,8 @@ private: std::vector origEdgeData_nameID; std::vector origEdgeData_turnInstruction; - ReadOnlyGrid * readOnlyGrid; - const unsigned numberOfNodes; + StaticRTree * read_only_rtree; + const unsigned number_of_nodes; const unsigned checkSum; }; diff --git a/DataStructures/SearchEngine.cpp b/DataStructures/SearchEngine.cpp new file mode 100644 index 000000000..2a538a7ac --- /dev/null +++ b/DataStructures/SearchEngine.cpp @@ -0,0 +1,89 @@ +/* + open source routing machine + Copyright (C) Dennis Luxen, others 2010 + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU AFFERO General Public License as published by +the Free Software Foundation; either version 3 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +or see http://www.gnu.org/licenses/agpl.txt. 
+ */ + +#include "SearchEngine.h" + +SearchEngine::SearchEngine( + QueryGraph * g, + NodeInformationHelpDesk * nh, + std::vector & n + ) : + _queryData(g, nh, n), + shortestPath(_queryData), + alternativePaths(_queryData) + {} + SearchEngine::~SearchEngine() {} + +void SearchEngine::GetCoordinatesForNodeID( + NodeID id, + _Coordinate& result + ) const { + result.lat = _queryData.nodeHelpDesk->getLatitudeOfNode(id); + result.lon = _queryData.nodeHelpDesk->getLongitudeOfNode(id); +} + +void SearchEngine::FindPhantomNodeForCoordinate( + const _Coordinate & location, + PhantomNode & result, + const unsigned zoomLevel + ) const { + _queryData.nodeHelpDesk->FindPhantomNodeForCoordinate( + location, + result, zoomLevel + ); +} + +NodeID SearchEngine::GetNameIDForOriginDestinationNodeID( + const NodeID s, + const NodeID t + ) const { + if(s == t){ + return 0; + } + EdgeID e = _queryData.graph->FindEdge(s, t); + if(e == UINT_MAX) { + e = _queryData.graph->FindEdge( t, s ); + } + if(UINT_MAX == e) { + return 0; + } + assert(e != UINT_MAX); + const QueryEdge::EdgeData ed = _queryData.graph->GetEdgeData(e); + return ed.id; +} + +std::string SearchEngine::GetEscapedNameForNameID(const unsigned nameID) const { + bool is_name_invalid = (nameID >= _queryData.names.size() || nameID == 0); + if (is_name_invalid) { + return std::string(""); + } + + return HTMLEntitize(_queryData.names.at(nameID)); +} + +SearchEngineHeapPtr SearchEngineData::forwardHeap; +SearchEngineHeapPtr SearchEngineData::backwardHeap; + +SearchEngineHeapPtr SearchEngineData::forwardHeap2; +SearchEngineHeapPtr SearchEngineData::backwardHeap2; + +SearchEngineHeapPtr SearchEngineData::forwardHeap3; +SearchEngineHeapPtr SearchEngineData::backwardHeap3; + diff --git a/DataStructures/SearchEngine.h b/DataStructures/SearchEngine.h index 505d3f905..64d273bb0 100644 --- a/DataStructures/SearchEngine.h +++ b/DataStructures/SearchEngine.h @@ -21,152 +21,48 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#ifndef SEARCHENGINE_H_ #define SEARCHENGINE_H_ -#include -#include -#include "SimpleStack.h" - -#include - -#include "BinaryHeap.h" +#include "Coordinate.h" #include "NodeInformationHelpDesk.h" #include "PhantomNodes.h" +#include "QueryEdge.h" +#include "SearchEngineData.h" #include "../RoutingAlgorithms/AlternativePathRouting.h" -#include "../RoutingAlgorithms/BasicRoutingInterface.h" #include "../RoutingAlgorithms/ShortestPathRouting.h" #include "../Util/StringUtil.h" #include "../typedefs.h" -struct _HeapData { - NodeID parent; - _HeapData( NodeID p ) : parent(p) { } -}; +#include +#include +#include -typedef BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage > QueryHeapType; -typedef boost::thread_specific_ptr SearchEngineHeapPtr; - -template -struct SearchEngineData { - typedef GraphT Graph; - typedef QueryHeapType QueryHeap; - SearchEngineData(GraphT * g, NodeInformationHelpDesk * nh, std::vector & n) :graph(g), nodeHelpDesk(nh), names(n) {} - const GraphT * graph; - NodeInformationHelpDesk * nodeHelpDesk; - std::vector & names; - static SearchEngineHeapPtr forwardHeap; - static SearchEngineHeapPtr backwardHeap; - static SearchEngineHeapPtr forwardHeap2; - static SearchEngineHeapPtr backwardHeap2; - static SearchEngineHeapPtr forwardHeap3; - static SearchEngineHeapPtr backwardHeap3; - - inline void InitializeOrClearFirstThreadLocalStorage() { - if(!forwardHeap.get()) { - forwardHeap.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); - } - else - forwardHeap->Clear(); - - if(!backwardHeap.get()) { - backwardHeap.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); - } - else - backwardHeap->Clear(); - } - - inline void InitializeOrClearSecondThreadLocalStorage() { - if(!forwardHeap2.get()) { - forwardHeap2.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); - } - else - forwardHeap2->Clear(); - - if(!backwardHeap2.get()) { - backwardHeap2.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); - } - else - backwardHeap2->Clear(); - } 
- - inline void InitializeOrClearThirdThreadLocalStorage() { - if(!forwardHeap3.get()) { - forwardHeap3.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); - } - else - forwardHeap3->Clear(); - - if(!backwardHeap3.get()) { - backwardHeap3.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); - } - else - backwardHeap3->Clear(); - } -}; - -template class SearchEngine { private: - typedef SearchEngineData SearchEngineDataT; - SearchEngineDataT _queryData; + SearchEngineData _queryData; - inline double absDouble(double input) { if(input < 0) return input*(-1); else return input;} public: - ShortestPathRouting shortestPath; - AlternativeRouting alternativePaths; + ShortestPathRouting shortestPath; + AlternativeRouting alternativePaths; - SearchEngine(GraphT * g, NodeInformationHelpDesk * nh, std::vector & n) : - _queryData(g, nh, n), - shortestPath(_queryData), - alternativePaths(_queryData) - {} - ~SearchEngine() {} + SearchEngine( + QueryGraph * g, + NodeInformationHelpDesk * nh, + std::vector & n + ); + ~SearchEngine(); - inline void GetCoordinatesForNodeID(NodeID id, _Coordinate& result) const { - result.lat = _queryData.nodeHelpDesk->getLatitudeOfNode(id); - result.lon = _queryData.nodeHelpDesk->getLongitudeOfNode(id); - } + void GetCoordinatesForNodeID(NodeID id, _Coordinate& result) const; - inline void FindRoutingStarts(const _Coordinate & start, const _Coordinate & target, PhantomNodes & routingStarts) const { - _queryData.nodeHelpDesk->FindRoutingStarts(start, target, routingStarts); - } + void FindPhantomNodeForCoordinate( + const _Coordinate & location, + PhantomNode & result, + unsigned zoomLevel + ) const; - inline void FindPhantomNodeForCoordinate(const _Coordinate & location, PhantomNode & result, unsigned zoomLevel) const { - _queryData.nodeHelpDesk->FindPhantomNodeForCoordinate(location, result, zoomLevel); - } - - inline NodeID GetNameIDForOriginDestinationNodeID(const NodeID s, const NodeID t) const { - if(s == t) - return 0; - - EdgeID e = 
_queryData.graph->FindEdge(s, t); - if(e == UINT_MAX) - e = _queryData.graph->FindEdge( t, s ); - if(UINT_MAX == e) { - return 0; - } - assert(e != UINT_MAX); - const EdgeData ed = _queryData.graph->GetEdgeData(e); - return ed.via; - } - - inline std::string GetEscapedNameForNameID(const unsigned nameID) const { - return ((nameID >= _queryData.names.size() || nameID == 0) ? std::string("") : HTMLEntitize(_queryData.names.at(nameID))); - } - - inline std::string GetEscapedNameForEdgeBasedEdgeID(const unsigned edgeID) const { - const unsigned nameID = _queryData.graph->GetEdgeData(edgeID).nameID1; - return GetEscapedNameForNameID(nameID); - } + NodeID GetNameIDForOriginDestinationNodeID( + const NodeID s, const NodeID t) const; + std::string GetEscapedNameForNameID(const unsigned nameID) const; }; -template SearchEngineHeapPtr SearchEngineData::forwardHeap; -template SearchEngineHeapPtr SearchEngineData::backwardHeap; - -template SearchEngineHeapPtr SearchEngineData::forwardHeap2; -template SearchEngineHeapPtr SearchEngineData::backwardHeap2; - -template SearchEngineHeapPtr SearchEngineData::forwardHeap3; -template SearchEngineHeapPtr SearchEngineData::backwardHeap3; - #endif /* SEARCHENGINE_H_ */ diff --git a/DataStructures/SearchEngineData.cpp b/DataStructures/SearchEngineData.cpp new file mode 100644 index 000000000..77492f69e --- /dev/null +++ b/DataStructures/SearchEngineData.cpp @@ -0,0 +1,60 @@ +/* + open source routing machine + Copyright (C) Dennis Luxen, others 2010 + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU AFFERO General Public License as published by +the Free Software Foundation; either version 3 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU Affero General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +or see http://www.gnu.org/licenses/agpl.txt. + */ + +#include "SearchEngineData.h" + +void SearchEngineData::InitializeOrClearFirstThreadLocalStorage() { + if(!forwardHeap.get()) { + forwardHeap.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); + } else { + forwardHeap->Clear(); + } + if(!backwardHeap.get()) { + backwardHeap.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); + } else { + backwardHeap->Clear(); + } +} + +void SearchEngineData::InitializeOrClearSecondThreadLocalStorage() { + if(!forwardHeap2.get()) { + forwardHeap2.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); + } else { + forwardHeap2->Clear(); + } + if(!backwardHeap2.get()) { + backwardHeap2.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); + } else { + backwardHeap2->Clear(); + } +} + +void SearchEngineData::InitializeOrClearThirdThreadLocalStorage() { + if(!forwardHeap3.get()) { + forwardHeap3.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); + } else { + forwardHeap3->Clear(); + } + if(!backwardHeap3.get()) { + backwardHeap3.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); + } else { + backwardHeap3->Clear(); + } +} diff --git a/DataStructures/SearchEngineData.h b/DataStructures/SearchEngineData.h new file mode 100644 index 000000000..f9a2623a0 --- /dev/null +++ b/DataStructures/SearchEngineData.h @@ -0,0 +1,60 @@ +/* + open source routing machine + Copyright (C) Dennis Luxen, others 2010 + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU AFFERO General Public License as published by +the Free Software Foundation; either version 3 of the License, or +any later version. 
+ +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +or see http://www.gnu.org/licenses/agpl.txt. + */ + +#include "BinaryHeap.h" +#include "QueryEdge.h" +#include "NodeInformationHelpDesk.h" +#include "StaticGraph.h" + +#include "../typedefs.h" + +#include + +#include +#include + +struct _HeapData { + NodeID parent; + _HeapData( NodeID p ) : parent(p) { } +}; +typedef StaticGraph QueryGraph; +typedef BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage > QueryHeapType; +typedef boost::thread_specific_ptr SearchEngineHeapPtr; + +struct SearchEngineData { + typedef QueryGraph Graph; + typedef QueryHeapType QueryHeap; + SearchEngineData(QueryGraph * g, NodeInformationHelpDesk * nh, std::vector & n) :graph(g), nodeHelpDesk(nh), names(n) {} + const QueryGraph * graph; + NodeInformationHelpDesk * nodeHelpDesk; + std::vector & names; + static SearchEngineHeapPtr forwardHeap; + static SearchEngineHeapPtr backwardHeap; + static SearchEngineHeapPtr forwardHeap2; + static SearchEngineHeapPtr backwardHeap2; + static SearchEngineHeapPtr forwardHeap3; + static SearchEngineHeapPtr backwardHeap3; + + void InitializeOrClearFirstThreadLocalStorage(); + + void InitializeOrClearSecondThreadLocalStorage(); + + void InitializeOrClearThirdThreadLocalStorage(); +}; diff --git a/DataStructures/StaticRTree.h b/DataStructures/StaticRTree.h new file mode 100644 index 000000000..b85516f04 --- /dev/null +++ b/DataStructures/StaticRTree.h @@ -0,0 +1,909 @@ +/* + open source routing machine + Copyright (C) Dennis Luxen, others 2010 + +This program is free software; you can redistribute it and/or 
modify +it under the terms of the GNU AFFERO General Public License as published by +the Free Software Foundation; either version 3 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +or see http://www.gnu.org/licenses/agpl.txt. + */ + +#ifndef STATICRTREE_H_ +#define STATICRTREE_H_ + +#include "MercatorUtil.h" +#include "TimingUtil.h" +#include "Coordinate.h" +#include "PhantomNodes.h" +#include "DeallocatingVector.h" +#include "HilbertValue.h" +#include "../typedefs.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include + +//tuning parameters +const static uint32_t RTREE_BRANCHING_FACTOR = 50; +const static uint32_t RTREE_LEAF_NODE_SIZE = 1170; + +// Implements a static, i.e. 
packed, R-tree + +static boost::thread_specific_ptr thread_local_rtree_stream; + +template +class StaticRTree : boost::noncopyable { +private: + struct RectangleInt2D { + RectangleInt2D() : + min_lon(INT_MAX), + max_lon(INT_MIN), + min_lat(INT_MAX), + max_lat(INT_MIN) {} + + int32_t min_lon, max_lon; + int32_t min_lat, max_lat; + + inline void InitializeMBRectangle( + const DataT * objects, + const uint32_t element_count + ) { + for(uint32_t i = 0; i < element_count; ++i) { + min_lon = std::min( + min_lon, std::min(objects[i].lon1, objects[i].lon2) + ); + max_lon = std::max( + max_lon, std::max(objects[i].lon1, objects[i].lon2) + ); + + min_lat = std::min( + min_lat, std::min(objects[i].lat1, objects[i].lat2) + ); + max_lat = std::max( + max_lat, std::max(objects[i].lat1, objects[i].lat2) + ); + } + } + + inline void AugmentMBRectangle(const RectangleInt2D & other) { + min_lon = std::min(min_lon, other.min_lon); + max_lon = std::max(max_lon, other.max_lon); + min_lat = std::min(min_lat, other.min_lat); + max_lat = std::max(max_lat, other.max_lat); + } + + inline _Coordinate Centroid() const { + _Coordinate centroid; + //The coordinates of the midpoints are given by: + //x = (x1 + x2) /2 and y = (y1 + y2) /2. 
+ centroid.lon = (min_lon + max_lon)/2; + centroid.lat = (min_lat + max_lat)/2; + return centroid; + } + + inline bool Intersects(const RectangleInt2D & other) const { + _Coordinate upper_left (other.max_lat, other.min_lon); + _Coordinate upper_right(other.max_lat, other.max_lon); + _Coordinate lower_right(other.min_lat, other.max_lon); + _Coordinate lower_left (other.min_lat, other.min_lon); + + return ( + Contains(upper_left) + || Contains(upper_right) + || Contains(lower_right) + || Contains(lower_left) + ); + } + + inline double GetMinDist(const _Coordinate & location) const { + bool is_contained = Contains(location); + if (is_contained) { + return 0.0; + } + + double min_dist = DBL_MAX; + min_dist = std::min( + min_dist, + ApproximateDistance( + location.lat, + location.lon, + max_lat, + min_lon + ) + ); + min_dist = std::min( + min_dist, + ApproximateDistance( + location.lat, + location.lon, + max_lat, + max_lon + ) + ); + min_dist = std::min( + min_dist, + ApproximateDistance( + location.lat, + location.lon, + min_lat, + max_lon + ) + ); + min_dist = std::min( + min_dist, + ApproximateDistance( + location.lat, + location.lon, + min_lat, + min_lon + ) + ); + return min_dist; + } + + inline double GetMinMaxDist(const _Coordinate & location) const { + double min_max_dist = DBL_MAX; + //Get minmax distance to each of the four sides + _Coordinate upper_left (max_lat, min_lon); + _Coordinate upper_right(max_lat, max_lon); + _Coordinate lower_right(min_lat, max_lon); + _Coordinate lower_left (min_lat, min_lon); + + min_max_dist = std::min( + min_max_dist, + std::max( + ApproximateDistance(location, upper_left ), + ApproximateDistance(location, upper_right) + ) + ); + + min_max_dist = std::min( + min_max_dist, + std::max( + ApproximateDistance(location, upper_right), + ApproximateDistance(location, lower_right) + ) + ); + + min_max_dist = std::min( + min_max_dist, + std::max( + ApproximateDistance(location, lower_right), + ApproximateDistance(location, lower_left ) 
+ ) + ); + + min_max_dist = std::min( + min_max_dist, + std::max( + ApproximateDistance(location, lower_left ), + ApproximateDistance(location, upper_left ) + ) + ); + return min_max_dist; + } + + inline bool Contains(const _Coordinate & location) const { + bool lats_contained = + (location.lat > min_lat) && (location.lat < max_lat); + bool lons_contained = + (location.lon > min_lon) && (location.lon < max_lon); + return lats_contained && lons_contained; + } + + inline friend std::ostream & operator<< ( std::ostream & out, const RectangleInt2D & rect ) { + out << rect.min_lat/100000. << "," << rect.min_lon/100000. << " " << rect.max_lat/100000. << "," << rect.max_lon/100000.; + return out; + } + }; + + typedef RectangleInt2D RectangleT; + + struct WrappedInputElement { + explicit WrappedInputElement(const uint32_t _array_index, const uint64_t _hilbert_value) : + m_array_index(_array_index), m_hilbert_value(_hilbert_value) {} + WrappedInputElement() : m_array_index(UINT_MAX), m_hilbert_value(0) {} + + uint32_t m_array_index; + uint64_t m_hilbert_value; + + inline bool operator<(const WrappedInputElement & other) const { + return m_hilbert_value < other.m_hilbert_value; + } + }; + + struct LeafNode { + LeafNode() : object_count(0) {} + uint32_t object_count; + DataT objects[RTREE_LEAF_NODE_SIZE]; + }; + + struct TreeNode { + TreeNode() : child_count(0), child_is_on_disk(false) {} + RectangleT minimum_bounding_rectangle; + uint32_t child_count:31; + bool child_is_on_disk:1; + uint32_t children[RTREE_BRANCHING_FACTOR]; + }; + + struct QueryCandidate { + explicit QueryCandidate(const uint32_t n_id, const double dist) : node_id(n_id), min_dist(dist)/*, minmax_dist(DBL_MAX)*/ {} + QueryCandidate() : node_id(UINT_MAX), min_dist(DBL_MAX)/*, minmax_dist(DBL_MAX)*/ {} + uint32_t node_id; + double min_dist; + // double minmax_dist; + inline bool operator<(const QueryCandidate & other) const { + return min_dist < other.min_dist; + } + }; + + std::vector m_search_tree; + 
uint64_t m_element_count; + + std::string m_leaf_node_filename; +public: + //Construct a pack R-Tree from the input-list with Kamel-Faloutsos algorithm [1] + explicit StaticRTree(std::vector & input_data_vector, const char * tree_node_filename, const char * leaf_node_filename) : + m_leaf_node_filename(leaf_node_filename) { + m_element_count = input_data_vector.size(); + //remove elements that are flagged to be ignored +// boost::remove_erase_if(input_data_vector, boost::bind(&DataT::isIgnored, _1 )); + + INFO("constructing r-tree of " << m_element_count << " elements"); +// INFO("sizeof(LeafNode)=" << sizeof(LeafNode)); +// INFO("sizeof(TreeNode)=" << sizeof(TreeNode)); +// INFO("sizeof(WrappedInputElement)=" << sizeof(WrappedInputElement)); + double time1 = get_timestamp(); + std::vector input_wrapper_vector(input_data_vector.size()); + + //generate auxiliary vector of hilbert-values +#pragma omp parallel for schedule(guided) + for(uint64_t element_counter = 0; element_counter < m_element_count; ++element_counter) { + //INFO("ID: " << input_data_vector[element_counter].id); + input_wrapper_vector[element_counter].m_array_index = element_counter; + //Get Hilbert-Value for centroid in mercartor projection + DataT & current_element = input_data_vector[element_counter]; + _Coordinate current_centroid = current_element.Centroid(); + current_centroid.lat = 100000*lat2y(current_centroid.lat/100000.); + + uint64_t current_hilbert_value = HilbertCode::GetHilbertNumberForCoordinate(current_centroid); + input_wrapper_vector[element_counter].m_hilbert_value = current_hilbert_value; + + } + //INFO("finished wrapper setup"); + + //open leaf file + std::ofstream leaf_node_file(leaf_node_filename, std::ios::binary); + leaf_node_file.write((char*) &m_element_count, sizeof(uint64_t)); + + //sort the hilbert-value representatives + std::sort(input_wrapper_vector.begin(), input_wrapper_vector.end()); + // INFO("finished sorting"); + std::vector tree_nodes_in_level; + + //pack M 
elements into leaf node and write to leaf file + uint64_t processed_objects_count = 0; + while(processed_objects_count < m_element_count) { + + LeafNode current_leaf; + TreeNode current_node; + for(uint32_t current_element_index = 0; RTREE_LEAF_NODE_SIZE > current_element_index; ++current_element_index) { + if(m_element_count > (processed_objects_count + current_element_index)) { + // INFO("Checking element " << (processed_objects_count + current_element_index)); + uint32_t index_of_next_object = input_wrapper_vector[processed_objects_count + current_element_index].m_array_index; + current_leaf.objects[current_element_index] = input_data_vector[index_of_next_object]; + ++current_leaf.object_count; + } + } + + if(0 == processed_objects_count) { + for(uint32_t i = 0; i < current_leaf.object_count; ++i) { + //INFO("[" << i << "] id: " << current_leaf.objects[i].id << ", weight: " << current_leaf.objects[i].weight << ", " << current_leaf.objects[i].lat1/100000. << "," << current_leaf.objects[i].lon1/100000. << ";" << current_leaf.objects[i].lat2/100000. 
<< "," << current_leaf.objects[i].lon2/100000.); + } + } + + //generate tree node that resemble the objects in leaf and store it for next level + current_node.minimum_bounding_rectangle.InitializeMBRectangle(current_leaf.objects, current_leaf.object_count); + current_node.child_is_on_disk = true; + current_node.children[0] = tree_nodes_in_level.size(); + tree_nodes_in_level.push_back(current_node); + + //write leaf_node to leaf node file + leaf_node_file.write((char*)¤t_leaf, sizeof(current_leaf)); + processed_objects_count += current_leaf.object_count; + } + + // INFO("wrote " << processed_objects_count << " leaf objects"); + + //close leaf file + leaf_node_file.close(); + + uint32_t processing_level = 0; + while(1 < tree_nodes_in_level.size()) { + // INFO("processing " << (uint32_t)tree_nodes_in_level.size() << " tree nodes in level " << processing_level); + std::vector tree_nodes_in_next_level; + uint32_t processed_tree_nodes_in_level = 0; + while(processed_tree_nodes_in_level < tree_nodes_in_level.size()) { + TreeNode parent_node; + //pack RTREE_BRANCHING_FACTOR elements into tree_nodes each + for(uint32_t current_child_node_index = 0; RTREE_BRANCHING_FACTOR > current_child_node_index; ++current_child_node_index) { + if(processed_tree_nodes_in_level < tree_nodes_in_level.size()) { + TreeNode & current_child_node = tree_nodes_in_level[processed_tree_nodes_in_level]; + //add tree node to parent entry + parent_node.children[current_child_node_index] = m_search_tree.size(); + m_search_tree.push_back(current_child_node); + //augment MBR of parent + parent_node.minimum_bounding_rectangle.AugmentMBRectangle(current_child_node.minimum_bounding_rectangle); + //increase counters + ++parent_node.child_count; + ++processed_tree_nodes_in_level; + } + } + tree_nodes_in_next_level.push_back(parent_node); + // INFO("processed: " << processed_tree_nodes_in_level << ", generating " << (uint32_t)tree_nodes_in_next_level.size() << " parents"); + } + 
tree_nodes_in_level.swap(tree_nodes_in_next_level); + ++processing_level; + } + BOOST_ASSERT_MSG(1 == tree_nodes_in_level.size(), "tree broken, more than one root node"); + //last remaining entry is the root node; + // INFO("root node has " << (uint32_t)tree_nodes_in_level[0].child_count << " children"); + //store root node + m_search_tree.push_back(tree_nodes_in_level[0]); + + //reverse and renumber tree to have root at index 0 + std::reverse(m_search_tree.begin(), m_search_tree.end()); +#pragma omp parallel for schedule(guided) + for(uint32_t i = 0; i < m_search_tree.size(); ++i) { + TreeNode & current_tree_node = m_search_tree[i]; + for(uint32_t j = 0; j < current_tree_node.child_count; ++j) { + const uint32_t old_id = current_tree_node.children[j]; + const uint32_t new_id = m_search_tree.size() - old_id - 1; + current_tree_node.children[j] = new_id; + } + } + + //open tree file + std::ofstream tree_node_file(tree_node_filename, std::ios::binary); + uint32_t size_of_tree = m_search_tree.size(); + BOOST_ASSERT_MSG(0 < size_of_tree, "tree empty"); + tree_node_file.write((char *)&size_of_tree, sizeof(uint32_t)); + tree_node_file.write((char *)&m_search_tree[0], sizeof(TreeNode)*size_of_tree); + //close tree node file. 
+ tree_node_file.close(); + double time2 = get_timestamp(); +// INFO("written " << processed_objects_count << " leafs in " << sizeof(LeafNode)*(1+(unsigned)std::ceil(processed_objects_count/RTREE_LEAF_NODE_SIZE) )+sizeof(uint64_t) << " bytes"); +// INFO("written search tree of " << size_of_tree << " tree nodes in " << sizeof(TreeNode)*size_of_tree+sizeof(uint32_t) << " bytes"); + INFO("finished r-tree construction in " << (time2-time1) << " seconds"); + + //todo: test queries +/* INFO("first MBR:" << m_search_tree[0].minimum_bounding_rectangle); + + DataT result; + time1 = get_timestamp(); + bool found_nearest = NearestNeighbor(_Coordinate(50.191085,8.466479), result); + time2 = get_timestamp(); + INFO("found nearest element to (50.191085,8.466479): " << (found_nearest ? "yes" : "no") << " in " << (time2-time1) << "s at (" << result.lat1/100000. << "," << result.lon1/100000. << " " << result.lat2/100000. << "," << result.lon2/100000. << ")"); + time1 = get_timestamp(); + found_nearest = NearestNeighbor(_Coordinate(50.23979, 8.51882), result); + time2 = get_timestamp(); + INFO("found nearest element to (50.23979, 8.51882): " << (found_nearest ? "yes" : "no") << " in " << (time2-time1) << "s at (" << result.lat1/100000. << "," << result.lon1/100000. << " " << result.lat2/100000. << "," << result.lon2/100000. << ")"); + time1 = get_timestamp(); + found_nearest = NearestNeighbor(_Coordinate(49.0316,2.6937), result); + time2 = get_timestamp(); + INFO("found nearest element to (49.0316,2.6937): " << (found_nearest ? "yes" : "no") << " in " << (time2-time1) << "s at (" << result.lat1/100000. << "," << result.lon1/100000. << " " << result.lat2/100000. << "," << result.lon2/100000. 
<< ")"); +*/ + } + + //Read-only operation for queries + explicit StaticRTree( + const char * node_filename, + const char * leaf_filename + ) : m_leaf_node_filename(leaf_filename) { + //INFO("Loading nodes: " << node_filename); + //INFO("opening leafs: " << leaf_filename); + //open tree node file and load into RAM. + std::ifstream tree_node_file(node_filename, std::ios::binary); + uint32_t tree_size = 0; + tree_node_file.read((char*)&tree_size, sizeof(uint32_t)); + //INFO("reading " << tree_size << " tree nodes in " << (sizeof(TreeNode)*tree_size) << " bytes"); + m_search_tree.resize(tree_size); + tree_node_file.read((char*)&m_search_tree[0], sizeof(TreeNode)*tree_size); + tree_node_file.close(); + + //open leaf node file and store thread specific pointer + std::ifstream leaf_node_file(leaf_filename, std::ios::binary); + leaf_node_file.read((char*)&m_element_count, sizeof(uint64_t)); + leaf_node_file.close(); + + //INFO( tree_size << " nodes in search tree"); + //INFO( m_element_count << " elements in leafs"); + } +/* + inline void FindKNearestPhantomNodesForCoordinate( + const _Coordinate & location, + const unsigned zoom_level, + const unsigned candidate_count, + std::vector > & result_vector + ) const { + + bool ignore_tiny_components = (zoom_level <= 14); + DataT nearest_edge; + + uint32_t io_count = 0; + uint32_t explored_tree_nodes_count = 0; + INFO("searching for coordinate " << input_coordinate); + double min_dist = DBL_MAX; + double min_max_dist = DBL_MAX; + bool found_a_nearest_edge = false; + + _Coordinate nearest, current_start_coordinate, current_end_coordinate; + + //initialize queue with root element + std::priority_queue traversal_queue; + traversal_queue.push(QueryCandidate(0, m_search_tree[0].minimum_bounding_rectangle.GetMinDist(input_coordinate))); + BOOST_ASSERT_MSG(FLT_EPSILON > (0. 
- traversal_queue.top().min_dist), "Root element in NN Search has min dist != 0."); + + while(!traversal_queue.empty()) { + const QueryCandidate current_query_node = traversal_queue.top(); traversal_queue.pop(); + + ++explored_tree_nodes_count; + bool prune_downward = (current_query_node.min_dist >= min_max_dist); + bool prune_upward = (current_query_node.min_dist >= min_dist); + if( !prune_downward && !prune_upward ) { //downward pruning + TreeNode & current_tree_node = m_search_tree[current_query_node.node_id]; + if (current_tree_node.child_is_on_disk) { + LeafNode current_leaf_node; + LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); + ++io_count; + for(uint32_t i = 0; i < current_leaf_node.object_count; ++i) { + DataT & current_edge = current_leaf_node.objects[i]; + if(ignore_tiny_components && current_edge.belongsToTinyComponent) { + continue; + } + + double current_ratio = 0.; + double current_perpendicular_distance = ComputePerpendicularDistance( + input_coordinate, + _Coordinate(current_edge.lat1, current_edge.lon1), + _Coordinate(current_edge.lat2, current_edge.lon2), + nearest, + &current_ratio + ); + + if( + current_perpendicular_distance < min_dist + && !DoubleEpsilonCompare( + current_perpendicular_distance, + min_dist + ) + ) { //found a new minimum + min_dist = current_perpendicular_distance; + result_phantom_node.edgeBasedNode = current_edge.id; + result_phantom_node.nodeBasedEdgeNameID = current_edge.nameID; + result_phantom_node.weight1 = current_edge.weight; + result_phantom_node.weight2 = INT_MAX; + result_phantom_node.location = nearest; + current_start_coordinate.lat = current_edge.lat1; + current_start_coordinate.lon = current_edge.lon1; + current_end_coordinate.lat = current_edge.lat2; + current_end_coordinate.lon = current_edge.lon2; + nearest_edge = current_edge; + found_a_nearest_edge = true; + } else if( + DoubleEpsilonCompare(current_perpendicular_distance, min_dist) && + 1 == abs(current_edge.id - 
result_phantom_node.edgeBasedNode ) + && CoordinatesAreEquivalent( + current_start_coordinate, + _Coordinate( + current_edge.lat1, + current_edge.lon1 + ), + _Coordinate( + current_edge.lat2, + current_edge.lon2 + ), + current_end_coordinate + ) + ) { + result_phantom_node.edgeBasedNode = std::min(current_edge.id, result_phantom_node.edgeBasedNode); + result_phantom_node.weight2 = current_edge.weight; + } + } + } else { + //traverse children, prune if global mindist is smaller than local one + for (uint32_t i = 0; i < current_tree_node.child_count; ++i) { + const int32_t child_id = current_tree_node.children[i]; + TreeNode & child_tree_node = m_search_tree[child_id]; + RectangleT & child_rectangle = child_tree_node.minimum_bounding_rectangle; + const double current_min_dist = child_rectangle.GetMinDist(input_coordinate); + const double current_min_max_dist = child_rectangle.GetMinMaxDist(input_coordinate); + if( current_min_max_dist < min_max_dist ) { + min_max_dist = current_min_max_dist; + } + if (current_min_dist > min_max_dist) { + continue; + } + if (current_min_dist > min_dist) { //upward pruning + continue; + } + traversal_queue.push(QueryCandidate(child_id, current_min_dist)); + } + } + } + } + + const double ratio = (found_a_nearest_edge ? + std::min(1., ApproximateDistance(_Coordinate(nearest_edge.lat1, nearest_edge.lon1), + result_phantom_node.location)/ApproximateDistance(_Coordinate(nearest_edge.lat1, nearest_edge.lon1), _Coordinate(nearest_edge.lat2, nearest_edge.lon2)) + ) : 0 + ); + result_phantom_node.weight1 *= ratio; + if(INT_MAX != result_phantom_node.weight2) { + result_phantom_node.weight2 *= (1.-ratio); + } + result_phantom_node.ratio = ratio; + + //Hack to fix rounding errors and wandering via nodes. 
+ if(std::abs(input_coordinate.lon - result_phantom_node.location.lon) == 1) { + result_phantom_node.location.lon = input_coordinate.lon; + } + if(std::abs(input_coordinate.lat - result_phantom_node.location.lat) == 1) { + result_phantom_node.location.lat = input_coordinate.lat; + } + + INFO("mindist: " << min_dist << ", io's: " << io_count << ", nodes: " << explored_tree_nodes_count << ", loc: " << result_phantom_node.location << ", ratio: " << ratio << ", id: " << result_phantom_node.edgeBasedNode); + INFO("bidirected: " << (result_phantom_node.isBidirected() ? "yes" : "no") ); + return found_a_nearest_edge; + + } + + */ + bool FindPhantomNodeForCoordinate( + const _Coordinate & input_coordinate, + PhantomNode & result_phantom_node, + const unsigned zoom_level + ) { + + bool ignore_tiny_components = (zoom_level <= 14); + DataT nearest_edge; + + uint32_t io_count = 0; + uint32_t explored_tree_nodes_count = 0; + //INFO("searching for coordinate " << input_coordinate); + double min_dist = DBL_MAX; + double min_max_dist = DBL_MAX; + bool found_a_nearest_edge = false; + + _Coordinate nearest, current_start_coordinate, current_end_coordinate; + + //initialize queue with root element + std::priority_queue traversal_queue; + double current_min_dist = m_search_tree[0].minimum_bounding_rectangle.GetMinDist(input_coordinate); + traversal_queue.push( + QueryCandidate(0, current_min_dist) + ); + + BOOST_ASSERT_MSG( + FLT_EPSILON > (0. - traversal_queue.top().min_dist), + "Root element in NN Search has min dist != 0." 
+ ); + + while(!traversal_queue.empty()) { + const QueryCandidate current_query_node = traversal_queue.top(); traversal_queue.pop(); + + ++explored_tree_nodes_count; + bool prune_downward = (current_query_node.min_dist >= min_max_dist); + bool prune_upward = (current_query_node.min_dist >= min_dist); + if( !prune_downward && !prune_upward ) { //downward pruning + TreeNode & current_tree_node = m_search_tree[current_query_node.node_id]; + if (current_tree_node.child_is_on_disk) { + LeafNode current_leaf_node; + LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); + ++io_count; + //INFO("checking " << current_leaf_node.object_count << " elements"); + for(uint32_t i = 0; i < current_leaf_node.object_count; ++i) { + DataT & current_edge = current_leaf_node.objects[i]; + if(ignore_tiny_components && current_edge.belongsToTinyComponent) { + continue; + } + if(current_edge.isIgnored()) { + continue; + } + + double current_ratio = 0.; + double current_perpendicular_distance = ComputePerpendicularDistance( + input_coordinate, + _Coordinate(current_edge.lat1, current_edge.lon1), + _Coordinate(current_edge.lat2, current_edge.lon2), + nearest, + &current_ratio + ); + + //INFO("[" << current_edge.id << "] (" << current_edge.lat1/100000. << "," << current_edge.lon1/100000. << ")==(" << current_edge.lat2/100000. << "," << current_edge.lon2/100000. 
<< ") at distance " << current_perpendicular_distance << " min dist: " << min_dist + // << ", ratio " << current_ratio + // ); + + if( + current_perpendicular_distance < min_dist + && !DoubleEpsilonCompare( + current_perpendicular_distance, + min_dist + ) + ) { //found a new minimum + min_dist = current_perpendicular_distance; + result_phantom_node.edgeBasedNode = current_edge.id; + result_phantom_node.nodeBasedEdgeNameID = current_edge.nameID; + result_phantom_node.weight1 = current_edge.weight; + result_phantom_node.weight2 = INT_MAX; + result_phantom_node.location = nearest; + current_start_coordinate.lat = current_edge.lat1; + current_start_coordinate.lon = current_edge.lon1; + current_end_coordinate.lat = current_edge.lat2; + current_end_coordinate.lon = current_edge.lon2; + nearest_edge = current_edge; + found_a_nearest_edge = true; + } else if( + DoubleEpsilonCompare(current_perpendicular_distance, min_dist) && + 1 == abs(current_edge.id - result_phantom_node.edgeBasedNode ) + && CoordinatesAreEquivalent( + current_start_coordinate, + _Coordinate( + current_edge.lat1, + current_edge.lon1 + ), + _Coordinate( + current_edge.lat2, + current_edge.lon2 + ), + current_end_coordinate + ) + ) { + BOOST_ASSERT_MSG(current_edge.id != result_phantom_node.edgeBasedNode, "IDs not different"); + //INFO("found bidirected edge on nodes " << current_edge.id << " and " << result_phantom_node.edgeBasedNode); + result_phantom_node.weight2 = current_edge.weight; + if(current_edge.id < result_phantom_node.edgeBasedNode) { + result_phantom_node.edgeBasedNode = current_edge.id; + std::swap(result_phantom_node.weight1, result_phantom_node.weight2); + std::swap(current_end_coordinate, current_start_coordinate); + // INFO("case 2"); + } + //INFO("w1: " << result_phantom_node.weight1 << ", w2: " << result_phantom_node.weight2); + } + } + } else { + //traverse children, prune if global mindist is smaller than local one + for (uint32_t i = 0; i < current_tree_node.child_count; ++i) { + 
const int32_t child_id = current_tree_node.children[i]; + TreeNode & child_tree_node = m_search_tree[child_id]; + RectangleT & child_rectangle = child_tree_node.minimum_bounding_rectangle; + const double current_min_dist = child_rectangle.GetMinDist(input_coordinate); + const double current_min_max_dist = child_rectangle.GetMinMaxDist(input_coordinate); + if( current_min_max_dist < min_max_dist ) { + min_max_dist = current_min_max_dist; + } + if (current_min_dist > min_max_dist) { + continue; + } + if (current_min_dist > min_dist) { //upward pruning + continue; + } + traversal_queue.push(QueryCandidate(child_id, current_min_dist)); + } + } + } + } + + const double ratio = (found_a_nearest_edge ? + std::min(1., ApproximateDistance(current_start_coordinate, + result_phantom_node.location)/ApproximateDistance(current_start_coordinate, current_end_coordinate) + ) : 0 + ); + result_phantom_node.weight1 *= ratio; + if(INT_MAX != result_phantom_node.weight2) { + result_phantom_node.weight2 *= (1.-ratio); + } + result_phantom_node.ratio = ratio; + + //Hack to fix rounding errors and wandering via nodes. + if(std::abs(input_coordinate.lon - result_phantom_node.location.lon) == 1) { + result_phantom_node.location.lon = input_coordinate.lon; + } + if(std::abs(input_coordinate.lat - result_phantom_node.location.lat) == 1) { + result_phantom_node.location.lat = input_coordinate.lat; + } + +// INFO("start: (" << nearest_edge.lat1 << "," << nearest_edge.lon1 << "), end: (" << nearest_edge.lat2 << "," << nearest_edge.lon2 << ")" ); +// INFO("mindist: " << min_dist << ", io's: " << io_count << ", nodes: " << explored_tree_nodes_count << ", loc: " << result_phantom_node.location << ", ratio: " << ratio << ", id: " << result_phantom_node.edgeBasedNode); +// INFO("weight1: " << result_phantom_node.weight1 << ", weight2: " << result_phantom_node.weight2); +// INFO("bidirected: " << (result_phantom_node.isBidirected() ? 
"yes" : "no") ); +// INFO("NameID: " << result_phantom_node.nodeBasedEdgeNameID); + return found_a_nearest_edge; + + } +/* + //Nearest-Neighbor query with the Roussopoulos et al. algorithm [2] + inline bool NearestNeighbor(const _Coordinate & input_coordinate, DataT & result_element) { + uint32_t io_count = 0; + uint32_t explored_tree_nodes_count = 0; + INFO("searching for coordinate " << input_coordinate); + double min_dist = DBL_MAX; + double min_max_dist = DBL_MAX; + bool found_return_value = false; + + //initialize queue with root element + std::priority_queue traversal_queue; + traversal_queue.push(QueryCandidate(0, m_search_tree[0].minimum_bounding_rectangle.GetMinDist(input_coordinate))); + BOOST_ASSERT_MSG(FLT_EPSILON > (0. - traversal_queue.top().min_dist), "Root element in NN Search has min dist != 0."); + + while(!traversal_queue.empty()) { + const QueryCandidate current_query_node = traversal_queue.top(); traversal_queue.pop(); + + ++explored_tree_nodes_count; + + // INFO("popped node " << current_query_node.node_id << " at distance " << current_query_node.min_dist); + bool prune_downward = (current_query_node.min_dist >= min_max_dist); + bool prune_upward = (current_query_node.min_dist >= min_dist); + // INFO(" up prune: " << (prune_upward ? "y" : "n" )); + // INFO(" down prune: " << (prune_downward ? 
"y" : "n" )); + if( prune_downward || prune_upward ) { //downward pruning + // INFO(" pruned node " << current_query_node.node_id << " because " << current_query_node.min_dist << "<" << min_max_dist); + } else { + TreeNode & current_tree_node = m_search_tree[current_query_node.node_id]; + if (current_tree_node.child_is_on_disk) { + // INFO(" Fetching child from disk for id: " << current_query_node.node_id); + LeafNode current_leaf_node; + LoadLeafFromDisk(current_tree_node.children[0], current_leaf_node); + ++io_count; + double ratio = 0.; + _Coordinate nearest; + for(uint32_t i = 0; i < current_leaf_node.object_count; ++i) { + DataT & current_object = current_leaf_node.objects[i]; + double current_perpendicular_distance = ComputePerpendicularDistance( + input_coordinate, + _Coordinate(current_object.lat1, current_object.lon1), + _Coordinate(current_object.lat2, current_object.lon2), + nearest, + &ratio + ); + + if(current_perpendicular_distance < min_dist && !DoubleEpsilonCompare(current_perpendicular_distance, min_dist)) { //found a new minimum + min_dist = current_perpendicular_distance; + result_element = current_object; + found_return_value = true; + } + } + } else { + //traverse children, prune if global mindist is smaller than local one + // INFO(" Checking " << current_tree_node.child_count << " children of node " << current_query_node.node_id); + for (uint32_t i = 0; i < current_tree_node.child_count; ++i) { + const int32_t child_id = current_tree_node.children[i]; + TreeNode & child_tree_node = m_search_tree[child_id]; + RectangleT & child_rectangle = child_tree_node.minimum_bounding_rectangle; + const double current_min_dist = child_rectangle.GetMinDist(input_coordinate); + const double current_min_max_dist = child_rectangle.GetMinMaxDist(input_coordinate); + if( current_min_max_dist < min_max_dist ) { + min_max_dist = current_min_max_dist; + } + if (current_min_dist > min_max_dist) { + continue; + } + if (current_min_dist > min_dist) { //upward pruning 
+                    continue;
+                }
+                // INFO("    pushing node " << child_id << " at distance " << current_min_dist);
+                traversal_queue.push(QueryCandidate(child_id, current_min_dist));
+            }
+        }
+    }
+    }
+    INFO("mindist: " << min_dist << ", io's: " << io_count << ", touched nodes: " << explored_tree_nodes_count);
+    return found_return_value;
+    }
+ */
+private:
+    inline void LoadLeafFromDisk(const uint32_t leaf_id, LeafNode& result_node) {
+        if(!thread_local_rtree_stream.get() || !thread_local_rtree_stream->is_open()) {
+            thread_local_rtree_stream.reset(
+                new std::ifstream(
+                    m_leaf_node_filename.c_str(),
+                    std::ios::in | std::ios::binary
+                )
+            );
+        }
+        if(!thread_local_rtree_stream->good()) {
+            thread_local_rtree_stream->clear(std::ios::goodbit);
+            DEBUG("Resetting stale filestream");
+        }
+        uint64_t seek_pos = sizeof(uint64_t) + leaf_id*sizeof(LeafNode);
+        thread_local_rtree_stream->seekg(seek_pos);
+        thread_local_rtree_stream->read((char *)&result_node, sizeof(LeafNode));
+    }
+
+    inline double ComputePerpendicularDistance(
+            const _Coordinate& inputPoint,
+            const _Coordinate& source,
+            const _Coordinate& target,
+            _Coordinate& nearest, double *r) const {
+        const double x = static_cast<double>(inputPoint.lat);
+        const double y = static_cast<double>(inputPoint.lon);
+        const double a = static_cast<double>(source.lat);
+        const double b = static_cast<double>(source.lon);
+        const double c = static_cast<double>(target.lat);
+        const double d = static_cast<double>(target.lon);
+        double p,q,mX,nY;
+        if(fabs(a-c) > FLT_EPSILON){
+            const double m = (d-b)/(c-a); // slope
+            // Projection of (x,y) on line joining (a,b) and (c,d)
+            p = ((x + (m*y)) + (m*m*a - m*b))/(1. + m*m);
+            q = b + m*(p - a);
+        } else {
+            p = c;
+            q = y;
+        }
+        nY = (d*p - c*q)/(a*d - b*c);
+        mX = (p - nY*a)/c;// These values are actually n/m+n and m/m+n , we need
+                          // not calculate the explicit values of m an n as we
+                          // are just interested in the ratio
+        if(std::isnan(mX)) {
+            *r = (target == inputPoint) ? 1.
: 0.; + } else { + *r = mX; + } + if(*r<=0.){ + nearest.lat = source.lat; + nearest.lon = source.lon; + return ((b - y)*(b - y) + (a - x)*(a - x)); +// return std::sqrt(((b - y)*(b - y) + (a - x)*(a - x))); + } else if(*r >= 1.){ + nearest.lat = target.lat; + nearest.lon = target.lon; + return ((d - y)*(d - y) + (c - x)*(c - x)); +// return std::sqrt(((d - y)*(d - y) + (c - x)*(c - x))); + } + // point lies in between + nearest.lat = p; + nearest.lon = q; +// return std::sqrt((p-x)*(p-x) + (q-y)*(q-y)); + return (p-x)*(p-x) + (q-y)*(q-y); + } + + inline bool CoordinatesAreEquivalent(const _Coordinate & a, const _Coordinate & b, const _Coordinate & c, const _Coordinate & d) const { + return (a == b && c == d) || (a == c && b == d) || (a == d && b == c); + } + + inline bool DoubleEpsilonCompare(const double d1, const double d2) const { + return (std::fabs(d1 - d2) < FLT_EPSILON); + } + +}; + +//[1] "On Packing R-Trees"; I. Kamel, C. Faloutsos; 1993; DOI: 10.1145/170088.170403 +//[2] "Nearest Neighbor Queries", N. Roussopulos et al; 1995; DOI: 10.1145/223784.223794 + + +#endif /* STATICRTREE_H_ */ diff --git a/Descriptors/BaseDescriptor.h b/Descriptors/BaseDescriptor.h index 6a49bd81b..2ead8bdae 100644 --- a/Descriptors/BaseDescriptor.h +++ b/Descriptors/BaseDescriptor.h @@ -28,8 +28,9 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#include #include "../typedefs.h" -#include "../DataStructures/PhantomNodes.h" #include "../DataStructures/HashTable.h" +#include "../DataStructures/PhantomNodes.h" +#include "../DataStructures/SearchEngine.h" #include "../Util/StringUtil.h" #include "../Plugins/RawRouteData.h" @@ -42,13 +43,12 @@ struct _DescriptorConfig { unsigned short z; }; -template class BaseDescriptor { public: BaseDescriptor() { } //Maybe someone can explain the pure virtual destructor thing to me (dennis) virtual ~BaseDescriptor() { } - virtual void Run(http::Reply & reply, const RawRouteData &rawRoute, PhantomNodes &phantomNodes, SearchEngineT &sEngine) = 0; + virtual void Run(http::Reply & reply, const RawRouteData &rawRoute, PhantomNodes &phantomNodes, SearchEngine &sEngine) = 0; virtual void SetConfig(const _DescriptorConfig & config) = 0; }; diff --git a/Descriptors/DescriptionFactory.cpp b/Descriptors/DescriptionFactory.cpp index 46a1d5b82..b1f2f8538 100644 --- a/Descriptors/DescriptionFactory.cpp +++ b/Descriptors/DescriptionFactory.cpp @@ -82,7 +82,7 @@ void DescriptionFactory::AppendUnencodedPolylineString(std::string &output) { pc.printUnencodedString(pathDescription, output); } -void DescriptionFactory::Run(const SearchEngineT &sEngine, const unsigned zoomLevel) { +void DescriptionFactory::Run(const SearchEngine &sEngine, const unsigned zoomLevel) { if(0 == pathDescription.size()) return; diff --git a/Descriptors/DescriptionFactory.h b/Descriptors/DescriptionFactory.h index 52adc2713..911aae7de 100644 --- a/Descriptors/DescriptionFactory.h +++ b/Descriptors/DescriptionFactory.h @@ -27,7 +27,6 @@ #include "../Algorithms/DouglasPeucker.h" #include "../Algorithms/PolylineCompressor.h" #include "../DataStructures/Coordinate.h" -#include "../DataStructures/QueryEdge.h" #include "../DataStructures/SearchEngine.h" #include "../DataStructures/SegmentInformation.h" #include "../DataStructures/TurnInstructions.h" @@ -40,8 +39,6 @@ class DescriptionFactory { PolylineCompressor pc; 
PhantomNode startPhantom, targetPhantom; - typedef SearchEngine > SearchEngineT; - double DegreeToRadian(const double degree) const; double RadianToDegree(const double degree) const; public: @@ -73,7 +70,7 @@ public: void SetStartSegment(const PhantomNode & startPhantom); void SetEndSegment(const PhantomNode & startPhantom); void AppendEncodedPolylineString(std::string & output, bool isEncoded); - void Run(const SearchEngineT &sEngine, const unsigned zoomLevel); + void Run(const SearchEngine &sEngine, const unsigned zoomLevel); }; #endif /* DESCRIPTIONFACTORY_H_ */ diff --git a/Descriptors/GPXDescriptor.h b/Descriptors/GPXDescriptor.h index 5096468fb..1d3389ade 100644 --- a/Descriptors/GPXDescriptor.h +++ b/Descriptors/GPXDescriptor.h @@ -24,8 +24,7 @@ or see http://www.gnu.org/licenses/agpl.txt. #include #include "BaseDescriptor.h" -template -class GPXDescriptor : public BaseDescriptor{ +class GPXDescriptor : public BaseDescriptor{ private: _DescriptorConfig config; _Coordinate current; @@ -33,7 +32,7 @@ private: std::string tmp; public: void SetConfig(const _DescriptorConfig& c) { config = c; } - void Run(http::Reply & reply, const RawRouteData &rawRoute, PhantomNodes &phantomNodes, SearchEngineT &sEngine) { + void Run(http::Reply & reply, const RawRouteData &rawRoute, PhantomNodes &phantomNodes, SearchEngine &sEngine) { reply.content += (""); reply.content += " -class JSONDescriptor : public BaseDescriptor{ +class JSONDescriptor : public BaseDescriptor{ private: _DescriptorConfig config; DescriptionFactory descriptionFactory; @@ -68,7 +67,7 @@ public: JSONDescriptor() : numberOfEnteredRestrictedAreas(0) {} void SetConfig(const _DescriptorConfig & c) { config = c; } - void Run(http::Reply & reply, const RawRouteData &rawRoute, PhantomNodes &phantomNodes, SearchEngineT &sEngine) { + void Run(http::Reply & reply, const RawRouteData &rawRoute, PhantomNodes &phantomNodes, SearchEngine &sEngine) { WriteHeaderToOutput(reply.content); @@ -246,7 +245,7 @@ public: 
reply.content += "}"; } - void GetRouteNames(std::vector & shortestSegments, std::vector & alternativeSegments, const SearchEngineT &sEngine, RouteNames & routeNames) { + void GetRouteNames(std::vector & shortestSegments, std::vector & alternativeSegments, const SearchEngine &sEngine, RouteNames & routeNames) { /*** extract names for both alternatives ***/ Segment shortestSegment1, shortestSegment2; @@ -304,7 +303,7 @@ public: "\"status\":"; } - inline void BuildTextualDescription(DescriptionFactory & descriptionFactory, http::Reply & reply, const int lengthOfRoute, const SearchEngineT &sEngine, std::vector & segmentVector) { + inline void BuildTextualDescription(DescriptionFactory & descriptionFactory, http::Reply & reply, const int lengthOfRoute, const SearchEngine &sEngine, std::vector & segmentVector) { //Segment information has following format: //["instruction","streetname",length,position,time,"length","earth_direction",azimuth] //Example: ["Turn left","High Street",200,4,10,"200m","NE",22.5] diff --git a/Extractor/ExtractionContainers.cpp b/Extractor/ExtractionContainers.cpp index b2476bc86..2cb2baf22 100644 --- a/Extractor/ExtractionContainers.cpp +++ b/Extractor/ExtractionContainers.cpp @@ -257,8 +257,8 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const fout.write((char*)&edgeIT->ignoreInGrid, sizeof(bool)); fout.write((char*)&edgeIT->isAccessRestricted, sizeof(bool)); fout.write((char*)&edgeIT->isContraFlow, sizeof(bool)); + ++usedEdgeCounter; } - ++usedEdgeCounter; ++edgeIT; } } diff --git a/Extractor/ExtractionContainers.h b/Extractor/ExtractionContainers.h index abf718d5f..f5dfa789d 100644 --- a/Extractor/ExtractionContainers.h +++ b/Extractor/ExtractionContainers.h @@ -21,12 +21,12 @@ #ifndef EXTRACTIONCONTAINERS_H_ #define EXTRACTIONCONTAINERS_H_ -#include -#include - #include "ExtractorStructs.h" #include "../DataStructures/TimingUtil.h" +#include +#include + class ExtractionContainers { public: typedef 
stxxl::vector STXXLNodeIDVector; diff --git a/Extractor/PBFParser.cpp b/Extractor/PBFParser.cpp index 2f343ca9f..51b099f98 100644 --- a/Extractor/PBFParser.cpp +++ b/Extractor/PBFParser.cpp @@ -76,7 +76,7 @@ inline bool PBFParser::ReadHeader() { else if ( "DenseNodes" == feature ) { supported = true; } - + if ( !supported ) { std::cerr << "[error] required feature not supported: " << feature.data() << std::endl; return false; @@ -155,22 +155,19 @@ inline bool PBFParser::Parse() { inline void PBFParser::parseDenseNode(_ThreadData * threadData) { const OSMPBF::DenseNodes& dense = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).dense(); int denseTagIndex = 0; - int m_lastDenseID = 0; - int m_lastDenseLatitude = 0; - int m_lastDenseLongitude = 0; + int64_t m_lastDenseID = 0; + int64_t m_lastDenseLatitude = 0; + int64_t m_lastDenseLongitude = 0; - ImportNode n; - std::vector extracted_nodes_vector; const int number_of_nodes = dense.id_size(); - extracted_nodes_vector.reserve(number_of_nodes); + std::vector extracted_nodes_vector(number_of_nodes); for(int i = 0; i < number_of_nodes; ++i) { - n.Clear(); m_lastDenseID += dense.id( i ); m_lastDenseLatitude += dense.lat( i ); m_lastDenseLongitude += dense.lon( i ); - n.id = m_lastDenseID; - n.lat = 100000*( ( double ) m_lastDenseLatitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lat_offset() ) / NANO; - n.lon = 100000*( ( double ) m_lastDenseLongitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lon_offset() ) / NANO; + extracted_nodes_vector[i].id = m_lastDenseID; + extracted_nodes_vector[i].lat = 100000*( ( double ) m_lastDenseLatitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lat_offset() ) / NANO; + extracted_nodes_vector[i].lon = 100000*( ( double ) m_lastDenseLongitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lon_offset() ) / NANO; while (denseTagIndex 
< dense.keys_vals_size()) { const int tagValue = dense.keys_vals( denseTagIndex ); if( 0==tagValue ) { @@ -180,10 +177,9 @@ inline void PBFParser::parseDenseNode(_ThreadData * threadData) { const int keyValue = dense.keys_vals ( denseTagIndex+1 ); const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(tagValue).data(); const std::string & value = threadData->PBFprimitiveBlock.stringtable().s(keyValue).data(); - n.keyVals.Add(key, value); + extracted_nodes_vector[i].keyVals.Add(key, value); denseTagIndex += 2; } - extracted_nodes_vector.push_back(n); } #pragma omp parallel for schedule ( guided ) @@ -292,37 +288,33 @@ inline void PBFParser::parseRelation(_ThreadData * threadData) { } inline void PBFParser::parseWay(_ThreadData * threadData) { - ExtractionWay w; - std::vector waysToParse; const int number_of_ways = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways_size(); - waysToParse.reserve(number_of_ways); + std::vector parsed_way_vector(number_of_ways); for(int i = 0; i < number_of_ways; ++i) { - w.Clear(); const OSMPBF::Way& inputWay = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways( i ); - w.id = inputWay.id(); + parsed_way_vector[i].id = inputWay.id(); unsigned pathNode(0); const int number_of_referenced_nodes = inputWay.refs_size(); - for(int i = 0; i < number_of_referenced_nodes; ++i) { - pathNode += inputWay.refs(i); - w.path.push_back(pathNode); + for(int j = 0; j < number_of_referenced_nodes; ++j) { + pathNode += inputWay.refs(j); + parsed_way_vector[i].path.push_back(pathNode); } assert(inputWay.keys_size() == inputWay.vals_size()); const int number_of_keys = inputWay.keys_size(); - for(int i = 0; i < number_of_keys; ++i) { - const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputWay.keys(i)); - const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputWay.vals(i)); - w.keyVals.Add(key, val); + for(int j = 0; j < number_of_keys; ++j) { 
+ const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputWay.keys(j)); + const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputWay.vals(j)); + parsed_way_vector[i].keyVals.Add(key, val); } - waysToParse.push_back(w); } #pragma omp parallel for schedule ( guided ) for(int i = 0; i < number_of_ways; ++i) { - ExtractionWay & w = waysToParse[i]; + ExtractionWay & w = parsed_way_vector[i]; ParseWayInLua( w, scriptingEnvironment.getLuaStateForThreadID(omp_get_thread_num()) ); } - BOOST_FOREACH(ExtractionWay & w, waysToParse) { + BOOST_FOREACH(ExtractionWay & w, parsed_way_vector) { extractor_callbacks->wayFunction(w); } } @@ -423,7 +415,7 @@ inline bool PBFParser::readBlob(std::fstream& stream, _ThreadData * threadData) if(stream.eof()) { return false; } - + const int size = threadData->PBFBlobHeader.datasize(); if ( size < 0 || size > MAX_BLOB_SIZE ) { std::cerr << "[error] invalid Blob size:" << size << std::endl; diff --git a/Extractor/PBFParser.h b/Extractor/PBFParser.h index 587fbc96c..f3748b67b 100644 --- a/Extractor/PBFParser.h +++ b/Extractor/PBFParser.h @@ -1,17 +1,17 @@ /* open source routing machine Copyright (C) Dennis Luxen, others 2010 - + This program is free software; you can redistribute it and/or modify it under the terms of the GNU AFFERO General Public License as published by the Free Software Foundation; either version 3 of the License, or any later version. - + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
- + You should have received a copy of the GNU Affero General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA @@ -21,6 +21,13 @@ #ifndef PBFPARSER_H_ #define PBFPARSER_H_ +#include "../DataStructures/HashTable.h" +#include "../DataStructures/ConcurrentQueue.h" +#include "../Util/MachineInfo.h" +#include "../Util/OpenMPWrapper.h" +#include "../typedefs.h" + +#include "BaseParser.h" #include #include #include @@ -30,44 +37,38 @@ #include -#include "../typedefs.h" -#include "../DataStructures/HashTable.h" -#include "../DataStructures/ConcurrentQueue.h" -#include "../Util/MachineInfo.h" -#include "../Util/OpenMPWrapper.h" -#include "BaseParser.h" class PBFParser : public BaseParser { - + enum EntityType { TypeNode = 1, TypeWay = 2, TypeRelation = 4, TypeDenseNode = 8 } ; - + struct _ThreadData { int currentGroupID; int currentEntityID; short entityTypeIndicator; - + OSMPBF::BlobHeader PBFBlobHeader; OSMPBF::Blob PBFBlob; - + OSMPBF::HeaderBlock PBFHeaderBlock; OSMPBF::PrimitiveBlock PBFprimitiveBlock; - + std::vector charBuffer; }; - + public: PBFParser(const char * fileName, ExtractorCallbacks* ec, ScriptingEnvironment& se); virtual ~PBFParser(); - + inline bool ReadHeader(); inline bool Parse(); - + private: inline void ReadData(); inline void ParseData(); @@ -75,7 +76,7 @@ private: inline void parseNode(_ThreadData * ); inline void parseRelation(_ThreadData * threadData); inline void parseWay(_ThreadData * threadData); - + inline void loadGroup(_ThreadData * threadData); inline void loadBlock(_ThreadData * threadData); inline bool readPBFBlobHeader(std::fstream& stream, _ThreadData * threadData); @@ -83,17 +84,17 @@ private: inline bool unpackLZMA(std::fstream &, _ThreadData * ); inline bool readBlob(std::fstream& stream, _ThreadData * threadData) ; inline bool readNextBlock(std::fstream& stream, _ThreadData * threadData); - + static const int NANO = 1000 * 1000 * 
1000; static const int MAX_BLOB_HEADER_SIZE = 64 * 1024; static const int MAX_BLOB_SIZE = 32 * 1024 * 1024; - + #ifndef NDEBUG /* counting the number of read blocks and groups */ unsigned groupCount; unsigned blockCount; #endif - + std::fstream input; // the input stream to parse boost::shared_ptr > threadDataQueue; }; diff --git a/Plugins/LocatePlugin.h b/Plugins/LocatePlugin.h index c26e75fb6..f1c7d60ac 100644 --- a/Plugins/LocatePlugin.h +++ b/Plugins/LocatePlugin.h @@ -21,13 +21,13 @@ or see http://www.gnu.org/licenses/agpl.txt. #ifndef LOCATEPLUGIN_H_ #define LOCATEPLUGIN_H_ -#include - -#include "../Server/DataStructures/QueryObjectsStorage.h" #include "BasePlugin.h" #include "RouteParameters.h" -#include "../Util/StringUtil.h" #include "../DataStructures/NodeInformationHelpDesk.h" +#include "../Server/DataStructures/QueryObjectsStorage.h" +#include "../Util/StringUtil.h" + +#include /* * This Plugin locates the nearest node in the road network for a given coordinate. diff --git a/Plugins/ViaRoutePlugin.h b/Plugins/ViaRoutePlugin.h index d1e5bbc08..8249d991b 100644 --- a/Plugins/ViaRoutePlugin.h +++ b/Plugins/ViaRoutePlugin.h @@ -52,14 +52,14 @@ private: StaticGraph * graph; HashTable descriptorTable; std::string pluginDescriptorString; - SearchEngine > * searchEngine; + SearchEngine * searchEnginePtr; public: ViaRoutePlugin(QueryObjectsStorage * objects, std::string psd = "viaroute") : names(objects->names), pluginDescriptorString(psd) { nodeHelpDesk = objects->nodeHelpDesk; graph = objects->graph; - searchEngine = new SearchEngine >(graph, nodeHelpDesk, names); + searchEnginePtr = new SearchEngine(graph, nodeHelpDesk, names); descriptorTable.Set("", 0); //default descriptor descriptorTable.Set("json", 0); @@ -67,7 +67,7 @@ public: } virtual ~ViaRoutePlugin() { - delete searchEngine; + delete searchEnginePtr; } std::string GetDescriptor() const { return pluginDescriptorString; } @@ -101,7 +101,7 @@ public: } } // INFO("Brute force lookup of coordinate " << 
i); - searchEngine->FindPhantomNodeForCoordinate( rawRoute.rawViaNodeCoordinates[i], phantomNodeVector[i], routeParameters.zoomLevel); + searchEnginePtr->FindPhantomNodeForCoordinate( rawRoute.rawViaNodeCoordinates[i], phantomNodeVector[i], routeParameters.zoomLevel); } for(unsigned i = 0; i < phantomNodeVector.size()-1; ++i) { @@ -112,10 +112,10 @@ public: } if( ( routeParameters.alternateRoute ) && (1 == rawRoute.segmentEndCoordinates.size()) ) { // INFO("Checking for alternative paths"); - searchEngine->alternativePaths(rawRoute.segmentEndCoordinates[0], rawRoute); + searchEnginePtr->alternativePaths(rawRoute.segmentEndCoordinates[0], rawRoute); } else { - searchEngine->shortestPath(rawRoute.segmentEndCoordinates, rawRoute); + searchEnginePtr->shortestPath(rawRoute.segmentEndCoordinates, rawRoute); } @@ -125,7 +125,7 @@ public: reply.status = http::Reply::ok; //TODO: Move to member as smart pointer - BaseDescriptor > > * desc; + BaseDescriptor * desc; if("" != routeParameters.jsonpParameter) { reply.content += routeParameters.jsonpParameter; reply.content += "("; @@ -140,15 +140,15 @@ public: switch(descriptorType){ case 0: - desc = new JSONDescriptor > >(); + desc = new JSONDescriptor(); break; case 1: - desc = new GPXDescriptor > >(); + desc = new GPXDescriptor(); break; default: - desc = new JSONDescriptor > >(); + desc = new JSONDescriptor(); break; } @@ -161,7 +161,7 @@ public: // INFO("Number of segments: " << rawRoute.segmentEndCoordinates.size()); desc->SetConfig(descriptorConfig); - desc->Run(reply, rawRoute, phantomNodes, *searchEngine); + desc->Run(reply, rawRoute, phantomNodes, *searchEnginePtr); if("" != routeParameters.jsonpParameter) { reply.content += ")\n"; } diff --git a/Rakefile b/Rakefile index a7c6ae809..ccbb0cb9a 100644 --- a/Rakefile +++ b/Rakefile @@ -4,6 +4,7 @@ require 'digest/sha1' require 'cucumber/rake/task' require 'sys/proctable' +BUILD_FOLDER = 'build' DATA_FOLDER = 'sandbox' PROFILE = 'bicycle' OSRM_PORT = 5000 @@ -77,11 +78,13 
@@ end desc "Rebuild and run tests." -task :default => [:build, :cucumber] +task :default => [:build] -desc "Build using SConsstruct." +desc "Build using CMake." task :build do - system "scons" + Dir.chdir BUILD_FOLDER do + system "make" + end end desc "Setup config files." @@ -99,8 +102,8 @@ desc "Download OSM data." task :download => :setup do Dir.mkdir "#{DATA_FOLDER}" unless File.exist? "#{DATA_FOLDER}" puts "Downloading..." - puts "curl http://download.geofabrik.de/openstreetmap/europe/#{osm_data_country}.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf" - raise "Error while downloading data." unless system "curl http://download.geofabrik.de/openstreetmap/europe/#{osm_data_country}.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf" + puts "curl http://download.geofabrik.de/europe/#{osm_data_country}-latest.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf" + raise "Error while downloading data." unless system "curl http://download.geofabrik.de/europe/#{osm_data_country}-latest.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf" if osm_data_area_bbox puts "Cropping and converting to protobuffer..." raise "Error while cropping data." unless system "osmosis --read-pbf file=#{DATA_FOLDER}/#{osm_data_country}.osm.pbf --bounding-box #{osm_data_area_bbox} --write-pbf file=#{DATA_FOLDER}/#{osm_data_area_name}.osm.pbf omitmetadata=true" @@ -117,9 +120,9 @@ end desc "Reprocess OSM data." task :process => :setup do Dir.chdir DATA_FOLDER do - raise "Error while extracting data." unless system "../osrm-extract #{osm_data_area_name}.osm.pbf #{PROFILES_FOLDER}/#{PROFILE}.lua" + raise "Error while extracting data." unless system "../#{BUILD_FOLDER}/osrm-extract #{osm_data_area_name}.osm.pbf #{PROFILES_FOLDER}/#{PROFILE}.lua" puts - raise "Error while preparing data." unless system "../osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions #{PROFILES_FOLDER}/#{PROFILE}.lua" + raise "Error while preparing data." 
unless system "../#{BUILD_FOLDER}/osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions #{PROFILES_FOLDER}/#{PROFILE}.lua" puts end end @@ -127,14 +130,14 @@ end desc "Extract OSM data." task :extract => :setup do Dir.chdir DATA_FOLDER do - raise "Error while extracting data." unless system "../osrm-extract #{osm_data_area_name}.osm.pbf ../profiles/#{PROFILE}.lua" + raise "Error while extracting data." unless system "../#{BUILD_FOLDER}/osrm-extract #{osm_data_area_name}.osm.pbf ../profiles/#{PROFILE}.lua" end end desc "Prepare OSM data." task :prepare => :setup do Dir.chdir DATA_FOLDER do - raise "Error while preparing data." unless system "../osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions ../profiles/#{PROFILE}.lua" + raise "Error while preparing data." unless system "../#{BUILD_FOLDER}/osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions ../profiles/#{PROFILE}.lua" end end @@ -154,7 +157,7 @@ desc "Run the routing server in the terminal. Press Ctrl-C to stop." task :run => :setup do Dir.chdir DATA_FOLDER do write_server_ini osm_data_area_name - system "../osrm-routed" + system "../#{BUILD_FOLDER}/osrm-routed" end end @@ -163,7 +166,7 @@ task :up => :setup do Dir.chdir DATA_FOLDER do abort("Already up.") if up? write_server_ini osm_data_area_name - pipe = IO.popen('../osrm-routed 1>>osrm-routed.log 2>>osrm-routed.log') + pipe = IO.popen("../#{BUILD_FOLDER}/osrm-routed 1>>osrm-routed.log 2>>osrm-routed.log") timeout = 5 (timeout*10).times do begin diff --git a/RoutingAlgorithms/BasicRoutingInterface.h b/RoutingAlgorithms/BasicRoutingInterface.h index f329c697b..caef97a96 100644 --- a/RoutingAlgorithms/BasicRoutingInterface.h +++ b/RoutingAlgorithms/BasicRoutingInterface.h @@ -23,13 +23,15 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#ifndef BASICROUTINGINTERFACE_H_ #define BASICROUTINGINTERFACE_H_ +#include "../Plugins/RawRouteData.h" +#include "../Util/ContainerUtils.h" + #include #include #include -#include "../Plugins/RawRouteData.h" -#include "../Util/ContainerUtils.h" +#include template class BasicRoutingInterface : boost::noncopyable{ @@ -42,6 +44,7 @@ public: inline void RoutingStep(typename QueryDataT::QueryHeap & _forwardHeap, typename QueryDataT::QueryHeap & _backwardHeap, NodeID *middle, int *_upperbound, const int edgeBasedOffset, const bool forwardDirection) const { const NodeID node = _forwardHeap.DeleteMin(); const int distance = _forwardHeap.GetKey(node); + //INFO("Settled (" << _forwardHeap.GetData( node ).parent << "," << node << ")=" << distance); if(_backwardHeap.WasInserted(node) ){ const int newDistance = _backwardHeap.GetKey(node) + distance; if(newDistance < *_upperbound ){ diff --git a/RoutingAlgorithms/ShortestPathRouting.h b/RoutingAlgorithms/ShortestPathRouting.h index 66d374079..472e7ddbe 100644 --- a/RoutingAlgorithms/ShortestPathRouting.h +++ b/RoutingAlgorithms/ShortestPathRouting.h @@ -73,18 +73,24 @@ public: //insert new starting nodes into forward heap, adjusted by previous distances. 
if(searchFrom1stStartNode) { forward_heap1.Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); + INFO("fw1: " << phantomNodePair.startPhantom.edgeBasedNode << "´, w: " << -phantomNodePair.startPhantom.weight1); forward_heap2.Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); - } + INFO("fw2: " << phantomNodePair.startPhantom.edgeBasedNode << "´, w: " << -phantomNodePair.startPhantom.weight1); + } if(phantomNodePair.startPhantom.isBidirected() && searchFrom2ndStartNode) { forward_heap1.Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); + INFO("fw1: " << phantomNodePair.startPhantom.edgeBasedNode+1 << "´, w: " << -phantomNodePair.startPhantom.weight2); forward_heap2.Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); + INFO("fw2: " << phantomNodePair.startPhantom.edgeBasedNode+1 << "´, w: " << -phantomNodePair.startPhantom.weight2); } //insert new backward nodes into backward heap, unadjusted. reverse_heap1.Insert(phantomNodePair.targetPhantom.edgeBasedNode, phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.edgeBasedNode); + INFO("rv1: " << phantomNodePair.targetPhantom.edgeBasedNode << ", w;" << phantomNodePair.targetPhantom.weight1 ); if(phantomNodePair.targetPhantom.isBidirected() ) { reverse_heap2.Insert(phantomNodePair.targetPhantom.edgeBasedNode+1, phantomNodePair.targetPhantom.weight2, phantomNodePair.targetPhantom.edgeBasedNode+1); - } + INFO("rv2: " << phantomNodePair.targetPhantom.edgeBasedNode+1 << ", w;" << phantomNodePair.targetPhantom.weight2 ); + } const int forward_offset = phantomNodePair.startPhantom.weight1 + (phantomNodePair.startPhantom.isBidirected() ? 
phantomNodePair.startPhantom.weight2 : 0); const int reverse_offset = phantomNodePair.targetPhantom.weight1 + (phantomNodePair.targetPhantom.isBidirected() ? phantomNodePair.targetPhantom.weight2 : 0); diff --git a/SConstruct b/SConstruct deleted file mode 100644 index b0d04b2ba..000000000 --- a/SConstruct +++ /dev/null @@ -1,300 +0,0 @@ -#Sconstruct - -import os -import os.path -import string -import sys -from subprocess import call - -def CheckBoost(context, version): - # Boost versions are in format major.minor.subminor - v_arr = version.split(".") - version_n = 0 - if len(v_arr) > 0: - version_n += int(v_arr[0])*100000 - if len(v_arr) > 1: - version_n += int(v_arr[1])*100 - if len(v_arr) > 2: - version_n += int(v_arr[2]) - - context.Message('Checking for Boost version >= %s... ' % (version)) - ret = context.TryRun(""" - #include - - int main() - { - return BOOST_VERSION >= %d ? 0 : 1; - } - """ % version_n, '.cpp')[0] - context.Result(ret) - return ret - -def CheckProtobuf(context, version): - # Protobuf versions are in format major.minor.subminor - v_arr = version.split(".") - version_n = 0 - if len(v_arr) > 0: - version_n += int(v_arr[0])*1000000 - if len(v_arr) > 1: - version_n += int(v_arr[1])*1000 - if len(v_arr) > 2: - version_n += int(v_arr[2]) - - context.Message('Checking for Protobuffer version >= %s... ' % (version)) - ret = context.TryRun(""" - #include - int main() { - return (GOOGLE_PROTOBUF_VERSION >= %d) ? 
0 : 1; - } - """ % version_n, '.cpp')[0] - context.Result(ret) - return ret - -# Adding various options to the SConstruct -AddOption('--cxx', dest='cxx', type='string', nargs=1, action='store', metavar='STRING', help='C++ Compiler') -AddOption('--stxxlroot', dest='stxxlroot', type='string', nargs=1, action='store', metavar='STRING', help='root directory of STXXL') -AddOption('--verbosity', dest='verbosity', type='string', nargs=1, action='store', metavar='STRING', help='make Scons talking') -AddOption('--buildconfiguration', dest='buildconfiguration', type='string', nargs=1, action='store', metavar='STRING', help='debug or release') -AddOption('--all-flags', dest='allflags', type='string', nargs=0, action='store', metavar='STRING', help='turn off -march optimization in release mode') -AddOption('--with-tools', dest='withtools', type='string', nargs=0, action='store', metavar='STRING', help='build tools for data analysis') -AddOption('--no-march', dest='nomarch', type='string', nargs=0, action='store', metavar='STRING', help='turn off native optimizations') - -env = Environment( ENV = {'PATH' : os.environ['PATH']} ,COMPILER = GetOption('cxx')) -env["CC"] = os.getenv("CC") or env["CC"] -env["CXX"] = os.getenv("CXX") or env["CXX"] -env["ENV"].update(x for x in os.environ.items() if x[0].startswith("CCC_")) -try: - env['ENV']['TERM'] = os.environ['TERM'] -except KeyError: - env['ENV']['TERM'] = 'none' - -conf = Configure(env, custom_tests = { 'CheckBoost' : CheckBoost, 'CheckProtobuf' : CheckProtobuf }) - -if GetOption('cxx') is None: - #default Compiler - if sys.platform == 'darwin': #Mac OS X - env['CXX'] = 'clang++' - print 'Using default C++ Compiler: ', env['CXX'].strip() -else: - env.Replace(CXX = GetOption('cxx')) - print 'Using user supplied C++ Compiler: ', env['CXX'] - -if GetOption('allflags') is not None: - env.Append(CXXFLAGS = ["-Wextra", "-Wall", "-Wnon-virtual-dtor", "-Wundef", "-Wno-long-long", "-Woverloaded-virtual", "-Wfloat-equal", 
"-Wredundant-decls"]) - -if "clang" in env["CXX"]: - print "Warning building with clang removes OpenMP parallelization" - if GetOption('allflags') is not None: - env.Append(CXXFLAGS = ["-W#warnings", "-Wc++0x-compat", "-Waddress-of-temporary", "-Wambiguous-member-template", "-Warray-bounds", "-Watomic-properties", "-Wbind-to-temporary-copy", "-Wbuiltin-macro-redefined", "-Wc++-compat", "-Wc++0x-extensions", "-Wcomments", "-Wconditional-uninitialized", "-Wconstant-logical-operand", "-Wdeclaration-after-statement", "-Wdeprecated", "-Wdeprecated-implementations", "-Wdeprecated-writable-strings", "-Wduplicate-method-arg", "-Wempty-body", "-Wendif-labels", "-Wenum-compare", "-Wformat=2", "-Wfour-char-constants", "-Wgnu", "-Wincomplete-implementation", "-Winvalid-noreturn", "-Winvalid-offsetof", "-Winvalid-token-paste", "-Wlocal-type-template-args", "-Wmethod-signatures", "-Wmicrosoft", "-Wmissing-declarations", "-Wnon-pod-varargs", "-Wnonfragile-abi2", "-Wnull-dereference", "-Wout-of-line-declaration", "-Woverlength-strings", "-Wpacked", "-Wpointer-arith", "-Wpointer-sign", "-Wprotocol", "-Wreadonly-setter-attrs", "-Wselector", "-Wshift-overflow", "-Wshift-sign-overflow", "-Wstrict-selector-match", "-Wsuper-class-method-mismatch", "-Wtautological-compare", "-Wtypedef-redefinition", "-Wundeclared-selector", "-Wunnamed-type-template-args", "-Wunused-exception-parameter", "-Wunused-member-function", "-Wused-but-marked-unused", "-Wvariadic-macros"]) -else: - env.Append(CCFLAGS = ['-minline-all-stringops', '-fopenmp', '-Wall']) - env.Append(LINKFLAGS = '-fopenmp') - -if GetOption('buildconfiguration') == 'debug': - env.Append(CCFLAGS = ['-Wall', '-g3', '-rdynamic']) -else: - env.Append(CCFLAGS = ['-O3', '-DNDEBUG']) - -if sys.platform == 'darwin': #Mac OS X - #os x default installations - env.Append(CPPPATH = ['/usr/include/libxml2'] ) - env.Append(CPPPATH = ['/usr/X11/include']) #comes with os x -# env.Append(LIBPATH = ['/usr/X11/lib']) #needed for libpng - - #assume stxxl 
and boost are installed via homebrew. call brew binary to get folder locations - import subprocess - stxxl_prefix = subprocess.check_output(["brew", "--prefix", "libstxxl"]).strip() - env.Append(CPPPATH = [stxxl_prefix+"/include"] ) - env.Append(LIBPATH = [stxxl_prefix+"/lib"] ) - boost_prefix = subprocess.check_output(["brew", "--prefix", "boost"]).strip() - env.Append(CPPPATH = [boost_prefix+"/include"] ) - env.Append(LIBPATH = [boost_prefix+"/lib"] ) - if not conf.CheckLibWithHeader('lua', 'lua.h', 'C'): - print "lua library not found. Exiting" - Exit(-1) - - if not conf.CheckLibWithHeader('luabind', 'luabind/luabind.hpp', 'CXX'): - print "luabind library not found. Exiting" - Exit(-1) - -elif sys.platform.startswith("freebsd"): - env.ParseConfig('pkg-config --cflags --libs protobuf') - env.Append(CPPPATH = ['/usr/local/include', '/usr/local/include/libxml2']) - env.Append(LIBPATH = ['/usr/local/lib']) - if GetOption('stxxlroot') is not None: - env.Append(CPPPATH = GetOption('stxxlroot')+'/include') - env.Append(LIBPATH = GetOption('stxxlroot')+'/lib') - print 'STXXLROOT = ', GetOption('stxxlroot') -elif sys.platform == 'win32': - #SCons really wants to use Microsoft compiler - print "Compiling is not yet supported on Windows" - Exit(-1) -else: - print "Default platform" - if GetOption('stxxlroot') is not None: - env.Append(CPPPATH = GetOption('stxxlroot')+'/include') - env.Append(LIBPATH = GetOption('stxxlroot')+'/lib') - print 'STXXLROOT = ', GetOption('stxxlroot') - env.Append(CPPPATH = ['/usr/include', '/usr/include/include', '/usr/include/libxml2/']) - if not conf.CheckLibWithHeader('pthread', 'pthread.h', 'CXX'): - print "pthread not found. Exiting" - Exit(-1) - - if not conf.CheckLibWithHeader('luajit-5.1', 'luajit-2.0/lua.h', 'CXX'): - print "luajit library not found. 
Checking for interpreter" - env.ParseConfig('pkg-config --cflags --libs lua5.1') - env.ParseConfig('pkg-config --cflags --libs luabind') - -#Check if architecture optimizations shall be turned off -if GetOption('buildconfiguration') != 'debug' and sys.platform != 'darwin' and GetOption('nomarch') is None: - env.Append(CCFLAGS = ['-march=native']) - -if not conf.CheckHeader('omp.h'): - if "clang" not in env["CXX"]: - print "OpenMP not found, Exiting" - Exit(-1) -if not conf.CheckLibWithHeader('bz2', 'bzlib.h', 'CXX'): - print "bz2 library not found. Exiting" - Exit(-1) -if GetOption('withtools') is not None: - if not conf.CheckLibWithHeader('gdal', 'gdal/gdal.h', 'CXX'): - print "gdal library not found. Exiting" - Exit(-1) -if not conf.CheckLibWithHeader('osmpbf', 'osmpbf/osmpbf.h', 'CXX'): - print "osmpbf library not found. Exiting" - print "Either install libosmpbf-dev (Ubuntu) or use https://github.com/scrosby/OSM-binary" - Exit(-1) -if not conf.CheckLibWithHeader('protobuf', 'google/protobuf/descriptor.h', 'CXX'): - print "Google Protobuffer library not found. Exiting" - Exit(-1) -#check for protobuf 2.3.0 -if not (conf.CheckProtobuf('2.3.0')): - print 'libprotobuf version >= 2.3.0 needed' - Exit(-1); -if not (env.Detect('protoc')): - print 'protobuffer compiler not found' - Exit(-1); -if not conf.CheckLibWithHeader('stxxl', 'stxxl.h', 'CXX'): - print "stxxl library not found. Exiting" - Exit(-1) -if not conf.CheckLibWithHeader('xml2', 'libxml/xmlreader.h', 'CXX'): - print "libxml2 library or header not found. Exiting" - Exit(-1) -if not conf.CheckLibWithHeader('z', 'zlib.h', 'CXX'): - print "zlib library or header not found. Exiting" - Exit(-1) -#Check BOOST installation -if not (conf.CheckBoost('1.44')): - print 'Boost version >= 1.44 needed' - Exit(-1); -if not conf.CheckLib('boost_system', language="C++"): - if not conf.CheckLib('boost_system-mt', language="C++"): - print "boost_system library not found. 
Exiting" - Exit(-1) - else: - print "using boost -mt" - env.Append(CCFLAGS = ' -lboost_system-mt') - env.Append(LINKFLAGS = ' -lboost_system-mt') -if not conf.CheckLibWithHeader('boost_thread', 'boost/thread.hpp', 'CXX'): - if not conf.CheckLibWithHeader('boost_thread-mt', 'boost/thread.hpp', 'CXX'): - print "boost thread library not found. Exiting" - Exit(-1) - else: - print "using boost -mt" - env.Append(CCFLAGS = ' -lboost_thread-mt') - env.Append(LINKFLAGS = ' -lboost_thread-mt') -if not conf.CheckLibWithHeader('boost_regex', 'boost/regex.hpp', 'CXX'): - if not conf.CheckLibWithHeader('boost_regex-mt', 'boost/regex.hpp', 'CXX'): - print "boost/regex.hpp not found. Exiting" - Exit(-1) - else: - print "using boost_regex -mt" - env.Append(CCFLAGS = ' -lboost_regex-mt') - env.Append(LINKFLAGS = ' -lboost_regex-mt') -if not conf.CheckLib('boost_filesystem', language="C++"): - if not conf.CheckLib('boost_filesystem-mt', language="C++"): - print "boost_filesystem library not found. Exiting" - Exit(-1) - else: - print "using boost -mt" - env.Append(CCFLAGS = ' -lboost_filesystem-mt') - env.Append(LINKFLAGS = ' -lboost_filesystem-mt') -if not conf.CheckCXXHeader('boost/archive/iterators/base64_from_binary.hpp'): - print "boost/archive/iterators/base64_from_binary.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/archive/iterators/binary_from_base64.hpp'): - print "boost/archive/iterators/binary_from_base64.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/archive/iterators/transform_width.hpp'): - print "boost/archive/iterators/transform_width.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/bind.hpp'): - print "boost/bind.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/circular_buffer.hpp'): - print "boost/circular_buffer.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/enable_shared_from_this.hpp'): - print "boost/bind.hpp not found. 
Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/foreach.hpp'): - print "boost/foreach.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/lexical_cast.hpp'): - print "boost/foreach.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/logic/tribool.hpp'): - print "boost/foreach.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/math/tr1.hpp'): - print "boost/foreach.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/noncopyable.hpp'): - print "boost/noncopyable.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/property_tree/ptree.hpp'): - print "boost/property_tree/ptree.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/property_tree/ini_parser.hpp'): - print "boost/property_tree/ini_parser.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/shared_ptr.hpp'): - print "boost/shared_ptr.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/thread/mutex.hpp'): - print "boost/shared_ptr.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/thread/thread.hpp'): - print "boost/thread/thread.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/thread/condition.hpp'): - print "boost/thread/condition.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/thread/thread.hpp'): - print "boost/thread/thread.hpp not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/thread.hpp'): - print "boost thread header not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/tuple/tuple.hpp'): - print "boost thread header not found. Exiting" - Exit(-1) -if not conf.CheckCXXHeader('boost/unordered_map.hpp'): - print "boost thread header not found. Exiting" - Exit(-1) - -#checks for intels thread building blocks library -#if not conf.CheckLibWithHeader('tbb', 'tbb/tbb.h', 'CXX'): -# print "Intel TBB library not found. 
Exiting" -# Exit(-1) -#if not conf.CheckCXXHeader('tbb/task_scheduler_init.h'): -# print "tbb/task_scheduler_init.h not found. Exiting" -# Exit(-1) - -env.Program(target = 'osrm-extract', source = ["extractor.cpp", Glob('Util/*.cpp'), Glob('Extractor/*.cpp')]) -env.Program(target = 'osrm-prepare', source = ["createHierarchy.cpp", Glob('Contractor/*.cpp'), Glob('Util/SRTMLookup/*.cpp'), Glob('Algorithms/*.cpp')]) -env.Program(target = 'osrm-routed', source = ["routed.cpp", 'Descriptors/DescriptionFactory.cpp', Glob('ThirdParty/*.cc'), Glob('Server/DataStructures/*.cpp')], CCFLAGS = env['CCFLAGS'] + ['-DROUTED']) -if GetOption('withtools') is not None: - env.Program(target = 'Tools/osrm-component', source = ["Tools/componentAnalysis.cpp"]) -env = conf.Finish() - diff --git a/Server/BasicDatastructures.h b/Server/BasicDatastructures.h index 71e3ac996..366abaece 100644 --- a/Server/BasicDatastructures.h +++ b/Server/BasicDatastructures.h @@ -21,8 +21,8 @@ or see http://www.gnu.org/licenses/agpl.txt. #ifndef BASIC_DATASTRUCTURES_H #define BASIC_DATASTRUCTURES_H #include +#include #include -#include #include "../Util/StringUtil.h" @@ -143,7 +143,11 @@ Reply Reply::stockReply(Reply::status_type status) { rep.headers[0].name = "Access-Control-Allow-Origin"; rep.headers[0].value = "*"; rep.headers[1].name = "Content-Length"; - rep.headers[1].value = boost::lexical_cast(rep.content.size()); + + std::string s; + intToString(rep.content.size(), s); + + rep.headers[1].value = s; rep.headers[2].name = "Content-Type"; rep.headers[2].value = "text/html"; return rep; diff --git a/Server/DataStructures/QueryObjectsStorage.cpp b/Server/DataStructures/QueryObjectsStorage.cpp index 325cdb97d..df15f532c 100644 --- a/Server/DataStructures/QueryObjectsStorage.cpp +++ b/Server/DataStructures/QueryObjectsStorage.cpp @@ -22,14 +22,27 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#include "QueryObjectsStorage.h" #include "../../Util/GraphLoader.h" -QueryObjectsStorage::QueryObjectsStorage(std::string hsgrPath, std::string ramIndexPath, std::string fileIndexPath, std::string nodesPath, std::string edgesPath, std::string namesPath, std::string timestampPath) { +QueryObjectsStorage::QueryObjectsStorage( + std::string hsgrPath, + std::string ramIndexPath, + std::string fileIndexPath, + std::string nodesPath, + std::string edgesPath, + std::string namesPath, + std::string timestampPath +) { INFO("loading graph data"); std::ifstream hsgrInStream(hsgrPath.c_str(), std::ios::binary); if(!hsgrInStream) { ERR(hsgrPath << " not found"); } //Deserialize road network graph std::vector< QueryGraph::_StrNode> nodeList; std::vector< QueryGraph::_StrEdge> edgeList; - const int n = readHSGRFromStream(hsgrInStream, nodeList, edgeList, &checkSum); + const int n = readHSGRFromStream( + hsgrInStream, + nodeList, + edgeList, + &checkSum + ); INFO("Data checksum is " << checkSum); graph = new QueryGraph(nodeList, edgeList); @@ -39,7 +52,7 @@ QueryObjectsStorage::QueryObjectsStorage(std::string hsgrPath, std::string ramIn if(timestampPath.length()) { INFO("Loading Timestamp"); std::ifstream timestampInStream(timestampPath.c_str()); - if(!timestampInStream) { ERR(timestampPath << " not found"); } + if(!timestampInStream) { WARN(timestampPath << " not found"); } getline(timestampInStream, timestamp); timestampInStream.close(); diff --git a/Server/RequestHandler.h b/Server/RequestHandler.h index c801b7971..9dd0e2849 100644 --- a/Server/RequestHandler.h +++ b/Server/RequestHandler.h @@ -25,7 +25,6 @@ or see http://www.gnu.org/licenses/agpl.txt. #include // std::tolower #include #include -#include #include #include "APIGrammar.h" @@ -52,19 +51,20 @@ public: void handle_request(const Request& req, Reply& rep){ //parse command - std::string request(req.uri); - - { //This block logs the current request to std out. 
should be moved to a logging component - time_t ltime; - struct tm *Tm; - - ltime=time(NULL); - Tm=localtime(<ime); - - INFO((Tm->tm_mday < 10 ? "0" : "" ) << Tm->tm_mday << "-" << (Tm->tm_mon+1 < 10 ? "0" : "" ) << (Tm->tm_mon+1) << "-" << 1900+Tm->tm_year << " " << (Tm->tm_hour < 10 ? "0" : "" ) << Tm->tm_hour << ":" << (Tm->tm_min < 10 ? "0" : "" ) << Tm->tm_min << ":" << (Tm->tm_sec < 10 ? "0" : "" ) << Tm->tm_sec << " " << - req.endpoint.to_string() << " " << req.referrer << ( 0 == req.referrer.length() ? "- " :" ") << req.agent << ( 0 == req.agent.length() ? "- " :" ") << req.uri ); - } try { + std::string request(req.uri); + + { //This block logs the current request to std out. should be moved to a logging component + time_t ltime; + struct tm *Tm; + + ltime=time(NULL); + Tm=localtime(<ime); + + INFO((Tm->tm_mday < 10 ? "0" : "" ) << Tm->tm_mday << "-" << (Tm->tm_mon+1 < 10 ? "0" : "" ) << (Tm->tm_mon+1) << "-" << 1900+Tm->tm_year << " " << (Tm->tm_hour < 10 ? "0" : "" ) << Tm->tm_hour << ":" << (Tm->tm_min < 10 ? "0" : "" ) << Tm->tm_min << ":" << (Tm->tm_sec < 10 ? "0" : "" ) << Tm->tm_sec << " " << + req.endpoint.to_string() << " " << req.referrer << ( 0 == req.referrer.length() ? "- " :" ") << req.agent << ( 0 == req.agent.length() ? "- " :" ") << req.uri ); + } + RouteParameters routeParameters; APIGrammar apiParser(&routeParameters); diff --git a/Util/GraphLoader.h b/Util/GraphLoader.h index 40bc98a9c..5dc50f888 100644 --- a/Util/GraphLoader.h +++ b/Util/GraphLoader.h @@ -21,6 +21,15 @@ or see http://www.gnu.org/licenses/agpl.txt. #ifndef GRAPHLOADER_H #define GRAPHLOADER_H +#include "../DataStructures/ImportNode.h" +#include "../DataStructures/ImportEdge.h" +#include "../DataStructures/NodeCoords.h" +#include "../DataStructures/Restriction.h" +#include "../typedefs.h" + +#include +#include + #include #include @@ -30,19 +39,11 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#include #include -#include - -#include "../DataStructures/ImportNode.h" -#include "../DataStructures/ImportEdge.h" -#include "../DataStructures/NodeCoords.h" -#include "../DataStructures/Restriction.h" -#include "../typedefs.h" - typedef boost::unordered_map ExternalNodeMap; template struct _ExcessRemover { - inline bool operator()( EdgeT & edge ) const { + inline bool operator()( const EdgeT & edge ) const { return edge.source() == UINT_MAX; } }; @@ -116,9 +117,9 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector& edgeL in.read((char*)&isAccessRestricted, sizeof(bool)); in.read((char*)&isContraFlow, sizeof(bool)); - GUARANTEE(length > 0, "loaded null length edge" ); - GUARANTEE(weight > 0, "loaded null weight"); - GUARANTEE(0<=dir && dir<=2, "loaded bogus direction"); + BOOST_ASSERT_MSG(length > 0, "loaded null length edge" ); + BOOST_ASSERT_MSG(weight > 0, "loaded null weight"); + BOOST_ASSERT_MSG(0<=dir && dir<=2, "loaded bogus direction"); bool forward = true; bool backward = true; @@ -144,7 +145,9 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector& edgeL continue; } target = intNodeID->second; - GUARANTEE(source != UINT_MAX && target != UINT_MAX, "nonexisting source or target"); + BOOST_ASSERT_MSG(source != UINT_MAX && target != UINT_MAX, + "nonexisting source or target" + ); if(source > target) { std::swap(source, target); @@ -185,9 +188,9 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector& edgeL } } } - std::vector::iterator newEnd = std::remove_if(edgeList.begin(), edgeList.end(), _ExcessRemover()); + typename std::vector::iterator newEnd = std::remove_if(edgeList.begin(), edgeList.end(), _ExcessRemover()); ext2IntNodeMap.clear(); - std::vector(edgeList.begin(), newEnd).swap(edgeList); //remove excess candidates. + std::vector(edgeList.begin(), newEnd).swap(edgeList); //remove excess candidates. 
INFO("Graph loaded ok and has " << edgeList.size() << " edges"); return n; } @@ -299,7 +302,7 @@ NodeID readDTMPGraphFromStream(std::istream &in, std::vector& edgeList, s edgeList.push_back(inputEdge); } ext2IntNodeMap.clear(); - std::vector(edgeList.begin(), edgeList.end()).swap(edgeList); //remove excess candidates. + std::vector(edgeList.begin(), edgeList.end()).swap(edgeList); //remove excess candidates. std::cout << "ok" << std::endl; return n; } diff --git a/Util/LuaUtil.h b/Util/LuaUtil.h index fadf78d6f..3793d8bda 100644 --- a/Util/LuaUtil.h +++ b/Util/LuaUtil.h @@ -18,14 +18,19 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA or see http://www.gnu.org/licenses/agpl.txt. */ - - #ifndef LUAUTIL_H_ #define LUAUTIL_H_ +extern "C" { + #include + #include + #include +} + +#include +#include #include #include -#include template void LUA_print(T number) { diff --git a/cmake/FindLuabind.cmake b/cmake/FindLuabind.cmake new file mode 100644 index 000000000..39b325059 --- /dev/null +++ b/cmake/FindLuabind.cmake @@ -0,0 +1,75 @@ +# Locate Luabind library +# This module defines +# LUABIND_FOUND, if false, do not try to link to Luabind +# LUABIND_LIBRARIES +# LUABIND_INCLUDE_DIR, where to find luabind.hpp +# +# Note that the expected include convention is +# #include +# and not +# #include + +IF( NOT LUABIND_FIND_QUIETLY ) + MESSAGE(STATUS "Looking for Luabind...") +ENDIF() + +FIND_PATH(LUABIND_INCLUDE_DIR luabind.hpp + HINTS + $ENV{LUABIND_DIR} + PATH_SUFFIXES luabind include/luabind include + PATHS + ~/Library/Frameworks + /Library/Frameworks + /usr/local + /usr + /opt/local # DarwinPorts + /opt +) + +FIND_LIBRARY(LUABIND_LIBRARY + NAMES luabind + HINTS + $ENV{LUABIND_DIR} + PATH_SUFFIXES lib64 lib + PATHS + ~/Library/Frameworks + /Library/Frameworks + /usr/local + /usr + /opt/local + /opt +) + +FIND_LIBRARY(LUABIND_LIBRARY_DBG + NAMES luabindd + HINTS + $ENV{LUABIND_DIR} + PATH_SUFFIXES lib64 lib + PATHS + ~/Library/Frameworks + 
/Library/Frameworks + /usr/local + /usr + /opt/local + /opt +) + +IF(LUABIND_LIBRARY) + SET( LUABIND_LIBRARIES "${LUABIND_LIBRARY}" CACHE STRING "Luabind Libraries") +ENDIF(LUABIND_LIBRARY) + +INCLUDE(FindPackageHandleStandardArgs) +# handle the QUIETLY and REQUIRED arguments and set LUABIND_FOUND to TRUE if +# all listed variables are TRUE +FIND_PACKAGE_HANDLE_STANDARD_ARGS(Luabind DEFAULT_MSG LUABIND_LIBRARIES LUABIND_INCLUDE_DIR) + +IF( NOT LUABIND_FIND_QUIETLY ) + IF( LUABIND_FOUND ) + MESSAGE(STATUS "Found Luabind: ${LUABIND_LIBRARY}" ) + ENDIF() + IF( LUABIND_LIBRARY_DBG ) + MESSAGE(STATUS "Luabind debug library availible: ${LUABIND_LIBRARY_DBG}") + ENDIF() +ENDIF() + +MARK_AS_ADVANCED(LUABIND_INCLUDE_DIR LUABIND_LIBRARIES LUABIND_LIBRARY LUABIND_LIBRARY_DBG) diff --git a/cmake/FindOSMPBF.cmake b/cmake/FindOSMPBF.cmake new file mode 100644 index 000000000..78b1d9dc8 --- /dev/null +++ b/cmake/FindOSMPBF.cmake @@ -0,0 +1,54 @@ +# Locate OSMPBF library +# This module defines +# OSMPBF_FOUND, if false, do not try to link to OSMPBF +# OSMPBF_LIBRARIES +# OSMPBF_INCLUDE_DIR, where to find OSMPBF.hpp +# +# Note that the expected include convention is +# #include +# and not +# #include + +IF( NOT OSMPBF_FIND_QUIETLY ) + MESSAGE(STATUS "Looking for OSMPBF...") +ENDIF() + +FIND_PATH(OSMPBF_INCLUDE_DIR osmpbf.h + HINTS + $ENV{OSMPBF_DIR} + PATH_SUFFIXES OSMPBF include/osmpbf include + PATHS + ~/Library/Frameworks + /Library/Frameworks + /usr/local + /usr + /opt/local # DarwinPorts + /opt +) + +FIND_LIBRARY(OSMPBF_LIBRARY + NAMES osmpbf + HINTS + $ENV{OSMPBF_DIR} + PATH_SUFFIXES lib64 lib + PATHS + ~/Library/Frameworks + /Library/Frameworks + /usr/local + /usr + /opt/local + /opt +) + +INCLUDE(FindPackageHandleStandardArgs) +# handle the QUIETLY and REQUIRED arguments and set OSMPBF_FOUND to TRUE if +# all listed variables are TRUE +FIND_PACKAGE_HANDLE_STANDARD_ARGS(OSMPBF DEFAULT_MSG OSMPBF_LIBRARY OSMPBF_INCLUDE_DIR) + +IF( NOT OSMPBF_FIND_QUIETLY ) + IF( OSMPBF_FOUND 
) + MESSAGE(STATUS "Found OSMPBF: ${OSMPBF_LIBRARY}" ) + ENDIF() +ENDIF() + +#MARK_AS_ADVANCED(OSMPBF_INCLUDE_DIR OSMPBF_LIBRARIES OSMPBF_LIBRARY OSMPBF_LIBRARY_DBG) diff --git a/cmake/FindSTXXL.cmake b/cmake/FindSTXXL.cmake new file mode 100644 index 000000000..52d508c05 --- /dev/null +++ b/cmake/FindSTXXL.cmake @@ -0,0 +1,51 @@ +# Locate STXXL library +# This module defines +# STXXL_FOUND, if false, do not try to link to libstxxl +# STXXL_LIBRARY +# STXXL_INCLUDE_DIR, where to find stxxl.h +# + + +IF( NOT STXXL_FIND_QUIETLY ) + MESSAGE(STATUS "Looking for STXXL...") +ENDIF() + +FIND_PATH(STXXL_INCLUDE_DIR stxxl.h + HINTS + $ENV{STXXL_DIR} + PATH_SUFFIXES stxxl include/stxxl/stxxl include/stxxl include + PATHS + ~/Library/Frameworks + /Library/Frameworks + /usr/local + /usr + /opt/local # DarwinPorts + /opt +) + +FIND_LIBRARY(STXXL_LIBRARY + NAMES stxxl + HINTS + $ENV{STXXL_DIR} + PATH_SUFFIXES lib64 lib + PATHS + ~/Library/Frameworks + /Library/Frameworks + /usr/local + /usr + /opt/local + /opt +) + +INCLUDE(FindPackageHandleStandardArgs) +# handle the QUIETLY and REQUIRED arguments and set STXXL_FOUND to TRUE if +# all listed variables are TRUE +FIND_PACKAGE_HANDLE_STANDARD_ARGS(STXXL DEFAULT_MSG STXXL_LIBRARY STXXL_INCLUDE_DIR) + +IF( NOT STXXL_FIND_QUIETLY ) + IF( STXXL_FOUND ) + MESSAGE(STATUS "Found STXXL: ${STXXL_LIBRARY}" ) + ENDIF() +ENDIF() + +MARK_AS_ADVANCED(STXXL_INCLUDE_DIR STXXL_LIBRARY) diff --git a/cmake/GetGitRevisionDescription.cmake b/cmake/GetGitRevisionDescription.cmake new file mode 100644 index 000000000..1bf023008 --- /dev/null +++ b/cmake/GetGitRevisionDescription.cmake @@ -0,0 +1,123 @@ +# - Returns a version string from Git +# +# These functions force a re-configure on each git commit so that you can +# trust the values of the variables in your build system. 
+# +# get_git_head_revision( [ ...]) +# +# Returns the refspec and sha hash of the current head revision +# +# git_describe( [ ...]) +# +# Returns the results of git describe on the source tree, and adjusting +# the output so that it tests false if an error occurs. +# +# git_get_exact_tag( [ ...]) +# +# Returns the results of git describe --exact-match on the source tree, +# and adjusting the output so that it tests false if there was no exact +# matching tag. +# +# Requires CMake 2.6 or newer (uses the 'function' command) +# +# Original Author: +# 2009-2010 Ryan Pavlik +# http://academic.cleardefinition.com +# Iowa State University HCI Graduate Program/VRAC +# +# Copyright Iowa State University 2009-2010. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or copy at +# http://www.boost.org/LICENSE_1_0.txt) + +if(__get_git_revision_description) + return() +endif() +set(__get_git_revision_description YES) + +# We must run the following at "include" time, not at function call time, +# to find the path to this module rather than the path to a calling list file +get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH) + +function(get_git_head_revision _refspecvar _hashvar) + set(GIT_PARENT_DIR "${CMAKE_SOURCE_DIR}") + set(GIT_DIR "${GIT_PARENT_DIR}/.git") + while(NOT EXISTS "${GIT_DIR}") # .git dir not found, search parent directories + set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}") + get_filename_component(GIT_PARENT_DIR ${GIT_PARENT_DIR} PATH) + if(GIT_PARENT_DIR STREQUAL GIT_PREVIOUS_PARENT) + # We have reached the root directory, we are not in git + set(${_refspecvar} "GITDIR-NOTFOUND" PARENT_SCOPE) + set(${_hashvar} "GITDIR-NOTFOUND" PARENT_SCOPE) + return() + endif() + set(GIT_DIR "${GIT_PARENT_DIR}/.git") + endwhile() + set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data") + if(NOT EXISTS "${GIT_DATA}") + file(MAKE_DIRECTORY "${GIT_DATA}") + endif() + + if(NOT EXISTS "${GIT_DIR}/HEAD") + 
return() + endif() + set(HEAD_FILE "${GIT_DATA}/HEAD") + configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY) + + configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in" + "${GIT_DATA}/grabRef.cmake" + @ONLY) + include("${GIT_DATA}/grabRef.cmake") + + set(${_refspecvar} "${HEAD_REF}" PARENT_SCOPE) + set(${_hashvar} "${HEAD_HASH}" PARENT_SCOPE) +endfunction() + +function(git_describe _var) + if(NOT GIT_FOUND) + find_package(Git QUIET) + endif() + get_git_head_revision(refspec hash) + if(NOT GIT_FOUND) + set(${_var} "GIT-NOTFOUND" PARENT_SCOPE) + return() + endif() + if(NOT hash) + set(${_var} "HEAD-HASH-NOTFOUND" PARENT_SCOPE) + return() + endif() + + # TODO sanitize + #if((${ARGN}" MATCHES "&&") OR + # (ARGN MATCHES "||") OR + # (ARGN MATCHES "\\;")) + # message("Please report the following error to the project!") + # message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}") + #endif() + + #message(STATUS "Arguments to execute_process: ${ARGN}") + + execute_process(COMMAND + "${GIT_EXECUTABLE}" + describe + ${hash} + ${ARGN} + WORKING_DIRECTORY + "${CMAKE_SOURCE_DIR}" + RESULT_VARIABLE + res + OUTPUT_VARIABLE + out + ERROR_QUIET + OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT res EQUAL 0) + set(out "${out}-${res}-NOTFOUND") + endif() + + set(${_var} "${out}" PARENT_SCOPE) +endfunction() + +function(git_get_exact_tag _var) + git_describe(out --exact-match ${ARGN}) + set(${_var} "${out}" PARENT_SCOPE) +endfunction() diff --git a/createHierarchy.cpp b/createHierarchy.cpp index 7156cef32..5e6343d1d 100644 --- a/createHierarchy.cpp +++ b/createHierarchy.cpp @@ -18,15 +18,26 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA or see http://www.gnu.org/licenses/agpl.txt. 
*/ -extern "C" { -#include -#include -#include -} -#include +#include "Algorithms/IteratorBasedCRC32.h" +#include "Contractor/Contractor.h" +#include "Contractor/EdgeBasedGraphFactory.h" +#include "DataStructures/BinaryHeap.h" +#include "DataStructures/DeallocatingVector.h" +#include "DataStructures/QueryEdge.h" +#include "DataStructures/StaticGraph.h" +#include "DataStructures/StaticRTree.h" +#include "Util/BaseConfiguration.h" +#include "Util/GraphLoader.h" +#include "Util/InputFileUtil.h" +#include "Util/LuaUtil.h" +#include "Util/OpenMPWrapper.h" +#include "Util/StringUtil.h" +#include "typedefs.h" #include +#include + #include #include #include @@ -34,21 +45,6 @@ extern "C" { #include #include -#include "Algorithms/IteratorBasedCRC32.h" -#include "Util/OpenMPWrapper.h" -#include "typedefs.h" -#include "Contractor/Contractor.h" -#include "Contractor/EdgeBasedGraphFactory.h" -#include "DataStructures/BinaryHeap.h" -#include "DataStructures/DeallocatingVector.h" -#include "DataStructures/NNGrid.h" -#include "DataStructures/QueryEdge.h" -#include "Util/BaseConfiguration.h" -#include "Util/GraphLoader.h" -#include "Util/InputFileUtil.h" -#include "Util/LuaUtil.h" -#include "Util/StringUtil.h" - typedef QueryEdge::EdgeData EdgeData; typedef DynamicGraph::InputEdge InputEdge; typedef StaticGraph::InputEdge StaticEdge; @@ -61,212 +57,220 @@ std::vector trafficLightNodes; std::vector edgeList; int main (int argc, char *argv[]) { - if(argc < 3) { - ERR("usage: " << std::endl << argv[0] << " []"); - } - - double startupTime = get_timestamp(); - unsigned numberOfThreads = omp_get_num_procs(); - if(testDataFile("contractor.ini")) { - ContractorConfiguration contractorConfig("contractor.ini"); - unsigned rawNumber = stringToInt(contractorConfig.GetParameter("Threads")); - if(rawNumber != 0 && rawNumber <= numberOfThreads) - numberOfThreads = rawNumber; - } - omp_set_num_threads(numberOfThreads); - - INFO("Using restrictions from file: " << argv[2]); - std::ifstream 
restrictionsInstream(argv[2], std::ios::binary); - if(!restrictionsInstream.good()) { - ERR("Could not access files"); - } - _Restriction restriction; - unsigned usableRestrictionsCounter(0); - restrictionsInstream.read((char*)&usableRestrictionsCounter, sizeof(unsigned)); - inputRestrictions.resize(usableRestrictionsCounter); - restrictionsInstream.read((char *)&(inputRestrictions[0]), usableRestrictionsCounter*sizeof(_Restriction)); - restrictionsInstream.close(); - - std::ifstream in; - in.open (argv[1], std::ifstream::in | std::ifstream::binary); - if (!in.is_open()) { - ERR("Cannot open " << argv[1]); - } - - std::string nodeOut(argv[1]); nodeOut += ".nodes"; - std::string edgeOut(argv[1]); edgeOut += ".edges"; - std::string graphOut(argv[1]); graphOut += ".hsgr"; - std::string ramIndexOut(argv[1]); ramIndexOut += ".ramIndex"; - std::string fileIndexOut(argv[1]); fileIndexOut += ".fileIndex"; - - /*** Setup Scripting Environment ***/ - if(!testDataFile( (argc > 3 ? argv[3] : "profile.lua") )) { - ERR("Need profile.lua to apply traffic signal penalty"); - } - - // Create a new lua state - lua_State *myLuaState = luaL_newstate(); - - // Connect LuaBind to this lua state - luabind::open(myLuaState); - - //open utility libraries string library; - luaL_openlibs(myLuaState); - - //adjust lua load path - luaAddScriptFolderToLoadPath( myLuaState, (argc > 3 ? argv[3] : "profile.lua") ); - - // Now call our function in a lua script - INFO("Parsing speedprofile from " << (argc > 3 ? argv[3] : "profile.lua") ); - if(0 != luaL_dofile(myLuaState, (argc > 3 ? 
argv[3] : "profile.lua") )) { - ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block"); - } - - EdgeBasedGraphFactory::SpeedProfileProperties speedProfile; - - if(0 != luaL_dostring( myLuaState, "return traffic_signal_penalty\n")) { - ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block"); - } - speedProfile.trafficSignalPenalty = 10*lua_tointeger(myLuaState, -1); - - if(0 != luaL_dostring( myLuaState, "return u_turn_penalty\n")) { - ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block"); - } - speedProfile.uTurnPenalty = 10*lua_tointeger(myLuaState, -1); - - speedProfile.has_turn_penalty_function = lua_function_exists( myLuaState, "turn_function" ); - - std::vector edgeList; - NodeID nodeBasedNodeNumber = readBinaryOSRMGraphFromStream(in, edgeList, bollardNodes, trafficLightNodes, &internalToExternalNodeMapping, inputRestrictions); - in.close(); - INFO(inputRestrictions.size() << " restrictions, " << bollardNodes.size() << " bollard nodes, " << trafficLightNodes.size() << " traffic lights"); - if(0 == edgeList.size()) - ERR("The input data is broken. 
It is impossible to do any turns in this graph"); - - - /*** - * Building an edge-expanded graph from node-based input an turn restrictions - */ - - INFO("Generating edge-expanded graph representation"); - EdgeBasedGraphFactory * edgeBasedGraphFactory = new EdgeBasedGraphFactory (nodeBasedNodeNumber, edgeList, bollardNodes, trafficLightNodes, inputRestrictions, internalToExternalNodeMapping, speedProfile); - std::vector().swap(edgeList); - edgeBasedGraphFactory->Run(edgeOut.c_str(), myLuaState); - std::vector<_Restriction>().swap(inputRestrictions); - std::vector().swap(bollardNodes); - std::vector().swap(trafficLightNodes); - NodeID edgeBasedNodeNumber = edgeBasedGraphFactory->GetNumberOfNodes(); - DeallocatingVector edgeBasedEdgeList; - edgeBasedGraphFactory->GetEdgeBasedEdges(edgeBasedEdgeList); - DeallocatingVector nodeBasedEdgeList; - edgeBasedGraphFactory->GetEdgeBasedNodes(nodeBasedEdgeList); - delete edgeBasedGraphFactory; - - /*** - * Writing info on original (node-based) nodes - */ - - INFO("writing node map ..."); - std::ofstream mapOutFile(nodeOut.c_str(), std::ios::binary); - mapOutFile.write((char *)&(internalToExternalNodeMapping[0]), internalToExternalNodeMapping.size()*sizeof(NodeInfo)); - mapOutFile.close(); - std::vector().swap(internalToExternalNodeMapping); - - double expansionHasFinishedTime = get_timestamp() - startupTime; - - /*** - * Building grid-like nearest-neighbor data structure - */ - - INFO("building grid ..."); - WritableGrid * writeableGrid = new WritableGrid(); - writeableGrid->ConstructGrid(nodeBasedEdgeList, ramIndexOut.c_str(), fileIndexOut.c_str()); - delete writeableGrid; - IteratorbasedCRC32 > crc32; - unsigned crc32OfNodeBasedEdgeList = crc32(nodeBasedEdgeList.begin(), nodeBasedEdgeList.end() ); - nodeBasedEdgeList.clear(); - INFO("CRC32 based checksum is " << crc32OfNodeBasedEdgeList); - - /*** - * Contracting the edge-expanded graph - */ - - INFO("initializing contractor"); - Contractor* contractor = new Contractor( 
edgeBasedNodeNumber, edgeBasedEdgeList ); - double contractionStartedTimestamp(get_timestamp()); - contractor->Run(); - INFO("Contraction took " << get_timestamp() - contractionStartedTimestamp << " sec"); - - DeallocatingVector< QueryEdge > contractedEdgeList; - contractor->GetEdges( contractedEdgeList ); - delete contractor; - - /*** - * Sorting contracted edges in a way that the static query graph can read some in in-place. - */ - - INFO("Building Node Array"); - std::sort(contractedEdgeList.begin(), contractedEdgeList.end()); - unsigned numberOfNodes = 0; - unsigned numberOfEdges = contractedEdgeList.size(); - INFO("Serializing compacted graph"); - std::ofstream edgeOutFile(graphOut.c_str(), std::ios::binary); - - BOOST_FOREACH(const QueryEdge & edge, contractedEdgeList) { - if(edge.source > numberOfNodes) { - numberOfNodes = edge.source; + try { + if(argc < 3) { + ERR("usage: " << std::endl << argv[0] << " []"); } - if(edge.target > numberOfNodes) { - numberOfNodes = edge.target; + + double startupTime = get_timestamp(); + unsigned numberOfThreads = omp_get_num_procs(); + if(testDataFile("contractor.ini")) { + ContractorConfiguration contractorConfig("contractor.ini"); + unsigned rawNumber = stringToInt(contractorConfig.GetParameter("Threads")); + if(rawNumber != 0 && rawNumber <= numberOfThreads) + numberOfThreads = rawNumber; } - } - numberOfNodes+=1; + omp_set_num_threads(numberOfThreads); - std::vector< StaticGraph::_StrNode > _nodes; - _nodes.resize( numberOfNodes + 1 ); + INFO("Using restrictions from file: " << argv[2]); + std::ifstream restrictionsInstream(argv[2], std::ios::binary); + if(!restrictionsInstream.good()) { + ERR("Could not access files"); + } + _Restriction restriction; + unsigned usableRestrictionsCounter(0); + restrictionsInstream.read((char*)&usableRestrictionsCounter, sizeof(unsigned)); + inputRestrictions.resize(usableRestrictionsCounter); + restrictionsInstream.read((char *)&(inputRestrictions[0]), 
usableRestrictionsCounter*sizeof(_Restriction)); + restrictionsInstream.close(); - StaticGraph::EdgeIterator edge = 0; - StaticGraph::EdgeIterator position = 0; - for ( StaticGraph::NodeIterator node = 0; node <= numberOfNodes; ++node ) { - StaticGraph::EdgeIterator lastEdge = edge; - while ( edge < numberOfEdges && contractedEdgeList[edge].source == node ) - ++edge; - _nodes[node].firstEdge = position; //=edge - position += edge - lastEdge; //remove - } - ++numberOfNodes; - //Serialize numberOfNodes, nodes - edgeOutFile.write((char*) &crc32OfNodeBasedEdgeList, sizeof(unsigned)); - edgeOutFile.write((char*) &numberOfNodes, sizeof(unsigned)); - edgeOutFile.write((char*) &_nodes[0], sizeof(StaticGraph::_StrNode)*(numberOfNodes)); - //Serialize number of Edges - edgeOutFile.write((char*) &position, sizeof(unsigned)); - --numberOfNodes; - edge = 0; - int usedEdgeCounter = 0; - StaticGraph::_StrEdge currentEdge; - for ( StaticGraph::NodeIterator node = 0; node < numberOfNodes; ++node ) { - for ( StaticGraph::EdgeIterator i = _nodes[node].firstEdge, e = _nodes[node+1].firstEdge; i != e; ++i ) { - assert(node != contractedEdgeList[edge].target); - currentEdge.target = contractedEdgeList[edge].target; - currentEdge.data = contractedEdgeList[edge].data; - if(currentEdge.data.distance <= 0) { - INFO("Edge: " << i << ",source: " << contractedEdgeList[edge].source << ", target: " << contractedEdgeList[edge].target << ", dist: " << currentEdge.data.distance); - ERR("Failed at edges of node " << node << " of " << numberOfNodes); + std::ifstream in; + in.open (argv[1], std::ifstream::in | std::ifstream::binary); + if (!in.is_open()) { + ERR("Cannot open " << argv[1]); + } + + std::string nodeOut(argv[1]); nodeOut += ".nodes"; + std::string edgeOut(argv[1]); edgeOut += ".edges"; + std::string graphOut(argv[1]); graphOut += ".hsgr"; + std::string rtree_nodes_path(argv[1]); rtree_nodes_path += ".ramIndex"; + std::string rtree_leafs_path(argv[1]); rtree_leafs_path += ".fileIndex"; + 
+ /*** Setup Scripting Environment ***/ + if(!testDataFile( (argc > 3 ? argv[3] : "profile.lua") )) { + ERR("Need profile.lua to apply traffic signal penalty"); + } + + // Create a new lua state + lua_State *myLuaState = luaL_newstate(); + + // Connect LuaBind to this lua state + luabind::open(myLuaState); + + //open utility libraries string library; + luaL_openlibs(myLuaState); + + //adjust lua load path + luaAddScriptFolderToLoadPath( myLuaState, (argc > 3 ? argv[3] : "profile.lua") ); + + // Now call our function in a lua script + INFO("Parsing speedprofile from " << (argc > 3 ? argv[3] : "profile.lua") ); + if(0 != luaL_dofile(myLuaState, (argc > 3 ? argv[3] : "profile.lua") )) { + ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block"); + } + + EdgeBasedGraphFactory::SpeedProfileProperties speedProfile; + + if(0 != luaL_dostring( myLuaState, "return traffic_signal_penalty\n")) { + ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block"); + } + speedProfile.trafficSignalPenalty = 10*lua_tointeger(myLuaState, -1); + + if(0 != luaL_dostring( myLuaState, "return u_turn_penalty\n")) { + ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block"); + } + speedProfile.uTurnPenalty = 10*lua_tointeger(myLuaState, -1); + + speedProfile.has_turn_penalty_function = lua_function_exists( myLuaState, "turn_function" ); + + std::vector edgeList; + NodeID nodeBasedNodeNumber = readBinaryOSRMGraphFromStream(in, edgeList, bollardNodes, trafficLightNodes, &internalToExternalNodeMapping, inputRestrictions); + in.close(); + INFO(inputRestrictions.size() << " restrictions, " << bollardNodes.size() << " bollard nodes, " << trafficLightNodes.size() << " traffic lights"); + if(0 == edgeList.size()) + ERR("The input data is broken. 
It is impossible to do any turns in this graph"); + + + /*** + * Building an edge-expanded graph from node-based input an turn restrictions + */ + + INFO("Generating edge-expanded graph representation"); + EdgeBasedGraphFactory * edgeBasedGraphFactory = new EdgeBasedGraphFactory (nodeBasedNodeNumber, edgeList, bollardNodes, trafficLightNodes, inputRestrictions, internalToExternalNodeMapping, speedProfile); + std::vector().swap(edgeList); + edgeBasedGraphFactory->Run(edgeOut.c_str(), myLuaState); + std::vector<_Restriction>().swap(inputRestrictions); + std::vector().swap(bollardNodes); + std::vector().swap(trafficLightNodes); + NodeID edgeBasedNodeNumber = edgeBasedGraphFactory->GetNumberOfNodes(); + DeallocatingVector edgeBasedEdgeList; + edgeBasedGraphFactory->GetEdgeBasedEdges(edgeBasedEdgeList); + std::vector nodeBasedEdgeList; + edgeBasedGraphFactory->GetEdgeBasedNodes(nodeBasedEdgeList); + delete edgeBasedGraphFactory; + + /*** + * Writing info on original (node-based) nodes + */ + + INFO("writing node map ..."); + std::ofstream mapOutFile(nodeOut.c_str(), std::ios::binary); + mapOutFile.write((char *)&(internalToExternalNodeMapping[0]), internalToExternalNodeMapping.size()*sizeof(NodeInfo)); + mapOutFile.close(); + std::vector().swap(internalToExternalNodeMapping); + + double expansionHasFinishedTime = get_timestamp() - startupTime; + + /*** + * Building grid-like nearest-neighbor data structure + */ + + INFO("building r-tree ..."); + StaticRTree * rtree = + new StaticRTree( + nodeBasedEdgeList, + rtree_nodes_path.c_str(), + rtree_leafs_path.c_str() + ); + delete rtree; + IteratorbasedCRC32 > crc32; + unsigned crc32OfNodeBasedEdgeList = crc32(nodeBasedEdgeList.begin(), nodeBasedEdgeList.end() ); + nodeBasedEdgeList.clear(); + INFO("CRC32 based checksum is " << crc32OfNodeBasedEdgeList); + + /*** + * Contracting the edge-expanded graph + */ + + INFO("initializing contractor"); + Contractor* contractor = new Contractor( edgeBasedNodeNumber, edgeBasedEdgeList ); 
+ double contractionStartedTimestamp(get_timestamp()); + contractor->Run(); + INFO("Contraction took " << get_timestamp() - contractionStartedTimestamp << " sec"); + + DeallocatingVector< QueryEdge > contractedEdgeList; + contractor->GetEdges( contractedEdgeList ); + delete contractor; + + /*** + * Sorting contracted edges in a way that the static query graph can read some in in-place. + */ + + INFO("Building Node Array"); + std::sort(contractedEdgeList.begin(), contractedEdgeList.end()); + unsigned numberOfNodes = 0; + unsigned numberOfEdges = contractedEdgeList.size(); + INFO("Serializing compacted graph of " << numberOfEdges << " edges"); + std::ofstream edgeOutFile(graphOut.c_str(), std::ios::binary); + + BOOST_FOREACH(const QueryEdge & edge, contractedEdgeList) { + if(edge.source > numberOfNodes) { + numberOfNodes = edge.source; + } + if(edge.target > numberOfNodes) { + numberOfNodes = edge.target; } - //Serialize edges - edgeOutFile.write((char*) ¤tEdge, sizeof(StaticGraph::_StrEdge)); - ++edge; - ++usedEdgeCounter; } - } - double endTime = (get_timestamp() - startupTime); - INFO("Expansion : " << (nodeBasedNodeNumber/expansionHasFinishedTime) << " nodes/sec and "<< (edgeBasedNodeNumber/expansionHasFinishedTime) << " edges/sec"); - INFO("Contraction: " << (edgeBasedNodeNumber/expansionHasFinishedTime) << " nodes/sec and "<< usedEdgeCounter/endTime << " edges/sec"); + numberOfNodes+=1; - edgeOutFile.close(); - //cleanedEdgeList.clear(); - _nodes.clear(); - INFO("finished preprocessing"); + std::vector< StaticGraph::_StrNode > _nodes; + _nodes.resize( numberOfNodes + 1 ); + + StaticGraph::EdgeIterator edge = 0; + StaticGraph::EdgeIterator position = 0; + for ( StaticGraph::NodeIterator node = 0; node <= numberOfNodes; ++node ) { + StaticGraph::EdgeIterator lastEdge = edge; + while ( edge < numberOfEdges && contractedEdgeList[edge].source == node ) + ++edge; + _nodes[node].firstEdge = position; //=edge + position += edge - lastEdge; //remove + } + 
++numberOfNodes; + //Serialize numberOfNodes, nodes + edgeOutFile.write((char*) &crc32OfNodeBasedEdgeList, sizeof(unsigned)); + edgeOutFile.write((char*) &numberOfNodes, sizeof(unsigned)); + edgeOutFile.write((char*) &_nodes[0], sizeof(StaticGraph::_StrNode)*(numberOfNodes)); + //Serialize number of Edges + edgeOutFile.write((char*) &position, sizeof(unsigned)); + --numberOfNodes; + edge = 0; + int usedEdgeCounter = 0; + StaticGraph::_StrEdge currentEdge; + for ( StaticGraph::NodeIterator node = 0; node < numberOfNodes; ++node ) { + for ( StaticGraph::EdgeIterator i = _nodes[node].firstEdge, e = _nodes[node+1].firstEdge; i != e; ++i ) { + assert(node != contractedEdgeList[edge].target); + currentEdge.target = contractedEdgeList[edge].target; + currentEdge.data = contractedEdgeList[edge].data; + if(currentEdge.data.distance <= 0) { + INFO("Edge: " << i << ",source: " << contractedEdgeList[edge].source << ", target: " << contractedEdgeList[edge].target << ", dist: " << currentEdge.data.distance); + ERR("Failed at edges of node " << node << " of " << numberOfNodes); + } + //Serialize edges + edgeOutFile.write((char*) ¤tEdge, sizeof(StaticGraph::_StrEdge)); + ++edge; + ++usedEdgeCounter; + } + } + double endTime = (get_timestamp() - startupTime); + INFO("Expansion : " << (nodeBasedNodeNumber/expansionHasFinishedTime) << " nodes/sec and "<< (edgeBasedNodeNumber/expansionHasFinishedTime) << " edges/sec"); + INFO("Contraction: " << (edgeBasedNodeNumber/expansionHasFinishedTime) << " nodes/sec and "<< usedEdgeCounter/endTime << " edges/sec"); + + edgeOutFile.close(); + //cleanedEdgeList.clear(); + _nodes.clear(); + INFO("finished preprocessing"); + } catch (std::exception &e) { + ERR("Exception occured: " << e.what()); + } return 0; } diff --git a/extractor.cpp b/extractor.cpp index 4912c9b67..3beeef510 100644 --- a/extractor.cpp +++ b/extractor.cpp @@ -18,12 +18,6 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA or see 
http://www.gnu.org/licenses/agpl.txt. */ -#include -#include -#include -#include - -#include "typedefs.h" #include "Extractor/ExtractorCallbacks.h" #include "Extractor/ExtractionContainers.h" #include "Extractor/ScriptingEnvironment.h" @@ -34,6 +28,13 @@ or see http://www.gnu.org/licenses/agpl.txt. #include "Util/MachineInfo.h" #include "Util/OpenMPWrapper.h" #include "Util/StringUtil.h" +#include "typedefs.h" + +#include +#include +#include + +#include typedef BaseConfiguration ExtractorConfiguration; @@ -84,11 +85,11 @@ int main (int argc, char *argv[]) { } unsigned amountOfRAM = 1; - unsigned installedRAM = GetPhysicalmemory(); + unsigned installedRAM = GetPhysicalmemory(); if(installedRAM < 2048264) { WARN("Machine has less than 2GB RAM."); } - + StringMap stringMap; ExtractionContainers externalMemory; @@ -100,7 +101,7 @@ int main (int argc, char *argv[]) { } else { parser = new XMLParser(argv[1], extractCallBacks, scriptingEnvironment); } - + if(!parser->ReadHeader()) { ERR("Parser not initialized!"); } diff --git a/features/bicycle/area.feature b/features/bicycle/area.feature index 0be8235d8..541f02631 100644 --- a/features/bicycle/area.feature +++ b/features/bicycle/area.feature @@ -41,14 +41,14 @@ Feature: Bike - Squares and other areas When I route I should get | from | to | route | - | a | b | | - | a | d | | - | b | c | | - | c | b | | - | c | d | | - | d | c | | - | d | a | | - | a | d | | + | a | b | xa | + | a | d | xa | + | b | c | xa | + | c | b | xa | + | c | d | xa | + | d | c | xa | + | d | a | xa | + | a | d | xa | @parking Scenario: Bike - parking areas @@ -100,4 +100,4 @@ Feature: Bike - Squares and other areas | c | d | abcda | | d | c | abcda | | d | a | abcda | - | a | d | abcda | + | a | d | abcda | \ No newline at end of file diff --git a/features/bicycle/mode.feature b/features/bicycle/mode.feature new file mode 100644 index 000000000..c24af4686 --- /dev/null +++ b/features/bicycle/mode.feature @@ -0,0 +1,89 @@ +@routing @bicycle @mode 
+Feature: Bike - Mode flag + + Background: + Given the profile "bicycle" + + @todo + Scenario: Bike - Mode when using a ferry + Given the node map + | a | b | | + | | c | d | + + And the ways + | nodes | highway | route | duration | + | ab | primary | | | + | bc | | ferry | 0:01 | + | cd | primary | | | + + When I route I should get + | from | to | route | turns | modes | + | a | d | ab,bc,cd | head,right,left, destination | bike,ferry,bike | + | d | a | cd,bc,ab | head,right,left, destination | bike,ferry,bike | + | c | a | bc,ab | head,left,destination | ferry,bike | + | d | b | cd,bc | head,right,destination | bike,ferry | + | a | c | ab,bc | head,right,destination | bike,ferry | + | b | d | bc,cd | head,left,destination | ferry,bike | + + @todo + Scenario: Bike - Mode when pushing bike against oneways + Given the node map + | a | b | | + | | c | d | + + And the ways + | nodes | highway | oneway | + | ab | primary | | + | bc | primary | yes | + | cd | primary | | + + When I route I should get + | from | to | route | turns | modes | + | a | d | ab,bc,cd | head,right,left,destination | bike,push,bike | + | d | a | cd,bc,ab | head,right,left,destination | bike,push,bike | + | c | a | bc,ab | head,left,destination | push,bike | + | d | b | cd,bc | head,right,destination | bike,push | + | a | c | ab,bc | head,right,destination | bike,push | + | b | d | bc,cd | head,left,destination | push,bike | + + @todo + Scenario: Bike - Mode when pushing on pedestrain streets + Given the node map + | a | b | | + | | c | d | + + And the ways + | nodes | highway | + | ab | primary | + | bc | pedestrian | + | cd | primary | + + When I route I should get + | from | to | route | turns | modes | + | a | d | ab,bc,cd | head,right,left,destination | bike,push,bike | + | d | a | cd,bc,ab | head,right,left,destination | bike,push,bike | + | c | a | bc,ab | head,left,destination | push,bike | + | d | b | cd,bc | head,right,destination | bike,push | + | a | c | ab,bc | head,right,destination 
| bike,push | + | b | d | bc,cd | head,left,destination | push,bike | + + @todo + Scenario: Bike - Mode when pushing on pedestrain areas + Given the node map + | a | b | | | + | | c | d | f | + + And the ways + | nodes | highway | area | + | ab | primary | | + | bcd | pedestrian | yes | + | df | primary | | + + When I route I should get + | from | to | route | modes | + | a | f | ab,bcd,df | bike,push,bike | + | f | a | df,bcd,ab | bike,push,bike | + | d | a | bcd,ab | push,bike | + | f | b | df,bcd | bike,push | + | a | d | ab,bcd | bike,push | + | b | f | bcd,df | push,bike | diff --git a/features/bicycle/names.feature b/features/bicycle/names.feature index d1912472f..d8a1ecf1e 100644 --- a/features/bicycle/names.feature +++ b/features/bicycle/names.feature @@ -10,14 +10,15 @@ Feature: Bike - Street names in instructions | | c | And the ways - | nodes | name | - | ab | My Way | + | nodes | name | + | ab | My Way | | bc | Your Way | When I route I should get | from | to | route | | a | c | My Way,Your Way | - + + @unnamed Scenario: Bike - Use way type to describe unnamed ways Given the node map | a | b | c | d | @@ -25,8 +26,24 @@ Feature: Bike - Street names in instructions And the ways | nodes | highway | name | | ab | cycleway | | - | bcd | track | | + | bcd | track | | When I route I should get - | from | to | route | - | a | d | cycleway,track | \ No newline at end of file + | from | to | route | + | a | d | {highway:cycleway},{highway:track} | + + @area @names @todo + Scenario: Bike - name on streets overlapping an area + Given the node map + | x | a | b | y | + | | d | c | | + + And the ways + | nodes | highway | area | + | xaby | residential | | + | abcda | residential | yes | + + When I route I should get + | from | to | route | + | x | y | xaby | + | y | x | xaby | diff --git a/features/bicycle/oneway.feature b/features/bicycle/oneway.feature index 6a9d3bf02..ee0d7131f 100644 --- a/features/bicycle/oneway.feature +++ b/features/bicycle/oneway.feature @@ 
-112,3 +112,17 @@ Usually we can push bikes against oneways, but we use foot=no to prevent this in | no | roundabout | | yes | x | | | no | roundabout | | no | x | | | no | roundabout | | -1 | x | | + + Scenario: Bike - Two consecutive oneways + Given the node map + | a | b | c | + + And the ways + | nodes | oneway | + | ab | yes | + | bc | yes | + + + When I route I should get + | from | to | route | + | a | c | ab,bc | \ No newline at end of file diff --git a/features/bicycle/pushing.feature b/features/bicycle/pushing.feature index 169d340e3..13590d07b 100644 --- a/features/bicycle/pushing.feature +++ b/features/bicycle/pushing.feature @@ -85,3 +85,40 @@ Feature: Bike - Accessability of different way types Then routability should be | junction | forw | backw | | roundabout | x | | + + Scenario: Bike - Instructions when pushing bike on oneways + Given the node map + | a | b | | + | | c | d | + + And the ways + | nodes | highway | oneway | + | ab | primary | | + | bc | primary | yes | + | cd | primary | | + + When I route I should get + | from | to | route | turns | + | a | d | ab,bc,cd | head,right,left,destination | + | d | a | cd,bc,ab | head,enter_contraflow,leave_contraflow,destination | + | c | a | bc,ab | head,leave_contraflow,destination | + | d | b | cd,bc | head,enter_contraflow,destination | + + @todo + Scenario: Bike - Instructions when pushing bike on footway/pedestrian, etc. 
+ Given the node map + | a | b | | + | | c | d | + + And the ways + | nodes | highway | + | ab | primary | + | bc | footway | + | cd | primary | + + When I route I should get + | from | to | route | turns | + | a | d | ab,bc,cd | head,right,left,destination | + | d | a | cd,bc,ab | head,enter_contraflow,leave_contraflow,destination | + | c | a | bc,ab | head,leave_contraflow,destination | + | d | b | cd,bc | head,enter_contraflow,destination | \ No newline at end of file diff --git a/features/bicycle/ref.feature b/features/bicycle/ref.feature new file mode 100644 index 000000000..da1585902 --- /dev/null +++ b/features/bicycle/ref.feature @@ -0,0 +1,41 @@ +@routing @bicycle @ref @name +Feature: Bike - Way ref + + Background: + Given the profile "bicycle" + + Scenario: Bike - Way with both name and ref + Given the node map + | a | b | + + And the ways + | nodes | name | ref | + | ab | Utopia Drive | E7 | + + When I route I should get + | from | to | route | + | a | b | Utopia Drive / E7 | + + Scenario: Bike - Way with only ref + Given the node map + | a | b | + + And the ways + | nodes | name | ref | + | ab | | E7 | + + When I route I should get + | from | to | route | + | a | b | E7 | + + Scenario: Bike - Way with only name + Given the node map + | a | b | + + And the ways + | nodes | name | + | ab | Utopia Drive | + + When I route I should get + | from | to | route | + | a | b | Utopia Drive | diff --git a/features/car/maxspeed.feature b/features/car/maxspeed.feature index 7da5088f4..3e09a0ad3 100644 --- a/features/car/maxspeed.feature +++ b/features/car/maxspeed.feature @@ -31,7 +31,7 @@ Feature: Car - Max speed restrictions When I route I should get | from | to | route | time | | a | b | ab | 144s ~10% | - | b | c | bc | 63s ~10% | + | b | c | bc | 42s ~10% | Scenario: Car - Forward/backward maxspeed Given the shortcuts diff --git a/features/car/oneway.feature b/features/car/oneway.feature index 0d906c06c..b18108539 100644 --- a/features/car/oneway.feature +++ 
b/features/car/oneway.feature @@ -56,4 +56,18 @@ Handle oneways streets, as defined at http://wiki.openstreetmap.org/wiki/OSM_tag | primary | | -1 | -1 | | x | | primary | roundabout | | yes | x | | | primary | roundabout | | no | x | | - | primary | roundabout | | -1 | x | | \ No newline at end of file + | primary | roundabout | | -1 | x | | + + Scenario: Car - Two consecutive oneways + Given the node map + | a | b | c | + + And the ways + | nodes | oneway | + | ab | yes | + | bc | yes | + + + When I route I should get + | from | to | route | + | a | c | ab,bc | \ No newline at end of file diff --git a/features/car/shuttle_train.feature b/features/car/shuttle_train.feature index ed2187c27..e58ffa210 100644 --- a/features/car/shuttle_train.feature +++ b/features/car/shuttle_train.feature @@ -3,23 +3,24 @@ Feature: Car - Handle ferryshuttle train routes Background: Given the profile "car" - + Scenario: Car - Use a ferry route Given the node map - | a | b | c | | | - | | | d | | | - | | | e | f | g | - + | a | b | c | | | | + | | | d | | | | + | | | e | f | g | h | + And the ways | nodes | highway | route | bicycle | | abc | primary | | | | cde | | shuttle_train | yes | | ef | primary | | | | fg | | ferry_man | | - + | gh | primary | | no | + When I route I should get | from | to | route | - | a | g | abc,cde,ef | + | a | f | abc,cde,ef | | b | f | abc,cde,ef | | e | c | cde | | e | b | cde,abc | @@ -27,5 +28,6 @@ Feature: Car - Handle ferryshuttle train routes | c | e | cde | | c | f | cde,ef | | f | g | | + | g | h | gh | diff --git a/features/step_definitions/data.rb b/features/step_definitions/data.rb index c16adaacb..1e021b6ae 100644 --- a/features/step_definitions/data.rb +++ b/features/step_definitions/data.rb @@ -18,12 +18,13 @@ Given /^the node map$/ do |table| unless name.empty? 
raise "*** node invalid name '#{name}', must be single characters" unless name.size == 1 raise "*** invalid node name '#{name}', must me alphanumeric" unless name.match /[a-z0-9]/ - raise "*** duplicate node '#{name}'" if name_node_hash[name] - node = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, ORIGIN[0]+ci*@zoom, ORIGIN[1]-ri*@zoom - node << { :name => name } - node.uid = OSM_UID - osm_db << node - name_node_hash[name] = node + if name.match /[a-z]/ + raise "*** duplicate node '#{name}'" if name_node_hash[name] + add_osm_node name, *table_coord_to_lonlat(ci,ri) + else + raise "*** duplicate node '#{name}'" if location_hash[name] + add_location name, *table_coord_to_lonlat(ci,ri) + end end end end @@ -32,21 +33,18 @@ end Given /^the node locations$/ do |table| table.hashes.each do |row| name = row['node'] - raise "*** node invalid name '#{name}', must be single characters" unless name.size == 1 - raise "*** invalid node name '#{name}', must me alphanumeric" unless name.match /[a-z0-9]/ - raise "*** duplicate node '#{name}'" if name_node_hash[name] - node = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, row['lon'].to_f, row['lat'].to_f - node << { :name => name } - node.uid = OSM_UID - osm_db << node - name_node_hash[name] = node + raise "*** duplicate node '#{name}'" if find_node_by_name name + if name.match /[a-z]/ + add_osm_node name, row['lon'].to_f, row['lat'].to_f + else + add_location name, row['lon'].to_f, row['lat'].to_f + end end end Given /^the nodes$/ do |table| table.hashes.each do |row| name = row.delete 'node' - raise "***invalid node name '#{c}', must be single characters" unless name.size == 1 node = find_node_by_name(name) raise "*** unknown node '#{c}'" unless node node << row @@ -61,8 +59,7 @@ Given /^the ways$/ do |table| nodes = row.delete 'nodes' raise "*** duplicate way '#{nodes}'" if name_way_hash[nodes] nodes.each_char do |c| - raise "***invalid node name '#{c}', must be single characters" unless c.size == 1 - raise "*** ways 
cannot use numbered nodes, '#{name}'" unless c.match /[a-z]/ + raise "*** ways can only use names a-z, '#{name}'" unless c.match /[a-z]/ node = find_node_by_name(c) raise "*** unknown node '#{c}'" unless node way << node diff --git a/features/step_definitions/nearest.rb b/features/step_definitions/nearest.rb index 303788b47..3b65792fe 100644 --- a/features/step_definitions/nearest.rb +++ b/features/step_definitions/nearest.rb @@ -3,10 +3,10 @@ When /^I request nearest I should get$/ do |table| actual = [] OSRMLauncher.new do table.hashes.each_with_index do |row,ri| - in_node = @name_node_hash[ row['in'] ] + in_node = find_node_by_name row['in'] raise "*** unknown in-node '#{row['in']}" unless in_node - out_node = @name_node_hash[ row['out'] ] + out_node = find_node_by_name row['out'] raise "*** unknown out-node '#{row['out']}" unless out_node response = request_nearest("#{in_node.lat},#{in_node.lon}") @@ -42,10 +42,10 @@ When /^I request nearest I should get$/ do |table| table.routing_diff! actual end -When /^I route (\d+) times I should get$/ do |n,table| +When /^I request nearest (\d+) times I should get$/ do |n,table| ok = true n.to_i.times do - ok = false unless step "I route I should get", table + ok = false unless step "I request nearest I should get", table end ok end \ No newline at end of file diff --git a/features/step_definitions/routability.rb b/features/step_definitions/routability.rb index 0af027a98..078428aaf 100644 --- a/features/step_definitions/routability.rb +++ b/features/step_definitions/routability.rb @@ -12,9 +12,13 @@ Then /^routability should be$/ do |table| ['forw','backw','bothw'].each do |direction| if table.headers.include? 
direction if direction == 'forw' || direction == 'bothw' - response = request_route("#{ORIGIN[1]},#{ORIGIN[0]+(1+WAY_SPACING*i)*@zoom}","#{ORIGIN[1]},#{ORIGIN[0]+(3+WAY_SPACING*i)*@zoom}") + a = Location.new ORIGIN[0]+(1+WAY_SPACING*i)*@zoom, ORIGIN[1] + b = Location.new ORIGIN[0]+(3+WAY_SPACING*i)*@zoom, ORIGIN[1] + response = request_route [a,b] elsif direction == 'backw' || direction == 'bothw' - response = request_route("#{ORIGIN[1]},#{ORIGIN[0]+(3+WAY_SPACING*i)*@zoom}","#{ORIGIN[1]},#{ORIGIN[0]+(1+WAY_SPACING*i)*@zoom}") + a = Location.new ORIGIN[0]+(3+WAY_SPACING*i)*@zoom, ORIGIN[1] + b = Location.new ORIGIN[0]+(1+WAY_SPACING*i)*@zoom, ORIGIN[1] + response = request_route [a,b] end want = shortcuts_hash[row[direction]] || row[direction] #expand shortcuts got[direction] = route_status response @@ -22,7 +26,11 @@ Then /^routability should be$/ do |table| if got[direction].empty? == false route = way_list json['route_instructions'] if route != "w#{i}" - got[direction] = "testing w#{i}, but got #{route}!?" + if row[direction].empty? == true + got[direction] = want + else + got[direction] = "testing w#{i}, but got #{route}!?" 
+ end elsif want =~ /^\d+s/ time = json['route_summary']['total_time'] got[direction] = "#{time}s" diff --git a/features/step_definitions/routing.rb b/features/step_definitions/routing.rb index 7ef5ba756..13c8b6a04 100644 --- a/features/step_definitions/routing.rb +++ b/features/step_definitions/routing.rb @@ -3,11 +3,41 @@ When /^I route I should get$/ do |table| actual = [] OSRMLauncher.new do table.hashes.each_with_index do |row,ri| - from_node = @name_node_hash[ row['from'] ] - raise "*** unknown from-node '#{row['from']}" unless from_node - to_node = @name_node_hash[ row['to'] ] - raise "*** unknown to-node '#{row['to']}" unless to_node - response = request_route("#{from_node.lat},#{from_node.lon}", "#{to_node.lat},#{to_node.lon}") + waypoints = [] + if row['from'] and row['to'] + node = find_node_by_name(row['from']) + raise "*** unknown from-node '#{row['from']}" unless node + waypoints << node + + node = find_node_by_name(row['to']) + raise "*** unknown to-node '#{row['to']}" unless node + waypoints << node + + got = {'from' => row['from'], 'to' => row['to'] } + elsif row['waypoints'] + row['waypoints'].split(',').each do |n| + node = find_node_by_name(n.strip) + raise "*** unknown waypoint node '#{n.strip}" unless node + waypoints << node + end + got = {'waypoints' => row['waypoints'] } + else + raise "*** no waypoints" + end + + params = {} + row.each_pair do |k,v| + if k =~ /param:(.*)/ + if v=='(nil)' + params[$1]=nil + elsif v!=nil + params[$1]=v + end + got[k]=v + end + end + + response = request_route(waypoints, params) if response.code == "200" && response.body.empty? 
== false json = JSON.parse response.body if json['status'] == 0 @@ -15,10 +45,10 @@ When /^I route I should get$/ do |table| bearings = bearing_list json['route_instructions'] compasses = compass_list json['route_instructions'] turns = turn_list json['route_instructions'] + modes = mode_list json['route_instructions'] end end - got = {'from' => row['from'], 'to' => row['to'] } if table.headers.include? 'start' got['start'] = instructions ? json['route_summary']['start_point'] : nil end @@ -46,6 +76,9 @@ When /^I route I should get$/ do |table| if table.headers.include? 'turns' got['turns'] = turns end + if table.headers.include? 'modes' + got['modes'] = modes + end if table.headers.include? '#' # comment column got['#'] = row['#'] # copy value so it always match end diff --git a/features/support/data.rb b/features/support/data.rb index ce6b995c1..dbb0398b8 100644 --- a/features/support/data.rb +++ b/features/support/data.rb @@ -14,9 +14,19 @@ DEFAULT_SPEEDPROFILE = 'bicycle' WAY_SPACING = 100 DEFAULT_GRID_SIZE = 100 #meters PROFILES_PATH = '../profiles' +BIN_PATH = '../build' ORIGIN = [1,1] +class Location + attr_accessor :lon,:lat + + def initialize lon,lat + @lat = lat + @lon = lon + end +end + def sanitized_scenario_title @sanitized_scenario_title ||= @scenario_title.gsub /[^0-9A-Za-z.\-]/, '_' end @@ -32,10 +42,10 @@ def build_ways_from_table table #add one unconnected way for each row table.hashes.each_with_index do |row,ri| #NOTE: - #currently osrm crashes when processing an isolated oneway with just 2 nodes, so we use 4 - #this is relatated to the fact that a oneway deadend doesn't make a lot of sense + #currently osrm crashes when processing an isolated oneway with just 2 nodes, so we use 4 edges + #this is relatated to the fact that a oneway dead-end street doesn't make a lot of sense - #if we stack ways on different x coordinates, outability tests get messed up, because osrm might pick a neighboring way if the one test can't be used. 
+ #if we stack ways on different x coordinates, routability tests get messed up, because osrm might pick a neighboring way if the one test can't be used. #instead we place all lines as a string on the same y coordinate. this prevents using neightboring ways. #a few nodes... @@ -110,8 +120,30 @@ def build_ways_from_table table end end +def table_coord_to_lonlat ci,ri + [ORIGIN[0]+ci*@zoom, ORIGIN[1]-ri*@zoom] +end + +def add_osm_node name,lon,lat + node = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, lon, lat + node << { :name => name } + node.uid = OSM_UID + osm_db << node + name_node_hash[name] = node +end + +def add_location name,lon,lat + location_hash[name] = Location.new(lon,lat) +end + def find_node_by_name s - name_node_hash[s.to_s] + raise "***invalid node name '#{s}', must be single characters" unless s.size == 1 + raise "*** invalid node name '#{s}', must be alphanumeric" unless s.match /[a-z0-9]/ + if s.match /[a-z]/ + from_node = name_node_hash[ s.to_s ] + else + from_node = location_hash[ s.to_s ] + end end def find_way_by_name s @@ -135,6 +167,7 @@ end def reset_osm osm_db.clear name_node_hash.clear + location_hash.clear name_way_hash.clear @osm_str = nil @osm_hash = nil @@ -157,6 +190,10 @@ def name_node_hash @name_node_hash ||= {} end +def location_hash + @location_hash ||= {} +end + def name_way_hash @name_way_hash ||= {} end @@ -213,7 +250,7 @@ def reprocess unless extracted? log_preprocess_info log "== Extracting #{@osm_file}.osm...", :preprocess - unless system "../osrm-extract #{@osm_file}.osm#{'.pbf' if use_pbf} 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} #{PROFILES_PATH}/#{@profile}.lua" + unless system "#{BIN_PATH}/osrm-extract #{@osm_file}.osm#{'.pbf' if use_pbf} 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} #{PROFILES_PATH}/#{@profile}.lua" log "*** Exited with code #{$?.exitstatus}.", :preprocess raise ExtractError.new $?.exitstatus, "osrm-extract exited with code #{$?.exitstatus}." 
end @@ -222,7 +259,7 @@ def reprocess unless prepared? log_preprocess_info log "== Preparing #{@osm_file}.osm...", :preprocess - unless system "../osrm-prepare #{@osm_file}.osrm #{@osm_file}.osrm.restrictions 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} #{PROFILES_PATH}/#{@profile}.lua" + unless system "#{BIN_PATH}/osrm-prepare #{@osm_file}.osrm #{@osm_file}.osrm.restrictions 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} #{PROFILES_PATH}/#{@profile}.lua" log "*** Exited with code #{$?.exitstatus}.", :preprocess raise PrepareError.new $?.exitstatus, "osrm-prepare exited with code #{$?.exitstatus}." end diff --git a/features/support/fuzzy.rb b/features/support/fuzzy.rb index 9138dcce5..611d1efec 100644 --- a/features/support/fuzzy.rb +++ b/features/support/fuzzy.rb @@ -22,8 +22,8 @@ class FuzzyMatch end def self.match_location got, want - match( got[0], "#{want.lat} ~0.002%" ) && - match( got[1], "#{want.lon} ~0.002%" ) + match( got[0], "#{want.lat} ~0.0025%" ) && + match( got[1], "#{want.lon} ~0.0025%" ) end end diff --git a/features/support/hash.rb b/features/support/hash.rb index 83e5f916d..e37a6f2df 100644 --- a/features/support/hash.rb +++ b/features/support/hash.rb @@ -29,15 +29,15 @@ def lua_lib_hash end def bin_extract_hash - @@bin_extract_hash ||= hash_of_files '../osrm-extract' + @@bin_extract_hash ||= hash_of_files "#{BIN_PATH}/osrm-extract" end def bin_prepare_hash - @@bin_prepare_hash ||= hash_of_files '../osrm-prepare' + @@bin_prepare_hash ||= hash_of_files "#{BIN_PATH}/osrm-prepare" end def bin_routed_hash - @@bin_routed_hash ||= hash_of_files '../osrm-routed' + @@bin_routed_hash ||= hash_of_files "#{BIN_PATH}/osrm-routed" end #combine state of data, profile and binaries into a hash that identifies the exact test scenario diff --git a/features/support/launch.rb b/features/support/launch.rb index 8df2f27f5..e4252d893 100644 --- a/features/support/launch.rb +++ b/features/support/launch.rb @@ -48,7 +48,7 @@ class OSRMLauncher def osrm_up 
return if osrm_up? - @pid = Process.spawn(['../osrm-routed',''],:out=>OSRM_ROUTED_LOG_FILE, :err=>OSRM_ROUTED_LOG_FILE) + @pid = Process.spawn(["#{BIN_PATH}/osrm-routed",''],:out=>OSRM_ROUTED_LOG_FILE, :err=>OSRM_ROUTED_LOG_FILE) end def osrm_down diff --git a/features/support/route.rb b/features/support/route.rb index 8be4e9219..9cfbbfa14 100644 --- a/features/support/route.rb +++ b/features/support/route.rb @@ -4,10 +4,19 @@ HOST = "http://localhost:#{OSRM_PORT}" REQUEST_TIMEOUT = 1 DESTINATION_REACHED = 15 #OSRM instruction code +class Hash + def to_param(namespace = nil) + collect do |key, value| + "#{key}=#{value}" + end.sort + end +end -def request_path path - @query = path - uri = URI.parse "#{HOST}/#{path}" +def request_path path, waypoints=[], options={} + locs = waypoints.compact.map { |w| "loc=#{w.lat},#{w.lon}" } + params = (locs + options.to_param).join('&') + params = nil if params=="" + uri = URI.parse ["#{HOST}/#{path}", params].compact.join('?') Timeout.timeout(REQUEST_TIMEOUT) do Net::HTTP.get_response uri end @@ -17,8 +26,9 @@ rescue Timeout::Error raise "*** osrm-routed did not respond." end -def request_route a,b - request_path "viaroute?loc=#{a}&loc=#{b}&output=json&instructions=true&alt=true" +def request_route waypoints, params={} + defaults = { 'output' => 'json', 'instructions' => true, 'alt' => false } + request_path "viaroute", waypoints, defaults.merge(params) end def parse_response response @@ -108,9 +118,18 @@ def turn_list instructions 12 => :leave_roundabout, 13 => :stay_roundabout, 14 => :start_end_of_street, - 15 => :destination + 15 => :destination, + 16 => :enter_contraflow, + 17 => :leave_contraflow } instructions. map { |r| types[r[0].to_i].to_s }. join(',') end + +def mode_list instructions + instructions.reject { |r| r[0].to_s=="#{DESTINATION_REACHED}" }. + map { |r| r[8] }. + map { |r| (r=="" || r==nil) ? '""' : r }. 
+ join(',') +end \ No newline at end of file diff --git a/features/testbot/bad.feature b/features/testbot/bad.feature index 77a660776..8fb370376 100644 --- a/features/testbot/bad.feature +++ b/features/testbot/bad.feature @@ -16,17 +16,17 @@ Feature: Handle bad data in a graceful manner Scenario: Only dead-end oneways Given the node map - | a | b | c | + | a | b | c | d | e | Given the ways | nodes | oneway | - | ab | yes | - | cb | yes | + | abcde | yes | When I route I should get | from | to | route | - | a | b | ab | - + | b | d | abcde | + + @todo Scenario: Start/end point at the same location Given the node map | a | b | @@ -58,27 +58,29 @@ Feature: Handle bad data in a graceful manner | k | -78 | 0 | | l | -80 | 0 | | m | -82 | 0 | - | n | -87 | 0 | - | o | -89 | 0 | +# | n | -87 | 0 | +# | o | -89 | 0 | And the ways | nodes | - | ab | +# | ab | | bc | | cd | | de | | kl | | lm | - | mn | - | no | +# | mn | +# | no | When I route I should get | from | to | route | - | a | b | | - | b | c | | - | a | d | | - | c | d | cd | - | l | m | lm | - | o | l | | - | n | m | | - | o | n | | +# | a | b | cd | +# | b | c | cd | +# | a | d | cd | +# | c | d | cd | + | d | e | de | +# | k | l | kl | +# | l | m | lm | +# | o | l | lm | +# | n | m | lm | +# | o | n | lm | diff --git a/features/testbot/bearing_param.feature b/features/testbot/bearing_param.feature new file mode 100644 index 000000000..24d329dc6 --- /dev/null +++ b/features/testbot/bearing_param.feature @@ -0,0 +1,93 @@ +@routing @bearing_param @todo +Feature: Bearing parameter + + Background: + Given the profile "testbot" + And a grid size of 10 meters + + Scenario: Testbot - Intial bearing in simple case + Given the node map + | a | | + | 0 | c | + | b | | + + And the ways + | nodes | + | ac | + | bc | + + When I route I should get + | from | to | param:bearing | route | bearing | + | 0 | c | 0 | bc | 45 | + | 0 | c | 45 | bc | 45 | + | 0 | c | 85 | bc | 45 | + | 0 | c | 95 | ac | 135 | + | 0 | c | 135 | ac | 
135 | + | 0 | c | 180 | ac | 135 | + + Scenario: Testbot - Initial bearing on split way + Given the node map + | d | | | | | 1 | | | | | c | + | a | | | | | 0 | | | | | b | + + And the ways + | nodes | oneway | + | ab | yes | + | bc | yes | + | cd | yes | + | da | yes | + + When I route I should get + | from | to | param:bearing | route | bearing | + | 0 | b | 10 | ab | 90 | + | 0 | b | 90 | ab | 90 | + | 0 | b | 170 | ab | 90 | + | 0 | b | 190 | cd,da,ab | 270 | + | 0 | b | 270 | cd,da,ab | 270 | + | 0 | b | 350 | cd,da,ab | 270 | + | 1 | d | 10 | cd | 90 | + | 1 | d | 90 | cd | 90 | + | 1 | d | 170 | cd | 90 | + | 1 | d | 190 | ab,bc,cd | 270 | + | 1 | d | 270 | ab,bc,cd | 270 | + | 1 | d | 350 | ab,bc,cd | 270 | + + Scenario: Testbot - Initial bearing in all direction + Given the node map + | h | | | a | | | b | + | | | | | | | | + | | | p | i | j | | | + | g | | o | 0 | k | | c | + | | | n | m | l | | | + | | | | | | | | + | f | | | e | | | d | + + And the ways + | nodes | oneway | + | ia | yes | + | jb | yes | + | kc | yes | + | ld | yes | + | me | yes | + | nf | yes | + | og | yes | + | ph | yes | + | ab | yes | + | bc | yes | + | cd | yes | + | de | yes | + | ef | yes | + | fg | yes | + | gh | yes | + | ha | yes | + + When I route I should get + | from | to | param:bearing | route | bearing | + | 0 | a | 0 | ia | 0 | + | 0 | a | 45 | jb,bc,cd,de,ef,fg,gh,ha | 45 | + | 0 | a | 90 | kc,cd,de,ef,fg,gh,ha | 90 | + | 0 | a | 135 | ld,de,ef,fg,gh,ha | 135 | + | 0 | a | 180 | me,de,ef,fg,gh,ha | 180 | + | 0 | a | 225 | nf,ef,fg,gh,ha | 225 | + | 0 | a | 270 | og,gh,ha | 270 | + | 0 | a | 315 | pn,ha | 315 | diff --git a/features/testbot/bug.feature b/features/testbot/bug.feature new file mode 100644 index 000000000..6eb1349e1 --- /dev/null +++ b/features/testbot/bug.feature @@ -0,0 +1,23 @@ +@routing @testbot @bug @todo +Feature: Testbot - Things that looks like bugs + + Background: + Given the profile "testbot" + + Scenario: Testbot - Triangle problem + Given the 
node map + | | | | d | + | a | b | c | | + | | | | e | + + And the ways + | nodes | highway | oneway | + | abc | primary | | + | cd | primary | yes | + | ce | river | | + | de | primary | | + + When I route I should get + | from | to | route | + | d | c | de,ce | + | e | d | de | diff --git a/features/testbot/example.feature b/features/testbot/example.feature new file mode 100644 index 000000000..699d35cb1 --- /dev/null +++ b/features/testbot/example.feature @@ -0,0 +1,38 @@ +@routing @testbot @example +Feature: Testbot - Walkthrough + +# A complete walk-through of how this data is processed can be found at: +# https://github.com/DennisOSRM/Project-OSRM/wiki/Processing-Flow + + Background: + Given the profile "testbot" + + Scenario: Testbot - Processing Flow + Given the node map + | | | | d | + | a | b | c | | + | | | | e | + + And the ways + | nodes | highway | oneway | + | abc | primary | | + | cd | primary | yes | + | ce | river | | + | de | primary | | + + When I route I should get + | from | to | route | + | a | b | abc | + | a | c | abc | + | a | d | abc,cd | + | a | e | abc,ce | + | b | a | abc | + | b | c | abc | + | b | d | abc,cd | + | b | e | abc,ce | + | d | a | de,ce,abc | + | d | b | de,ce,abc | + | d | e | de | + | e | a | ce,abc | + | e | b | ce,abc | + | e | c | ce | diff --git a/features/testbot/mode.feature b/features/testbot/mode.feature new file mode 100644 index 000000000..3192c48f9 --- /dev/null +++ b/features/testbot/mode.feature @@ -0,0 +1,26 @@ +@routing @testbot @mode +Feature: Testbot - Mode flag + + Background: + Given the profile "testbot" + + @todo + Scenario: Bike - Mode + Given the node map + | a | b | | + | | c | d | + + And the ways + | nodes | highway | route | duration | + | ab | primary | | | + | bc | | ferry | 0:01 | + | cd | primary | | | + + When I route I should get + | from | to | route | turns | modes | + | a | d | ab,bc,cd | head,right,left,destination | bot,ferry,bot | + | d | a | cd,bc,ab | head,right left,destination 
| bot,ferry,bot | + | c | a | bc,ab | head,left,destination | ferry,bot | + | d | b | cd,bc | head,right,destination | bot,ferry | + | a | c | ab,bc | head,right,destination | bot,ferry | + | b | d | bc,cd | head,left,destination | ferry,bot | diff --git a/features/testbot/penalty.feature b/features/testbot/penalty.feature index 8b96050c9..97d5e6ade 100644 --- a/features/testbot/penalty.feature +++ b/features/testbot/penalty.feature @@ -4,7 +4,7 @@ Testbot uses a signal penalty of 7s. Background: Given the profile "testbot" - + Scenario: Traffic signals should incur a delay, without changing distance Given the node map | a | b | c | @@ -44,11 +44,8 @@ Testbot uses a signal penalty of 7s. When I route I should get | from | to | route | time | - | a | b | abc | 10s +-1 | | a | c | abc | 27s +-1 | - | d | e | def | 20s +-1 | | d | f | def | 47s +-1 | - | g | h | ghi | 30s +-1 | | g | i | ghi | 67s +-1 | Scenario: Passing multiple traffic signals should incur a accumulated delay @@ -69,6 +66,7 @@ Testbot uses a signal penalty of 7s. | from | to | route | time | | a | e | abcde | 61s +-1 | + @todo Scenario: Signal penalty should not depend on way type Given the node map | a | b | c | @@ -114,6 +112,7 @@ Testbot uses a signal penalty of 7s. | from | to | route | time | | a | e | abcde | 61s +-1 | + @todo Scenario: Starting or ending at a traffic signal should not incur a delay Given the node map | a | b | c | @@ -154,7 +153,7 @@ Testbot uses a signal penalty of 7s. And the node map | a | | b | | c | | | | d | | | - + And the nodes | node | highway | | b | traffic_signals | @@ -163,7 +162,7 @@ Testbot uses a signal penalty of 7s. 
| nodes | highway | | abc | primary | | adc | primary | - + When I route I should get - | from | to | route | + | from | to | route | | a | c | adc | \ No newline at end of file diff --git a/features/testbot/snap.feature b/features/testbot/snap.feature index ebed9f829..bb3156dbd 100644 --- a/features/testbot/snap.feature +++ b/features/testbot/snap.feature @@ -98,7 +98,7 @@ Feature: Snap start/end point to the nearest way | b | x | xb | | c | x | xc | - Scenario: Find edges within 1km, but not 10km + Scenario: Find edges within 1km, and the same from 10km Given a grid size of 1000 meters Given the node map | p | | | | | | | | | | | i | | | | | | | | | | | j | @@ -144,11 +144,11 @@ Feature: Snap start/end point to the nearest way | x | 6 | xf | | x | 7 | xg | | x | 8 | xh | - | x | i | | - | x | j | | - | x | k | | - | x | l | | - | x | m | | - | x | n | | - | x | o | | - | x | p | | \ No newline at end of file + | x | i | xa | + | x | j | xb | + | x | k | xc | + | x | l | xd | + | x | m | xe | + | x | n | xf | + | x | o | xg | + | x | p | xh | \ No newline at end of file diff --git a/features/testbot/via.feature b/features/testbot/via.feature new file mode 100644 index 000000000..5ba8baca0 --- /dev/null +++ b/features/testbot/via.feature @@ -0,0 +1,52 @@ +@routing @testbot @via +Feature: Via points + + Background: + Given the profile "testbot" + + Scenario: Simple via point + Given the node map + | a | b | c | + + And the ways + | nodes | + | abc | + + When I route I should get + | waypoints | route | + | a,b,c | abc | + | c,b,a | abc | + + Scenario: Via point at a dead end + Given the node map + | a | b | c | + | | d | | + + And the ways + | nodes | + | abc | + | bd | + + When I route I should get + | waypoints | route | + | a,d,c | abc,bd,bd,abc | + | c,d,a | abc,bd,bd,abc | + + Scenario: Multiple via points + Given the node map + | a | | | | e | f | g | | + | | b | c | d | | | | h | + + And the ways + | nodes | + | ae | + | ab | + | bcd | + | de | + | efg | + | 
gh | + | dh | + + When I route I should get + | waypoints | route | + | a,c,f,h | ab,bcd,de,efg,gh | diff --git a/profile.lua b/profile.lua deleted file mode 100644 index 33a7247d6..000000000 --- a/profile.lua +++ /dev/null @@ -1,219 +0,0 @@ --- Begin of globals -require("lib/access") - -barrier_whitelist = { ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true, ["no"] = true} -access_tag_whitelist = { ["yes"] = true, ["motorcar"] = true, ["motor_vehicle"] = true, ["vehicle"] = true, ["permissive"] = true, ["designated"] = true } -access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestry"] = true } -access_tag_restricted = { ["destination"] = true, ["delivery"] = true } -access_tags = { "motorcar", "motor_vehicle", "vehicle" } -access_tags_hierachy = { "motorcar", "motor_vehicle", "vehicle", "access" } -service_tag_restricted = { ["parking_aisle"] = true } -ignore_in_grid = { ["ferry"] = true } -restriction_exception_tags = { "motorcar", "motor_vehicle", "vehicle" } - -speed_profile = { - ["motorway"] = 90, - ["motorway_link"] = 75, - ["trunk"] = 85, - ["trunk_link"] = 70, - ["primary"] = 65, - ["primary_link"] = 60, - ["secondary"] = 55, - ["secondary_link"] = 50, - ["tertiary"] = 40, - ["tertiary_link"] = 30, - ["unclassified"] = 25, - ["residential"] = 25, - ["living_street"] = 10, - ["service"] = 15, --- ["track"] = 5, - ["ferry"] = 5, - ["shuttle_train"] = 10, - ["default"] = 50 -} - -take_minimum_of_speeds = false -obey_oneway = true -obey_bollards = true -use_restrictions = true -ignore_areas = true -- future feature -traffic_signal_penalty = 2 -u_turn_penalty = 20 - --- End of globals - -function get_exceptions(vector) - for i,v in ipairs(restriction_exception_tags) do - vector:Add(v) - end -end - -local function parse_maxspeed(source) - if source == nil then - return 0 - end - local n = tonumber(source:match("%d*")) - if n == nil then - n = 0 - end - if 
string.match(source, "mph") or string.match(source, "mp/h") then - n = (n*1609)/1000; - end - return math.abs(n) -end - -function node_function (node) - local barrier = node.tags:Find ("barrier") - local access = Access.find_access_tag(node, access_tags_hierachy) - local traffic_signal = node.tags:Find("highway") - - --flag node if it carries a traffic light - - if traffic_signal == "traffic_signals" then - node.traffic_light = true; - end - - -- parse access and barrier tags - if access and access ~= "" then - if access_tag_blacklist[access] then - node.bollard = true - end - elseif barrier and barrier ~= "" then - if barrier_whitelist[barrier] then - return - else - node.bollard = true - end - end - return 1 -end - - -function way_function (way) - -- First, get the properties of each way that we come across - local highway = way.tags:Find("highway") - local name = way.tags:Find("name") - local ref = way.tags:Find("ref") - local junction = way.tags:Find("junction") - local route = way.tags:Find("route") - local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) - local maxspeed_forward = tonumber(way.tags:Find( "maxspeed:forward")) - local maxspeed_backward = tonumber(way.tags:Find( "maxspeed:backward")) - local barrier = way.tags:Find("barrier") - local oneway = way.tags:Find("oneway") - local cycleway = way.tags:Find("cycleway") - local duration = way.tags:Find("duration") - local service = way.tags:Find("service") - local area = way.tags:Find("area") - local access = Access.find_access_tag(way, access_tags_hierachy) - - -- Second, parse the way according to these properties - - if ignore_areas and ("yes" == area) then - return 0 - end - - -- Check if we are allowed to access the way - if access_tag_blacklist[access] then - return 0 - end - - -- Set the name that will be used for instructions - if "" ~= ref then - way.name = ref - elseif "" ~= name then - way.name = name --- else --- way.name = highway -- if no name exists, use way type - end - - if 
"roundabout" == junction then - way.roundabout = true; - end - - -- Handling ferries and piers - if (speed_profile[route] ~= nil and speed_profile[route] > 0) then - if durationIsValid(duration) then - way.duration = math.max( parseDuration(duration), 1 ); - end - way.direction = Way.bidirectional - if speed_profile[route] ~= nil then - highway = route; - end - if tonumber(way.duration) < 0 then - way.speed = speed_profile[highway] - end - end - - -- Set the avg speed on the way if it is accessible by road class - if (speed_profile[highway] ~= nil and way.speed == -1 ) then - if maxspeed > speed_profile[highway] then - way.speed = maxspeed - else - if 0 == maxspeed then - maxspeed = math.huge - end - way.speed = math.min(speed_profile[highway], maxspeed) - end - end - - -- Set the avg speed on ways that are marked accessible - if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then - if 0 == maxspeed then - maxspeed = math.huge - end - way.speed = math.min(speed_profile["default"], maxspeed) - end - - -- Set access restriction flag if access is allowed under certain restrictions only - if access ~= "" and access_tag_restricted[access] then - way.is_access_restricted = true - end - - -- Set access restriction flag if service is allowed under certain restrictions only - if service ~= "" and service_tag_restricted[service] then - way.is_access_restricted = true - end - - -- Set direction according to tags on way - if obey_oneway then - if oneway == "no" or oneway == "0" or oneway == "false" then - way.direction = Way.bidirectional - elseif oneway == "-1" then - way.direction = Way.opposite - elseif oneway == "yes" or oneway == "1" or oneway == "true" or junction == "roundabout" or highway == "motorway_link" or highway == "motorway" then - way.direction = Way.oneway - else - way.direction = Way.bidirectional - end - else - way.direction = Way.bidirectional - end - - -- Override speed settings if explicit forward/backward maxspeeds are given - if 
maxspeed_forward ~= nil and maxspeed_forward > 0 then - if Way.bidirectional == way.direction then - way.backward_speed = way.speed - end - way.speed = maxspeed_forward - end - if maxspeed_backward ~= nil and maxspeed_backward > 0 then - way.backward_speed = maxspeed_backward - end - - -- Override general direction settings of there is a specific one for our mode of travel - - if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then - way.ignore_in_grid = true - end - way.type = 1 - return 1 -end - --- These are wrappers to parse vectors of nodes and ways and thus to speed up any tracing JIT - -function node_vector_function(vector) - for v in vector.nodes do - node_function(v) - end -end diff --git a/profiles/bicycle.lua b/profiles/bicycle.lua index 96c06b555..98d03c83d 100644 --- a/profiles/bicycle.lua +++ b/profiles/bicycle.lua @@ -65,6 +65,24 @@ route_speeds = { ["ferry"] = 5 } +surface_speeds = { + ["cobblestone:flattened"] = 10, + ["paving_stones"] = 10, + ["compacted"] = 10, + ["cobblestone"] = 6, + ["unpaved"] = 6, + ["fine_gravel"] = 6, + ["gravel"] = 6, + ["fine_gravel"] = 6, + ["pebbelstone"] = 6, + ["ground"] = 6, + ["dirt"] = 6, + ["earth"] = 6, + ["grass"] = 6, + ["mud"] = 3, + ["sand"] = 3 +} + take_minimum_of_speeds = true obey_oneway = true obey_bollards = false @@ -158,16 +176,20 @@ function way_function (way) local service = way.tags:Find("service") local area = way.tags:Find("area") local foot = way.tags:Find("foot") + local surface = way.tags:Find("surface") -- name - if "" ~= ref then - way.name = ref + if "" ~= ref and "" ~= name then + way.name = name .. ' / ' .. 
ref + elseif "" ~= ref then + way.name = ref elseif "" ~= name then way.name = name else - way.name = highway -- if no name exists, use way type + way.name = "{highway:"..highway.."}" -- if no name exists, use way type + -- this encoding scheme is excepted to be a temporary solution end - + -- speed if route_speeds[route] then -- ferries (doesn't cover routes tagged using relations) @@ -285,6 +307,15 @@ function way_function (way) elseif cycleway_right and cycleway_tags[cycleway_right] then way.speed = bicycle_speeds["cycleway"] end + + -- surfaces + if surface then + surface_speed = surface_speeds[surface] + if surface_speed then + way.speed = math.min(way.speed, surface_speed) + way.backward_speed = math.min(way.backward_speed, surface_speed) + end + end -- maxspeed -- TODO: maxspeed of backward direction diff --git a/profiles/car.lua b/profiles/car.lua index 33a7247d6..ff7c37971 100644 --- a/profiles/car.lua +++ b/profiles/car.lua @@ -11,10 +11,10 @@ service_tag_restricted = { ["parking_aisle"] = true } ignore_in_grid = { ["ferry"] = true } restriction_exception_tags = { "motorcar", "motor_vehicle", "vehicle" } -speed_profile = { - ["motorway"] = 90, - ["motorway_link"] = 75, - ["trunk"] = 85, +speed_profile = { + ["motorway"] = 90, + ["motorway_link"] = 75, + ["trunk"] = 85, ["trunk_link"] = 70, ["primary"] = 65, ["primary_link"] = 60, @@ -43,7 +43,7 @@ u_turn_penalty = 20 -- End of globals function get_exceptions(vector) - for i,v in ipairs(restriction_exception_tags) do + for i,v in ipairs(restriction_exception_tags) do vector:Add(v) end end @@ -66,13 +66,13 @@ function node_function (node) local barrier = node.tags:Find ("barrier") local access = Access.find_access_tag(node, access_tags_hierachy) local traffic_signal = node.tags:Find("highway") - + --flag node if it carries a traffic light - + if traffic_signal == "traffic_signals" then - node.traffic_light = true; + node.traffic_light = true; end - + -- parse access and barrier tags if access and access ~= 
"" then if access_tag_blacklist[access] then @@ -90,35 +90,40 @@ end function way_function (way) - -- First, get the properties of each way that we come across - local highway = way.tags:Find("highway") - local name = way.tags:Find("name") - local ref = way.tags:Find("ref") - local junction = way.tags:Find("junction") - local route = way.tags:Find("route") - local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) - local maxspeed_forward = tonumber(way.tags:Find( "maxspeed:forward")) - local maxspeed_backward = tonumber(way.tags:Find( "maxspeed:backward")) - local barrier = way.tags:Find("barrier") - local oneway = way.tags:Find("oneway") - local cycleway = way.tags:Find("cycleway") - local duration = way.tags:Find("duration") - local service = way.tags:Find("service") - local area = way.tags:Find("area") - local access = Access.find_access_tag(way, access_tags_hierachy) + -- we dont route over areas + local area = way.tags:Find("area") + if ignore_areas and ("yes" == area) then + return 0 + end + + -- check if oneway tag is unsupported + local oneway = way.tags:Find("oneway") + if "reversible" == oneway then + return 0 + end + + -- Check if we are allowed to access the way + local access = Access.find_access_tag(way, access_tags_hierachy) + if access_tag_blacklist[access] then + return 0 + end -- Second, parse the way according to these properties + local highway = way.tags:Find("highway") + local name = way.tags:Find("name") + local ref = way.tags:Find("ref") + local junction = way.tags:Find("junction") + local route = way.tags:Find("route") + local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) + local maxspeed_forward = parse_maxspeed(way.tags:Find( "maxspeed:forward")) + local maxspeed_backward = parse_maxspeed(way.tags:Find( "maxspeed:backward")) + local barrier = way.tags:Find("barrier") + local cycleway = way.tags:Find("cycleway") + local duration = way.tags:Find("duration") + local service = way.tags:Find("service") - if ignore_areas and 
("yes" == area) then - return 0 - end - - -- Check if we are allowed to access the way - if access_tag_blacklist[access] then - return 0 - end - -- Set the name that will be used for instructions + -- Set the name that will be used for instructions if "" ~= ref then way.name = ref elseif "" ~= name then @@ -126,87 +131,82 @@ function way_function (way) -- else -- way.name = highway -- if no name exists, use way type end - + if "roundabout" == junction then way.roundabout = true; end -- Handling ferries and piers if (speed_profile[route] ~= nil and speed_profile[route] > 0) then - if durationIsValid(duration) then - way.duration = math.max( parseDuration(duration), 1 ); - end - way.direction = Way.bidirectional - if speed_profile[route] ~= nil then - highway = route; - end - if tonumber(way.duration) < 0 then - way.speed = speed_profile[highway] - end + if durationIsValid(duration) then + way.duration = math.max( parseDuration(duration), 1 ); + end + way.direction = Way.bidirectional + if speed_profile[route] ~= nil then + highway = route; + end + if tonumber(way.duration) < 0 then + way.speed = speed_profile[highway] + end end - + -- Set the avg speed on the way if it is accessible by road class if (speed_profile[highway] ~= nil and way.speed == -1 ) then - if maxspeed > speed_profile[highway] then - way.speed = maxspeed - else - if 0 == maxspeed then - maxspeed = math.huge - end - way.speed = math.min(speed_profile[highway], maxspeed) + if maxspeed > speed_profile[highway] then + way.speed = maxspeed + else + if 0 == maxspeed then + maxspeed = math.huge + end + way.speed = math.min(speed_profile[highway], maxspeed) end end -- Set the avg speed on ways that are marked accessible - if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then - if 0 == maxspeed then - maxspeed = math.huge - end - way.speed = math.min(speed_profile["default"], maxspeed) + if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then + if 0 == maxspeed then + 
maxspeed = math.huge end + way.speed = math.min(speed_profile["default"], maxspeed) + end -- Set access restriction flag if access is allowed under certain restrictions only - if access ~= "" and access_tag_restricted[access] then - way.is_access_restricted = true - end + if access ~= "" and access_tag_restricted[access] then + way.is_access_restricted = true + end -- Set access restriction flag if service is allowed under certain restrictions only - if service ~= "" and service_tag_restricted[service] then + if service ~= "" and service_tag_restricted[service] then way.is_access_restricted = true - end - + end + -- Set direction according to tags on way - if obey_oneway then - if oneway == "no" or oneway == "0" or oneway == "false" then - way.direction = Way.bidirectional - elseif oneway == "-1" then + way.direction = Way.bidirectional + if obey_oneway then + if oneway == "-1" then way.direction = Way.opposite - elseif oneway == "yes" or oneway == "1" or oneway == "true" or junction == "roundabout" or highway == "motorway_link" or highway == "motorway" then - way.direction = Way.oneway - else - way.direction = Way.bidirectional - end - else - way.direction = Way.bidirectional + elseif oneway == "yes" or oneway == "1" or oneway == "true" or junction == "roundabout" or highway == "motorway_link" or highway == "motorway" then + way.direction = Way.oneway end + end -- Override speed settings if explicit forward/backward maxspeeds are given - if maxspeed_forward ~= nil and maxspeed_forward > 0 then - if Way.bidirectional == way.direction then - way.backward_speed = way.speed - end - way.speed = maxspeed_forward - end - if maxspeed_backward ~= nil and maxspeed_backward > 0 then - way.backward_speed = maxspeed_backward + if maxspeed_forward ~= nil and maxspeed_forward > 0 then + if Way.bidirectional == way.direction then + way.backward_speed = way.speed end + way.speed = maxspeed_forward + end + if maxspeed_backward ~= nil and maxspeed_backward > 0 then + 
way.backward_speed = maxspeed_backward + end -- Override general direction settings of there is a specific one for our mode of travel - - if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then + if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then way.ignore_in_grid = true - end - way.type = 1 + end + + way.type = 1 return 1 end diff --git a/profiles/lib/access.lua b/profiles/lib/access.lua index a1e2fcf4d..094db6290 100644 --- a/profiles/lib/access.lua +++ b/profiles/lib/access.lua @@ -3,7 +3,7 @@ local ipairs = ipairs module "Access" function find_access_tag(source,access_tags_hierachy) - for i,v in ipairs(access_tags_hierachy) do + for i,v in ipairs(access_tags_hierachy) do local tag = source.tags:Find(v) if tag ~= '' then return tag diff --git a/routed.cpp b/routed.cpp index bdf30c652..e097bd70d 100644 --- a/routed.cpp +++ b/routed.cpp @@ -23,8 +23,9 @@ or see http://www.gnu.org/licenses/agpl.txt. #include #include -#include #include +#include +#include #include "Server/DataStructures/QueryObjectsStorage.h" #include "Server/ServerConfiguration.h" @@ -64,7 +65,7 @@ BOOL WINAPI console_ctrl_handler(DWORD ctrl_type) } #endif -int main (int argc, char * argv[0]) { +int main (int argc, char * argv[]) { #ifdef __linux__ if(!mlockall(MCL_CURRENT | MCL_FUTURE)) WARN("Process " << argv[0] << "could not be locked to RAM"); @@ -138,7 +139,11 @@ int main (int argc, char * argv[0]) { std::cout << "[server] initiating shutdown" << std::endl; s->Stop(); std::cout << "[server] stopping threads" << std::endl; - t.join(); + + if(!t.timed_join(boost::posix_time::seconds(2))) { +// INFO("Threads did not finish within 2 seconds. Hard abort!"); + } + std::cout << "[server] freeing objects" << std::endl; delete s; delete objects; diff --git a/typedefs.h b/typedefs.h index 32807130e..a962598e9 100644 --- a/typedefs.h +++ b/typedefs.h @@ -35,16 +35,14 @@ or see http://www.gnu.org/licenses/agpl.txt. 
#include -#define INFO(x) do {std::cout << "[info " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl;} while(0); -#define ERR(x) do {std::cerr << "[error " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl; std::exit(-1);} while(0); -#define WARN(x) do {std::cerr << "[warn " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl;} while(0); +#define INFO(x) do {std::cout << "[i " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl;} while(0); +#define ERR(x) do {std::cerr << "[! " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl; std::exit(-1);} while(0); +#define WARN(x) do {std::cerr << "[? " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl;} while(0); #ifdef NDEBUG #define DEBUG(x) -#define GUARANTEE(x,y) #else -#define DEBUG(x) do {std::cout << "[debug " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl;} while(0); -#define GUARANTEE(x,y) do { {do{ if(false == (x)) { ERR(y) } } while(0);} } while(0); +#define DEBUG(x) do {std::cout << "[d " << __FILE__ << ":" << __LINE__ << "] " << x << std::endl;} while(0); #endif #ifndef M_PI