Merging changes for 0.3.3
commit 42b68fa834

.gitignore (vendored): 27 lines changed
@@ -54,23 +54,23 @@ stxxl.errlog
 
 # compiled protobuffers #
 #########################
-DataStructures/pbf-proto/*.pb.h
-DataStructures/pbf-proto/*.pb.cc
+/DataStructures/pbf-proto/*.pb.h
+/DataStructures/pbf-proto/*.pb.cc
 
 # External Libs #
 #################
-lib/
-win/lib
+/lib/
+/win/lib
 
 # Visual Studio Temp + build Files #
 ####################################
-win/*.user
-win/*.ncb
-win/*.suo
-win/Debug/
-win/Release/
-win/bin/
-win/bin-debug/
+/win/*.user
+/win/*.ncb
+/win/*.suo
+/win/Debug/
+/win/Release/
+/win/bin/
+/win/bin-debug/
 /osrm-extract
 /osrm-routed
 /osrm-prepare
@@ -78,6 +78,7 @@ win/bin-debug/
 
 # Sandbox folder #
 ###################
-sandbox/
+/sandbox/
 
-test/profile.lua
+/test/profile.lua
+/profile.lua
@@ -27,7 +27,6 @@
 class CRC32 {
 private:
     unsigned crc;
-    unsigned slowcrc_table[1<<8];
 
     typedef boost::crc_optimal<32, 0x1EDC6F41, 0x0, 0x0, true, true> my_crc_32_type;
     typedef unsigned (CRC32::*CRC32CFunctionPtr)(char *str, unsigned len, unsigned crc);
@@ -33,12 +33,12 @@ or see http://www.gnu.org/licenses/agpl.txt.
 
 typedef
         boost::archive::iterators::base64_from_binary<
-        boost::archive::iterators::transform_width<string::const_iterator, 6, 8>
+        boost::archive::iterators::transform_width<std::string::const_iterator, 6, 8>
 > base64_t;
 
 typedef
         boost::archive::iterators::transform_width<
-        boost::archive::iterators::binary_from_base64<string::const_iterator>, 8, 6
+        boost::archive::iterators::binary_from_base64<std::string::const_iterator>, 8, 6
         > binary_t;
 
 template<class ToEncodeT>
@@ -54,7 +54,7 @@ static void EncodeObjectToBase64(const ToEncodeT & object, std::string& encoded)
 template<class ToEncodeT>
 static void DecodeObjectFromBase64(ToEncodeT & object, const std::string& _encoded) {
     try {
-        string encoded(_encoded);
+    	std::string encoded(_encoded);
         //replace "-" with "+" and "_" with "/"
         replaceAll(encoded, "-", "+");
         replaceAll(encoded, "_", "/");
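The hunks above only qualify string as std::string in the iterator typedefs and in DecodeObjectFromBase64, so the header no longer depends on a using-directive. A minimal sketch of encoding with the same boost::archive iterator stack, assuming a hypothetical helper name encode_base64 that is not part of the patched file (the result is unpadded base64):

    #include <boost/archive/iterators/base64_from_binary.hpp>
    #include <boost/archive/iterators/transform_width.hpp>
    #include <string>

    // same iterator stack the header builds its base64_t typedef from
    typedef boost::archive::iterators::base64_from_binary<
            boost::archive::iterators::transform_width<std::string::const_iterator, 6, 8>
    > base64_t;

    static std::string encode_base64(const std::string & input) {
        // materialize the lazily transformed range into a string
        return std::string(base64_t(input.begin()), base64_t(input.end()));
    }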
@@ -52,21 +52,12 @@ private:
         unsigned edgeBasedNodeID;
         unsigned nameID:31;
         bool shortcut:1;
+        short type;
+        bool isAccessRestricted:1;
         bool forward:1;
         bool backward:1;
         bool roundabout:1;
         bool ignoreInGrid:1;
-        short type;
-        bool isAccessRestricted;
-    };
-
-    struct _EdgeBasedEdgeData {
-        int distance;
-        unsigned via;
-        unsigned nameID;
-        bool forward;
-        bool backward;
-        TurnInstruction turnInstruction;
     };
 
     typedef DynamicGraph< _NodeBasedEdgeData > _NodeBasedDynamicGraph;
@@ -157,11 +157,13 @@ public:
             forwardEdge.data.distance = backwardEdge.data.distance = std::numeric_limits< int >::max();
             //remove parallel edges
             while ( i < edges.size() && edges[i].source == source && edges[i].target == target ) {
-                if ( edges[i].data.forward )
+                if ( edges[i].data.forward) {
                     forwardEdge.data.distance = std::min( edges[i].data.distance, forwardEdge.data.distance );
-                if ( edges[i].data.backward )
+                }
+                if ( edges[i].data.backward) {
                     backwardEdge.data.distance = std::min( edges[i].data.distance, backwardEdge.data.distance );
-                i++;
+                }
+                ++i;
             }
             //merge edges (s,t) and (t,s) into bidirectional edge
             if ( forwardEdge.data.distance == backwardEdge.data.distance ) {
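The rewritten loop above only brackets the two if-branches and switches to pre-increment; logically it still collapses parallel edges between one (source, target) pair by keeping the minimum weight per direction. A sketch of that reduction with a simplified Edge type assumed for illustration (the real code uses the contractor's edge structs):

    #include <algorithm>
    #include <limits>
    #include <vector>

    struct Edge { unsigned source, target; int distance; bool forward, backward; };

    // Assumes edges is sorted by (source, target); advances i past one group of
    // parallel edges and returns the minimum weight seen per direction.
    void collapse_parallel_edges(const std::vector<Edge> & edges, std::size_t & i,
                                 int & forward_dist, int & backward_dist) {
        const unsigned source = edges[i].source, target = edges[i].target;
        forward_dist = backward_dist = std::numeric_limits<int>::max();
        while (i < edges.size() && edges[i].source == source && edges[i].target == target) {
            if (edges[i].forward)  { forward_dist  = std::min(edges[i].distance, forward_dist); }
            if (edges[i].backward) { backward_dist = std::min(edges[i].distance, backward_dist); }
            ++i;
        }
    }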
@@ -242,7 +244,7 @@ public:
                 nodePriority[x] = _Evaluate( data, &nodeData[x], x );
             }
         }
-        std::cout << "ok" << std::endl << "preprocessing ..." << std::flush;
+        std::cout << "ok" << std::endl << "preprocessing " << numberOfNodes << " nodes ..." << std::flush;
 
         bool flushedContractor = false;
         while ( numberOfContractedNodes < numberOfNodes ) {
@@ -274,7 +276,7 @@ public:
                 TemporaryStorage & tempStorage = TemporaryStorage::GetInstance();
                 //Write dummy number of edges to temporary file
                 //        		std::ofstream temporaryEdgeStorage(temporaryEdgeStorageFilename.c_str(), std::ios::binary);
-                long initialFilePosition = tempStorage.tell(temporaryStorageSlotID);
+                uint64_t initialFilePosition = tempStorage.tell(temporaryStorageSlotID);
                 unsigned numberOfTemporaryEdges = 0;
                 tempStorage.writeToSlot(temporaryStorageSlotID, (char*)&numberOfTemporaryEdges, sizeof(unsigned));
 
@@ -21,8 +21,8 @@
 #include "EdgeBasedGraphFactory.h"
 
 template<>
-EdgeBasedGraphFactory::EdgeBasedGraphFactory(int nodes, std::vector<NodeBasedEdge> & inputEdges, std::vector<NodeID> & bn, std::vector<NodeID> & tl, std::vector<_Restriction> & irs, std::vector<NodeInfo> & nI, SpeedProfileProperties sp) : inputNodeInfoList(nI), numberOfTurnRestrictions(irs.size()), speedProfile(sp) {
-	BOOST_FOREACH(_Restriction & restriction, irs) {
+EdgeBasedGraphFactory::EdgeBasedGraphFactory(int nodes, std::vector<NodeBasedEdge> & inputEdges, std::vector<NodeID> & bn, std::vector<NodeID> & tl, std::vector<_Restriction> & irs, std::vector<NodeInfo> & nI, SpeedProfileProperties sp) : speedProfile(sp), inputNodeInfoList(nI), numberOfTurnRestrictions(irs.size()) {
+	BOOST_FOREACH(const _Restriction & restriction, irs) {
         std::pair<NodeID, NodeID> restrictionSource = std::make_pair(restriction.fromNode, restriction.viaNode);
         unsigned index;
         RestrictionMap::iterator restrIter = _restrictionMap.find(restrictionSource);
@@ -44,18 +44,12 @@ EdgeBasedGraphFactory::EdgeBasedGraphFactory(int nodes, std::vector<NodeBasedEdg
         _restrictionBucketVector.at(index).push_back(std::make_pair(restriction.toNode, restriction.flags.isOnly));
     }
 
-    BOOST_FOREACH(NodeID id, bn) {
-        _barrierNodes[id] = true;
-    }
-    BOOST_FOREACH(NodeID id, tl) {
-        _trafficLights[id] = true;
-    }
+	_barrierNodes.insert(bn.begin(), bn.end());
+	_trafficLights.insert(tl.begin(), tl.end());
 
     DeallocatingVector< _NodeBasedEdge > edges;
-    //    edges.reserve( 2 * inputEdges.size() );
+    _NodeBasedEdge edge;
     for ( std::vector< NodeBasedEdge >::const_iterator i = inputEdges.begin(); i != inputEdges.end(); ++i ) {
-
-        _NodeBasedEdge edge;
         if(!i->isForward()) {
             edge.source = i->target();
             edge.target = i->source();
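Barrier nodes and traffic lights move from boost::unordered_map<NodeID, bool> (where the value was always true) to boost::unordered_set<NodeID>, filled by a single range insert; membership tests via find() are unchanged. A hedged sketch of that pattern, assuming NodeID is an unsigned typedef as in the project headers:

    #include <boost/unordered_set.hpp>
    #include <vector>

    typedef unsigned NodeID;

    boost::unordered_set<NodeID> barrier_nodes;

    void load_barriers(const std::vector<NodeID> & bn) {
        barrier_nodes.insert(bn.begin(), bn.end());   // one bulk insert instead of a loop
    }

    bool is_barrier(const NodeID v) {
        return barrier_nodes.find(v) != barrier_nodes.end();
    }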
@@ -67,9 +61,9 @@ EdgeBasedGraphFactory::EdgeBasedGraphFactory(int nodes, std::vector<NodeBasedEdg
             edge.data.forward = i->isForward();
             edge.data.backward = i->isBackward();
         }
-        if(edge.source == edge.target)
-            continue;
-
+        if(edge.source == edge.target) {
+        	continue;
+        }
         edge.data.distance = (std::max)((int)i->weight(), 1 );
         assert( edge.data.distance > 0 );
         edge.data.shortcut = false;
@@ -79,6 +73,7 @@ EdgeBasedGraphFactory::EdgeBasedGraphFactory(int nodes, std::vector<NodeBasedEdg
         edge.data.type = i->type();
         edge.data.isAccessRestricted = i->isAccessRestricted();
         edge.data.edgeBasedNodeID = edges.size();
+        edge.data.contraFlow = i->isContraFlow();
         edges.push_back( edge );
         if( edge.data.backward ) {
             std::swap( edge.source, edge.target );
@@ -108,16 +103,12 @@ void EdgeBasedGraphFactory::GetEdgeBasedNodes( DeallocatingVector< EdgeBasedNode
     nodes.swap(edgeBasedNodes);
 }
 
-void EdgeBasedGraphFactory::GetOriginalEdgeData( std::vector< OriginalEdgeData> & oed) {
-    oed.swap(originalEdgeData);
-}
-
 NodeID EdgeBasedGraphFactory::CheckForEmanatingIsOnlyTurn(const NodeID u, const NodeID v) const {
     std::pair < NodeID, NodeID > restrictionSource = std::make_pair(u, v);
     RestrictionMap::const_iterator restrIter = _restrictionMap.find(restrictionSource);
     if (restrIter != _restrictionMap.end()) {
         unsigned index = restrIter->second;
-        BOOST_FOREACH(RestrictionSource restrictionTarget, _restrictionBucketVector.at(index)) {
+        BOOST_FOREACH(const RestrictionSource & restrictionTarget, _restrictionBucketVector.at(index)) {
             if(restrictionTarget.second) {
                 return restrictionTarget.first;
             }
@@ -159,7 +150,7 @@ void EdgeBasedGraphFactory::InsertEdgeBasedNode(
     edgeBasedNodes.push_back(currentNode);
 }
 
-void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename) {
+void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename, lua_State *myLuaState) {
     Percent p(_nodeBasedGraph->GetNumberOfNodes());
     int numberOfSkippedTurns(0);
     int nodeBasedEdgeCounter(0);
@@ -243,12 +234,16 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename) {
     std::vector<NodeID>().swap(vectorOfComponentSizes);
     std::vector<NodeID>().swap(componentsIndex);
 
+    std::vector<OriginalEdgeData> original_edge_data_vector;
+    original_edge_data_vector.reserve(10000);
+
     //Loop over all turns and generate new set of edges.
     //Three nested loop look super-linear, but we are dealing with a linear number of turns only.
     for(_NodeBasedDynamicGraph::NodeIterator u = 0; u < _nodeBasedGraph->GetNumberOfNodes(); ++u ) {
         for(_NodeBasedDynamicGraph::EdgeIterator e1 = _nodeBasedGraph->BeginEdges(u); e1 < _nodeBasedGraph->EndEdges(u); ++e1) {
             ++nodeBasedEdgeCounter;
             _NodeBasedDynamicGraph::NodeIterator v = _nodeBasedGraph->GetTarget(e1);
+            bool isBollardNode = (_barrierNodes.find(v) != _barrierNodes.end());
             //EdgeWeight heightPenalty = ComputeHeightPenalty(u, v);
             NodeID onlyToNode = CheckForEmanatingIsOnlyTurn(u, v);
             for(_NodeBasedDynamicGraph::EdgeIterator e2 = _nodeBasedGraph->BeginEdges(v); e2 < _nodeBasedGraph->EndEdges(v); ++e2) {
@@ -258,7 +253,7 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename) {
                     ++numberOfSkippedTurns;
                     continue;
                 }
-                bool isBollardNode = (_barrierNodes.find(v) != _barrierNodes.end());
+
                 if(u == w && 1 != _nodeBasedGraph->GetOutDegree(v) ) {
                     continue;
                 }
@@ -278,30 +273,30 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename) {
                         if(_trafficLights.find(v) != _trafficLights.end()) {
                             distance += speedProfile.trafficSignalPenalty;
                         }
-                        TurnInstruction turnInstruction = AnalyzeTurn(u, v, w);
+                        unsigned penalty = 0;
+                        TurnInstruction turnInstruction = AnalyzeTurn(u, v, w, penalty, myLuaState);
                         if(turnInstruction == TurnInstructions.UTurn)
                             distance += speedProfile.uTurnPenalty;
 //                        if(!edgeData1.isAccessRestricted && edgeData2.isAccessRestricted) {
 //                            distance += TurnInstructions.AccessRestrictionPenalty;
 //                            turnInstruction |= TurnInstructions.AccessRestrictionFlag;
 //                        }
-
+                        distance += penalty;
+
 
                         //distance += heightPenalty;
                         //distance += ComputeTurnPenalty(u, v, w);
                         assert(edgeData1.edgeBasedNodeID != edgeData2.edgeBasedNodeID);
-                        if(originalEdgeData.size() == originalEdgeData.capacity()-3) {
-                            originalEdgeData.reserve(originalEdgeData.size()*1.2);
-                        }
                         OriginalEdgeData oed(v,edgeData2.nameID, turnInstruction);
-                        EdgeBasedEdge newEdge(edgeData1.edgeBasedNodeID, edgeData2.edgeBasedNodeID, edgeBasedEdges.size(), distance, true, false );
-                        originalEdgeData.push_back(oed);
-                        if(originalEdgeData.size() > 100000) {
-                            originalEdgeDataOutFile.write((char*)&(originalEdgeData[0]), originalEdgeData.size()*sizeof(OriginalEdgeData));
-                            originalEdgeData.clear();
-                        }
+                        original_edge_data_vector.push_back(oed);
                         ++numberOfOriginalEdges;
-                        ++nodeBasedEdgeCounter;
+
+                        if(original_edge_data_vector.size() > 100000) {
+                            originalEdgeDataOutFile.write((char*)&(original_edge_data_vector[0]), original_edge_data_vector.size()*sizeof(OriginalEdgeData));
+                            original_edge_data_vector.clear();
+                        }
+
+                        EdgeBasedEdge newEdge(edgeData1.edgeBasedNodeID, edgeData2.edgeBasedNodeID, edgeBasedEdges.size(), distance, true, false );
                         edgeBasedEdges.push_back(newEdge);
                     } else {
                         ++numberOfSkippedTurns;
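Instead of growing a member vector with an ad-hoc 1.2x reserve, the turn loop now collects OriginalEdgeData records in a local vector and flushes it to the binary output stream whenever it exceeds 100000 entries, with one final flush after the loops. A sketch of that buffered-write pattern, using a placeholder Record type rather than the real OriginalEdgeData layout:

    #include <fstream>
    #include <vector>

    struct Record { unsigned via, nameID; short turn; };

    void write_buffered(std::ofstream & out, std::vector<Record> & buffer, const Record & r) {
        buffer.push_back(r);
        if (buffer.size() > 100000) {                       // flush threshold used by the patch
            out.write(reinterpret_cast<const char*>(&buffer[0]),
                      buffer.size() * sizeof(Record));
            buffer.clear();                                 // keeps capacity, drops contents
        }
    }
    // After the loop, the caller writes whatever remains in buffer the same way.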
@@ -311,8 +306,7 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename) {
         }
         p.printIncrement();
     }
-    numberOfOriginalEdges += originalEdgeData.size();
-    originalEdgeDataOutFile.write((char*)&(originalEdgeData[0]), originalEdgeData.size()*sizeof(OriginalEdgeData));
+    originalEdgeDataOutFile.write((char*)&(original_edge_data_vector[0]), original_edge_data_vector.size()*sizeof(OriginalEdgeData));
     originalEdgeDataOutFile.seekp(std::ios::beg);
     originalEdgeDataOutFile.write((char*)&numberOfOriginalEdges, sizeof(unsigned));
     originalEdgeDataOutFile.close();
@@ -326,12 +320,27 @@ void EdgeBasedGraphFactory::Run(const char * originalEdgeDataFilename) {
 //    std::vector<EdgeBasedNode>(edgeBasedNodes).swap(edgeBasedNodes);
 //    INFO("size: " << edgeBasedNodes.size() << ", cap: " << edgeBasedNodes.capacity());
     INFO("Node-based graph contains " << nodeBasedEdgeCounter     << " edges");
+    INFO("Edge-based graph contains " << edgeBasedEdges.size()    << " edges");
 //    INFO("Edge-based graph contains " << edgeBasedEdges.size()    << " edges, blowup is " << 2*((double)edgeBasedEdges.size()/(double)nodeBasedEdgeCounter));
     INFO("Edge-based graph skipped "  << numberOfSkippedTurns     << " turns, defined by " << numberOfTurnRestrictions << " restrictions.");
     INFO("Generated " << edgeBasedNodes.size() << " edge based nodes");
 }
 
-TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w) const {
+TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w, unsigned& penalty, lua_State *myLuaState) const {
+    const double angle = GetAngleBetweenTwoEdges(inputNodeInfoList[u], inputNodeInfoList[v], inputNodeInfoList[w]);
+
+    if( speedProfile.has_turn_penalty_function ) {
+    	try {
+            //call lua profile to compute turn penalty
+            penalty = luabind::call_function<int>( myLuaState, "turn_function", 180-angle );
+        } catch (const luabind::error &er) {
+            std::cerr << er.what() << std::endl;
+            //TODO handle lua errors
+        }
+    } else {
+        penalty = 0;
+    }
+
     if(u == w) {
         return TurnInstructions.UTurn;
     }
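AnalyzeTurn now takes a penalty out-parameter and a lua_State: when the profile defines a turn penalty function, the penalty is obtained through luabind::call_function<int> on the deviation from going straight (180 minus the geometric angle), and luabind errors are caught and logged. A minimal sketch of that call pattern, assuming the Lua state already exposes a global turn_function as the extractor profiles do when has_turn_penalty_function is set:

    extern "C" {
    #include <lua.h>
    #include <lauxlib.h>
    #include <lualib.h>
    }
    #include <luabind/luabind.hpp>
    #include <iostream>

    unsigned turn_penalty(lua_State * L, const double angle) {
        unsigned penalty = 0;
        try {
            // 180-angle is the deviation from driving straight through the via node
            penalty = luabind::call_function<int>(L, "turn_function", 180. - angle);
        } catch (const luabind::error & er) {
            std::cerr << er.what() << std::endl;   // fall back to zero penalty
        }
        return penalty;
    }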
@@ -342,33 +351,42 @@ TurnInstruction EdgeBasedGraphFactory::AnalyzeTurn(const NodeID u, const NodeID
     _NodeBasedDynamicGraph::EdgeData & data1 = _nodeBasedGraph->GetEdgeData(edge1);
     _NodeBasedDynamicGraph::EdgeData & data2 = _nodeBasedGraph->GetEdgeData(edge2);
 
+    if(!data1.contraFlow && data2.contraFlow) {
+    	return TurnInstructions.EnterAgainstAllowedDirection;
+    }
+    if(data1.contraFlow && !data2.contraFlow) {
+    	return TurnInstructions.LeaveAgainstAllowedDirection;
+    }
+
     //roundabouts need to be handled explicitely
     if(data1.roundabout && data2.roundabout) {
         //Is a turn possible? If yes, we stay on the roundabout!
         if( 1 == (_nodeBasedGraph->EndEdges(v) - _nodeBasedGraph->BeginEdges(v)) ) {
             //No turn possible.
             return TurnInstructions.NoTurn;
-        } else {
-            return TurnInstructions.StayOnRoundAbout;
         }
+        return TurnInstructions.StayOnRoundAbout;
     }
     //Does turn start or end on roundabout?
     if(data1.roundabout || data2.roundabout) {
         //We are entering the roundabout
-        if( (!data1.roundabout) && data2.roundabout)
+        if( (!data1.roundabout) && data2.roundabout) {
             return TurnInstructions.EnterRoundAbout;
+        }
         //We are leaving the roundabout
-        else if(data1.roundabout && (!data2.roundabout) )
+        if(data1.roundabout && (!data2.roundabout) ) {
             return TurnInstructions.LeaveRoundAbout;
+        }
     }
 
     //If street names stay the same and if we are certain that it is not a roundabout, we skip it.
-    if( (data1.nameID == data2.nameID) && (0 != data1.nameID))
+    if( (data1.nameID == data2.nameID) && (0 != data1.nameID)) {
         return TurnInstructions.NoTurn;
-    if( (data1.nameID == data2.nameID) && (0 == data1.nameID) && (_nodeBasedGraph->GetOutDegree(v) <= 2) )
+    }
+    if( (data1.nameID == data2.nameID) && (0 == data1.nameID) && (_nodeBasedGraph->GetOutDegree(v) <= 2) ) {
         return TurnInstructions.NoTurn;
+    }
 
-    double angle = GetAngleBetweenTwoEdges(inputNodeInfoList[u], inputNodeInfoList[v], inputNodeInfoList[w]);
     return TurnInstructions.GetTurnDirectionOfInstruction(angle);
 }
 
@@ -379,12 +397,12 @@ unsigned EdgeBasedGraphFactory::GetNumberOfNodes() const {
 /* Get angle of line segment (A,C)->(C,B), atan2 magic, formerly cosine theorem*/
 template<class CoordinateT>
 double EdgeBasedGraphFactory::GetAngleBetweenTwoEdges(const CoordinateT& A, const CoordinateT& C, const CoordinateT& B) const {
-    const int v1x = A.lon - C.lon;
-    const int v1y = A.lat - C.lat;
-    const int v2x = B.lon - C.lon;
-    const int v2y = B.lat - C.lat;
+    const double v1x = (A.lon - C.lon)/100000.;
+    const double v1y = lat2y(A.lat/100000.) - lat2y(C.lat/100000.);
+    const double v2x = (B.lon - C.lon)/100000.;
+    const double v2y = lat2y(B.lat/100000.) - lat2y(C.lat/100000.);
 
-    double angle = (atan2((double)v2y,v2x) - atan2((double)v1y,v1x) )*180/M_PI;
+    double angle = (atan2(v2y,v2x) - atan2(v1y,v1x) )*180/M_PI;
     while(angle < 0)
         angle += 360;
     return angle;
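The turn angle is now computed on projected coordinates: longitudes are rescaled from fixed-point 1e-5 degree units and latitudes go through lat2y from the newly included MercatorUtil.h, so the angle is measured in a locally conformal projection instead of on raw integer lat/lon deltas. A sketch of the same computation with a local lat2y; the formula is the usual Mercator y = ln(tan(pi/4 + lat/2)) in degrees, which is what MercatorUtil.h is assumed to provide:

    #include <cmath>

    static inline double lat2y(const double lat_deg) {
        return 180. / M_PI * std::log(std::tan(M_PI / 4. + lat_deg * (M_PI / 180.) / 2.));
    }

    // a, c, b are fixed-point coordinates in 1e-5 degree units, as stored in NodeInfo.
    double angle_between(int a_lat, int a_lon, int c_lat, int c_lon, int b_lat, int b_lon) {
        const double v1x = (a_lon - c_lon) / 100000.;
        const double v1y = lat2y(a_lat / 100000.) - lat2y(c_lat / 100000.);
        const double v2x = (b_lon - c_lon) / 100000.;
        const double v2y = lat2y(b_lat / 100000.) - lat2y(c_lat / 100000.);
        double angle = (std::atan2(v2y, v2x) - std::atan2(v1y, v1x)) * 180. / M_PI;
        while (angle < 0.) { angle += 360.; }
        return angle;
    }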
@@ -34,8 +34,10 @@
 #include <boost/foreach.hpp>
 #include <boost/lexical_cast.hpp>
 #include <boost/make_shared.hpp>
+#include <boost/noncopyable.hpp>
 #include <boost/shared_ptr.hpp>
-
+#include <boost/unordered_map.hpp>
+#include <boost/unordered_set.hpp>
 
 #include "../typedefs.h"
 #include "../DataStructures/DeallocatingVector.h"
@@ -43,39 +45,21 @@
 #include "../Extractor/ExtractorStructs.h"
 #include "../DataStructures/HashTable.h"
 #include "../DataStructures/ImportEdge.h"
+#include "../DataStructures/MercatorUtil.h"
 #include "../DataStructures/QueryEdge.h"
 #include "../DataStructures/Percent.h"
 #include "../DataStructures/TurnInstructions.h"
 #include "../Util/BaseConfiguration.h"
 
-class EdgeBasedGraphFactory {
-private:
-    struct _NodeBasedEdgeData {
-        int distance;
-        unsigned edgeBasedNodeID;
-        unsigned nameID:31;
-        bool shortcut:1;
-        bool forward:1;
-        bool backward:1;
-        bool roundabout:1;
-        bool ignoreInGrid:1;
-        short type;
-        bool isAccessRestricted;
-    };
+extern "C" {
+#include <lua.h>
+#include <lauxlib.h>
+#include <lualib.h>
+}
+#include <luabind/luabind.hpp>
 
-    struct _EdgeBasedEdgeData {
-        int distance;
-        unsigned via;
-        unsigned nameID;
-        bool forward;
-        bool backward;
-        TurnInstruction turnInstruction;
-    };
 
-    typedef DynamicGraph< _NodeBasedEdgeData > _NodeBasedDynamicGraph;
-    typedef _NodeBasedDynamicGraph::InputEdge _NodeBasedEdge;
-    std::vector<NodeInfo>               inputNodeInfoList;
-    unsigned numberOfTurnRestrictions;
+class EdgeBasedGraphFactory : boost::noncopyable {
 public:
     struct EdgeBasedNode {
         bool operator<(const EdgeBasedNode & other) const {
@@ -95,16 +79,45 @@ public:
         bool ignoreInGrid:1;
     };
 
-
     struct SpeedProfileProperties{
-        SpeedProfileProperties()  : trafficSignalPenalty(0), uTurnPenalty(0) {}
+        SpeedProfileProperties()  : trafficSignalPenalty(0), uTurnPenalty(0), has_turn_penalty_function(false) {}
         int trafficSignalPenalty;
         int uTurnPenalty;
+        bool has_turn_penalty_function;
     } speedProfile;
+
 private:
+    struct _NodeBasedEdgeData {
+        int distance;
+        unsigned edgeBasedNodeID;
+        unsigned nameID;
+        short type;
+        bool isAccessRestricted:1;
+        bool shortcut:1;
+        bool forward:1;
+        bool backward:1;
+        bool roundabout:1;
+        bool ignoreInGrid:1;
+        bool contraFlow:1;
+    };
+
+    struct _EdgeBasedEdgeData {
+        int distance;
+        unsigned via;
+        unsigned nameID;
+        bool forward;
+        bool backward;
+        TurnInstruction turnInstruction;
+    };
+
+    typedef DynamicGraph< _NodeBasedEdgeData > _NodeBasedDynamicGraph;
+    typedef _NodeBasedDynamicGraph::InputEdge _NodeBasedEdge;
+    std::vector<NodeInfo>               inputNodeInfoList;
+    unsigned numberOfTurnRestrictions;
+
     boost::shared_ptr<_NodeBasedDynamicGraph>   _nodeBasedGraph;
-    boost::unordered_map<NodeID, bool>          _barrierNodes;
-    boost::unordered_map<NodeID, bool>          _trafficLights;
+    boost::unordered_set<NodeID>          _barrierNodes;
+    boost::unordered_set<NodeID>          _trafficLights;
 
     typedef std::pair<NodeID, NodeID> RestrictionSource;
     typedef std::pair<NodeID, bool>   RestrictionTarget;
@@ -113,10 +126,8 @@ private:
     std::vector<EmanatingRestrictionsVector> _restrictionBucketVector;
     RestrictionMap _restrictionMap;
 
-
     DeallocatingVector<EdgeBasedEdge>   edgeBasedEdges;
     DeallocatingVector<EdgeBasedNode>   edgeBasedNodes;
-    std::vector<OriginalEdgeData>       originalEdgeData;
 
     NodeID CheckForEmanatingIsOnlyTurn(const NodeID u, const NodeID v) const;
     bool CheckIfTurnIsRestricted(const NodeID u, const NodeID v, const NodeID w) const;
@@ -127,18 +138,16 @@ private:
             bool belongsToTinyComponent);
     template<class CoordinateT>
     double GetAngleBetweenTwoEdges(const CoordinateT& A, const CoordinateT& C, const CoordinateT& B) const;
-//    SRTMLookup srtmLookup;
-
 
 public:
     template< class InputEdgeT >
     explicit EdgeBasedGraphFactory(int nodes, std::vector<InputEdgeT> & inputEdges, std::vector<NodeID> & _bollardNodes, std::vector<NodeID> & trafficLights, std::vector<_Restriction> & inputRestrictions, std::vector<NodeInfo> & nI, SpeedProfileProperties speedProfile);
 
-    void Run(const char * originalEdgeDataFilename);
+    void Run(const char * originalEdgeDataFilename, lua_State *myLuaState);
     void GetEdgeBasedEdges( DeallocatingVector< EdgeBasedEdge >& edges );
     void GetEdgeBasedNodes( DeallocatingVector< EdgeBasedNode> & nodes);
     void GetOriginalEdgeData( std::vector< OriginalEdgeData> & originalEdgeData);
-    TurnInstruction AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w) const;
+    TurnInstruction AnalyzeTurn(const NodeID u, const NodeID v, const NodeID w, unsigned& penalty, lua_State *myLuaState) const;
     unsigned GetNumberOfNodes() const;
 };
 
@@ -75,10 +75,13 @@ template< typename NodeID, typename Key >
 class UnorderedMapStorage {
 public:
 
-	UnorderedMapStorage( size_t ) {  }
+	UnorderedMapStorage( size_t ) {
+		//hash table gets 1000 Buckets
+		nodes.rehash(1000);
+	}
 
-    Key &operator[]( NodeID node ) {
-        return nodes[node];
+    Key &operator[]( const NodeID node ) {
+    	return nodes[node];
     }
 
     void Clear() {
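The storage constructor now pre-sizes its boost::unordered_map with rehash(1000) so early insertions do not trigger repeated rehashing, and operator[] takes its key by const value. A small sketch of the same idea; NodeID and Key are template parameters in the real header, plain typedefs here:

    #include <boost/unordered_map.hpp>
    #include <cstddef>

    typedef unsigned NodeID;
    typedef int Key;

    class UnorderedMapStorage {
    public:
        UnorderedMapStorage(std::size_t /*unused*/) {
            nodes.rehash(1000);            // reserve roughly 1000 buckets up front
        }
        Key & operator[](const NodeID node) { return nodes[node]; }
    private:
        boost::unordered_map<NodeID, Key> nodes;
    };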
@@ -22,7 +22,6 @@ or see http://www.gnu.org/licenses/agpl.txt.
 #define DEALLOCATINGVECTOR_H_
 
 #include <cassert>
-#include <cstdlib>
 #include <vector>
 
 #if __cplusplus > 199711L
@@ -32,7 +31,7 @@ or see http://www.gnu.org/licenses/agpl.txt.
 #endif
 
 
-template<typename ElementT, size_t bucketSizeC = 10485760/sizeof(ElementT), bool DeallocateC = false>
+template<typename ElementT, std::size_t bucketSizeC = 8388608/sizeof(ElementT), bool DeallocateC = false>
 class DeallocatingVectorIterator : public std::iterator<std::random_access_iterator_tag, ElementT> {
 protected:
 
@@ -42,12 +41,12 @@ protected:
         DeallocatingVectorIteratorState();
     public:
         explicit DeallocatingVectorIteratorState(const DeallocatingVectorIteratorState &r) : mData(r.mData), mIndex(r.mIndex), mBucketList(r.mBucketList) {}
-        //explicit DeallocatingVectorIteratorState(const ElementT * ptr, const size_t idx, const std::vector<ElementT *> & input_list) : mData(ptr), mIndex(idx), mBucketList(input_list) {}
-        explicit DeallocatingVectorIteratorState(const size_t idx, std::vector<ElementT *> & input_list) : mData(DEALLOCATION_VECTOR_NULL_PTR), mIndex(idx), mBucketList(input_list) {
+        //explicit DeallocatingVectorIteratorState(const ElementT * ptr, const std::size_t idx, const std::vector<ElementT *> & input_list) : mData(ptr), mIndex(idx), mBucketList(input_list) {}
+        explicit DeallocatingVectorIteratorState(const std::size_t idx, std::vector<ElementT *> & input_list) : mData(DEALLOCATION_VECTOR_NULL_PTR), mIndex(idx), mBucketList(input_list) {
             setPointerForIndex();
         }
         ElementT * mData;
-        size_t mIndex;
+        std::size_t mIndex;
         std::vector<ElementT *> & mBucketList;
 
         inline void setPointerForIndex() {
@@ -55,8 +54,8 @@ protected:
                 mData = DEALLOCATION_VECTOR_NULL_PTR;
                 return;
             }
-            size_t _bucket = mIndex/bucketSizeC;
-            size_t _index = mIndex%bucketSizeC;
+            std::size_t _bucket = mIndex/bucketSizeC;
+            std::size_t _index = mIndex%bucketSizeC;
             mData = &(mBucketList[_bucket][_index]);
 
             if(DeallocateC) {
@@ -104,8 +103,8 @@ public:
     template<typename T2>
     DeallocatingVectorIterator(const DeallocatingVectorIterator<T2> & r) : mState(r.mState) {}
 
-    DeallocatingVectorIterator(size_t idx, std::vector<ElementT *> & input_list) : mState(idx, input_list) {}
-    //DeallocatingVectorIterator(size_t idx, const std::vector<ElementT *> & input_list) : mState(idx, input_list) {}
+    DeallocatingVectorIterator(std::size_t idx, std::vector<ElementT *> & input_list) : mState(idx, input_list) {}
+    //DeallocatingVectorIterator(std::size_t idx, const std::vector<ElementT *> & input_list) : mState(idx, input_list) {}
     DeallocatingVectorIterator(const DeallocatingVectorIteratorState & r) : mState(r) {}
 
     template<typename T2>
@@ -185,10 +184,10 @@ public:
     }
 };
 
-template<typename ElementT, size_t bucketSizeC = 10485760/sizeof(ElementT) >
+template<typename ElementT, std::size_t bucketSizeC = 8388608/sizeof(ElementT) >
 class DeallocatingVector {
 private:
-    size_t mCurrentSize;
+    std::size_t mCurrentSize;
     std::vector<ElementT *> mBucketList;
 
 public:
@@ -227,21 +226,21 @@ public:
     }
 
     inline void push_back(const ElementT & element) {
-        size_t _capacity = capacity();
+        std::size_t _capacity = capacity();
         if(mCurrentSize == _capacity) {
             mBucketList.push_back(new ElementT[bucketSizeC]);
         }
 
-        size_t _index = size()%bucketSizeC;
+        std::size_t _index = size()%bucketSizeC;
         mBucketList.back()[_index] = element;
         ++mCurrentSize;
     }
 
-    inline void reserve(const size_t) const {
+    inline void reserve(const std::size_t) const {
         //don't do anything
     }
 
-    inline void resize(const size_t new_size) {
+    inline void resize(const std::size_t new_size) {
         if(new_size > mCurrentSize) {
             while(capacity() < new_size) {
                 mBucketList.push_back(new ElementT[bucketSizeC]);
@@ -249,7 +248,7 @@ public:
             mCurrentSize = new_size;
         }
         if(new_size < mCurrentSize) {
-            size_t number_of_necessary_buckets = 1+(new_size / bucketSizeC);
+            std::size_t number_of_necessary_buckets = 1+(new_size / bucketSizeC);
 
             for(unsigned i = number_of_necessary_buckets; i < mBucketList.size(); ++i) {
                 delete[] mBucketList[i];
@@ -259,16 +258,16 @@ public:
         }
     }
 
-    inline size_t size() const {
+    inline std::size_t size() const {
         return mCurrentSize;
     }
 
-    inline size_t capacity() const {
+    inline std::size_t capacity() const {
         return mBucketList.size() * bucketSizeC;
     }
 
     inline iterator begin() {
-        return iterator(static_cast<size_t>(0), mBucketList);
+        return iterator(static_cast<std::size_t>(0), mBucketList);
     }
 
     inline iterator end() {
@@ -276,7 +275,7 @@ public:
     }
 
     inline deallocation_iterator dbegin() {
-        return deallocation_iterator(static_cast<size_t>(0), mBucketList);
+        return deallocation_iterator(static_cast<std::size_t>(0), mBucketList);
     }
 
     inline deallocation_iterator dend() {
@@ -284,24 +283,36 @@ public:
     }
 
     inline const_iterator begin() const {
-        return const_iterator(static_cast<size_t>(0), mBucketList);
+        return const_iterator(static_cast<std::size_t>(0), mBucketList);
     }
 
     inline const_iterator end() const {
         return const_iterator(size(), mBucketList);
     }
 
-    inline ElementT & operator[](const size_t index) {
-        size_t _bucket = index / bucketSizeC;
-        size_t _index = index % bucketSizeC;
+    inline ElementT & operator[](const std::size_t index) {
+        std::size_t _bucket = index / bucketSizeC;
+        std::size_t _index = index % bucketSizeC;
         return (mBucketList[_bucket][_index]);
     }
 
-    const inline ElementT & operator[](const size_t index) const {
-        size_t _bucket = index / bucketSizeC;
-        size_t _index = index % bucketSizeC;
+    const inline ElementT & operator[](const std::size_t index) const {
+        std::size_t _bucket = index / bucketSizeC;
+        std::size_t _index = index % bucketSizeC;
         return (mBucketList[_bucket][_index]);
     }
+
+    inline ElementT & back() {
+        std::size_t _bucket = mCurrentSize / bucketSizeC;
+        std::size_t _index = mCurrentSize % bucketSizeC;
+        return (mBucketList[_bucket][_index]);
+    }
+
+    const inline ElementT & back() const {
+        std::size_t _bucket = mCurrentSize / bucketSizeC;
+        std::size_t _index = mCurrentSize % bucketSizeC;
+        return (mBucketList[_bucket][_index]);
+    }
 };
 
 #endif /* DEALLOCATINGVECTOR_H_ */
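Throughout the container an element index is split into a bucket number and an offset within a fixed-size heap bucket; the patch shrinks the default bucket to roughly 8 MiB (8388608 bytes) from the earlier 10485760. A sketch of that addressing scheme, simplified and independent of the real class:

    #include <cstddef>
    #include <vector>

    template <typename T, std::size_t BucketSize = 8388608 / sizeof(T)>
    struct BucketedStore {
        std::vector<T*> buckets;                 // each entry points to new T[BucketSize]

        T & at(const std::size_t index) {
            const std::size_t bucket = index / BucketSize;   // which heap block
            const std::size_t offset = index % BucketSize;   // position inside it
            return buckets[bucket][offset];
        }
    };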
@@ -59,8 +59,8 @@ class DynamicGraph {
         {
             m_numNodes = nodes;
             m_numEdges = ( EdgeIterator ) graph.size();
-            m_nodes.reserve( m_numNodes );
-            m_nodes.resize( m_numNodes );
+            m_nodes.reserve( m_numNodes +1);
+            m_nodes.resize( m_numNodes +1);
             EdgeIterator edge = 0;
             EdgeIterator position = 0;
             for ( NodeIterator node = 0; node < m_numNodes; ++node ) {
@@ -72,6 +72,7 @@ class DynamicGraph {
                 m_nodes[node].edges = edge - lastEdge;
                 position += m_nodes[node].edges;
             }
+            m_nodes.back().firstEdge = position;
             m_edges.reserve( position * 1.1 );
             m_edges.resize( position );
             edge = 0;
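The node array now gets one extra sentinel entry, and after the per-node edge counts have been accumulated its firstEdge is set to the total number of edge slots laid out during construction. A sketch of that adjacency-array setup with simplified types; the real graph also fills the edge array afterwards:

    #include <vector>

    struct Node { unsigned firstEdge; unsigned edges; };

    void build_offsets(std::vector<Node> & nodes, const std::vector<unsigned> & degree) {
        const unsigned numNodes = degree.size();
        nodes.resize(numNodes + 1);              // one extra sentinel entry
        unsigned position = 0;
        for (unsigned n = 0; n < numNodes; ++n) {
            nodes[n].firstEdge = position;       // prefix sum of the per-node degrees
            nodes[n].edges = degree[n];
            position += degree[n];
        }
        nodes.back().firstEdge = position;       // sentinel records the end of the edge array
    }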
| @ -97,40 +98,33 @@ class DynamicGraph { | |||||||
|             return m_numEdges; |             return m_numEdges; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         unsigned GetOutDegree( const NodeIterator &n ) const |         unsigned GetOutDegree( const NodeIterator n ) const { | ||||||
|         { |  | ||||||
|             return m_nodes[n].edges; |             return m_nodes[n].edges; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         NodeIterator GetTarget( const EdgeIterator &e ) const |         NodeIterator GetTarget( const EdgeIterator e ) const { | ||||||
|         { |  | ||||||
|             return NodeIterator( m_edges[e].target ); |             return NodeIterator( m_edges[e].target ); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         EdgeDataT &GetEdgeData( const EdgeIterator &e ) |         EdgeDataT &GetEdgeData( const EdgeIterator e ) { | ||||||
|         { |  | ||||||
|             return m_edges[e].data; |             return m_edges[e].data; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         const EdgeDataT &GetEdgeData( const EdgeIterator &e ) const |         const EdgeDataT &GetEdgeData( const EdgeIterator e ) const { | ||||||
|         { |  | ||||||
|             return m_edges[e].data; |             return m_edges[e].data; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         EdgeIterator BeginEdges( const NodeIterator &n ) const |         EdgeIterator BeginEdges( const NodeIterator n ) const { | ||||||
|         { |  | ||||||
|             //assert( EndEdges( n ) - EdgeIterator( _nodes[n].firstEdge ) <= 100 );
 |             //assert( EndEdges( n ) - EdgeIterator( _nodes[n].firstEdge ) <= 100 );
 | ||||||
|             return EdgeIterator( m_nodes[n].firstEdge ); |             return EdgeIterator( m_nodes[n].firstEdge ); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         EdgeIterator EndEdges( const NodeIterator &n ) const |         EdgeIterator EndEdges( const NodeIterator n ) const { | ||||||
|         { |  | ||||||
|             return EdgeIterator( m_nodes[n].firstEdge + m_nodes[n].edges ); |             return EdgeIterator( m_nodes[n].firstEdge + m_nodes[n].edges ); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //adds an edge. Invalidates edge iterators for the source node
 |         //adds an edge. Invalidates edge iterators for the source node
 | ||||||
|         EdgeIterator InsertEdge( const NodeIterator &from, const NodeIterator &to, const EdgeDataT &data ) |         EdgeIterator InsertEdge( const NodeIterator from, const NodeIterator to, const EdgeDataT &data ) { | ||||||
|         { |  | ||||||
|             Node &node = m_nodes[from]; |             Node &node = m_nodes[from]; | ||||||
|             EdgeIterator newFirstEdge = node.edges + node.firstEdge; |             EdgeIterator newFirstEdge = node.edges + node.firstEdge; | ||||||
|             if ( newFirstEdge >= m_edges.size() || !isDummy( newFirstEdge ) ) { |             if ( newFirstEdge >= m_edges.size() || !isDummy( newFirstEdge ) ) { | ||||||
| @ -164,7 +158,7 @@ class DynamicGraph { | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //removes an edge. Invalidates edge iterators for the source node |         //removes an edge. Invalidates edge iterators for the source node | ||||||
|         void DeleteEdge( const NodeIterator source, const EdgeIterator &e ) { |         void DeleteEdge( const NodeIterator source, const EdgeIterator e ) { | ||||||
|             Node &node = m_nodes[source]; |             Node &node = m_nodes[source]; | ||||||
|             --m_numEdges; |             --m_numEdges; | ||||||
|             --node.edges; |             --node.edges; | ||||||
| @ -175,8 +169,7 @@ class DynamicGraph { | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //removes all edges (source,target) |         //removes all edges (source,target) | ||||||
|         int DeleteEdgesTo( const NodeIterator source, const NodeIterator target ) |         int DeleteEdgesTo( const NodeIterator source, const NodeIterator target ) { | ||||||
|         { |  | ||||||
|             int deleted = 0; |             int deleted = 0; | ||||||
|             for ( EdgeIterator i = BeginEdges( source ), iend = EndEdges( source ); i < iend - deleted; ++i ) { |             for ( EdgeIterator i = BeginEdges( source ), iend = EndEdges( source ); i < iend - deleted; ++i ) { | ||||||
|                 if ( m_edges[i].target == target ) { |                 if ( m_edges[i].target == target ) { | ||||||
| @ -196,8 +189,7 @@ class DynamicGraph { | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //searches for a specific edge |         //searches for a specific edge | ||||||
|         EdgeIterator FindEdge( const NodeIterator &from, const NodeIterator &to ) const |         EdgeIterator FindEdge( const NodeIterator from, const NodeIterator to ) const { | ||||||
|         { |  | ||||||
|             for ( EdgeIterator i = BeginEdges( from ), iend = EndEdges( from ); i != iend; ++i ) { |             for ( EdgeIterator i = BeginEdges( from ), iend = EndEdges( from ); i != iend; ++i ) { | ||||||
|                 if ( m_edges[i].target == to ) { |                 if ( m_edges[i].target == to ) { | ||||||
|                     return i; |                     return i; | ||||||
| @ -208,13 +200,11 @@ class DynamicGraph { | |||||||
| 
 | 
 | ||||||
|     protected: |     protected: | ||||||
| 
 | 
 | ||||||
|         bool isDummy( EdgeIterator edge ) const |         bool isDummy( EdgeIterator edge ) const { | ||||||
|         { |  | ||||||
|             return m_edges[edge].target == (std::numeric_limits< NodeIterator >::max)(); |             return m_edges[edge].target == (std::numeric_limits< NodeIterator >::max)(); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         void makeDummy( EdgeIterator edge ) |         void makeDummy( EdgeIterator edge ) { | ||||||
|         { |  | ||||||
|             m_edges[edge].target = (std::numeric_limits< NodeIterator >::max)(); |             m_edges[edge].target = (std::numeric_limits< NodeIterator >::max)(); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
| @ -233,9 +223,8 @@ class DynamicGraph { | |||||||
|         NodeIterator m_numNodes; |         NodeIterator m_numNodes; | ||||||
|         EdgeIterator m_numEdges; |         EdgeIterator m_numEdges; | ||||||
| 
 | 
 | ||||||
|        DeallocatingVector< Node > m_nodes; |         std::vector< Node > m_nodes; | ||||||
|        DeallocatingVector< Edge > m_edges; |         DeallocatingVector< Edge > m_edges; | ||||||
| 
 |  | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| #endif // DYNAMICGRAPH_H_INCLUDED | #endif // DYNAMICGRAPH_H_INCLUDED | ||||||
|  | |||||||
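The DynamicGraph hunks above do two things: the small integral iterator types (NodeIterator, EdgeIterator) are now passed by value instead of by const reference, and the node table gains a trailing sentinel whose firstEdge marks one past the last edge. A minimal, self-contained sketch of that adjacency-array layout, using illustrative types rather than the OSRM ones:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct Node { uint32_t firstEdge; uint32_t edges; };

    int main() {
        const uint32_t degree[] = {2, 0, 1};        // out-degree of each node
        const std::size_t numNodes = 3;
        std::vector<Node> nodes(numNodes + 1);      // +1 for the sentinel entry
        uint32_t position = 0;
        for (std::size_t n = 0; n < numNodes; ++n) {
            nodes[n].firstEdge = position;          // start of node n's edge range
            nodes[n].edges = degree[n];
            position += degree[n];
        }
        nodes.back().firstEdge = position;          // sentinel: one past the last edge
        // EndEdges(n) is then simply nodes[n].firstEdge + nodes[n].edges
        assert(nodes[1].firstEdge == 2 && nodes.back().firstEdge == 3);
        return 0;
    }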
| @ -16,7 +16,7 @@ You should have received a copy of the GNU Affero General Public License | |||||||
| along with this program; if not, write to the Free Software | along with this program; if not, write to the Free Software | ||||||
| Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
| or see http://www.gnu.org/licenses/agpl.txt. | or see http://www.gnu.org/licenses/agpl.txt. | ||||||
| */ |  */ | ||||||
| 
 | 
 | ||||||
| #ifndef EDGE_H | #ifndef EDGE_H | ||||||
| #define EDGE_H | #define EDGE_H | ||||||
| @ -40,12 +40,8 @@ public: | |||||||
|         return (source() < e.source()); |         return (source() < e.source()); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     /** Default constructor. target and weight are set to 0.*/ |     explicit NodeBasedEdge(NodeID s, NodeID t, NodeID n, EdgeWeight w, bool f, bool b, short ty, bool ra, bool ig, bool ar, bool cf) : | ||||||
|     NodeBasedEdge() : |                                             _source(s), _target(t), _name(n), _weight(w), forward(f), backward(b), _type(ty), _roundabout(ra), _ignoreInGrid(ig), _accessRestricted(ar), _contraFlow(cf) { if(ty < 0) {ERR("Type: " << ty);}; } | ||||||
|         _source(0), _target(0), _name(0), _weight(0), forward(0), backward(0), _type(0), _roundabout(false), _ignoreInGrid(false), _accessRestricted(false) { assert(false); } //shall not be used. |  | ||||||
| 
 |  | ||||||
|     explicit NodeBasedEdge(NodeID s, NodeID t, NodeID n, EdgeWeight w, bool f, bool b, short ty, bool ra, bool ig, bool ar) : |  | ||||||
|             _source(s), _target(t), _name(n), _weight(w), forward(f), backward(b), _type(ty), _roundabout(ra), _ignoreInGrid(ig), _accessRestricted(ar) { if(ty < 0) {ERR("Type: " << ty);}; } |  | ||||||
| 
 | 
 | ||||||
|     NodeID target() const {return _target; } |     NodeID target() const {return _target; } | ||||||
|     NodeID source() const {return _source; } |     NodeID source() const {return _source; } | ||||||
| @ -59,6 +55,7 @@ public: | |||||||
|     bool isRoundabout() const { return _roundabout; } |     bool isRoundabout() const { return _roundabout; } | ||||||
|     bool ignoreInGrid() const { return _ignoreInGrid; } |     bool ignoreInGrid() const { return _ignoreInGrid; } | ||||||
|     bool isAccessRestricted() const { return _accessRestricted; } |     bool isAccessRestricted() const { return _accessRestricted; } | ||||||
|  |     bool isContraFlow() const { return _contraFlow; } | ||||||
| 
 | 
 | ||||||
|     NodeID _source; |     NodeID _source; | ||||||
|     NodeID _target; |     NodeID _target; | ||||||
| @ -70,6 +67,13 @@ public: | |||||||
|     bool _roundabout; |     bool _roundabout; | ||||||
|     bool _ignoreInGrid; |     bool _ignoreInGrid; | ||||||
|     bool _accessRestricted; |     bool _accessRestricted; | ||||||
|  |     bool _contraFlow; | ||||||
|  | 
 | ||||||
|  | private: | ||||||
|  |     /** Default constructor. target and weight are set to 0.*/ | ||||||
|  |     NodeBasedEdge() : | ||||||
|  |         _source(0), _target(0), _name(0), _weight(0), forward(0), backward(0), _type(0), _roundabout(false), _ignoreInGrid(false), _accessRestricted(false), _contraFlow(false) { assert(false); } //shall not be used. | ||||||
|  | 
 | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| class EdgeBasedEdge { | class EdgeBasedEdge { | ||||||
| @ -91,43 +95,46 @@ public: | |||||||
| 
 | 
 | ||||||
|     template<class EdgeT> |     template<class EdgeT> | ||||||
|     EdgeBasedEdge(const EdgeT & myEdge ) : |     EdgeBasedEdge(const EdgeT & myEdge ) : | ||||||
|         _source(myEdge.source), |         m_source(myEdge.source), | ||||||
|         _target(myEdge.target), |         m_target(myEdge.target), | ||||||
|         _edgeID(myEdge.data.via), |         m_edgeID(myEdge.data.via), | ||||||
| //        _nameID1(myEdge.data.nameID), |         m_weight(myEdge.data.distance), | ||||||
|         _weight(myEdge.data.distance), |         m_forward(myEdge.data.forward), | ||||||
|         _forward(myEdge.data.forward), |         m_backward(myEdge.data.backward) | ||||||
|         _backward(myEdge.data.backward)//, |     { } | ||||||
| //        _turnInstruction(myEdge.data.turnInstruction) |  | ||||||
|                 { } |  | ||||||
| 
 | 
 | ||||||
|     /** Default constructor. target and weight are set to 0.*/ |     /** Default constructor. target and weight are set to 0.*/ | ||||||
|     EdgeBasedEdge() : |     EdgeBasedEdge() : | ||||||
|         _source(0), _target(0), _edgeID(0), _weight(0), _forward(false), _backward(false) { } |         m_source(0), | ||||||
|  |         m_target(0), | ||||||
|  |         m_edgeID(0), | ||||||
|  |         m_weight(0), | ||||||
|  |         m_forward(false), | ||||||
|  |         m_backward(false) | ||||||
|  |     { } | ||||||
| 
 | 
 | ||||||
|     explicit EdgeBasedEdge(NodeID s, NodeID t, NodeID v, EdgeWeight w, bool f, bool b) : |     explicit EdgeBasedEdge(const NodeID s, const NodeID t, const NodeID v, const EdgeWeight w, const bool f, const bool b) : | ||||||
|             _source(s), _target(t), _edgeID(v), _weight(w), _forward(f), _backward(b){} |         m_source(s), | ||||||
|  |         m_target(t), | ||||||
|  |         m_edgeID(v), | ||||||
|  |         m_weight(w), | ||||||
|  |         m_forward(f), | ||||||
|  |         m_backward(b) | ||||||
|  |     {} | ||||||
| 
 | 
 | ||||||
|     NodeID target() const {return _target; } |     NodeID target() const {return m_target; } | ||||||
|     NodeID source() const {return _source; } |     NodeID source() const {return m_source; } | ||||||
|     EdgeWeight weight() const {return _weight; } |     EdgeWeight weight() const {return m_weight; } | ||||||
|     NodeID id() const { return _edgeID; } |     NodeID id() const { return m_edgeID; } | ||||||
|     bool isBackward() const { return _backward; } |     bool isBackward() const { return m_backward; } | ||||||
|     bool isForward() const { return _forward; } |     bool isForward() const { return m_forward; } | ||||||
|  | private: | ||||||
|     NodeID _source; |     NodeID m_source; | ||||||
|     NodeID _target; |     NodeID m_target; | ||||||
|     NodeID _edgeID; |     NodeID m_edgeID; | ||||||
|     EdgeWeight _weight:30; |     EdgeWeight m_weight:30; | ||||||
|     bool _forward:1; |     bool m_forward:1; | ||||||
|     bool _backward:1; |     bool m_backward:1; | ||||||
| }; |  | ||||||
| 
 |  | ||||||
| struct MinimalEdgeData { |  | ||||||
| public: |  | ||||||
|     EdgeWeight distance; |  | ||||||
|     bool forward; |  | ||||||
|     bool backward; |  | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| typedef NodeBasedEdge ImportEdge; | typedef NodeBasedEdge ImportEdge; | ||||||
|  | |||||||
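Two patterns in the Edge.h changes above are worth spelling out: the default constructor of NodeBasedEdge is now private (an edge must be built fully initialized), and EdgeBasedEdge hides its members behind accessors. A small sketch of the private-default-constructor idiom, with illustrative names only:

    class Record {
    public:
        explicit Record(int source, int target) : source_(source), target_(target) {}
        int source() const { return source_; }
        int target() const { return target_; }
    private:
        Record();            // declared but never defined: callers must supply all fields
        int source_;
        int target_;
    };

    int main() {
        Record r(1, 2);      // fine
        // Record broken;    // would not compile: the default constructor is private
        return r.source() + r.target() - 3;
    }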
| @ -43,21 +43,21 @@ int readFromBz2Stream( void* pointer, char* buffer, int len ) { | |||||||
|             return read; |             return read; | ||||||
|         } else if(BZ_STREAM_END == context->error) { |         } else if(BZ_STREAM_END == context->error) { | ||||||
|             BZ2_bzReadGetUnused(&context->error, context->bz2, &unusedTmpVoid, &context->nUnused); |             BZ2_bzReadGetUnused(&context->error, context->bz2, &unusedTmpVoid, &context->nUnused); | ||||||
|             if(BZ_OK != context->error) { cerr << "Could not BZ2_bzReadGetUnused" << endl; exit(-1);}; |             if(BZ_OK != context->error) {std::cerr << "Could not BZ2_bzReadGetUnused" <<std::endl; exit(-1);}; | ||||||
|             unusedTmp = (char*)unusedTmpVoid; |             unusedTmp = (char*)unusedTmpVoid; | ||||||
|             for(int i=0;i<context->nUnused;i++) { |             for(int i=0;i<context->nUnused;i++) { | ||||||
|                 context->unused[i] = unusedTmp[i]; |                 context->unused[i] = unusedTmp[i]; | ||||||
|             } |             } | ||||||
|             BZ2_bzReadClose(&context->error, context->bz2); |             BZ2_bzReadClose(&context->error, context->bz2); | ||||||
|             if(BZ_OK != context->error) { cerr << "Could not BZ2_bzReadClose" << endl; exit(-1);}; |             if(BZ_OK != context->error) {std::cerr << "Could not BZ2_bzReadClose" <<std::endl; exit(-1);}; | ||||||
|             context->error = BZ_STREAM_END; // set to the stream end for next call to this function |             context->error = BZ_STREAM_END; // set to the stream end for next call to this function | ||||||
|             if(0 == context->nUnused && feof(context->file)) { |             if(0 == context->nUnused && feof(context->file)) { | ||||||
|                 return read; |                 return read; | ||||||
|             } else { |             } else { | ||||||
|                 context->bz2 = BZ2_bzReadOpen(&context->error, context->file, 0, 0, context->unused, context->nUnused); |                 context->bz2 = BZ2_bzReadOpen(&context->error, context->file, 0, 0, context->unused, context->nUnused); | ||||||
|                 if(NULL == context->bz2){ cerr << "Could not open file" << endl; exit(-1);}; |                 if(NULL == context->bz2){std::cerr << "Could not open file" <<std::endl; exit(-1);}; | ||||||
|             } |             } | ||||||
|         } else { cerr << "Could not read bz2 file" << endl; exit(-1); } |         } else { std::cerr << "Could not read bz2 file" << std::endl; exit(-1); } | ||||||
|     } |     } | ||||||
|     return read; |     return read; | ||||||
| } | } | ||||||
| @ -74,7 +74,7 @@ xmlTextReaderPtr inputReaderFactory( const char* name ) | |||||||
| { | { | ||||||
|     std::string inputName(name); |     std::string inputName(name); | ||||||
| 
 | 
 | ||||||
|     if(inputName.find(".osm.bz2")!=string::npos) |     if(inputName.find(".osm.bz2")!=std::string::npos) | ||||||
|     { |     { | ||||||
|         BZ2Context* context = new BZ2Context(); |         BZ2Context* context = new BZ2Context(); | ||||||
|         context->error = false; |         context->error = false; | ||||||
|  | |||||||
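readFromBz2Stream has the signature libxml2 expects of an xmlInputReadCallback. Presumably inputReaderFactory wires it up roughly as below; the close callback name here is hypothetical and the options argument may differ in the real code:

    #include <libxml/xmlreader.h>

    extern int readFromBz2Stream(void * pointer, char * buffer, int len);
    extern int closeBz2Stream(void * pointer);   // hypothetical close callback

    xmlTextReaderPtr makeBz2Reader(void * bz2Context, const char * url) {
        // hand the custom read/close callbacks and their context to libxml2
        return xmlReaderForIO(readFromBz2Stream, closeBz2Stream, bz2Context, url, NULL, 0);
    }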
| @ -16,20 +16,23 @@ You should have received a copy of the GNU Affero General Public License | |||||||
| along with this program; if not, write to the Free Software | along with this program; if not, write to the Free Software | ||||||
| Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
| or see http://www.gnu.org/licenses/agpl.txt. | or see http://www.gnu.org/licenses/agpl.txt. | ||||||
|  */ | */ | ||||||
| 
 | 
 | ||||||
|  | #ifndef MERCATORUTIL_H_ | ||||||
|  | #define MERCATORUTIL_H_ | ||||||
| 
 | 
 | ||||||
|  | #include <cmath> | ||||||
| 
 | 
 | ||||||
| #ifndef LUAUTIL_H_ | #ifndef M_PI | ||||||
| #define LUAUTIL_H_ | #define M_PI 3.14159265358979323846 | ||||||
|  | #endif | ||||||
| 
 | 
 | ||||||
| #include <iostream> | inline double y2lat(double a) { | ||||||
|  | 	return 180/M_PI * (2 * atan(exp(a*M_PI/180)) - M_PI/2); | ||||||
| template<typename T> |  | ||||||
| void LUA_print(T number) { |  | ||||||
|   std::cout << "[LUA] " << number << std::endl; |  | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | inline double lat2y(double a) { | ||||||
|  | 	return 180/M_PI * log(tan(M_PI/4+a*(M_PI/180)/2)); | ||||||
|  | } | ||||||
| 
 | 
 | ||||||
|  | #endif /* MERCATORUTIL_H_ */ | ||||||
| #endif /* LUAUTIL_H_ */ |  | ||||||
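The two helpers moved into MercatorUtil.h convert between a latitude in degrees and its Mercator y coordinate, and they should be inverses of each other. A standalone sanity check that simply restates the formulas from the diff:

    #include <cassert>
    #include <cmath>
    #include <cstdio>

    #ifndef M_PI
    #define M_PI 3.14159265358979323846
    #endif

    static double y2lat(double a) { return 180/M_PI * (2 * atan(exp(a*M_PI/180)) - M_PI/2); }
    static double lat2y(double a) { return 180/M_PI * log(tan(M_PI/4 + a*(M_PI/180)/2)); }

    int main() {
        const double lat = 48.137;              // arbitrary test latitude in degrees
        const double y = lat2y(lat);
        std::printf("lat=%f  y=%f  roundtrip=%f\n", lat, y, y2lat(y));
        assert(std::fabs(y2lat(y) - lat) < 1e-9);
        return 0;
    }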
| @ -21,10 +21,12 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #ifndef NNGRID_H_ | #ifndef NNGRID_H_ | ||||||
| #define NNGRID_H_ | #define NNGRID_H_ | ||||||
| 
 | 
 | ||||||
| #include <algorithm> |  | ||||||
| #include <cassert> | #include <cassert> | ||||||
| #include <cfloat> | #include <cfloat> | ||||||
| #include <cmath> | #include <cmath> | ||||||
|  | #include <cstring> | ||||||
|  | 
 | ||||||
|  | #include <algorithm> | ||||||
| #include <fstream> | #include <fstream> | ||||||
| #include <limits> | #include <limits> | ||||||
| #include <vector> | #include <vector> | ||||||
| @ -42,12 +44,12 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include <boost/unordered_map.hpp> | #include <boost/unordered_map.hpp> | ||||||
| 
 | 
 | ||||||
| #include "DeallocatingVector.h" | #include "DeallocatingVector.h" | ||||||
| //#include "ExtractorStructs.h"
 |  | ||||||
| #include "GridEdge.h" | #include "GridEdge.h" | ||||||
| #include "Percent.h" | #include "Percent.h" | ||||||
| #include "PhantomNodes.h" | #include "PhantomNodes.h" | ||||||
| #include "Util.h" | #include "MercatorUtil.h" | ||||||
| #include "StaticGraph.h" | #include "StaticGraph.h" | ||||||
|  | #include "TimingUtil.h" | ||||||
| #include "../Algorithms/Bresenham.h" | #include "../Algorithms/Bresenham.h" | ||||||
| 
 | 
 | ||||||
| namespace NNGrid{ | namespace NNGrid{ | ||||||
| @ -58,7 +60,7 @@ template<bool WriteAccess = false> | |||||||
| class NNGrid { | class NNGrid { | ||||||
| public: | public: | ||||||
|     NNGrid() /*: cellCache(500), fileCache(500)*/ { |     NNGrid() /*: cellCache(500), fileCache(500)*/ { | ||||||
|         ramIndexTable.resize((1024*1024), ULONG_MAX); |         ramIndexTable.resize((1024*1024), std::numeric_limits<uint64_t>::max()); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     NNGrid(const char* rif, const char* _i) { |     NNGrid(const char* rif, const char* _i) { | ||||||
| @ -66,7 +68,7 @@ public: | |||||||
|             ERR("Not available in Write mode"); |             ERR("Not available in Write mode"); | ||||||
|         } |         } | ||||||
|         iif = std::string(_i); |         iif = std::string(_i); | ||||||
|         ramIndexTable.resize((1024*1024), ULONG_MAX); |         ramIndexTable.resize((1024*1024), std::numeric_limits<uint64_t>::max()); | ||||||
|         ramInFile.open(rif, std::ios::in | std::ios::binary); |         ramInFile.open(rif, std::ios::in | std::ios::binary); | ||||||
|         if(!ramInFile) { ERR(rif <<  " not found"); } |         if(!ramInFile) { ERR(rif <<  " not found"); } | ||||||
| 
 | 
 | ||||||
| @ -87,13 +89,13 @@ public: | |||||||
| 
 | 
 | ||||||
|     void OpenIndexFiles() { |     void OpenIndexFiles() { | ||||||
|         assert(ramInFile.is_open()); |         assert(ramInFile.is_open()); | ||||||
|         ramInFile.read(static_cast<char*>(static_cast<void*>(&ramIndexTable[0]) ), sizeof(unsigned long)*1024*1024); |         ramInFile.read(static_cast<char*>(static_cast<void*>(&ramIndexTable[0]) ), sizeof(uint64_t)*1024*1024); | ||||||
|         ramInFile.close(); |         ramInFile.close(); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
| #ifndef ROUTED | #ifndef ROUTED | ||||||
|     template<typename EdgeT> |     template<typename EdgeT> | ||||||
|     inline void ConstructGrid(DeallocatingVector<EdgeT> & edgeList, char * ramIndexOut, char * fileIndexOut) { |     inline void ConstructGrid(DeallocatingVector<EdgeT> & edgeList, const char * ramIndexOut, const char * fileIndexOut) { | ||||||
|     	//TODO: Implement this using STXXL-Streams |     	//TODO: Implement this using STXXL-Streams | ||||||
|         Percent p(edgeList.size()); |         Percent p(edgeList.size()); | ||||||
|         BOOST_FOREACH(EdgeT & edge, edgeList) { |         BOOST_FOREACH(EdgeT & edge, edgeList) { | ||||||
| @ -106,6 +108,9 @@ public: | |||||||
|             int tlon = edge.lon2; |             int tlon = edge.lon2; | ||||||
|             AddEdge( _GridEdge( edge.id, edge.nameID, edge.weight, _Coordinate(slat, slon), _Coordinate(tlat, tlon), edge.belongsToTinyComponent ) ); |             AddEdge( _GridEdge( edge.id, edge.nameID, edge.weight, _Coordinate(slat, slon), _Coordinate(tlat, tlon), edge.belongsToTinyComponent ) ); | ||||||
|         } |         } | ||||||
|  |         if( 0 == entries.size() ) { | ||||||
|  |         	ERR("No viable edges for nearest neighbor index. Aborting"); | ||||||
|  |         } | ||||||
|         double timestamp = get_timestamp(); |         double timestamp = get_timestamp(); | ||||||
|         //create index file on disk, old one is over written |         //create index file on disk, old one is over written | ||||||
|         indexOutFile.open(fileIndexOut, std::ios::out | std::ios::binary | std::ios::trunc); |         indexOutFile.open(fileIndexOut, std::ios::out | std::ios::binary | std::ios::trunc); | ||||||
| @ -114,8 +119,8 @@ public: | |||||||
|         INFO("finished sorting after " << (get_timestamp() - timestamp) << "s"); |         INFO("finished sorting after " << (get_timestamp() - timestamp) << "s"); | ||||||
|         std::vector<GridEntry> entriesInFileWithRAMSameIndex; |         std::vector<GridEntry> entriesInFileWithRAMSameIndex; | ||||||
|         unsigned indexInRamTable = entries.begin()->ramIndex; |         unsigned indexInRamTable = entries.begin()->ramIndex; | ||||||
|         unsigned long lastPositionInIndexFile = 0; |         uint64_t lastPositionInIndexFile = 0; | ||||||
|         cout << "writing data ..." << flush; |         std::cout << "writing data ..." << std::flush; | ||||||
|         p.reinit(entries.size()); |         p.reinit(entries.size()); | ||||||
|         boost::unordered_map< unsigned, unsigned > cellMap(1024); |         boost::unordered_map< unsigned, unsigned > cellMap(1024); | ||||||
|         BOOST_FOREACH(GridEntry & gridEntry, entries) { |         BOOST_FOREACH(GridEntry & gridEntry, entries) { | ||||||
| @ -143,9 +148,9 @@ public: | |||||||
|         indexOutFile.close(); |         indexOutFile.close(); | ||||||
| 
 | 
 | ||||||
|         //Serialize RAM Index |         //Serialize RAM Index | ||||||
|         ofstream ramFile(ramIndexOut, std::ios::out | std::ios::binary | std::ios::trunc); |         std::ofstream ramFile(ramIndexOut, std::ios::out | std::ios::binary | std::ios::trunc); | ||||||
|         //write 4 MB of index Table in RAM |         //write 4 MB of index Table in RAM | ||||||
|         ramFile.write((char *)&ramIndexTable[0], sizeof(unsigned long)*1024*1024 ); |         ramFile.write((char *)&ramIndexTable[0], sizeof(uint64_t)*1024*1024 ); | ||||||
|         //close ram index file |         //close ram index file | ||||||
|         ramFile.close(); |         ramFile.close(); | ||||||
|     } |     } | ||||||
| @ -174,10 +179,10 @@ public: | |||||||
| //        INFO("looked up " << candidates.size());
 | //        INFO("looked up " << candidates.size());
 | ||||||
|         _GridEdge smallestEdge; |         _GridEdge smallestEdge; | ||||||
|         _Coordinate tmp, edgeStartCoord, edgeEndCoord; |         _Coordinate tmp, edgeStartCoord, edgeEndCoord; | ||||||
|         double dist = numeric_limits<double>::max(); |         double dist = std::numeric_limits<double>::max(); | ||||||
|         double r, tmpDist; |         double r, tmpDist; | ||||||
| 
 | 
 | ||||||
|         BOOST_FOREACH(_GridEdge candidate, candidates) { |         BOOST_FOREACH(const _GridEdge & candidate, candidates) { | ||||||
|             if(candidate.belongsToTinyComponent && ignoreTinyComponents) |             if(candidate.belongsToTinyComponent && ignoreTinyComponents) | ||||||
|                 continue; |                 continue; | ||||||
|             r = 0.; |             r = 0.; | ||||||
| @ -216,7 +221,7 @@ public: | |||||||
|         //        } |         //        } | ||||||
| 
 | 
 | ||||||
| //        INFO("startCoord: " << smallestEdge.startCoord << "; targetCoord: " << smallestEdge.targetCoord << "; newEndpoint: " << resultNode.location);
 | //        INFO("startCoord: " << smallestEdge.startCoord << "; targetCoord: " << smallestEdge.targetCoord << "; newEndpoint: " << resultNode.location);
 | ||||||
|         double ratio = (foundNode ? std::min(1., ApproximateDistance(smallestEdge.startCoord, resultNode.location)/ApproximateDistance(smallestEdge.startCoord, smallestEdge.targetCoord)) : 0); |         const double ratio = (foundNode ? std::min(1., ApproximateDistance(smallestEdge.startCoord, resultNode.location)/ApproximateDistance(smallestEdge.startCoord, smallestEdge.targetCoord)) : 0); | ||||||
|         resultNode.location.lat = round(100000.*(y2lat(static_cast<double>(resultNode.location.lat)/100000.))); |         resultNode.location.lat = round(100000.*(y2lat(static_cast<double>(resultNode.location.lat)/100000.))); | ||||||
| //        INFO("Length of vector: " << ApproximateDistance(smallestEdge.startCoord, resultNode.location)/ApproximateDistance(smallestEdge.startCoord, smallestEdge.targetCoord));
 | //        INFO("Length of vector: " << ApproximateDistance(smallestEdge.startCoord, resultNode.location)/ApproximateDistance(smallestEdge.startCoord, smallestEdge.targetCoord));
 | ||||||
|         //Hack to fix rounding errors and wandering via nodes. |         //Hack to fix rounding errors and wandering via nodes. | ||||||
| @ -227,12 +232,13 @@ public: | |||||||
| 
 | 
 | ||||||
|         resultNode.weight1 *= ratio; |         resultNode.weight1 *= ratio; | ||||||
|         if(INT_MAX != resultNode.weight2) { |         if(INT_MAX != resultNode.weight2) { | ||||||
|             resultNode.weight2 -= resultNode.weight1; |             resultNode.weight2 *= (1.-ratio); | ||||||
|         } |         } | ||||||
|         resultNode.ratio = ratio; |         resultNode.ratio = ratio; | ||||||
| //        INFO("New weight1: " << resultNode.weight1 << ", new weight2: " << resultNode.weight2 << ", ratio: " << ratio);
 |  | ||||||
| //        INFO("start: " << edgeStartCoord << ", end: " << edgeEndCoord);
 | //        INFO("start: " << edgeStartCoord << ", end: " << edgeEndCoord);
 | ||||||
| //        INFO("selected node: " << resultNode.edgeBasedNode << ", bidirected: " << (resultNode.isBidirected() ? "yes" : "no") <<  "\n--");
 | //        INFO("selected node: " << resultNode.edgeBasedNode << ", bidirected: " << (resultNode.isBidirected() ? "yes" : "no"));
 | ||||||
|  | //        INFO("New weight1: " << resultNode.weight1 << ", new weight2: " << resultNode.weight2 << ", ratio: " << ratio);
 | ||||||
|  |  //       INFO("distance to input coordinate: " << ApproximateDistance(location, resultNode.location) <<  "\n--");
 | ||||||
| //        double time2 = get_timestamp();
 | //        double time2 = get_timestamp();
 | ||||||
| //        INFO("NN-Lookup in " << 1000*(time2-time1) << "ms");
 | //        INFO("NN-Lookup in " << 1000*(time2-time1) << "ms");
 | ||||||
|         return foundNode; |         return foundNode; | ||||||
| @ -264,7 +270,7 @@ public: | |||||||
|         } |         } | ||||||
|         _Coordinate tmp; |         _Coordinate tmp; | ||||||
|         double dist = (std::numeric_limits<double>::max)(); |         double dist = (std::numeric_limits<double>::max)(); | ||||||
|         BOOST_FOREACH(_GridEdge candidate, candidates) { |         BOOST_FOREACH(const _GridEdge & candidate, candidates) { | ||||||
|             double r = 0.; |             double r = 0.; | ||||||
|             double tmpDist = ComputeDistance(startCoord, candidate.startCoord, candidate.targetCoord, tmp, &r); |             double tmpDist = ComputeDistance(startCoord, candidate.startCoord, candidate.targetCoord, tmp, &r); | ||||||
|             if(tmpDist < dist) { |             if(tmpDist < dist) { | ||||||
| @ -310,17 +316,18 @@ private: | |||||||
|         cellMap.insert(insertionVector.begin(), insertionVector.end()); |         cellMap.insert(insertionVector.begin(), insertionVector.end()); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline bool DoubleEpsilonCompare(const double d1, const double d2) { |     inline bool DoubleEpsilonCompare(const double d1, const double d2) const { | ||||||
|         return (std::fabs(d1 - d2) < FLT_EPSILON); |         return (std::fabs(d1 - d2) < FLT_EPSILON); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline unsigned FillCell(std::vector<GridEntry>& entriesWithSameRAMIndex, const unsigned long fileOffset, boost::unordered_map< unsigned, unsigned > & cellMap ) { | #ifndef ROUTED | ||||||
|  |     inline unsigned FillCell(std::vector<GridEntry>& entriesWithSameRAMIndex, const uint64_t fileOffset, boost::unordered_map< unsigned, unsigned > & cellMap ) { | ||||||
|         std::vector<char> tmpBuffer(32*32*4096,0); |         std::vector<char> tmpBuffer(32*32*4096,0); | ||||||
|         unsigned long indexIntoTmpBuffer = 0; |         uint64_t indexIntoTmpBuffer = 0; | ||||||
|         unsigned numberOfWrittenBytes = 0; |         unsigned numberOfWrittenBytes = 0; | ||||||
|         assert(indexOutFile.is_open()); |         assert(indexOutFile.is_open()); | ||||||
| 
 | 
 | ||||||
|         std::vector<unsigned long> cellIndex(32*32,ULONG_MAX); |         std::vector<uint64_t> cellIndex(32*32,std::numeric_limits<uint64_t>::max()); | ||||||
| 
 | 
 | ||||||
|         for(unsigned i = 0; i < entriesWithSameRAMIndex.size() -1; ++i) { |         for(unsigned i = 0; i < entriesWithSameRAMIndex.size() -1; ++i) { | ||||||
|             assert(entriesWithSameRAMIndex[i].ramIndex== entriesWithSameRAMIndex[i+1].ramIndex); |             assert(entriesWithSameRAMIndex[i].ramIndex== entriesWithSameRAMIndex[i+1].ramIndex); | ||||||
| @ -356,8 +363,8 @@ private: | |||||||
|         indexIntoTmpBuffer += FlushEntriesWithSameFileIndexToBuffer(entriesWithSameFileIndex, tmpBuffer, indexIntoTmpBuffer); |         indexIntoTmpBuffer += FlushEntriesWithSameFileIndexToBuffer(entriesWithSameFileIndex, tmpBuffer, indexIntoTmpBuffer); | ||||||
| 
 | 
 | ||||||
|         assert(entriesWithSameFileIndex.size() == 0); |         assert(entriesWithSameFileIndex.size() == 0); | ||||||
|         indexOutFile.write(static_cast<char*>(static_cast<void*>(&cellIndex[0])),32*32*sizeof(unsigned long)); |         indexOutFile.write(static_cast<char*>(static_cast<void*>(&cellIndex[0])),32*32*sizeof(uint64_t)); | ||||||
|         numberOfWrittenBytes += 32*32*sizeof(unsigned long); |         numberOfWrittenBytes += 32*32*sizeof(uint64_t); | ||||||
| 
 | 
 | ||||||
|         //write contents of tmpbuffer to disk |         //write contents of tmpbuffer to disk | ||||||
|         indexOutFile.write(&tmpBuffer[0], indexIntoTmpBuffer*sizeof(char)); |         indexOutFile.write(&tmpBuffer[0], indexIntoTmpBuffer*sizeof(char)); | ||||||
| @ -366,7 +373,7 @@ private: | |||||||
|         return numberOfWrittenBytes; |         return numberOfWrittenBytes; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline unsigned FlushEntriesWithSameFileIndexToBuffer( std::vector<GridEntry> &vectorWithSameFileIndex, std::vector<char> & tmpBuffer, const unsigned long index) const { |     inline unsigned FlushEntriesWithSameFileIndexToBuffer( std::vector<GridEntry> &vectorWithSameFileIndex, std::vector<char> & tmpBuffer, const uint64_t index) const { | ||||||
|         sort( vectorWithSameFileIndex.begin(), vectorWithSameFileIndex.end() ); |         sort( vectorWithSameFileIndex.begin(), vectorWithSameFileIndex.end() ); | ||||||
|         vectorWithSameFileIndex.erase(unique(vectorWithSameFileIndex.begin(), vectorWithSameFileIndex.end()), vectorWithSameFileIndex.end()); |         vectorWithSameFileIndex.erase(unique(vectorWithSameFileIndex.begin(), vectorWithSameFileIndex.end()), vectorWithSameFileIndex.end()); | ||||||
|         const unsigned lengthOfBucket = vectorWithSameFileIndex.size(); |         const unsigned lengthOfBucket = vectorWithSameFileIndex.size(); | ||||||
| @ -391,11 +398,12 @@ private: | |||||||
|         vectorWithSameFileIndex.clear(); |         vectorWithSameFileIndex.clear(); | ||||||
|         return counter; |         return counter; | ||||||
|     } |     } | ||||||
|  | #endif | ||||||
| 
 | 
 | ||||||
|     inline void GetContentsOfFileBucketEnumerated(const unsigned fileIndex, std::vector<_GridEdge>& result) const { |     inline void GetContentsOfFileBucketEnumerated(const unsigned fileIndex, std::vector<_GridEdge>& result) const { | ||||||
|         unsigned ramIndex = GetRAMIndexFromFileIndex(fileIndex); |         unsigned ramIndex = GetRAMIndexFromFileIndex(fileIndex); | ||||||
|         unsigned long startIndexInFile = ramIndexTable[ramIndex]; |         uint64_t startIndexInFile = ramIndexTable[ramIndex]; | ||||||
|         if(startIndexInFile == ULONG_MAX) { |         if(startIndexInFile == std::numeric_limits<uint64_t>::max()) { | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|         unsigned enumeratedIndex = GetCellIndexFromRAMAndFileIndex(ramIndex, fileIndex); |         unsigned enumeratedIndex = GetCellIndexFromRAMAndFileIndex(ramIndex, fileIndex); | ||||||
| @ -409,14 +417,14 @@ private: | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //only read the single necessary cell index |         //only read the single necessary cell index | ||||||
|         localStream->seekg(startIndexInFile+(enumeratedIndex*sizeof(unsigned long))); |         localStream->seekg(startIndexInFile+(enumeratedIndex*sizeof(uint64_t))); | ||||||
|         unsigned long fetchedIndex = 0; |         uint64_t fetchedIndex = 0; | ||||||
|         localStream->read(static_cast<char*>( static_cast<void*>(&fetchedIndex)), sizeof(unsigned long)); |         localStream->read(static_cast<char*>( static_cast<void*>(&fetchedIndex)), sizeof(uint64_t)); | ||||||
| 
 | 
 | ||||||
|         if(fetchedIndex == ULONG_MAX) { |         if(fetchedIndex == std::numeric_limits<uint64_t>::max()) { | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|         const unsigned long position = fetchedIndex + 32*32*sizeof(unsigned long) ; |         const uint64_t position = fetchedIndex + 32*32*sizeof(uint64_t) ; | ||||||
| 
 | 
 | ||||||
|         unsigned lengthOfBucket; |         unsigned lengthOfBucket; | ||||||
|         unsigned currentSizeOfResult = result.size(); |         unsigned currentSizeOfResult = result.size(); | ||||||
| @ -428,12 +436,12 @@ private: | |||||||
| 
 | 
 | ||||||
|     inline void GetContentsOfFileBucket(const unsigned fileIndex, std::vector<_GridEdge>& result, boost::unordered_map< unsigned, unsigned> & cellMap) { |     inline void GetContentsOfFileBucket(const unsigned fileIndex, std::vector<_GridEdge>& result, boost::unordered_map< unsigned, unsigned> & cellMap) { | ||||||
|         unsigned ramIndex = GetRAMIndexFromFileIndex(fileIndex); |         unsigned ramIndex = GetRAMIndexFromFileIndex(fileIndex); | ||||||
|         unsigned long startIndexInFile = ramIndexTable[ramIndex]; |         uint64_t startIndexInFile = ramIndexTable[ramIndex]; | ||||||
|         if(startIndexInFile == ULONG_MAX) { |         if(startIndexInFile == std::numeric_limits<uint64_t>::max()) { | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         unsigned long cellIndex[32*32]; |         uint64_t cellIndex[32*32]; | ||||||
| 
 | 
 | ||||||
|         cellMap.clear(); |         cellMap.clear(); | ||||||
|         BuildCellIndexToFileIndexMap(ramIndex,  cellMap); |         BuildCellIndexToFileIndexMap(ramIndex,  cellMap); | ||||||
| @ -446,12 +454,12 @@ private: | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         localStream->seekg(startIndexInFile); |         localStream->seekg(startIndexInFile); | ||||||
|         localStream->read(static_cast<char*>(static_cast<void*>( cellIndex)), 32*32*sizeof(unsigned long)); |         localStream->read(static_cast<char*>(static_cast<void*>( cellIndex)), 32*32*sizeof(uint64_t)); | ||||||
|         assert(cellMap.find(fileIndex) != cellMap.end()); |         assert(cellMap.find(fileIndex) != cellMap.end()); | ||||||
|         if(cellIndex[cellMap[fileIndex]] == ULONG_MAX) { |         if(cellIndex[cellMap[fileIndex]] == std::numeric_limits<uint64_t>::max()) { | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|         const unsigned long position = cellIndex[cellMap[fileIndex]] + 32*32*sizeof(unsigned long) ; |         const uint64_t position = cellIndex[cellMap[fileIndex]] + 32*32*sizeof(uint64_t) ; | ||||||
| 
 | 
 | ||||||
|         unsigned lengthOfBucket; |         unsigned lengthOfBucket; | ||||||
|         unsigned currentSizeOfResult = result.size(); |         unsigned currentSizeOfResult = result.size(); | ||||||
| @ -543,7 +551,7 @@ private: | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline unsigned GetFileIndexForLatLon(const int lt, const int ln) { |     inline unsigned GetFileIndexForLatLon(const int lt, const int ln) const { | ||||||
|         double lat = lt/100000.; |         double lat = lt/100000.; | ||||||
|         double lon = ln/100000.; |         double lon = ln/100000.; | ||||||
| 
 | 
 | ||||||
| @ -574,14 +582,14 @@ private: | |||||||
|         return ramIndex; |         return ramIndex; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     const static unsigned long END_OF_BUCKET_DELIMITER = UINT_MAX; |     const static uint64_t END_OF_BUCKET_DELIMITER = boost::integer_traits<uint64_t>::const_max; | ||||||
| 
 | 
 | ||||||
|     std::ofstream indexOutFile; |  | ||||||
|     std::ifstream ramInFile; |     std::ifstream ramInFile; | ||||||
| #ifndef ROUTED | #ifndef ROUTED | ||||||
|  |     std::ofstream indexOutFile; | ||||||
|     stxxl::vector<GridEntry> entries; |     stxxl::vector<GridEntry> entries; | ||||||
| #endif | #endif | ||||||
|     std::vector<unsigned long> ramIndexTable; //8 MB for first level index in RAM |     std::vector<uint64_t> ramIndexTable; //8 MB for first level index in RAM | ||||||
|     std::string iif; |     std::string iif; | ||||||
|     //    LRUCache<int,std::vector<unsigned> > cellCache; |     //    LRUCache<int,std::vector<unsigned> > cellCache; | ||||||
|     //    LRUCache<int,std::vector<_Edge> > fileCache; |     //    LRUCache<int,std::vector<_Edge> > fileCache; | ||||||
|  | |||||||
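One behavioural change in the NNGrid hunks above is easy to miss: when a coordinate is snapped onto an edge, both directional weights of the resulting phantom node are now scaled by the position ratio (weight1 *= ratio, weight2 *= 1-ratio) instead of subtracting weight1 from weight2. A plain arithmetic illustration, with no OSRM types involved:

    #include <cstdio>

    int main() {
        const int forwardWeight  = 100;   // full weight of the snapped edge, forward direction
        const int backwardWeight = 100;   // full weight of the snapped edge, backward direction
        const double ratio = 0.25;        // phantom node sits a quarter of the way along the edge

        const int weight1 = static_cast<int>(forwardWeight * ratio);          // 25
        const int weight2 = static_cast<int>(backwardWeight * (1. - ratio));  // 75
        std::printf("weight1=%d weight2=%d\n", weight1, weight2);
        return 0;
    }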
| @ -25,13 +25,15 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include <iostream> | #include <iostream> | ||||||
| #include <vector> | #include <vector> | ||||||
| 
 | 
 | ||||||
|  | #include <boost/noncopyable.hpp> | ||||||
|  | 
 | ||||||
| #include "../typedefs.h" | #include "../typedefs.h" | ||||||
| #include "../DataStructures/QueryEdge.h" | #include "../DataStructures/QueryEdge.h" | ||||||
| #include "NNGrid.h" | #include "NNGrid.h" | ||||||
| #include "PhantomNodes.h" | #include "PhantomNodes.h" | ||||||
| #include "NodeCoords.h" | #include "NodeCoords.h" | ||||||
| 
 | 
 | ||||||
| class NodeInformationHelpDesk{ | class NodeInformationHelpDesk : boost::noncopyable{ | ||||||
| public: | public: | ||||||
|     NodeInformationHelpDesk(const char* ramIndexInput, const char* fileIndexInput, const unsigned _numberOfNodes, const unsigned crc) : numberOfNodes(_numberOfNodes), checkSum(crc) { |     NodeInformationHelpDesk(const char* ramIndexInput, const char* fileIndexInput, const unsigned _numberOfNodes, const unsigned crc) : numberOfNodes(_numberOfNodes), checkSum(crc) { | ||||||
|         readOnlyGrid = new ReadOnlyGrid(ramIndexInput,fileIndexInput); |         readOnlyGrid = new ReadOnlyGrid(ramIndexInput,fileIndexInput); | ||||||
| @ -59,34 +61,43 @@ public: | |||||||
|         DEBUG("Loading edge data"); |         DEBUG("Loading edge data"); | ||||||
|         unsigned numberOfOrigEdges(0); |         unsigned numberOfOrigEdges(0); | ||||||
|         edgesInStream.read((char*)&numberOfOrigEdges, sizeof(unsigned)); |         edgesInStream.read((char*)&numberOfOrigEdges, sizeof(unsigned)); | ||||||
|         origEdgeData.resize(numberOfOrigEdges); |         origEdgeData_viaNode.resize(numberOfOrigEdges); | ||||||
|         edgesInStream.read((char*)&(origEdgeData[0]), numberOfOrigEdges*sizeof(OriginalEdgeData)); |         origEdgeData_nameID.resize(numberOfOrigEdges); | ||||||
|  |         origEdgeData_turnInstruction.resize(numberOfOrigEdges); | ||||||
|  | 
 | ||||||
|  |         OriginalEdgeData deserialized_originalEdgeData; | ||||||
|  |         for(unsigned i = 0; i < numberOfOrigEdges; ++i) { | ||||||
|  |         	edgesInStream.read((char*)&(deserialized_originalEdgeData), sizeof(OriginalEdgeData)); | ||||||
|  |             origEdgeData_viaNode[i] 		= deserialized_originalEdgeData.viaNode; | ||||||
|  |             origEdgeData_nameID[i] 			= deserialized_originalEdgeData.nameID; | ||||||
|  |             origEdgeData_turnInstruction[i] = deserialized_originalEdgeData.turnInstruction; | ||||||
|  |         } | ||||||
|         edgesInStream.close(); |         edgesInStream.close(); | ||||||
|         DEBUG("Loaded " << numberOfOrigEdges << " orig edges"); |         DEBUG("Loaded " << numberOfOrigEdges << " orig edges"); | ||||||
| 	    DEBUG("Opening NN indices"); | 	    DEBUG("Opening NN indices"); | ||||||
| 	    readOnlyGrid->OpenIndexFiles(); | 	    readOnlyGrid->OpenIndexFiles(); | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
| 	void initNNGrid() { | //	void initNNGrid() { | ||||||
| 	    readOnlyGrid->OpenIndexFiles(); | //	    readOnlyGrid->OpenIndexFiles(); | ||||||
| 	} | //	} | ||||||
| 
 | 
 | ||||||
| 	inline int getLatitudeOfNode(const unsigned id) const { | 	inline int getLatitudeOfNode(const unsigned id) const { | ||||||
| 	    const NodeID node = origEdgeData.at(id).viaNode; | 	    const NodeID node = origEdgeData_viaNode.at(id); | ||||||
| 	    return coordinateVector.at(node).lat; | 	    return coordinateVector.at(node).lat; | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
| 	inline int getLongitudeOfNode(const unsigned id) const { | 	inline int getLongitudeOfNode(const unsigned id) const { | ||||||
|         const NodeID node = origEdgeData.at(id).viaNode; |         const NodeID node = origEdgeData_viaNode.at(id); | ||||||
| 	    return coordinateVector.at(node).lon; | 	    return coordinateVector.at(node).lon; | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
| 	inline unsigned getNameIndexFromEdgeID(const unsigned id) const { | 	inline unsigned getNameIndexFromEdgeID(const unsigned id) const { | ||||||
| 	    return origEdgeData.at(id).nameID; | 	    return origEdgeData_nameID.at(id); | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
|     inline TurnInstruction getTurnInstructionFromEdgeID(const unsigned id) const { |     inline TurnInstruction getTurnInstructionFromEdgeID(const unsigned id) const { | ||||||
|         return origEdgeData.at(id).turnInstruction; |         return origEdgeData_turnInstruction.at(id); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline NodeID getNumberOfNodes() const { return numberOfNodes; } |     inline NodeID getNumberOfNodes() const { return numberOfNodes; } | ||||||
| @ -96,7 +107,7 @@ public: | |||||||
| 		return readOnlyGrid->FindNearestCoordinateOnEdgeInNodeBasedGraph(coord, result); | 		return readOnlyGrid->FindNearestCoordinateOnEdgeInNodeBasedGraph(coord, result); | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
| 	inline bool FindPhantomNodeForCoordinate( const _Coordinate & location, PhantomNode & resultNode, const unsigned zoomLevel) const { | 	inline bool FindPhantomNodeForCoordinate( const _Coordinate & location, PhantomNode & resultNode, const unsigned zoomLevel) { | ||||||
| 	    return readOnlyGrid->FindPhantomNodeForCoordinate(location, resultNode, zoomLevel); | 	    return readOnlyGrid->FindPhantomNodeForCoordinate(location, resultNode, zoomLevel); | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
| @ -114,7 +125,9 @@ public: | |||||||
| 
 | 
 | ||||||
| private: | private: | ||||||
| 	std::vector<_Coordinate> coordinateVector; | 	std::vector<_Coordinate> coordinateVector; | ||||||
|     std::vector<OriginalEdgeData> origEdgeData; | 	std::vector<NodeID> origEdgeData_viaNode; | ||||||
|  | 	std::vector<unsigned> origEdgeData_nameID; | ||||||
|  | 	std::vector<TurnInstruction> origEdgeData_turnInstruction; | ||||||
| 
 | 
 | ||||||
| 	ReadOnlyGrid * readOnlyGrid; | 	ReadOnlyGrid * readOnlyGrid; | ||||||
| 	const unsigned numberOfNodes; | 	const unsigned numberOfNodes; | ||||||
|  | |||||||
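The help desk no longer keeps a std::vector<OriginalEdgeData>; the records are still read one packed struct at a time but are scattered into three parallel vectors (via node, name id, turn instruction), so each lookup touches only the column it needs. A simplified sketch of that loading loop, with stand-in types rather than the real OriginalEdgeData layout:

    #include <fstream>
    #include <vector>

    struct EdgeRecord {                     // stand-in for OriginalEdgeData
        unsigned viaNode;
        unsigned nameID;
        unsigned char turnInstruction;
    };

    void LoadEdgeColumns(std::ifstream & in, unsigned count,
                         std::vector<unsigned> & viaNode,
                         std::vector<unsigned> & nameID,
                         std::vector<unsigned char> & turnInstruction) {
        viaNode.resize(count);
        nameID.resize(count);
        turnInstruction.resize(count);
        EdgeRecord record;
        for (unsigned i = 0; i < count; ++i) {
            in.read(reinterpret_cast<char *>(&record), sizeof(record));   // one record at a time
            viaNode[i] = record.viaNode;
            nameID[i] = record.nameID;
            turnInstruction[i] = record.turnInstruction;
        }
    }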
| @ -41,8 +41,8 @@ struct QueryEdge { | |||||||
|     NodeID target; |     NodeID target; | ||||||
|     struct EdgeData { |     struct EdgeData { | ||||||
|         NodeID id:31; |         NodeID id:31; | ||||||
|         int distance:30; |  | ||||||
|         bool shortcut:1; |         bool shortcut:1; | ||||||
|  |         int distance:30; | ||||||
|         bool forward:1; |         bool forward:1; | ||||||
|         bool backward:1; |         bool backward:1; | ||||||
|     } data; |     } data; | ||||||
|  | |||||||
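Reordering QueryEdge's bit-fields is not cosmetic: placing the 1-bit shortcut flag right after the 31-bit id lets it share the first 32-bit unit, so the 30-bit distance and the remaining flags fit into the second unit. A small check of the effect, with the caveat that bit-field layout is implementation-defined and the exact sizes depend on the ABI:

    #include <cstdint>
    #include <cstdio>

    struct LooseOrder  { uint32_t id:31; int distance:30; bool shortcut:1; bool forward:1; bool backward:1; };
    struct PackedOrder { uint32_t id:31; bool shortcut:1; int distance:30; bool forward:1; bool backward:1; };

    int main() {
        // on common ABIs the packed ordering needs 8 bytes, the loose one 12
        std::printf("loose=%u packed=%u\n",
                    static_cast<unsigned>(sizeof(LooseOrder)),
                    static_cast<unsigned>(sizeof(PackedOrder)));
        return 0;
    }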
| @ -42,32 +42,33 @@ struct _HeapData { | |||||||
| 	_HeapData( NodeID p ) : parent(p) { } | 	_HeapData( NodeID p ) : parent(p) { } | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| typedef boost::thread_specific_ptr<BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> > > SearchEngineHeapPtr; | typedef BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> > QueryHeapType; | ||||||
|  | typedef boost::thread_specific_ptr<QueryHeapType> SearchEngineHeapPtr; | ||||||
| 
 | 
 | ||||||
| template<class EdgeData, class GraphT> | template<class EdgeData, class GraphT> | ||||||
| struct SearchEngineData { | struct SearchEngineData { | ||||||
|     typedef SearchEngineHeapPtr HeapPtr; |  | ||||||
|     typedef GraphT Graph; |     typedef GraphT Graph; | ||||||
|     SearchEngineData(GraphT * g, NodeInformationHelpDesk * nh, std::vector<string> & n) :graph(g), nodeHelpDesk(nh), names(n) {} |     typedef QueryHeapType QueryHeap; | ||||||
|  |     SearchEngineData(GraphT * g, NodeInformationHelpDesk * nh, std::vector<std::string> & n) :graph(g), nodeHelpDesk(nh), names(n) {} | ||||||
|     const GraphT * graph; |     const GraphT * graph; | ||||||
|     NodeInformationHelpDesk * nodeHelpDesk; |     NodeInformationHelpDesk * nodeHelpDesk; | ||||||
|     std::vector<string> & names; |     std::vector<std::string> & names; | ||||||
|     static HeapPtr forwardHeap; |     static SearchEngineHeapPtr forwardHeap; | ||||||
|     static HeapPtr backwardHeap; |     static SearchEngineHeapPtr backwardHeap; | ||||||
|     static HeapPtr forwardHeap2; |     static SearchEngineHeapPtr forwardHeap2; | ||||||
|     static HeapPtr backwardHeap2; |     static SearchEngineHeapPtr backwardHeap2; | ||||||
|     static HeapPtr forwardHeap3; |     static SearchEngineHeapPtr forwardHeap3; | ||||||
|     static HeapPtr backwardHeap3; |     static SearchEngineHeapPtr backwardHeap3; | ||||||
| 
 | 
 | ||||||
|     inline void InitializeOrClearFirstThreadLocalStorage() { |     inline void InitializeOrClearFirstThreadLocalStorage() { | ||||||
|         if(!forwardHeap.get()) { |         if(!forwardHeap.get()) { | ||||||
|             forwardHeap.reset(new BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> >(nodeHelpDesk->getNumberOfNodes())); |             forwardHeap.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); | ||||||
|         } |         } | ||||||
|         else |         else | ||||||
|             forwardHeap->Clear(); |             forwardHeap->Clear(); | ||||||
| 
 | 
 | ||||||
|         if(!backwardHeap.get()) { |         if(!backwardHeap.get()) { | ||||||
|             backwardHeap.reset(new BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> >(nodeHelpDesk->getNumberOfNodes())); |             backwardHeap.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); | ||||||
|         } |         } | ||||||
|         else |         else | ||||||
|             backwardHeap->Clear(); |             backwardHeap->Clear(); | ||||||
| @ -75,13 +76,13 @@ struct SearchEngineData { | |||||||
| 
 | 
 | ||||||
|     inline void InitializeOrClearSecondThreadLocalStorage() { |     inline void InitializeOrClearSecondThreadLocalStorage() { | ||||||
|         if(!forwardHeap2.get()) { |         if(!forwardHeap2.get()) { | ||||||
|             forwardHeap2.reset(new BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> >(nodeHelpDesk->getNumberOfNodes())); |             forwardHeap2.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); | ||||||
|         } |         } | ||||||
|         else |         else | ||||||
|             forwardHeap2->Clear(); |             forwardHeap2->Clear(); | ||||||
| 
 | 
 | ||||||
|         if(!backwardHeap2.get()) { |         if(!backwardHeap2.get()) { | ||||||
|             backwardHeap2.reset(new BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> >(nodeHelpDesk->getNumberOfNodes())); |             backwardHeap2.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); | ||||||
|         } |         } | ||||||
|         else |         else | ||||||
|             backwardHeap2->Clear(); |             backwardHeap2->Clear(); | ||||||
| @ -89,13 +90,13 @@ struct SearchEngineData { | |||||||
| 
 | 
 | ||||||
|     inline void InitializeOrClearThirdThreadLocalStorage() { |     inline void InitializeOrClearThirdThreadLocalStorage() { | ||||||
|         if(!forwardHeap3.get()) { |         if(!forwardHeap3.get()) { | ||||||
|             forwardHeap3.reset(new BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> >(nodeHelpDesk->getNumberOfNodes())); |             forwardHeap3.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); | ||||||
|         } |         } | ||||||
|         else |         else | ||||||
|             forwardHeap3->Clear(); |             forwardHeap3->Clear(); | ||||||
| 
 | 
 | ||||||
|         if(!backwardHeap3.get()) { |         if(!backwardHeap3.get()) { | ||||||
|             backwardHeap3.reset(new BinaryHeap< NodeID, NodeID, int, _HeapData, UnorderedMapStorage<NodeID, int> >(nodeHelpDesk->getNumberOfNodes())); |             backwardHeap3.reset(new QueryHeap(nodeHelpDesk->getNumberOfNodes())); | ||||||
|         } |         } | ||||||
|         else |         else | ||||||
|             backwardHeap3->Clear(); |             backwardHeap3->Clear(); | ||||||
| @ -113,7 +114,7 @@ public: | |||||||
|     ShortestPathRouting<SearchEngineDataT> shortestPath; |     ShortestPathRouting<SearchEngineDataT> shortestPath; | ||||||
|     AlternativeRouting<SearchEngineDataT> alternativePaths; |     AlternativeRouting<SearchEngineDataT> alternativePaths; | ||||||
| 
 | 
 | ||||||
|     SearchEngine(GraphT * g, NodeInformationHelpDesk * nh, std::vector<string> & n) : |     SearchEngine(GraphT * g, NodeInformationHelpDesk * nh, std::vector<std::string> & n) : | ||||||
| 	    _queryData(g, nh, n), | 	    _queryData(g, nh, n), | ||||||
| 	    shortestPath(_queryData), | 	    shortestPath(_queryData), | ||||||
| 	    alternativePaths(_queryData) | 	    alternativePaths(_queryData) | ||||||
|  | |||||||
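The SearchEngineData heaps remain thread-local singletons behind boost::thread_specific_ptr: allocated on first use per thread, then merely cleared between queries. A stripped-down sketch of that lazy-init-or-clear pattern, using a std::vector as a stand-in for the binary heap type:

    #include <boost/thread/tss.hpp>
    #include <vector>

    static boost::thread_specific_ptr<std::vector<int> > localScratch;

    std::vector<int> & GetClearedScratch(unsigned expectedSize) {
        if (!localScratch.get()) {
            localScratch.reset(new std::vector<int>());   // first use on this thread
            localScratch->reserve(expectedSize);
        } else {
            localScratch->clear();                        // reuse the allocation on later queries
        }
        return *localScratch;
    }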
| @ -18,16 +18,12 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | |||||||
| or see http://www.gnu.org/licenses/agpl.txt. | or see http://www.gnu.org/licenses/agpl.txt. | ||||||
| */ | */ | ||||||
| 
 | 
 | ||||||
| #ifndef TIMEUTIL_H_ | #ifndef TIMINGUTIL_H_ | ||||||
| #define TIMEUTIL_H_ | #define TIMINGUTIL_H_ | ||||||
| 
 | 
 | ||||||
| #include <climits> | #include <climits> | ||||||
| #include <cmath> |  | ||||||
| #include <cstdlib> | #include <cstdlib> | ||||||
| 
 | 
 | ||||||
| #ifndef M_PI |  | ||||||
| #define M_PI 3.14159265358979323846 |  | ||||||
| #endif |  | ||||||
| 
 | 
 | ||||||
| #ifdef _WIN32 | #ifdef _WIN32 | ||||||
|  #include <sys/timeb.h> |  #include <sys/timeb.h> | ||||||
| @ -43,13 +39,6 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
|  #include <sys/time.h> |  #include <sys/time.h> | ||||||
| #endif | #endif | ||||||
| 
 | 
 | ||||||
| #ifdef _WIN32 |  | ||||||
|  #include <boost/functional/hash.hpp> |  | ||||||
| #else |  | ||||||
|  #include <tr1/functional_hash.h> |  | ||||||
| #endif |  | ||||||
| #include <boost/thread.hpp> |  | ||||||
| 
 |  | ||||||
| /** Returns a timestamp (now) in seconds (incl. a fractional part). */ | /** Returns a timestamp (now) in seconds (incl. a fractional part). */ | ||||||
| static inline double get_timestamp() { | static inline double get_timestamp() { | ||||||
|     struct timeval tp; |     struct timeval tp; | ||||||
| @ -57,14 +46,5 @@ static inline double get_timestamp() { | |||||||
|     return double(tp.tv_sec) + tp.tv_usec / 1000000.; |     return double(tp.tv_sec) + tp.tv_usec / 1000000.; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| static inline double y2lat(double a) { return 180/M_PI * (2 * atan(exp(a*M_PI/180)) - M_PI/2); } |  | ||||||
| static inline double lat2y(double a) { return 180/M_PI * log(tan(M_PI/4+a*(M_PI/180)/2)); } |  | ||||||
| 
 | 
 | ||||||
| static inline unsigned boost_thread_id_hash(boost::thread::id const& id) { | #endif /* TIMINGUTIL_H_ */ | ||||||
| 	std::stringstream ostr; |  | ||||||
| 	ostr << id; |  | ||||||
| 	std::tr1::hash<std::string> h; |  | ||||||
| 	return h(ostr.str()); |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| #endif /* TIMEUTIL_H_ */ |  | ||||||
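get_timestamp() keeps its gettimeofday implementation; the usual pattern is to take a stamp before and after a piece of work and print the difference, as the grid construction code does. A tiny POSIX-only usage example:

    #include <sys/time.h>
    #include <cstdio>

    static inline double get_timestamp() {
        struct timeval tp;
        gettimeofday(&tp, NULL);
        return double(tp.tv_sec) + tp.tv_usec / 1000000.;
    }

    int main() {
        const double started = get_timestamp();
        volatile double sink = 0;
        for (int i = 0; i < 1000000; ++i) { sink += i * 0.5; }   // stand-in workload
        std::printf("took %fs\n", get_timestamp() - started);
        return 0;
    }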
| @ -21,12 +21,12 @@ | |||||||
| #ifndef TURNINSTRUCTIONS_H_ | #ifndef TURNINSTRUCTIONS_H_ | ||||||
| #define TURNINSTRUCTIONS_H_ | #define TURNINSTRUCTIONS_H_ | ||||||
| 
 | 
 | ||||||
| #include <string> | #include <boost/noncopyable.hpp> | ||||||
| 
 | 
 | ||||||
| typedef unsigned char TurnInstruction; | typedef unsigned char TurnInstruction; | ||||||
| 
 | 
 | ||||||
| //This is a hack until c++0x is available enough to use scoped enums | //This is a hack until c++0x is available enough to use scoped enums | ||||||
| struct TurnInstructionsClass { | struct TurnInstructionsClass : boost::noncopyable { | ||||||
| 
 | 
 | ||||||
|     const static TurnInstruction NoTurn = 0;          //Give no instruction at all |     const static TurnInstruction NoTurn = 0;          //Give no instruction at all | ||||||
|     const static TurnInstruction GoStraight = 1;      //Tell user to go straight! |     const static TurnInstruction GoStraight = 1;      //Tell user to go straight! | ||||||
| @ -44,48 +44,14 @@ struct TurnInstructionsClass { | |||||||
|     const static TurnInstruction StayOnRoundAbout = 13; |     const static TurnInstruction StayOnRoundAbout = 13; | ||||||
|     const static TurnInstruction StartAtEndOfStreet = 14; |     const static TurnInstruction StartAtEndOfStreet = 14; | ||||||
|     const static TurnInstruction ReachedYourDestination = 15; |     const static TurnInstruction ReachedYourDestination = 15; | ||||||
|  |     const static TurnInstruction EnterAgainstAllowedDirection = 16; | ||||||
|  |     const static TurnInstruction LeaveAgainstAllowedDirection = 17; | ||||||
| 
 | 
 | ||||||
|     const static TurnInstruction AccessRestrictionFlag = 128; |     const static TurnInstruction AccessRestrictionFlag = 128; | ||||||
|     const static TurnInstruction InverseAccessRestrictionFlag = 0x7f; // ~128 does not work without a warning. |     const static TurnInstruction InverseAccessRestrictionFlag = 0x7f; // ~128 does not work without a warning. | ||||||
| 
 | 
 | ||||||
|     const static int AccessRestrictionPenalty = 1 << 15; //unrelated to the bit set in the restriction flag |     const static int AccessRestrictionPenalty = 1 << 15; //unrelated to the bit set in the restriction flag | ||||||
| 
 | 
 | ||||||
| //    std::string TurnStrings[16];
 |  | ||||||
| //    std::string Ordinals[12];
 |  | ||||||
| 
 |  | ||||||
|     //This is a hack until c++0x is available enough to use initializer lists.
 |  | ||||||
| //    TurnInstructionsClass(){
 |  | ||||||
| //        TurnStrings [0] = "";
 |  | ||||||
| //        TurnStrings [1] = "Continue";
 |  | ||||||
| //        TurnStrings [2] = "Turn slight right";
 |  | ||||||
| //        TurnStrings [3] = "Turn right";
 |  | ||||||
| //        TurnStrings [4] = "Turn sharp right";
 |  | ||||||
| //        TurnStrings [5] = "U-Turn";
 |  | ||||||
| //        TurnStrings [6] = "Turn sharp left";
 |  | ||||||
| //        TurnStrings [7] = "Turn left";
 |  | ||||||
| //        TurnStrings [8] = "Turn slight left";
 |  | ||||||
| //        TurnStrings [9] = "Reach via point";
 |  | ||||||
| //        TurnStrings[10] = "Head";
 |  | ||||||
| //        TurnStrings[11] = "Enter roundabout";
 |  | ||||||
| //        TurnStrings[12] = "Leave roundabout";
 |  | ||||||
| //        TurnStrings[13] = "Stay on roundabout";
 |  | ||||||
| //        TurnStrings[14] = "Start";
 |  | ||||||
| //        TurnStrings[15] = "You have reached your destination";
 |  | ||||||
| //
 |  | ||||||
| //        Ordinals[0]     = "zeroth";
 |  | ||||||
| //        Ordinals[1]     = "first";
 |  | ||||||
| //        Ordinals[2]     = "second";
 |  | ||||||
| //        Ordinals[3]     = "third";
 |  | ||||||
| //        Ordinals[4]     = "fourth";
 |  | ||||||
| //        Ordinals[5]     = "fifth";
 |  | ||||||
| //        Ordinals[6]     = "sixth";
 |  | ||||||
| //        Ordinals[7]     = "seventh";
 |  | ||||||
| //        Ordinals[8]     = "eighth";
 |  | ||||||
| //        Ordinals[9]     = "nineth";
 |  | ||||||
| //        Ordinals[10]    = "tenth";
 |  | ||||||
| //        Ordinals[11]    = "one of the too many";
 |  | ||||||
| //    };
 |  | ||||||
| 
 |  | ||||||
|     static inline TurnInstruction GetTurnDirectionOfInstruction( const double angle ) { |     static inline TurnInstruction GetTurnDirectionOfInstruction( const double angle ) { | ||||||
|         if(angle >= 23 && angle < 67) { |         if(angle >= 23 && angle < 67) { | ||||||
|             return TurnSharpRight; |             return TurnSharpRight; | ||||||
|  | |||||||
| @ -49,7 +49,7 @@ public: | |||||||
|         table1.resize(2 << 16); |         table1.resize(2 << 16); | ||||||
|         table2.resize(2 << 16); |         table2.resize(2 << 16); | ||||||
|         for(unsigned i = 0; i < (2 << 16); ++i) { |         for(unsigned i = 0; i < (2 << 16); ++i) { | ||||||
|             table1[i] = i; table2[i]; |             table1[i] = i; table2[i] = i; | ||||||
|         } |         } | ||||||
|         std::random_shuffle(table1.begin(), table1.end()); |         std::random_shuffle(table1.begin(), table1.end()); | ||||||
|         std::random_shuffle(table2.begin(), table2.end()); |         std::random_shuffle(table2.begin(), table2.end()); | ||||||
| @ -75,8 +75,8 @@ public: | |||||||
|         table3.resize(1 << 8); |         table3.resize(1 << 8); | ||||||
|         table4.resize(1 << 8); |         table4.resize(1 << 8); | ||||||
|         for(unsigned i = 0; i < (1 << 8); ++i) { |         for(unsigned i = 0; i < (1 << 8); ++i) { | ||||||
|             table1[i] = i; table2[i]; |             table1[i] = i; table2[i] = i; | ||||||
|             table3[i] = i; table4[i]; |             table3[i] = i; table4[i] = i; | ||||||
|         } |         } | ||||||
|         std::random_shuffle(table1.begin(), table1.end()); |         std::random_shuffle(table1.begin(), table1.end()); | ||||||
|         std::random_shuffle(table2.begin(), table2.end()); |         std::random_shuffle(table2.begin(), table2.end()); | ||||||
|  | |||||||
| @ -53,13 +53,9 @@ public: | |||||||
|         _RouteSummary() : lengthString("0"), durationString("0"), startName(0), destName(0) {} |         _RouteSummary() : lengthString("0"), durationString("0"), startName(0), destName(0) {} | ||||||
|         void BuildDurationAndLengthStrings(const double distance, const unsigned time) { |         void BuildDurationAndLengthStrings(const double distance, const unsigned time) { | ||||||
|             //compute distance/duration for route summary
 |             //compute distance/duration for route summary
 | ||||||
|             std::ostringstream s; |             intToString(round(distance), lengthString); | ||||||
|             s << round(distance); |  | ||||||
|             lengthString = s.str(); |  | ||||||
|             int travelTime = time/10 + 1; |             int travelTime = time/10 + 1; | ||||||
|             s.str(""); |             intToString(travelTime, durationString); | ||||||
|             s << travelTime; |  | ||||||
|             durationString = s.str(); |  | ||||||
|         } |         } | ||||||
|     } summary; |     } summary; | ||||||
| 
 | 
 | ||||||
|  | |||||||
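A quick note on the conversion above: the time value appears to be in tenths of a second (matching parseDuration elsewhere in this changeset), so time/10 + 1 rounds it up to whole seconds, and intToString(value, out) formats the integer into the target string. A minimal stand-alone sketch of the same arithmetic, with a plain ostringstream standing in for the intToString helper:

    #include <sstream>
    #include <string>

    // Sketch only (not part of the patch): reproduces the summary-string arithmetic.
    static void buildSummaryStrings(const double distance, const unsigned time,
                                    std::string& lengthString, std::string& durationString) {
        std::ostringstream out;                   // stand-in for the intToString helper used above
        out << static_cast<int>(distance + 0.5);  // length in metres, rounded
        lengthString = out.str();
        out.str("");
        out << (time / 10 + 1);                   // tenths of a second -> whole seconds, rounded up
        durationString = out.str();
    }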
| @ -39,6 +39,7 @@ public: | |||||||
|                 "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " |                 "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " | ||||||
|                 "xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 gpx.xsd" |                 "xsi:schemaLocation=\"http://www.topografix.com/GPX/1/1 gpx.xsd" | ||||||
|                 "\">"; |                 "\">"; | ||||||
|  |         reply.content += "<metadata><copyright author=\"Project OSRM\"><license>Data (c) OpenStreetMap contributors (ODbL)</license></copyright></metadata>"; | ||||||
|         reply.content += "<rte>"; |         reply.content += "<rte>"; | ||||||
|         if(rawRoute.lengthOfShortestPath != INT_MAX && rawRoute.computedShortestPath.size()) { |         if(rawRoute.lengthOfShortestPath != INT_MAX && rawRoute.computedShortestPath.size()) { | ||||||
|             convertInternalLatLonToString(phantomNodes.startPhantom.location.lat, tmp); |             convertInternalLatLonToString(phantomNodes.startPhantom.location.lat, tmp); | ||||||
| @ -46,7 +47,7 @@ public: | |||||||
|             convertInternalLatLonToString(phantomNodes.startPhantom.location.lon, tmp); |             convertInternalLatLonToString(phantomNodes.startPhantom.location.lon, tmp); | ||||||
|             reply.content += "lon=\"" + tmp + "\"></rtept>"; |             reply.content += "lon=\"" + tmp + "\"></rtept>"; | ||||||
| 
 | 
 | ||||||
|             BOOST_FOREACH(_PathData pathData, rawRoute.computedShortestPath) { |             BOOST_FOREACH(const _PathData & pathData, rawRoute.computedShortestPath) { | ||||||
|                 sEngine.GetCoordinatesForNodeID(pathData.node, current); |                 sEngine.GetCoordinatesForNodeID(pathData.node, current); | ||||||
| 
 | 
 | ||||||
|                 convertInternalLatLonToString(current.lat, tmp); |                 convertInternalLatLonToString(current.lat, tmp); | ||||||
|  | |||||||
| @ -246,7 +246,7 @@ public: | |||||||
|         reply.content += "}"; |         reply.content += "}"; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     void GetRouteNames(std::vector<Segment> & shortestSegments, std::vector<Segment> & alternativeSegments, SearchEngineT &sEngine, RouteNames & routeNames) { |     void GetRouteNames(std::vector<Segment> & shortestSegments, std::vector<Segment> & alternativeSegments, const SearchEngineT &sEngine, RouteNames & routeNames) { | ||||||
|         /*** extract names for both alternatives ***/ |         /*** extract names for both alternatives ***/ | ||||||
| 
 | 
 | ||||||
|         Segment shortestSegment1, shortestSegment2; |         Segment shortestSegment1, shortestSegment2; | ||||||
| @ -262,23 +262,25 @@ public: | |||||||
|             std::vector<Segment> shortestDifference(shortestSegments.size()); |             std::vector<Segment> shortestDifference(shortestSegments.size()); | ||||||
|             std::vector<Segment> alternativeDifference(alternativeSegments.size()); |             std::vector<Segment> alternativeDifference(alternativeSegments.size()); | ||||||
|             std::set_difference(shortestSegments.begin(), shortestSegments.end(), alternativeSegments.begin(), alternativeSegments.end(), shortestDifference.begin(), boost::bind(&Segment::nameID, _1) < boost::bind(&Segment::nameID, _2) ); |             std::set_difference(shortestSegments.begin(), shortestSegments.end(), alternativeSegments.begin(), alternativeSegments.end(), shortestDifference.begin(), boost::bind(&Segment::nameID, _1) < boost::bind(&Segment::nameID, _2) ); | ||||||
|             if(0 < shortestDifference.size() ) { |             int size_of_difference = shortestDifference.size(); | ||||||
|                 unsigned i = 0; |             if(0 < size_of_difference ) { | ||||||
|                 while( i < shortestDifference.size() && shortestDifference[i].nameID == shortestSegments[0].nameID) { |                 int i = 0; | ||||||
|  |                 while( i < size_of_difference && shortestDifference[i].nameID == shortestSegments[0].nameID) { | ||||||
|                     ++i; |                     ++i; | ||||||
|                 } |                 } | ||||||
|                 if(i < shortestDifference.size()) { |                 if(i < size_of_difference ) { | ||||||
|                     shortestSegment2 = shortestDifference[i]; |                     shortestSegment2 = shortestDifference[i]; | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             std::set_difference(alternativeSegments.begin(), alternativeSegments.end(), shortestSegments.begin(), shortestSegments.end(), alternativeDifference.begin(), boost::bind(&Segment::nameID, _1) < boost::bind(&Segment::nameID, _2) ); |             std::set_difference(alternativeSegments.begin(), alternativeSegments.end(), shortestSegments.begin(), shortestSegments.end(), alternativeDifference.begin(), boost::bind(&Segment::nameID, _1) < boost::bind(&Segment::nameID, _2) ); | ||||||
|             if(0 < alternativeDifference.size() ) { |             size_of_difference = alternativeDifference.size(); | ||||||
|                 unsigned i = 0; |             if(0 < size_of_difference ) { | ||||||
|                 while( i < alternativeDifference.size() && alternativeDifference[i].nameID == alternativeSegments[0].nameID) { |                 int i = 0; | ||||||
|  |                 while( i < size_of_difference && alternativeDifference[i].nameID == alternativeSegments[0].nameID) { | ||||||
|                     ++i; |                     ++i; | ||||||
|                 } |                 } | ||||||
|                 if(i < alternativeDifference.size()) { |                 if(i < size_of_difference ) { | ||||||
|                     alternativeSegment2 = alternativeDifference[i]; |                     alternativeSegment2 = alternativeDifference[i]; | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| @ -292,7 +294,7 @@ public: | |||||||
|             routeNames.shortestPathName2 = sEngine.GetEscapedNameForNameID(shortestSegment2.nameID); |             routeNames.shortestPathName2 = sEngine.GetEscapedNameForNameID(shortestSegment2.nameID); | ||||||
| 
 | 
 | ||||||
|             routeNames.alternativePathName1 = sEngine.GetEscapedNameForNameID(alternativeSegment1.nameID); |             routeNames.alternativePathName1 = sEngine.GetEscapedNameForNameID(alternativeSegment1.nameID); | ||||||
|             routeNames.alternativePathName2 += sEngine.GetEscapedNameForNameID(alternativeSegment2.nameID); |             routeNames.alternativePathName2 = sEngine.GetEscapedNameForNameID(alternativeSegment2.nameID); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  | |||||||
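Aside on the selection logic above: std::set_difference requires both segment lists to be pre-sorted with the same nameID comparator, and the loop that follows skips entries sharing the route's first street name so the second displayed name actually differs from the first. A reduced, self-contained sketch of that step (Segment stripped down to a nameID; the values are illustrative, not from the patch):

    #include <algorithm>
    #include <iostream>
    #include <vector>

    struct Segment {
        unsigned nameID;
        Segment() : nameID(0) {}
        explicit Segment(unsigned n) : nameID(n) {}
    };

    // Stands in for the boost::bind comparator used above.
    static bool byNameID(const Segment& a, const Segment& b) { return a.nameID < b.nameID; }

    int main() {
        std::vector<Segment> shortest, alternative;
        shortest.push_back(Segment(3));    // route starts on street 3
        shortest.push_back(Segment(5));
        shortest.push_back(Segment(7));
        alternative.push_back(Segment(3));
        alternative.push_back(Segment(9));
        // precondition of set_difference: both ranges already sorted by nameID

        std::vector<Segment> difference(shortest.size());
        std::vector<Segment>::iterator end = std::set_difference(
            shortest.begin(), shortest.end(),
            alternative.begin(), alternative.end(),
            difference.begin(), byNameID);

        // skip names equal to the first segment's, then take the next distinct one
        for (std::vector<Segment>::iterator it = difference.begin(); it != end; ++it) {
            if (it->nameID != shortest.front().nameID) {
                std::cout << "second name id: " << it->nameID << std::endl;  // prints 5
                break;
            }
        }
        return 0;
    }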
							
								
								
									
116	Extractor/BaseParser.cpp	Normal file
| @ -0,0 +1,116 @@ | |||||||
|  | /*
 | ||||||
|  | open source routing machine | ||||||
|  | Copyright (C) Dennis Luxen, others 2010 | ||||||
|  | 
 | ||||||
|  | This program is free software; you can redistribute it and/or modify | ||||||
|  | it under the terms of the GNU AFFERO General Public License as published by | ||||||
|  | the Free Software Foundation; either version 3 of the License, or | ||||||
|  | any later version. | ||||||
|  | 
 | ||||||
|  | This program is distributed in the hope that it will be useful, | ||||||
|  | but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | GNU General Public License for more details. | ||||||
|  | 
 | ||||||
|  | You should have received a copy of the GNU Affero General Public License | ||||||
|  | along with this program; if not, write to the Free Software | ||||||
|  | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
|  | or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | #include "BaseParser.h" | ||||||
|  | 
 | ||||||
|  | BaseParser::BaseParser(ExtractorCallbacks* ec, ScriptingEnvironment& se) : | ||||||
|  | extractor_callbacks(ec), scriptingEnvironment(se), luaState(NULL), use_turn_restrictions(true) { | ||||||
|  |     luaState = se.getLuaStateForThreadID(0); | ||||||
|  |     ReadUseRestrictionsSetting(); | ||||||
|  |     ReadRestrictionExceptions(); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | void BaseParser::ReadUseRestrictionsSetting() { | ||||||
|  |     if( 0 != luaL_dostring( luaState, "return use_turn_restrictions\n") ) { | ||||||
|  |         ERR(lua_tostring( luaState,-1)<< " occurred in scripting block"); | ||||||
|  |     } | ||||||
|  |     if( lua_isboolean( luaState, -1) ) { | ||||||
|  |         use_turn_restrictions = lua_toboolean(luaState, -1); | ||||||
|  |     } | ||||||
|  |     if( use_turn_restrictions ) { | ||||||
|  |         INFO("Using turn restrictions" ); | ||||||
|  |     } else { | ||||||
|  |         INFO("Ignoring turn restrictions" ); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | void BaseParser::ReadRestrictionExceptions() { | ||||||
|  |     if(lua_function_exists(luaState, "get_exceptions" )) { | ||||||
|  |         //get list of turn restriction exceptions
 | ||||||
|  |         try { | ||||||
|  |             luabind::call_function<void>( | ||||||
|  |                 luaState, | ||||||
|  |                 "get_exceptions", | ||||||
|  |                 boost::ref(restriction_exceptions) | ||||||
|  |                 ); | ||||||
|  |             INFO("Found " << restriction_exceptions.size() << " exceptions to turn restrictions"); | ||||||
|  |             BOOST_FOREACH(std::string & str, restriction_exceptions) { | ||||||
|  |                 INFO("   " << str); | ||||||
|  |             } | ||||||
|  |         } catch (const luabind::error &er) { | ||||||
|  |             lua_State* Ler=er.state(); | ||||||
|  |             report_errors(Ler, -1); | ||||||
|  |             ERR(er.what()); | ||||||
|  |         } | ||||||
|  |     } else { | ||||||
|  |         INFO("Found no exceptions to turn restrictions"); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | void BaseParser::report_errors(lua_State *L, const int status) const { | ||||||
|  |     if( 0!=status ) { | ||||||
|  |         std::cerr << "-- " << lua_tostring(L, -1) << std::endl; | ||||||
|  |         lua_pop(L, 1); // remove error message
 | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | void BaseParser::ParseNodeInLua(ImportNode& n, lua_State* localLuaState) { | ||||||
|  |     try { | ||||||
|  |         luabind::call_function<void>( localLuaState, "node_function", boost::ref(n) ); | ||||||
|  |     } catch (const luabind::error &er) { | ||||||
|  |         lua_State* Ler=er.state(); | ||||||
|  |         report_errors(Ler, -1); | ||||||
|  |         ERR(er.what()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | void BaseParser::ParseWayInLua(ExtractionWay& w, lua_State* localLuaState) { | ||||||
|  |     if(2 > w.path.size()) { | ||||||
|  |         return; | ||||||
|  |     } | ||||||
|  |     try { | ||||||
|  |         luabind::call_function<void>( localLuaState, "way_function", boost::ref(w) ); | ||||||
|  |     } catch (const luabind::error &er) { | ||||||
|  |         lua_State* Ler=er.state(); | ||||||
|  |         report_errors(Ler, -1); | ||||||
|  |         ERR(er.what()); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | bool BaseParser::ShouldIgnoreRestriction(const std::string& except_tag_string) const { | ||||||
|  |     //should this restriction be ignored? yes if there's an overlap between:
 | ||||||
|  |     //a) the list of modes in the except tag of the restriction (except_tag_string), ex: except=bus;bicycle
 | ||||||
|  |     //b) the lua profile defines a hierarchy of modes, ex: [access, vehicle, bicycle]
 | ||||||
|  |      | ||||||
|  |     if( "" == except_tag_string ) { | ||||||
|  |         return false; | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     //Be warned, this is quadratic work here, but we assume that
 | ||||||
|  |     //only a few exceptions are actually defined.
 | ||||||
|  |     std::vector<std::string> exceptions; | ||||||
|  |     boost::algorithm::split_regex(exceptions, except_tag_string, boost::regex("[;][ ]*")); | ||||||
|  |     BOOST_FOREACH(std::string& str, exceptions) { | ||||||
|  |         if( restriction_exceptions.end() != std::find(restriction_exceptions.begin(), restriction_exceptions.end(), str) ) { | ||||||
|  |             return true; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |     return false; | ||||||
|  | } | ||||||
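To make the overlap check above concrete, here is a small stand-alone sketch of the same idea; it splits the except= tag on ';' by hand instead of using boost::split_regex, and hard-codes an exception list in place of what get_exceptions() would fill in from the profile:

    #include <algorithm>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Simplified stand-in for BaseParser::ShouldIgnoreRestriction: true when any mode
    // listed in the restriction's except= tag also appears in the profile's exception list.
    static bool shouldIgnoreRestriction(const std::string& exceptTag,
                                        const std::vector<std::string>& profileExceptions) {
        if (exceptTag.empty()) {
            return false;                         // no except tag -> restriction applies
        }
        std::istringstream tokenizer(exceptTag);  // split on ';' (the real code uses boost::split_regex)
        std::string mode;
        while (std::getline(tokenizer, mode, ';')) {
            if (!mode.empty() && mode[0] == ' ') {
                mode.erase(0, 1);                 // drop a single leading space from "; " separators
            }
            if (std::find(profileExceptions.begin(), profileExceptions.end(), mode)
                    != profileExceptions.end()) {
                return true;                      // overlap found -> ignore this restriction
            }
        }
        return false;
    }

    int main() {
        std::vector<std::string> profileExceptions;
        profileExceptions.push_back("bicycle");   // e.g. what get_exceptions() could supply for a bike profile

        std::cout << shouldIgnoreRestriction("bus;bicycle", profileExceptions) << std::endl; // 1
        std::cout << shouldIgnoreRestriction("bus",         profileExceptions) << std::endl; // 0
        return 0;
    }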
| @ -27,23 +27,32 @@ extern "C" { | |||||||
| #include <lualib.h> | #include <lualib.h> | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | #include <boost/noncopyable.hpp> | ||||||
|  | 
 | ||||||
|  | #include "ExtractorCallbacks.h" | ||||||
| #include "ScriptingEnvironment.h" | #include "ScriptingEnvironment.h" | ||||||
| 
 | 
 | ||||||
| template<class ExternalMemoryT, typename NodeT, typename RestrictionT, typename WayT> | class BaseParser : boost::noncopyable { | ||||||
| class BaseParser { |  | ||||||
| public: | public: | ||||||
|  |     BaseParser(ExtractorCallbacks* ec, ScriptingEnvironment& se); | ||||||
|     virtual ~BaseParser() {} |     virtual ~BaseParser() {} | ||||||
|     virtual bool Init() = 0; |     virtual bool ReadHeader() = 0; | ||||||
|     virtual void RegisterCallbacks(ExternalMemoryT * externalMemory) = 0; |  | ||||||
|     virtual void RegisterScriptingEnvironment(ScriptingEnvironment & _se) = 0; |  | ||||||
|     virtual bool Parse() = 0; |     virtual bool Parse() = 0; | ||||||
| 
 | 
 | ||||||
|     void report_errors(lua_State *L, int status) { |     virtual void ParseNodeInLua(ImportNode& n, lua_State* luaStateForThread); | ||||||
|         if ( status!=0 ) { |     virtual void ParseWayInLua(ExtractionWay& n, lua_State* luaStateForThread); | ||||||
|             std::cerr << "-- " << lua_tostring(L, -1) << std::endl; |     virtual void report_errors(lua_State *L, const int status) const; | ||||||
|             lua_pop(L, 1); // remove error message
 | 
 | ||||||
|         } | protected:    | ||||||
|     } |     virtual void ReadUseRestrictionsSetting(); | ||||||
|  |     virtual void ReadRestrictionExceptions(); | ||||||
|  |     virtual bool ShouldIgnoreRestriction(const std::string& except_tag_string) const; | ||||||
|  |      | ||||||
|  |     ExtractorCallbacks* extractor_callbacks; | ||||||
|  |     ScriptingEnvironment& scriptingEnvironment; | ||||||
|  |     lua_State* luaState; | ||||||
|  |     std::vector<std::string> restriction_exceptions; | ||||||
|  |     bool use_turn_restrictions; | ||||||
| 
 | 
 | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
|  | |||||||
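For orientation, the refactor above moves the Lua state handling, the use_turn_restrictions lookup and the restriction-exception logic into BaseParser itself; a concrete parser now only provides ReadHeader() and Parse(). A purely hypothetical subclass sketch (not OSRM's XMLParser or PBFParser) to show the resulting interface:

    #include "BaseParser.h"

    // Hypothetical minimal parser: inherits the shared Lua plumbing from BaseParser
    // and supplies only the two pure virtual hooks.
    class DummyParser : public BaseParser {
    public:
        DummyParser(ExtractorCallbacks* ec, ScriptingEnvironment& se) : BaseParser(ec, se) {}
        bool ReadHeader() { return true; }  // nothing to validate in this dummy source
        bool Parse()      { return true; }  // a real parser would stream nodes/ways/relations here
    };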
| @ -27,31 +27,31 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|         double time = get_timestamp(); |         double time = get_timestamp(); | ||||||
|         boost::uint64_t memory_to_use = static_cast<boost::uint64_t>(amountOfRAM) * 1024 * 1024 * 1024; |         boost::uint64_t memory_to_use = static_cast<boost::uint64_t>(amountOfRAM) * 1024 * 1024 * 1024; | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Sorting used nodes        ... " << flush; |         std::cout << "[extractor] Sorting used nodes        ... " << std::flush; | ||||||
|         stxxl::sort(usedNodeIDs.begin(), usedNodeIDs.end(), Cmp(), memory_to_use); |         stxxl::sort(usedNodeIDs.begin(), usedNodeIDs.end(), Cmp(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
| 
 | 
 | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
|         cout << "[extractor] Erasing duplicate nodes   ... " << flush; |         std::cout << "[extractor] Erasing duplicate nodes   ... " << std::flush; | ||||||
|         stxxl::vector<NodeID>::iterator NewEnd = unique ( usedNodeIDs.begin(),usedNodeIDs.end() ) ; |         stxxl::vector<NodeID>::iterator NewEnd = std::unique ( usedNodeIDs.begin(),usedNodeIDs.end() ) ; | ||||||
|         usedNodeIDs.resize ( NewEnd - usedNodeIDs.begin() ); |         usedNodeIDs.resize ( NewEnd - usedNodeIDs.begin() ); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Sorting all nodes         ... " << flush; |         std::cout << "[extractor] Sorting all nodes         ... " << std::flush; | ||||||
|         stxxl::sort(allNodes.begin(), allNodes.end(), CmpNodeByID(), memory_to_use); |         stxxl::sort(allNodes.begin(), allNodes.end(), CmpNodeByID(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Sorting used ways         ... " << flush; |         std::cout << "[extractor] Sorting used ways         ... " << std::flush; | ||||||
|         stxxl::sort(wayStartEndVector.begin(), wayStartEndVector.end(), CmpWayByID(), memory_to_use); |         stxxl::sort(wayStartEndVector.begin(), wayStartEndVector.end(), CmpWayByID(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Sorting restrctns. by from... " << flush; |         std::cout << "[extractor] Sorting restrctns. by from... " << std::flush; | ||||||
|         stxxl::sort(restrictionsVector.begin(), restrictionsVector.end(), CmpRestrictionContainerByFrom(), memory_to_use); |         stxxl::sort(restrictionsVector.begin(), restrictionsVector.end(), CmpRestrictionContainerByFrom(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Fixing restriction starts ... " << flush; |         std::cout << "[extractor] Fixing restriction starts ... " << std::flush; | ||||||
|         STXXLRestrictionsVector::iterator restrictionsIT = restrictionsVector.begin(); |         STXXLRestrictionsVector::iterator restrictionsIT = restrictionsVector.begin(); | ||||||
|         STXXLWayIDStartEndVector::iterator wayStartAndEndEdgeIT = wayStartEndVector.begin(); |         STXXLWayIDStartEndVector::iterator wayStartAndEndEdgeIT = wayStartEndVector.begin(); | ||||||
| 
 | 
 | ||||||
| @ -79,16 +79,16 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|             ++restrictionsIT; |             ++restrictionsIT; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Sorting restrctns. by to  ... " << flush; |         std::cout << "[extractor] Sorting restrctns. by to  ... " << std::flush; | ||||||
|         stxxl::sort(restrictionsVector.begin(), restrictionsVector.end(), CmpRestrictionContainerByTo(), memory_to_use); |         stxxl::sort(restrictionsVector.begin(), restrictionsVector.end(), CmpRestrictionContainerByTo(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
| 
 | 
 | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
|         unsigned usableRestrictionsCounter(0); |         unsigned usableRestrictionsCounter(0); | ||||||
|         cout << "[extractor] Fixing restriction ends   ... " << flush; |         std::cout << "[extractor] Fixing restriction ends   ... " << std::flush; | ||||||
|         restrictionsIT = restrictionsVector.begin(); |         restrictionsIT = restrictionsVector.begin(); | ||||||
|         wayStartAndEndEdgeIT = wayStartEndVector.begin(); |         wayStartAndEndEdgeIT = wayStartEndVector.begin(); | ||||||
|         while(wayStartAndEndEdgeIT != wayStartEndVector.end() && restrictionsIT != restrictionsVector.end()) { |         while(wayStartAndEndEdgeIT != wayStartEndVector.end() && restrictionsIT != restrictionsVector.end()) { | ||||||
| @ -116,11 +116,11 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|             } |             } | ||||||
|             ++restrictionsIT; |             ++restrictionsIT; | ||||||
|         } |         } | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         INFO("usable restrictions: " << usableRestrictionsCounter ); |         INFO("usable restrictions: " << usableRestrictionsCounter ); | ||||||
|         //serialize restrictions
 |         //serialize restrictions
 | ||||||
|         ofstream restrictionsOutstream; |         std::ofstream restrictionsOutstream; | ||||||
|         restrictionsOutstream.open(restrictionsFileName.c_str(), ios::binary); |         restrictionsOutstream.open(restrictionsFileName.c_str(), std::ios::binary); | ||||||
|         restrictionsOutstream.write((char*)&usableRestrictionsCounter, sizeof(unsigned)); |         restrictionsOutstream.write((char*)&usableRestrictionsCounter, sizeof(unsigned)); | ||||||
|         for(restrictionsIT = restrictionsVector.begin(); restrictionsIT != restrictionsVector.end(); ++restrictionsIT) { |         for(restrictionsIT = restrictionsVector.begin(); restrictionsIT != restrictionsVector.end(); ++restrictionsIT) { | ||||||
|             if(UINT_MAX != restrictionsIT->restriction.fromNode && UINT_MAX != restrictionsIT->restriction.toNode) { |             if(UINT_MAX != restrictionsIT->restriction.fromNode && UINT_MAX != restrictionsIT->restriction.toNode) { | ||||||
| @ -129,11 +129,11 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|         } |         } | ||||||
|         restrictionsOutstream.close(); |         restrictionsOutstream.close(); | ||||||
| 
 | 
 | ||||||
|         ofstream fout; |         std::ofstream fout; | ||||||
|         fout.open(outputFileName.c_str(), ios::binary); |         fout.open(outputFileName.c_str(), std::ios::binary); | ||||||
|         fout.write((char*)&usedNodeCounter, sizeof(unsigned)); |         fout.write((char*)&usedNodeCounter, sizeof(unsigned)); | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
|         cout << "[extractor] Confirming/Writing used nodes     ... " << flush; |         std::cout << "[extractor] Confirming/Writing used nodes     ... " << std::flush; | ||||||
| 
 | 
 | ||||||
|         STXXLNodeVector::iterator nodesIT = allNodes.begin(); |         STXXLNodeVector::iterator nodesIT = allNodes.begin(); | ||||||
|         STXXLNodeIDVector::iterator usedNodeIDsIT = usedNodeIDs.begin(); |         STXXLNodeIDVector::iterator usedNodeIDsIT = usedNodeIDs.begin(); | ||||||
| @ -154,24 +154,24 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|             } |             } | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] setting number of nodes   ... " << flush; |         std::cout << "[extractor] setting number of nodes   ... " << std::flush; | ||||||
|         ios::pos_type positionInFile = fout.tellp(); |         std::ios::pos_type positionInFile = fout.tellp(); | ||||||
|         fout.seekp(ios::beg); |         fout.seekp(std::ios::beg); | ||||||
|         fout.write((char*)&usedNodeCounter, sizeof(unsigned)); |         fout.write((char*)&usedNodeCounter, sizeof(unsigned)); | ||||||
|         fout.seekp(positionInFile); |         fout.seekp(positionInFile); | ||||||
| 
 | 
 | ||||||
|         cout << "ok" << endl; |         std::cout << "ok" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         // Sort edges by start.
 |         // Sort edges by start.
 | ||||||
|         cout << "[extractor] Sorting edges by start    ... " << flush; |         std::cout << "[extractor] Sorting edges by start    ... " << std::flush; | ||||||
|         stxxl::sort(allEdges.begin(), allEdges.end(), CmpEdgeByStartID(), memory_to_use); |         stxxl::sort(allEdges.begin(), allEdges.end(), CmpEdgeByStartID(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Setting start coords      ... " << flush; |         std::cout << "[extractor] Setting start coords      ... " << std::flush; | ||||||
|         fout.write((char*)&usedEdgeCounter, sizeof(unsigned)); |         fout.write((char*)&usedEdgeCounter, sizeof(unsigned)); | ||||||
|         // Traverse list of edges and nodes in parallel and set start coord
 |         // Traverse list of edges and nodes in parallel and set start coord
 | ||||||
|         nodesIT = allNodes.begin(); |         nodesIT = allNodes.begin(); | ||||||
| @ -191,16 +191,16 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|                 ++edgeIT; |                 ++edgeIT; | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         // Sort Edges by target
 |         // Sort Edges by target
 | ||||||
|         cout << "[extractor] Sorting edges by target   ... " << flush; |         std::cout << "[extractor] Sorting edges by target   ... " << std::flush; | ||||||
|         stxxl::sort(allEdges.begin(), allEdges.end(), CmpEdgeByTargetID(), memory_to_use); |         stxxl::sort(allEdges.begin(), allEdges.end(), CmpEdgeByTargetID(), memory_to_use); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
| 
 | 
 | ||||||
|         cout << "[extractor] Setting target coords     ... " << flush; |         std::cout << "[extractor] Setting target coords     ... " << std::flush; | ||||||
|         // Traverse list of edges and nodes in parallel and set target coord
 |         // Traverse list of edges and nodes in parallel and set target coord
 | ||||||
|         nodesIT = allNodes.begin(); |         nodesIT = allNodes.begin(); | ||||||
|         edgeIT = allEdges.begin(); |         edgeIT = allEdges.begin(); | ||||||
| @ -231,22 +231,22 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|                     fout.write((char*)&edgeIT->target, sizeof(unsigned)); |                     fout.write((char*)&edgeIT->target, sizeof(unsigned)); | ||||||
|                     fout.write((char*)&intDist, sizeof(int)); |                     fout.write((char*)&intDist, sizeof(int)); | ||||||
|                     switch(edgeIT->direction) { |                     switch(edgeIT->direction) { | ||||||
|                     case _Way::notSure: |                     case ExtractionWay::notSure: | ||||||
|                         fout.write((char*)&zero, sizeof(short)); |                         fout.write((char*)&zero, sizeof(short)); | ||||||
|                         break; |                         break; | ||||||
|                     case _Way::oneway: |                     case ExtractionWay::oneway: | ||||||
|                         fout.write((char*)&one, sizeof(short)); |                         fout.write((char*)&one, sizeof(short)); | ||||||
|                         break; |                         break; | ||||||
|                     case _Way::bidirectional: |                     case ExtractionWay::bidirectional: | ||||||
|                         fout.write((char*)&zero, sizeof(short)); |                         fout.write((char*)&zero, sizeof(short)); | ||||||
| 
 | 
 | ||||||
|                         break; |                         break; | ||||||
|                     case _Way::opposite: |                     case ExtractionWay::opposite: | ||||||
|                         fout.write((char*)&one, sizeof(short)); |                         fout.write((char*)&one, sizeof(short)); | ||||||
|                         break; |                         break; | ||||||
|                     default: |                     default: | ||||||
|                         cerr << "[error] edge with no direction: " << edgeIT->direction << endl; |                       std::cerr << "[error] edge with no direction: " << edgeIT->direction << std::endl; | ||||||
|                         assert(false); |                       assert(false); | ||||||
|                         break; |                         break; | ||||||
|                     } |                     } | ||||||
|                     fout.write((char*)&intWeight, sizeof(int)); |                     fout.write((char*)&intWeight, sizeof(int)); | ||||||
| @ -256,33 +256,34 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|                     fout.write((char*)&edgeIT->isRoundabout, sizeof(bool)); |                     fout.write((char*)&edgeIT->isRoundabout, sizeof(bool)); | ||||||
|                     fout.write((char*)&edgeIT->ignoreInGrid, sizeof(bool)); |                     fout.write((char*)&edgeIT->ignoreInGrid, sizeof(bool)); | ||||||
|                     fout.write((char*)&edgeIT->isAccessRestricted, sizeof(bool)); |                     fout.write((char*)&edgeIT->isAccessRestricted, sizeof(bool)); | ||||||
|  |                     fout.write((char*)&edgeIT->isContraFlow, sizeof(bool)); | ||||||
|                 } |                 } | ||||||
|                 ++usedEdgeCounter; |                 ++usedEdgeCounter; | ||||||
|                 ++edgeIT; |                 ++edgeIT; | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
|         cout << "[extractor] setting number of edges   ... " << flush; |         std::cout << "[extractor] setting number of edges   ... " << std::flush; | ||||||
| 
 | 
 | ||||||
|         fout.seekp(positionInFile); |         fout.seekp(positionInFile); | ||||||
|         fout.write((char*)&usedEdgeCounter, sizeof(unsigned)); |         fout.write((char*)&usedEdgeCounter, sizeof(unsigned)); | ||||||
|         fout.close(); |         fout.close(); | ||||||
|         cout << "ok" << endl; |         std::cout << "ok" << std::endl; | ||||||
|         time = get_timestamp(); |         time = get_timestamp(); | ||||||
|         cout << "[extractor] writing street name index ... " << flush; |         std::cout << "[extractor] writing street name index ... " << std::flush; | ||||||
|         std::string nameOutFileName = (outputFileName + ".names"); |         std::string nameOutFileName = (outputFileName + ".names"); | ||||||
|         ofstream nameOutFile(nameOutFileName.c_str(), ios::binary); |         std::ofstream nameOutFile(nameOutFileName.c_str(), std::ios::binary); | ||||||
|         unsigned sizeOfNameIndex = nameVector.size(); |         unsigned sizeOfNameIndex = nameVector.size(); | ||||||
|         nameOutFile.write((char *)&(sizeOfNameIndex), sizeof(unsigned)); |         nameOutFile.write((char *)&(sizeOfNameIndex), sizeof(unsigned)); | ||||||
| 
 | 
 | ||||||
|         BOOST_FOREACH(string str, nameVector) { |         BOOST_FOREACH(const std::string & str, nameVector) { | ||||||
|             unsigned lengthOfRawString = strlen(str.c_str()); |             unsigned lengthOfRawString = strlen(str.c_str()); | ||||||
|             nameOutFile.write((char *)&(lengthOfRawString), sizeof(unsigned)); |             nameOutFile.write((char *)&(lengthOfRawString), sizeof(unsigned)); | ||||||
|             nameOutFile.write(str.c_str(), lengthOfRawString); |             nameOutFile.write(str.c_str(), lengthOfRawString); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         nameOutFile.close(); |         nameOutFile.close(); | ||||||
|         cout << "ok, after " << get_timestamp() - time << "s" << endl; |         std::cout << "ok, after " << get_timestamp() - time << "s" << std::endl; | ||||||
| 
 | 
 | ||||||
|         //        time = get_timestamp();
 |         //        time = get_timestamp();
 | ||||||
|         //        cout << "[extractor] writing address list      ... " << flush;
 |         //        cout << "[extractor] writing address list      ... " << flush;
 | ||||||
| @ -298,8 +299,8 @@ void ExtractionContainers::PrepareData(const std::string & outputFileName, const | |||||||
|         INFO("Processed " << usedNodeCounter << " nodes and " << usedEdgeCounter << " edges"); |         INFO("Processed " << usedNodeCounter << " nodes and " << usedEdgeCounter << " edges"); | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|     } catch ( const exception& e ) { |     } catch ( const std::exception& e ) { | ||||||
|         cerr <<  "Caught Execption:" << e.what() << endl; |       std::cerr <<  "Caught Exception: " << e.what() << std::endl; | ||||||
|     } |     } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | |||||||
| @ -25,13 +25,13 @@ | |||||||
| #include <stxxl.h> | #include <stxxl.h> | ||||||
| 
 | 
 | ||||||
| #include "ExtractorStructs.h" | #include "ExtractorStructs.h" | ||||||
| #include "../DataStructures/Util.h" | #include "../DataStructures/TimingUtil.h" | ||||||
| 
 | 
 | ||||||
| class ExtractionContainers { | class ExtractionContainers { | ||||||
| public: | public: | ||||||
|     typedef stxxl::vector<NodeID> STXXLNodeIDVector; |     typedef stxxl::vector<NodeID> STXXLNodeIDVector; | ||||||
|     typedef stxxl::vector<_Node> STXXLNodeVector; |     typedef stxxl::vector<_Node> STXXLNodeVector; | ||||||
|     typedef stxxl::vector<_Edge> STXXLEdgeVector; |     typedef stxxl::vector<InternalExtractorEdge> STXXLEdgeVector; | ||||||
|     typedef stxxl::vector<std::string> STXXLStringVector; |     typedef stxxl::vector<std::string> STXXLStringVector; | ||||||
|     typedef stxxl::vector<_RawRestrictionContainer> STXXLRestrictionsVector; |     typedef stxxl::vector<_RawRestrictionContainer> STXXLRestrictionsVector; | ||||||
|     typedef stxxl::vector<_WayIDStartAndEndEdge> STXXLWayIDStartEndVector; |     typedef stxxl::vector<_WayIDStartAndEndEdge> STXXLWayIDStartEndVector; | ||||||
|  | |||||||
| @ -28,12 +28,15 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include <boost/regex.hpp> | #include <boost/regex.hpp> | ||||||
| #include <climits> | #include <climits> | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
| #include "../Util/StringUtil.h" | #include "../Util/StringUtil.h" | ||||||
| 
 | 
 | ||||||
|  | namespace qi = boost::spirit::qi; | ||||||
|  | 
 | ||||||
| //TODO: Move into LUA
 | //TODO: Move into LUA
 | ||||||
| 
 | 
 | ||||||
| inline bool durationIsValid(const std::string &s) { | inline bool durationIsValid(const std::string &s) { | ||||||
|     boost::regex e ("((\\d|\\d\\d):)*(\\d|\\d\\d)",boost::regex_constants::icase|boost::regex_constants::perl); |     boost::regex e ("((\\d|\\d\\d):(\\d|\\d\\d):(\\d|\\d\\d))|((\\d|\\d\\d):(\\d|\\d\\d))|(\\d|\\d\\d)",boost::regex_constants::icase|boost::regex_constants::perl); | ||||||
| 
 | 
 | ||||||
|     std::vector< std::string > result; |     std::vector< std::string > result; | ||||||
|     boost::algorithm::split_regex( result, s, boost::regex( ":" ) ) ; |     boost::algorithm::split_regex( result, s, boost::regex( ":" ) ) ; | ||||||
| @ -42,17 +45,28 @@ inline bool durationIsValid(const std::string &s) { | |||||||
| } | } | ||||||
| 
 | 
 | ||||||
| inline unsigned parseDuration(const std::string &s) { | inline unsigned parseDuration(const std::string &s) { | ||||||
|     int hours = 0; |     unsigned hours = 0; | ||||||
|     int minutes = 0; |     unsigned minutes = 0; | ||||||
|     boost::regex e ("((\\d|\\d\\d):)*(\\d|\\d\\d)",boost::regex_constants::icase|boost::regex_constants::perl); |     unsigned seconds = 0; | ||||||
|  |     boost::regex e ("((\\d|\\d\\d):(\\d|\\d\\d):(\\d|\\d\\d))|((\\d|\\d\\d):(\\d|\\d\\d))|(\\d|\\d\\d)",boost::regex_constants::icase|boost::regex_constants::perl); | ||||||
| 
 | 
 | ||||||
|     std::vector< std::string > result; |     std::vector< std::string > result; | ||||||
|     boost::algorithm::split_regex( result, s, boost::regex( ":" ) ) ; |     boost::algorithm::split_regex( result, s, boost::regex( ":" ) ) ; | ||||||
|     bool matched = regex_match(s, e); |     bool matched = regex_match(s, e); | ||||||
|     if(matched) { |     if(matched) { | ||||||
|         hours = (result.size()== 2) ?  stringToInt(result[0]) : 0; |         if(1 == result.size()) { | ||||||
|         minutes = (result.size()== 2) ?  stringToInt(result[1]) : stringToInt(result[0]); |             minutes = stringToInt(result[0]); | ||||||
|         return 600*(hours*60+minutes); |         } | ||||||
|  |         if(2 == result.size()) { | ||||||
|  |             minutes = stringToInt(result[1]); | ||||||
|  |             hours = stringToInt(result[0]); | ||||||
|  |         } | ||||||
|  |         if(3 == result.size()) { | ||||||
|  |             seconds = stringToInt(result[2]); | ||||||
|  |             minutes = stringToInt(result[1]); | ||||||
|  |             hours   = stringToInt(result[0]); | ||||||
|  |         } | ||||||
|  |         return 10*(3600*hours+60*minutes+seconds); | ||||||
|     } |     } | ||||||
|     return UINT_MAX; |     return UINT_MAX; | ||||||
| } | } | ||||||
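A few worked values for the formula above (the result is in tenths of a second; a bare number is read as minutes):

    #include <cassert>

    // Spot checks of the arithmetic in parseDuration (illustrative values).
    int main() {
        assert(10 * (60 * 20)                == 12000); // "20"       -> 20 minutes
        assert(10 * (3600 * 1 + 60 * 30)     == 54000); // "01:30"    -> 1 h 30 min
        assert(10 * (3600 * 0 + 60 * 2 + 30) ==  1500); // "00:02:30" -> 2 min 30 s
        return 0;                                       // malformed input yields UINT_MAX instead
    }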
| @ -66,5 +80,4 @@ inline int parseMaxspeed(std::string input) { //call-by-value on purpose. | |||||||
|     return n; |     return n; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| #endif /* EXTRACTIONHELPERFUNCTIONS_H_ */ | #endif /* EXTRACTIONHELPERFUNCTIONS_H_ */ | ||||||
|  | |||||||
| @ -48,58 +48,95 @@ ExtractorCallbacks::ExtractorCallbacks(ExtractionContainers * ext, StringMap * s | |||||||
|     stringMap = strMap; |     stringMap = strMap; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| ExtractorCallbacks::~ExtractorCallbacks() { | ExtractorCallbacks::~ExtractorCallbacks() { } | ||||||
| } |  | ||||||
| 
 | 
 | ||||||
| /** warning: caller needs to take care of synchronization! */ | /** warning: caller needs to take care of synchronization! */ | ||||||
| bool ExtractorCallbacks::nodeFunction(_Node &n) { | void ExtractorCallbacks::nodeFunction(const _Node &n) { | ||||||
|     if(n.lat <= 85*100000 && n.lat >= -85*100000) |     if(n.lat <= 85*100000 && n.lat >= -85*100000) { | ||||||
|         externalMemory->allNodes.push_back(n); |         externalMemory->allNodes.push_back(n); | ||||||
|     return true; |     } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| bool ExtractorCallbacks::restrictionFunction(_RawRestrictionContainer &r) { | bool ExtractorCallbacks::restrictionFunction(const _RawRestrictionContainer &r) { | ||||||
|     externalMemory->restrictionsVector.push_back(r); |     externalMemory->restrictionsVector.push_back(r); | ||||||
|     return true; |     return true; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| /** warning: caller needs to take care of synchronization! */ | /** warning: caller needs to take care of synchronization! */ | ||||||
| bool ExtractorCallbacks::wayFunction(_Way &w) { | void ExtractorCallbacks::wayFunction(ExtractionWay &parsed_way) { | ||||||
|     /*** Store name of way and split it into edge segments ***/ |     if((0 < parsed_way.speed) || (0 < parsed_way.duration)) { //Only true if the way is specified by the speed profile
 | ||||||
|  |         if(UINT_MAX == parsed_way.id){ | ||||||
|  |             DEBUG("found bogus way with id: " << parsed_way.id << " of size " << parsed_way.path.size()); | ||||||
|  |             return; | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|     if ( w.speed > 0 ) { //Only true if the way is specified by the speed profile
 |         if(0 < parsed_way.duration) { | ||||||
|  |          //TODO: iterate all way segments and set duration corresponding to the length of each segment
 | ||||||
|  |             parsed_way.speed = parsed_way.duration/(parsed_way.path.size()-1); | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |         if(FLT_EPSILON >= fabs(-1. - parsed_way.speed)){ | ||||||
|  |             DEBUG("found way with bogus speed, id: " << parsed_way.id); | ||||||
|  |             return; | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|         //Get the unique identifier for the street name
 |         //Get the unique identifier for the street name
 | ||||||
|         const StringMap::const_iterator strit = stringMap->find(w.name); |         const StringMap::const_iterator string_map_iterator = stringMap->find(parsed_way.name); | ||||||
|         if(strit == stringMap->end()) { |         if(stringMap->end() == string_map_iterator) { | ||||||
|             w.nameID = externalMemory->nameVector.size(); |             parsed_way.nameID = externalMemory->nameVector.size(); | ||||||
|             externalMemory->nameVector.push_back(w.name); |             externalMemory->nameVector.push_back(parsed_way.name); | ||||||
|             stringMap->insert(StringMap::value_type(w.name, w.nameID)); |             stringMap->insert(std::make_pair(parsed_way.name, parsed_way.nameID)); | ||||||
|         } else { |         } else { | ||||||
|             w.nameID = strit->second; |             parsed_way.nameID = string_map_iterator->second; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         if(fabs(-1. - w.speed) < FLT_EPSILON){ |         if(ExtractionWay::opposite == parsed_way.direction) { | ||||||
|             WARN("found way with bogus speed, id: " << w.id); |             std::reverse( parsed_way.path.begin(), parsed_way.path.end() ); | ||||||
|             return true; |             parsed_way.direction = ExtractionWay::oneway; | ||||||
|         } |  | ||||||
|         if(w.id == UINT_MAX) { |  | ||||||
|             WARN("found way with unknown type: " << w.id); |  | ||||||
|             return true; |  | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         if ( w.direction == _Way::opposite ){ |         const bool split_bidirectional_edge = (parsed_way.backward_speed > 0) && (parsed_way.speed != parsed_way.backward_speed); | ||||||
|             std::reverse( w.path.begin(), w.path.end() ); |  | ||||||
|         } |  | ||||||
| 
 | 
 | ||||||
|         for(vector< NodeID >::size_type n = 0; n < w.path.size()-1; ++n) { |         for(std::vector< NodeID >::size_type n = 0; n < parsed_way.path.size()-1; ++n) { | ||||||
|             externalMemory->allEdges.push_back(_Edge(w.path[n], w.path[n+1], w.type, w.direction, w.speed, w.nameID, w.roundabout, w.ignoreInGrid, w.isDurationSet, w.isAccessRestricted)); |             externalMemory->allEdges.push_back( | ||||||
|             externalMemory->usedNodeIDs.push_back(w.path[n]); |                     InternalExtractorEdge(parsed_way.path[n], | ||||||
|  |                             parsed_way.path[n+1], | ||||||
|  |                             parsed_way.type, | ||||||
|  |                             (split_bidirectional_edge ? ExtractionWay::oneway : parsed_way.direction), | ||||||
|  |                             parsed_way.speed, | ||||||
|  |                             parsed_way.nameID, | ||||||
|  |                             parsed_way.roundabout, | ||||||
|  |                             parsed_way.ignoreInGrid, | ||||||
|  |                             (0 < parsed_way.duration), | ||||||
|  |                             parsed_way.isAccessRestricted | ||||||
|  |                     ) | ||||||
|  |             ); | ||||||
|  |             externalMemory->usedNodeIDs.push_back(parsed_way.path[n]); | ||||||
|         } |         } | ||||||
|         externalMemory->usedNodeIDs.push_back(w.path.back()); |         externalMemory->usedNodeIDs.push_back(parsed_way.path.back()); | ||||||
| 
 | 
 | ||||||
|         //The following information is needed to identify start and end segments of restrictions
 |         //The following information is needed to identify start and end segments of restrictions
 | ||||||
|         externalMemory->wayStartEndVector.push_back(_WayIDStartAndEndEdge(w.id, w.path[0], w.path[1], w.path[w.path.size()-2], w.path[w.path.size()-1])); |         externalMemory->wayStartEndVector.push_back(_WayIDStartAndEndEdge(parsed_way.id, parsed_way.path[0], parsed_way.path[1], parsed_way.path[parsed_way.path.size()-2], parsed_way.path.back())); | ||||||
|  | 
 | ||||||
|  |         if(split_bidirectional_edge) { //Only true if the way should be split
 | ||||||
|  |             std::reverse( parsed_way.path.begin(), parsed_way.path.end() ); | ||||||
|  |             for(std::vector< NodeID >::size_type n = 0; n < parsed_way.path.size()-1; ++n) { | ||||||
|  |                 externalMemory->allEdges.push_back( | ||||||
|  |                         InternalExtractorEdge(parsed_way.path[n], | ||||||
|  |                                 parsed_way.path[n+1], | ||||||
|  |                                 parsed_way.type, | ||||||
|  |                                 ExtractionWay::oneway, | ||||||
|  |                                 parsed_way.backward_speed, | ||||||
|  |                                 parsed_way.nameID, | ||||||
|  |                                 parsed_way.roundabout, | ||||||
|  |                                 parsed_way.ignoreInGrid, | ||||||
|  |                                 (0 < parsed_way.duration), | ||||||
|  |                                 parsed_way.isAccessRestricted, | ||||||
|  |                                 (ExtractionWay::oneway == parsed_way.direction) | ||||||
|  |                         ) | ||||||
|  |                 ); | ||||||
|  |             } | ||||||
|  |             externalMemory->wayStartEndVector.push_back(_WayIDStartAndEndEdge(parsed_way.id, parsed_way.path[0], parsed_way.path[1], parsed_way.path[parsed_way.path.size()-2], parsed_way.path.back())); | ||||||
|  |         } | ||||||
|     } |     } | ||||||
|     return true; |  | ||||||
| } | } | ||||||
|  | |||||||
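Two remarks on the rewritten wayFunction: a duration tag is currently spread evenly across the way's segments (the TODO notes it should eventually be weighted by segment length), and a distinct backward_speed causes the way to be emitted twice as one-way edge sets, the second time over the reversed path with the contra-flow flag set. A tiny sketch of the even split, with illustrative numbers only:

    #include <cassert>
    #include <vector>

    int main() {
        std::vector<unsigned> path(5);                  // a way with 5 nodes -> 4 segments
        const double duration = 1200.;                  // parsed duration for the whole way
        const double per_segment = duration / (path.size() - 1);
        assert(per_segment == 300.);                    // each edge gets an equal share
        return 0;
    }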
| @ -45,12 +45,12 @@ public: | |||||||
|     ~ExtractorCallbacks(); |     ~ExtractorCallbacks(); | ||||||
| 
 | 
 | ||||||
|     /** warning: caller needs to take care of synchronization! */ |     /** warning: caller needs to take care of synchronization! */ | ||||||
|     bool nodeFunction(_Node &n); |     void nodeFunction(const _Node &n); | ||||||
| 
 | 
 | ||||||
|     bool restrictionFunction(_RawRestrictionContainer &r); |     bool restrictionFunction(const _RawRestrictionContainer &r); | ||||||
| 
 | 
 | ||||||
|     /** warning: caller needs to take care of synchronization! */ |     /** warning: caller needs to take care of synchronization! */ | ||||||
|     bool wayFunction(_Way &w); |     void wayFunction(ExtractionWay &w); | ||||||
| 
 | 
 | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
|  | |||||||
| @ -34,14 +34,14 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include "../DataStructures/ImportNode.h" | #include "../DataStructures/ImportNode.h" | ||||||
| #include "../DataStructures/NodeCoords.h" | #include "../DataStructures/NodeCoords.h" | ||||||
| #include "../DataStructures/Restriction.h" | #include "../DataStructures/Restriction.h" | ||||||
| #include "../DataStructures/Util.h" | #include "../DataStructures/TimingUtil.h" | ||||||
| #include "../typedefs.h" | #include "../typedefs.h" | ||||||
| 
 | 
 | ||||||
| typedef boost::unordered_map<std::string, NodeID > StringMap; | typedef boost::unordered_map<std::string, NodeID > StringMap; | ||||||
| typedef boost::unordered_map<std::string, std::pair<int, short> > StringToIntPairMap; | typedef boost::unordered_map<std::string, std::pair<int, short> > StringToIntPairMap; | ||||||
| 
 | 
 | ||||||
| struct _Way { | struct ExtractionWay { | ||||||
|     _Way() { |     ExtractionWay() { | ||||||
| 		Clear(); | 		Clear(); | ||||||
|     } |     } | ||||||
| 	 | 	 | ||||||
| @ -50,46 +50,52 @@ struct _Way { | |||||||
| 		nameID = UINT_MAX; | 		nameID = UINT_MAX; | ||||||
| 		path.clear(); | 		path.clear(); | ||||||
| 		keyVals.EraseAll(); | 		keyVals.EraseAll(); | ||||||
|         direction = _Way::notSure; |         direction = ExtractionWay::notSure; | ||||||
|         speed = -1; |         speed = -1; | ||||||
|  |         backward_speed = -1; | ||||||
|  |         duration = -1; | ||||||
|         type = -1; |         type = -1; | ||||||
|         access = true; |         access = true; | ||||||
|         roundabout = false; |         roundabout = false; | ||||||
|         isDurationSet = false; |  | ||||||
|         isAccessRestricted = false; |         isAccessRestricted = false; | ||||||
|         ignoreInGrid = false; |         ignoreInGrid = false; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     enum { |     enum Directions { | ||||||
|         notSure = 0, oneway, bidirectional, opposite |         notSure = 0, oneway, bidirectional, opposite | ||||||
|     } direction; |     }; | ||||||
|  |     Directions direction; | ||||||
|     unsigned id; |     unsigned id; | ||||||
|     unsigned nameID; |     unsigned nameID; | ||||||
|     std::string name; |     std::string name; | ||||||
|     double speed; |     double speed; | ||||||
|  |     double backward_speed; | ||||||
|  |     double duration; | ||||||
|     short type; |     short type; | ||||||
|     bool access; |     bool access; | ||||||
|     bool roundabout; |     bool roundabout; | ||||||
|     bool isDurationSet; |  | ||||||
|     bool isAccessRestricted; |     bool isAccessRestricted; | ||||||
|     bool ignoreInGrid; |     bool ignoreInGrid; | ||||||
|     std::vector< NodeID > path; |     std::vector< NodeID > path; | ||||||
|     HashTable<std::string, std::string> keyVals; |     HashTable<std::string, std::string> keyVals; | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| struct _Relation { | struct ExtractorRelation { | ||||||
|     _Relation() : type(unknown){} |     ExtractorRelation() : type(unknown){} | ||||||
|     enum { |     enum { | ||||||
|         unknown = 0, ferry, turnRestriction |         unknown = 0, ferry, turnRestriction | ||||||
|     } type; |     } type; | ||||||
|     HashTable<std::string, std::string> keyVals; |     HashTable<std::string, std::string> keyVals; | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| struct _Edge { | struct InternalExtractorEdge { | ||||||
|     _Edge() : start(0), target(0), type(0), direction(0), speed(0), nameID(0), isRoundabout(false), ignoreInGrid(false), isDurationSet(false), isAccessRestricted(false) {}; |     InternalExtractorEdge() : start(0), target(0), type(0), direction(0), speed(0), nameID(0), isRoundabout(false), ignoreInGrid(false), isDurationSet(false), isAccessRestricted(false), isContraFlow(false) {}; | ||||||
|     _Edge(NodeID s, NodeID t) : start(s), target(t), type(0), direction(0), speed(0), nameID(0), isRoundabout(false), ignoreInGrid(false), isDurationSet(false), isAccessRestricted(false) { } |     InternalExtractorEdge(NodeID s, NodeID t) : start(s), target(t), type(0), direction(0), speed(0), nameID(0), isRoundabout(false), ignoreInGrid(false), isDurationSet(false), isAccessRestricted(false), isContraFlow(false) { } | ||||||
|     _Edge(NodeID s, NodeID t, short tp, short d, double sp): start(s), target(t), type(tp), direction(d), speed(sp), nameID(0), isRoundabout(false), ignoreInGrid(false), isDurationSet(false), isAccessRestricted(false) { } |     InternalExtractorEdge(NodeID s, NodeID t, short tp, short d, double sp): start(s), target(t), type(tp), direction(d), speed(sp), nameID(0), isRoundabout(false), ignoreInGrid(false), isDurationSet(false), isAccessRestricted(false), isContraFlow(false) { } | ||||||
|     _Edge(NodeID s, NodeID t, short tp, short d, double sp, unsigned nid, bool isra, bool iing, bool ids, bool iar): start(s), target(t), type(tp), direction(d), speed(sp), nameID(nid), isRoundabout(isra), ignoreInGrid(iing), isDurationSet(ids), isAccessRestricted(iar) { |     InternalExtractorEdge(NodeID s, NodeID t, short tp, short d, double sp, unsigned nid, bool isra, bool iing, bool ids, bool iar): start(s), target(t), type(tp), direction(d), speed(sp), nameID(nid), isRoundabout(isra), ignoreInGrid(iing), isDurationSet(ids), isAccessRestricted(iar), isContraFlow(false) { | ||||||
|  |         assert(0 <= type); | ||||||
|  |     } | ||||||
|  |     InternalExtractorEdge(NodeID s, NodeID t, short tp, short d, double sp, unsigned nid, bool isra, bool iing, bool ids, bool iar, bool icf): start(s), target(t), type(tp), direction(d), speed(sp), nameID(nid), isRoundabout(isra), ignoreInGrid(iing), isDurationSet(ids), isAccessRestricted(iar), isContraFlow(icf) { | ||||||
|         assert(0 <= type); |         assert(0 <= type); | ||||||
|     } |     } | ||||||
|     NodeID start; |     NodeID start; | ||||||
| @ -102,19 +108,21 @@ struct _Edge { | |||||||
|     bool ignoreInGrid; |     bool ignoreInGrid; | ||||||
|     bool isDurationSet; |     bool isDurationSet; | ||||||
|     bool isAccessRestricted; |     bool isAccessRestricted; | ||||||
|  |     bool isContraFlow; | ||||||
| 
 | 
 | ||||||
|     _Coordinate startCoord; |     _Coordinate startCoord; | ||||||
|     _Coordinate targetCoord; |     _Coordinate targetCoord; | ||||||
| 
 | 
 | ||||||
|     static _Edge min_value() { |     static InternalExtractorEdge min_value() { | ||||||
|         return _Edge(0,0); |         return InternalExtractorEdge(0,0); | ||||||
|     } |     } | ||||||
|     static _Edge max_value() { |     static InternalExtractorEdge max_value() { | ||||||
|         return _Edge((numeric_limits<unsigned>::max)(), (numeric_limits<unsigned>::max)()); |         return InternalExtractorEdge((std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)()); | ||||||
|     } |     } | ||||||
| 
 |  | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| struct _WayIDStartAndEndEdge { | struct _WayIDStartAndEndEdge { | ||||||
|     unsigned wayID; |     unsigned wayID; | ||||||
|     NodeID firstStart; |     NodeID firstStart; | ||||||
| @ -125,10 +133,10 @@ struct _WayIDStartAndEndEdge { | |||||||
|     _WayIDStartAndEndEdge(unsigned w, NodeID fs, NodeID ft, NodeID ls, NodeID lt) :  wayID(w), firstStart(fs), firstTarget(ft), lastStart(ls), lastTarget(lt) {} |     _WayIDStartAndEndEdge(unsigned w, NodeID fs, NodeID ft, NodeID ls, NodeID lt) :  wayID(w), firstStart(fs), firstTarget(ft), lastStart(ls), lastTarget(lt) {} | ||||||
| 
 | 
 | ||||||
|     static _WayIDStartAndEndEdge min_value() { |     static _WayIDStartAndEndEdge min_value() { | ||||||
|         return _WayIDStartAndEndEdge((numeric_limits<unsigned>::min)(), (numeric_limits<unsigned>::min)(), (numeric_limits<unsigned>::min)(), (numeric_limits<unsigned>::min)(), (numeric_limits<unsigned>::min)()); |         return _WayIDStartAndEndEdge((std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)(), (std::numeric_limits<unsigned>::min)()); | ||||||
|     } |     } | ||||||
|     static _WayIDStartAndEndEdge max_value() { |     static _WayIDStartAndEndEdge max_value() { | ||||||
|         return _WayIDStartAndEndEdge((numeric_limits<unsigned>::max)(), (numeric_limits<unsigned>::max)(), (numeric_limits<unsigned>::max)(), (numeric_limits<unsigned>::max)(), (numeric_limits<unsigned>::max)()); |         return _WayIDStartAndEndEdge((std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)(), (std::numeric_limits<unsigned>::max)()); | ||||||
|     } |     } | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| @ -171,38 +179,33 @@ struct CmpNodeByID : public std::binary_function<_Node, _Node, bool> { | |||||||
|     } |     } | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| struct CmpEdgeByStartID : public std::binary_function<_Edge, _Edge, bool> | struct CmpEdgeByStartID : public std::binary_function<InternalExtractorEdge, InternalExtractorEdge, bool> { | ||||||
| { |     typedef InternalExtractorEdge value_type; | ||||||
|     typedef _Edge value_type; |     bool operator ()  (const InternalExtractorEdge & a, const InternalExtractorEdge & b) const { | ||||||
|     bool operator ()  (const _Edge & a, const _Edge & b) const { |  | ||||||
|         return a.start < b.start; |         return a.start < b.start; | ||||||
|     } |     } | ||||||
|     value_type max_value() { |     value_type max_value() { | ||||||
|         return _Edge::max_value(); |         return InternalExtractorEdge::max_value(); | ||||||
|     } |     } | ||||||
|     value_type min_value() { |     value_type min_value() { | ||||||
|         return _Edge::min_value(); |         return InternalExtractorEdge::min_value(); | ||||||
|     } |     } | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| struct CmpEdgeByTargetID : public std::binary_function<_Edge, _Edge, bool> | struct CmpEdgeByTargetID : public std::binary_function<InternalExtractorEdge, InternalExtractorEdge, bool> { | ||||||
| { |     typedef InternalExtractorEdge value_type; | ||||||
|     typedef _Edge value_type; |     bool operator ()  (const InternalExtractorEdge & a, const InternalExtractorEdge & b) const { | ||||||
|     bool operator ()  (const _Edge & a, const _Edge & b) const |  | ||||||
|     { |  | ||||||
|         return a.target < b.target; |         return a.target < b.target; | ||||||
|     } |     } | ||||||
|     value_type max_value() |     value_type max_value() { | ||||||
|     { |         return InternalExtractorEdge::max_value(); | ||||||
|         return _Edge::max_value(); |  | ||||||
|     } |     } | ||||||
|     value_type min_value() |     value_type min_value() { | ||||||
|     { |         return InternalExtractorEdge::min_value(); | ||||||
|         return _Edge::min_value(); |  | ||||||
|     } |     } | ||||||
| }; | }; | ||||||
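
Both comparators expose min_value()/max_value() sentinels next to operator(), which is the interface stxxl's external-memory sort expects (the sentinels bound the key range so the sorter can pad runs). A minimal sketch of how they would be consumed, assuming the stxxl API and that this header is included; this call is not part of the diff itself:

    #include <stxxl/vector>
    #include <stxxl/sort>

    void sortEdgesByStart(stxxl::vector<InternalExtractorEdge> & allEdges) {
        // 1 GiB of internal memory for run formation and merging (illustrative value)
        stxxl::sort(allEdges.begin(), allEdges.end(), CmpEdgeByStartID(), 1024ul * 1024ul * 1024ul);
    }
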
| 
 | 
 | ||||||
| inline string GetRandomString() { | inline std::string GetRandomString() { | ||||||
|     char s[128]; |     char s[128]; | ||||||
|     static const char alphanum[] = |     static const char alphanum[] = | ||||||
|             "0123456789" |             "0123456789" | ||||||
| @ -213,7 +216,7 @@ inline string GetRandomString() { | |||||||
|         s[i] = alphanum[rand() % (sizeof(alphanum) - 1)]; |         s[i] = alphanum[rand() % (sizeof(alphanum) - 1)]; | ||||||
|     } |     } | ||||||
|     s[127] = 0; |     s[127] = 0; | ||||||
|     return string(s); |     return std::string(s); | ||||||
| } | } | ||||||
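
GetRandomString draws from rand() without seeding, so repeated runs yield the same 127-character string unless srand() is called elsewhere. A hedged C++11 alternative, illustrative only and not part of this commit; GetRandomStringSeeded is a hypothetical name and the alphabet is assumed to mirror the table above:

    #include <random>
    #include <string>

    inline std::string GetRandomStringSeeded() {
        static const char alphanum[] =
            "0123456789"
            "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            "abcdefghijklmnopqrstuvwxyz";
        std::mt19937 gen((std::random_device())());
        std::uniform_int_distribution<int> pick(0, static_cast<int>(sizeof(alphanum)) - 2); // skip trailing '\0'
        std::string s(127, '0');
        for (std::string::size_type i = 0; i < s.size(); ++i) {
            s[i] = alphanum[pick(gen)];
        }
        return s;
    }
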
| 
 | 
 | ||||||
| #endif /* EXTRACTORSTRUCTS_H_ */ | #endif /* EXTRACTORSTRUCTS_H_ */ | ||||||
|  | |||||||
							
								
								
									
490  Extractor/PBFParser.cpp  Normal file
							| @ -0,0 +1,490 @@ | |||||||
|  | /*
 | ||||||
|  |  open source routing machine | ||||||
|  |  Copyright (C) Dennis Luxen, others 2010 | ||||||
|  | 
 | ||||||
|  |  This program is free software; you can redistribute it and/or modify | ||||||
|  |  it under the terms of the GNU AFFERO General Public License as published by | ||||||
|  |  the Free Software Foundation; either version 3 of the License, or | ||||||
|  |  any later version. | ||||||
|  | 
 | ||||||
|  |  This program is distributed in the hope that it will be useful, | ||||||
|  |  but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  |  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  |  GNU General Public License for more details. | ||||||
|  | 
 | ||||||
|  |  You should have received a copy of the GNU Affero General Public License | ||||||
|  |  along with this program; if not, write to the Free Software | ||||||
|  |  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
|  |  or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  |  */ | ||||||
|  | 
 | ||||||
|  | #include "PBFParser.h" | ||||||
|  | 
 | ||||||
|  | PBFParser::PBFParser(const char * fileName, ExtractorCallbacks* ec, ScriptingEnvironment& se) : BaseParser( ec, se ) { | ||||||
|  | 	GOOGLE_PROTOBUF_VERIFY_VERSION; | ||||||
|  | 	//TODO: What is the bottleneck here? Filling the queue or reading the stuff from disk?
 | ||||||
|  | 	//NOTE: With Lua scripting, it is parsing the stuff. I/O is virtually for free.
 | ||||||
|  | 	threadDataQueue = boost::make_shared<ConcurrentQueue<_ThreadData*> >( 2500 ); /* Max 2500 items in queue, hardcoded. */ | ||||||
|  | 	input.open(fileName, std::ios::in | std::ios::binary); | ||||||
|  | 
 | ||||||
|  | 	if (!input) { | ||||||
|  | 		std::cerr << fileName << ": File not found." << std::endl; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | #ifndef NDEBUG | ||||||
|  | 	blockCount = 0; | ||||||
|  | 	groupCount = 0; | ||||||
|  | #endif | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | PBFParser::~PBFParser() { | ||||||
|  | 	if(input.is_open()) { | ||||||
|  | 		input.close(); | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	// Clean up any leftover ThreadData objects in the queue
 | ||||||
|  | 	_ThreadData* td; | ||||||
|  | 	while (threadDataQueue->try_pop(td)) { | ||||||
|  | 		delete td; | ||||||
|  | 	} | ||||||
|  | 	google::protobuf::ShutdownProtobufLibrary(); | ||||||
|  | 
 | ||||||
|  | #ifndef NDEBUG | ||||||
|  | 	DEBUG("parsed " << blockCount << " blocks from pbf with " << groupCount << " groups"); | ||||||
|  | #endif | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline bool PBFParser::ReadHeader() { | ||||||
|  | 	_ThreadData initData; | ||||||
|  | 	/** read Header */ | ||||||
|  | 	if(!readPBFBlobHeader(input, &initData)) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if(readBlob(input, &initData)) { | ||||||
|  | 		if(!initData.PBFHeaderBlock.ParseFromArray(&(initData.charBuffer[0]), initData.charBuffer.size() ) ) { | ||||||
|  | 			std::cerr << "[error] Header not parseable!" << std::endl; | ||||||
|  | 			return false; | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
|  | 		for(int i = 0, featureSize = initData.PBFHeaderBlock.required_features_size(); i < featureSize; ++i) { | ||||||
|  | 			const std::string& feature = initData.PBFHeaderBlock.required_features( i ); | ||||||
|  | 			bool supported = false; | ||||||
|  | 			if ( "OsmSchema-V0.6" == feature ) { | ||||||
|  | 				supported = true; | ||||||
|  | 			} | ||||||
|  | 			else if ( "DenseNodes" == feature ) { | ||||||
|  | 				supported = true; | ||||||
|  | 			} | ||||||
|  | 			 | ||||||
|  | 			if ( !supported ) { | ||||||
|  | 				std::cerr << "[error] required feature not supported: " << feature.data() << std::endl; | ||||||
|  | 				return false; | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		std::cerr << "[error] blob not loaded!" << std::endl; | ||||||
|  | 	} | ||||||
|  | 	return true; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline void PBFParser::ReadData() { | ||||||
|  | 	bool keepRunning = true; | ||||||
|  | 	do { | ||||||
|  | 		_ThreadData *threadData = new _ThreadData(); | ||||||
|  | 		keepRunning = readNextBlock(input, threadData); | ||||||
|  | 
 | ||||||
|  | 		if (keepRunning) { | ||||||
|  | 			threadDataQueue->push(threadData); | ||||||
|  | 		} else { | ||||||
|  | 			threadDataQueue->push(NULL); // No more data to read, parse stops when NULL encountered
 | ||||||
|  | 			delete threadData; | ||||||
|  | 		} | ||||||
|  | 	} while(keepRunning); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline void PBFParser::ParseData() { | ||||||
|  | 	while (true) { | ||||||
|  | 		_ThreadData *threadData; | ||||||
|  | 		threadDataQueue->wait_and_pop(threadData); | ||||||
|  | 		if( NULL==threadData ) { | ||||||
|  | 			INFO("Parse Data Thread Finished"); | ||||||
|  | 			threadDataQueue->push(NULL); // Signal end of data for other threads
 | ||||||
|  | 			break; | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
|  | 		loadBlock(threadData); | ||||||
|  | 
 | ||||||
|  | 		for(int i = 0, groupSize = threadData->PBFprimitiveBlock.primitivegroup_size(); i < groupSize; ++i) { | ||||||
|  | 			threadData->currentGroupID = i; | ||||||
|  | 			loadGroup(threadData); | ||||||
|  | 
 | ||||||
|  | 			if(threadData->entityTypeIndicator == TypeNode) { | ||||||
|  | 				parseNode(threadData); | ||||||
|  | 			} | ||||||
|  | 			if(threadData->entityTypeIndicator == TypeWay) { | ||||||
|  | 				parseWay(threadData); | ||||||
|  | 			} | ||||||
|  | 			if(threadData->entityTypeIndicator == TypeRelation) { | ||||||
|  | 				parseRelation(threadData); | ||||||
|  | 			} | ||||||
|  | 			if(threadData->entityTypeIndicator == TypeDenseNode) { | ||||||
|  | 				parseDenseNode(threadData); | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
|  | 		delete threadData; | ||||||
|  | 		threadData = NULL; | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline bool PBFParser::Parse() { | ||||||
|  | 	// Start the read and parse threads
 | ||||||
|  | 	boost::thread readThread(boost::bind(&PBFParser::ReadData, this)); | ||||||
|  | 
 | ||||||
|  | 	//Open several parse threads that are synchronized before call to
 | ||||||
|  | 	boost::thread parseThread(boost::bind(&PBFParser::ParseData, this)); | ||||||
|  | 
 | ||||||
|  | 	// Wait for the threads to finish
 | ||||||
|  | 	readThread.join(); | ||||||
|  | 	parseThread.join(); | ||||||
|  | 
 | ||||||
|  | 	return true; | ||||||
|  | } | ||||||
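
Parse() couples exactly one reader thread and one parser thread through the bounded threadDataQueue created in the constructor (capacity 2500); a NULL entry is the end-of-stream sentinel, and ParseData() pushes the NULL back so any additional consumer would also stop. The real queue lives in DataStructures/ConcurrentQueue.h; the sketch below is only an assumption about the interface the code relies on (push that bounds growth, blocking wait_and_pop, non-blocking try_pop), not the actual class:

    #include <boost/thread/mutex.hpp>
    #include <boost/thread/condition_variable.hpp>
    #include <deque>

    template <typename T>
    class BoundedQueueSketch {
    public:
        explicit BoundedQueueSketch(std::size_t capacity) : capacity_(capacity) {}
        void push(const T & value) {                 // blocks while the queue is full
            boost::mutex::scoped_lock lock(mutex_);
            while (queue_.size() >= capacity_) { not_full_.wait(lock); }
            queue_.push_back(value);
            not_empty_.notify_one();
        }
        void wait_and_pop(T & value) {               // blocks while the queue is empty
            boost::mutex::scoped_lock lock(mutex_);
            while (queue_.empty()) { not_empty_.wait(lock); }
            value = queue_.front(); queue_.pop_front();
            not_full_.notify_one();
        }
        bool try_pop(T & value) {                    // non-blocking variant used in the destructor
            boost::mutex::scoped_lock lock(mutex_);
            if (queue_.empty()) { return false; }
            value = queue_.front(); queue_.pop_front();
            not_full_.notify_one();
            return true;
        }
    private:
        std::deque<T> queue_;
        std::size_t capacity_;
        boost::mutex mutex_;
        boost::condition_variable not_empty_, not_full_;
    };
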
|  | 
 | ||||||
|  | inline void PBFParser::parseDenseNode(_ThreadData * threadData) { | ||||||
|  | 	const OSMPBF::DenseNodes& dense = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).dense(); | ||||||
|  | 	int denseTagIndex = 0; | ||||||
|  | 	int m_lastDenseID = 0; | ||||||
|  | 	int m_lastDenseLatitude = 0; | ||||||
|  | 	int m_lastDenseLongitude = 0; | ||||||
|  | 
 | ||||||
|  | 	ImportNode n; | ||||||
|  | 	std::vector<ImportNode> extracted_nodes_vector; | ||||||
|  | 	const int number_of_nodes = dense.id_size(); | ||||||
|  | 	extracted_nodes_vector.reserve(number_of_nodes); | ||||||
|  | 	for(int i = 0; i < number_of_nodes; ++i) { | ||||||
|  | 		n.Clear(); | ||||||
|  | 		m_lastDenseID += dense.id( i ); | ||||||
|  | 		m_lastDenseLatitude += dense.lat( i ); | ||||||
|  | 		m_lastDenseLongitude += dense.lon( i ); | ||||||
|  | 		n.id = m_lastDenseID; | ||||||
|  | 		n.lat = 100000*( ( double ) m_lastDenseLatitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lat_offset() ) / NANO; | ||||||
|  | 		n.lon = 100000*( ( double ) m_lastDenseLongitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lon_offset() ) / NANO; | ||||||
|  | 		while (denseTagIndex < dense.keys_vals_size()) { | ||||||
|  | 			const int tagValue = dense.keys_vals( denseTagIndex ); | ||||||
|  | 			if( 0==tagValue ) { | ||||||
|  | 				++denseTagIndex; | ||||||
|  | 				break; | ||||||
|  | 			} | ||||||
|  | 			const int keyValue = dense.keys_vals ( denseTagIndex+1 ); | ||||||
|  | 			const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(tagValue).data(); | ||||||
|  | 			const std::string & value = threadData->PBFprimitiveBlock.stringtable().s(keyValue).data(); | ||||||
|  | 			n.keyVals.Add(key, value); | ||||||
|  | 			denseTagIndex += 2; | ||||||
|  | 		} | ||||||
|  | 		extracted_nodes_vector.push_back(n); | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | #pragma omp parallel for schedule ( guided ) | ||||||
|  | 	for(int i = 0; i < number_of_nodes; ++i) { | ||||||
|  | 	    ImportNode &n = extracted_nodes_vector[i]; | ||||||
|  | 	    ParseNodeInLua( n, scriptingEnvironment.getLuaStateForThreadID(omp_get_thread_num()) ); | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	BOOST_FOREACH(ImportNode &n, extracted_nodes_vector) { | ||||||
|  | 	    extractor_callbacks->nodeFunction(n); | ||||||
|  | 	} | ||||||
|  | } | ||||||
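
parseDenseNode undoes the PBF delta coding: id, lat and lon are stored as differences to the previous entry, keys_vals is one flat list of string-table indices in which a 0 closes the current node's tag list, and the decoded coordinate follows the spec formula 1e-9 * (offset + granularity * value), scaled by 100000 into OSRM's fixed-point representation (NANO is presumably 1e9). A minimal sketch of the delta-decoding step in isolation:

    #include <vector>
    #include <cstddef>
    #include <stdint.h>

    // Turns per-entry deltas into absolute values, as done for dense ids/lats/lons above.
    std::vector<int64_t> decodeDeltas(const std::vector<int64_t> & deltas) {
        std::vector<int64_t> absolute;
        absolute.reserve(deltas.size());
        int64_t last = 0;
        for (std::size_t i = 0; i < deltas.size(); ++i) {
            last += deltas[i];              // each entry is relative to its predecessor
            absolute.push_back(last);
        }
        return absolute;
    }
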
|  | 
 | ||||||
|  | inline void PBFParser::parseNode(_ThreadData * ) { | ||||||
|  | 	ERR("Parsing of simple nodes not supported. PBF should use dense nodes"); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline void PBFParser::parseRelation(_ThreadData * threadData) { | ||||||
|  | 	//TODO: leave early, if relation is not a restriction
 | ||||||
|  | 	//TODO: reuse rawRestriction container
 | ||||||
|  | 	if( !use_turn_restrictions ) { | ||||||
|  | 		return; | ||||||
|  | 	} | ||||||
|  | 	const OSMPBF::PrimitiveGroup& group = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ); | ||||||
|  | 	for(int i = 0; i < group.relations_size(); ++i ) { | ||||||
|  | 		std::string except_tag_string; | ||||||
|  | 		const OSMPBF::Relation& inputRelation = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).relations(i); | ||||||
|  | 		bool isRestriction = false; | ||||||
|  | 		bool isOnlyRestriction = false; | ||||||
|  | 		for(int k = 0, endOfKeys = inputRelation.keys_size(); k < endOfKeys; ++k) { | ||||||
|  | 			const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputRelation.keys(k)); | ||||||
|  | 			const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputRelation.vals(k)); | ||||||
|  | 			if ("type" == key) { | ||||||
|  | 				if( "restriction" == val) { | ||||||
|  | 					isRestriction = true; | ||||||
|  | 				} else { | ||||||
|  | 					break; | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			if ("restriction" == key) { | ||||||
|  | 				if(val.find("only_") == 0) { | ||||||
|  | 					isOnlyRestriction = true; | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			if ("except" == key) { | ||||||
|  | 				except_tag_string = val; | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
|  | 		if( isRestriction && ShouldIgnoreRestriction(except_tag_string) ) { | ||||||
|  | 			continue; | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
|  | 		if(isRestriction) { | ||||||
|  | 			int64_t lastRef = 0; | ||||||
|  | 			_RawRestrictionContainer currentRestrictionContainer(isOnlyRestriction); | ||||||
|  | 			for(int rolesIndex = 0; rolesIndex < inputRelation.roles_sid_size(); ++rolesIndex) { | ||||||
|  | 				std::string role(threadData->PBFprimitiveBlock.stringtable().s( inputRelation.roles_sid( rolesIndex ) ).data()); | ||||||
|  | 				lastRef += inputRelation.memids(rolesIndex); | ||||||
|  | 
 | ||||||
|  | 				if(!("from" == role || "to" == role || "via" == role)) { | ||||||
|  | 					continue; | ||||||
|  | 				} | ||||||
|  | 
 | ||||||
|  | 				switch(inputRelation.types(rolesIndex)) { | ||||||
|  | 				case 0: //node
 | ||||||
|  | 					if("from" == role || "to" == role) { //Only via should be a node
 | ||||||
|  | 						continue; | ||||||
|  | 					} | ||||||
|  | 					assert("via" == role); | ||||||
|  | 					if(UINT_MAX != currentRestrictionContainer.viaNode) { | ||||||
|  | 						currentRestrictionContainer.viaNode = UINT_MAX; | ||||||
|  | 					} | ||||||
|  | 					assert(UINT_MAX == currentRestrictionContainer.viaNode); | ||||||
|  | 					currentRestrictionContainer.restriction.viaNode = lastRef; | ||||||
|  | 					break; | ||||||
|  | 				case 1: //way
 | ||||||
|  | 					assert("from" == role || "to" == role || "via" == role); | ||||||
|  | 					if("from" == role) { | ||||||
|  | 						currentRestrictionContainer.fromWay = lastRef; | ||||||
|  | 					} | ||||||
|  | 					if ("to" == role) { | ||||||
|  | 						currentRestrictionContainer.toWay = lastRef; | ||||||
|  | 					} | ||||||
|  | 					if ("via" == role) { | ||||||
|  | 						assert(currentRestrictionContainer.restriction.toNode == UINT_MAX); | ||||||
|  | 						currentRestrictionContainer.viaNode = lastRef; | ||||||
|  | 					} | ||||||
|  | 					break; | ||||||
|  | 				case 2: //relation, not used. relations relating to relations are evil.
 | ||||||
|  | 					continue; | ||||||
|  | 					assert(false); | ||||||
|  | 					break; | ||||||
|  | 
 | ||||||
|  | 				default: //should not happen
 | ||||||
|  | 					//cout << "unknown";
 | ||||||
|  | 					assert(false); | ||||||
|  | 					break; | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			if(!extractor_callbacks->restrictionFunction(currentRestrictionContainer)) { | ||||||
|  | 				std::cerr << "[PBFParser] relation not parsed" << std::endl; | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
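
parseRelation only materialises relations tagged type=restriction, drops those whose except list matches the profile (ShouldIgnoreRestriction), and flags the only_* variants. For orientation, the member roles map onto _RawRestrictionContainer roughly as follows; the ids are invented and the snippet is illustrative only:

    void restrictionSketch() {
        _RawRestrictionContainer r(false);   // false: plain "no_*" restriction; true would mean "only_*"
        r.fromWay = 1001;                    // member with role "from" (a way)
        r.toWay   = 1003;                    // member with role "to"   (a way)
        r.restriction.viaNode = 2002;        // member with role "via" given as a node
        // A via member given as a way goes into r.viaNode instead, presumably resolved to a node later.
    }
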
|  | 
 | ||||||
|  | inline void PBFParser::parseWay(_ThreadData * threadData) { | ||||||
|  | 	ExtractionWay w; | ||||||
|  | 	std::vector<ExtractionWay> waysToParse; | ||||||
|  | 	const int number_of_ways = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways_size(); | ||||||
|  | 	waysToParse.reserve(number_of_ways); | ||||||
|  | 	for(int i = 0; i < number_of_ways; ++i) { | ||||||
|  | 		w.Clear(); | ||||||
|  | 		const OSMPBF::Way& inputWay = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways( i ); | ||||||
|  | 		w.id = inputWay.id(); | ||||||
|  | 		unsigned pathNode(0); | ||||||
|  | 		const int number_of_referenced_nodes = inputWay.refs_size(); | ||||||
|  | 		for(int i = 0; i < number_of_referenced_nodes; ++i) { | ||||||
|  | 			pathNode += inputWay.refs(i); | ||||||
|  | 			w.path.push_back(pathNode); | ||||||
|  | 		} | ||||||
|  | 		assert(inputWay.keys_size() == inputWay.vals_size()); | ||||||
|  | 		const int number_of_keys = inputWay.keys_size(); | ||||||
|  | 		for(int i = 0; i < number_of_keys; ++i) { | ||||||
|  | 			const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputWay.keys(i)); | ||||||
|  | 			const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputWay.vals(i)); | ||||||
|  | 			w.keyVals.Add(key, val); | ||||||
|  | 		} | ||||||
|  | 		waysToParse.push_back(w); | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | #pragma omp parallel for schedule ( guided ) | ||||||
|  | 	for(int i = 0; i < number_of_ways; ++i) { | ||||||
|  | 	    ExtractionWay & w = waysToParse[i]; | ||||||
|  | 	    ParseWayInLua( w, scriptingEnvironment.getLuaStateForThreadID(omp_get_thread_num()) ); | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	BOOST_FOREACH(ExtractionWay & w, waysToParse) { | ||||||
|  | 	    extractor_callbacks->wayFunction(w); | ||||||
|  | 	} | ||||||
|  | } | ||||||
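
parseWay mirrors parseDenseNode: node references are delta-coded, tags arrive as parallel keys/vals index arrays into the string table, and the Lua way_function runs over the whole batch in an OpenMP loop before the results reach the callbacks. The collection step also changes subtly: the old header-only version sized waysToParse with N default-constructed ways and then push_back'd N more, so default-constructed ways were effectively handed to the callbacks as well, whereas the new code reserves capacity only. A small sketch of the contrast, assuming ExtractionWay as defined above:

    #include <vector>

    void collectionSketch(int number_of_ways, const ExtractionWay & w) {
        std::vector<ExtractionWay> old_style(number_of_ways);  // already holds N default-constructed ways
        old_style.push_back(w);                                // w lands at index N, 2N entries overall

        std::vector<ExtractionWay> new_style;
        new_style.reserve(number_of_ways);                     // capacity only, size stays 0
        new_style.push_back(w);                                // only real ways are stored
    }
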
|  | 
 | ||||||
|  | inline void PBFParser::loadGroup(_ThreadData * threadData) { | ||||||
|  | #ifndef NDEBUG | ||||||
|  | 	++groupCount; | ||||||
|  | #endif | ||||||
|  | 
 | ||||||
|  | 	const OSMPBF::PrimitiveGroup& group = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ); | ||||||
|  | 	threadData->entityTypeIndicator = 0; | ||||||
|  | 	if ( group.nodes_size() != 0 ) { | ||||||
|  | 		threadData->entityTypeIndicator = TypeNode; | ||||||
|  | 	} | ||||||
|  | 	if ( group.ways_size() != 0 ) { | ||||||
|  | 		threadData->entityTypeIndicator = TypeWay; | ||||||
|  | 	} | ||||||
|  | 	if ( group.relations_size() != 0 ) { | ||||||
|  | 		threadData->entityTypeIndicator = TypeRelation; | ||||||
|  | 	} | ||||||
|  | 	if ( group.has_dense() )  { | ||||||
|  | 		threadData->entityTypeIndicator = TypeDenseNode; | ||||||
|  | 		assert( group.dense().id_size() != 0 ); | ||||||
|  | 	} | ||||||
|  | 	assert( threadData->entityTypeIndicator != 0 ); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline void PBFParser::loadBlock(_ThreadData * threadData) { | ||||||
|  | #ifndef NDEBUG | ||||||
|  | 	++blockCount; | ||||||
|  | #endif | ||||||
|  | 	threadData->currentGroupID = 0; | ||||||
|  | 	threadData->currentEntityID = 0; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline bool PBFParser::readPBFBlobHeader(std::fstream& stream, _ThreadData * threadData) { | ||||||
|  | 	int size(0); | ||||||
|  | 	stream.read((char *)&size, sizeof(int)); | ||||||
|  | 	size = swapEndian(size); | ||||||
|  | 	if(stream.eof()) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 	if ( size > MAX_BLOB_HEADER_SIZE || size < 0 ) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 	char *data = new char[size]; | ||||||
|  | 	stream.read(data, size*sizeof(data[0])); | ||||||
|  | 
 | ||||||
|  | 	bool dataSuccessfullyParsed = (threadData->PBFBlobHeader).ParseFromArray( data, size); | ||||||
|  | 	delete[] data; | ||||||
|  | 	return dataSuccessfullyParsed; | ||||||
|  | } | ||||||
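
Every block in a .osm.pbf stream starts with a 4-byte, big-endian length of the following BlobHeader message; readPBFBlobHeader reads that length, byte-swaps it on little-endian machines via swapEndian (now expected to come from Util/MachineInfo.h, which the reworked header includes), and then parses the BlobHeader itself. The swap presumably still matches the helper removed from the old header further down:

    // Assumed behaviour on a little-endian host; a no-op on big-endian machines.
    inline unsigned swapEndianSketch(unsigned x) {
        return (x >> 24) | ((x << 8) & 0x00FF0000) | ((x >> 8) & 0x0000FF00) | (x << 24);
    }
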
|  | 
 | ||||||
|  | inline bool PBFParser::unpackZLIB(std::fstream &, _ThreadData * threadData) { | ||||||
|  | 	unsigned rawSize = threadData->PBFBlob.raw_size(); | ||||||
|  | 	char* unpackedDataArray = new char[rawSize]; | ||||||
|  | 	z_stream compressedDataStream; | ||||||
|  | 	compressedDataStream.next_in = ( unsigned char* ) threadData->PBFBlob.zlib_data().data(); | ||||||
|  | 	compressedDataStream.avail_in = threadData->PBFBlob.zlib_data().size(); | ||||||
|  | 	compressedDataStream.next_out = ( unsigned char* ) unpackedDataArray; | ||||||
|  | 	compressedDataStream.avail_out = rawSize; | ||||||
|  | 	compressedDataStream.zalloc = Z_NULL; | ||||||
|  | 	compressedDataStream.zfree = Z_NULL; | ||||||
|  | 	compressedDataStream.opaque = Z_NULL; | ||||||
|  | 	int ret = inflateInit( &compressedDataStream ); | ||||||
|  | 	if ( ret != Z_OK ) { | ||||||
|  | 		std::cerr << "[error] failed to init zlib stream" << std::endl; | ||||||
|  | 		delete[] unpackedDataArray; | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	ret = inflate( &compressedDataStream, Z_FINISH ); | ||||||
|  | 	if ( ret != Z_STREAM_END ) { | ||||||
|  | 		std::cerr << "[error] failed to inflate zlib stream" << std::endl; | ||||||
|  | 		std::cerr << "[error] Error type: " << ret << std::endl; | ||||||
|  | 		delete[] unpackedDataArray; | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	ret = inflateEnd( &compressedDataStream ); | ||||||
|  | 	if ( ret != Z_OK ) { | ||||||
|  | 		std::cerr << "[error] failed to deinit zlib stream" << std::endl; | ||||||
|  | 		delete[] unpackedDataArray; | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	threadData->charBuffer.clear(); threadData->charBuffer.resize(rawSize); | ||||||
|  | 	std::copy(unpackedDataArray, unpackedDataArray + rawSize, threadData->charBuffer.begin()); | ||||||
|  | 	delete[] unpackedDataArray; | ||||||
|  | 	return true; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline bool PBFParser::unpackLZMA(std::fstream &, _ThreadData * ) { | ||||||
|  | 	return false; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | inline bool PBFParser::readBlob(std::fstream& stream, _ThreadData * threadData) { | ||||||
|  | 	if(stream.eof()) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 	 | ||||||
|  | 	const int size = threadData->PBFBlobHeader.datasize(); | ||||||
|  | 	if ( size < 0 || size > MAX_BLOB_SIZE ) { | ||||||
|  | 		std::cerr << "[error] invalid Blob size:" << size << std::endl; | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	char* data = new char[size]; | ||||||
|  | 	stream.read(data, sizeof(data[0])*size); | ||||||
|  | 
 | ||||||
|  | 	if ( !threadData->PBFBlob.ParseFromArray( data, size ) ) { | ||||||
|  | 		std::cerr << "[error] failed to parse blob" << std::endl; | ||||||
|  | 		delete[] data; | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if ( threadData->PBFBlob.has_raw() ) { | ||||||
|  | 		const std::string& data = threadData->PBFBlob.raw(); | ||||||
|  | 		threadData->charBuffer.clear(); | ||||||
|  | 		threadData->charBuffer.resize( data.size() ); | ||||||
|  | 		std::copy(data.begin(), data.end(), threadData->charBuffer.begin()); | ||||||
|  | 	} else if ( threadData->PBFBlob.has_zlib_data() ) { | ||||||
|  | 		if ( !unpackZLIB(stream, threadData) ) { | ||||||
|  | 			std::cerr << "[error] zlib data encountered that could not be unpacked" << std::endl; | ||||||
|  | 			delete[] data; | ||||||
|  | 			return false; | ||||||
|  | 		} | ||||||
|  | 	} else if ( threadData->PBFBlob.has_lzma_data() ) { | ||||||
|  | 		if ( !unpackLZMA(stream, threadData) ) { | ||||||
|  | 			std::cerr << "[error] lzma data encountered that could not be unpacked" << std::endl; | ||||||
|  | 		} | ||||||
|  | 		delete[] data; | ||||||
|  | 		return false; | ||||||
|  | 	} else { | ||||||
|  | 		std::cerr << "[error] Blob contains no data" << std::endl; | ||||||
|  | 		delete[] data; | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 	delete[] data; | ||||||
|  | 	return true; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | bool PBFParser::readNextBlock(std::fstream& stream, _ThreadData * threadData) { | ||||||
|  | 	if(stream.eof()) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if ( !readPBFBlobHeader(stream, threadData) ){ | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if ( threadData->PBFBlobHeader.type() != "OSMData" ) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if ( !readBlob(stream, threadData) ) { | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if ( !threadData->PBFprimitiveBlock.ParseFromArray( &(threadData->charBuffer[0]), threadData-> charBuffer.size() ) ) { | ||||||
|  | 		ERR("failed to parse PrimitiveBlock"); | ||||||
|  | 		return false; | ||||||
|  | 	} | ||||||
|  | 	return true; | ||||||
|  | } | ||||||
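
readNextBlock stitches the pieces together: length prefix, BlobHeader, Blob payload (raw, zlib or lzma), and finally the PrimitiveBlock parse. The on-disk layout it walks is, per the OSM PBF format, roughly:

    // [ uint32 length (big-endian) ][ BlobHeader type="OSMHeader" ][ Blob ]  -> HeaderBlock, read once by ReadHeader
    // [ uint32 length              ][ BlobHeader type="OSMData"   ][ Blob ]  -> PrimitiveBlock
    // [ uint32 length              ][ BlobHeader type="OSMData"   ][ Blob ]  -> PrimitiveBlock
    //   ... repeated until EOF; reading stops at the first header whose type is not "OSMData".
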
| @ -1,600 +1,101 @@ | |||||||
| /*
 | /*
 | ||||||
|     open source routing machine |  open source routing machine | ||||||
|     Copyright (C) Dennis Luxen, others 2010 |  Copyright (C) Dennis Luxen, others 2010 | ||||||
| 
 |   | ||||||
| This program is free software; you can redistribute it and/or modify |  This program is free software; you can redistribute it and/or modify | ||||||
| it under the terms of the GNU AFFERO General Public License as published by |  it under the terms of the GNU AFFERO General Public License as published by | ||||||
| the Free Software Foundation; either version 3 of the License, or |  the Free Software Foundation; either version 3 of the License, or | ||||||
| any later version. |  any later version. | ||||||
| 
 |   | ||||||
| This program is distributed in the hope that it will be useful, |  This program is distributed in the hope that it will be useful, | ||||||
| but WITHOUT ANY WARRANTY; without even the implied warranty of |  but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
| GNU General Public License for more details. |  GNU General Public License for more details. | ||||||
| 
 |   | ||||||
| You should have received a copy of the GNU Affero General Public License |  You should have received a copy of the GNU Affero General Public License | ||||||
| along with this program; if not, write to the Free Software |  along with this program; if not, write to the Free Software | ||||||
| Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA |  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
| or see http://www.gnu.org/licenses/agpl.txt.
 |  or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  */ |  */ | ||||||
| 
 | 
 | ||||||
| #ifndef PBFPARSER_H_ | #ifndef PBFPARSER_H_ | ||||||
| #define PBFPARSER_H_ | #define PBFPARSER_H_ | ||||||
| 
 | 
 | ||||||
| #include <zlib.h> | #include <boost/shared_ptr.hpp> | ||||||
| #include <boost/make_shared.hpp> | #include <boost/make_shared.hpp> | ||||||
| #include <boost/ref.hpp> | #include <boost/ref.hpp> | ||||||
| #include <boost/shared_ptr.hpp> |  | ||||||
| 
 | 
 | ||||||
| #include <osmpbf/fileformat.pb.h> | #include <osmpbf/fileformat.pb.h> | ||||||
| #include <osmpbf/osmformat.pb.h> | #include <osmpbf/osmformat.pb.h> | ||||||
| 
 | 
 | ||||||
| #include "../typedefs.h" | #include <zlib.h> | ||||||
| 
 | 
 | ||||||
| #include "BaseParser.h" | #include "../typedefs.h" | ||||||
| #include "ExtractorCallbacks.h" |  | ||||||
| #include "ExtractorStructs.h" |  | ||||||
| #include "ScriptingEnvironment.h" |  | ||||||
| #include "../DataStructures/HashTable.h" | #include "../DataStructures/HashTable.h" | ||||||
| #include "../DataStructures/ConcurrentQueue.h" | #include "../DataStructures/ConcurrentQueue.h" | ||||||
|  | #include "../Util/MachineInfo.h" | ||||||
| #include "../Util/OpenMPWrapper.h" | #include "../Util/OpenMPWrapper.h" | ||||||
| 
 | 
 | ||||||
| class PBFParser : public BaseParser<ExtractorCallbacks, _Node, _RawRestrictionContainer, _Way> { | #include "BaseParser.h" | ||||||
| 
 |  | ||||||
| //    typedef BaseParser<ExtractorCallbacks, _Node, _RawRestrictionContainer, _Way> super;
 |  | ||||||
| 
 | 
 | ||||||
|  | class PBFParser : public BaseParser { | ||||||
|  |      | ||||||
|     enum EntityType { |     enum EntityType { | ||||||
|         TypeNode = 1, |         TypeNode = 1, | ||||||
|         TypeWay = 2, |         TypeWay = 2, | ||||||
|         TypeRelation = 4, |         TypeRelation = 4, | ||||||
|         TypeDenseNode = 8 |         TypeDenseNode = 8 | ||||||
|     } ; |     } ; | ||||||
| 
 |      | ||||||
|     enum Endianness { |  | ||||||
|         LittleEndian = 1, |  | ||||||
|         BigEndian = 2 |  | ||||||
|     }; |  | ||||||
| 
 |  | ||||||
|     struct _ThreadData { |     struct _ThreadData { | ||||||
|         int currentGroupID; |         int currentGroupID; | ||||||
|         int currentEntityID; |         int currentEntityID; | ||||||
|         short entityTypeIndicator; |         short entityTypeIndicator; | ||||||
| 
 |          | ||||||
|         OSMPBF::BlobHeader PBFBlobHeader; |         OSMPBF::BlobHeader PBFBlobHeader; | ||||||
|         OSMPBF::Blob PBFBlob; |         OSMPBF::Blob PBFBlob; | ||||||
| 
 |          | ||||||
|         OSMPBF::HeaderBlock PBFHeaderBlock; |         OSMPBF::HeaderBlock PBFHeaderBlock; | ||||||
|         OSMPBF::PrimitiveBlock PBFprimitiveBlock; |         OSMPBF::PrimitiveBlock PBFprimitiveBlock; | ||||||
| 
 |          | ||||||
|         std::vector<char> charBuffer; |         std::vector<char> charBuffer; | ||||||
|     }; |     }; | ||||||
| 
 |      | ||||||
| public: | public: | ||||||
|     PBFParser(const char * fileName) : externalMemory(NULL){ |     PBFParser(const char * fileName, ExtractorCallbacks* ec, ScriptingEnvironment& se); | ||||||
|         GOOGLE_PROTOBUF_VERIFY_VERSION; |     virtual ~PBFParser(); | ||||||
|         //TODO: What is the bottleneck here? Filling the queue or reading the stuff from disk?
 |      | ||||||
|         //NOTE: With Lua scripting, it is parsing the stuff. I/O is virtually for free.
 |     inline bool ReadHeader(); | ||||||
|         threadDataQueue = boost::make_shared<ConcurrentQueue<_ThreadData*> >( 2500 ); /* Max 2500 items in queue, hardcoded. */ | 	inline bool Parse(); | ||||||
|         input.open(fileName, std::ios::in | std::ios::binary); |      | ||||||
| 
 |  | ||||||
|         if (!input) { |  | ||||||
|             std::cerr << fileName << ": File not found." << std::endl; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
| #ifndef NDEBUG |  | ||||||
|         blockCount = 0; |  | ||||||
|         groupCount = 0; |  | ||||||
| #endif |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     void RegisterCallbacks(ExtractorCallbacks * em) { |  | ||||||
|         externalMemory = em; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     //call by value, but who cares. It is done once.
 |  | ||||||
|     void RegisterScriptingEnvironment(ScriptingEnvironment & _se) { |  | ||||||
|         scriptingEnvironment = _se; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     ~PBFParser() { |  | ||||||
|         if(input.is_open()) |  | ||||||
|             input.close(); |  | ||||||
| 
 |  | ||||||
|         // Clean up any leftover ThreadData objects in the queue
 |  | ||||||
|         _ThreadData* td; |  | ||||||
|         while (threadDataQueue->try_pop(td)) { |  | ||||||
|             delete td; |  | ||||||
|         } |  | ||||||
|         google::protobuf::ShutdownProtobufLibrary(); |  | ||||||
| 
 |  | ||||||
| #ifndef NDEBUG |  | ||||||
|         DEBUG("parsed " << blockCount << " blocks from pbf with " << groupCount << " groups"); |  | ||||||
| #endif |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline bool Init() { |  | ||||||
|         _ThreadData initData; |  | ||||||
|         /** read Header */ |  | ||||||
|         if(!readPBFBlobHeader(input, &initData)) { |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if(readBlob(input, &initData)) { |  | ||||||
|             if(!initData.PBFHeaderBlock.ParseFromArray(&(initData.charBuffer[0]), initData.charBuffer.size() ) ) { |  | ||||||
|                 std::cerr << "[error] Header not parseable!" << std::endl; |  | ||||||
|                 return false; |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             for(int i = 0, featureSize = initData.PBFHeaderBlock.required_features_size(); i < featureSize; ++i) { |  | ||||||
|                 const std::string& feature = initData.PBFHeaderBlock.required_features( i ); |  | ||||||
|                 bool supported = false; |  | ||||||
|                 if ( "OsmSchema-V0.6" == feature ) |  | ||||||
|                     supported = true; |  | ||||||
|                 else if ( "DenseNodes" == feature ) |  | ||||||
|                     supported = true; |  | ||||||
| 
 |  | ||||||
|                 if ( !supported ) { |  | ||||||
|                     std::cerr << "[error] required feature not supported: " << feature.data() << std::endl; |  | ||||||
|                     return false; |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         } else { |  | ||||||
|             std::cerr << "[error] blob not loaded!" << std::endl; |  | ||||||
|         } |  | ||||||
|         return true; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void ReadData() { |  | ||||||
|         bool keepRunning = true; |  | ||||||
|         do { |  | ||||||
|             _ThreadData *threadData = new _ThreadData(); |  | ||||||
|             keepRunning = readNextBlock(input, threadData); |  | ||||||
| 
 |  | ||||||
|             if (keepRunning) |  | ||||||
|                 threadDataQueue->push(threadData); |  | ||||||
|             else { |  | ||||||
|                 threadDataQueue->push(NULL); // No more data to read, parse stops when NULL encountered
 |  | ||||||
|                 delete threadData; |  | ||||||
|             } |  | ||||||
|         } while(keepRunning); |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void ParseData() { |  | ||||||
|         while (1) { |  | ||||||
|             _ThreadData *threadData; |  | ||||||
|             threadDataQueue->wait_and_pop(threadData); |  | ||||||
|             if (threadData == NULL) { |  | ||||||
|                 INFO("Parse Data Thread Finished"); |  | ||||||
|                 threadDataQueue->push(NULL); // Signal end of data for other threads
 |  | ||||||
|                 break; |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             loadBlock(threadData); |  | ||||||
| 
 |  | ||||||
|             for(int i = 0, groupSize = threadData->PBFprimitiveBlock.primitivegroup_size(); i < groupSize; ++i) { |  | ||||||
|                 threadData->currentGroupID = i; |  | ||||||
|                 loadGroup(threadData); |  | ||||||
| 
 |  | ||||||
|                 if(threadData->entityTypeIndicator == TypeNode) |  | ||||||
|                     parseNode(threadData); |  | ||||||
|                 if(threadData->entityTypeIndicator == TypeWay) |  | ||||||
|                     parseWay(threadData); |  | ||||||
|                 if(threadData->entityTypeIndicator == TypeRelation) |  | ||||||
|                     parseRelation(threadData); |  | ||||||
|                 if(threadData->entityTypeIndicator == TypeDenseNode) |  | ||||||
|                     parseDenseNode(threadData); |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             delete threadData; |  | ||||||
|             threadData = NULL; |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline bool Parse() { |  | ||||||
|         // Start the read and parse threads
 |  | ||||||
|         boost::thread readThread(boost::bind(&PBFParser::ReadData, this)); |  | ||||||
| 
 |  | ||||||
|         //Open several parse threads that are synchronized before call to
 |  | ||||||
|         boost::thread parseThread(boost::bind(&PBFParser::ParseData, this)); |  | ||||||
| 
 |  | ||||||
|         // Wait for the threads to finish
 |  | ||||||
|         readThread.join(); |  | ||||||
|         parseThread.join(); |  | ||||||
| 
 |  | ||||||
|         return true; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
| private: | private: | ||||||
| 
 |     inline void ReadData(); | ||||||
|     inline void parseDenseNode(_ThreadData * threadData) { |     inline void ParseData(); | ||||||
|         const OSMPBF::DenseNodes& dense = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).dense(); |     inline void parseDenseNode(_ThreadData * threadData); | ||||||
|         int denseTagIndex = 0; |     inline void parseNode(_ThreadData * ); | ||||||
|         int m_lastDenseID = 0; |     inline void parseRelation(_ThreadData * threadData); | ||||||
|         int m_lastDenseLatitude = 0; |     inline void parseWay(_ThreadData * threadData); | ||||||
|         int m_lastDenseLongitude = 0; |      | ||||||
| 
 |     inline void loadGroup(_ThreadData * threadData); | ||||||
|         ImportNode n; |     inline void loadBlock(_ThreadData * threadData); | ||||||
| 		std::vector<ImportNode> nodesToParse; |     inline bool readPBFBlobHeader(std::fstream& stream, _ThreadData * threadData); | ||||||
|         for(int i = 0, idSize = dense.id_size(); i < idSize; ++i) { |     inline bool unpackZLIB(std::fstream &, _ThreadData * threadData); | ||||||
| 			n.Clear(); |     inline bool unpackLZMA(std::fstream &, _ThreadData * ); | ||||||
|             m_lastDenseID += dense.id( i ); |     inline bool readBlob(std::fstream& stream, _ThreadData * threadData) ; | ||||||
|             m_lastDenseLatitude += dense.lat( i ); |     inline bool readNextBlock(std::fstream& stream, _ThreadData * threadData); | ||||||
|             m_lastDenseLongitude += dense.lon( i ); |      | ||||||
|             n.id = m_lastDenseID; |  | ||||||
|             n.lat = 100000*( ( double ) m_lastDenseLatitude * threadData->PBFprimitiveBlock.granularity() +threadData-> PBFprimitiveBlock.lat_offset() ) / NANO; |  | ||||||
|             n.lon = 100000*( ( double ) m_lastDenseLongitude * threadData->PBFprimitiveBlock.granularity() + threadData->PBFprimitiveBlock.lon_offset() ) / NANO; |  | ||||||
|             while (denseTagIndex < dense.keys_vals_size()) { |  | ||||||
|                 const int tagValue = dense.keys_vals( denseTagIndex ); |  | ||||||
|                 if(tagValue == 0) { |  | ||||||
|                     ++denseTagIndex; |  | ||||||
|                     break; |  | ||||||
|                 } |  | ||||||
|                 const int keyValue = dense.keys_vals ( denseTagIndex+1 ); |  | ||||||
|                 const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(tagValue).data(); |  | ||||||
|                 const std::string & value = threadData->PBFprimitiveBlock.stringtable().s(keyValue).data(); |  | ||||||
|                 n.keyVals.Add(key, value); |  | ||||||
|                 denseTagIndex += 2; |  | ||||||
|             } |  | ||||||
|             nodesToParse.push_back(n); |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         unsigned endi_nodes = nodesToParse.size(); |  | ||||||
| #pragma omp parallel for schedule ( guided ) |  | ||||||
|         for(unsigned i = 0; i < endi_nodes; ++i) { |  | ||||||
|             ImportNode &n = nodesToParse[i]; |  | ||||||
|             /** Pass the unpacked node to the LUA call back **/ |  | ||||||
|             try { |  | ||||||
|                 luabind::call_function<int>( |  | ||||||
|                         scriptingEnvironment.getLuaStateForThreadID(omp_get_thread_num()), |  | ||||||
|                         "node_function", |  | ||||||
|                         boost::ref(n) |  | ||||||
|                 ); |  | ||||||
|             } catch (const luabind::error &er) { |  | ||||||
|                 lua_State* Ler=er.state(); |  | ||||||
|                 report_errors(Ler, -1); |  | ||||||
|                 ERR(er.what()); |  | ||||||
|             } |  | ||||||
| //            catch (...) {
 |  | ||||||
| //                ERR("Unknown error occurred during PBF dense node parsing!");
 |  | ||||||
| //            }
 |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|         BOOST_FOREACH(ImportNode &n, nodesToParse) { |  | ||||||
|             if(!externalMemory->nodeFunction(n)) |  | ||||||
|                 std::cerr << "[PBFParser] dense node not parsed" << std::endl; |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void parseNode(_ThreadData * ) { |  | ||||||
|         ERR("Parsing of simple nodes not supported. PBF should use dense nodes"); |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void parseRelation(_ThreadData * threadData) { |  | ||||||
| 		//TODO: leave early, if relation is not a restriction
 |  | ||||||
| 		//TODO: reuse rawRestriction container
 |  | ||||||
|         const OSMPBF::PrimitiveGroup& group = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ); |  | ||||||
|         for(int i = 0; i < group.relations_size(); ++i ) { |  | ||||||
|             const OSMPBF::Relation& inputRelation = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).relations(i); |  | ||||||
|             bool isRestriction = false; |  | ||||||
|             bool isOnlyRestriction = false; |  | ||||||
|             for(int k = 0, endOfKeys = inputRelation.keys_size(); k < endOfKeys; ++k) { |  | ||||||
|                 const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputRelation.keys(k)); |  | ||||||
|                 const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputRelation.vals(k)); |  | ||||||
|                 if ("type" == key) { |  | ||||||
| 					if( "restriction" == val) |  | ||||||
|                     	isRestriction = true; |  | ||||||
| 					else  |  | ||||||
| 						break; |  | ||||||
|                 } |  | ||||||
|                 if ("restriction" == key) { |  | ||||||
|                     if(val.find("only_") == 0) |  | ||||||
|                         isOnlyRestriction = true; |  | ||||||
|                 } |  | ||||||
| 
 |  | ||||||
|             } |  | ||||||
|             if(isRestriction) { |  | ||||||
|                 long long lastRef = 0; |  | ||||||
|                 _RawRestrictionContainer currentRestrictionContainer(isOnlyRestriction); |  | ||||||
|                 for(int rolesIndex = 0; rolesIndex < inputRelation.roles_sid_size(); ++rolesIndex) { |  | ||||||
|                     std::string role(threadData->PBFprimitiveBlock.stringtable().s( inputRelation.roles_sid( rolesIndex ) ).data()); |  | ||||||
|                     lastRef += inputRelation.memids(rolesIndex); |  | ||||||
| 
 |  | ||||||
|                     if(false == ("from" == role || "to" == role || "via" == role)) { |  | ||||||
|                         continue; |  | ||||||
|                     } |  | ||||||
| 
 |  | ||||||
|                     switch(inputRelation.types(rolesIndex)) { |  | ||||||
|                     case 0: //node
 |  | ||||||
|                         if("from" == role || "to" == role) //Only via should be a node
 |  | ||||||
|                             continue; |  | ||||||
|                         assert("via" == role); |  | ||||||
|                         if(UINT_MAX != currentRestrictionContainer.viaNode) |  | ||||||
|                             currentRestrictionContainer.viaNode = UINT_MAX; |  | ||||||
|                         assert(UINT_MAX == currentRestrictionContainer.viaNode); |  | ||||||
|                         currentRestrictionContainer.restriction.viaNode = lastRef; |  | ||||||
|                         break; |  | ||||||
|                     case 1: //way
 |  | ||||||
|                         assert("from" == role || "to" == role || "via" == role); |  | ||||||
|                         if("from" == role) { |  | ||||||
|                             currentRestrictionContainer.fromWay = lastRef; |  | ||||||
|                         } |  | ||||||
|                         if ("to" == role) { |  | ||||||
|                             currentRestrictionContainer.toWay = lastRef; |  | ||||||
|                         } |  | ||||||
|                         if ("via" == role) { |  | ||||||
|                             assert(currentRestrictionContainer.restriction.toNode == UINT_MAX); |  | ||||||
|                             currentRestrictionContainer.viaNode = lastRef; |  | ||||||
|                         } |  | ||||||
|                         break; |  | ||||||
|                     case 2: //relation, not used. relations relating to relations are evil.
 |  | ||||||
|                         continue; |  | ||||||
|                         assert(false); |  | ||||||
|                         break; |  | ||||||
| 
 |  | ||||||
|                     default: //should not happen
 |  | ||||||
|                         //cout << "unknown";
 |  | ||||||
|                         assert(false); |  | ||||||
|                         break; |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|                 //                if(UINT_MAX != currentRestriction.viaNode) {
 |  | ||||||
|                 //                    cout << "restr from " << currentRestriction.from << " via ";
 |  | ||||||
|                 //                    cout << "node " << currentRestriction.viaNode;
 |  | ||||||
|                 //                    cout << " to " << currentRestriction.to << endl;
 |  | ||||||
|                 //                }
 |  | ||||||
|                 if(!externalMemory->restrictionFunction(currentRestrictionContainer)) |  | ||||||
|                     std::cerr << "[PBFParser] relation not parsed" << std::endl; |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void parseWay(_ThreadData * threadData) { |  | ||||||
|             _Way w; |  | ||||||
|             std::vector<_Way> waysToParse(threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways_size()); |  | ||||||
|             for(int i = 0, ways_size = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways_size(); i < ways_size; ++i) { |  | ||||||
| 				w.Clear(); |  | ||||||
|                 const OSMPBF::Way& inputWay = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ).ways( i ); |  | ||||||
|                 w.id = inputWay.id(); |  | ||||||
|                 unsigned pathNode(0); |  | ||||||
|                 for(int i = 0; i < inputWay.refs_size(); ++i) { |  | ||||||
|                     pathNode += inputWay.refs(i); |  | ||||||
|                     w.path.push_back(pathNode); |  | ||||||
|                 } |  | ||||||
|                 assert(inputWay.keys_size() == inputWay.vals_size()); |  | ||||||
|                 for(int i = 0; i < inputWay.keys_size(); ++i) { |  | ||||||
|                     const std::string & key = threadData->PBFprimitiveBlock.stringtable().s(inputWay.keys(i)); |  | ||||||
|                     const std::string & val = threadData->PBFprimitiveBlock.stringtable().s(inputWay.vals(i)); |  | ||||||
|                     w.keyVals.Add(key, val); |  | ||||||
|                 } |  | ||||||
| 
 |  | ||||||
|                 waysToParse.push_back(w); |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             unsigned endi_ways = waysToParse.size(); |  | ||||||
| #pragma omp parallel for schedule ( guided ) |  | ||||||
|             for(unsigned i = 0; i < endi_ways; ++i) { |  | ||||||
|                 _Way & w = waysToParse[i]; |  | ||||||
|                 /** Pass the unpacked way to the LUA call back **/ |  | ||||||
|                 try { |  | ||||||
|                     luabind::call_function<int>( |  | ||||||
|                         scriptingEnvironment.getLuaStateForThreadID(omp_get_thread_num()), |  | ||||||
|                         "way_function", |  | ||||||
|                         boost::ref(w), |  | ||||||
|                         w.path.size() |  | ||||||
|                     ); |  | ||||||
| 
 |  | ||||||
|                 } catch (const luabind::error &er) { |  | ||||||
|                     lua_State* Ler=er.state(); |  | ||||||
|                     report_errors(Ler, -1); |  | ||||||
|                     ERR(er.what()); |  | ||||||
|                 } |  | ||||||
| //                catch (...) {
 |  | ||||||
| //                    ERR("Unknown error!");
 |  | ||||||
| //                }
 |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             BOOST_FOREACH(_Way & w, waysToParse) { |  | ||||||
|                 if(!externalMemory->wayFunction(w)) { |  | ||||||
|                     std::cerr << "[PBFParser] way not parsed" << std::endl; |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void loadGroup(_ThreadData * threadData) { |  | ||||||
| #ifndef NDEBUG |  | ||||||
|         ++groupCount; |  | ||||||
| #endif |  | ||||||
| 		 |  | ||||||
|         const OSMPBF::PrimitiveGroup& group = threadData->PBFprimitiveBlock.primitivegroup( threadData->currentGroupID ); |  | ||||||
|         threadData->entityTypeIndicator = 0; |  | ||||||
|         if ( group.nodes_size() != 0 ) { |  | ||||||
|             threadData->entityTypeIndicator = TypeNode; |  | ||||||
|         } |  | ||||||
|         if ( group.ways_size() != 0 ) { |  | ||||||
|             threadData->entityTypeIndicator = TypeWay; |  | ||||||
|         } |  | ||||||
|         if ( group.relations_size() != 0 ) { |  | ||||||
|             threadData->entityTypeIndicator = TypeRelation; |  | ||||||
|         } |  | ||||||
|         if ( group.has_dense() )  { |  | ||||||
|             threadData->entityTypeIndicator = TypeDenseNode; |  | ||||||
|             assert( group.dense().id_size() != 0 ); |  | ||||||
|         } |  | ||||||
|         assert( threadData->entityTypeIndicator != 0 ); |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline void loadBlock(_ThreadData * threadData) { |  | ||||||
| #ifndef NDEBUG |  | ||||||
|         ++blockCount; |  | ||||||
| #endif |  | ||||||
|         threadData->currentGroupID = 0; |  | ||||||
|         threadData->currentEntityID = 0; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     /* Reverses Network Byte Order into something usable, compiles down to a bswap-mov combination */ |  | ||||||
|     inline unsigned swapEndian(unsigned x) const { |  | ||||||
|         if(getMachineEndianness() == LittleEndian) |  | ||||||
|             return ( (x>>24) | ((x<<8) & 0x00FF0000) | ((x>>8) & 0x0000FF00) | (x<<24) ); |  | ||||||
|         return x; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline bool readPBFBlobHeader(std::fstream& stream, _ThreadData * threadData) { |  | ||||||
|         int size(0); |  | ||||||
|         stream.read((char *)&size, sizeof(int)); |  | ||||||
|         size = swapEndian(size); |  | ||||||
|         if(stream.eof()) { |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
|         if ( size > MAX_BLOB_HEADER_SIZE || size < 0 ) { |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
|         char *data = new char[size]; |  | ||||||
|         stream.read(data, size*sizeof(data[0])); |  | ||||||
| 
 |  | ||||||
|         bool dataSuccessfullyParsed = (threadData->PBFBlobHeader).ParseFromArray( data, size); |  | ||||||
|         delete[] data; |  | ||||||
|         return dataSuccessfullyParsed; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline bool unpackZLIB(std::fstream &, _ThreadData * threadData) { |  | ||||||
|         unsigned rawSize = threadData->PBFBlob.raw_size(); |  | ||||||
|         char* unpackedDataArray = new char[rawSize]; |  | ||||||
|         z_stream compressedDataStream; |  | ||||||
|         compressedDataStream.next_in = ( unsigned char* ) threadData->PBFBlob.zlib_data().data(); |  | ||||||
|         compressedDataStream.avail_in = threadData->PBFBlob.zlib_data().size(); |  | ||||||
|         compressedDataStream.next_out = ( unsigned char* ) unpackedDataArray; |  | ||||||
|         compressedDataStream.avail_out = rawSize; |  | ||||||
|         compressedDataStream.zalloc = Z_NULL; |  | ||||||
|         compressedDataStream.zfree = Z_NULL; |  | ||||||
|         compressedDataStream.opaque = Z_NULL; |  | ||||||
|         int ret = inflateInit( &compressedDataStream ); |  | ||||||
|         if ( ret != Z_OK ) { |  | ||||||
|             std::cerr << "[error] failed to init zlib stream" << std::endl; |  | ||||||
|             delete[] unpackedDataArray; |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         ret = inflate( &compressedDataStream, Z_FINISH ); |  | ||||||
|         if ( ret != Z_STREAM_END ) { |  | ||||||
|             std::cerr << "[error] failed to inflate zlib stream" << std::endl; |  | ||||||
|             std::cerr << "[error] Error type: " << ret << std::endl; |  | ||||||
|             delete[] unpackedDataArray; |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         ret = inflateEnd( &compressedDataStream ); |  | ||||||
|         if ( ret != Z_OK ) { |  | ||||||
|             std::cerr << "[error] failed to deinit zlib stream" << std::endl; |  | ||||||
|             delete[] unpackedDataArray; |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         threadData->charBuffer.clear(); threadData->charBuffer.resize(rawSize); |  | ||||||
| 		std::copy(unpackedDataArray, unpackedDataArray + rawSize, threadData->charBuffer.begin()); |  | ||||||
|         delete[] unpackedDataArray; |  | ||||||
|         return true; |  | ||||||
|     } |  | ||||||
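Because OSMPBF::Blob carries the exact decompressed size in raw_size(), a single inflate() pass with Z_FINISH is sufficient here. For comparison only, zlib's one-shot helper does the same work in one call; this is a sketch of an equivalent, not the code path the parser takes:

    #include <zlib.h>
    #include <string>
    #include <vector>

    // Hypothetical helper: decompress a blob's zlib payload into 'out', trusting
    // raw_size as the exact output length, as the Blob message promises.
    static bool unpackBlobZlib(const std::string& zlib_data, const unsigned raw_size, std::vector<char>& out) {
        out.resize(raw_size);
        if (0 == raw_size) { return true; }
        uLongf destination_length = raw_size;
        const int ret = uncompress(reinterpret_cast<Bytef*>(&out[0]), &destination_length,
                                   reinterpret_cast<const Bytef*>(zlib_data.data()), zlib_data.size());
        return (Z_OK == ret) && (destination_length == raw_size);
    }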
| 
 |  | ||||||
|     inline bool unpackLZMA(std::fstream &, _ThreadData * ) const { |  | ||||||
|         return false; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline bool readBlob(std::fstream& stream, _ThreadData * threadData) { |  | ||||||
|         if(stream.eof()) |  | ||||||
|             return false; |  | ||||||
| 
 |  | ||||||
|         const int size = threadData->PBFBlobHeader.datasize(); |  | ||||||
|         if ( size < 0 || size > MAX_BLOB_SIZE ) { |  | ||||||
|             std::cerr << "[error] invalid Blob size:" << size << std::endl; |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         char* data = new char[size]; |  | ||||||
|         stream.read(data, sizeof(data[0])*size); |  | ||||||
| 
 |  | ||||||
|         if ( !threadData->PBFBlob.ParseFromArray( data, size ) ) { |  | ||||||
|             std::cerr << "[error] failed to parse blob" << std::endl; |  | ||||||
|             delete[] data; |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if ( threadData->PBFBlob.has_raw() ) { |  | ||||||
|             const std::string& data = threadData->PBFBlob.raw(); |  | ||||||
|             threadData->charBuffer.clear(); |  | ||||||
|             threadData->charBuffer.resize( data.size() ); |  | ||||||
| 			std::copy(data.begin(), data.end(), threadData->charBuffer.begin()); |  | ||||||
|         } else if ( threadData->PBFBlob.has_zlib_data() ) { |  | ||||||
|             if ( !unpackZLIB(stream, threadData) ) { |  | ||||||
|                 std::cerr << "[error] zlib data encountered that could not be unpacked" << std::endl; |  | ||||||
| 	            delete[] data; |  | ||||||
|                 return false; |  | ||||||
|             } |  | ||||||
|         } else if ( threadData->PBFBlob.has_lzma_data() ) { |  | ||||||
|             if ( !unpackLZMA(stream, threadData) ) |  | ||||||
|                 std::cerr << "[error] lzma data encountered that could not be unpacked" << std::endl; |  | ||||||
|             delete[] data; |  | ||||||
|             return false; |  | ||||||
|         } else { |  | ||||||
|             std::cerr << "[error] Blob contains no data" << std::endl; |  | ||||||
|             delete[] data; |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
|         delete[] data; |  | ||||||
|         return true; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     inline bool readNextBlock(std::fstream& stream, _ThreadData * threadData) { |  | ||||||
|         if(stream.eof()) { |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if ( !readPBFBlobHeader(stream, threadData) ){ |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if ( threadData->PBFBlobHeader.type() != "OSMData" ) { |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if ( !readBlob(stream, threadData) ) { |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if ( !threadData->PBFprimitiveBlock.ParseFromArray( &(threadData->charBuffer[0]), threadData-> charBuffer.size() ) ) { |  | ||||||
|             ERR("failed to parse PrimitiveBlock"); |  | ||||||
|             return false; |  | ||||||
|         } |  | ||||||
|         return true; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
| 	//Is optimized to a single 'mov eax,1' on GCC, clang and icc using -O3
 |  | ||||||
|     inline Endianness getMachineEndianness() const { |  | ||||||
|         int i(1); |  | ||||||
|         char *p = (char *) &i; |  | ||||||
|         if (1 == p[0]) |  | ||||||
|             return LittleEndian; |  | ||||||
|         return BigEndian; |  | ||||||
|     } |  | ||||||
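Taken together, getMachineEndianness() and swapEndian() do what the POSIX socket API exposes as ntohl(): convert a network-byte-order word to host order, swapping only on little-endian machines. For comparison only (POSIX header; a Windows build would pull the same function from winsock):

    #include <arpa/inet.h>

    // Not part of this header, just the library spelling of the same conversion.
    static unsigned networkToHost(const unsigned network_order_word) {
        return ntohl(network_order_word);
    }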
| 
 |  | ||||||
|     static const int NANO = 1000 * 1000 * 1000; |     static const int NANO = 1000 * 1000 * 1000; | ||||||
|     static const int MAX_BLOB_HEADER_SIZE = 64 * 1024; |     static const int MAX_BLOB_HEADER_SIZE = 64 * 1024; | ||||||
|     static const int MAX_BLOB_SIZE = 32 * 1024 * 1024; |     static const int MAX_BLOB_SIZE = 32 * 1024 * 1024; | ||||||
| 
 |      | ||||||
| #ifndef NDEBUG | #ifndef NDEBUG | ||||||
|     /* counting the number of read blocks and groups */ |     /* counting the number of read blocks and groups */ | ||||||
|     unsigned groupCount; |     unsigned groupCount; | ||||||
|     unsigned blockCount; |     unsigned blockCount; | ||||||
| #endif | #endif | ||||||
| 	 | 	 | ||||||
|     ExtractorCallbacks * externalMemory; |     std::fstream input;     // the input stream to parse
 | ||||||
|     /* the input stream to parse */ |  | ||||||
|     std::fstream input; |  | ||||||
| 
 |  | ||||||
|     /* ThreadData Queue */ |  | ||||||
|     boost::shared_ptr<ConcurrentQueue < _ThreadData* > > threadDataQueue; |     boost::shared_ptr<ConcurrentQueue < _ThreadData* > > threadDataQueue; | ||||||
| 
 |  | ||||||
|     ScriptingEnvironment scriptingEnvironment; |  | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| #endif /* PBFPARSER_H_ */ | #endif /* PBFPARSER_H_ */ | ||||||
|  | |||||||
| @ -18,15 +18,7 @@ | |||||||
|  or see http://www.gnu.org/licenses/agpl.txt.
 |  or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  */ |  */ | ||||||
| 
 | 
 | ||||||
| extern "C" { |  | ||||||
| #include <lua.h> |  | ||||||
| #include <lauxlib.h> |  | ||||||
| #include <lualib.h> |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| #include "ScriptingEnvironment.h" | #include "ScriptingEnvironment.h" | ||||||
| #include "../typedefs.h" |  | ||||||
| #include "../Util/OpenMPWrapper.h" |  | ||||||
| 
 | 
 | ||||||
| ScriptingEnvironment::ScriptingEnvironment() {} | ScriptingEnvironment::ScriptingEnvironment() {} | ||||||
| ScriptingEnvironment::ScriptingEnvironment(const char * fileName) { | ScriptingEnvironment::ScriptingEnvironment(const char * fileName) { | ||||||
| @ -44,6 +36,8 @@ ScriptingEnvironment::ScriptingEnvironment(const char * fileName) { | |||||||
|         //open utility libraries string library;
 |         //open utility libraries string library;
 | ||||||
|         luaL_openlibs(myLuaState); |         luaL_openlibs(myLuaState); | ||||||
| 
 | 
 | ||||||
|  |         luaAddScriptFolderToLoadPath( myLuaState, fileName ); | ||||||
|  | 
 | ||||||
|         // Add our function to the state's global scope
 |         // Add our function to the state's global scope
 | ||||||
|         luabind::module(myLuaState) [ |         luabind::module(myLuaState) [ | ||||||
|                                      luabind::def("print", LUA_print<std::string>), |                                      luabind::def("print", LUA_print<std::string>), | ||||||
| @ -51,15 +45,7 @@ ScriptingEnvironment::ScriptingEnvironment(const char * fileName) { | |||||||
|                                      luabind::def("durationIsValid", durationIsValid), |                                      luabind::def("durationIsValid", durationIsValid), | ||||||
|                                      luabind::def("parseDuration", parseDuration) |                                      luabind::def("parseDuration", parseDuration) | ||||||
|         ]; |         ]; | ||||||
| //#pragma omp critical
 | 
 | ||||||
| //        {
 |  | ||||||
| //            if(0 != luaL_dostring(
 |  | ||||||
| //                    myLuaState,
 |  | ||||||
| //                    "print('Initializing LUA engine')\n"
 |  | ||||||
| //            )) {
 |  | ||||||
| //                ERR(lua_tostring(myLuaState,-1)<< " occured in scripting block");
 |  | ||||||
| //            }
 |  | ||||||
| //        }
 |  | ||||||
|         luabind::module(myLuaState) [ |         luabind::module(myLuaState) [ | ||||||
|                                      luabind::class_<HashTable<std::string, std::string> >("keyVals") |                                      luabind::class_<HashTable<std::string, std::string> >("keyVals") | ||||||
|                                      .def("Add", &HashTable<std::string, std::string>::Add) |                                      .def("Add", &HashTable<std::string, std::string>::Add) | ||||||
| @ -79,32 +65,32 @@ ScriptingEnvironment::ScriptingEnvironment(const char * fileName) { | |||||||
|                                      ]; |                                      ]; | ||||||
| 
 | 
 | ||||||
|         luabind::module(myLuaState) [ |         luabind::module(myLuaState) [ | ||||||
|                                      luabind::class_<_Way>("Way") |                                      luabind::class_<ExtractionWay>("Way") | ||||||
|                                      .def(luabind::constructor<>()) |                                      .def(luabind::constructor<>()) | ||||||
|                                      .def_readwrite("name", &_Way::name) |                                      .def_readwrite("name", &ExtractionWay::name) | ||||||
|                                      .def_readwrite("speed", &_Way::speed) |                                      .def_readwrite("speed", &ExtractionWay::speed) | ||||||
|                                      .def_readwrite("type", &_Way::type) |                                      .def_readwrite("backward_speed", &ExtractionWay::backward_speed) | ||||||
|                                      .def_readwrite("access", &_Way::access) |                                      .def_readwrite("duration", &ExtractionWay::duration) | ||||||
|                                      .def_readwrite("roundabout", &_Way::roundabout) |                                      .def_readwrite("type", &ExtractionWay::type) | ||||||
|                                      .def_readwrite("is_duration_set", &_Way::isDurationSet) |                                      .def_readwrite("access", &ExtractionWay::access) | ||||||
|                                      .def_readwrite("is_access_restricted", &_Way::isAccessRestricted) |                                      .def_readwrite("roundabout", &ExtractionWay::roundabout) | ||||||
|                                      .def_readwrite("ignore_in_grid", &_Way::ignoreInGrid) |                                      .def_readwrite("is_access_restricted", &ExtractionWay::isAccessRestricted) | ||||||
|                                      .def_readwrite("tags", &_Way::keyVals) |                                      .def_readwrite("ignore_in_grid", &ExtractionWay::ignoreInGrid) | ||||||
|                                      .def_readwrite("direction", &_Way::direction) |                                      .def_readwrite("tags", &ExtractionWay::keyVals) | ||||||
|  |                                      .def_readwrite("direction", &ExtractionWay::direction) | ||||||
|                                      .enum_("constants") |                                      .enum_("constants") | ||||||
|                                      [ | 										 [ | ||||||
|                                       luabind::value("notSure", 0), | 										  luabind::value("notSure", 0), | ||||||
|                                       luabind::value("oneway", 1), | 										  luabind::value("oneway", 1), | ||||||
|                                       luabind::value("bidirectional", 2), | 										  luabind::value("bidirectional", 2), | ||||||
|                                       luabind::value("opposite", 3) | 										  luabind::value("opposite", 3) | ||||||
|         ] | 										 ] | ||||||
|         ]; |         							 ]; | ||||||
|  |         luabind::module(myLuaState) [ | ||||||
|  |                                      luabind::class_<std::vector<std::string> >("vector") | ||||||
|  |                                      .def("Add", &std::vector<std::string>::push_back) | ||||||
|  |                                      ]; | ||||||
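These registrations are what a speed profile sees from Lua: an ExtractionWay exposed as "Way" with read/write fields such as speed, duration and direction, plus a std::string vector exposed as "vector". On the C++ side the profile's way_function is driven through luabind, as the old inline XML parser below did explicitly; the wrapper used in this release (ParseWayInLua, called from XMLParser.cpp) is not shown in this diff. A sketch of that call, assuming a lua_State prepared as above:

    // Illustrative only: hand an ExtractionWay to the profile's way_function.
    ExtractionWay way;
    try {
        luabind::call_function<int>(myLuaState, "way_function", boost::ref(way), way.path.size());
    } catch (const luabind::error& er) {
        std::cerr << er.what() << std::endl;  // the Lua error text sits on top of the stack
    }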
| 
 | 
 | ||||||
|         // Now call our function in a lua script
 |  | ||||||
| //#pragma omp critical
 |  | ||||||
| //        {
 |  | ||||||
| //            INFO("Parsing speedprofile from " << fileName );
 |  | ||||||
| //        }
 |  | ||||||
|         if(0 != luaL_dofile(myLuaState, fileName) ) { |         if(0 != luaL_dofile(myLuaState, fileName) ) { | ||||||
|             ERR(lua_tostring(myLuaState,-1)<< " occurred in scripting block"); |             ERR(lua_tostring(myLuaState,-1)<< " occurred in scripting block"); | ||||||
|         } |         } | ||||||
|  | |||||||
| @ -30,9 +30,11 @@ extern "C" { | |||||||
| 
 | 
 | ||||||
| #include "ExtractionHelperFunctions.h" | #include "ExtractionHelperFunctions.h" | ||||||
| #include "ExtractorStructs.h" | #include "ExtractorStructs.h" | ||||||
| #include "LuaUtil.h" |  | ||||||
| 
 | 
 | ||||||
|  | #include "../typedefs.h" | ||||||
| #include "../DataStructures/ImportNode.h" | #include "../DataStructures/ImportNode.h" | ||||||
|  | #include "../Util/LuaUtil.h" | ||||||
|  | #include "../Util/OpenMPWrapper.h" | ||||||
| 
 | 
 | ||||||
| class ScriptingEnvironment { | class ScriptingEnvironment { | ||||||
| public: | public: | ||||||
|  | |||||||
							
								
								
									
 Extractor/XMLParser.cpp | 276 (new file)
| @ -0,0 +1,276 @@ | |||||||
|  | /*
 | ||||||
|  |  open source routing machine | ||||||
|  |  Copyright (C) Dennis Luxen, others 2010 | ||||||
|  | 
 | ||||||
|  |  This program is free software; you can redistribute it and/or modify | ||||||
|  |  it under the terms of the GNU AFFERO General Public License as published by | ||||||
|  |  the Free Software Foundation; either version 3 of the License, or | ||||||
|  |  any later version. | ||||||
|  | 
 | ||||||
|  |  This program is distributed in the hope that it will be useful, | ||||||
|  |  but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  |  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  |  GNU General Public License for more details. | ||||||
|  | 
 | ||||||
|  |  You should have received a copy of the GNU Affero General Public License | ||||||
|  |  along with this program; if not, write to the Free Software | ||||||
|  |  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
|  |  or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  |  */ | ||||||
|  | 
 | ||||||
|  | #include <boost/ref.hpp> | ||||||
|  | 
 | ||||||
|  | #include "XMLParser.h" | ||||||
|  | 
 | ||||||
|  | #include "ExtractorStructs.h" | ||||||
|  | #include "../DataStructures/HashTable.h" | ||||||
|  | #include "../DataStructures/InputReaderFactory.h" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | XMLParser::XMLParser(const char * filename, ExtractorCallbacks* ec, ScriptingEnvironment& se) : BaseParser(ec, se) { | ||||||
|  | 	WARN("Parsing plain .osm/.osm.bz2 is deprecated. Switch to .pbf"); | ||||||
|  | 	inputReader = inputReaderFactory(filename); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | bool XMLParser::ReadHeader() { | ||||||
|  | 	return (xmlTextReaderRead( inputReader ) == 1); | ||||||
|  | } | ||||||
|  | bool XMLParser::Parse() { | ||||||
|  | 	while ( xmlTextReaderRead( inputReader ) == 1 ) { | ||||||
|  | 		const int type = xmlTextReaderNodeType( inputReader ); | ||||||
|  | 
 | ||||||
|  | 		//1 is Element
 | ||||||
|  | 		if ( type != 1 ) { | ||||||
|  | 			continue; | ||||||
|  | 		} | ||||||
|  | 		 | ||||||
|  | 		xmlChar* currentName = xmlTextReaderName( inputReader ); | ||||||
|  | 		if ( currentName == NULL ) { | ||||||
|  | 			continue; | ||||||
|  | 		} | ||||||
|  | 		 | ||||||
|  | 		if ( xmlStrEqual( currentName, ( const xmlChar* ) "node" ) == 1 ) { | ||||||
|  | 			ImportNode n = _ReadXMLNode(); | ||||||
|  | 			ParseNodeInLua( n, luaState ); | ||||||
|  | 			extractor_callbacks->nodeFunction(n); | ||||||
|  | //			if(!extractor_callbacks->nodeFunction(n))
 | ||||||
|  | //				std::cerr << "[XMLParser] dense node not parsed" << std::endl;
 | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
|  | 		if ( xmlStrEqual( currentName, ( const xmlChar* ) "way" ) == 1 ) { | ||||||
|  | 			ExtractionWay way = _ReadXMLWay( ); | ||||||
|  | 			ParseWayInLua( way, luaState ); | ||||||
|  | 			extractor_callbacks->wayFunction(way); | ||||||
|  | //			if(!extractor_callbacks->wayFunction(way))
 | ||||||
|  | //				std::cerr << "[PBFParser] way not parsed" << std::endl;
 | ||||||
|  | 		} | ||||||
|  | 		if( use_turn_restrictions ) { | ||||||
|  | 			if ( xmlStrEqual( currentName, ( const xmlChar* ) "relation" ) == 1 ) { | ||||||
|  | 				_RawRestrictionContainer r = _ReadXMLRestriction(); | ||||||
|  | 				if(r.fromWay != UINT_MAX) { | ||||||
|  | 					if(!extractor_callbacks->restrictionFunction(r)) { | ||||||
|  | 						std::cerr << "[XMLParser] restriction not parsed" << std::endl; | ||||||
|  | 					} | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 		xmlFree( currentName ); | ||||||
|  | 	} | ||||||
|  | 	return true; | ||||||
|  | } | ||||||
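The literals 1 and 15 used throughout this parser are libxml2's xmlReaderTypes values for opening and closing tags. Purely as a readability note, the same checks spelled with the named constants (these helpers are not part of the patch):

    #include <libxml/xmlreader.h>

    // XML_READER_TYPE_ELEMENT == 1 and XML_READER_TYPE_END_ELEMENT == 15 in xmlreader.h.
    static inline bool isStartElement(xmlTextReaderPtr reader) {
        return XML_READER_TYPE_ELEMENT == xmlTextReaderNodeType(reader);
    }

    static inline bool isEndElement(xmlTextReaderPtr reader) {
        return XML_READER_TYPE_END_ELEMENT == xmlTextReaderNodeType(reader);
    }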
|  | 
 | ||||||
|  | _RawRestrictionContainer XMLParser::_ReadXMLRestriction() { | ||||||
|  |     _RawRestrictionContainer restriction; | ||||||
|  |     std::string except_tag_string; | ||||||
|  | 
 | ||||||
|  | 	if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) { | ||||||
|  | 		const int depth = xmlTextReaderDepth( inputReader ); | ||||||
|  | 		while ( xmlTextReaderRead( inputReader ) == 1 ) { | ||||||
|  | 			const int childType = xmlTextReaderNodeType( inputReader ); | ||||||
|  | 			if ( childType != 1 && childType != 15 ) { | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 			const int childDepth = xmlTextReaderDepth( inputReader ); | ||||||
|  | 			xmlChar* childName = xmlTextReaderName( inputReader ); | ||||||
|  | 			if ( childName == NULL ) { | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 			if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "relation" ) == 1 ) { | ||||||
|  | 				xmlFree( childName ); | ||||||
|  | 				break; | ||||||
|  | 			} | ||||||
|  | 			if ( childType != 1 ) { | ||||||
|  | 				xmlFree( childName ); | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 
 | ||||||
|  | 			if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) { | ||||||
|  | 				xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" ); | ||||||
|  | 				xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" ); | ||||||
|  | 				if ( k != NULL && value != NULL ) { | ||||||
|  | 					if(xmlStrEqual(k, ( const xmlChar* ) "restriction" )){ | ||||||
|  | 						if(0 == std::string((const char *) value).find("only_")) { | ||||||
|  | 							restriction.restriction.flags.isOnly = true; | ||||||
|  | 						} | ||||||
|  | 					} | ||||||
|  | 					if ( xmlStrEqual(k, (const xmlChar *) "except") ) { | ||||||
|  | 						except_tag_string = (const char*) value; | ||||||
|  | 					} | ||||||
|  | 				} | ||||||
|  | 
 | ||||||
|  | 				if ( k != NULL ) { | ||||||
|  | 					xmlFree( k ); | ||||||
|  | 				} | ||||||
|  | 				if ( value != NULL ) { | ||||||
|  | 					xmlFree( value ); | ||||||
|  | 				} | ||||||
|  | 			} else if ( xmlStrEqual( childName, ( const xmlChar* ) "member" ) == 1 ) { | ||||||
|  | 				xmlChar* ref = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "ref" ); | ||||||
|  | 				if ( ref != NULL ) { | ||||||
|  | 					xmlChar * role = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "role" ); | ||||||
|  | 					xmlChar * type = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "type" ); | ||||||
|  | 
 | ||||||
|  | 					if(xmlStrEqual(role, (const xmlChar *) "to") && xmlStrEqual(type, (const xmlChar *) "way")) { | ||||||
|  | 						restriction.toWay = atoi((const char*) ref); | ||||||
|  | 					} | ||||||
|  | 					if(xmlStrEqual(role, (const xmlChar *) "from") && xmlStrEqual(type, (const xmlChar *) "way")) { | ||||||
|  | 						restriction.fromWay = atoi((const char*) ref); | ||||||
|  | 					} | ||||||
|  | 					if(xmlStrEqual(role, (const xmlChar *) "via") && xmlStrEqual(type, (const xmlChar *) "node")) { | ||||||
|  | 						restriction.restriction.viaNode = atoi((const char*) ref); | ||||||
|  | 					} | ||||||
|  | 
 | ||||||
|  | 					if(NULL != type) { | ||||||
|  | 						xmlFree( type ); | ||||||
|  | 					} | ||||||
|  | 					if(NULL != role) { | ||||||
|  | 						xmlFree( role ); | ||||||
|  | 					} | ||||||
|  | 					if(NULL != ref) { | ||||||
|  | 						xmlFree( ref ); | ||||||
|  | 					} | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			xmlFree( childName ); | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if( ShouldIgnoreRestriction(except_tag_string) ) { | ||||||
|  | 		restriction.fromWay = UINT_MAX;				 //workaround to ignore the restriction
 | ||||||
|  | 	} | ||||||
|  | 	return restriction; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | ExtractionWay XMLParser::_ReadXMLWay() { | ||||||
|  | 	ExtractionWay way; | ||||||
|  | 	if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) { | ||||||
|  | 		const int depth = xmlTextReaderDepth( inputReader ); | ||||||
|  | 		while ( xmlTextReaderRead( inputReader ) == 1 ) { | ||||||
|  | 			const int childType = xmlTextReaderNodeType( inputReader ); | ||||||
|  | 			if ( childType != 1 && childType != 15 ) { | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 			const int childDepth = xmlTextReaderDepth( inputReader ); | ||||||
|  | 			xmlChar* childName = xmlTextReaderName( inputReader ); | ||||||
|  | 			if ( childName == NULL ) { | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 
 | ||||||
|  | 			if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "way" ) == 1 ) { | ||||||
|  | 				xmlChar* id = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "id" ); | ||||||
|  | 				way.id = atoi((char*)id); | ||||||
|  | 				xmlFree(id); | ||||||
|  | 				xmlFree( childName ); | ||||||
|  | 				break; | ||||||
|  | 			} | ||||||
|  | 			if ( childType != 1 ) { | ||||||
|  | 				xmlFree( childName ); | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 
 | ||||||
|  | 			if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) { | ||||||
|  | 				xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" ); | ||||||
|  | 				xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" ); | ||||||
|  | 				//				cout << "->k=" << k << ", v=" << value << endl;
 | ||||||
|  | 				if ( k != NULL && value != NULL ) { | ||||||
|  | 					way.keyVals.Add(std::string( (char *) k ), std::string( (char *) value)); | ||||||
|  | 				} | ||||||
|  | 				if ( k != NULL ) { | ||||||
|  | 					xmlFree( k ); | ||||||
|  | 				} | ||||||
|  | 				if ( value != NULL ) { | ||||||
|  | 					xmlFree( value ); | ||||||
|  | 				} | ||||||
|  | 			} else if ( xmlStrEqual( childName, ( const xmlChar* ) "nd" ) == 1 ) { | ||||||
|  | 				xmlChar* ref = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "ref" ); | ||||||
|  | 				if ( ref != NULL ) { | ||||||
|  | 					way.path.push_back( atoi(( const char* ) ref ) ); | ||||||
|  | 					xmlFree( ref ); | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			xmlFree( childName ); | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return way; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | ImportNode XMLParser::_ReadXMLNode() { | ||||||
|  | 	ImportNode node; | ||||||
|  | 
 | ||||||
|  | 	xmlChar* attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "lat" ); | ||||||
|  | 	if ( attribute != NULL ) { | ||||||
|  | 		node.lat =  static_cast<NodeID>(100000.*atof(( const char* ) attribute ) ); | ||||||
|  | 		xmlFree( attribute ); | ||||||
|  | 	} | ||||||
|  | 	attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "lon" ); | ||||||
|  | 	if ( attribute != NULL ) { | ||||||
|  | 		node.lon =  static_cast<NodeID>(100000.*atof(( const char* ) attribute )); | ||||||
|  | 		xmlFree( attribute ); | ||||||
|  | 	} | ||||||
|  | 	attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "id" ); | ||||||
|  | 	if ( attribute != NULL ) { | ||||||
|  | 		node.id =  atoi(( const char* ) attribute ); | ||||||
|  | 		xmlFree( attribute ); | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) { | ||||||
|  | 		const int depth = xmlTextReaderDepth( inputReader ); | ||||||
|  | 		while ( xmlTextReaderRead( inputReader ) == 1 ) { | ||||||
|  | 			const int childType = xmlTextReaderNodeType( inputReader ); | ||||||
|  | 			// 1 = Element, 15 = EndElement
 | ||||||
|  | 			if ( childType != 1 && childType != 15 ) { | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 			const int childDepth = xmlTextReaderDepth( inputReader ); | ||||||
|  | 			xmlChar* childName = xmlTextReaderName( inputReader ); | ||||||
|  | 			if ( childName == NULL ) { | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 
 | ||||||
|  | 			if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "node" ) == 1 ) { | ||||||
|  | 				xmlFree( childName ); | ||||||
|  | 				break; | ||||||
|  | 			} | ||||||
|  | 			if ( childType != 1 ) { | ||||||
|  | 				xmlFree( childName ); | ||||||
|  | 				continue; | ||||||
|  | 			} | ||||||
|  | 
 | ||||||
|  | 			if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) { | ||||||
|  | 				xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" ); | ||||||
|  | 				xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" ); | ||||||
|  | 				if ( k != NULL && value != NULL ) { | ||||||
|  | 					node.keyVals.Add(std::string( reinterpret_cast<char*>(k) ), std::string( reinterpret_cast<char*>(value))); | ||||||
|  | 				} | ||||||
|  | 				if ( k != NULL ) { | ||||||
|  | 					xmlFree( k ); | ||||||
|  | 				} | ||||||
|  | 				if ( value != NULL ) { | ||||||
|  | 					xmlFree( value ); | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 
 | ||||||
|  | 			xmlFree( childName ); | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return node; | ||||||
|  | } | ||||||
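Coordinates are turned into fixed-point integers with five decimal places before they reach the callbacks, so one unit corresponds to roughly a metre. A tiny worked example, reusing the latitude from the Rakefile's Copenhagen bounding box further down:

    #include <cstdio>
    #include <cstdlib>

    int main() {
        const char* lat = "55.6972";  // attribute text as handed over by the XML reader
        const unsigned fixed = static_cast<unsigned>(100000. * std::atof(lat));
        std::printf("%u\n", fixed);   // prints 5569720
        return 0;
    }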
| @ -1,308 +1,42 @@ | |||||||
| /*
 | /*
 | ||||||
|     open source routing machine |  open source routing machine | ||||||
|     Copyright (C) Dennis Luxen, others 2010 |  Copyright (C) Dennis Luxen, others 2010 | ||||||
| 
 |   | ||||||
| This program is free software; you can redistribute it and/or modify |  This program is free software; you can redistribute it and/or modify | ||||||
| it under the terms of the GNU AFFERO General Public License as published by |  it under the terms of the GNU AFFERO General Public License as published by | ||||||
| the Free Software Foundation; either version 3 of the License, or |  the Free Software Foundation; either version 3 of the License, or | ||||||
| any later version. |  any later version. | ||||||
| 
 |   | ||||||
| This program is distributed in the hope that it will be useful, |  This program is distributed in the hope that it will be useful, | ||||||
| but WITHOUT ANY WARRANTY; without even the implied warranty of |  but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
| GNU General Public License for more details. |  GNU General Public License for more details. | ||||||
| 
 |   | ||||||
| You should have received a copy of the GNU Affero General Public License |  You should have received a copy of the GNU Affero General Public License | ||||||
| along with this program; if not, write to the Free Software |  along with this program; if not, write to the Free Software | ||||||
| Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA |  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
| or see http://www.gnu.org/licenses/agpl.txt.
 |  or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  */ |  */ | ||||||
| 
 | 
 | ||||||
| #ifndef XMLPARSER_H_ | #ifndef XMLPARSER_H_ | ||||||
| #define XMLPARSER_H_ | #define XMLPARSER_H_ | ||||||
| 
 | 
 | ||||||
| #include <boost/ref.hpp> |  | ||||||
| #include <libxml/xmlreader.h> | #include <libxml/xmlreader.h> | ||||||
| 
 | 
 | ||||||
| #include "../typedefs.h" | #include "../typedefs.h" | ||||||
| #include "BaseParser.h" | #include "BaseParser.h" | ||||||
| #include "ExtractorStructs.h" |  | ||||||
| #include "ExtractorCallbacks.h" |  | ||||||
| #include "ScriptingEnvironment.h" |  | ||||||
| #include "../DataStructures/HashTable.h" |  | ||||||
| #include "../DataStructures/InputReaderFactory.h" |  | ||||||
| 
 | 
 | ||||||
| class XMLParser : public BaseParser<ExtractorCallbacks, _Node, _RawRestrictionContainer, _Way> { | class XMLParser : public BaseParser { | ||||||
| public: | public: | ||||||
|     XMLParser(const char * filename) : externalMemory(NULL), myLuaState(NULL){ |     XMLParser(const char* filename, ExtractorCallbacks* ec, ScriptingEnvironment& se); | ||||||
|         WARN("Parsing plain .osm/.osm.bz2 is deprecated. Switch to .pbf"); |     bool ReadHeader(); | ||||||
|         inputReader = inputReaderFactory(filename); |     bool Parse(); | ||||||
|     } |      | ||||||
|     virtual ~XMLParser() {} |  | ||||||
| 
 |  | ||||||
|     void RegisterCallbacks(ExtractorCallbacks * em) { |  | ||||||
|         externalMemory = em; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     void RegisterScriptingEnvironment(ScriptingEnvironment & _se) { |  | ||||||
|         myLuaState = _se.getLuaStateForThreadID(0); |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     bool Init() { |  | ||||||
|         return (xmlTextReaderRead( inputReader ) == 1); |  | ||||||
|     } |  | ||||||
|     bool Parse() { |  | ||||||
|         while ( xmlTextReaderRead( inputReader ) == 1 ) { |  | ||||||
|             const int type = xmlTextReaderNodeType( inputReader ); |  | ||||||
| 
 |  | ||||||
|             //1 is Element
 |  | ||||||
|             if ( type != 1 ) |  | ||||||
|                 continue; |  | ||||||
| 
 |  | ||||||
|             xmlChar* currentName = xmlTextReaderName( inputReader ); |  | ||||||
|             if ( currentName == NULL ) |  | ||||||
|                 continue; |  | ||||||
| 
 |  | ||||||
|             if ( xmlStrEqual( currentName, ( const xmlChar* ) "node" ) == 1 ) { |  | ||||||
|                 ImportNode n = _ReadXMLNode(  ); |  | ||||||
|                 /** Pass the unpacked node to the LUA call back **/ |  | ||||||
|                 try { |  | ||||||
|                     luabind::call_function<int>( |  | ||||||
|                             myLuaState, |  | ||||||
|                             "node_function", |  | ||||||
|                             boost::ref(n) |  | ||||||
|                     ); |  | ||||||
|                     if(!externalMemory->nodeFunction(n)) |  | ||||||
|                         std::cerr << "[XMLParser] dense node not parsed" << std::endl; |  | ||||||
|                 } catch (const luabind::error &er) { |  | ||||||
|                     cerr << er.what() << endl; |  | ||||||
|                     lua_State* Ler=er.state(); |  | ||||||
|                     report_errors(Ler, -1); |  | ||||||
|                 } catch (std::exception & e) { |  | ||||||
|                     ERR(e.what()); |  | ||||||
|                 } catch (...) { |  | ||||||
|                     ERR("Unknown error occurred during XML node parsing!"); |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
| 
 |  | ||||||
|             if ( xmlStrEqual( currentName, ( const xmlChar* ) "way" ) == 1 ) { |  | ||||||
|                 string name; |  | ||||||
|                 _Way way = _ReadXMLWay( ); |  | ||||||
| 
 |  | ||||||
|                 /** Pass the unpacked way to the LUA call back **/ |  | ||||||
|                 try { |  | ||||||
|                     luabind::call_function<int>( |  | ||||||
|                             myLuaState, |  | ||||||
|                             "way_function", |  | ||||||
|                             boost::ref(way), |  | ||||||
|                             way.path.size() |  | ||||||
|                     ); |  | ||||||
|                     if(!externalMemory->wayFunction(way)) { |  | ||||||
|                         std::cerr << "[PBFParser] way not parsed" << std::endl; |  | ||||||
|                     } |  | ||||||
|                 } catch (const luabind::error &er) { |  | ||||||
|                     cerr << er.what() << endl; |  | ||||||
|                     lua_State* Ler=er.state(); |  | ||||||
|                     report_errors(Ler, -1); |  | ||||||
|                 } catch (std::exception & e) { |  | ||||||
|                     ERR(e.what()); |  | ||||||
|                 } catch (...) { |  | ||||||
|                     ERR("Unknown error occurred during XML way parsing!"); |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|             if ( xmlStrEqual( currentName, ( const xmlChar* ) "relation" ) == 1 ) { |  | ||||||
|                 _RawRestrictionContainer r = _ReadXMLRestriction(); |  | ||||||
|                 if(r.fromWay != UINT_MAX) { |  | ||||||
|                     if(!externalMemory->restrictionFunction(r)) { |  | ||||||
|                         std::cerr << "[XMLParser] restriction not parsed" << std::endl; |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|             xmlFree( currentName ); |  | ||||||
|         } |  | ||||||
|         return true; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
| private: | private: | ||||||
|     _RawRestrictionContainer _ReadXMLRestriction ( ) { |     _RawRestrictionContainer _ReadXMLRestriction(); | ||||||
|         _RawRestrictionContainer restriction; |     ExtractionWay _ReadXMLWay(); | ||||||
| 
 |     ImportNode _ReadXMLNode(); | ||||||
|         if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) { |  | ||||||
|             const int depth = xmlTextReaderDepth( inputReader );while ( xmlTextReaderRead( inputReader ) == 1 ) { |  | ||||||
|                 const int childType = xmlTextReaderNodeType( inputReader ); |  | ||||||
|                 if ( childType != 1 && childType != 15 ) |  | ||||||
|                     continue; |  | ||||||
|                 const int childDepth = xmlTextReaderDepth( inputReader ); |  | ||||||
|                 xmlChar* childName = xmlTextReaderName( inputReader ); |  | ||||||
|                 if ( childName == NULL ) |  | ||||||
|                     continue; |  | ||||||
| 
 |  | ||||||
|                 if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "relation" ) == 1 ) { |  | ||||||
|                     xmlFree( childName ); |  | ||||||
|                     break; |  | ||||||
|                 } |  | ||||||
|                 if ( childType != 1 ) { |  | ||||||
|                     xmlFree( childName ); |  | ||||||
|                     continue; |  | ||||||
|                 } |  | ||||||
| 
 |  | ||||||
|                 if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) { |  | ||||||
|                     xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" ); |  | ||||||
|                     xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" ); |  | ||||||
|                     if ( k != NULL && value != NULL ) { |  | ||||||
|                         if(xmlStrEqual(k, ( const xmlChar* ) "restriction" )){ |  | ||||||
|                             if(0 == std::string((const char *) value).find("only_")) |  | ||||||
|                                 restriction.restriction.flags.isOnly = true; |  | ||||||
|                         } |  | ||||||
| 
 |  | ||||||
|                     } |  | ||||||
|                     if ( k != NULL ) |  | ||||||
|                         xmlFree( k ); |  | ||||||
|                     if ( value != NULL ) |  | ||||||
|                         xmlFree( value ); |  | ||||||
|                 } else if ( xmlStrEqual( childName, ( const xmlChar* ) "member" ) == 1 ) { |  | ||||||
|                     xmlChar* ref = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "ref" ); |  | ||||||
|                     if ( ref != NULL ) { |  | ||||||
|                         xmlChar * role = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "role" ); |  | ||||||
|                         xmlChar * type = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "type" ); |  | ||||||
|                         if(xmlStrEqual(role, (const xmlChar *) "to") && xmlStrEqual(type, (const xmlChar *) "way")) { |  | ||||||
|                             restriction.toWay = atoi((const char*) ref); |  | ||||||
|                         } |  | ||||||
|                         if(xmlStrEqual(role, (const xmlChar *) "from") && xmlStrEqual(type, (const xmlChar *) "way")) { |  | ||||||
|                             restriction.fromWay = atoi((const char*) ref); |  | ||||||
|                         } |  | ||||||
|                         if(xmlStrEqual(role, (const xmlChar *) "via") && xmlStrEqual(type, (const xmlChar *) "node")) { |  | ||||||
|                             restriction.restriction.viaNode = atoi((const char*) ref); |  | ||||||
|                         } |  | ||||||
| 
 |  | ||||||
|                         if(NULL != type) |  | ||||||
|                             xmlFree( type ); |  | ||||||
|                         if(NULL != role) |  | ||||||
|                             xmlFree( role ); |  | ||||||
|                         if(NULL != ref) |  | ||||||
|                             xmlFree( ref ); |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|                 xmlFree( childName ); |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|         return restriction; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     _Way _ReadXMLWay( ) { |  | ||||||
|         _Way way; |  | ||||||
|         if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) { |  | ||||||
|             const int depth = xmlTextReaderDepth( inputReader ); |  | ||||||
|             while ( xmlTextReaderRead( inputReader ) == 1 ) { |  | ||||||
|                 const int childType = xmlTextReaderNodeType( inputReader ); |  | ||||||
|                 if ( childType != 1 && childType != 15 ) |  | ||||||
|                     continue; |  | ||||||
|                 const int childDepth = xmlTextReaderDepth( inputReader ); |  | ||||||
|                 xmlChar* childName = xmlTextReaderName( inputReader ); |  | ||||||
|                 if ( childName == NULL ) |  | ||||||
|                     continue; |  | ||||||
| 
 |  | ||||||
|                 if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "way" ) == 1 ) { |  | ||||||
|                     xmlChar* id = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "id" ); |  | ||||||
|                     way.id = atoi((char*)id); |  | ||||||
|                     xmlFree(id); |  | ||||||
|                     xmlFree( childName ); |  | ||||||
|                     break; |  | ||||||
|                 } |  | ||||||
|                 if ( childType != 1 ) { |  | ||||||
|                     xmlFree( childName ); |  | ||||||
|                     continue; |  | ||||||
|                 } |  | ||||||
| 
 |  | ||||||
|                 if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) { |  | ||||||
|                     xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" ); |  | ||||||
|                     xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" ); |  | ||||||
|                     //                cout << "->k=" << k << ", v=" << value << endl;
 |  | ||||||
|                     if ( k != NULL && value != NULL ) { |  | ||||||
| 
 |  | ||||||
|                         way.keyVals.Add(std::string( (char *) k ), std::string( (char *) value)); |  | ||||||
|                     } |  | ||||||
|                     if ( k != NULL ) |  | ||||||
|                         xmlFree( k ); |  | ||||||
|                     if ( value != NULL ) |  | ||||||
|                         xmlFree( value ); |  | ||||||
|                 } else if ( xmlStrEqual( childName, ( const xmlChar* ) "nd" ) == 1 ) { |  | ||||||
|                     xmlChar* ref = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "ref" ); |  | ||||||
|                     if ( ref != NULL ) { |  | ||||||
|                         way.path.push_back( atoi(( const char* ) ref ) ); |  | ||||||
|                         xmlFree( ref ); |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|                 xmlFree( childName ); |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|         return way; |  | ||||||
|     } |  | ||||||
| 
 |  | ||||||
|     ImportNode _ReadXMLNode( ) { |  | ||||||
|         ImportNode node; |  | ||||||
| 
 |  | ||||||
|         xmlChar* attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "lat" ); |  | ||||||
|         if ( attribute != NULL ) { |  | ||||||
|             node.lat =  static_cast<NodeID>(100000.*atof(( const char* ) attribute ) ); |  | ||||||
|             xmlFree( attribute ); |  | ||||||
|         } |  | ||||||
|         attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "lon" ); |  | ||||||
|         if ( attribute != NULL ) { |  | ||||||
|             node.lon =  static_cast<NodeID>(100000.*atof(( const char* ) attribute )); |  | ||||||
|             xmlFree( attribute ); |  | ||||||
|         } |  | ||||||
|         attribute = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "id" ); |  | ||||||
|         if ( attribute != NULL ) { |  | ||||||
|             node.id =  atoi(( const char* ) attribute ); |  | ||||||
|             xmlFree( attribute ); |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         if ( xmlTextReaderIsEmptyElement( inputReader ) != 1 ) { |  | ||||||
|             const int depth = xmlTextReaderDepth( inputReader ); |  | ||||||
|             while ( xmlTextReaderRead( inputReader ) == 1 ) { |  | ||||||
|                 const int childType = xmlTextReaderNodeType( inputReader ); |  | ||||||
|                 // 1 = Element, 15 = EndElement
 |  | ||||||
|                 if ( childType != 1 && childType != 15 ) |  | ||||||
|                     continue; |  | ||||||
|                 const int childDepth = xmlTextReaderDepth( inputReader ); |  | ||||||
|                 xmlChar* childName = xmlTextReaderName( inputReader ); |  | ||||||
|                 if ( childName == NULL ) |  | ||||||
|                     continue; |  | ||||||
| 
 |  | ||||||
|                 if ( depth == childDepth && childType == 15 && xmlStrEqual( childName, ( const xmlChar* ) "node" ) == 1 ) { |  | ||||||
|                     xmlFree( childName ); |  | ||||||
|                     break; |  | ||||||
|                 } |  | ||||||
|                 if ( childType != 1 ) { |  | ||||||
|                     xmlFree( childName ); |  | ||||||
|                     continue; |  | ||||||
|                 } |  | ||||||
| 
 |  | ||||||
|                 if ( xmlStrEqual( childName, ( const xmlChar* ) "tag" ) == 1 ) { |  | ||||||
|                     xmlChar* k = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "k" ); |  | ||||||
|                     xmlChar* value = xmlTextReaderGetAttribute( inputReader, ( const xmlChar* ) "v" ); |  | ||||||
|                     if ( k != NULL && value != NULL ) { |  | ||||||
|                         node.keyVals.Add(std::string( reinterpret_cast<char*>(k) ), std::string( reinterpret_cast<char*>(value))); |  | ||||||
|                     } |  | ||||||
|                     if ( k != NULL ) |  | ||||||
|                         xmlFree( k ); |  | ||||||
|                     if ( value != NULL ) |  | ||||||
|                         xmlFree( value ); |  | ||||||
|                 } |  | ||||||
| 
 |  | ||||||
|                 xmlFree( childName ); |  | ||||||
|             } |  | ||||||
|         } |  | ||||||
|         return node; |  | ||||||
|     } |  | ||||||
|     /* Input Reader */ |  | ||||||
|     xmlTextReaderPtr inputReader; |     xmlTextReaderPtr inputReader; | ||||||
| 
 |  | ||||||
|     //holds the callback functions and storage for our temporary data
 |  | ||||||
|     ExtractorCallbacks * externalMemory; |  | ||||||
| 
 |  | ||||||
|     lua_State *myLuaState; |  | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| #endif /* XMLPARSER_H_ */ | #endif /* XMLPARSER_H_ */ | ||||||
|  | |||||||
| @ -52,12 +52,12 @@ public: | |||||||
| 
 | 
 | ||||||
|         //query to helpdesk
 |         //query to helpdesk
 | ||||||
|         _Coordinate result; |         _Coordinate result; | ||||||
|         std::string JSONParameter, tmp; |         std::string tmp; | ||||||
|         //json
 |         //json
 | ||||||
| 
 | 
 | ||||||
| //        JSONParameter = routeParameters.options.Find("jsonp");
 | //        JSONParameter = routeParameters.options.Find("jsonp");
 | ||||||
|         if("" != routeParameters.jsonpParameter) { |         if("" != routeParameters.jsonpParameter) { | ||||||
|             reply.content += JSONParameter; |             reply.content += routeParameters.jsonpParameter; | ||||||
|             reply.content += "("; |             reply.content += "("; | ||||||
|         } |         } | ||||||
|         reply.status = http::Reply::ok; |         reply.status = http::Reply::ok; | ||||||
| @ -82,7 +82,7 @@ public: | |||||||
|         reply.content += ",\"transactionId\": \"OSRM Routing Engine JSON Locate (v0.3)\""; |         reply.content += ",\"transactionId\": \"OSRM Routing Engine JSON Locate (v0.3)\""; | ||||||
|         reply.content += ("}"); |         reply.content += ("}"); | ||||||
|         reply.headers.resize(3); |         reply.headers.resize(3); | ||||||
|         if("" != JSONParameter) { |         if("" != routeParameters.jsonpParameter) { | ||||||
|             reply.content += ")"; |             reply.content += ")"; | ||||||
|             reply.headers[1].name = "Content-Type"; |             reply.headers[1].name = "Content-Type"; | ||||||
|             reply.headers[1].value = "text/javascript"; |             reply.headers[1].value = "text/javascript"; | ||||||
|  | |||||||
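The two replacements above fix the JSONP path of this plugin: the local JSONParameter was never assigned, so the reply was prefixed with a bare "(" while the closing ")" and the text/javascript content type were skipped. With both checks now reading routeParameters.jsonpParameter, a callback-wrapped reply is produced whenever ?jsonp= is present. A minimal sketch of that wrapping (hypothetical helper, not part of the patch):

    #include <string>

    // Wrap a JSON body in the callback named by the jsonp query parameter;
    // an empty callback name leaves the body untouched.
    static std::string wrapJsonp(const std::string& json_body, const std::string& callback) {
        if (callback.empty()) {
            return json_body;
        }
        return callback + "(" + json_body + ")";  // served as text/javascript
    }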
| @ -60,7 +60,6 @@ public: | |||||||
|         nodeHelpDesk->FindPhantomNodeForCoordinate(routeParameters.coordinates[0], result, routeParameters.zoomLevel); |         nodeHelpDesk->FindPhantomNodeForCoordinate(routeParameters.coordinates[0], result, routeParameters.zoomLevel); | ||||||
| 
 | 
 | ||||||
|         std::string tmp; |         std::string tmp; | ||||||
|         std::string JSONParameter; |  | ||||||
|         //json
 |         //json
 | ||||||
| 
 | 
 | ||||||
|         if("" != routeParameters.jsonpParameter) { |         if("" != routeParameters.jsonpParameter) { | ||||||
| @ -93,7 +92,7 @@ public: | |||||||
|         reply.content += ",\"transactionId\":\"OSRM Routing Engine JSON Nearest (v0.3)\""; |         reply.content += ",\"transactionId\":\"OSRM Routing Engine JSON Nearest (v0.3)\""; | ||||||
|         reply.content += ("}"); |         reply.content += ("}"); | ||||||
|         reply.headers.resize(3); |         reply.headers.resize(3); | ||||||
|         if("" != JSONParameter) { |         if("" != routeParameters.jsonpParameter) { | ||||||
|             reply.content += ")"; |             reply.content += ")"; | ||||||
|             reply.headers[1].name = "Content-Type"; |             reply.headers[1].name = "Content-Type"; | ||||||
|             reply.headers[1].value = "text/javascript"; |             reply.headers[1].value = "text/javascript"; | ||||||
|  | |||||||
							
								
								
									
 Rakefile | 21
| @ -7,6 +7,7 @@ require 'sys/proctable' | |||||||
| DATA_FOLDER = 'sandbox' | DATA_FOLDER = 'sandbox' | ||||||
| PROFILE = 'bicycle' | PROFILE = 'bicycle' | ||||||
| OSRM_PORT = 5000 | OSRM_PORT = 5000 | ||||||
|  | PROFILES_FOLDER = '../profiles' | ||||||
| 
 | 
 | ||||||
| Cucumber::Rake::Task.new do |t| | Cucumber::Rake::Task.new do |t| | ||||||
|   t.cucumber_opts = %w{--format pretty} |   t.cucumber_opts = %w{--format pretty} | ||||||
| @ -16,7 +17,7 @@ areas = { | |||||||
|   :kbh => { :country => 'denmark', :bbox => 'top=55.6972 left=12.5222 right=12.624 bottom=55.6376' }, |   :kbh => { :country => 'denmark', :bbox => 'top=55.6972 left=12.5222 right=12.624 bottom=55.6376' }, | ||||||
|   :frd => { :country => 'denmark', :bbox => 'top=55.7007 left=12.4765 bottom=55.6576 right=12.5698' }, |   :frd => { :country => 'denmark', :bbox => 'top=55.7007 left=12.4765 bottom=55.6576 right=12.5698' }, | ||||||
|   :regh => { :country => 'denmark', :bbox => 'top=56.164 left=11.792 bottom=55.403 right=12.731' }, |   :regh => { :country => 'denmark', :bbox => 'top=56.164 left=11.792 bottom=55.403 right=12.731' }, | ||||||
|   :dk => { :country => 'denmark', :bbox => nil }, |   :denmark => { :country => 'denmark', :bbox => nil }, | ||||||
|   :skaane => { :country => 'sweden', :bbox => 'top=56.55 left=12.4 bottom=55.3 right=14.6' } |   :skaane => { :country => 'sweden', :bbox => 'top=56.55 left=12.4 bottom=55.3 right=14.6' } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| @ -115,11 +116,25 @@ end | |||||||
| 
 | 
 | ||||||
| desc "Reprocess OSM data." | desc "Reprocess OSM data." | ||||||
| task :process => :setup do | task :process => :setup do | ||||||
|  |   Dir.chdir DATA_FOLDER do | ||||||
|  |     raise "Error while extracting data." unless system "../osrm-extract #{osm_data_area_name}.osm.pbf #{PROFILES_FOLDER}/#{PROFILE}.lua" | ||||||
|  |     puts | ||||||
|  |     raise "Error while preparing data." unless system "../osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions #{PROFILES_FOLDER}/#{PROFILE}.lua" | ||||||
|  |     puts | ||||||
|  |   end | ||||||
|  | end | ||||||
|  | 
 | ||||||
|  | desc "Extract OSM data." | ||||||
|  | task :extract => :setup do | ||||||
|   Dir.chdir DATA_FOLDER do |   Dir.chdir DATA_FOLDER do | ||||||
|     raise "Error while extracting data." unless system "../osrm-extract #{osm_data_area_name}.osm.pbf ../profiles/#{PROFILE}.lua" |     raise "Error while extracting data." unless system "../osrm-extract #{osm_data_area_name}.osm.pbf ../profiles/#{PROFILE}.lua" | ||||||
|     puts |   end | ||||||
|  | end | ||||||
|  | 
 | ||||||
|  | desc "Prepare OSM data." | ||||||
|  | task :prepare => :setup do | ||||||
|  |   Dir.chdir DATA_FOLDER do | ||||||
|     raise "Error while preparing data." unless system "../osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions ../profiles/#{PROFILE}.lua" |     raise "Error while preparing data." unless system "../osrm-prepare #{osm_data_area_name}.osrm #{osm_data_area_name}.osrm.restrictions ../profiles/#{PROFILE}.lua" | ||||||
|     puts |  | ||||||
|   end |   end | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
|  | |||||||
| @ -21,23 +21,25 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #ifndef ALTERNATIVEROUTES_H_ | #ifndef ALTERNATIVEROUTES_H_ | ||||||
| #define ALTERNATIVEROUTES_H_ | #define ALTERNATIVEROUTES_H_ | ||||||
| 
 | 
 | ||||||
|  | #include <boost/unordered_map.hpp> | ||||||
|  | #include <vector> | ||||||
| #include <cmath> | #include <cmath> | ||||||
| 
 | 
 | ||||||
| #include "BasicRoutingInterface.h" | #include "BasicRoutingInterface.h" | ||||||
| 
 | 
 | ||||||
| const double VIAPATH_ALPHA   = 0.25; | const double VIAPATH_ALPHA   = 0.15; | ||||||
| const double VIAPATH_EPSILON = 0.25; | const double VIAPATH_EPSILON = 0.10; //alternative at most 15% longer
 | ||||||
| const double VIAPATH_GAMMA   = 0.80; | const double VIAPATH_GAMMA   = 0.75; //alternative shares at most 75% with the shortest.
 | ||||||
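The retuned constants parameterize the usual via-node alternative criteria of limited sharing, bounded stretch and local optimality: a candidate via node is kept only if its path is at most (1 + VIAPATH_EPSILON) times the shortest path's length and overlaps it by no more than VIAPATH_GAMMA of that length, while VIAPATH_ALPHA bounds the detour tolerated by the separate local-optimality test. A sketch of the first two filters, assuming the constants above; the real checks live further down in this class:

    // Illustrative filter only; lengths are in the same weight units the query heaps use.
    struct ViaCandidate {
        int length;   // d(s,v) + d(v,t)
        int sharing;  // length shared with the shortest s-t path
    };

    static inline bool passesStretchAndSharing(const ViaCandidate& c, const int shortest_path_length) {
        const bool short_enough     = c.length  <= (1. + VIAPATH_EPSILON) * shortest_path_length;
        const bool different_enough = c.sharing <= VIAPATH_GAMMA * shortest_path_length;
        return short_enough && different_enough;
    }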
| 
 | 
 | ||||||
| template<class QueryDataT> | template<class QueryDataT> | ||||||
| class AlternativeRouting : private BasicRoutingInterface<QueryDataT>{ | class AlternativeRouting : private BasicRoutingInterface<QueryDataT> { | ||||||
|     typedef BasicRoutingInterface<QueryDataT> super; |     typedef BasicRoutingInterface<QueryDataT> super; | ||||||
|     typedef std::pair<NodeID, int> PreselectedNode; |     typedef typename QueryDataT::Graph SearchGraph; | ||||||
|     typedef typename QueryDataT::HeapPtr HeapPtr; |     typedef typename QueryDataT::QueryHeap QueryHeap; | ||||||
|     typedef std::pair<NodeID, NodeID> UnpackEdge; |     typedef std::pair<NodeID, NodeID> SearchSpaceEdge; | ||||||
| 
 | 
 | ||||||
|     struct RankedCandidateNode { |     struct RankedCandidateNode { | ||||||
|         RankedCandidateNode(NodeID n, int l, int s) : node(n), length(l), sharing(s) {} |         RankedCandidateNode(const NodeID n, const int l, const int s) : node(n), length(l), sharing(s) {} | ||||||
|         NodeID node; |         NodeID node; | ||||||
|         int length; |         int length; | ||||||
|         int sharing; |         int sharing; | ||||||
| @ -45,9 +47,12 @@ class AlternativeRouting : private BasicRoutingInterface<QueryDataT>{ | |||||||
|             return (2*length + sharing) < (2*other.length + other.sharing); |             return (2*length + sharing) < (2*other.length + other.sharing); | ||||||
|         } |         } | ||||||
|     }; |     }; | ||||||
|  | 
 | ||||||
|  |     const SearchGraph * search_graph; | ||||||
|  | 
 | ||||||
| public: | public: | ||||||
| 
 | 
 | ||||||
|     AlternativeRouting(QueryDataT & qd) : super(qd) { } |     AlternativeRouting(QueryDataT & qd) : super(qd), search_graph(qd.graph) { } | ||||||
| 
 | 
 | ||||||
|     ~AlternativeRouting() {} |     ~AlternativeRouting() {} | ||||||
| 
 | 
 | ||||||
| @ -59,81 +64,115 @@ public: | |||||||
| 
 | 
 | ||||||
|         std::vector<NodeID> alternativePath; |         std::vector<NodeID> alternativePath; | ||||||
|         std::vector<NodeID> viaNodeCandidates; |         std::vector<NodeID> viaNodeCandidates; | ||||||
|         std::vector <NodeID> packedShortestPath; |         std::vector<SearchSpaceEdge> forward_search_space; | ||||||
|         std::vector<PreselectedNode> nodesThatPassPreselection; |         std::vector<SearchSpaceEdge> reverse_search_space; | ||||||
| 
 | 
 | ||||||
|         HeapPtr & forwardHeap = super::_queryData.forwardHeap; |         //Initialize Queues, semi-expensive because access to TSS invokes a system call
 | ||||||
|         HeapPtr & backwardHeap = super::_queryData.backwardHeap; |  | ||||||
|         HeapPtr & forwardHeap2 = super::_queryData.forwardHeap2; |  | ||||||
|         HeapPtr & backwardHeap2 = super::_queryData.backwardHeap2; |  | ||||||
| 
 |  | ||||||
|         //Initialize Queues
 |  | ||||||
|         super::_queryData.InitializeOrClearFirstThreadLocalStorage(); |         super::_queryData.InitializeOrClearFirstThreadLocalStorage(); | ||||||
|         int _upperBound = INT_MAX; |         super::_queryData.InitializeOrClearSecondThreadLocalStorage(); | ||||||
|         NodeID middle = UINT_MAX; |         super::_queryData.InitializeOrClearThirdThreadLocalStorage(); | ||||||
|         forwardHeap->Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); | 
 | ||||||
|  |         QueryHeap & forward_heap1 = *(super::_queryData.forwardHeap); | ||||||
|  |         QueryHeap & reverse_heap1 = *(super::_queryData.backwardHeap); | ||||||
|  |         QueryHeap & forward_heap2 = *(super::_queryData.forwardHeap2); | ||||||
|  |         QueryHeap & reverse_heap2 = *(super::_queryData.backwardHeap2); | ||||||
|  | 
 | ||||||
|  |         int upper_bound_to_shortest_path_distance = INT_MAX; | ||||||
|  |         NodeID middle_node = UINT_MAX; | ||||||
|  |         forward_heap1.Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); | ||||||
|         if(phantomNodePair.startPhantom.isBidirected() ) { |         if(phantomNodePair.startPhantom.isBidirected() ) { | ||||||
|             forwardHeap->Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); |             forward_heap1.Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); | ||||||
|         } |         } | ||||||
|         backwardHeap->Insert(phantomNodePair.targetPhantom.edgeBasedNode, phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.edgeBasedNode); |         reverse_heap1.Insert(phantomNodePair.targetPhantom.edgeBasedNode, phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.edgeBasedNode); | ||||||
|         if(phantomNodePair.targetPhantom.isBidirected() ) { |         if(phantomNodePair.targetPhantom.isBidirected() ) { | ||||||
|             backwardHeap->Insert(phantomNodePair.targetPhantom.edgeBasedNode+1, phantomNodePair.targetPhantom.weight2, phantomNodePair.targetPhantom.edgeBasedNode+1); |         	reverse_heap1.Insert(phantomNodePair.targetPhantom.edgeBasedNode+1, phantomNodePair.targetPhantom.weight2, phantomNodePair.targetPhantom.edgeBasedNode+1); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         const int offset = (phantomNodePair.startPhantom.isBidirected() ? std::max(phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.weight2) : phantomNodePair.startPhantom.weight1) |         const int forward_offset = phantomNodePair.startPhantom.weight1 + (phantomNodePair.startPhantom.isBidirected() ? phantomNodePair.startPhantom.weight2 : 0); | ||||||
|             + (phantomNodePair.targetPhantom.isBidirected() ? std::max(phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.weight2) : phantomNodePair.targetPhantom.weight1); |         const int reverse_offset = phantomNodePair.targetPhantom.weight1 + (phantomNodePair.targetPhantom.isBidirected() ? phantomNodePair.targetPhantom.weight2 : 0); | ||||||
| 
 | 
 | ||||||
|         //exploration dijkstra from nodes s and t until deletemin/(1+epsilon) > _lengthOfShortestPath
 |         //exploration dijkstra from nodes s and t until deletemin/(1+epsilon) > _lengthOfShortestPath
 | ||||||
|         while(forwardHeap->Size() + backwardHeap->Size() > 0){ |         while(0 < (forward_heap1.Size() + reverse_heap1.Size())){ | ||||||
|             if(forwardHeap->Size() > 0){ |             if(0 < forward_heap1.Size()){ | ||||||
|                 AlternativeRoutingStep(forwardHeap, backwardHeap, &middle, &_upperBound, 2*offset, true, viaNodeCandidates); |                 AlternativeRoutingStep<true >(forward_heap1, reverse_heap1, &middle_node, &upper_bound_to_shortest_path_distance, viaNodeCandidates, forward_search_space, forward_offset); | ||||||
|             } |             } | ||||||
|             if(backwardHeap->Size() > 0){ |             if(0 < reverse_heap1.Size()){ | ||||||
|                 AlternativeRoutingStep(backwardHeap, forwardHeap, &middle, &_upperBound, 2*offset, false, viaNodeCandidates); |                 AlternativeRoutingStep<false>(reverse_heap1, forward_heap1, &middle_node, &upper_bound_to_shortest_path_distance, viaNodeCandidates, reverse_search_space, reverse_offset); | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
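The loop above grows both search balls past the shortest-path radius; a direction is only abandoned once even its cheapest queued node, corrected by the phantom-node offset and scaled down by (1+VIAPATH_EPSILON), exceeds the current upper bound (the scaledDistance check in AlternativeRoutingStep below). A hedged sketch of that stopping predicate with made-up numbers:

    #include <cstdio>

    // Illustrative pruning predicate: stop expanding a direction once even the
    // cheapest queued node can no longer start an alternative that stays within
    // (1 + epsilon) times the shortest-path length.
    bool should_stop_direction(int min_key_in_heap, int edge_based_offset,
                               int upper_bound_shortest_path, double epsilon) {
        const double scaled = (min_key_in_heap - edge_based_offset) / (1.0 + epsilon);
        return scaled > upper_bound_shortest_path;
    }

    int main() {
        // Assumed numbers: shortest path 1000, offset 0, epsilon 0.10.
        std::printf("%d\n", should_stop_direction(1099, 0, 1000, 0.10)); // 0: keep searching
        std::printf("%d\n", should_stop_direction(1101, 0, 1000, 0.10)); // 1: prune this direction
        return 0;
    }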
|         std::sort(viaNodeCandidates.begin(), viaNodeCandidates.end()); |         sort_unique_resize(viaNodeCandidates); | ||||||
|         int size = std::unique(viaNodeCandidates.begin(), viaNodeCandidates.end())- viaNodeCandidates.begin(); |  | ||||||
|         viaNodeCandidates.resize(size); |  | ||||||
| 
 | 
 | ||||||
|         //save the (packed) shortest path and keep it for later use.
 |         std::vector<NodeID> packed_forward_path; | ||||||
|         //we need it during the checks and don't want to recompute it every time
 |         std::vector<NodeID> packed_reverse_path; | ||||||
|         super::RetrievePackedPathFromHeap(forwardHeap, backwardHeap, middle, packedShortestPath); |  | ||||||
| 
 | 
 | ||||||
|         //ch-pruning of via nodes in both search spaces
 |         super::RetrievePackedPathFromSingleHeap(forward_heap1, middle_node, packed_forward_path); | ||||||
|  |         super::RetrievePackedPathFromSingleHeap(reverse_heap1, middle_node, packed_reverse_path); | ||||||
|  |         boost::unordered_map<NodeID, int> approximated_forward_sharing; | ||||||
|  |         boost::unordered_map<NodeID, int> approximated_reverse_sharing; | ||||||
|  | 
 | ||||||
|  |         unsigned index_into_forward_path = 0; | ||||||
|  |         //sweep over search space, compute forward sharing for each current edge (u,v)
 | ||||||
|  |         BOOST_FOREACH(const SearchSpaceEdge & current_edge, forward_search_space) { | ||||||
|  |         	const NodeID u = current_edge.first; | ||||||
|  |         	const NodeID v = current_edge.second; | ||||||
|  |         	if(packed_forward_path.size() < index_into_forward_path && current_edge == forward_search_space[index_into_forward_path]) { | ||||||
|  |                 //current_edge is on shortest path => sharing(v) := queue.GetKey(u);
 | ||||||
|  |         		++index_into_forward_path; | ||||||
|  |         		approximated_forward_sharing[v] = forward_heap1.GetKey(u); | ||||||
|  |         	} else { | ||||||
|  |                 //off the shortest path => sharing(v) = sharing(u)
 | ||||||
|  |         		approximated_forward_sharing[v] = approximated_forward_sharing[u]; | ||||||
|  |         	} | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |         unsigned index_into_reverse_path = 0; | ||||||
|  |         //sweep over search space, compute backward sharing
 | ||||||
|  |         BOOST_FOREACH(const SearchSpaceEdge & current_edge, reverse_search_space) { | ||||||
|  |         	const NodeID u = current_edge.first; | ||||||
|  |         	const NodeID v = current_edge.second; | ||||||
|  |         	if(packed_reverse_path.size() < index_into_reverse_path && current_edge == reverse_search_space[index_into_reverse_path]) { | ||||||
|  |                 //current_edge is on shortest path => sharing(v) := queue.GetKey(u);
 | ||||||
|  |         		++index_into_reverse_path; | ||||||
|  |         		approximated_reverse_sharing[v] = reverse_heap1.GetKey(u); | ||||||
|  |         	} else { | ||||||
|  |                 //off the shortest path => sharing(v) = sharing(u)
 | ||||||
|  |         		approximated_reverse_sharing[v] = approximated_reverse_sharing[u]; | ||||||
|  |         	} | ||||||
|  |         } | ||||||
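The two sweeps above replay the recorded search-space edges in settlement order to approximate, for every reached node v, how much of its path coincides with the shortest path: when the settled edge (u,v) is taken to be the next shortest-path edge, sharing(v) becomes the heap key of u, otherwise v simply inherits sharing(u). A minimal sketch of that recurrence under those assumptions; all names are invented, and where the committed guard compares size < index the sketch just consults a set of shortest-path edges:

    #include <cstdio>
    #include <map>
    #include <set>
    #include <utility>
    #include <vector>

    typedef unsigned Node;
    typedef std::pair<Node, Node> Edge;

    // sharing(v) = key(u) if the parent edge (u,v) lies on the shortest path,
    // otherwise sharing(v) = sharing(u). heap_key plays the role of GetKey().
    std::map<Node, int> approximate_sharing(const std::vector<Edge> & search_space,
                                            const std::set<Edge> & shortest_path_edges,
                                            const std::map<Node, int> & heap_key) {
        std::map<Node, int> sharing;
        for (std::vector<Edge>::const_iterator it = search_space.begin(); it != search_space.end(); ++it) {
            const Node u = it->first, v = it->second;
            std::map<Node, int>::const_iterator key_of_u = heap_key.find(u);
            if (shortest_path_edges.count(*it) && key_of_u != heap_key.end()) {
                sharing[v] = key_of_u->second; // on the shortest path: share everything up to u
            } else {
                sharing[v] = sharing[u];       // off the shortest path: inherit u's overlap (0 for the root)
            }
        }
        return sharing;
    }

    int main() {
        // Tiny assumed search space rooted at node 0; the shortest path is 0-1-2.
        std::vector<Edge> search_space;
        search_space.push_back(Edge(0, 1));
        search_space.push_back(Edge(1, 2));
        search_space.push_back(Edge(1, 3)); // leaves the shortest path at node 1
        std::set<Edge> sp_edges(search_space.begin(), search_space.begin() + 2);
        std::map<Node, int> heap_key;
        heap_key[0] = 0; heap_key[1] = 5; heap_key[2] = 9; heap_key[3] = 12;
        std::map<Node, int> sharing = approximate_sharing(search_space, sp_edges, heap_key);
        std::printf("sharing(2)=%d sharing(3)=%d\n", sharing[2], sharing[3]); // 5 and 0
        return 0;
    }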
|  |         std::vector<NodeID> nodes_that_passed_preselection; | ||||||
|         BOOST_FOREACH(const NodeID node, viaNodeCandidates) { |         BOOST_FOREACH(const NodeID node, viaNodeCandidates) { | ||||||
|             if(node == middle) //subpath optimality tells us that this case is just the shortest path
 |             int approximated_sharing = approximated_forward_sharing[node] + approximated_reverse_sharing[node]; | ||||||
|                 continue; |             int approximated_length = forward_heap1.GetKey(node)+reverse_heap1.GetKey(node); | ||||||
|  |             bool lengthPassed = (approximated_length < upper_bound_to_shortest_path_distance*(1+VIAPATH_EPSILON)); | ||||||
|  |             bool sharingPassed = (approximated_sharing <= upper_bound_to_shortest_path_distance*VIAPATH_GAMMA); | ||||||
|  |             bool stretchPassed = approximated_length - approximated_sharing < (1.+VIAPATH_EPSILON)*(upper_bound_to_shortest_path_distance-approximated_sharing); | ||||||
| 
 | 
 | ||||||
|             int sharing = approximateAmountOfSharing(node, forwardHeap, backwardHeap, packedShortestPath); |             if(lengthPassed && sharingPassed && stretchPassed) { | ||||||
|             int length1 = forwardHeap->GetKey(node); |                 nodes_that_passed_preselection.push_back(node); | ||||||
|             int length2 = backwardHeap->GetKey(node); |             } | ||||||
|             bool lengthPassed = (length1+length2 < _upperBound*(1+VIAPATH_EPSILON)); |  | ||||||
|             bool sharingPassed = (sharing <= _upperBound*VIAPATH_GAMMA); |  | ||||||
|             bool stretchPassed = length1+length2 - sharing < (1.+VIAPATH_EPSILON)*(_upperBound-sharing); |  | ||||||
| 
 |  | ||||||
|             if(lengthPassed && sharingPassed && stretchPassed) |  | ||||||
|                 nodesThatPassPreselection.push_back(std::make_pair(node, length1+length2)); |  | ||||||
|         } |         } | ||||||
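With VIAPATH_EPSILON = 0.10 and VIAPATH_GAMMA = 0.75 the loop above keeps a via node only if the approximated via route is at most 10% longer than the shortest path, shares at most 75% of that length with it, and its detour portion also stays within the 10% bound (the stretch test). A small worked check with assumed round numbers, not taken from any real query:

    #include <cstdio>

    int main() {
        // Assumed values: shortest path 1000, approximated via route 1080, overlap 600.
        const double epsilon = 0.10, gamma = 0.75;
        const int upper_bound = 1000, length = 1080, sharing = 600;

        const bool length_passed  = length < upper_bound * (1.0 + epsilon);       // 1080 < 1100 -> true
        const bool sharing_passed = sharing <= upper_bound * gamma;                // 600 <= 750  -> true
        const bool stretch_passed = (length - sharing) <
                                    (1.0 + epsilon) * (upper_bound - sharing);     // 480 < 440   -> false
        std::printf("length %d sharing %d stretch %d\n",
                    length_passed, sharing_passed, stretch_passed);
        return 0;
    }

So a candidate can look short enough overall and still be rejected because the part that deviates from the shortest path is, on its own, too much of a detour.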
| 
 | 
 | ||||||
|  |         std::vector<NodeID> & packedShortestPath = packed_forward_path; | ||||||
|  |         std::reverse(packedShortestPath.begin(), packedShortestPath.end()); | ||||||
|  |         packedShortestPath.push_back(middle_node); | ||||||
|  |         packedShortestPath.insert(packedShortestPath.end(),packed_reverse_path.begin(), packed_reverse_path.end()); | ||||||
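The four lines above splice the packed shortest path back together from the two half searches: the forward half is unwound from the meeting point towards the source, so it is reversed, then the meeting node and the reverse half are appended. A compilable sketch of the same splice with illustrative types:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    typedef unsigned Node;

    // Reverse the forward half, append the meeting node, then append the reverse half.
    std::vector<Node> assemble_packed_path(std::vector<Node> forward_half, Node middle,
                                           const std::vector<Node> & reverse_half) {
        std::reverse(forward_half.begin(), forward_half.end());
        forward_half.push_back(middle);
        forward_half.insert(forward_half.end(), reverse_half.begin(), reverse_half.end());
        return forward_half;
    }

    int main() {
        std::vector<Node> forward_half = {2, 1, 0}; // as unwound from the forward heap
        std::vector<Node> reverse_half = {4, 5};
        std::vector<Node> packed = assemble_packed_path(forward_half, 3, reverse_half);
        for (std::size_t i = 0; i < packed.size(); ++i) {
            std::printf("%u ", packed[i]); // prints 0 1 2 3 4 5
        }
        std::printf("\n");
        return 0;
    }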
|         std::vector<RankedCandidateNode > rankedCandidates; |         std::vector<RankedCandidateNode > rankedCandidates; | ||||||
| 
 | 
 | ||||||
|         //prioritizing via nodes for deep inspection
 |         //prioritizing via nodes for deep inspection
 | ||||||
|         BOOST_FOREACH(const PreselectedNode node, nodesThatPassPreselection) { |         BOOST_FOREACH(const NodeID node, nodes_that_passed_preselection) { | ||||||
|             int lengthOfViaPath = 0, sharingOfViaPath = 0; |             int lengthOfViaPath = 0, sharingOfViaPath = 0; | ||||||
| 
 |             computeLengthAndSharingOfViaPath(node, &lengthOfViaPath, &sharingOfViaPath, forward_offset+reverse_offset, packedShortestPath); | ||||||
|             computeLengthAndSharingOfViaPath(node, &lengthOfViaPath, &sharingOfViaPath, offset, packedShortestPath); |             if(sharingOfViaPath <= upper_bound_to_shortest_path_distance*VIAPATH_GAMMA) { | ||||||
|             if(sharingOfViaPath <= VIAPATH_GAMMA*_upperBound) |                 rankedCandidates.push_back(RankedCandidateNode(node, lengthOfViaPath, sharingOfViaPath)); | ||||||
|                 rankedCandidates.push_back(RankedCandidateNode(node.first, lengthOfViaPath, sharingOfViaPath)); |             } | ||||||
|         } |         } | ||||||
| 
 |  | ||||||
|         std::sort(rankedCandidates.begin(), rankedCandidates.end()); |         std::sort(rankedCandidates.begin(), rankedCandidates.end()); | ||||||
| 
 | 
 | ||||||
|         NodeID selectedViaNode = UINT_MAX; |         NodeID selectedViaNode = UINT_MAX; | ||||||
|         int lengthOfViaPath = INT_MAX; |         int lengthOfViaPath = INT_MAX; | ||||||
|         NodeID s_v_middle = UINT_MAX, v_t_middle = UINT_MAX; |         NodeID s_v_middle = UINT_MAX, v_t_middle = UINT_MAX; | ||||||
|         BOOST_FOREACH(const RankedCandidateNode candidate, rankedCandidates){ |         BOOST_FOREACH(const RankedCandidateNode & candidate, rankedCandidates){ | ||||||
|             if(viaNodeCandidatePasses_T_Test(forwardHeap, backwardHeap, forwardHeap2, backwardHeap2, candidate, offset, _upperBound, &lengthOfViaPath, &s_v_middle, &v_t_middle)) { |             if(viaNodeCandidatePasses_T_Test(forward_heap1, reverse_heap1, forward_heap2, reverse_heap2, candidate, forward_offset+reverse_offset, upper_bound_to_shortest_path_distance, &lengthOfViaPath, &s_v_middle, &v_t_middle)) { | ||||||
|                 // select first admissible
 |                 // select first admissible
 | ||||||
|                 selectedViaNode = candidate.node; |                 selectedViaNode = candidate.node; | ||||||
|                 break; |                 break; | ||||||
| @ -141,15 +180,15 @@ public: | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //Unpack shortest path and alternative, if they exist
 |         //Unpack shortest path and alternative, if they exist
 | ||||||
|         if(INT_MAX != _upperBound) { |         if(INT_MAX != upper_bound_to_shortest_path_distance) { | ||||||
|             super::UnpackPath(packedShortestPath, rawRouteData.computedShortestPath); |             super::UnpackPath(packedShortestPath, rawRouteData.computedShortestPath); | ||||||
|             rawRouteData.lengthOfShortestPath = _upperBound; |             rawRouteData.lengthOfShortestPath = upper_bound_to_shortest_path_distance; | ||||||
|         } else { |         } else { | ||||||
|             rawRouteData.lengthOfShortestPath = INT_MAX; |             rawRouteData.lengthOfShortestPath = INT_MAX; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         if(selectedViaNode != UINT_MAX) { |         if(selectedViaNode != UINT_MAX) { | ||||||
|             retrievePackedViaPath(forwardHeap, backwardHeap, forwardHeap2, backwardHeap2, s_v_middle, v_t_middle, rawRouteData.computedAlternativePath); |             retrievePackedViaPath(forward_heap1, reverse_heap1, forward_heap2, reverse_heap2, s_v_middle, v_t_middle, rawRouteData.computedAlternativePath); | ||||||
|             rawRouteData.lengthOfAlternativePath = lengthOfViaPath; |             rawRouteData.lengthOfAlternativePath = lengthOfViaPath; | ||||||
|         } else { |         } else { | ||||||
|             rawRouteData.lengthOfAlternativePath = INT_MAX; |             rawRouteData.lengthOfAlternativePath = INT_MAX; | ||||||
| @ -158,7 +197,7 @@ public: | |||||||
| 
 | 
 | ||||||
| private: | private: | ||||||
|     //unpack <s,..,v,..,t> by exploring search spaces from v
 |     //unpack <s,..,v,..,t> by exploring search spaces from v
 | ||||||
|     inline void retrievePackedViaPath(const HeapPtr & _forwardHeap1, const HeapPtr & _backwardHeap1, const HeapPtr & _forwardHeap2, const HeapPtr & _backwardHeap2, |     inline void retrievePackedViaPath(QueryHeap & _forwardHeap1, QueryHeap & _backwardHeap1, QueryHeap & _forwardHeap2, QueryHeap & _backwardHeap2, | ||||||
|             const NodeID s_v_middle, const NodeID v_t_middle, std::vector<_PathData> & unpackedPath) { |             const NodeID s_v_middle, const NodeID v_t_middle, std::vector<_PathData> & unpackedPath) { | ||||||
|         //unpack [s,v)
 |         //unpack [s,v)
 | ||||||
|         std::vector<NodeID> packed_s_v_path, packed_v_t_path; |         std::vector<NodeID> packed_s_v_path, packed_v_t_path; | ||||||
| @ -170,16 +209,16 @@ private: | |||||||
|         super::UnpackPath(packed_s_v_path, unpackedPath); |         super::UnpackPath(packed_s_v_path, unpackedPath); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline void computeLengthAndSharingOfViaPath(const PreselectedNode& node, int *lengthOfViaPath, int *sharingOfViaPath, |     inline void computeLengthAndSharingOfViaPath(const NodeID via_node, int *real_length_of_via_path, int *sharing_of_via_path, | ||||||
|             const int offset, const std::vector<NodeID> & packedShortestPath) { |             const int offset, const std::vector<NodeID> & packed_shortest_path) { | ||||||
|         //compute and unpack <s,..,v> and <v,..,t> by exploring search spaces from v and intersecting against queues
 |         //compute and unpack <s,..,v> and <v,..,t> by exploring search spaces from v and intersecting against queues
 | ||||||
|         //only half-searches have to be done at this stage
 |         //only half-searches have to be done at this stage
 | ||||||
|         super::_queryData.InitializeOrClearSecondThreadLocalStorage(); |         super::_queryData.InitializeOrClearSecondThreadLocalStorage(); | ||||||
| 
 | 
 | ||||||
|         HeapPtr & existingForwardHeap  = super::_queryData.forwardHeap; |         QueryHeap & existingForwardHeap  = *super::_queryData.forwardHeap; | ||||||
|         HeapPtr & existingBackwardHeap = super::_queryData.backwardHeap; |         QueryHeap & existingBackwardHeap = *super::_queryData.backwardHeap; | ||||||
|         HeapPtr & newForwardHeap       = super::_queryData.forwardHeap2; |         QueryHeap & newForwardHeap       = *super::_queryData.forwardHeap2; | ||||||
|         HeapPtr & newBackwardHeap      = super::_queryData.backwardHeap2; |         QueryHeap & newBackwardHeap      = *super::_queryData.backwardHeap2; | ||||||
| 
 | 
 | ||||||
|         std::vector < NodeID > packed_s_v_path; |         std::vector < NodeID > packed_s_v_path; | ||||||
|         std::vector < NodeID > packed_v_t_path; |         std::vector < NodeID > packed_v_t_path; | ||||||
| @ -189,18 +228,18 @@ private: | |||||||
| 
 | 
 | ||||||
|         NodeID s_v_middle = UINT_MAX; |         NodeID s_v_middle = UINT_MAX; | ||||||
|         int upperBoundFor_s_v_Path = INT_MAX;//compute path <s,..,v> by reusing forward search from s
 |         int upperBoundFor_s_v_Path = INT_MAX;//compute path <s,..,v> by reusing forward search from s
 | ||||||
|         newBackwardHeap->Insert(node.first, 0, node.first); |         newBackwardHeap.Insert(via_node, 0, via_node); | ||||||
|         while (newBackwardHeap->Size() > 0) { |         while (0 < newBackwardHeap.Size()) { | ||||||
|             super::RoutingStep(newBackwardHeap, existingForwardHeap, &s_v_middle, &upperBoundFor_s_v_Path, 2 * offset, false); |             super::RoutingStep(newBackwardHeap, existingForwardHeap, &s_v_middle, &upperBoundFor_s_v_Path, 2 * offset, false); | ||||||
|         } |         } | ||||||
|         //compute path <v,..,t> by reusing backward search from node t
 |         //compute path <v,..,t> by reusing backward search from node t
 | ||||||
|         NodeID v_t_middle = UINT_MAX; |         NodeID v_t_middle = UINT_MAX; | ||||||
|         int upperBoundFor_v_t_Path = INT_MAX; |         int upperBoundFor_v_t_Path = INT_MAX; | ||||||
|         newForwardHeap->Insert(node.first, 0, node.first); |         newForwardHeap.Insert(via_node, 0, via_node); | ||||||
|         while (newForwardHeap->Size() > 0) { |         while (0 < newForwardHeap.Size() ) { | ||||||
|             super::RoutingStep(newForwardHeap, existingBackwardHeap, &v_t_middle, &upperBoundFor_v_t_Path, 2 * offset, true); |             super::RoutingStep(newForwardHeap, existingBackwardHeap, &v_t_middle, &upperBoundFor_v_t_Path, 2 * offset, true); | ||||||
|         } |         } | ||||||
|         *lengthOfViaPath = upperBoundFor_s_v_Path + upperBoundFor_v_t_Path; |         *real_length_of_via_path = upperBoundFor_s_v_Path + upperBoundFor_v_t_Path; | ||||||
| 
 | 
 | ||||||
|         if(UINT_MAX == s_v_middle || UINT_MAX == v_t_middle) |         if(UINT_MAX == s_v_middle || UINT_MAX == v_t_middle) | ||||||
|             return; |             return; | ||||||
| @ -211,35 +250,35 @@ private: | |||||||
| 
 | 
 | ||||||
|         //partial unpacking, compute sharing
 |         //partial unpacking, compute sharing
 | ||||||
|         //First partially unpack s-->v until paths deviate, note length of common path.
 |         //First partially unpack s-->v until paths deviate, note length of common path.
 | ||||||
|         for (unsigned i = 0, lengthOfPackedPath = std::min( packed_s_v_path.size(), packedShortestPath.size()) - 1; (i < lengthOfPackedPath); ++i) { |         for (unsigned i = 0, lengthOfPackedPath = std::min( packed_s_v_path.size(), packed_shortest_path.size()) - 1; (i < lengthOfPackedPath); ++i) { | ||||||
|             if (packed_s_v_path[i] == packedShortestPath[i] && packed_s_v_path[i + 1] == packedShortestPath[i + 1]) { |             if (packed_s_v_path[i] == packed_shortest_path[i] && packed_s_v_path[i + 1] == packed_shortest_path[i + 1]) { | ||||||
|                 typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection(packed_s_v_path[i], packed_s_v_path[i + 1]); |                 typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection(packed_s_v_path[i], packed_s_v_path[i + 1]); | ||||||
|                 *sharingOfViaPath += super::_queryData.graph->GetEdgeData(edgeID).distance; |                 *sharing_of_via_path += search_graph->GetEdgeData(edgeID).distance; | ||||||
|             } else { |             } else { | ||||||
|                 if (packed_s_v_path[i] == packedShortestPath[i]) { |                 if (packed_s_v_path[i] == packed_shortest_path[i]) { | ||||||
|                     super::UnpackEdge(packed_s_v_path[i], packed_s_v_path[i+1], partiallyUnpackedViaPath); |                     super::UnpackEdge(packed_s_v_path[i], packed_s_v_path[i+1], partiallyUnpackedViaPath); | ||||||
|                     super::UnpackEdge(packedShortestPath[i], packedShortestPath[i+1], partiallyUnpackedShortestPath); |                     super::UnpackEdge(packed_shortest_path[i], packed_shortest_path[i+1], partiallyUnpackedShortestPath); | ||||||
|                     break; |                     break; | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         //traverse partially unpacked edge and note common prefix
 |         //traverse partially unpacked edge and note common prefix
 | ||||||
|         for (int i = 0, lengthOfPackedPath = std::min( partiallyUnpackedViaPath.size(), partiallyUnpackedShortestPath.size()) - 1; (i < lengthOfPackedPath) && (partiallyUnpackedViaPath[i] == partiallyUnpackedShortestPath[i] && partiallyUnpackedViaPath[i+1] == partiallyUnpackedShortestPath[i+1]); ++i) { |         for (int i = 0, lengthOfPackedPath = std::min( partiallyUnpackedViaPath.size(), partiallyUnpackedShortestPath.size()) - 1; (i < lengthOfPackedPath) && (partiallyUnpackedViaPath[i] == partiallyUnpackedShortestPath[i] && partiallyUnpackedViaPath[i+1] == partiallyUnpackedShortestPath[i+1]); ++i) { | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection(partiallyUnpackedViaPath[i], partiallyUnpackedViaPath[i+1]); |             typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection(partiallyUnpackedViaPath[i], partiallyUnpackedViaPath[i+1]); | ||||||
|             *sharingOfViaPath += super::_queryData.graph->GetEdgeData(edgeID).distance; |             *sharing_of_via_path += search_graph->GetEdgeData(edgeID).distance; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         //Second, partially unpack v-->t in reverse order until paths deviate and note lengths
 |         //Second, partially unpack v-->t in reverse order until paths deviate and note lengths
 | ||||||
|         int viaPathIndex = packed_v_t_path.size() - 1; |         int viaPathIndex = packed_v_t_path.size() - 1; | ||||||
|         int shortestPathIndex = packedShortestPath.size() - 1; |         int shortestPathIndex = packed_shortest_path.size() - 1; | ||||||
|         for (; viaPathIndex > 0 && shortestPathIndex > 0; --viaPathIndex,--shortestPathIndex ) { |         for (; viaPathIndex > 0 && shortestPathIndex > 0; --viaPathIndex,--shortestPathIndex ) { | ||||||
|             if (packed_v_t_path[viaPathIndex - 1] == packedShortestPath[shortestPathIndex - 1] && packed_v_t_path[viaPathIndex] == packedShortestPath[shortestPathIndex]) { |             if (packed_v_t_path[viaPathIndex - 1] == packed_shortest_path[shortestPathIndex - 1] && packed_v_t_path[viaPathIndex] == packed_shortest_path[shortestPathIndex]) { | ||||||
|                 typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection( packed_v_t_path[viaPathIndex - 1], packed_v_t_path[viaPathIndex]); |                 typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection( packed_v_t_path[viaPathIndex - 1], packed_v_t_path[viaPathIndex]); | ||||||
|                 *sharingOfViaPath += super::_queryData.graph->GetEdgeData(edgeID).distance; |                 *sharing_of_via_path += search_graph->GetEdgeData(edgeID).distance; | ||||||
|             } else { |             } else { | ||||||
|                 if (packed_v_t_path[viaPathIndex] == packedShortestPath[shortestPathIndex]) { |                 if (packed_v_t_path[viaPathIndex] == packed_shortest_path[shortestPathIndex]) { | ||||||
|                     super::UnpackEdge(packed_v_t_path[viaPathIndex-1], packed_v_t_path[viaPathIndex], partiallyUnpackedViaPath); |                     super::UnpackEdge(packed_v_t_path[viaPathIndex-1], packed_v_t_path[viaPathIndex], partiallyUnpackedViaPath); | ||||||
|                     super::UnpackEdge(packedShortestPath[shortestPathIndex-1] , packedShortestPath[shortestPathIndex], partiallyUnpackedShortestPath); |                     super::UnpackEdge(packed_shortest_path[shortestPathIndex-1] , packed_shortest_path[shortestPathIndex], partiallyUnpackedShortestPath); | ||||||
|                     break; |                     break; | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| @ -249,16 +288,16 @@ private: | |||||||
|         shortestPathIndex = partiallyUnpackedShortestPath.size() - 1; |         shortestPathIndex = partiallyUnpackedShortestPath.size() - 1; | ||||||
|         for (; viaPathIndex > 0 && shortestPathIndex > 0; --viaPathIndex,--shortestPathIndex) { |         for (; viaPathIndex > 0 && shortestPathIndex > 0; --viaPathIndex,--shortestPathIndex) { | ||||||
|             if (partiallyUnpackedViaPath[viaPathIndex - 1] == partiallyUnpackedShortestPath[shortestPathIndex - 1] && partiallyUnpackedViaPath[viaPathIndex] == partiallyUnpackedShortestPath[shortestPathIndex]) { |             if (partiallyUnpackedViaPath[viaPathIndex - 1] == partiallyUnpackedShortestPath[shortestPathIndex - 1] && partiallyUnpackedViaPath[viaPathIndex] == partiallyUnpackedShortestPath[shortestPathIndex]) { | ||||||
|                 typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection( partiallyUnpackedViaPath[viaPathIndex - 1], partiallyUnpackedViaPath[viaPathIndex]); |                 typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection( partiallyUnpackedViaPath[viaPathIndex - 1], partiallyUnpackedViaPath[viaPathIndex]); | ||||||
|                 *sharingOfViaPath += super::_queryData.graph->GetEdgeData(edgeID).distance; |                 *sharing_of_via_path += search_graph->GetEdgeData(edgeID).distance; | ||||||
|             } else { |             } else { | ||||||
|                 break; |                 break; | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         //finished partial unpacking spree! Amount of sharing is stored to appropriate poiner variable
 |         //finished partial unpacking spree! Amount of sharing is stored to appropriate pointer variable
 | ||||||
|     } |     } | ||||||
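computeLengthAndSharingOfViaPath measures the exact overlap by walking the packed via path and the packed shortest path from both ends, summing edge weights while the two coincide and partially unpacking the first deviating shortcut to refine the boundary. The sketch below shows only the simple prefix part of that idea; edge_weight stands in for FindEdgeInEitherDirection plus GetEdgeData().distance, and every name here is made up for illustration:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    typedef unsigned Node;

    // Sum the weights of leading edges that two packed (shortcut-level) paths share.
    template <typename EdgeWeightFn>
    int shared_prefix_length(const std::vector<Node> & a, const std::vector<Node> & b,
                             EdgeWeightFn edge_weight) {
        int shared = 0;
        const std::size_t n = std::min(a.size(), b.size());
        for (std::size_t i = 0; i + 1 < n; ++i) {
            if (a[i] == b[i] && a[i + 1] == b[i + 1]) {
                shared += edge_weight(a[i], a[i + 1]);
            } else {
                break; // first deviation; the committed code would unpack this shortcut once more
            }
        }
        return shared;
    }

    int main() {
        std::vector<Node> via_path = {0, 1, 4, 5};
        std::vector<Node> shortest = {0, 1, 2, 3};
        // Assumed unit weights: only the edge (0,1) is common, so the overlap is 1.
        int overlap = shared_prefix_length(via_path, shortest, [](Node, Node) { return 1; });
        std::printf("shared prefix length: %d\n", overlap);
        return 0;
    }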
| 
 | 
 | ||||||
|     inline int approximateAmountOfSharing(const NodeID middleNodeIDOfAlternativePath, HeapPtr & _forwardHeap, HeapPtr & _backwardHeap, const std::vector<NodeID> & packedShortestPath) { |     inline int approximateAmountOfSharing(const NodeID middleNodeIDOfAlternativePath, QueryHeap & _forwardHeap, QueryHeap & _backwardHeap, const std::vector<NodeID> & packedShortestPath) { | ||||||
|         std::vector<NodeID> packedAlternativePath; |         std::vector<NodeID> packedAlternativePath; | ||||||
|         super::RetrievePackedPathFromHeap(_forwardHeap, _backwardHeap, middleNodeIDOfAlternativePath, packedAlternativePath); |         super::RetrievePackedPathFromHeap(_forwardHeap, _backwardHeap, middleNodeIDOfAlternativePath, packedAlternativePath); | ||||||
| 
 | 
 | ||||||
| @ -270,8 +309,8 @@ private: | |||||||
|         //compute forward sharing
 |         //compute forward sharing
 | ||||||
|         while( (packedAlternativePath[aindex] == packedShortestPath[aindex]) && (packedAlternativePath[aindex+1] == packedShortestPath[aindex+1]) ) { |         while( (packedAlternativePath[aindex] == packedShortestPath[aindex]) && (packedAlternativePath[aindex+1] == packedShortestPath[aindex+1]) ) { | ||||||
|             //            INFO("retrieving edge (" << packedAlternativePath[aindex] << "," << packedAlternativePath[aindex+1] << ")");
 |             //            INFO("retrieving edge (" << packedAlternativePath[aindex] << "," << packedAlternativePath[aindex+1] << ")");
 | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection(packedAlternativePath[aindex], packedAlternativePath[aindex+1]); |             typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection(packedAlternativePath[aindex], packedAlternativePath[aindex+1]); | ||||||
|             sharing += super::_queryData.graph->GetEdgeData(edgeID).distance; |             sharing += search_graph->GetEdgeData(edgeID).distance; | ||||||
|             ++aindex; |             ++aindex; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
| @ -279,55 +318,65 @@ private: | |||||||
|         int bindex = packedShortestPath.size()-1; |         int bindex = packedShortestPath.size()-1; | ||||||
|         //compute backward sharing
 |         //compute backward sharing
 | ||||||
|         while( aindex > 0 && bindex > 0 && (packedAlternativePath[aindex] == packedShortestPath[bindex]) && (packedAlternativePath[aindex-1] == packedShortestPath[bindex-1]) ) { |         while( aindex > 0 && bindex > 0 && (packedAlternativePath[aindex] == packedShortestPath[bindex]) && (packedAlternativePath[aindex-1] == packedShortestPath[bindex-1]) ) { | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection(packedAlternativePath[aindex], packedAlternativePath[aindex-1]); |             typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection(packedAlternativePath[aindex], packedAlternativePath[aindex-1]); | ||||||
|             sharing += super::_queryData.graph->GetEdgeData(edgeID).distance; |             sharing += search_graph->GetEdgeData(edgeID).distance; | ||||||
|             --aindex; --bindex; |             --aindex; --bindex; | ||||||
|         } |         } | ||||||
|         return sharing; |         return sharing; | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline void AlternativeRoutingStep(HeapPtr & _forwardHeap, HeapPtr & _backwardHeap, NodeID *middle, int *_upperbound, const int edgeBasedOffset, const bool forwardDirection, std::vector<NodeID>& searchSpaceIntersection) const { |     template<bool forwardDirection> | ||||||
|         const NodeID node = _forwardHeap->DeleteMin(); |     inline void AlternativeRoutingStep( | ||||||
|  |     		QueryHeap & _forward_heap, | ||||||
|  |     		QueryHeap & _reverse_heap, | ||||||
|  |     		NodeID *middle_node, | ||||||
|  |     		int *upper_bound_to_shortest_path_distance, | ||||||
|  |     		std::vector<NodeID>& searchSpaceIntersection, | ||||||
|  |     		std::vector<SearchSpaceEdge> & search_space, | ||||||
|  |     		const int edgeBasedOffset | ||||||
|  |     		) const { | ||||||
|  |         const NodeID node = _forward_heap.DeleteMin(); | ||||||
|  |         const int distance = _forward_heap.GetKey(node); | ||||||
|  |         int scaledDistance = (distance-edgeBasedOffset)/(1.+VIAPATH_EPSILON); | ||||||
|  |         if(scaledDistance > *upper_bound_to_shortest_path_distance){ | ||||||
|  |             _forward_heap.DeleteAll(); | ||||||
|  |             return; | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|         const int distance = _forwardHeap->GetKey(node); |         search_space.push_back(std::make_pair(_forward_heap.GetData( node ).parent, node)); | ||||||
|         if(_backwardHeap->WasInserted(node) ){ | 
 | ||||||
|  |         if(_reverse_heap.WasInserted(node) ){ | ||||||
|             searchSpaceIntersection.push_back(node); |             searchSpaceIntersection.push_back(node); | ||||||
| 
 | 
 | ||||||
|             const int newDistance = _backwardHeap->GetKey(node) + distance; |             const int newDistance = _reverse_heap.GetKey(node) + distance; | ||||||
|             if(newDistance < *_upperbound ){ |             if(newDistance < *upper_bound_to_shortest_path_distance ){ | ||||||
|                 if(newDistance>=0 ) { |                 if(newDistance>=0 ) { | ||||||
|                     *middle = node; |                     *middle_node = node; | ||||||
|                     *_upperbound = newDistance; |                     *upper_bound_to_shortest_path_distance = newDistance; | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         int scaledDistance = (distance-edgeBasedOffset)/(1.+VIAPATH_EPSILON); |         for ( typename SearchGraph::EdgeIterator edge = search_graph->BeginEdges( node ); edge < search_graph->EndEdges(node); edge++ ) { | ||||||
|         if(scaledDistance > *_upperbound){ |             const typename SearchGraph::EdgeData & data = search_graph->GetEdgeData(edge); | ||||||
|             _forwardHeap->DeleteAll(); |  | ||||||
|             return; |  | ||||||
|         } |  | ||||||
| 
 |  | ||||||
|         for ( typename QueryDataT::Graph::EdgeIterator edge = super::_queryData.graph->BeginEdges( node ); edge < super::_queryData.graph->EndEdges(node); edge++ ) { |  | ||||||
|             const typename QueryDataT::Graph::EdgeData & data = super::_queryData.graph->GetEdgeData(edge); |  | ||||||
|             bool forwardDirectionFlag = (forwardDirection ? data.forward : data.backward ); |             bool forwardDirectionFlag = (forwardDirection ? data.forward : data.backward ); | ||||||
|             if(forwardDirectionFlag) { |             if(forwardDirectionFlag) { | ||||||
| 
 | 
 | ||||||
|                 const NodeID to = super::_queryData.graph->GetTarget(edge); |                 const NodeID to = search_graph->GetTarget(edge); | ||||||
|                 const int edgeWeight = data.distance; |                 const int edgeWeight = data.distance; | ||||||
| 
 | 
 | ||||||
|                 assert( edgeWeight > 0 ); |                 assert( edgeWeight > 0 ); | ||||||
|                 const int toDistance = distance + edgeWeight; |                 const int toDistance = distance + edgeWeight; | ||||||
| 
 | 
 | ||||||
|                 //New Node discovered -> Add to Heap + Node Info Storage
 |                 //New Node discovered -> Add to Heap + Node Info Storage
 | ||||||
|                 if ( !_forwardHeap->WasInserted( to ) ) { |                 if ( !_forward_heap.WasInserted( to ) ) { | ||||||
|                     _forwardHeap->Insert( to, toDistance, node ); |                     _forward_heap.Insert( to, toDistance, node ); | ||||||
| 
 | 
 | ||||||
|                 } |                 } | ||||||
|                 //Found a shorter Path -> Update distance
 |                 //Found a shorter Path -> Update distance
 | ||||||
|                 else if ( toDistance < _forwardHeap->GetKey( to ) ) { |                 else if ( toDistance < _forward_heap.GetKey( to ) ) { | ||||||
|                     _forwardHeap->GetData( to ).parent = node; |                     _forward_heap.GetData( to ).parent = node; | ||||||
|                     _forwardHeap->DecreaseKey( to, toDistance ); |                     _forward_heap.DecreaseKey( to, toDistance ); | ||||||
|                     //new parent
 |                     //new parent
 | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| @ -335,16 +384,17 @@ private: | |||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     //conduct T-Test
 |     //conduct T-Test
 | ||||||
|     inline bool viaNodeCandidatePasses_T_Test( HeapPtr& existingForwardHeap, HeapPtr& existingBackwardHeap, HeapPtr& newForwardHeap, HeapPtr& newBackwardHeap, const RankedCandidateNode& candidate, const int offset, const int lengthOfShortestPath, int * lengthOfViaPath, NodeID * s_v_middle, NodeID * v_t_middle) { |     inline bool viaNodeCandidatePasses_T_Test( QueryHeap& existingForwardHeap, QueryHeap& existingBackwardHeap, QueryHeap& newForwardHeap, QueryHeap& newBackwardHeap, const RankedCandidateNode& candidate, const int offset, const int lengthOfShortestPath, int * lengthOfViaPath, NodeID * s_v_middle, NodeID * v_t_middle) { | ||||||
|  |     	newForwardHeap.Clear(); | ||||||
|  |     	newBackwardHeap.Clear(); | ||||||
|         std::vector < NodeID > packed_s_v_path; |         std::vector < NodeID > packed_s_v_path; | ||||||
|         std::vector < NodeID > packed_v_t_path; |         std::vector < NodeID > packed_v_t_path; | ||||||
| 
 | 
 | ||||||
|         super::_queryData.InitializeOrClearSecondThreadLocalStorage(); |  | ||||||
|         *s_v_middle = UINT_MAX; |         *s_v_middle = UINT_MAX; | ||||||
|         int upperBoundFor_s_v_Path = INT_MAX; |         int upperBoundFor_s_v_Path = INT_MAX; | ||||||
|         //compute path <s,..,v> by reusing forward search from s
 |         //compute path <s,..,v> by reusing forward search from s
 | ||||||
|         newBackwardHeap->Insert(candidate.node, 0, candidate.node); |         newBackwardHeap.Insert(candidate.node, 0, candidate.node); | ||||||
|         while (newBackwardHeap->Size() > 0) { |         while (newBackwardHeap.Size() > 0) { | ||||||
|             super::RoutingStep(newBackwardHeap, existingForwardHeap, s_v_middle, &upperBoundFor_s_v_Path, 2*offset, false); |             super::RoutingStep(newBackwardHeap, existingForwardHeap, s_v_middle, &upperBoundFor_s_v_Path, 2*offset, false); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
| @ -354,8 +404,8 @@ private: | |||||||
|         //compute path <v,..,t> by reusing backward search from t
 |         //compute path <v,..,t> by reusing backward search from t
 | ||||||
|         *v_t_middle = UINT_MAX; |         *v_t_middle = UINT_MAX; | ||||||
|         int upperBoundFor_v_t_Path = INT_MAX; |         int upperBoundFor_v_t_Path = INT_MAX; | ||||||
|         newForwardHeap->Insert(candidate.node, 0, candidate.node); |         newForwardHeap.Insert(candidate.node, 0, candidate.node); | ||||||
|         while (newForwardHeap->Size() > 0) { |         while (newForwardHeap.Size() > 0) { | ||||||
|             super::RoutingStep(newForwardHeap, existingBackwardHeap, v_t_middle, &upperBoundFor_v_t_Path, 2*offset, true); |             super::RoutingStep(newForwardHeap, existingBackwardHeap, v_t_middle, &upperBoundFor_v_t_Path, 2*offset, true); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
| @ -369,14 +419,21 @@ private: | |||||||
|         super::RetrievePackedPathFromHeap(newForwardHeap, existingBackwardHeap, *v_t_middle, packed_v_t_path); |         super::RetrievePackedPathFromHeap(newForwardHeap, existingBackwardHeap, *v_t_middle, packed_v_t_path); | ||||||
| 
 | 
 | ||||||
|         NodeID s_P = *s_v_middle, t_P = *v_t_middle; |         NodeID s_P = *s_v_middle, t_P = *v_t_middle; | ||||||
|  |         if(UINT_MAX == s_P) { | ||||||
|  |             return false; | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |         if(UINT_MAX == t_P) { | ||||||
|  |             return false; | ||||||
|  |         } | ||||||
|         const int T_threshold = VIAPATH_EPSILON * lengthOfShortestPath; |         const int T_threshold = VIAPATH_EPSILON * lengthOfShortestPath; | ||||||
|         int unpackedUntilDistance = 0; |         int unpackedUntilDistance = 0; | ||||||
| 
 | 
 | ||||||
|         std::stack<UnpackEdge> unpackStack; |         std::stack<SearchSpaceEdge> unpackStack; | ||||||
|         //Traverse path s-->v
 |         //Traverse path s-->v
 | ||||||
|         for (unsigned i = packed_s_v_path.size() - 1; (i > 0) && unpackStack.empty(); --i) { |         for (unsigned i = packed_s_v_path.size() - 1; (i > 0) && unpackStack.empty(); --i) { | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection( packed_s_v_path[i - 1], packed_s_v_path[i]); |             typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection( packed_s_v_path[i - 1], packed_s_v_path[i]); | ||||||
|             int lengthOfCurrentEdge = super::_queryData.graph->GetEdgeData(edgeID).distance; |             int lengthOfCurrentEdge = search_graph->GetEdgeData(edgeID).distance; | ||||||
|             if (lengthOfCurrentEdge + unpackedUntilDistance >= T_threshold) { |             if (lengthOfCurrentEdge + unpackedUntilDistance >= T_threshold) { | ||||||
|                 unpackStack.push(std::make_pair(packed_s_v_path[i - 1], packed_s_v_path[i])); |                 unpackStack.push(std::make_pair(packed_s_v_path[i - 1], packed_s_v_path[i])); | ||||||
|             } else { |             } else { | ||||||
| @ -386,17 +443,17 @@ private: | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         while (!unpackStack.empty()) { |         while (!unpackStack.empty()) { | ||||||
|             const UnpackEdge viaPathEdge = unpackStack.top(); |             const SearchSpaceEdge viaPathEdge = unpackStack.top(); | ||||||
|             unpackStack.pop(); |             unpackStack.pop(); | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeIDInViaPath = super::_queryData.graph->FindEdgeInEitherDirection(viaPathEdge.first, viaPathEdge.second); |             typename SearchGraph::EdgeIterator edgeIDInViaPath = search_graph->FindEdgeInEitherDirection(viaPathEdge.first, viaPathEdge.second); | ||||||
|             if(UINT_MAX == edgeIDInViaPath) |             if(UINT_MAX == edgeIDInViaPath) | ||||||
|                 return false; |                 return false; | ||||||
|             typename QueryDataT::Graph::EdgeData currentEdgeData = super::_queryData.graph->GetEdgeData(edgeIDInViaPath); |             typename SearchGraph::EdgeData currentEdgeData = search_graph->GetEdgeData(edgeIDInViaPath); | ||||||
|             bool IsViaEdgeShortCut = currentEdgeData.shortcut; |             bool IsViaEdgeShortCut = currentEdgeData.shortcut; | ||||||
|             if (IsViaEdgeShortCut) { |             if (IsViaEdgeShortCut) { | ||||||
|                 const NodeID middleOfViaPath = currentEdgeData.id; |                 const NodeID middleOfViaPath = currentEdgeData.id; | ||||||
|                 typename QueryDataT::Graph::EdgeIterator edgeIDOfSecondSegment = super::_queryData.graph->FindEdgeInEitherDirection(middleOfViaPath, viaPathEdge.second); |                 typename SearchGraph::EdgeIterator edgeIDOfSecondSegment = search_graph->FindEdgeInEitherDirection(middleOfViaPath, viaPathEdge.second); | ||||||
|                 int lengthOfSecondSegment = super::_queryData.graph->GetEdgeData(edgeIDOfSecondSegment).distance; |                 int lengthOfSecondSegment = search_graph->GetEdgeData(edgeIDOfSecondSegment).distance; | ||||||
|                 //attention: !unpacking in reverse!
 |                 //attention: !unpacking in reverse!
 | ||||||
|                 //Check whether the second segment is the one that crosses the threshold; if so, push the second segment onto the stack, otherwise push the first segment and add the distance of the second.
 |                 //Check whether the second segment is the one that crosses the threshold; if so, push the second segment onto the stack, otherwise push the first segment and add the distance of the second.
 | ||||||
|                 if (unpackedUntilDistance + lengthOfSecondSegment >= T_threshold) { |                 if (unpackedUntilDistance + lengthOfSecondSegment >= T_threshold) { | ||||||
| @ -416,8 +473,8 @@ private: | |||||||
|         unpackedUntilDistance = 0; |         unpackedUntilDistance = 0; | ||||||
|         //Traverse path v-->t
 |         //Traverse path v-->t
 | ||||||
|         for (unsigned i = 0, lengthOfPackedPath = packed_v_t_path.size() - 1; (i < lengthOfPackedPath) && unpackStack.empty(); ++i) { |         for (unsigned i = 0, lengthOfPackedPath = packed_v_t_path.size() - 1; (i < lengthOfPackedPath) && unpackStack.empty(); ++i) { | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeID = super::_queryData.graph->FindEdgeInEitherDirection( packed_v_t_path[i], packed_v_t_path[i + 1]); |             typename SearchGraph::EdgeIterator edgeID = search_graph->FindEdgeInEitherDirection( packed_v_t_path[i], packed_v_t_path[i + 1]); | ||||||
|             int lengthOfCurrentEdge = super::_queryData.graph->GetEdgeData(edgeID).distance; |             int lengthOfCurrentEdge = search_graph->GetEdgeData(edgeID).distance; | ||||||
|             if (lengthOfCurrentEdge + unpackedUntilDistance >= T_threshold) { |             if (lengthOfCurrentEdge + unpackedUntilDistance >= T_threshold) { | ||||||
|                 unpackStack.push( std::make_pair(packed_v_t_path[i], packed_v_t_path[i + 1])); |                 unpackStack.push( std::make_pair(packed_v_t_path[i], packed_v_t_path[i + 1])); | ||||||
|             } else { |             } else { | ||||||
| @ -427,17 +484,17 @@ private: | |||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         while (!unpackStack.empty()) { |         while (!unpackStack.empty()) { | ||||||
|             const UnpackEdge viaPathEdge = unpackStack.top(); |             const SearchSpaceEdge viaPathEdge = unpackStack.top(); | ||||||
|             unpackStack.pop(); |             unpackStack.pop(); | ||||||
|             typename QueryDataT::Graph::EdgeIterator edgeIDInViaPath = super::_queryData.graph->FindEdgeInEitherDirection(viaPathEdge.first, viaPathEdge.second); |             typename SearchGraph::EdgeIterator edgeIDInViaPath = search_graph->FindEdgeInEitherDirection(viaPathEdge.first, viaPathEdge.second); | ||||||
|             if(UINT_MAX == edgeIDInViaPath) |             if(UINT_MAX == edgeIDInViaPath) | ||||||
|                 return false; |                 return false; | ||||||
|             typename QueryDataT::Graph::EdgeData currentEdgeData = super::_queryData.graph->GetEdgeData(edgeIDInViaPath); |             typename SearchGraph::EdgeData currentEdgeData = search_graph->GetEdgeData(edgeIDInViaPath); | ||||||
|             const bool IsViaEdgeShortCut = currentEdgeData.shortcut; |             const bool IsViaEdgeShortCut = currentEdgeData.shortcut; | ||||||
|             if (IsViaEdgeShortCut) { |             if (IsViaEdgeShortCut) { | ||||||
|                 const NodeID middleOfViaPath = currentEdgeData.id; |                 const NodeID middleOfViaPath = currentEdgeData.id; | ||||||
|                 typename QueryDataT::Graph::EdgeIterator edgeIDOfFirstSegment = super::_queryData.graph->FindEdgeInEitherDirection(viaPathEdge.first, middleOfViaPath); |                 typename SearchGraph::EdgeIterator edgeIDOfFirstSegment = search_graph->FindEdgeInEitherDirection(viaPathEdge.first, middleOfViaPath); | ||||||
|                 int lengthOfFirstSegment = super::_queryData.graph->GetEdgeData( edgeIDOfFirstSegment).distance; |                 int lengthOfFirstSegment = search_graph->GetEdgeData( edgeIDOfFirstSegment).distance; | ||||||
|                 //Check whether the first segment is the one that crosses the threshold; if so, push the first segment onto the stack, otherwise push the second segment and add the distance of the first.
 |                 //Check whether the first segment is the one that crosses the threshold; if so, push the first segment onto the stack, otherwise push the second segment and add the distance of the first.
 | ||||||
|                 if (unpackedUntilDistance + lengthOfFirstSegment >= T_threshold) { |                 if (unpackedUntilDistance + lengthOfFirstSegment >= T_threshold) { | ||||||
|                     unpackStack.push( std::make_pair(viaPathEdge.first, middleOfViaPath)); |                     unpackStack.push( std::make_pair(viaPathEdge.first, middleOfViaPath)); | ||||||
| @ -454,20 +511,21 @@ private: | |||||||
| 
 | 
 | ||||||
|         lengthOfPathT_Test_Path += unpackedUntilDistance; |         lengthOfPathT_Test_Path += unpackedUntilDistance; | ||||||
|         //Run the actual T-Test query and check whether the distances match.
 |         //Run the actual T-Test query and check whether the distances match.
 | ||||||
|         HeapPtr& forwardHeap = super::_queryData.forwardHeap3; |  | ||||||
|         HeapPtr& backwardHeap = super::_queryData.backwardHeap3; |  | ||||||
|         super::_queryData.InitializeOrClearThirdThreadLocalStorage(); |         super::_queryData.InitializeOrClearThirdThreadLocalStorage(); | ||||||
|  | 
 | ||||||
|  |         QueryHeap& forward_heap3 = *super::_queryData.forwardHeap3; | ||||||
|  |         QueryHeap& backward_heap3 = *super::_queryData.backwardHeap3; | ||||||
|         int _upperBound = INT_MAX; |         int _upperBound = INT_MAX; | ||||||
|         NodeID middle = UINT_MAX; |         NodeID middle = UINT_MAX; | ||||||
|         forwardHeap->Insert(s_P, 0, s_P); |         forward_heap3.Insert(s_P, 0, s_P); | ||||||
|         backwardHeap->Insert(t_P, 0, t_P); |         backward_heap3.Insert(t_P, 0, t_P); | ||||||
|         //exploration from s and t until deletemin/(1+epsilon) > _lengthOfShortestPath
 |         //exploration from s and t until deletemin/(1+epsilon) > _lengthOfShortestPath
 | ||||||
|         while (forwardHeap->Size() + backwardHeap->Size() > 0) { |         while (forward_heap3.Size() + backward_heap3.Size() > 0) { | ||||||
|             if (forwardHeap->Size() > 0) { |             if (forward_heap3.Size() > 0) { | ||||||
|                 super::RoutingStep(forwardHeap, backwardHeap, &middle, &_upperBound, offset, true); |                 super::RoutingStep(forward_heap3, backward_heap3, &middle, &_upperBound, offset, true); | ||||||
|             } |             } | ||||||
|             if (backwardHeap->Size() > 0) { |             if (backward_heap3.Size() > 0) { | ||||||
|                 super::RoutingStep(backwardHeap, forwardHeap, &middle, &_upperBound, offset, false); |                 super::RoutingStep(backward_heap3, forward_heap3, &middle, &_upperBound, offset, false); | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         return (_upperBound <= lengthOfPathT_Test_Path); |         return (_upperBound <= lengthOfPathT_Test_Path); | ||||||
|  | |||||||
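viaNodeCandidatePasses_T_Test checks local optimality: starting at the via node, both packed halves are partially unpacked until roughly T_threshold = VIAPATH_EPSILON * d(s,t) of length has been covered, which yields the endpoints s_P and t_P, and the candidate passes only if a fresh bidirectional query between s_P and t_P cannot undercut the via segment. A hedged sketch of the endpoint search at shortcut granularity; the committed code additionally descends into shortcuts to hit the threshold precisely, and all names below are assumed:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    typedef unsigned Node;

    // Walk outwards from the via node along a packed path and stop at the first node
    // whose distance from the via node reaches t_threshold. edge_weight stands in for
    // FindEdgeInEitherDirection plus GetEdgeData().distance.
    template <typename EdgeWeightFn>
    std::size_t find_t_test_endpoint(const std::vector<Node> & packed_path_from_via,
                                     int t_threshold, EdgeWeightFn edge_weight) {
        if (packed_path_from_via.size() < 2) {
            return 0;
        }
        int unpacked_distance = 0;
        for (std::size_t i = 0; i + 1 < packed_path_from_via.size(); ++i) {
            unpacked_distance += edge_weight(packed_path_from_via[i], packed_path_from_via[i + 1]);
            if (unpacked_distance >= t_threshold) {
                return i + 1; // index of s_P (or t_P) at shortcut granularity
            }
        }
        return packed_path_from_via.size() - 1; // the whole half is shorter than the threshold
    }

    int main() {
        std::vector<Node> packed_from_via = {7, 6, 5, 4}; // via node first
        // Assumed unit edge weights and T_threshold = 2: the endpoint is two edges out.
        std::size_t end = find_t_test_endpoint(packed_from_via, 2, [](Node, Node) { return 1; });
        std::printf("endpoint index: %zu (node %u)\n", end, packed_from_via[end]);
        return 0;
    }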
| @ -23,43 +23,43 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #ifndef BASICROUTINGINTERFACE_H_ | #ifndef BASICROUTINGINTERFACE_H_ | ||||||
| #define BASICROUTINGINTERFACE_H_ | #define BASICROUTINGINTERFACE_H_ | ||||||
| 
 | 
 | ||||||
|  | #include <boost/noncopyable.hpp> | ||||||
|  | 
 | ||||||
| #include <cassert> | #include <cassert> | ||||||
| #include <climits> | #include <climits> | ||||||
| 
 | 
 | ||||||
| #include "../Plugins/RawRouteData.h" | #include "../Plugins/RawRouteData.h" | ||||||
|  | #include "../Util/ContainerUtils.h" | ||||||
| 
 | 
 | ||||||
| template<class QueryDataT> | template<class QueryDataT> | ||||||
| class BasicRoutingInterface { | class BasicRoutingInterface : boost::noncopyable{ | ||||||
| protected: | protected: | ||||||
|     QueryDataT & _queryData; |     QueryDataT & _queryData; | ||||||
| public: | public: | ||||||
|     BasicRoutingInterface(QueryDataT & qd) : _queryData(qd) { } |     BasicRoutingInterface(QueryDataT & qd) : _queryData(qd) { } | ||||||
|     virtual ~BasicRoutingInterface(){ }; |     virtual ~BasicRoutingInterface(){ }; | ||||||
| 
 | 
 | ||||||
|     inline void RoutingStep(typename QueryDataT::HeapPtr & _forwardHeap, typename QueryDataT::HeapPtr & _backwardHeap, NodeID *middle, int *_upperbound, const int edgeBasedOffset, const bool forwardDirection) const { |     inline void RoutingStep(typename QueryDataT::QueryHeap & _forwardHeap, typename QueryDataT::QueryHeap & _backwardHeap, NodeID *middle, int *_upperbound, const int edgeBasedOffset, const bool forwardDirection) const { | ||||||
|         const NodeID node = _forwardHeap->DeleteMin(); |         const NodeID node = _forwardHeap.DeleteMin(); | ||||||
|         const int distance = _forwardHeap->GetKey(node); |         const int distance = _forwardHeap.GetKey(node); | ||||||
| //        INFO((forwardDirection ? "[forw]" : "[back]") << " settled node " << node << " at distance " << distance);
 |         if(_backwardHeap.WasInserted(node) ){ | ||||||
|         if(_backwardHeap->WasInserted(node) ){ |             const int newDistance = _backwardHeap.GetKey(node) + distance; | ||||||
| //            INFO((forwardDirection ? "[forw]"     : "[back]") << " scanned node " << node << " in both directions, upper bound: " << *_upperbound);
 |  | ||||||
|             const int newDistance = _backwardHeap->GetKey(node) + distance; |  | ||||||
|             if(newDistance < *_upperbound ){ |             if(newDistance < *_upperbound ){ | ||||||
|                 if(newDistance>=0 ) { |                 if(newDistance>=0 ) { | ||||||
| //                    INFO((forwardDirection ? "[forw]" : "[back]") << " -> node " << node << " is new middle at total distance " << newDistance);
 |  | ||||||
|                     *middle = node; |                     *middle = node; | ||||||
|                     *_upperbound = newDistance; |                     *_upperbound = newDistance; | ||||||
|                 } else { |                 } else { | ||||||
| //                    INFO((forwardDirection ? "[forw]" : "[back]") << " -> ignored " << node << " as new middle at total distance " << newDistance);
 |  | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         if(distance-edgeBasedOffset > *_upperbound){ |         if(distance-edgeBasedOffset > *_upperbound){ | ||||||
|             _forwardHeap->DeleteAll(); |             _forwardHeap.DeleteAll(); | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         for ( typename QueryDataT::Graph::EdgeIterator edge = _queryData.graph->BeginEdges( node ); edge < _queryData.graph->EndEdges(node); edge++ ) { |         //Stalling
 | ||||||
|  |         for ( typename QueryDataT::Graph::EdgeIterator edge = _queryData.graph->BeginEdges( node ); edge < _queryData.graph->EndEdges(node); ++edge ) { | ||||||
|             const typename QueryDataT::Graph::EdgeData & data = _queryData.graph->GetEdgeData(edge); |             const typename QueryDataT::Graph::EdgeData & data = _queryData.graph->GetEdgeData(edge); | ||||||
|             bool backwardDirectionFlag = (!forwardDirection) ? data.forward : data.backward; |             bool backwardDirectionFlag = (!forwardDirection) ? data.forward : data.backward; | ||||||
|             if(backwardDirectionFlag) { |             if(backwardDirectionFlag) { | ||||||
| @ -68,16 +68,15 @@ public: | |||||||
| 
 | 
 | ||||||
|                 assert( edgeWeight > 0 ); |                 assert( edgeWeight > 0 ); | ||||||
| 
 | 
 | ||||||
|                 //Stalling
 |                 if(_forwardHeap.WasInserted( to )) { | ||||||
|                 if(_forwardHeap->WasInserted( to )) { |                     if(_forwardHeap.GetKey( to ) + edgeWeight < distance) { | ||||||
|                     if(_forwardHeap->GetKey( to ) + edgeWeight < distance) { |  | ||||||
|                         return; |                         return; | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
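The first edge loop above is the stall-on-demand optimisation: before relaxing anything, the routine checks whether some already-labelled neighbour can reach the current node more cheaply against the search direction; if so, the node's label cannot be a shortest distance in this direction and its relaxation is skipped entirely. A small self-contained sketch of that test, with assumed names and numbers:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // A node settled at 'dist' can be stalled if a neighbour reachable against the
    // search direction already has key + edge weight < dist: the node's tentative
    // distance is then provably suboptimal, so relaxing its outgoing edges cannot help.
    struct ReverseNeighbour {
        int key;     // distance label of the neighbour, if it has one
        int weight;  // weight of the edge neighbour -> node
        bool known;  // whether the neighbour has been labelled at all
    };

    bool should_stall(int dist, const std::vector<ReverseNeighbour> & reverse_neighbours) {
        for (std::size_t i = 0; i < reverse_neighbours.size(); ++i) {
            const ReverseNeighbour & nb = reverse_neighbours[i];
            if (nb.known && nb.key + nb.weight < dist) {
                return true;
            }
        }
        return false;
    }

    int main() {
        std::vector<ReverseNeighbour> nbs;
        ReverseNeighbour a = {40, 5, true}; // 40 + 5 = 45 < 50 -> stall
        nbs.push_back(a);
        std::printf("%d\n", should_stall(50, nbs) ? 1 : 0); // prints 1
        return 0;
    }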
| 
 | 
 | ||||||
|         for ( typename QueryDataT::Graph::EdgeIterator edge = _queryData.graph->BeginEdges( node ); edge < _queryData.graph->EndEdges(node); edge++ ) { |         for ( typename QueryDataT::Graph::EdgeIterator edge = _queryData.graph->BeginEdges( node ); edge < _queryData.graph->EndEdges(node); ++edge ) { | ||||||
|             const typename QueryDataT::Graph::EdgeData & data = _queryData.graph->GetEdgeData(edge); |             const typename QueryDataT::Graph::EdgeData & data = _queryData.graph->GetEdgeData(edge); | ||||||
|             bool forwardDirectionFlag = (forwardDirection ? data.forward : data.backward ); |             bool forwardDirectionFlag = (forwardDirection ? data.forward : data.backward ); | ||||||
|             if(forwardDirectionFlag) { |             if(forwardDirectionFlag) { | ||||||
| @ -89,23 +88,20 @@ public: | |||||||
|                 const int toDistance = distance + edgeWeight; |                 const int toDistance = distance + edgeWeight; | ||||||
| 
 | 
 | ||||||
|                 //New Node discovered -> Add to Heap + Node Info Storage
 |                 //New Node discovered -> Add to Heap + Node Info Storage
 | ||||||
|                 if ( !_forwardHeap->WasInserted( to ) ) { |                 if ( !_forwardHeap.WasInserted( to ) ) { | ||||||
|                     //                    INFO((forwardDirection ? "[forw]" : "[back]") << " scanning edge (" << node << "," << to << ") with distance " << toDistance << ", edge length: " << data.distance);
 |                     _forwardHeap.Insert( to, toDistance, node ); | ||||||
|                     _forwardHeap->Insert( to, toDistance, node ); |  | ||||||
|                 } |                 } | ||||||
|                 //Found a shorter Path -> Update distance
 |                 //Found a shorter Path -> Update distance
 | ||||||
|                 else if ( toDistance < _forwardHeap->GetKey( to ) ) { |                 else if ( toDistance < _forwardHeap.GetKey( to ) ) { | ||||||
|                     //                    INFO((forwardDirection ? "[forw]" : "[back]") << " decrease and scanning edge (" << node << "," << to << ") from " << _forwardHeap->GetKey(to) << "to " << toDistance << ", edge length: " << data.distance);
 |                     _forwardHeap.GetData( to ).parent = node; | ||||||
|                     _forwardHeap->GetData( to ).parent = node; |                     _forwardHeap.DecreaseKey( to, toDistance ); | ||||||
|                     _forwardHeap->DecreaseKey( to, toDistance ); |  | ||||||
|                     //new parent
 |                     //new parent
 | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline void UnpackPath(std::vector<NodeID> & packedPath, std::vector<_PathData> & unpackedPath) const { |     inline void UnpackPath(const std::vector<NodeID> & packedPath, std::vector<_PathData> & unpackedPath) const { | ||||||
| 
 |  | ||||||
|         const unsigned sizeOfPackedPath = packedPath.size(); |         const unsigned sizeOfPackedPath = packedPath.size(); | ||||||
|         std::stack<std::pair<NodeID, NodeID> > recursionStack; |         std::stack<std::pair<NodeID, NodeID> > recursionStack; | ||||||
| 
 | 
 | ||||||
| @ -118,15 +114,12 @@ public: | |||||||
|         while(!recursionStack.empty()) { |         while(!recursionStack.empty()) { | ||||||
|             edge = recursionStack.top(); |             edge = recursionStack.top(); | ||||||
|             recursionStack.pop(); |             recursionStack.pop(); | ||||||
| //            INFO("Unpacking edge (" << edge.first << "," << edge.second << ")");
 |  | ||||||
| 
 | 
 | ||||||
|             typename QueryDataT::Graph::EdgeIterator smallestEdge = SPECIAL_EDGEID; |             typename QueryDataT::Graph::EdgeIterator smallestEdge = SPECIAL_EDGEID; | ||||||
|             int smallestWeight = INT_MAX; |             int smallestWeight = INT_MAX; | ||||||
|             for(typename QueryDataT::Graph::EdgeIterator eit = _queryData.graph->BeginEdges(edge.first);eit < _queryData.graph->EndEdges(edge.first);++eit){ |             for(typename QueryDataT::Graph::EdgeIterator eit = _queryData.graph->BeginEdges(edge.first);eit < _queryData.graph->EndEdges(edge.first);++eit){ | ||||||
|                 const int weight = _queryData.graph->GetEdgeData(eit).distance; |                 const int weight = _queryData.graph->GetEdgeData(eit).distance; | ||||||
| //                INFO("Checking edge (" << edge.first << "/" << _queryData.graph->GetTarget(eit) << ")");
 |  | ||||||
|                 if(_queryData.graph->GetTarget(eit) == edge.second && weight < smallestWeight && _queryData.graph->GetEdgeData(eit).forward){ |                 if(_queryData.graph->GetTarget(eit) == edge.second && weight < smallestWeight && _queryData.graph->GetEdgeData(eit).forward){ | ||||||
| //                    INFO("1smallest " << eit << ", " << weight);
 |  | ||||||
|                     smallestEdge = eit; |                     smallestEdge = eit; | ||||||
|                     smallestWeight = weight; |                     smallestWeight = weight; | ||||||
|                 } |                 } | ||||||
| @ -135,9 +128,7 @@ public: | |||||||
|             if(smallestEdge == SPECIAL_EDGEID){ |             if(smallestEdge == SPECIAL_EDGEID){ | ||||||
|                 for(typename QueryDataT::Graph::EdgeIterator eit = _queryData.graph->BeginEdges(edge.second);eit < _queryData.graph->EndEdges(edge.second);++eit){ |                 for(typename QueryDataT::Graph::EdgeIterator eit = _queryData.graph->BeginEdges(edge.second);eit < _queryData.graph->EndEdges(edge.second);++eit){ | ||||||
|                     const int weight = _queryData.graph->GetEdgeData(eit).distance; |                     const int weight = _queryData.graph->GetEdgeData(eit).distance; | ||||||
| //                    INFO("Checking edge (" << edge.first << "/" << _queryData.graph->GetTarget(eit) << ")");
 |  | ||||||
|                     if(_queryData.graph->GetTarget(eit) == edge.first && weight < smallestWeight && _queryData.graph->GetEdgeData(eit).backward){ |                     if(_queryData.graph->GetTarget(eit) == edge.first && weight < smallestWeight && _queryData.graph->GetEdgeData(eit).backward){ | ||||||
| //                      INFO("2smallest " << eit << ", " << weight);
 |  | ||||||
|                         smallestEdge = eit; |                         smallestEdge = eit; | ||||||
|                         smallestWeight = weight; |                         smallestWeight = weight; | ||||||
|                     } |                     } | ||||||
| @ -159,7 +150,6 @@ public: | |||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline void UnpackEdge(const NodeID s, const NodeID t, std::vector<NodeID> & unpackedPath) const { |     inline void UnpackEdge(const NodeID s, const NodeID t, std::vector<NodeID> & unpackedPath) const { | ||||||
| 
 |  | ||||||
|         std::stack<std::pair<NodeID, NodeID> > recursionStack; |         std::stack<std::pair<NodeID, NodeID> > recursionStack; | ||||||
|         recursionStack.push(std::make_pair(s,t)); |         recursionStack.push(std::make_pair(s,t)); | ||||||
| 
 | 
 | ||||||
| @ -193,7 +183,6 @@ public: | |||||||
|             if(ed.shortcut) {//unpack
 |             if(ed.shortcut) {//unpack
 | ||||||
|                 const NodeID middle = ed.id; |                 const NodeID middle = ed.id; | ||||||
|                 //again, we need to do this in reversed order
 |                 //again, we need to do this in reversed order
 | ||||||
| //                INFO("unpacking (" << middle << "," <<  edge.second << ") and (" << edge.first << "," << middle << ")");
 |  | ||||||
|                 recursionStack.push(std::make_pair(middle, edge.second)); |                 recursionStack.push(std::make_pair(middle, edge.second)); | ||||||
|                 recursionStack.push(std::make_pair(edge.first, middle)); |                 recursionStack.push(std::make_pair(edge.first, middle)); | ||||||
|             } else { |             } else { | ||||||
| @ -204,22 +193,29 @@ public: | |||||||
|         unpackedPath.push_back(t); |         unpackedPath.push_back(t); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     inline void RetrievePackedPathFromHeap(const typename QueryDataT::HeapPtr & _fHeap, const typename QueryDataT::HeapPtr & _bHeap, const NodeID middle, std::vector<NodeID>& packedPath) { |     inline void RetrievePackedPathFromHeap(typename QueryDataT::QueryHeap & _fHeap, typename QueryDataT::QueryHeap & _bHeap, const NodeID middle, std::vector<NodeID>& packedPath) const { | ||||||
|         NodeID pathNode = middle; |         NodeID pathNode = middle; | ||||||
|         while(pathNode != _fHeap->GetData(pathNode).parent) { |         while(pathNode != _fHeap.GetData(pathNode).parent) { | ||||||
|             pathNode = _fHeap->GetData(pathNode).parent; |             pathNode = _fHeap.GetData(pathNode).parent; | ||||||
|             packedPath.push_back(pathNode); |             packedPath.push_back(pathNode); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         std::reverse(packedPath.begin(), packedPath.end()); |         std::reverse(packedPath.begin(), packedPath.end()); | ||||||
| 
 |  | ||||||
|         packedPath.push_back(middle); |         packedPath.push_back(middle); | ||||||
|         pathNode = middle; |         pathNode = middle; | ||||||
|         while (pathNode != _bHeap->GetData(pathNode).parent){ |         while (pathNode != _bHeap.GetData(pathNode).parent){ | ||||||
|             pathNode = _bHeap->GetData(pathNode).parent; |             pathNode = _bHeap.GetData(pathNode).parent; | ||||||
|             packedPath.push_back(pathNode); |             packedPath.push_back(pathNode); | ||||||
|     	} |     	} | ||||||
|     } |     } | ||||||
|  | 
 | ||||||
|  |     inline void RetrievePackedPathFromSingleHeap(typename QueryDataT::QueryHeap & search_heap, const NodeID middle, std::vector<NodeID>& packed_path) const { | ||||||
|  |         NodeID pathNode = middle; | ||||||
|  |         while(pathNode != search_heap.GetData(pathNode).parent) { | ||||||
|  |             pathNode = search_heap.GetData(pathNode).parent; | ||||||
|  |             packed_path.push_back(pathNode); | ||||||
|  |         } | ||||||
|  |     } | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | |||||||
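The heap-based path retrieval above (RetrievePackedPathFromHeap and the new RetrievePackedPathFromSingleHeap) simply walks parent pointers until it reaches a node that is its own parent. A minimal self-contained sketch of that pattern, with a std::map standing in for the query heap's node storage (illustrative node IDs, not OSRM's heap types):

    #include <iostream>
    #include <map>
    #include <vector>

    // Stand-in for the per-node data kept by the query heap: every settled node
    // remembers its parent; a search root is its own parent.
    struct HeapData { unsigned parent; };

    void retrieve_packed_path(const std::map<unsigned, HeapData> & heap, const unsigned middle, std::vector<unsigned> & packed_path) {
        unsigned path_node = middle;
        // Walk up the parent chain; stop at the root (the node that is its own parent).
        while (path_node != heap.find(path_node)->second.parent) {
            path_node = heap.find(path_node)->second.parent;
            packed_path.push_back(path_node);
        }
    }

    int main() {
        std::map<unsigned, HeapData> heap;
        heap[0].parent = 0;  // root of the search
        heap[1].parent = 0;
        heap[2].parent = 1;
        heap[3].parent = 2;

        std::vector<unsigned> packed_path;
        retrieve_packed_path(heap, 3, packed_path);
        for (unsigned i = 0; i < packed_path.size(); ++i)
            std::cout << packed_path[i] << " ";      // prints: 2 1 0
        std::cout << std::endl;
        return 0;
    }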
| @ -28,13 +28,14 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| template<class QueryDataT> | template<class QueryDataT> | ||||||
| class ShortestPathRouting : public BasicRoutingInterface<QueryDataT>{ | class ShortestPathRouting : public BasicRoutingInterface<QueryDataT>{ | ||||||
|     typedef BasicRoutingInterface<QueryDataT> super; |     typedef BasicRoutingInterface<QueryDataT> super; | ||||||
|  |     typedef typename QueryDataT::QueryHeap QueryHeap; | ||||||
| public: | public: | ||||||
|     ShortestPathRouting(QueryDataT & qd) : super(qd) {} |     ShortestPathRouting( QueryDataT & qd) : super(qd) {} | ||||||
| 
 | 
 | ||||||
|     ~ShortestPathRouting() {} |     ~ShortestPathRouting() {} | ||||||
| 
 | 
 | ||||||
|     void operator()(std::vector<PhantomNodes> & phantomNodesVector,  RawRouteData & rawRouteData) { |     void operator()(std::vector<PhantomNodes> & phantomNodesVector,  RawRouteData & rawRouteData) const { | ||||||
|         BOOST_FOREACH(PhantomNodes & phantomNodePair, phantomNodesVector) { |         BOOST_FOREACH(const PhantomNodes & phantomNodePair, phantomNodesVector) { | ||||||
|             if(!phantomNodePair.AtLeastOnePhantomNodeIsUINTMAX()) { |             if(!phantomNodePair.AtLeastOnePhantomNodeIsUINTMAX()) { | ||||||
|                 rawRouteData.lengthOfShortestPath = rawRouteData.lengthOfAlternativePath = INT_MAX; |                 rawRouteData.lengthOfShortestPath = rawRouteData.lengthOfAlternativePath = INT_MAX; | ||||||
|                 return; |                 return; | ||||||
| @ -43,125 +44,102 @@ public: | |||||||
|         int distance1 = 0; |         int distance1 = 0; | ||||||
|         int distance2 = 0; |         int distance2 = 0; | ||||||
| 
 | 
 | ||||||
|         bool searchFrom1stStartNode(true); |         bool searchFrom1stStartNode = true; | ||||||
|         bool searchFrom2ndStartNode(true); |         bool searchFrom2ndStartNode = true; | ||||||
|         NodeID middle1 = ( NodeID ) UINT_MAX; |         NodeID middle1 = UINT_MAX; | ||||||
|         NodeID middle2 = ( NodeID ) UINT_MAX; |         NodeID middle2 = UINT_MAX; | ||||||
|         std::vector<NodeID> packedPath1; |         std::vector<NodeID> packedPath1; | ||||||
|         std::vector<NodeID> packedPath2; |         std::vector<NodeID> packedPath2; | ||||||
| 
 | 
 | ||||||
|         typename QueryDataT::HeapPtr & forwardHeap = super::_queryData.forwardHeap; |         super::_queryData.InitializeOrClearFirstThreadLocalStorage(); | ||||||
|         typename QueryDataT::HeapPtr & backwardHeap = super::_queryData.backwardHeap; |         super::_queryData.InitializeOrClearSecondThreadLocalStorage(); | ||||||
| 
 |         super::_queryData.InitializeOrClearThirdThreadLocalStorage(); | ||||||
|         typename QueryDataT::HeapPtr & forwardHeap2 = super::_queryData.forwardHeap2; |  | ||||||
|         typename QueryDataT::HeapPtr & backwardHeap2 = super::_queryData.backwardHeap2; |  | ||||||
| 
 | 
 | ||||||
|  |         QueryHeap & forward_heap1 = *(super::_queryData.forwardHeap); | ||||||
|  |         QueryHeap & reverse_heap1 = *(super::_queryData.backwardHeap); | ||||||
|  |         QueryHeap & forward_heap2 = *(super::_queryData.forwardHeap2); | ||||||
|  |         QueryHeap & reverse_heap2 = *(super::_queryData.backwardHeap2); | ||||||
| 
 | 
 | ||||||
|         //Get distance to next pair of target nodes.
 |         //Get distance to next pair of target nodes.
 | ||||||
|         BOOST_FOREACH(PhantomNodes & phantomNodePair, phantomNodesVector) { |         BOOST_FOREACH(const PhantomNodes & phantomNodePair, phantomNodesVector) { | ||||||
|             super::_queryData.InitializeOrClearFirstThreadLocalStorage(); |             forward_heap1.Clear();	forward_heap2.Clear(); | ||||||
|             super::_queryData.InitializeOrClearSecondThreadLocalStorage(); |             reverse_heap1.Clear();	reverse_heap2.Clear(); | ||||||
| 
 |  | ||||||
|             int _localUpperbound1 = INT_MAX; |             int _localUpperbound1 = INT_MAX; | ||||||
|             int _localUpperbound2 = INT_MAX; |             int _localUpperbound2 = INT_MAX; | ||||||
| 
 | 
 | ||||||
|  |             middle1 = UINT_MAX; | ||||||
|  |             middle2 = UINT_MAX; | ||||||
|  | 
 | ||||||
|             //insert new starting nodes into forward heap, adjusted by previous distances.
 |             //insert new starting nodes into forward heap, adjusted by previous distances.
 | ||||||
|             if(searchFrom1stStartNode) { |             if(searchFrom1stStartNode) { | ||||||
|                 forwardHeap->Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); |                 forward_heap1.Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); | ||||||
|                 forwardHeap2->Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); |                 forward_heap2.Insert(phantomNodePair.startPhantom.edgeBasedNode, -phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.edgeBasedNode); | ||||||
| //              INFO("a 1,2)forw insert " << phantomNodePair.startPhantom.edgeBasedNode << " with weight " << phantomNodePair.startPhantom.weight1);
 |  | ||||||
| //          } else {
 |  | ||||||
| //              INFO("Skipping first start node");
 |  | ||||||
|             } |             } | ||||||
|             if(phantomNodePair.startPhantom.isBidirected() && searchFrom2ndStartNode) { |             if(phantomNodePair.startPhantom.isBidirected() && searchFrom2ndStartNode) { | ||||||
|                 forwardHeap->Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); |                 forward_heap1.Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); | ||||||
|                 forwardHeap2->Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); |                 forward_heap2.Insert(phantomNodePair.startPhantom.edgeBasedNode+1, -phantomNodePair.startPhantom.weight2, phantomNodePair.startPhantom.edgeBasedNode+1); | ||||||
| //              INFO("b 1,2)forw insert " << phantomNodePair.startPhantom.edgeBasedNode+1 << " with weight " << -phantomNodePair.startPhantom.weight1);
 |  | ||||||
| //          } else if(!searchFrom2ndStartNode) {
 |  | ||||||
| //              INFO("Skipping second start node");
 |  | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
| //            backwardHeap->Clear();
 |  | ||||||
| //            backwardHeap2->Clear();
 |  | ||||||
|             //insert new backward nodes into backward heap, unadjusted.
 |             //insert new backward nodes into backward heap, unadjusted.
 | ||||||
|             backwardHeap->Insert(phantomNodePair.targetPhantom.edgeBasedNode, phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.edgeBasedNode); |             reverse_heap1.Insert(phantomNodePair.targetPhantom.edgeBasedNode, phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.edgeBasedNode); | ||||||
| //          INFO("1) back insert " << phantomNodePair.targetPhantom.edgeBasedNode << " with weight " << phantomNodePair.targetPhantom.weight1);
 |  | ||||||
|             if(phantomNodePair.targetPhantom.isBidirected() ) { |             if(phantomNodePair.targetPhantom.isBidirected() ) { | ||||||
| //              INFO("2) back insert " << phantomNodePair.targetPhantom.edgeBasedNode+1 << " with weight " << phantomNodePair.targetPhantom.weight2);
 |                 reverse_heap2.Insert(phantomNodePair.targetPhantom.edgeBasedNode+1, phantomNodePair.targetPhantom.weight2, phantomNodePair.targetPhantom.edgeBasedNode+1); | ||||||
|                 backwardHeap2->Insert(phantomNodePair.targetPhantom.edgeBasedNode+1, phantomNodePair.targetPhantom.weight2, phantomNodePair.targetPhantom.edgeBasedNode+1); |  | ||||||
|             } |             } | ||||||
|             int offset = (phantomNodePair.startPhantom.isBidirected() ? std::max(phantomNodePair.startPhantom.weight1, phantomNodePair.startPhantom.weight2) : phantomNodePair.startPhantom.weight1) ; |             const int forward_offset = phantomNodePair.startPhantom.weight1 + (phantomNodePair.startPhantom.isBidirected() ? phantomNodePair.startPhantom.weight2 : 0); | ||||||
|             offset += (phantomNodePair.targetPhantom.isBidirected() ? std::max(phantomNodePair.targetPhantom.weight1, phantomNodePair.targetPhantom.weight2) : phantomNodePair.targetPhantom.weight1) ; |             const int reverse_offset = phantomNodePair.targetPhantom.weight1 + (phantomNodePair.targetPhantom.isBidirected() ? phantomNodePair.targetPhantom.weight2 : 0); | ||||||
| 
 | 
 | ||||||
|             //run two-Target Dijkstra routing step.
 |             //run two-Target Dijkstra routing step.
 | ||||||
|             while(forwardHeap->Size() + backwardHeap->Size() > 0){ |             while(0 < (forward_heap1.Size() + reverse_heap1.Size() )){ | ||||||
|                 if(forwardHeap->Size() > 0){ |                 if(0 < forward_heap1.Size()){ | ||||||
|                     super::RoutingStep(forwardHeap, backwardHeap, &middle1, &_localUpperbound1, 2*offset, true); |                     super::RoutingStep(forward_heap1, reverse_heap1, &middle1, &_localUpperbound1, forward_offset, true); | ||||||
|                 } |                 } | ||||||
|                 if(backwardHeap->Size() > 0){ |                 if(0 < reverse_heap1.Size() ){ | ||||||
|                     super::RoutingStep(backwardHeap, forwardHeap, &middle1, &_localUpperbound1, 2*offset, false); |                     super::RoutingStep(reverse_heap1, forward_heap1, &middle1, &_localUpperbound1, reverse_offset, false); | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|             if(backwardHeap2->Size() > 0) { |             if(0 < reverse_heap2.Size()) { | ||||||
|                 while(forwardHeap2->Size() + backwardHeap2->Size() > 0){ |                 while(0 < (forward_heap2.Size() + reverse_heap2.Size() )){ | ||||||
|                     if(forwardHeap2->Size() > 0){ |                     if(0 < forward_heap2.Size()){ | ||||||
|                         super::RoutingStep(forwardHeap2, backwardHeap2, &middle2, &_localUpperbound2, 2*offset, true); |                         super::RoutingStep(forward_heap2, reverse_heap2, &middle2, &_localUpperbound2, forward_offset, true); | ||||||
|                     } |                     } | ||||||
|                     if(backwardHeap2->Size() > 0){ |                     if(0 < reverse_heap2.Size()){ | ||||||
|                         super::RoutingStep(backwardHeap2, forwardHeap2, &middle2, &_localUpperbound2, 2*offset, false); |                         super::RoutingStep(reverse_heap2, forward_heap2, &middle2, &_localUpperbound2, reverse_offset, false); | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| //          INFO("upperbound1: " << _localUpperbound1 << ", distance1: " << distance1);
 |  | ||||||
| //          INFO("upperbound2: " << _localUpperbound2 << ", distance2: " << distance2);
 |  | ||||||
| 
 | 
 | ||||||
|             //No path found for both target nodes?
 |             //No path found for both target nodes?
 | ||||||
|             if(INT_MAX == _localUpperbound1 && INT_MAX == _localUpperbound2) { |             if((INT_MAX == _localUpperbound1) && (INT_MAX == _localUpperbound2)) { | ||||||
|                 rawRouteData.lengthOfShortestPath = rawRouteData.lengthOfAlternativePath = INT_MAX; |                 rawRouteData.lengthOfShortestPath = rawRouteData.lengthOfAlternativePath = INT_MAX; | ||||||
|                 return; |                 return; | ||||||
|             } |             } | ||||||
|             if(UINT_MAX == middle1) { |             if(UINT_MAX == middle1) { | ||||||
|                 searchFrom1stStartNode = false; |                 searchFrom1stStartNode = false; | ||||||
| //              INFO("Next Search will not start from 1st");
 |  | ||||||
|             } else { |  | ||||||
| //              INFO("Next Search will start from 1st");
 |  | ||||||
|                 searchFrom1stStartNode = true; |  | ||||||
|             } |             } | ||||||
|             if(UINT_MAX == middle2) { |             if(UINT_MAX == middle2) { | ||||||
|                 searchFrom2ndStartNode = false; |                 searchFrom2ndStartNode = false; | ||||||
| //              INFO("Next Search will not start from 2nd");
 |  | ||||||
|             } else { |  | ||||||
|                 searchFrom2ndStartNode = true; |  | ||||||
| //              INFO("Next Search will start from 2nd");
 |  | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             //Was at most one of the two paths not found?
 |             //Was at most one of the two paths not found?
 | ||||||
|             assert(!(INT_MAX == distance1 && INT_MAX == distance2)); |             assert(!(INT_MAX == distance1 && INT_MAX == distance2)); | ||||||
| 
 | 
 | ||||||
| //          INFO("middle1: " << middle1);
 |  | ||||||
| 
 |  | ||||||
|             //Unpack paths if they exist
 |             //Unpack paths if they exist
 | ||||||
|             std::vector<NodeID> temporaryPackedPath1; |             std::vector<NodeID> temporaryPackedPath1; | ||||||
|             std::vector<NodeID> temporaryPackedPath2; |             std::vector<NodeID> temporaryPackedPath2; | ||||||
|             if(INT_MAX != _localUpperbound1) { |             if(INT_MAX != _localUpperbound1) { | ||||||
|                 super::RetrievePackedPathFromHeap(forwardHeap, backwardHeap, middle1, temporaryPackedPath1); |                 super::RetrievePackedPathFromHeap(forward_heap1, reverse_heap1, middle1, temporaryPackedPath1); | ||||||
| //              INFO("temporaryPackedPath1 ends with " << *(temporaryPackedPath1.end()-1) );
 |  | ||||||
|             } |             } | ||||||
| //          INFO("middle2: " << middle2);
 |  | ||||||
| 
 | 
 | ||||||
|             if(INT_MAX != _localUpperbound2) { |             if(INT_MAX != _localUpperbound2) { | ||||||
|                 super::RetrievePackedPathFromHeap(forwardHeap2, backwardHeap2, middle2, temporaryPackedPath2); |                 super::RetrievePackedPathFromHeap(forward_heap2, reverse_heap2, middle2, temporaryPackedPath2); | ||||||
| //                INFO("temporaryPackedPath2 ends with " << *(temporaryPackedPath2.end()-1) );
 |  | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             //if one of the paths was not found, replace it with the other one.
 |             //if one of the paths was not found, replace it with the other one.
 | ||||||
|             if(0 == temporaryPackedPath1.size()) { |             if(0 == temporaryPackedPath1.size()) { | ||||||
| //              INFO("Deleting path 1");
 |  | ||||||
|                 temporaryPackedPath1.insert(temporaryPackedPath1.end(), temporaryPackedPath2.begin(), temporaryPackedPath2.end()); |                 temporaryPackedPath1.insert(temporaryPackedPath1.end(), temporaryPackedPath2.begin(), temporaryPackedPath2.end()); | ||||||
|                 _localUpperbound1 = _localUpperbound2; |                 _localUpperbound1 = _localUpperbound2; | ||||||
|             } |             } | ||||||
|             if(0 == temporaryPackedPath2.size()) { |             if(0 == temporaryPackedPath2.size()) { | ||||||
| //              INFO("Deleting path 2");
 |  | ||||||
|                 temporaryPackedPath2.insert(temporaryPackedPath2.end(), temporaryPackedPath1.begin(), temporaryPackedPath1.end()); |                 temporaryPackedPath2.insert(temporaryPackedPath2.end(), temporaryPackedPath1.begin(), temporaryPackedPath1.end()); | ||||||
|                 _localUpperbound2 = _localUpperbound1; |                 _localUpperbound2 = _localUpperbound1; | ||||||
|             } |             } | ||||||
| @ -170,28 +148,21 @@ public: | |||||||
| 
 | 
 | ||||||
|             //Plug paths together, s.t. end of packed path is begin of temporary packed path
 |             //Plug paths together, s.t. end of packed path is begin of temporary packed path
 | ||||||
|             if(0 < packedPath1.size() && 0 < packedPath2.size() ) { |             if(0 < packedPath1.size() && 0 < packedPath2.size() ) { | ||||||
| //              INFO("Both paths are non-empty");
 |  | ||||||
|                 if( *(temporaryPackedPath1.begin()) == *(temporaryPackedPath2.begin())) { |                 if( *(temporaryPackedPath1.begin()) == *(temporaryPackedPath2.begin())) { | ||||||
| //                  INFO("both paths start with the same node:" << *(temporaryPackedPath1.begin()));
 |  | ||||||
|                     //both new route segments start with the same node, thus one of the packedPath must go.
 |                     //both new route segments start with the same node, thus one of the packedPath must go.
 | ||||||
|                     assert( (packedPath1.size() == packedPath2.size() ) || (*(packedPath1.end()-1) != *(packedPath2.end()-1)) ); |                     assert( (packedPath1.size() == packedPath2.size() ) || (*(packedPath1.end()-1) != *(packedPath2.end()-1)) ); | ||||||
|                     if( *(packedPath1.end()-1) == *(temporaryPackedPath1.begin())) { |                     if( *(packedPath1.end()-1) == *(temporaryPackedPath1.begin())) { | ||||||
| //                      INFO("Deleting packedPath2 that ends with " << *(packedPath2.end()-1) << ", other ends with " << *(packedPath1.end()-1));
 |  | ||||||
|                         packedPath2.clear(); |                         packedPath2.clear(); | ||||||
|                         packedPath2.insert(packedPath2.end(), packedPath1.begin(), packedPath1.end()); |                         packedPath2.insert(packedPath2.end(), packedPath1.begin(), packedPath1.end()); | ||||||
|                         distance2 = distance1; |                         distance2 = distance1; | ||||||
| //                      INFO("packedPath2 now ends with " <<  *(packedPath2.end()-1));
 |  | ||||||
|                     } else { |                     } else { | ||||||
| //                      INFO("Deleting path1 that ends with " << *(packedPath1.end()-1) << ", other ends with " << *(packedPath2.end()-1));
 |  | ||||||
|                         packedPath1.clear(); |                         packedPath1.clear(); | ||||||
|                         packedPath1.insert(packedPath1.end(), packedPath2.begin(), packedPath2.end()); |                         packedPath1.insert(packedPath1.end(), packedPath2.begin(), packedPath2.end()); | ||||||
|                         distance1 = distance2; |                         distance1 = distance2; | ||||||
| //                      INFO("Path1 now ends with " <<  *(packedPath1.end()-1));
 |  | ||||||
|                     } |                     } | ||||||
|                 } else  { |                 } else  { | ||||||
|                     //packed paths 1 and 2 may need to switch.
 |                     //packed paths 1 and 2 may need to switch.
 | ||||||
|                     if(*(packedPath1.end()-1) != *(temporaryPackedPath1.begin())) { |                     if(*(packedPath1.end()-1) != *(temporaryPackedPath1.begin())) { | ||||||
| //                      INFO("Switching");
 |  | ||||||
|                         packedPath1.swap(packedPath2); |                         packedPath1.swap(packedPath2); | ||||||
|                         std::swap(distance1, distance2); |                         std::swap(distance1, distance2); | ||||||
|                     } |                     } | ||||||
| @ -201,61 +172,24 @@ public: | |||||||
|             packedPath2.insert(packedPath2.end(), temporaryPackedPath2.begin(), temporaryPackedPath2.end()); |             packedPath2.insert(packedPath2.end(), temporaryPackedPath2.begin(), temporaryPackedPath2.end()); | ||||||
| 
 | 
 | ||||||
|             if( (packedPath1.back() == packedPath2.back()) && phantomNodePair.targetPhantom.isBidirected() ) { |             if( (packedPath1.back() == packedPath2.back()) && phantomNodePair.targetPhantom.isBidirected() ) { | ||||||
| //              INFO("both paths end in same direction on bidirected edge, make sure start only start with : " << packedPath1.back());
 |  | ||||||
| 
 | 
 | ||||||
|                 NodeID lastNodeID = packedPath2.back(); |                 NodeID lastNodeID = packedPath2.back(); | ||||||
|                 searchFrom1stStartNode &= !(lastNodeID == phantomNodePair.targetPhantom.edgeBasedNode+1); |                 searchFrom1stStartNode &= !(lastNodeID == phantomNodePair.targetPhantom.edgeBasedNode+1); | ||||||
|                 searchFrom2ndStartNode &= !(lastNodeID == phantomNodePair.targetPhantom.edgeBasedNode); |                 searchFrom2ndStartNode &= !(lastNodeID == phantomNodePair.targetPhantom.edgeBasedNode); | ||||||
| //                INFO("Next search from node " << phantomNodePair.targetPhantom.edgeBasedNode << ": " << (searchFrom1stStartNode ? "yes" : "no") );
 |  | ||||||
| //                INFO("Next search from node " << phantomNodePair.targetPhantom.edgeBasedNode+1 << ": " << (searchFrom2ndStartNode ? "yes" : "no") );
 |  | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|             distance1 += _localUpperbound1; |             distance1 += _localUpperbound1; | ||||||
|             distance2 += _localUpperbound2; |             distance2 += _localUpperbound2; | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
| //      INFO("length path1: " << distance1);
 |         if(distance1 > distance2){ | ||||||
| //      INFO("length path2: " << distance2);
 |  | ||||||
|         if(distance1 <= distance2){ |  | ||||||
|             //remove consecutive duplicates
 |  | ||||||
| //          std::cout << "unclean 1: ";
 |  | ||||||
| //          for(unsigned i = 0; i < packedPath1.size(); ++i)
 |  | ||||||
| //              std::cout << packedPath1[i] << " ";
 |  | ||||||
| //          std::cout << std::endl;
 |  | ||||||
| 
 |  | ||||||
| //          std::cout << "cleaned 1: ";
 |  | ||||||
| //          for(unsigned i = 0; i < packedPath1.size(); ++i)
 |  | ||||||
| //              std::cout << packedPath1[i] << " ";
 |  | ||||||
| //          std::cout << std::endl;
 |  | ||||||
| //            super::UnpackPath(packedPath1, rawRouteData.computedShortestPath);
 |  | ||||||
|         } else { |  | ||||||
|             std::swap(packedPath1, packedPath2); |             std::swap(packedPath1, packedPath2); | ||||||
| //          std::cout << "unclean 2: ";
 |  | ||||||
| //          for(unsigned i = 0; i < packedPath2.size(); ++i)
 |  | ||||||
| //              std::cout << packedPath2[i] << " ";
 |  | ||||||
| //          std::cout << std::endl;
 |  | ||||||
| //            _RemoveConsecutiveDuplicatesFromContainer(packedPath2);
 |  | ||||||
| //          std::cout << "cleaned 2: ";
 |  | ||||||
| //          for(unsigned i = 0; i < packedPath2.size(); ++i)
 |  | ||||||
| //              std::cout << packedPath2[i] << " ";
 |  | ||||||
| //          std::cout << std::endl;
 |  | ||||||
| //            super::UnpackPath(packedPath2, unpackedPath);
 |  | ||||||
|         } |         } | ||||||
|         _RemoveConsecutiveDuplicatesFromContainer(packedPath1); |         remove_consecutive_duplicates_from_vector(packedPath1); | ||||||
|         super::UnpackPath(packedPath1, rawRouteData.computedShortestPath); |         super::UnpackPath(packedPath1, rawRouteData.computedShortestPath); | ||||||
|         rawRouteData.lengthOfShortestPath = std::min(distance1, distance2); |         rawRouteData.lengthOfShortestPath = std::min(distance1, distance2); | ||||||
| //      INFO("Found via route with distance " << std::min(distance1, distance2));
 |  | ||||||
|         return; |         return; | ||||||
|     } |     } | ||||||
| private: |  | ||||||
|     template<class ContainerT> |  | ||||||
|     void _RemoveConsecutiveDuplicatesFromContainer(ContainerT & packedPath) { |  | ||||||
|         //remove consecutive duplicates
 |  | ||||||
|         typename ContainerT::iterator it; |  | ||||||
|         // using default comparison:
 |  | ||||||
|         it = std::unique(packedPath.begin(), packedPath.end()); |  | ||||||
|         packedPath.resize(it - packedPath.begin()); |  | ||||||
|     } |  | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| #endif /* SHORTESTPATHROUTING_H_ */ | #endif /* SHORTESTPATHROUTING_H_ */ | ||||||
|  | |||||||
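Per leg, the freshly retrieved packed paths are appended to packedPath1/packedPath2; since each leg starts at the edge-based node the previous leg ended on, the join node appears twice, which is why the packed path is run through remove_consecutive_duplicates_from_vector before unpacking. A small illustration with made-up node IDs:

    #include <algorithm>
    #include <iostream>
    #include <vector>

    typedef unsigned NodeID;

    int main() {
        // Leg 1 ends at via node 17, leg 2 starts at the same edge-based node.
        NodeID leg1[] = { 4, 9, 17 };
        NodeID leg2[] = { 17, 23, 42 };

        std::vector<NodeID> packed_path;
        packed_path.insert(packed_path.end(), leg1, leg1 + 3);
        packed_path.insert(packed_path.end(), leg2, leg2 + 3);   // 4 9 17 17 23 42

        // Same operation as remove_consecutive_duplicates_from_vector in Util/ContainerUtils.h.
        packed_path.resize(std::unique(packed_path.begin(), packed_path.end()) - packed_path.begin());

        for (unsigned i = 0; i < packed_path.size(); ++i)
            std::cout << packed_path[i] << " ";                  // prints: 4 9 17 23 42
        std::cout << std::endl;
        return 0;
    }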
| @ -87,7 +87,7 @@ if "clang" in env["CXX"]: | |||||||
|     if GetOption('allflags') is not None: |     if GetOption('allflags') is not None: | ||||||
|         env.Append(CXXFLAGS = ["-W#warnings", "-Wc++0x-compat", "-Waddress-of-temporary", "-Wambiguous-member-template", "-Warray-bounds", "-Watomic-properties", "-Wbind-to-temporary-copy", "-Wbuiltin-macro-redefined", "-Wc++-compat", "-Wc++0x-extensions", "-Wcomments", "-Wconditional-uninitialized", "-Wconstant-logical-operand", "-Wdeclaration-after-statement", "-Wdeprecated", "-Wdeprecated-implementations", "-Wdeprecated-writable-strings", "-Wduplicate-method-arg", "-Wempty-body", "-Wendif-labels", "-Wenum-compare", "-Wformat=2", "-Wfour-char-constants", "-Wgnu", "-Wincomplete-implementation", "-Winvalid-noreturn", "-Winvalid-offsetof", "-Winvalid-token-paste", "-Wlocal-type-template-args", "-Wmethod-signatures", "-Wmicrosoft", "-Wmissing-declarations", "-Wnon-pod-varargs", "-Wnonfragile-abi2", "-Wnull-dereference", "-Wout-of-line-declaration", "-Woverlength-strings", "-Wpacked", "-Wpointer-arith", "-Wpointer-sign", "-Wprotocol", "-Wreadonly-setter-attrs", "-Wselector", "-Wshift-overflow", "-Wshift-sign-overflow", "-Wstrict-selector-match", "-Wsuper-class-method-mismatch", "-Wtautological-compare", "-Wtypedef-redefinition", "-Wundeclared-selector", "-Wunnamed-type-template-args", "-Wunused-exception-parameter", "-Wunused-member-function", "-Wused-but-marked-unused", "-Wvariadic-macros"]) |         env.Append(CXXFLAGS = ["-W#warnings", "-Wc++0x-compat", "-Waddress-of-temporary", "-Wambiguous-member-template", "-Warray-bounds", "-Watomic-properties", "-Wbind-to-temporary-copy", "-Wbuiltin-macro-redefined", "-Wc++-compat", "-Wc++0x-extensions", "-Wcomments", "-Wconditional-uninitialized", "-Wconstant-logical-operand", "-Wdeclaration-after-statement", "-Wdeprecated", "-Wdeprecated-implementations", "-Wdeprecated-writable-strings", "-Wduplicate-method-arg", "-Wempty-body", "-Wendif-labels", "-Wenum-compare", "-Wformat=2", "-Wfour-char-constants", "-Wgnu", "-Wincomplete-implementation", "-Winvalid-noreturn", "-Winvalid-offsetof", "-Winvalid-token-paste", "-Wlocal-type-template-args", "-Wmethod-signatures", "-Wmicrosoft", "-Wmissing-declarations", "-Wnon-pod-varargs", "-Wnonfragile-abi2", "-Wnull-dereference", "-Wout-of-line-declaration", "-Woverlength-strings", "-Wpacked", "-Wpointer-arith", "-Wpointer-sign", "-Wprotocol", "-Wreadonly-setter-attrs", "-Wselector", "-Wshift-overflow", "-Wshift-sign-overflow", "-Wstrict-selector-match", "-Wsuper-class-method-mismatch", "-Wtautological-compare", "-Wtypedef-redefinition", "-Wundeclared-selector", "-Wunnamed-type-template-args", "-Wunused-exception-parameter", "-Wunused-member-function", "-Wused-but-marked-unused", "-Wvariadic-macros"]) | ||||||
| else: | else: | ||||||
| 	env.Append(CCFLAGS = ['-minline-all-stringops', '-fopenmp']) | 	env.Append(CCFLAGS = ['-minline-all-stringops', '-fopenmp', '-Wall']) | ||||||
| 	env.Append(LINKFLAGS = '-fopenmp') | 	env.Append(LINKFLAGS = '-fopenmp') | ||||||
| 
 | 
 | ||||||
| if GetOption('buildconfiguration') == 'debug': | if GetOption('buildconfiguration') == 'debug': | ||||||
|  | |||||||
| @ -33,7 +33,7 @@ template <typename Iterator, class HandlerT> | |||||||
| struct APIGrammar : qi::grammar<Iterator> { | struct APIGrammar : qi::grammar<Iterator> { | ||||||
|     APIGrammar(HandlerT * h) : APIGrammar::base_type(api_call), handler(h) { |     APIGrammar(HandlerT * h) : APIGrammar::base_type(api_call), handler(h) { | ||||||
|         api_call = qi::lit('/') >> string[boost::bind(&HandlerT::setService, handler, ::_1)] >> *(query); |         api_call = qi::lit('/') >> string[boost::bind(&HandlerT::setService, handler, ::_1)] >> *(query); | ||||||
|         query    = ('?') >> (+(zoom | output | jsonp | checksum | location | hint | cmp | language | instruction | alt_route | old_API) ) ; |         query    = ('?') >> (+(zoom | output | jsonp | checksum | location | hint | cmp | language | instruction | geometry | alt_route | old_API) ) ; | ||||||
| 
 | 
 | ||||||
|         zoom        = (-qi::lit('&')) >> qi::lit('z')            >> '=' >> qi::short_[boost::bind(&HandlerT::setZoomLevel, handler, ::_1)]; |         zoom        = (-qi::lit('&')) >> qi::lit('z')            >> '=' >> qi::short_[boost::bind(&HandlerT::setZoomLevel, handler, ::_1)]; | ||||||
|         output      = (-qi::lit('&')) >> qi::lit("output")       >> '=' >> string[boost::bind(&HandlerT::setOutputFormat, handler, ::_1)]; |         output      = (-qi::lit('&')) >> qi::lit("output")       >> '=' >> string[boost::bind(&HandlerT::setOutputFormat, handler, ::_1)]; | ||||||
|  | |||||||
| @ -21,8 +21,11 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #ifndef BASIC_DATASTRUCTURES_H | #ifndef BASIC_DATASTRUCTURES_H | ||||||
| #define BASIC_DATASTRUCTURES_H | #define BASIC_DATASTRUCTURES_H | ||||||
| #include <string> | #include <string> | ||||||
|  | #include <boost/foreach.hpp> | ||||||
| #include <boost/lexical_cast.hpp> | #include <boost/lexical_cast.hpp> | ||||||
| 
 | 
 | ||||||
|  | #include "../Util/StringUtil.h" | ||||||
|  | 
 | ||||||
| namespace http { | namespace http { | ||||||
| 
 | 
 | ||||||
| const std::string okString 					= "HTTP/1.0 200 OK\r\n"; | const std::string okString 					= "HTTP/1.0 200 OK\r\n"; | ||||||
| @ -70,15 +73,13 @@ struct Reply { | |||||||
|     std::vector<boost::asio::const_buffer> HeaderstoBuffers(); |     std::vector<boost::asio::const_buffer> HeaderstoBuffers(); | ||||||
| 	std::string content; | 	std::string content; | ||||||
| 	static Reply stockReply(status_type status); | 	static Reply stockReply(status_type status); | ||||||
| 	void setSize(unsigned size) { | 	void setSize(const unsigned size) { | ||||||
| 	    for (std::size_t i = 0; i < headers.size(); ++i) { | 		BOOST_FOREACH ( Header& h,  headers) { | ||||||
| 	            Header& h = headers[i]; | 			if("Content-Length" == h.name) { | ||||||
| 	            if("Content-Length" == h.name) { | 				std::string sizeString; | ||||||
| 	                std::stringstream sizeString; | 				intToString(size,h.value ); | ||||||
| 	                sizeString << size; | 			} | ||||||
| 	                h.value = sizeString.str(); | 		} | ||||||
| 	            } |  | ||||||
| 	    } |  | ||||||
| 	} | 	} | ||||||
| }; | }; | ||||||
| 
 | 
 | ||||||
| @ -138,11 +139,13 @@ Reply Reply::stockReply(Reply::status_type status) { | |||||||
| 	Reply rep; | 	Reply rep; | ||||||
| 	rep.status = status; | 	rep.status = status; | ||||||
| 	rep.content = ToString(status); | 	rep.content = ToString(status); | ||||||
| 	rep.headers.resize(2); | 	rep.headers.resize(3);	 | ||||||
| 	rep.headers[0].name = "Content-Length"; | 	rep.headers[0].name = "Access-Control-Allow-Origin"; | ||||||
| 	rep.headers[0].value = boost::lexical_cast<std::string>(rep.content.size()); | 	rep.headers[0].value = "*"; | ||||||
| 	rep.headers[1].name = "Content-Type"; | 	rep.headers[1].name = "Content-Length"; | ||||||
| 	rep.headers[1].value = "text/html"; | 	rep.headers[1].value = boost::lexical_cast<std::string>(rep.content.size()); | ||||||
|  | 	rep.headers[2].name = "Content-Type"; | ||||||
|  | 	rep.headers[2].value = "text/html"; | ||||||
| 	return rep; | 	return rep; | ||||||
| } | } | ||||||
| } // namespace http
 | } // namespace http
 | ||||||
|  | |||||||
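A usage sketch for the reworked Reply (include path and build setup are assumptions, not part of the commit): stockReply now emits three headers, the first being the wildcard CORS header, and setSize rewrites the Content-Length value via intToString; note the local sizeString in the new setSize body is left unused.

    #include <iostream>
    #include "Server/BasicDatastructures.h"   // assumed location of the header shown above

    int main() {
        http::Reply rep = http::Reply::stockReply(http::Reply::badRequest);
        rep.content += "<br>additional diagnostics";
        rep.setSize(rep.content.size());      // updates the "Content-Length" header in place

        // Expected headers after this commit:
        //   Access-Control-Allow-Origin: *
        //   Content-Length: <updated size>
        //   Content-Type: text/html
        for (unsigned i = 0; i < rep.headers.size(); ++i)
            std::cout << rep.headers[i].name << ": " << rep.headers[i].value << "\n";
        return 0;
    }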
| @ -30,7 +30,6 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include <boost/shared_ptr.hpp> | #include <boost/shared_ptr.hpp> | ||||||
| #include <boost/enable_shared_from_this.hpp> | #include <boost/enable_shared_from_this.hpp> | ||||||
| 
 | 
 | ||||||
| #include "../DataStructures/Util.h" |  | ||||||
| #include "BasicDatastructures.h" | #include "BasicDatastructures.h" | ||||||
| #include "RequestHandler.h" | #include "RequestHandler.h" | ||||||
| #include "RequestParser.h" | #include "RequestParser.h" | ||||||
|  | |||||||
| @ -33,6 +33,7 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include "../DataStructures/HashTable.h" | #include "../DataStructures/HashTable.h" | ||||||
| #include "../Plugins/BasePlugin.h" | #include "../Plugins/BasePlugin.h" | ||||||
| #include "../Plugins/RouteParameters.h" | #include "../Plugins/RouteParameters.h" | ||||||
|  | #include "../Util/StringUtil.h" | ||||||
| #include "../typedefs.h" | #include "../typedefs.h" | ||||||
| 
 | 
 | ||||||
| namespace http { | namespace http { | ||||||
| @ -71,16 +72,17 @@ public: | |||||||
|             bool result = boost::spirit::qi::parse(it, request.end(), apiParser);    // returns true if successful
 |             bool result = boost::spirit::qi::parse(it, request.end(), apiParser);    // returns true if successful
 | ||||||
|             if (!result || (it != request.end()) ) { |             if (!result || (it != request.end()) ) { | ||||||
|                 rep = http::Reply::stockReply(http::Reply::badRequest); |                 rep = http::Reply::stockReply(http::Reply::badRequest); | ||||||
|                 std::stringstream content; |  | ||||||
|                 int position = std::distance(request.begin(), it); |                 int position = std::distance(request.begin(), it); | ||||||
|                 content << "Input seems to be malformed close to position " << position << "<br>"; |                 std::string tmp_position_string; | ||||||
|                 content << "<pre>"; |                 intToString(position, tmp_position_string); | ||||||
|                 content << req.uri << "<br>"; |                 rep.content += "Input seems to be malformed close to position "; | ||||||
|  |                 rep.content += "<br><pre>"; | ||||||
|  |                 rep.content += request; | ||||||
|  |                 rep.content += tmp_position_string; | ||||||
|  |                 rep.content += "<br>"; | ||||||
|                 for(unsigned i = 0, end = std::distance(request.begin(), it); i < end; ++i) |                 for(unsigned i = 0, end = std::distance(request.begin(), it); i < end; ++i) | ||||||
|                     content << " "; |                     rep.content += " "; | ||||||
|                 content << "^" << "<br>"; |                 rep.content += "^<br></pre>"; | ||||||
|                 content << "</pre>"; |  | ||||||
|                 rep.content += content.str(); |  | ||||||
|             } else { |             } else { | ||||||
|                 //Finished parsing, lets call the right plugin to handle the request
 |                 //Finished parsing, lets call the right plugin to handle the request
 | ||||||
|                 if(pluginMap.Holds(routeParameters.service)) { |                 if(pluginMap.Holds(routeParameters.service)) { | ||||||
|  | |||||||
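The error branch now assembles the reply body with plain string appends and intToString from Util/StringUtil.h (note the committed ordering appends the echoed request before the position number). intToString itself is not part of this diff; a plausible sketch, mirroring the karma-based doubleToString added further down, would be:

    #include <iostream>
    #include <iterator>
    #include <string>
    #include <boost/spirit/include/karma.hpp>

    // Assumed shape of intToString: generate the decimal digits directly into the output string.
    static inline void intToString(const int value, std::string & output) {
        output.clear();
        std::back_insert_iterator<std::string> sink(output);
        boost::spirit::karma::generate(sink, boost::spirit::karma::int_, value);
    }

    int main() {
        std::string position_string;
        intToString(23, position_string);
        std::cout << "Input seems to be malformed close to position " << position_string << std::endl;
        return 0;
    }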
							
								
								
									
Util/ContainerUtils.h (new file, 34 lines)
| @ -0,0 +1,34 @@ | |||||||
|  | /*
 | ||||||
|  |  * ContainerUtils.h | ||||||
|  |  * | ||||||
|  |  *  Created on: 02.02.2013 | ||||||
|  |  *      Author: dennis | ||||||
|  |  */ | ||||||
|  | 
 | ||||||
|  | #ifndef CONTAINERUTILS_H_ | ||||||
|  | #define CONTAINERUTILS_H_ | ||||||
|  | 
 | ||||||
|  | #include <algorithm> | ||||||
|  | #include <vector> | ||||||
|  | 
 | ||||||
|  | template<typename T> | ||||||
|  | inline void sort_unique_resize(std::vector<T> & vector) { | ||||||
|  | 	std::sort(vector.begin(), vector.end()); | ||||||
|  | 	unsigned number_of_unique_elements = std::unique(vector.begin(), vector.end()) - vector.begin(); | ||||||
|  | 	vector.resize(number_of_unique_elements); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | template<typename T> | ||||||
|  | inline void sort_unique_resize_shrink_vector(std::vector<T> & vector) { | ||||||
|  | 	sort_unique_resize(vector); | ||||||
|  | 	std::vector<T>().swap(vector); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | template<typename T> | ||||||
|  | inline void remove_consecutive_duplicates_from_vector(std::vector<T> & vector) { | ||||||
|  |     unsigned number_of_unique_elements = std::unique(vector.begin(), vector.end()) - vector.begin(); | ||||||
|  |     vector.resize(number_of_unique_elements); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | #endif /* CONTAINERUTILS_H_ */ | ||||||
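Usage of the new helpers (illustrative values). Note that sort_unique_resize_shrink_vector as committed swaps the vector with a default-constructed temporary, which empties it; the usual shrink-to-fit idiom would be std::vector<T>(vector).swap(vector).

    #include <iostream>
    #include <vector>
    #include "Util/ContainerUtils.h"

    int main() {
        std::vector<int> ids;
        ids.push_back(7); ids.push_back(3); ids.push_back(7);
        ids.push_back(1); ids.push_back(3);

        // Sort, drop duplicates, shrink the logical size: ids becomes {1, 3, 7}.
        sort_unique_resize(ids);

        for (unsigned i = 0; i < ids.size(); ++i)
            std::cout << ids[i] << " ";
        std::cout << std::endl;
        return 0;
    }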
| @ -101,7 +101,7 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector<EdgeT>& edgeL | |||||||
|     short type; |     short type; | ||||||
|     NodeID nameID; |     NodeID nameID; | ||||||
|     int length; |     int length; | ||||||
|     bool isRoundabout, ignoreInGrid, isAccessRestricted; |     bool isRoundabout, ignoreInGrid, isAccessRestricted, isContraFlow; | ||||||
| 
 | 
 | ||||||
|     for (EdgeID i=0; i<m; ++i) { |     for (EdgeID i=0; i<m; ++i) { | ||||||
|         in.read((char*)&source,             sizeof(unsigned)); |         in.read((char*)&source,             sizeof(unsigned)); | ||||||
| @ -114,6 +114,7 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector<EdgeT>& edgeL | |||||||
|         in.read((char*)&isRoundabout,       sizeof(bool)); |         in.read((char*)&isRoundabout,       sizeof(bool)); | ||||||
|         in.read((char*)&ignoreInGrid,       sizeof(bool)); |         in.read((char*)&ignoreInGrid,       sizeof(bool)); | ||||||
|         in.read((char*)&isAccessRestricted, sizeof(bool)); |         in.read((char*)&isAccessRestricted, sizeof(bool)); | ||||||
|  |         in.read((char*)&isContraFlow,       sizeof(bool)); | ||||||
| 
 | 
 | ||||||
|         GUARANTEE(length > 0, "loaded null length edge" ); |         GUARANTEE(length > 0, "loaded null length edge" ); | ||||||
|         GUARANTEE(weight > 0, "loaded null weight"); |         GUARANTEE(weight > 0, "loaded null weight"); | ||||||
| @ -150,7 +151,7 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector<EdgeT>& edgeL | |||||||
|             std::swap(forward, backward); |             std::swap(forward, backward); | ||||||
|         } |         } | ||||||
| 
 | 
 | ||||||
|         EdgeT inputEdge(source, target, nameID, weight, forward, backward, type, isRoundabout, ignoreInGrid, isAccessRestricted ); |         EdgeT inputEdge(source, target, nameID, weight, forward, backward, type, isRoundabout, ignoreInGrid, isAccessRestricted, isContraFlow ); | ||||||
|         edgeList.push_back(inputEdge); |         edgeList.push_back(inputEdge); | ||||||
|     } |     } | ||||||
|     std::sort(edgeList.begin(), edgeList.end()); |     std::sort(edgeList.begin(), edgeList.end()); | ||||||
| @ -169,14 +170,14 @@ NodeID readBinaryOSRMGraphFromStream(std::istream &in, std::vector<EdgeT>& edgeL | |||||||
|                     edgeList[i]._source = UINT_MAX; |                     edgeList[i]._source = UINT_MAX; | ||||||
|                 } else { |                 } else { | ||||||
|                     //edge i-1 is open in both directions, but edge i is smaller in one direction. Close edge i-1 in this direction
 |                     //edge i-1 is open in both directions, but edge i is smaller in one direction. Close edge i-1 in this direction
 | ||||||
|                     edgeList[i-1].forward = ~edgeList[i].isForward(); |                     edgeList[i-1].forward = !edgeList[i].isForward(); | ||||||
|                     edgeList[i-1].backward = ~edgeList[i].isBackward(); |                     edgeList[i-1].backward = !edgeList[i].isBackward(); | ||||||
|                 } |                 } | ||||||
|             } else if (edgeFlagsAreSuperSet2) { |             } else if (edgeFlagsAreSuperSet2) { | ||||||
|                 if(edgeList[i-1].weight() <= edgeList[i].weight()) { |                 if(edgeList[i-1].weight() <= edgeList[i].weight()) { | ||||||
|                      //edge i-1 is smaller for one direction. edge i is open in both. close edge i in the other direction
 |                      //edge i-1 is smaller for one direction. edge i is open in both. close edge i in the other direction
 | ||||||
|                      edgeList[i].forward = ~edgeList[i-1].isForward(); |                      edgeList[i].forward = !edgeList[i-1].isForward(); | ||||||
|                      edgeList[i].backward = ~edgeList[i-1].isBackward(); |                      edgeList[i].backward = !edgeList[i-1].isBackward(); | ||||||
|                  } else { |                  } else { | ||||||
|                      //edge i is smaller and goes in both direction. Throw away edge i-1
 |                      //edge i is smaller and goes in both direction. Throw away edge i-1
 | ||||||
|                      edgeList[i-1]._source = UINT_MAX; |                      edgeList[i-1]._source = UINT_MAX; | ||||||
|  | |||||||
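The change from bitwise ~ to logical ! on the forward/backward flags matters because the flags are bools: ~ first promotes the bool to int, so the result is never zero and converting it back to bool always yields true. A two-line demonstration:

    #include <iostream>

    int main() {
        bool forward = true;
        bool with_bitwise_not = ~forward;   // ~1 == -2, non-zero, so this is true (~false == -1 is also true)
        bool with_logical_not = !forward;   // false, as intended
        std::cout << std::boolalpha << with_bitwise_not << " " << with_logical_not << std::endl;  // prints: true false
        return 0;
    }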
							
								
								
									
Util/LuaUtil.h (new file, 52 lines)
| @ -0,0 +1,52 @@ | |||||||
|  | /*
 | ||||||
|  |     open source routing machine | ||||||
|  |     Copyright (C) Dennis Luxen, others 2010 | ||||||
|  | 
 | ||||||
|  | This program is free software; you can redistribute it and/or modify | ||||||
|  | it under the terms of the GNU AFFERO General Public License as published by | ||||||
|  | the Free Software Foundation; either version 3 of the License, or | ||||||
|  | any later version. | ||||||
|  | 
 | ||||||
|  | This program is distributed in the hope that it will be useful, | ||||||
|  | but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | GNU General Public License for more details. | ||||||
|  | 
 | ||||||
|  | You should have received a copy of the GNU Affero General Public License | ||||||
|  | along with this program; if not, write to the Free Software | ||||||
|  | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | ||||||
|  | or see http://www.gnu.org/licenses/agpl.txt.
 | ||||||
|  |  */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | #ifndef LUAUTIL_H_ | ||||||
|  | #define LUAUTIL_H_ | ||||||
|  | 
 | ||||||
|  | #include <iostream> | ||||||
|  | #include <string> | ||||||
|  | #include <boost/filesystem/convenience.hpp> | ||||||
|  | 
 | ||||||
|  | template<typename T> | ||||||
|  | void LUA_print(T number) { | ||||||
|  |   std::cout << "[LUA] " << number << std::endl; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Check if the lua function <name> is defined
 | ||||||
|  | inline bool lua_function_exists(lua_State* lua_state, const char* name) { | ||||||
|  |     luabind::object g = luabind::globals(lua_state); | ||||||
|  |     luabind::object func = g[name]; | ||||||
|  |     return func && (luabind::type(func) == LUA_TFUNCTION); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Add the folder containing the script to the lua load path, so scripts can easily require() other lua scripts inside that folder, or subfolders.
 | ||||||
|  | // See http://lua-users.org/wiki/PackagePath for details on the package.path syntax.
 | ||||||
|  | inline void luaAddScriptFolderToLoadPath(lua_State* myLuaState, const char* fileName) { | ||||||
|  |     const boost::filesystem::path profilePath( fileName ); | ||||||
|  |     std::string folder = profilePath.parent_path().string(); | ||||||
|  |     //TODO: This code is most probably not Windows safe since it uses UNIX'ish path delimiters
 | ||||||
|  |     const std::string luaCode = "package.path = \"" + folder + "/?.lua;profiles/?.lua;\" .. package.path"; | ||||||
|  |     luaL_dostring( myLuaState, luaCode.c_str() ); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | #endif /* LUAUTIL_H_ */ | ||||||
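A build sketch for the new helpers, assuming Lua 5.1 and luabind are installed and linked; LuaUtil.h expects lua_State and luabind to be declared by the including translation unit:

    extern "C" {
    #include <lua.h>
    #include <lualib.h>
    #include <lauxlib.h>
    }
    #include <luabind/luabind.hpp>
    #include <iostream>
    #include "Util/LuaUtil.h"

    int main() {
        lua_State * lua_state = luaL_newstate();
        luaL_openlibs(lua_state);
        luabind::open(lua_state);   // luabind::globals() requires an opened state
        luaL_dostring(lua_state, "function node_function(node) return node end");

        std::cout << std::boolalpha
                  << lua_function_exists(lua_state, "node_function") << " "        // true
                  << lua_function_exists(lua_state, "way_function") << std::endl;  // false

        lua_close(lua_state);
        return 0;
    }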
| @ -30,8 +30,29 @@ extern "C" { | |||||||
| #include <windows.h> | #include <windows.h> | ||||||
| #endif | #endif | ||||||
| 
 | 
 | ||||||
| /* Returns the physical memory size in kilobytes */ | enum Endianness { | ||||||
| unsigned GetPhysicalmemory(void){ |     LittleEndian = 1, | ||||||
|  |     BigEndian = 2 | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //Function is optimized to a single 'mov eax,1' on GCC, clang and icc using -O3
 | ||||||
|  | inline Endianness getMachineEndianness() { | ||||||
|  |     int i(1); | ||||||
|  |     char *p = (char *) &i; | ||||||
|  |     if (1 == p[0]) | ||||||
|  |         return LittleEndian; | ||||||
|  |     return BigEndian; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Reverses Network Byte Order into something usable, compiles down to a bswap-mov combination
 | ||||||
|  | inline unsigned swapEndian(unsigned x) { | ||||||
|  |     if(getMachineEndianness() == LittleEndian) | ||||||
|  |         return ( (x>>24) | ((x<<8) & 0x00FF0000) | ((x>>8) & 0x0000FF00) | (x<<24) ); | ||||||
|  |     return x; | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Returns the physical memory size in kilobytes
 | ||||||
|  | inline unsigned GetPhysicalmemory(void){ | ||||||
| #if defined(SUN5) || defined(__linux__) | #if defined(SUN5) || defined(__linux__) | ||||||
| 	return (sysconf(_SC_PHYS_PAGES) * sysconf(_SC_PAGESIZE)); | 	return (sysconf(_SC_PHYS_PAGES) * sysconf(_SC_PAGESIZE)); | ||||||
| 	 | 	 | ||||||
| @ -61,4 +82,5 @@ unsigned GetPhysicalmemory(void){ | |||||||
| 
 | 
 | ||||||
| #endif | #endif | ||||||
| } | } | ||||||
| #endif | #endif // MACHINE_INFO_H
 | ||||||
|  | 
 | ||||||
|  | |||||||
| @ -21,14 +21,14 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #ifndef STRINGUTIL_H_ | #ifndef STRINGUTIL_H_ | ||||||
| #define STRINGUTIL_H_ | #define STRINGUTIL_H_ | ||||||
| 
 | 
 | ||||||
| #include <cstdio> |  | ||||||
| #include <cstdlib> |  | ||||||
| #include <string> | #include <string> | ||||||
| #include <sstream> | #include <boost/algorithm/string.hpp> | ||||||
| 
 | 
 | ||||||
| #include <boost/spirit/include/karma.hpp> | #include <boost/spirit/include/karma.hpp> | ||||||
| #include <boost/spirit/include/qi.hpp> | #include <boost/spirit/include/qi.hpp> | ||||||
| 
 | 
 | ||||||
|  | #include <cstdio> | ||||||
|  | 
 | ||||||
| #include "../DataStructures/Coordinate.h" | #include "../DataStructures/Coordinate.h" | ||||||
| #include "../typedefs.h" | #include "../typedefs.h" | ||||||
| 
 | 
 | ||||||
| @ -80,6 +80,20 @@ static inline int stringToInt(const std::string& input) { | |||||||
|     return value; |     return value; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | static inline void doubleToString(const double value, std::string & output){ | ||||||
|  |     output.clear(); | ||||||
|  |     std::back_insert_iterator<std::string> sink(output); | ||||||
|  |     boost::spirit::karma::generate(sink, boost::spirit::karma::double_, value); | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | static inline void doubleToStringWithTwoDigitsBehindComma(const double value, std::string & output){ | ||||||
|  |     // The largest 32-bit integer is 4294967295, that is 10 chars
 | ||||||
|  |     // On the safe side, add 1 for sign, and 1 for trailing zero
 | ||||||
|  |     char buffer[12] ; | ||||||
|  |     sprintf(buffer, "%g", value) ; | ||||||
|  |     output = buffer ; | ||||||
|  | } | ||||||
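A brief usage sketch (illustrative only). Despite its name, the second helper relies on sprintf's "%g" conversion, so it prints the shortest representation with the default precision of six significant digits rather than exactly two decimals:

    std::string out;
    doubleToString(3.25, out);                               // karma::double_ -> "3.25"
    doubleToStringWithTwoDigitsBehindComma(3.14159265, out); // "%g" -> "3.14159"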
|  | 
 | ||||||
| static inline void convertInternalLatLonToString(const int value, std::string & output) { | static inline void convertInternalLatLonToString(const int value, std::string & output) { | ||||||
|     char buffer[100]; |     char buffer[100]; | ||||||
|     buffer[10] = 0; // null termination
 |     buffer[10] = 0; // null termination
 | ||||||
| @ -106,70 +120,38 @@ static inline void convertInternalReversedCoordinateToString(const _Coordinate & | |||||||
|     output += " "; |     output += " "; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| static inline void doubleToString(const double value, std::string & output){ | inline void replaceAll(std::string &s, const std::string &sub, const std::string &other) { | ||||||
|     // The largest 32-bit integer is 4294967295, that is 10 chars
 | 	boost::replace_all(s, sub, other); | ||||||
|     // On the safe side, add 1 for sign, and 1 for trailing zero
 |  | ||||||
|     char buffer[12] ; |  | ||||||
|     sprintf(buffer, "%f", value) ; |  | ||||||
|     output = buffer ; |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| static inline void doubleToStringWithTwoDigitsBehindComma(const double value, std::string & output){ |  | ||||||
|     // The largest 32-bit integer is 4294967295, that is 10 chars
 |  | ||||||
|     // On the safe side, add 1 for sign, and 1 for trailing zero
 |  | ||||||
|     char buffer[12] ; |  | ||||||
|     sprintf(buffer, "%g", value) ; |  | ||||||
|     output = buffer ; |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| inline std::string & replaceAll(std::string &s, const std::string &sub, const std::string &other) { |  | ||||||
|     assert(!sub.empty()); |  | ||||||
|     size_t b = 0; |  | ||||||
|     for (;;) { |  | ||||||
|         b = s.find(sub, b); |  | ||||||
|         if (b == s.npos) break; |  | ||||||
|         s.replace(b, sub.size(), other); |  | ||||||
|         b += other.size(); |  | ||||||
|     } |  | ||||||
|     return s; |  | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| inline void stringSplit(const std::string &s, const char delim, std::vector<std::string>& result) { | inline void stringSplit(const std::string &s, const char delim, std::vector<std::string>& result) { | ||||||
|     std::stringstream ss(s); | 	boost::split(result, s, boost::is_any_of(std::string(1, delim))); // build a one-character delimiter string (std::string(&delim) is not null-terminated) | ||||||
|     std::string item; |  | ||||||
|     while(std::getline(ss, item, delim)) { |  | ||||||
|         if(item.size() > 0) |  | ||||||
|             result.push_back(item); |  | ||||||
|     } |  | ||||||
| } | } | ||||||
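A usage sketch for the boost-based rewrite (illustrative only). One behavioural note: the old getline loop skipped empty tokens, whereas boost::split keeps them, so consecutive delimiters now yield empty strings:

    std::string line("alpha,beta,,gamma");
    std::vector<std::string> tokens;
    stringSplit(line, ',', tokens);
    // with boost::split: tokens == {"alpha", "beta", "", "gamma"}
    // the previous implementation would have dropped the empty third token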
| 
 | 
 | ||||||
| 
 |  | ||||||
| static std::string originals[] = {"&", "\"",  "<",  ">", "'", "[", "]", "\\"}; | static std::string originals[] = {"&", "\"",  "<",  ">", "'", "[", "]", "\\"}; | ||||||
| static std::string entities[] = {"&amp;", "&quot;", "&lt;", "&gt;", "&#39;", "&91;", "&93;", "&#92;" }; | static std::string entities[] = {"&amp;", "&quot;", "&lt;", "&gt;", "&#39;", "&91;", "&93;", "&#92;" }; | ||||||
| 
 | 
 | ||||||
| inline std::string HTMLEntitize( std::string result) { | inline std::string HTMLEntitize( const std::string & input) { | ||||||
|     for(unsigned i = 0; i < sizeof(originals)/sizeof(std::string); i++) { |     std::string result(input); | ||||||
|         result = replaceAll(result, originals[i], entities[i]); |     for(unsigned i = 0; i < sizeof(originals)/sizeof(std::string); ++i) { | ||||||
|  |         replaceAll(result, originals[i], entities[i]); | ||||||
|     } |     } | ||||||
|     return result; |     return result; | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| inline std::string HTMLDeEntitize( std::string result) { | inline std::string HTMLDeEntitize( std::string & result) { | ||||||
|     for(unsigned i = 0; i < sizeof(originals)/sizeof(std::string); i++) { |     for(unsigned i = 0; i < sizeof(originals)/sizeof(std::string); ++i) { | ||||||
|         result = replaceAll(result, entities[i], originals[i]); |         replaceAll(result, entities[i], originals[i]); | ||||||
|     } |     } | ||||||
|     return result; |     return result; | ||||||
| } | } | ||||||
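A round-trip sketch for the escaping helpers (illustrative only; the produced entities follow the table above):

    std::string raw("a < b & \"c\"");
    std::string escaped = HTMLEntitize(raw);        // e.g. "a &lt; b &amp; &quot;c&quot;"
    std::string decoded = HTMLDeEntitize(escaped);  // back to: a < b & "c"
    // note: HTMLDeEntitize also rewrites its argument in place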
| 
 | 
 | ||||||
| inline bool StringStartsWith(std::string & input, std::string & prefix) { | inline bool StringStartsWith(const std::string & input, const std::string & prefix) { | ||||||
|     return (input.find(prefix) == 0); |     return boost::starts_with(input, prefix); | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| 
 | // Function returns a 'random' filename in the temporary directory.
 | ||||||
| /*
 | // May not be platform independent.
 | ||||||
|  * Function returns a 'random' filename in temporary directors. |  | ||||||
|  * May not be platform independent. |  | ||||||
|  */ |  | ||||||
| inline void GetTemporaryFileName(std::string & filename) { | inline void GetTemporaryFileName(std::string & filename) { | ||||||
|     char buffer[L_tmpnam]; |     char buffer[L_tmpnam]; | ||||||
|     char * retPointer = tmpnam (buffer); |     char * retPointer = tmpnam (buffer); | ||||||
|  | |||||||
| @ -44,12 +44,11 @@ extern "C" { | |||||||
| #include "DataStructures/NNGrid.h" | #include "DataStructures/NNGrid.h" | ||||||
| #include "DataStructures/QueryEdge.h" | #include "DataStructures/QueryEdge.h" | ||||||
| #include "Util/BaseConfiguration.h" | #include "Util/BaseConfiguration.h" | ||||||
| #include "Util/InputFileUtil.h" |  | ||||||
| #include "Util/GraphLoader.h" | #include "Util/GraphLoader.h" | ||||||
|  | #include "Util/InputFileUtil.h" | ||||||
|  | #include "Util/LuaUtil.h" | ||||||
| #include "Util/StringUtil.h" | #include "Util/StringUtil.h" | ||||||
| 
 | 
 | ||||||
| using namespace std; |  | ||||||
| 
 |  | ||||||
| typedef QueryEdge::EdgeData EdgeData; | typedef QueryEdge::EdgeData EdgeData; | ||||||
| typedef DynamicGraph<EdgeData>::InputEdge InputEdge; | typedef DynamicGraph<EdgeData>::InputEdge InputEdge; | ||||||
| typedef StaticGraph<EdgeData>::InputEdge StaticEdge; | typedef StaticGraph<EdgeData>::InputEdge StaticEdge; | ||||||
| @ -59,6 +58,7 @@ std::vector<NodeInfo> internalToExternalNodeMapping; | |||||||
| std::vector<_Restriction> inputRestrictions; | std::vector<_Restriction> inputRestrictions; | ||||||
| std::vector<NodeID> bollardNodes; | std::vector<NodeID> bollardNodes; | ||||||
| std::vector<NodeID> trafficLightNodes; | std::vector<NodeID> trafficLightNodes; | ||||||
|  | std::vector<ImportEdge> edgeList; | ||||||
| 
 | 
 | ||||||
| int main (int argc, char *argv[]) { | int main (int argc, char *argv[]) { | ||||||
|     if(argc < 3) { |     if(argc < 3) { | ||||||
| @ -76,7 +76,7 @@ int main (int argc, char *argv[]) { | |||||||
|     omp_set_num_threads(numberOfThreads); |     omp_set_num_threads(numberOfThreads); | ||||||
| 
 | 
 | ||||||
|     INFO("Using restrictions from file: " << argv[2]); |     INFO("Using restrictions from file: " << argv[2]); | ||||||
|     std::ifstream restrictionsInstream(argv[2], ios::binary); |     std::ifstream restrictionsInstream(argv[2], std::ios::binary); | ||||||
|     if(!restrictionsInstream.good()) { |     if(!restrictionsInstream.good()) { | ||||||
|         ERR("Could not access <osrm-restrictions> files"); |         ERR("Could not access <osrm-restrictions> files"); | ||||||
|     } |     } | ||||||
| @ -93,12 +93,11 @@ int main (int argc, char *argv[]) { | |||||||
|         ERR("Cannot open " << argv[1]); |         ERR("Cannot open " << argv[1]); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|     char nodeOut[1024];         strcpy(nodeOut, argv[1]);           strcat(nodeOut, ".nodes"); |     std::string nodeOut(argv[1]);		nodeOut += ".nodes"; | ||||||
|     char edgeOut[1024];         strcpy(edgeOut, argv[1]);           strcat(edgeOut, ".edges"); |     std::string edgeOut(argv[1]);		edgeOut += ".edges"; | ||||||
|     char graphOut[1024];    	strcpy(graphOut, argv[1]);      	strcat(graphOut, ".hsgr"); |     std::string graphOut(argv[1]);		graphOut += ".hsgr"; | ||||||
|     char ramIndexOut[1024];    	strcpy(ramIndexOut, argv[1]);    	strcat(ramIndexOut, ".ramIndex"); |     std::string ramIndexOut(argv[1]);	ramIndexOut += ".ramIndex"; | ||||||
|     char fileIndexOut[1024];    strcpy(fileIndexOut, argv[1]);    	strcat(fileIndexOut, ".fileIndex"); |     std::string fileIndexOut(argv[1]);	fileIndexOut += ".fileIndex"; | ||||||
|     char levelInfoOut[1024];    strcpy(levelInfoOut, argv[1]);    	strcat(levelInfoOut, ".levels"); |  | ||||||
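For illustration (the input name "berlin.osrm" is hypothetical), the block above now derives every output name from the first positional argument via std::string, and the old ".levels" output is no longer produced:

    std::string base("berlin.osrm");            // stands in for argv[1]
    std::string nodeOut  = base + ".nodes";     // "berlin.osrm.nodes"
    std::string graphOut = base + ".hsgr";      // "berlin.osrm.hsgr"
    // likewise .edges, .ramIndex and .fileIndex; no ".levels" file is written any more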
| 
 | 
 | ||||||
|     /*** Setup Scripting Environment ***/ |     /*** Setup Scripting Environment ***/ | ||||||
|     if(!testDataFile( (argc > 3 ? argv[3] : "profile.lua") )) { |     if(!testDataFile( (argc > 3 ? argv[3] : "profile.lua") )) { | ||||||
| @ -111,6 +110,11 @@ int main (int argc, char *argv[]) { | |||||||
|     // Connect LuaBind to this lua state
 |     // Connect LuaBind to this lua state
 | ||||||
|     luabind::open(myLuaState); |     luabind::open(myLuaState); | ||||||
| 
 | 
 | ||||||
|  |     // open the standard Lua libraries (string, table, math, ...)
 | ||||||
|  |     luaL_openlibs(myLuaState); | ||||||
|  | 
 | ||||||
|  |     //adjust lua load path
 | ||||||
|  |     luaAddScriptFolderToLoadPath( myLuaState, (argc > 3 ? argv[3] : "profile.lua") ); | ||||||
| 
 | 
 | ||||||
|     // Now call our function in a lua script
 |     // Now call our function in a lua script
 | ||||||
|     INFO("Parsing speedprofile from " << (argc > 3 ? argv[3] : "profile.lua") ); |     INFO("Parsing speedprofile from " << (argc > 3 ? argv[3] : "profile.lua") ); | ||||||
| @ -129,8 +133,9 @@ int main (int argc, char *argv[]) { | |||||||
|         ERR(lua_tostring(myLuaState,-1)<< " occurred in scripting block"); |         ERR(lua_tostring(myLuaState,-1)<< " occurred in scripting block"); | ||||||
|     } |     } | ||||||
|     speedProfile.uTurnPenalty = 10*lua_tointeger(myLuaState, -1); |     speedProfile.uTurnPenalty = 10*lua_tointeger(myLuaState, -1); | ||||||
| 
 |      | ||||||
| 
 |     speedProfile.has_turn_penalty_function = lua_function_exists( myLuaState, "turn_function" ); | ||||||
|  |      | ||||||
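A hypothetical sketch of why the existence check matters (this is not code from the commit): later stages only call into the profile when the optional callback is actually defined.

    if (speedProfile.has_turn_penalty_function) {
        // hypothetical invocation; in the commit the real call happens during
        // edge-expanded graph construction, not here
        const int penalty = luabind::call_function<int>(myLuaState, "turn_function", 90);
        // ... penalty would then be added to the edge weight
    }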
|     std::vector<ImportEdge> edgeList; |     std::vector<ImportEdge> edgeList; | ||||||
|     NodeID nodeBasedNodeNumber = readBinaryOSRMGraphFromStream(in, edgeList, bollardNodes, trafficLightNodes, &internalToExternalNodeMapping, inputRestrictions); |     NodeID nodeBasedNodeNumber = readBinaryOSRMGraphFromStream(in, edgeList, bollardNodes, trafficLightNodes, &internalToExternalNodeMapping, inputRestrictions); | ||||||
|     in.close(); |     in.close(); | ||||||
| @ -146,34 +151,27 @@ int main (int argc, char *argv[]) { | |||||||
|     INFO("Generating edge-expanded graph representation"); |     INFO("Generating edge-expanded graph representation"); | ||||||
|     EdgeBasedGraphFactory * edgeBasedGraphFactory = new EdgeBasedGraphFactory (nodeBasedNodeNumber, edgeList, bollardNodes, trafficLightNodes, inputRestrictions, internalToExternalNodeMapping, speedProfile); |     EdgeBasedGraphFactory * edgeBasedGraphFactory = new EdgeBasedGraphFactory (nodeBasedNodeNumber, edgeList, bollardNodes, trafficLightNodes, inputRestrictions, internalToExternalNodeMapping, speedProfile); | ||||||
|     std::vector<ImportEdge>().swap(edgeList); |     std::vector<ImportEdge>().swap(edgeList); | ||||||
|     edgeBasedGraphFactory->Run(edgeOut); |     edgeBasedGraphFactory->Run(edgeOut.c_str(), myLuaState); | ||||||
|     std::vector<_Restriction>().swap(inputRestrictions); |     std::vector<_Restriction>().swap(inputRestrictions); | ||||||
|     std::vector<NodeID>().swap(bollardNodes); |     std::vector<NodeID>().swap(bollardNodes); | ||||||
|     std::vector<NodeID>().swap(trafficLightNodes); |     std::vector<NodeID>().swap(trafficLightNodes); | ||||||
|     NodeID edgeBasedNodeNumber = edgeBasedGraphFactory->GetNumberOfNodes(); |     NodeID edgeBasedNodeNumber = edgeBasedGraphFactory->GetNumberOfNodes(); | ||||||
|     DeallocatingVector<EdgeBasedEdge> edgeBasedEdgeList; |     DeallocatingVector<EdgeBasedEdge> edgeBasedEdgeList; | ||||||
|     edgeBasedGraphFactory->GetEdgeBasedEdges(edgeBasedEdgeList); |     edgeBasedGraphFactory->GetEdgeBasedEdges(edgeBasedEdgeList); | ||||||
|  |     DeallocatingVector<EdgeBasedGraphFactory::EdgeBasedNode> nodeBasedEdgeList; | ||||||
|  |     edgeBasedGraphFactory->GetEdgeBasedNodes(nodeBasedEdgeList); | ||||||
|  |     delete edgeBasedGraphFactory; | ||||||
| 
 | 
 | ||||||
|     /***
 |     /***
 | ||||||
|      * Writing info on original (node-based) nodes |      * Writing info on original (node-based) nodes | ||||||
|      */ |      */ | ||||||
| 
 | 
 | ||||||
|     INFO("writing node map ..."); |     INFO("writing node map ..."); | ||||||
|     std::ofstream mapOutFile(nodeOut, std::ios::binary); |     std::ofstream mapOutFile(nodeOut.c_str(), std::ios::binary); | ||||||
|     mapOutFile.write((char *)&(internalToExternalNodeMapping[0]), internalToExternalNodeMapping.size()*sizeof(NodeInfo)); |     mapOutFile.write((char *)&(internalToExternalNodeMapping[0]), internalToExternalNodeMapping.size()*sizeof(NodeInfo)); | ||||||
|     mapOutFile.close(); |     mapOutFile.close(); | ||||||
|     std::vector<NodeInfo>().swap(internalToExternalNodeMapping); |     std::vector<NodeInfo>().swap(internalToExternalNodeMapping); | ||||||
| 
 | 
 | ||||||
|     /***
 |  | ||||||
|      * Writing info on original (node-based) edges |  | ||||||
|      */ |  | ||||||
|     INFO("writing info on original edges"); |  | ||||||
|     std::vector<OriginalEdgeData> originalEdgeData; |  | ||||||
|     edgeBasedGraphFactory->GetOriginalEdgeData(originalEdgeData); |  | ||||||
| 
 |  | ||||||
|     DeallocatingVector<EdgeBasedGraphFactory::EdgeBasedNode> nodeBasedEdgeList; |  | ||||||
|     edgeBasedGraphFactory->GetEdgeBasedNodes(nodeBasedEdgeList); |  | ||||||
|     delete edgeBasedGraphFactory; |  | ||||||
|     double expansionHasFinishedTime = get_timestamp() - startupTime; |     double expansionHasFinishedTime = get_timestamp() - startupTime; | ||||||
| 
 | 
 | ||||||
|     /***
 |     /***
 | ||||||
| @ -182,7 +180,7 @@ int main (int argc, char *argv[]) { | |||||||
| 
 | 
 | ||||||
|     INFO("building grid ..."); |     INFO("building grid ..."); | ||||||
|     WritableGrid * writeableGrid = new WritableGrid(); |     WritableGrid * writeableGrid = new WritableGrid(); | ||||||
|     writeableGrid->ConstructGrid(nodeBasedEdgeList, ramIndexOut, fileIndexOut); |     writeableGrid->ConstructGrid(nodeBasedEdgeList, ramIndexOut.c_str(), fileIndexOut.c_str()); | ||||||
|     delete writeableGrid; |     delete writeableGrid; | ||||||
|     IteratorbasedCRC32<DeallocatingVector<EdgeBasedGraphFactory::EdgeBasedNode> > crc32; |     IteratorbasedCRC32<DeallocatingVector<EdgeBasedGraphFactory::EdgeBasedNode> > crc32; | ||||||
|     unsigned crc32OfNodeBasedEdgeList = crc32(nodeBasedEdgeList.begin(), nodeBasedEdgeList.end() ); |     unsigned crc32OfNodeBasedEdgeList = crc32(nodeBasedEdgeList.begin(), nodeBasedEdgeList.end() ); | ||||||
| @ -208,13 +206,13 @@ int main (int argc, char *argv[]) { | |||||||
|      */ |      */ | ||||||
| 
 | 
 | ||||||
|     INFO("Building Node Array"); |     INFO("Building Node Array"); | ||||||
|     sort(contractedEdgeList.begin(), contractedEdgeList.end()); |     std::sort(contractedEdgeList.begin(), contractedEdgeList.end()); | ||||||
|     unsigned numberOfNodes = 0; |     unsigned numberOfNodes = 0; | ||||||
|     unsigned numberOfEdges = contractedEdgeList.size(); |     unsigned numberOfEdges = contractedEdgeList.size(); | ||||||
|     INFO("Serializing compacted graph"); |     INFO("Serializing compacted graph"); | ||||||
|     ofstream edgeOutFile(graphOut, ios::binary); |     std::ofstream edgeOutFile(graphOut.c_str(), std::ios::binary); | ||||||
| 
 | 
 | ||||||
|     BOOST_FOREACH(QueryEdge & edge, contractedEdgeList) { |     BOOST_FOREACH(const QueryEdge & edge, contractedEdgeList) { | ||||||
|         if(edge.source > numberOfNodes) { |         if(edge.source > numberOfNodes) { | ||||||
|             numberOfNodes = edge.source; |             numberOfNodes = edge.source; | ||||||
|         } |         } | ||||||
|  | |||||||
| @ -32,6 +32,7 @@ or see http://www.gnu.org/licenses/agpl.txt. | |||||||
| #include "Util/BaseConfiguration.h" | #include "Util/BaseConfiguration.h" | ||||||
| #include "Util/InputFileUtil.h" | #include "Util/InputFileUtil.h" | ||||||
| #include "Util/MachineInfo.h" | #include "Util/MachineInfo.h" | ||||||
|  | #include "Util/OpenMPWrapper.h" | ||||||
| #include "Util/StringUtil.h" | #include "Util/StringUtil.h" | ||||||
| 
 | 
 | ||||||
| typedef BaseConfiguration ExtractorConfiguration; | typedef BaseConfiguration ExtractorConfiguration; | ||||||
| @ -39,6 +40,8 @@ typedef BaseConfiguration ExtractorConfiguration; | |||||||
| ExtractorCallbacks * extractCallBacks; | ExtractorCallbacks * extractCallBacks; | ||||||
| 
 | 
 | ||||||
| int main (int argc, char *argv[]) { | int main (int argc, char *argv[]) { | ||||||
|  |     double earliestTime = get_timestamp(); | ||||||
|  | 
 | ||||||
|     if(argc < 2) { |     if(argc < 2) { | ||||||
|         ERR("usage: \n" << argv[0] << " <file.osm/.osm.bz2/.osm.pbf> [<profile.lua>]"); |         ERR("usage: \n" << argv[0] << " <file.osm/.osm.bz2/.osm.pbf> [<profile.lua>]"); | ||||||
|     } |     } | ||||||
| @ -55,7 +58,6 @@ int main (int argc, char *argv[]) { | |||||||
|     } |     } | ||||||
|     omp_set_num_threads(numberOfThreads); |     omp_set_num_threads(numberOfThreads); | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|     INFO("extracting data from input file " << argv[1]); |     INFO("extracting data from input file " << argv[1]); | ||||||
|     bool isPBF(false); |     bool isPBF(false); | ||||||
|     std::string outputFileName(argv[1]); |     std::string outputFileName(argv[1]); | ||||||
| @ -67,12 +69,12 @@ int main (int argc, char *argv[]) { | |||||||
|             isPBF = true; |             isPBF = true; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|     if(pos!=string::npos) { |     if(pos!=std::string::npos) { | ||||||
|         outputFileName.replace(pos, 8, ".osrm"); |         outputFileName.replace(pos, 8, ".osrm"); | ||||||
|         restrictionsFileName.replace(pos, 8, ".osrm.restrictions"); |         restrictionsFileName.replace(pos, 8, ".osrm.restrictions"); | ||||||
|     } else { |     } else { | ||||||
|         pos=outputFileName.find(".osm"); |         pos=outputFileName.find(".osm"); | ||||||
|         if(pos!=string::npos) { |         if(pos!=std::string::npos) { | ||||||
|             outputFileName.replace(pos, 5, ".osrm"); |             outputFileName.replace(pos, 5, ".osrm"); | ||||||
|             restrictionsFileName.replace(pos, 5, ".osrm.restrictions"); |             restrictionsFileName.replace(pos, 5, ".osrm.restrictions"); | ||||||
|         } else { |         } else { | ||||||
| @ -90,30 +92,30 @@ int main (int argc, char *argv[]) { | |||||||
|     StringMap stringMap; |     StringMap stringMap; | ||||||
|     ExtractionContainers externalMemory; |     ExtractionContainers externalMemory; | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|     stringMap[""] = 0; |     stringMap[""] = 0; | ||||||
|     extractCallBacks = new ExtractorCallbacks(&externalMemory, &stringMap); |     extractCallBacks = new ExtractorCallbacks(&externalMemory, &stringMap); | ||||||
|     BaseParser<ExtractorCallbacks, _Node, _RawRestrictionContainer, _Way> * parser; |     BaseParser* parser; | ||||||
|     if(isPBF) { |     if(isPBF) { | ||||||
|         parser = new PBFParser(argv[1]); |         parser = new PBFParser(argv[1], extractCallBacks, scriptingEnvironment); | ||||||
|     } else { |     } else { | ||||||
|         parser = new XMLParser(argv[1]); |         parser = new XMLParser(argv[1], extractCallBacks, scriptingEnvironment); | ||||||
|     } |     } | ||||||
|     parser->RegisterCallbacks(extractCallBacks); |      | ||||||
|     parser->RegisterScriptingEnvironment(scriptingEnvironment); |     if(!parser->ReadHeader()) { | ||||||
| 
 |  | ||||||
|     if(!parser->Init()) |  | ||||||
|         ERR("Parser not initialized!"); |         ERR("Parser not initialized!"); | ||||||
|  |     } | ||||||
|  |     INFO("Parsing in progress.."); | ||||||
|     double time = get_timestamp(); |     double time = get_timestamp(); | ||||||
|     parser->Parse(); |     parser->Parse(); | ||||||
|     INFO("parsing finished after " << get_timestamp() - time << " seconds"); |     INFO("Parsing finished after " << get_timestamp() - time << " seconds"); | ||||||
| 
 | 
 | ||||||
|     externalMemory.PrepareData(outputFileName, restrictionsFileName, amountOfRAM); |     externalMemory.PrepareData(outputFileName, restrictionsFileName, amountOfRAM); | ||||||
| 
 | 
 | ||||||
|     stringMap.clear(); |     stringMap.clear(); | ||||||
|     delete parser; |     delete parser; | ||||||
|     delete extractCallBacks; |     delete extractCallBacks; | ||||||
|     INFO("finished"); |     INFO("finished after " << get_timestamp() - earliestTime << "s"); | ||||||
|  | 
 | ||||||
|     std::cout << "\nRun:\n" |     std::cout << "\nRun:\n" | ||||||
|                    "./osrm-prepare " << outputFileName << " " << restrictionsFileName << std::endl; |                    "./osrm-prepare " << outputFileName << " " << restrictionsFileName << std::endl; | ||||||
|     return 0; |     return 0; | ||||||
|  | |||||||
| @ -1,5 +1,5 @@ | |||||||
| @routing @bicycle @access | @routing @bicycle @access | ||||||
| Feature: Bike - Restricted access | Feature: Bike - Access tags on ways | ||||||
| Reference: http://wiki.openstreetmap.org/wiki/Key:access | Reference: http://wiki.openstreetmap.org/wiki/Key:access | ||||||
| 
 | 
 | ||||||
| 	Background: | 	Background: | ||||||
| @ -7,37 +7,95 @@ Reference: http://wiki.openstreetmap.org/wiki/Key:access | |||||||
| 		 | 		 | ||||||
| 	Scenario: Bike - Access tag hierarchy on ways | 	Scenario: Bike - Access tag hierarchy on ways | ||||||
| 		Then routability should be | 		Then routability should be | ||||||
| 		 | access | vehicle | bicycle | bothw | | 		 | highway | access | vehicle | bicycle | bothw | | ||||||
| 		 |        |         |         | x     | | 		 |         |        |         |         | x     | | ||||||
| 		 | yes    |         |         | x     | | 		 |         | yes    |         |         | x     | | ||||||
| 		 | no     |         |         |       | | 		 |         | no     |         |         |       | | ||||||
| 		 |        | yes     |         | x     | | 		 |         |        | yes     |         | x     | | ||||||
| 		 |        | no      |         |       | | 		 |         |        | no      |         |       | | ||||||
| 		 | no     | yes     |         | x     | | 		 |         | no     | yes     |         | x     | | ||||||
| 		 | yes    | no      |         |       | | 		 |         | yes    | no      |         |       | | ||||||
| 		 |        |         | yes     | x     | | 		 |         |        |         | yes     | x     | | ||||||
| 		 |        |         | no      |       | | 		 |         |        |         | no      |       | | ||||||
| 		 | no     |         | yes     | x     | | 		 |         | no     |         | yes     | x     | | ||||||
| 		 | yes    |         | no      |       | | 		 |         | yes    |         | no      |       | | ||||||
| 		 |        | no      | yes     | x     | | 		 |         |        | no      | yes     | x     | | ||||||
| 		 |        | yes     | no      |       | | 		 |         |        | yes     | no      |       | | ||||||
|  | 		 | runway  |        |         |         |       | | ||||||
|  | 		 | runway  | yes    |         |         | x     | | ||||||
|  | 		 | runway  | no     |         |         |       | | ||||||
|  | 		 | runway  |        | yes     |         | x     | | ||||||
|  | 		 | runway  |        | no      |         |       | | ||||||
|  | 		 | runway  | no     | yes     |         | x     | | ||||||
|  | 		 | runway  | yes    | no      |         |       | | ||||||
|  | 		 | runway  |        |         | yes     | x     | | ||||||
|  | 		 | runway  |        |         | no      |       | | ||||||
|  | 		 | runway  | no     |         | yes     | x     | | ||||||
|  | 		 | runway  | yes    |         | no      |       | | ||||||
|  | 		 | runway  |        | no      | yes     | x     | | ||||||
|  | 		 | runway  |        | yes     | no      |       | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Access tag hierachy on nodes |     @todo | ||||||
| 		Then routability should be |   	Scenario: Bike - Access tag in forward direction | ||||||
| 		 | node/access | node/vehicle | node/bicycle | bothw | |   		Then routability should be | ||||||
| 		 |             |              |              | x     | |   		 | highway | access:forward | vehicle:forward | bicycle:forward | forw | backw | | ||||||
| 		 | yes         |              |              | x     | |   		 |         |                |                 |                 | x    |       | | ||||||
| 		 | no          |              |              |       | |   		 |         | yes            |                 |                 | x    |       | | ||||||
| 		 |             | yes          |              | x     | |   		 |         | no             |                 |                 |      |       | | ||||||
| 		 |             | no           |              |       | |   		 |         |                | yes             |                 | x    |       | | ||||||
| 		 | no          | yes          |              | x     | |   		 |         |                | no              |                 |      |       | | ||||||
| 		 | yes         | no           |              |       | |   		 |         | no             | yes             |                 | x    |       | | ||||||
| 		 |             |              | yes          | x     | |   		 |         | yes            | no              |                 |      |       | | ||||||
| 		 |             |              | no           |       | |   		 |         |                |                 | yes             | x    |       | | ||||||
| 		 | no          |              | yes          | x     | |   		 |         |                |                 | no              |      |       | | ||||||
| 		 | yes         |              | no           |       | |   		 |         | no             |                 | yes             | x    |       | | ||||||
| 		 |             | no           | yes          | x     | |   		 |         | yes            |                 | no              |      |       | | ||||||
| 		 |             | yes          | no           |       | |   		 |         |                | no              | yes             | x    |       | | ||||||
|  |   		 |         |                | yes             | no              |      |       | | ||||||
|  |   		 | runway  |                |                 |                 | x    |       | | ||||||
|  |   		 | runway  | yes            |                 |                 | x    |       | | ||||||
|  |   		 | runway  | no             |                 |                 |      |       | | ||||||
|  |   		 | runway  |                | yes             |                 | x    |       | | ||||||
|  |   		 | runway  |                | no              |                 |      |       | | ||||||
|  |   		 | runway  | no             | yes             |                 | x    |       | | ||||||
|  |   		 | runway  | yes            | no              |                 |      |       | | ||||||
|  |   		 | runway  |                |                 | yes             | x    |       | | ||||||
|  |   		 | runway  |                |                 | no              |      |       | | ||||||
|  |   		 | runway  | no             |                 | yes             | x    |       | | ||||||
|  |   		 | runway  | yes            |                 | no              |      |       | | ||||||
|  |   		 | runway  |                | no              | yes             | x    |       | | ||||||
|  |   		 | runway  |                | yes             | no              |      |       | | ||||||
|  | 
 | ||||||
|  |     @todo | ||||||
|  |    	Scenario: Bike - Access tag in backward direction | ||||||
|  |    		Then routability should be | ||||||
|  |    		 | highway | access:forward | vehicle:forward | bicycle:forward | forw | backw | | ||||||
|  |    		 |         |                |                 |                 |      | x     | | ||||||
|  |    		 |         | yes            |                 |                 |      | x     | | ||||||
|  |    		 |         | no             |                 |                 |      |       | | ||||||
|  |    		 |         |                | yes             |                 |      | x     | | ||||||
|  |    		 |         |                | no              |                 |      |       | | ||||||
|  |    		 |         | no             | yes             |                 |      | x     | | ||||||
|  |    		 |         | yes            | no              |                 |      |       | | ||||||
|  |    		 |         |                |                 | yes             |      | x     | | ||||||
|  |    		 |         |                |                 | no              |      |       | | ||||||
|  |    		 |         | no             |                 | yes             |      | x     | | ||||||
|  |    		 |         | yes            |                 | no              |      |       | | ||||||
|  |    		 |         |                | no              | yes             |      | x     | | ||||||
|  |    		 |         |                | yes             | no              |      |       | | ||||||
|  |    		 | runway  |                |                 |                 |      | x     | | ||||||
|  |    		 | runway  | yes            |                 |                 |      | x     | | ||||||
|  |    		 | runway  | no             |                 |                 |      |       | | ||||||
|  |    		 | runway  |                | yes             |                 |      | x     | | ||||||
|  |    		 | runway  |                | no              |                 |      |       | | ||||||
|  |    		 | runway  | no             | yes             |                 |      | x     | | ||||||
|  |    		 | runway  | yes            | no              |                 |      |       | | ||||||
|  |    		 | runway  |                |                 | yes             |      | x     | | ||||||
|  |    		 | runway  |                |                 | no              |      |       | | ||||||
|  |    		 | runway  | no             |                 | yes             |      | x     | | ||||||
|  |    		 | runway  | yes            |                 | no              |      |       | | ||||||
|  |    		 | runway  |                | no              | yes             |      | x     | | ||||||
|  |    		 | runway  |                | yes             | no              |      |       | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Overwriting implied access on ways | 	Scenario: Bike - Overwriting implied access on ways | ||||||
| 		Then routability should be | 		Then routability should be | ||||||
| @ -51,18 +109,6 @@ Reference: http://wiki.openstreetmap.org/wiki/Key:access | |||||||
| 		 | runway   |        | yes     |         | x     | | 		 | runway   |        | yes     |         | x     | | ||||||
| 		 | runway   |        |         | yes     | x     | | 		 | runway   |        |         | yes     | x     | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Overwriting implied acccess on nodes |  | ||||||
| 		Then routability should be |  | ||||||
| 		 | highway  | node/access | node/vehicle | node/bicycle | bothw | |  | ||||||
| 		 | cycleway |             |              |              | x     | |  | ||||||
| 		 | runway   |             |              |              |       | |  | ||||||
| 		 | cycleway | no          |              |              |       | |  | ||||||
| 		 | cycleway |             | no           |              |       | |  | ||||||
| 		 | cycleway |             |              | no           |       | |  | ||||||
| 		 | runway   | yes         |              |              |       | |  | ||||||
| 		 | runway   |             | yes          |              |       | |  | ||||||
| 		 | runway   |             |              | yes          |       | |  | ||||||
| 
 |  | ||||||
| 	Scenario: Bike - Access tags on ways | 	Scenario: Bike - Access tags on ways | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | access       | vehicle      | bicycle      | bothw | | 		 | access       | vehicle      | bicycle      | bothw | | ||||||
| @ -92,35 +138,6 @@ Reference: http://wiki.openstreetmap.org/wiki/Key:access | |||||||
| 		 |              |              | agricultural |       | | 		 |              |              | agricultural |       | | ||||||
| 		 |              |              | forestery    |       | | 		 |              |              | forestery    |       | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Access tags on nodes |  | ||||||
| 	 	Then routability should be |  | ||||||
| 		 | node/access  | node/vehicle | node/bicycle | bothw | |  | ||||||
| 		 |              |              |              | x     | |  | ||||||
| 		 | yes          |              |              | x     | |  | ||||||
| 		 | permissive   |              |              | x     | |  | ||||||
| 		 | designated   |              |              | x     | |  | ||||||
| 		 | some_tag     |              |              | x     | |  | ||||||
| 		 | no           |              |              |       | |  | ||||||
| 		 | private      |              |              |       | |  | ||||||
| 		 | agricultural |              |              |       | |  | ||||||
| 		 | forestery    |              |              |       | |  | ||||||
| 		 |              | yes          |              | x     | |  | ||||||
| 		 |              | permissive   |              | x     | |  | ||||||
| 		 |              | designated   |              | x     | |  | ||||||
| 		 |              | some_tag     |              | x     | |  | ||||||
| 		 |              | no           |              |       | |  | ||||||
| 		 |              | private      |              |       | |  | ||||||
| 		 |              | agricultural |              |       | |  | ||||||
| 		 |              | forestery    |              |       | |  | ||||||
| 		 |              |              | yes          | x     | |  | ||||||
| 		 |              |              | permissive   | x     | |  | ||||||
| 		 |              |              | designated   | x     | |  | ||||||
| 		 |              |              | some_tag     | x     | |  | ||||||
| 		 |              |              | no           |       | |  | ||||||
| 		 |              |              | private      |       | |  | ||||||
| 		 |              |              | agricultural |       | |  | ||||||
| 		 |              |              | forestery    |       | |  | ||||||
| 
 |  | ||||||
| 	Scenario: Bike - Access tags on both node and way | 	Scenario: Bike - Access tags on both node and way | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | access   | node/access | bothw | | 		 | access   | node/access | bothw | | ||||||
| @ -147,10 +164,10 @@ Reference: http://wiki.openstreetmap.org/wiki/Key:access | |||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Ignore access tags for other modes | 	Scenario: Bike - Ignore access tags for other modes | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | highway  | foot | motor_vehicle | moped | bothw | | 		 | highway  | boat | motor_vehicle | moped | bothw | | ||||||
| 		 | runway   | yes  |               |       |       | | 		 | river    | yes  |               |       |       | | ||||||
| 		 | cycleway | no   |               |       | x     | | 		 | cycleway | no   |               |       | x     | | ||||||
| 		 | runway   |      | yes           |       |       | | 		 | runway   |      | yes           |       |       | | ||||||
| 		 | cycleway |      | no            |       | x     | | 		 | cycleway |      | no            |       | x     | | ||||||
| 		 | runway   |      |               | yes   |       | | 		 | runway   |      |               | yes   |       | | ||||||
| 		 | cycleway |      |               | no    | x     | | 		 | cycleway |      |               | no    | x     | | ||||||
|  | |||||||
							
								
								
									
64  features/bicycle/access_node.feature  Normal file
							| @ -0,0 +1,64 @@ | |||||||
|  | @routing @bicycle @access | ||||||
|  | Feature: Bike - Access tags on nodes | ||||||
|  | Reference: http://wiki.openstreetmap.org/wiki/Key:access | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "bicycle" | ||||||
|  | 
 | ||||||
|  | 	Scenario: Bike - Access tag hierarchy on nodes | ||||||
|  | 		Then routability should be | ||||||
|  | 		 | node/access | node/vehicle | node/bicycle | bothw | | ||||||
|  | 		 |             |              |              | x     | | ||||||
|  | 		 | yes         |              |              | x     | | ||||||
|  | 		 | no          |              |              |       | | ||||||
|  | 		 |             | yes          |              | x     | | ||||||
|  | 		 |             | no           |              |       | | ||||||
|  | 		 | no          | yes          |              | x     | | ||||||
|  | 		 | yes         | no           |              |       | | ||||||
|  | 		 |             |              | yes          | x     | | ||||||
|  | 		 |             |              | no           |       | | ||||||
|  | 		 | no          |              | yes          | x     | | ||||||
|  | 		 | yes         |              | no           |       | | ||||||
|  | 		 |             | no           | yes          | x     | | ||||||
|  | 		 |             | yes          | no           |       | | ||||||
|  | 
 | ||||||
|  | 	Scenario: Bike - Overwriting implied access on nodes | ||||||
|  | 		Then routability should be | ||||||
|  | 		 | highway  | node/access | node/vehicle | node/bicycle | bothw | | ||||||
|  | 		 | cycleway |             |              |              | x     | | ||||||
|  | 		 | runway   |             |              |              |       | | ||||||
|  | 		 | cycleway | no          |              |              |       | | ||||||
|  | 		 | cycleway |             | no           |              |       | | ||||||
|  | 		 | cycleway |             |              | no           |       | | ||||||
|  | 		 | runway   | yes         |              |              |       | | ||||||
|  | 		 | runway   |             | yes          |              |       | | ||||||
|  | 		 | runway   |             |              | yes          |       | | ||||||
|  | 
 | ||||||
|  | 	Scenario: Bike - Access tags on nodes | ||||||
|  | 	 	Then routability should be | ||||||
|  | 		 | node/access  | node/vehicle | node/bicycle | bothw | | ||||||
|  | 		 |              |              |              | x     | | ||||||
|  | 		 | yes          |              |              | x     | | ||||||
|  | 		 | permissive   |              |              | x     | | ||||||
|  | 		 | designated   |              |              | x     | | ||||||
|  | 		 | some_tag     |              |              | x     | | ||||||
|  | 		 | no           |              |              |       | | ||||||
|  | 		 | private      |              |              |       | | ||||||
|  | 		 | agricultural |              |              |       | | ||||||
|  | 		 | forestery    |              |              |       | | ||||||
|  | 		 |              | yes          |              | x     | | ||||||
|  | 		 |              | permissive   |              | x     | | ||||||
|  | 		 |              | designated   |              | x     | | ||||||
|  | 		 |              | some_tag     |              | x     | | ||||||
|  | 		 |              | no           |              |       | | ||||||
|  | 		 |              | private      |              |       | | ||||||
|  | 		 |              | agricultural |              |       | | ||||||
|  | 		 |              | forestery    |              |       | | ||||||
|  | 		 |              |              | yes          | x     | | ||||||
|  | 		 |              |              | permissive   | x     | | ||||||
|  | 		 |              |              | designated   | x     | | ||||||
|  | 		 |              |              | some_tag     | x     | | ||||||
|  | 		 |              |              | no           |       | | ||||||
|  | 		 |              |              | private      |       | | ||||||
|  | 		 |              |              | agricultural |       | | ||||||
|  | 		 |              |              | forestery    |       | | ||||||
| @ -29,16 +29,16 @@ Reference: http://wiki.openstreetmap.org/wiki/Key:cycleway | |||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | highway | cycleway | cycleway:left | cycleway:right | forw | backw | | 		 | highway | cycleway | cycleway:left | cycleway:right | forw | backw | | ||||||
| 		 | primary |          |               |                | x    | x     | | 		 | primary |          |               |                | x    | x     | | ||||||
| 		 | pirmary | track    |               |                | x    | x     | | 		 | primary | track    |               |                | x    | x     | | ||||||
| 		 | pirmary | opposite |               |                | x    | x     | | 		 | primary | opposite |               |                | x    | x     | | ||||||
| 		 | pirmary |          | track         |                | x    | x     | | 		 | primary |          | track         |                | x    | x     | | ||||||
| 		 | pirmary |          | opposite      |                | x    | x     | | 		 | primary |          | opposite      |                | x    | x     | | ||||||
| 		 | pirmary |          |               | track          | x    | x     | | 		 | primary |          |               | track          | x    | x     | | ||||||
| 		 | pirmary |          |               | opposite       | x    | x     | | 		 | primary |          |               | opposite       | x    | x     | | ||||||
| 		 | pirmary |          | track         | track          | x    | x     | | 		 | primary |          | track         | track          | x    | x     | | ||||||
| 		 | pirmary |          | opposite      | opposite       | x    | x     | | 		 | primary |          | opposite      | opposite       | x    | x     | | ||||||
| 		 | pirmary |          | track         | opposite       | x    | x     | | 		 | primary |          | track         | opposite       | x    | x     | | ||||||
| 		 | pirmary |          | opposite      | track          | x    | x     | | 		 | primary |          | opposite      | track          | x    | x     | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Left/right side cycleways on implied oneways    | 	Scenario: Bike - Left/right side cycleways on implied oneways    | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
|  | |||||||
| @ -58,79 +58,6 @@ Feature: Bike - Handle ferry routes | |||||||
| 		 | abcd  |         | ferry | yes     | 1:00     | | 		 | abcd  |         | ferry | yes     | 1:00     | | ||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | time      | | 		 | from | to | route | time       | | ||||||
| 		 | a    | d  | abcd  | 3600s +-1 | | 		 | a    | d  | abcd  | 3600s +-10 | | ||||||
| 		 | d    | a  | abcd  | 3600s +-1 | | 		 | d    | a  | abcd  | 3600s +-10 | | ||||||
| 
 |  | ||||||
| 	Scenario: Bike - Ferry duration, connected routes |  | ||||||
| 		Given the node map |  | ||||||
| 		  | x |   |   |   |   |   |   |   | y | |  | ||||||
| 		  |   | a | b | c | d | e | f | g |   | |  | ||||||
| 
 |  | ||||||
| 		And the ways |  | ||||||
| 		 | nodes | highway | route | bicycle | duration | |  | ||||||
| 		 | xa    | primary |       |         |          | |  | ||||||
| 		 | yg    | primary |       |         |          | |  | ||||||
| 		 | abcd  |         | ferry | yes     | 0:30     | |  | ||||||
| 		 | defg  |         | ferry | yes     | 0:30     | |  | ||||||
| 
 |  | ||||||
| 		When I route I should get |  | ||||||
| 		 | from | to | route     | time      | |  | ||||||
| 		 | a    | g  | abcd,defg | 3600s +-1 | |  | ||||||
| 		 | g    | a  | defg,abcd | 3600s +-1 | |  | ||||||
| 
 |  | ||||||
| 	Scenario: Bike - Prefer road when faster than ferry |  | ||||||
| 		Given the node map |  | ||||||
| 		  | x | a | b | c |   | |  | ||||||
| 		  |   |   |   |   | d | |  | ||||||
| 		  | y | g | f | e |   | |  | ||||||
| 		 |  | ||||||
| 		And the ways |  | ||||||
| 		 | nodes | highway | route | bicycle | duration | |  | ||||||
| 		 | xa    | primary |       |         |          | |  | ||||||
| 		 | yg    | primary |       |         |          | |  | ||||||
| 		 | xy    | primary |       |         |          | |  | ||||||
| 		 | abcd  |         | ferry | yes     | 0:01     | |  | ||||||
| 		 | defg  |         | ferry | yes     | 0:01     | |  | ||||||
| 
 |  | ||||||
| 		When I route I should get |  | ||||||
| 		 | from | to | route    | time      | |  | ||||||
| 		 | a    | g  | xa,xy,yg | 60s +-25% | |  | ||||||
| 		 | g    | a  | yg,xy,xa | 60s +-25% | |  | ||||||
| 
 |  | ||||||
| 	Scenario: Bike - Long winding ferry route |  | ||||||
| 		Given the node map |  | ||||||
| 		  | x |   | b |   | d |   | f |   | y | |  | ||||||
| 		  |   | a |   | c |   | e |   | g |   | |  | ||||||
| 
 |  | ||||||
| 		And the ways |  | ||||||
| 		 | nodes   | highway | route | bicycle | duration | |  | ||||||
| 		 | xa      | primary |       |         |          | |  | ||||||
| 		 | yg      | primary |       |         |          | |  | ||||||
| 		 | abcdefg |         | ferry | yes     | 6:30     | |  | ||||||
| 
 |  | ||||||
| 		When I route I should get |  | ||||||
| 		 | from | to | route   | time       | |  | ||||||
| 		 | a    | g  | abcdefg | 23400s +-1 | |  | ||||||
| 		 | g    | a  | abcdefg | 23400s +-1 | |  | ||||||
|      |  | ||||||
|     @todo |  | ||||||
| 	Scenario: Bike - Ferry duration, individual parts |  | ||||||
| 		Given the node map |  | ||||||
| 		  | x | y |  | z |  |  | v | |  | ||||||
| 		  | a | b |  | c |  |  | d | |  | ||||||
| 
 |  | ||||||
| 		And the ways |  | ||||||
| 		 | nodes | highway | route | bicycle | duration | |  | ||||||
| 		 | xa    | primary |       |         |          | |  | ||||||
| 		 | yb    | primary |       |         |          | |  | ||||||
| 		 | zc    | primary |       |         |          | |  | ||||||
| 		 | vd    | primary |       |         |          | |  | ||||||
| 		 | abcd  |         | ferry | yes     | 1:00     | |  | ||||||
| 
 |  | ||||||
| 		When I route I should get |  | ||||||
| 		 | from | to | route | time      | |  | ||||||
| 		 | a    | d  | abcd  | 3600s +-1 | |  | ||||||
| 		 | a    | b  | abcd  | 600s +-1  | |  | ||||||
| 		 | b    | c  | abcd  | 1200s +-1 | |  | ||||||
| 		 | c    | d  | abcd  | 1800s +-1 | |  | ||||||
|  | |||||||
| @ -3,20 +3,39 @@ Feature: Bike - Max speed restrictions | |||||||
| 
 | 
 | ||||||
| 	Background: Use specific speeds | 	Background: Use specific speeds | ||||||
| 		Given the profile "bicycle" | 		Given the profile "bicycle" | ||||||
| 	 |  | ||||||
| 	Scenario: Bike - Respect maxspeeds when lower that way type speed |  | ||||||
| 		Given the node map |  | ||||||
| 		 | a | b | c | |  | ||||||
| 
 | 
 | ||||||
| 		And the ways | 	Scenario: Bicycle - Respect maxspeeds when lower than way type speed | ||||||
| 		 | nodes | highway     | maxspeed | |     	Then routability should be | ||||||
| 		 | ab    | residential |          | |     	 | highway     | maxspeed | bothw    | | ||||||
| 		 | bc    | residential | 10       | |     	 | residential |          | 49s ~10% | | ||||||
|  |     	 | residential | 10       | 72s ~10% | | ||||||
| 
 | 
 | ||||||
| 		When I route I should get | 	Scenario: Bicycle - Ignore maxspeed when higher than way speed | ||||||
| 		 | from | to | route | time    | |     	Then routability should be | ||||||
| 		 | a    | b  | ab    | 20s ~5% | |     	 | highway     | maxspeed | bothw    | | ||||||
| 		 | b    | c  | bc    | 36s ~5% | |     	 | residential |          | 49s ~10% | | ||||||
|  |     	 | residential | 80       | 49s ~10% | | ||||||
|  |      | ||||||
|  |     @todo | ||||||
|  |   	Scenario: Bicycle - Maxspeed formats | ||||||
|  |  		Then routability should be | ||||||
|  |  		 | highway     | maxspeed  | bothw     | | ||||||
|  |  		 | residential |           | 49s ~10%  | | ||||||
|  |  		 | residential | 5         | 144s ~10% | | ||||||
|  |  		 | residential | 5mph      | 90s ~10%  | | ||||||
|  |  		 | residential | 5 mph     | 90s ~10%  | | ||||||
|  |  		 | residential | 5MPH      | 90s ~10%  | | ||||||
|  |  		 | residential | 5 MPH     | 90s ~10%  | | ||||||
|  |  		 | trunk       | 5unknown  | 49s ~10%  | | ||||||
|  |  		 | trunk       | 5 unknown | 49s ~10%  | | ||||||
|  | 
 | ||||||
|  |     @todo | ||||||
|  |    	Scenario: Bicycle - Maxspeed special tags | ||||||
|  |   		Then routability should be | ||||||
|  |   		 | highway     | maxspeed | bothw    | | ||||||
|  |   		 | residential |          | 49s ~10% | | ||||||
|  |   		 | residential | none     | 49s ~10% | | ||||||
|  |   		 | residential | signals  | 49s ~10% | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Do not use maxspeed when higher than way type speed | 	Scenario: Bike - Do not use maxspeed when higher than way type speed | ||||||
| 		Given the node map | 		Given the node map | ||||||
| @ -29,5 +48,23 @@ Feature: Bike - Max speed restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | time    | | 		 | from | to | route | time    | | ||||||
| 		 | a    | b  | ab    | 20s ~5% | | 		 | a    | b  | ab    | 24s ~5% | | ||||||
| 		 | b    | c  | bc    | 20s ~5% | | 		 | b    | c  | bc    | 24s ~5% | | ||||||
|  | 
 | ||||||
|  |      Scenario: Bike - Forward/backward maxspeed | ||||||
|  |         Given the shortcuts | ||||||
|  |  		 | key   | value    | | ||||||
|  |  		 | bike  | 49s ~10% | | ||||||
|  |  		 | run   | 73s ~10% | | ||||||
|  |  		 | walk  | 145s ~10% | | ||||||
|  |  		 | snail | 720s ~10% | | ||||||
|  | 
 | ||||||
|  |   	 	Then routability should be | ||||||
|  |      	 | maxspeed | maxspeed:forward | maxspeed:backward | forw  | backw | | ||||||
|  |      	 |          |                  |                   | bike  | bike   | | ||||||
|  |      	 | 10       |                  |                   | run   | run    | | ||||||
|  |      	 |          | 10               |                   | run   | bike   | | ||||||
|  |      	 |          |                  | 10                | bike  | run    | | ||||||
|  |      	 | 1        | 10               |                   | run   | snail  | | ||||||
|  |      	 | 1        |                  | 10                | snail | run    | | ||||||
|  |      	 | 1        | 5                | 10                | walk  | run    | | ||||||
|  | |||||||
| @ -1,19 +1,20 @@ | |||||||
| @routing @bicycle @oneway | @routing @bicycle @oneway | ||||||
| Feature: Bike - Oneway streets | Feature: Bike - Oneway streets | ||||||
| Handle oneway streets, as defined at http://wiki.openstreetmap.org/wiki/OSM_tags_for_routing | Handle oneway streets, as defined at http://wiki.openstreetmap.org/wiki/OSM_tags_for_routing | ||||||
|  | Usually we can push bikes against oneways, but we use foot=no to prevent this in these tests | ||||||
| 
 | 
 | ||||||
| 	Background: | 	Background: | ||||||
| 		Given the profile "bicycle" | 		Given the profile "bicycle" | ||||||
| 	 | 	 | ||||||
| 	Scenario: Bike - Simple oneway | 	Scenario: Bike - Simple oneway | ||||||
| 		Then routability should be | 		Then routability should be | ||||||
| 		 | highway | oneway | forw | backw | | 		 | highway | foot | oneway | forw | backw | | ||||||
| 		 | primary | yes    | x    |       | | 		 | primary | no   | yes    | x    |       | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Simple reverse oneway | 	Scenario: Simple reverse oneway | ||||||
| 		Then routability should be | 		Then routability should be | ||||||
| 		 | highway | oneway | forw | backw | | 		 | highway | foot | oneway | forw | backw | | ||||||
| 		 | primary | -1     |      | x     | | 		 | primary | no   | -1     |      | x     | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Around the Block | 	Scenario: Bike - Around the Block | ||||||
| 		Given the node map | 		Given the node map | ||||||
| @ -21,11 +22,11 @@ Handle oneways streets, as defined at http://wiki.openstreetmap.org/wiki/OSM_tag | |||||||
| 		 | d | c | | 		 | d | c | | ||||||
| 	 | 	 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | ab    | yes    | | 		 | ab    | yes    | no   | | ||||||
| 		 | bc    |        | | 		 | bc    |        | no   | | ||||||
| 		 | cd    |        | | 		 | cd    |        | no   | | ||||||
| 		 | da    |        | | 		 | da    |        | no   | | ||||||
|      |      | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route    | | 		 | from | to | route    | | ||||||
| @ -34,80 +35,80 @@ Handle oneways streets, as defined at http://wiki.openstreetmap.org/wiki/OSM_tag | |||||||
| 	 | 	 | ||||||
| 	Scenario: Bike - Handle various oneway tag values | 	Scenario: Bike - Handle various oneway tag values | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | oneway   | forw | backw | | 		 | foot | oneway   | forw | backw | | ||||||
| 		 |          | x    | x     | | 		 | no   |          | x    | x     | | ||||||
| 		 | nonsense | x    | x     | | 		 | no   | nonsense | x    | x     | | ||||||
| 		 | no       | x    | x     | | 		 | no   | no       | x    | x     | | ||||||
| 		 | false    | x    | x     | | 		 | no   | false    | x    | x     | | ||||||
| 		 | 0        | x    | x     | | 		 | no   | 0        | x    | x     | | ||||||
| 		 | yes      | x    |       | | 		 | no   | yes      | x    |       | | ||||||
| 		 | true     | x    |       | | 		 | no   | true     | x    |       | | ||||||
| 		 | 1        | x    |       | | 		 | no   | 1        | x    |       | | ||||||
| 		 | -1       |      | x     | | 		 | no   | -1       |      | x     | | ||||||
| 	 | 	 | ||||||
| 	Scenario: Bike - Implied oneways | 	Scenario: Bike - Implied oneways | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | highway       | bicycle | junction   | forw | backw | | 		 | highway       | foot | bicycle | junction   | forw | backw | | ||||||
| 		 |               |         |            | x    | x     | | 		 |               | no   |         |            | x    | x     | | ||||||
| 		 |               |         | roundabout | x    |       | | 		 |               | no   |         | roundabout | x    |       | | ||||||
| 		 | motorway      | yes     |            | x    |       | | 		 | motorway      | no   | yes     |            | x    |       | | ||||||
| 		 | motorway_link | yes     |            | x    |       | | 		 | motorway_link | no   | yes     |            | x    |       | | ||||||
| 		 | motorway      | yes     | roundabout | x    |       | | 		 | motorway      | no   | yes     | roundabout | x    |       | | ||||||
| 		 | motorway_link | yes     | roundabout | x    |       | | 		 | motorway_link | no   | yes     | roundabout | x    |       | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Overriding implied oneways | 	Scenario: Bike - Overriding implied oneways | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | highway       | junction   | oneway | forw | backw | | 		 | highway       | foot | junction   | oneway | forw | backw | | ||||||
| 		 | primary       | roundabout | no     | x    | x     | | 		 | primary       | no   | roundabout | no     | x    | x     | | ||||||
| 		 | primary       | roundabout | yes    | x    |       | | 		 | primary       | no   | roundabout | yes    | x    |       | | ||||||
| 		 | motorway_link |            | -1     |      |       | | 		 | motorway_link | no   |            | -1     |      |       | | ||||||
| 		 | trunk_link    |            | -1     |      |       | | 		 | trunk_link    | no   |            | -1     |      |       | | ||||||
| 		 | primary       | roundabout | -1     |      | x     | | 		 | primary       | no   | roundabout | -1     |      | x     | | ||||||
| 	 | 	 | ||||||
| 	Scenario: Bike - Oneway:bicycle should override normal oneway tags | 	Scenario: Bike - Oneway:bicycle should override normal oneway tags | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | oneway:bicycle | oneway | junction   | forw | backw | | 		 | foot | oneway:bicycle | oneway | junction   | forw | backw | | ||||||
| 		 | yes            |        |            | x    |       | | 		 | no   | yes            |        |            | x    |       | | ||||||
| 		 | yes            | yes    |            | x    |       | | 		 | no   | yes            | yes    |            | x    |       | | ||||||
| 		 | yes            | no     |            | x    |       | | 		 | no   | yes            | no     |            | x    |       | | ||||||
| 		 | yes            | -1     |            | x    |       | | 		 | no   | yes            | -1     |            | x    |       | | ||||||
| 		 | yes            |        | roundabout | x    |       | | 		 | no   | yes            |        | roundabout | x    |       | | ||||||
| 		 | no             |        |            | x    | x     | | 		 | no   | no             |        |            | x    | x     | | ||||||
| 		 | no             | yes    |            | x    | x     | | 		 | no   | no             | yes    |            | x    | x     | | ||||||
| 		 | no             | no     |            | x    | x     | | 		 | no   | no             | no     |            | x    | x     | | ||||||
| 		 | no             | -1     |            | x    | x     | | 		 | no   | no             | -1     |            | x    | x     | | ||||||
| 		 | no             |        | roundabout | x    | x     | | 		 | no   | no             |        | roundabout | x    | x     | | ||||||
| 		 | -1             |        |            |      | x     | | 		 | no   | -1             |        |            |      | x     | | ||||||
| 		 | -1             | yes    |            |      | x     | | 		 | no   | -1             | yes    |            |      | x     | | ||||||
| 		 | -1             | no     |            |      | x     | | 		 | no   | -1             | no     |            |      | x     | | ||||||
| 		 | -1             | -1     |            |      | x     | | 		 | no   | -1             | -1     |            |      | x     | | ||||||
| 		 | -1             |        | roundabout |      | x     | | 		 | no   | -1             |        | roundabout |      | x     | | ||||||
| 	 | 	 | ||||||
| 	Scenario: Bike - Contra flow | 	Scenario: Bike - Contra flow | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | oneway | cycleway       | forw | backw | | 		 | foot | oneway | cycleway       | forw | backw | | ||||||
| 		 | yes    | opposite       | x    | x     | | 		 | no   | yes    | opposite       | x    | x     | | ||||||
| 		 | yes    | opposite_track | x    | x     | | 		 | no   | yes    | opposite_track | x    | x     | | ||||||
| 		 | yes    | opposite_lane  | x    | x     | | 		 | no   | yes    | opposite_lane  | x    | x     | | ||||||
| 		 | -1     | opposite       | x    | x     | | 		 | no   | -1     | opposite       | x    | x     | | ||||||
| 		 | -1     | opposite_track | x    | x     | | 		 | no   | -1     | opposite_track | x    | x     | | ||||||
| 		 | -1     | opposite_lane  | x    | x     | | 		 | no   | -1     | opposite_lane  | x    | x     | | ||||||
| 		 | no     | opposite       | x    | x     | | 		 | no   | no     | opposite       | x    | x     | | ||||||
| 		 | no     | opposite_track | x    | x     | | 		 | no   | no     | opposite_track | x    | x     | | ||||||
| 		 | no     | opposite_lane  | x    | x     | | 		 | no   | no     | opposite_lane  | x    | x     | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Should not be affected by car tags | 	Scenario: Bike - Should not be affected by car tags | ||||||
| 		Then routability should be | 		Then routability should be | ||||||
| 		 | junction   | oneway | oneway:car | forw | backw | | 		 | foot | junction   | oneway | oneway:car | forw | backw | | ||||||
| 		 |            | yes    | yes        | x    |       | | 		 | no   |            | yes    | yes        | x    |       | | ||||||
| 		 |            | yes    | no         | x    |       | | 		 | no   |            | yes    | no         | x    |       | | ||||||
| 		 |            | yes    | -1         | x    |       | | 		 | no   |            | yes    | -1         | x    |       | | ||||||
| 		 |            | no     | yes        | x    | x     | | 		 | no   |            | no     | yes        | x    | x     | | ||||||
| 		 |            | no     | no         | x    | x     | | 		 | no   |            | no     | no         | x    | x     | | ||||||
| 		 |            | no     | -1         | x    | x     | | 		 | no   |            | no     | -1         | x    | x     | | ||||||
| 		 |            | -1     | yes        |      | x     | | 		 | no   |            | -1     | yes        |      | x     | | ||||||
| 		 |            | -1     | no         |      | x     | | 		 | no   |            | -1     | no         |      | x     | | ||||||
| 		 |            | -1     | -1         |      | x     | | 		 | no   |            | -1     | -1         |      | x     | | ||||||
| 		 | roundabout |        | yes        | x    |       | | 		 | no   | roundabout |        | yes        | x    |       | | ||||||
| 		 | roundabout |        | no         | x    |       | | 		 | no   | roundabout |        | no         | x    |       | | ||||||
| 		 | roundabout |        | -1         | x    |       | | 		 | no   | roundabout |        | -1         | x    |       | | ||||||
|  | |||||||
							
								
								
									
features/bicycle/pushing.feature | 87 (new file)
							| @ -0,0 +1,87 @@ | |||||||
|  | @routing @bicycle @pushing | ||||||
|  | Feature: Bike - Accessibility of different way types | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "bicycle" | ||||||
|  |         Given the shortcuts | ||||||
|  | 		 | key  | value     | | ||||||
|  | 		 | bike | 49s ~20%  | | ||||||
|  | 		 | foot | 121s ~20% | | ||||||
|  | 
 | ||||||
|  | 	Scenario: Bike - Pushing bikes on pedestrian-only ways | ||||||
|  | 	 	Then routability should be | ||||||
|  | 		 | highway    | oneway | forw | backw | | ||||||
|  | 		 | (nil)      |        |      |       | | ||||||
|  | 		 | cycleway   |        | bike | bike  | | ||||||
|  | 		 | primary    |        | bike | bike  | | ||||||
|  | 		 | pedestrian |        | foot | foot  | | ||||||
|  | 		 | footway    |        | foot | foot  | | ||||||
|  | 		 | primary    | yes    | bike | foot  | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Bike - Pushing bikes against normal oneways | ||||||
|  |  	 	Then routability should be | ||||||
|  |  		 | highway    | oneway | forw | backw | | ||||||
|  |  		 | (nil)      |        |      |       | | ||||||
|  |  		 | primary    | yes    | bike | foot  | | ||||||
|  |  		 | pedestrian | yes    | foot | foot  | | ||||||
|  | 
 | ||||||
|  |   	Scenario: Bike - Pushing bikes against reverse oneways | ||||||
|  |   	 	Then routability should be | ||||||
|  |   		 | highway    | oneway | forw | backw | | ||||||
|  |   		 | (nil)      |        |      |       | | ||||||
|  |   		 | primary    | -1     | foot | bike  | | ||||||
|  |   		 | pedestrian | -1     | foot | foot  | | ||||||
|  | 
 | ||||||
|  |  	@square	 | ||||||
|  |  	Scenario: Bike - Push bikes on pedestrian areas | ||||||
|  |  		Given the node map | ||||||
|  |  		 | x |   | | ||||||
|  |  		 | a | b | | ||||||
|  |  		 | d | c | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | area | highway    | | ||||||
|  |  		 | xa    |      | primary    | | ||||||
|  |  		 | abcda | yes  | pedestrian | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route | | ||||||
|  |  		 | a    | b  | abcda | | ||||||
|  |  		 | a    | d  | abcda | | ||||||
|  |  		 | b    | c  | abcda | | ||||||
|  |  		 | c    | b  | abcda | | ||||||
|  |  		 | c    | d  | abcda | | ||||||
|  |  		 | d    | c  | abcda | | ||||||
|  |  		 | d    | a  | abcda | | ||||||
|  |  		 | a    | d  | abcda | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Bike - Pushing bikes on ways with foot=yes | ||||||
|  |  	 	Then routability should be | ||||||
|  |  		 | highway  | foot | bothw | | ||||||
|  |  		 | motorway |      |       | | ||||||
|  |  		 | motorway | yes  | foot  | | ||||||
|  |  		 | runway   |      |       | | ||||||
|  |  		 | runway   | yes  | foot  | | ||||||
|  |      | ||||||
|  |     @todo | ||||||
|  |   	Scenario: Bike - Pushing bikes on ways with foot=yes in one direction | ||||||
|  |   	 	Then routability should be | ||||||
|  |   		 | highway  | foot:forward | foot:backward | forw | backw | | ||||||
|  |   		 | motorway |              |               |      |       | | ||||||
|  |   		 | motorway | yes          |               | foot |       | | ||||||
|  |   		 | motorway |              | yes           |      | foot  | | ||||||
|  | 
 | ||||||
|  |     @construction | ||||||
|  |  	Scenario: Bike - Don't allow routing on ways still under construction  | ||||||
|  |  	 	Then routability should be | ||||||
|  |  		 | highway      | foot | bicycle | bothw | | ||||||
|  |  		 | primary      |      |         | x     | | ||||||
|  |  		 | construction |      |         |       | | ||||||
|  |  		 | construction | yes  |         |       | | ||||||
|  |  		 | construction |      | yes     |       | | ||||||
|  |          | ||||||
|  |     @roundabout | ||||||
|  |   	Scenario: Bike - Don't push bikes against oneway flow on roundabouts | ||||||
|  |   	 	Then routability should be | ||||||
|  |   		 | junction   | forw | backw | | ||||||
|  |   		 | roundabout | x    |       | | ||||||
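The bike and foot entries in the forw/backw columns above are the shortcut keys declared in this feature's Background; the reworked routability step later in this diff looks each cell up in shortcuts_hash before comparing, so a cell either names a shortcut or carries a literal expectation. A minimal sketch of that expansion, using the values declared above (the expand helper is illustrative, not the project's actual code):

  shortcuts_hash = { 'bike' => '49s ~20%', 'foot' => '121s ~20%' }

  # A shortcut key maps to its declared expectation; anything else passes through unchanged.
  def expand(cell, shortcuts)
    shortcuts[cell] || cell
  end

  expand('bike', shortcuts_hash)      # => "49s ~20%"
  expand('42s ~10%', shortcuts_hash)  # => "42s ~10%"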
| @ -1,9 +1,9 @@ | |||||||
| @routing @bicycle @restrictions | @routing @bicycle @restrictions | ||||||
| Feature: Bike - Turn restrictions | Feature: Bike - Turn restrictions | ||||||
| 	Handle turn restrictions as defined by http://wiki.openstreetmap.org/wiki/Relation:restriction | 	Ignore turn restrictions on bicycle, since you always become a temporary pedestrian. | ||||||
| 	Note that if u-turns are allowed, turn restrictions can lead to surprising, but correct, routes. | 	Note that if u-turns are allowed, turn restrictions can lead to surprising, but correct, routes. | ||||||
| 	 | 	 | ||||||
| 	Background: Use car routing | 	Background: | ||||||
| 		Given the profile "bicycle" | 		Given the profile "bicycle" | ||||||
| 	 | 	 | ||||||
| 	@no_turning | 	@no_turning | ||||||
| @ -14,11 +14,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction  | | 		 | type        | way:from | way:to | node:via | restriction  | | ||||||
| @ -26,7 +26,7 @@ Feature: Bike - Turn restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  |       | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  | sj,ej | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
| @ -38,11 +38,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction   | | 		 | type        | way:from | way:to | node:via | restriction   | | ||||||
| @ -52,7 +52,7 @@ Feature: Bike - Turn restrictions | |||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  | sj,wj | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  |       | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
| 	@no_turning | 	@no_turning | ||||||
| 	Scenario: Bike - No u-turn | 	Scenario: Bike - No u-turn | ||||||
| @ -62,11 +62,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction | | 		 | type        | way:from | way:to | node:via | restriction | | ||||||
| @ -74,7 +74,7 @@ Feature: Bike - Turn restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  |       | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  | sj,ej | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
| @ -86,11 +86,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction      | | 		 | type        | way:from | way:to | node:via | restriction      | | ||||||
| @ -98,7 +98,7 @@ Feature: Bike - Turn restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  |       | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  | sj,ej | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
| @ -110,11 +110,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction    | | 		 | type        | way:from | way:to | node:via | restriction    | | ||||||
| @ -123,8 +123,8 @@ Feature: Bike - Turn restrictions | |||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  | sj,wj | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  |       | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  |       | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
| 	@only_turning | 	@only_turning | ||||||
| 	Scenario: Bike - Only right turn | 	Scenario: Bike - Only right turn | ||||||
| @ -134,11 +134,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction     | | 		 | type        | way:from | way:to | node:via | restriction     | | ||||||
| @ -146,8 +146,8 @@ Feature: Bike - Turn restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  |       | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  |       | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  | sj,ej | | 		 | s    | e  | sj,ej | | ||||||
| 	 | 	 | ||||||
| 	@only_turning | 	@only_turning | ||||||
| @ -158,11 +158,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction      | | 		 | type        | way:from | way:to | node:via | restriction      | | ||||||
| @ -170,9 +170,9 @@ Feature: Bike - Turn restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  |       | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  |       | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
| 	@no_turning | 	@no_turning | ||||||
| 	Scenario: Bike - Handle any only_* restriction | 	Scenario: Bike - Handle any only_* restriction | ||||||
| @ -182,11 +182,11 @@ Feature: Bike - Turn restrictions | |||||||
| 		 |   | s |   | | 		 |   | s |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | oneway | | 		 | nodes | oneway | foot | | ||||||
| 		 | sj    | yes    | | 		 | sj    | yes    | no   | | ||||||
| 		 | nj    | -1     | | 		 | nj    | -1     | no   | | ||||||
| 		 | wj    | -1     | | 		 | wj    | -1     | no   | | ||||||
| 		 | ej    | -1     | | 		 | ej    | -1     | no   | | ||||||
| 
 | 
 | ||||||
| 		And the relations | 		And the relations | ||||||
| 		 | type        | way:from | way:to | node:via | restriction        | | 		 | type        | way:from | way:to | node:via | restriction        | | ||||||
| @ -194,11 +194,11 @@ Feature: Bike - Turn restrictions | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | | 		 | from | to | route | | ||||||
| 		 | s    | w  |       | | 		 | s    | w  | sj,wj | | ||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  |       | | 		 | s    | e  | sj,ej | | ||||||
| 
 | 
 | ||||||
|  	@except @todo |  	@except | ||||||
|  	Scenario: Bike - Except tag on no_ restrictions |  	Scenario: Bike - Except tag on no_ restrictions | ||||||
|  		Given the node map |  		Given the node map | ||||||
|  		 | b | x | c | |  		 | b | x | c | | ||||||
| @ -206,29 +206,29 @@ Feature: Bike - Turn restrictions | |||||||
|  		 |   | s |   | |  		 |   | s |   | | ||||||
| 
 | 
 | ||||||
|  		And the ways |  		And the ways | ||||||
|  		 | nodes | oneway | |  		 | nodes | oneway | foot | | ||||||
|  		 | sj    | yes    | |  		 | sj    | no     | no   | | ||||||
|  		 | xj    | -1     | |  		 | xj    | -1     | no   | | ||||||
|  		 | aj    | -1     | |  		 | aj    | -1     | no   | | ||||||
|  		 | bj    | -1     | |  		 | bj    | no     | no   | | ||||||
|  		 | cj    | -1     | |  		 | cj    | -1     | no   | | ||||||
|  		 | dj    | -1     | |  		 | dj    | -1     | no   | | ||||||
| 
 | 
 | ||||||
|  		And the relations |  		And the relations | ||||||
|  		 | type        | way:from | way:to | node:via | restriction   | except  | |  		 | type        | way:from | way:to | node:via | restriction   | except  | | ||||||
|  		 | restriction | sj       | aj     | j        | no_left_turn  | bicycle | |  		 | restriction | sj       | aj     | j        | no_left_turn  | bicycle | | ||||||
|  		 | restriction | sj       | bj     | j        | no_left_turn  |         | |  		 | restriction | sj       | bj     | j        | no_left_turn  |         | | ||||||
|  		 | restriction | sj       | cj     | j        | no_right_turn | bicycle | |  		 | restriction | sj       | cj     | j        | no_right_turn |         | | ||||||
|  		 | restriction | sj       | dj     | j        | no_right_turn |         | |  		 | restriction | sj       | dj     | j        | no_right_turn | bicycle | | ||||||
| 
 | 
 | ||||||
|  		When I route I should get |  		When I route I should get | ||||||
|  		 | from | to | route | |  		 | from | to | route | | ||||||
|  		 | s    | a  | sj,aj | |  		 | s    | a  | sj,aj | | ||||||
|  		 | s    | b  |       | |  		 | s    | b  | sj,bj | | ||||||
|  		 | s    | c  |       | |  		 | s    | c  | sj,cj | | ||||||
|  		 | s    | d  | sj,dj | |  		 | s    | d  | sj,dj | | ||||||
| 
 | 
 | ||||||
|   	@except @todo |   	@except | ||||||
|   	Scenario: Bike - Except tag on only_ restrictions |   	Scenario: Bike - Except tag on only_ restrictions | ||||||
|   		Given the node map |   		Given the node map | ||||||
|   		 | a |   | b | |   		 | a |   | b | | ||||||
| @ -236,10 +236,10 @@ Feature: Bike - Turn restrictions | |||||||
|   		 |   | s |   | |   		 |   | s |   | | ||||||
| 
 | 
 | ||||||
|   		And the ways |   		And the ways | ||||||
|   		 | nodes | oneway | |   		 | nodes | oneway | foot | | ||||||
|   		 | sj    | yes    | |   		 | sj    | yes    | no   | | ||||||
|   		 | aj    | -1     | |   		 | aj    | no     | no   | | ||||||
|   		 | bj    | -1     | |   		 | bj    | no     | no   | | ||||||
| 
 | 
 | ||||||
|   		And the relations |   		And the relations | ||||||
|   		 | type        | way:from | way:to | node:via | restriction      | except  | |   		 | type        | way:from | way:to | node:via | restriction      | except  | | ||||||
| @ -249,3 +249,41 @@ Feature: Bike - Turn restrictions | |||||||
|   		 | from | to | route | |   		 | from | to | route | | ||||||
|   		 | s    | a  | sj,aj | |   		 | s    | a  | sj,aj | | ||||||
|   		 | s    | b  | sj,bj | |   		 | s    | b  | sj,bj | | ||||||
|  | 
 | ||||||
|  |   	@except | ||||||
|  |   	Scenario: Bike - Multiple except tag values | ||||||
|  |   		Given the node map | ||||||
|  |   		 | s | j | a | | ||||||
|  |   		 |   |   | b | | ||||||
|  |   		 |   |   | c | | ||||||
|  |   		 |   |   | d | | ||||||
|  |   		 |   |   | e | | ||||||
|  |   		 |   |   | f | | ||||||
|  | 
 | ||||||
|  |   		And the ways | ||||||
|  |   		 | nodes | oneway | foot | | ||||||
|  |   		 | sj    | yes    | no   | | ||||||
|  |   		 | ja    | yes    | no   | | ||||||
|  |   		 | jb    | yes    | no   | | ||||||
|  |   		 | jc    | yes    | no   | | ||||||
|  |   		 | jd    | yes    | no   | | ||||||
|  |   		 | je    | yes    | no   | | ||||||
|  |   		 | jf    | yes    | no   | | ||||||
|  | 
 | ||||||
|  |   		And the relations | ||||||
|  |   		 | type        | way:from | way:to | node:via | restriction    | except           | | ||||||
|  |   		 | restriction | sj       | ja     | j        | no_straight_on |                  | | ||||||
|  |   		 | restriction | sj       | jb     | j        | no_straight_on | bicycle          | | ||||||
|  |   		 | restriction | sj       | jc     | j        | no_straight_on | bus; bicycle     | | ||||||
|  |   		 | restriction | sj       | jd     | j        | no_straight_on | bicycle; motocar | | ||||||
|  |   		 | restriction | sj       | je     | j        | no_straight_on | bus, bicycle     | | ||||||
|  |   		 | restriction | sj       | jf     | j        | no_straight_on | bicycle, bus     | | ||||||
|  | 
 | ||||||
|  |   		When I route I should get | ||||||
|  |   		 | from | to | route | | ||||||
|  |   		 | s    | a  | sj,ja | | ||||||
|  |   		 | s    | b  | sj,jb | | ||||||
|  |   		 | s    | c  | sj,jc | | ||||||
|  |   		 | s    | d  | sj,jd | | ||||||
|  |   		 | s    | e  | sj,je | | ||||||
|  |   		 | s    | f  | sj,jf | | ||||||
|  | |||||||
| @ -30,3 +30,10 @@ Bringing bikes on trains and subways | |||||||
| 		 | (nil)   | some_tag   |         |       | | 		 | (nil)   | some_tag   |         |       | | ||||||
| 		 | (nil)   | some_tag   | no      |       | | 		 | (nil)   | some_tag   | no      |       | | ||||||
| 		 | (nil)   | some_tag   | yes     | x     | | 		 | (nil)   | some_tag   | yes     | x     | | ||||||
|  | 
 | ||||||
|  | 	@construction | ||||||
|  |  	Scenario: Bike - Don't route on railways under construction | ||||||
|  |  	 	Then routability should be | ||||||
|  |  		 | highway | railway      | bicycle | bothw | | ||||||
|  |  		 | primary |              |         | x     | | ||||||
|  |  		 | (nil)   | construction | yes     |       | | ||||||
|  | |||||||
							
								
								
									
features/bicycle/turn_penalty.feature | 33 (new file)
							| @ -0,0 +1,33 @@ | |||||||
|  | @routing @bicycle @turn_penalty | ||||||
|  | Feature: Turn Penalties | ||||||
|  | 
 | ||||||
|  |     Background: | ||||||
|  |         Given the profile "turnbot"      | ||||||
|  | 
 | ||||||
|  |     Scenario: Bike - turns should incur a delay that depends on the angle | ||||||
|  | 
 | ||||||
|  |         Given the node map | ||||||
|  |          | c | d | e | | ||||||
|  |          | b | j | f | | ||||||
|  |          | a | s | g | | ||||||
|  | 
 | ||||||
|  |         And the ways | ||||||
|  |          | nodes | | ||||||
|  |          | sj    | | ||||||
|  |          | ja    | | ||||||
|  |          | jb    | | ||||||
|  |          | jc    | | ||||||
|  |          | jd    | | ||||||
|  |          | je    | | ||||||
|  |          | jf    | | ||||||
|  |          | jg    | | ||||||
|  | 
 | ||||||
|  |         When I route I should get | ||||||
|  |          | from | to | route | time    | distance | | ||||||
|  |          | s    | a  | sj,ja | 39s +-1 | 242m +-1 | | ||||||
|  |          | s    | b  | sj,jb | 30s +-1 | 200m +-1 | | ||||||
|  |          | s    | c  | sj,jc | 29s +-1 | 242m +-1 | | ||||||
|  |          | s    | d  | sj,jd | 20s +-1 | 200m +-1 | | ||||||
|  |          | s    | e  | sj,je | 29s +-1 | 242m +-1 | | ||||||
|  |          | s    | f  | sj,jf | 30s +-1 | 200m +-1 | | ||||||
|  |          | s    | g  | sj,jg | 39s +-1 | 242m +-1 | | ||||||
| @ -4,32 +4,38 @@ Feature: Bike - Accessibility of different way types | |||||||
| 	Background: | 	Background: | ||||||
| 		Given the profile "bicycle" | 		Given the profile "bicycle" | ||||||
| 
 | 
 | ||||||
| 	Scenario: Bike - Basic access | 	Scenario: Bike - Routability of way types | ||||||
| 	Bikes are allowed on footways etc. because you can push your bike at a lower speed. | 	Bikes are allowed on footways etc. because you can push your bike at a lower speed. | ||||||
| 	 	Given the profile "bicycle" | 	Pier is not allowed, since it's tagged using man_made=pier. | ||||||
| 	 	Then routability should be | 	 	Then routability should be | ||||||
| 		 | highway        | forw | | 		 | highway        | bothw | | ||||||
| 		 | (nil)          |      | | 		 | (nil)          |       | | ||||||
| 		 | motorway       |      | | 		 | motorway       |       | | ||||||
| 		 | motorway_link  |      | | 		 | motorway_link  |       | | ||||||
| 		 | trunk          |      | | 		 | trunk          |       | | ||||||
| 		 | trunk_link     |      | | 		 | trunk_link     |       | | ||||||
| 		 | primary        | x    | | 		 | primary        | x     | | ||||||
| 		 | primary_link   | x    | | 		 | primary_link   | x     | | ||||||
| 		 | secondary      | x    | | 		 | secondary      | x     | | ||||||
| 		 | secondary_link | x    | | 		 | secondary_link | x     | | ||||||
| 		 | tertiary       | x    | | 		 | tertiary       | x     | | ||||||
| 		 | tertiary_link  | x    | | 		 | tertiary_link  | x     | | ||||||
| 		 | residential    | x    | | 		 | residential    | x     | | ||||||
| 		 | service        | x    | | 		 | service        | x     | | ||||||
| 		 | unclassified   | x    | | 		 | unclassified   | x     | | ||||||
| 		 | living_street  | x    | | 		 | living_street  | x     | | ||||||
| 		 | road           | x    | | 		 | road           | x     | | ||||||
| 		 | track          | x    | | 		 | track          | x     | | ||||||
| 		 | path           | x    | | 		 | path           | x     | | ||||||
| 		 | footway        | x    | | 		 | footway        | x     | | ||||||
| 		 | pedestrian     | x    | | 		 | pedestrian     | x     | | ||||||
| 		 | steps          | x    | | 		 | steps          | x     | | ||||||
| 		 | pier           | x    | | 		 | cycleway       | x     | | ||||||
| 		 | cycleway       | x    | | 		 | bridleway      |       | | ||||||
| 		 | bridleway      |      | | 		 | pier           |       | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Bike - Routability of man_made structures | ||||||
|  |  	 	Then routability should be | ||||||
|  |  		 | highway | man_made | bothw | | ||||||
|  |  		 | (nil)   | (nil)    |       | | ||||||
|  |  		 | (nil)   | pier     | x     | | ||||||
|  | |||||||
| @ -10,16 +10,16 @@ Feature: Car - Max speed restrictions | |||||||
| 		 | a | b | c | | 		 | a | b | c | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | highway  | maxspeed | | 		 | nodes | highway | maxspeed | | ||||||
| 		 | ab    | trunk |          | | 		 | ab    | trunk   |          | | ||||||
| 		 | bc    | trunk | 10       | | 		 | bc    | trunk   | 10       | | ||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | time      | | 		 | from | to | route | time      | | ||||||
| 		 | a    | b  | ab    | 42s ~10%  | | 		 | a    | b  | ab    | 42s ~10%  | | ||||||
| 		 | b    | c  | bc    | 360s ~10% | | 		 | b    | c  | bc    | 360s ~10% | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Car - Ignore maxspeed when higher than way speed | 	Scenario: Car - Do not ignore maxspeed when higher than way speed | ||||||
| 		Given the node map | 		Given the node map | ||||||
| 		 | a | b | c | | 		 | a | b | c | | ||||||
| 
 | 
 | ||||||
| @ -31,4 +31,24 @@ Feature: Car - Max speed restrictions | |||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | time      | | 		 | from | to | route | time      | | ||||||
| 		 | a    | b  | ab    | 144s ~10% | | 		 | a    | b  | ab    | 144s ~10% | | ||||||
| 		 | b    | c  | bc    | 144s ~10%  | | 		 | b    | c  | bc    | 63s ~10%  | | ||||||
|  | 
 | ||||||
|  |      Scenario: Car - Forward/backward maxspeed | ||||||
|  |         Given the shortcuts | ||||||
|  |  		 | key   | value     | | ||||||
|  |  		 | car   | 12s ~10%  | | ||||||
|  |  		 | run   | 73s ~10%  | | ||||||
|  |  		 | walk  | 146s ~10% | | ||||||
|  |  		 | snail | 720s ~10% | | ||||||
|  |   		 | ||||||
|  |   		And a grid size of 100 meters | ||||||
|  | 
 | ||||||
|  |   	 	Then routability should be | ||||||
|  |      	 | maxspeed | maxspeed:forward | maxspeed:backward | forw  | backw | | ||||||
|  |      	 |          |                  |                   | car   | car   | | ||||||
|  |      	 | 10       |                  |                   | run   | run   | | ||||||
|  |      	 |          | 10               |                   | run   | car   | | ||||||
|  |      	 |          |                  | 10                | car   | run   | | ||||||
|  |      	 | 1        | 10               |                   | run   | snail | | ||||||
|  |      	 | 1        |                  | 10                | snail | run   | | ||||||
|  |      	 | 1        | 5                | 10                | walk  | run   | | ||||||
|  | |||||||
							
								
								
									
features/car/permissive.feature | 0 (new empty file)
							| @ -198,8 +198,8 @@ Feature: Car - Turn restrictions | |||||||
| 		 | s    | n  | sj,nj | | 		 | s    | n  | sj,nj | | ||||||
| 		 | s    | e  |       | | 		 | s    | e  |       | | ||||||
| 
 | 
 | ||||||
|   	@except @todo |   	@except | ||||||
|   	Scenario: Bike - Except tag on no_ restrictions |   	Scenario: Car - Except tag on no_ restrictions | ||||||
|   		Given the node map |   		Given the node map | ||||||
|   		 | b | x | c | |   		 | b | x | c | | ||||||
|   		 | a | j | d | |   		 | a | j | d | | ||||||
| @ -207,19 +207,19 @@ Feature: Car - Turn restrictions | |||||||
| 
 | 
 | ||||||
|   		And the ways |   		And the ways | ||||||
|   		 | nodes | oneway | |   		 | nodes | oneway | | ||||||
|   		 | sj    | yes    | |   		 | sj    | no     | | ||||||
|   		 | xj    | -1     | |   		 | xj    | -1     | | ||||||
|   		 | aj    | -1     | |   		 | aj    | -1     | | ||||||
|   		 | bj    | -1     | |   		 | bj    | no     | | ||||||
|   		 | cj    | -1     | |   		 | cj    | no     | | ||||||
|   		 | dj    | -1     | |   		 | dj    | -1     | | ||||||
| 
 | 
 | ||||||
|   		And the relations |   		And the relations | ||||||
|   		 | type        | way:from | way:to | node:via | restriction   | except   | |   		 | type        | way:from | way:to | node:via | restriction   | except   | | ||||||
|   		 | restriction | sj       | aj     | j        | no_left_turn  | motorcar | |   		 | restriction | sj       | aj     | j        | no_left_turn  | motorcar | | ||||||
|   		 | restriction | sj       | bj     | j        | no_left_turn  |          | |   		 | restriction | sj       | bj     | j        | no_left_turn  |          | | ||||||
|   		 | restriction | sj       | cj     | j        | no_right_turn | motorcar | |   		 | restriction | sj       | cj     | j        | no_right_turn |          | | ||||||
|   		 | restriction | sj       | dj     | j        | no_right_turn |          | |   		 | restriction | sj       | dj     | j        | no_right_turn | motorcar | | ||||||
| 
 | 
 | ||||||
|   		When I route I should get |   		When I route I should get | ||||||
|   		 | from | to | route | |   		 | from | to | route | | ||||||
| @ -228,8 +228,8 @@ Feature: Car - Turn restrictions | |||||||
|   		 | s    | c  |       | |   		 | s    | c  |       | | ||||||
|   		 | s    | d  | sj,dj | |   		 | s    | d  | sj,dj | | ||||||
| 
 | 
 | ||||||
|    	@except @todo |    	@except | ||||||
|    	Scenario: Bike - Except tag on only_ restrictions |    	Scenario: Car - Except tag on only_ restrictions | ||||||
|    		Given the node map |    		Given the node map | ||||||
|    		 | a |   | b | |    		 | a |   | b | | ||||||
|    		 |   | j |   | |    		 |   | j |   | | ||||||
| @ -238,8 +238,8 @@ Feature: Car - Turn restrictions | |||||||
|    		And the ways |    		And the ways | ||||||
|    		 | nodes | oneway | |    		 | nodes | oneway | | ||||||
|    		 | sj    | yes    | |    		 | sj    | yes    | | ||||||
|    		 | aj    | -1     | |    		 | aj    | no     | | ||||||
|    		 | bj    | -1     | |    		 | bj    | no     | | ||||||
| 
 | 
 | ||||||
|    		And the relations |    		And the relations | ||||||
|    		 | type        | way:from | way:to | node:via | restriction      | except   | |    		 | type        | way:from | way:to | node:via | restriction      | except   | | ||||||
|  | |||||||
							
								
								
									
features/car/shuttle_train.feature | 31 (new file)
							| @ -0,0 +1,31 @@ | |||||||
|  | @routing @car @shuttle_train | ||||||
|  | Feature: Car - Handle ferry shuttle train routes | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "car" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Car - Use a ferry route | ||||||
|  | 		Given the node map | ||||||
|  | 		 | a | b | c |   |   | | ||||||
|  | 		 |   |   | d |   |   | | ||||||
|  | 		 |   |   | e | f | g | | ||||||
|  | 	 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | highway | route         | bicycle | | ||||||
|  | 		 | abc   | primary |               |         | | ||||||
|  | 		 | cde   |         | shuttle_train | yes     | | ||||||
|  | 		 | ef    | primary |               |         | | ||||||
|  | 		 | fg    |         | ferry_man     |         | | ||||||
|  |     | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route      | | ||||||
|  | 		 | a    | g  | abc,cde,ef | | ||||||
|  | 		 | b    | f  | abc,cde,ef | | ||||||
|  | 		 | e    | c  | cde        | | ||||||
|  | 		 | e    | b  | cde,abc    | | ||||||
|  | 		 | e    | a  | cde,abc    | | ||||||
|  | 		 | c    | e  | cde        | | ||||||
|  | 		 | c    | f  | cde,ef     | | ||||||
|  | 		 | f    | g  |            | | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
							
								
								
									
features/foot/maxspeed.feature | 20 (new file)
							| @ -0,0 +1,20 @@ | |||||||
|  | @routing @maxspeed @foot | ||||||
|  | Feature: Foot - Ignore max speed restrictions | ||||||
|  | 
 | ||||||
|  | Background: Use specific speeds | ||||||
|  | 	Given the profile "foot" | ||||||
|  | 
 | ||||||
|  | @todo | ||||||
|  | Scenario: Foot - Ignore maxspeed | ||||||
|  | 	Then routability should be | ||||||
|  | 	 | highway     | maxspeed  | bothw     | | ||||||
|  | 	 | residential |           | 145s ~10% | | ||||||
|  | 	 | residential | 1         | 145s ~10% | | ||||||
|  | 	 | residential | 100       | 145s ~10% | | ||||||
|  | 	 | residential | 1         | 145s ~10% | | ||||||
|  | 	 | residential | 1mph      | 145s ~10% | | ||||||
|  | 	 | residential | 1 mph     | 145s ~10% | | ||||||
|  | 	 | residential | 1unknown  | 145s ~10% | | ||||||
|  | 	 | residential | 1 unknown | 145s ~10% | | ||||||
|  | 	 | residential | none      | 145s ~10% | | ||||||
|  | 	 | residential | signals   | 145s ~10% | | ||||||
							
								
								
									
features/nearest/pick.feature | 56 (new file)
							| @ -0,0 +1,56 @@ | |||||||
|  | @nearest | ||||||
|  | Feature: Locating Nearest node on a Way - pick closest way | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Nearest - two ways crossing | ||||||
|  | 		Given the node map | ||||||
|  | 		 |   | 0 | c | 1 |   | | ||||||
|  | 		 | 7 |   | n |   | 2 | | ||||||
|  | 		 | a | k | x | m | b | | ||||||
|  | 		 | 6 |   | l |   | 3 | | ||||||
|  | 		 |   | 5 | d | 4 |   | | ||||||
|  | 
 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | | ||||||
|  | 		 | axb   | | ||||||
|  | 		 | cxd   | | ||||||
|  | 
 | ||||||
|  | 		When I request nearest I should get | ||||||
|  | 		 | in | out | | ||||||
|  | 		 | 0  | c   | | ||||||
|  | 		 | 1  | c   | | ||||||
|  | 		 | 2  | b   | | ||||||
|  | 		 | 3  | b   | | ||||||
|  | 		 | 4  | d   | | ||||||
|  | 		 | 5  | d   | | ||||||
|  | 		 | 6  | a   | | ||||||
|  | 		 | 7  | a   | | ||||||
|  | 		 | k  | k   | | ||||||
|  | 		 | l  | l   | | ||||||
|  | 		 | m  | m   | | ||||||
|  | 		 | n  | n   | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Nearest - inside a triangle | ||||||
|  |  		Given the node map | ||||||
|  | 		 |   |  |  |   |   | c |   |   |  |  |   | | ||||||
|  | 		 |   |  |  |   |   |   |   |   |  |  |   | | ||||||
|  | 		 |   |  |  | y |   |   |   | z |  |  |   | | ||||||
|  | 		 |   |  |  |   | 0 |   | 1 |   |  |  |   | | ||||||
|  | 		 |   |  |  | 2 |   | 3 |   | 4 |  |  |   | | ||||||
|  | 		 | a |  |  | x |   | u |   | w |  |  | b | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | | ||||||
|  |  		 | ab    | | ||||||
|  |  		 | bc    | | ||||||
|  |  		 | ca    | | ||||||
|  | 
 | ||||||
|  |  		When I request nearest I should get | ||||||
|  |  		 | in | out | | ||||||
|  |  		 | 0  | y   | | ||||||
|  |  		 | 1  | z   | | ||||||
|  |  		 | 2  | x   | | ||||||
|  |  		 | 3  | u   | | ||||||
|  |  		 | 4  | w   | | ||||||
							
								
								
									
features/nearest/projection.feature | 105 (new file)
							| @ -0,0 +1,105 @@ | |||||||
|  | @nearest | ||||||
|  | Feature: Locating Nearest node on a Way - basic projection onto way | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Nearest - east-west way | ||||||
|  | 		Given the node map | ||||||
|  | 		 | 0 | 1 | 2 | 3 | 4 | | ||||||
|  | 		 |   | a | x | b |   | | ||||||
|  | 		 | 5 | 6 | 7 | 8 | 9 | | ||||||
|  | 
 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | | ||||||
|  | 		 | ab    | | ||||||
|  | 
 | ||||||
|  | 		When I request nearest I should get | ||||||
|  | 		 | in | out | | ||||||
|  | 		 | 0  | a   | | ||||||
|  | 		 | 1  | a   | | ||||||
|  | 		 | 2  | x   | | ||||||
|  | 		 | 3  | b   | | ||||||
|  | 		 | 4  | b   | | ||||||
|  | 		 | 5  | a   | | ||||||
|  | 		 | 6  | a   | | ||||||
|  | 		 | 7  | x   | | ||||||
|  | 		 | 8  | b   | | ||||||
|  | 		 | 9  | b   | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Nearest - north-south way | ||||||
|  |  		Given the node map | ||||||
|  |  		 | 0 |   | 5 | | ||||||
|  |  		 | 1 | a | 6 | | ||||||
|  |  		 | 2 | x | 7 | | ||||||
|  |  		 | 3 | b | 8 | | ||||||
|  |  		 | 4 |   | 9 | | ||||||
|  |           | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | | ||||||
|  |  		 | ab    | | ||||||
|  | 
 | ||||||
|  |  		When I request nearest I should get | ||||||
|  |  		 | in | out | | ||||||
|  |  		 | 0  | a   | | ||||||
|  |  		 | 1  | a   | | ||||||
|  |  		 | 2  | x   | | ||||||
|  |  		 | 3  | b   | | ||||||
|  |  		 | 4  | b   | | ||||||
|  |  		 | 5  | a   | | ||||||
|  |  		 | 6  | a   | | ||||||
|  |  		 | 7  | x   | | ||||||
|  |  		 | 8  | b   | | ||||||
|  |  		 | 9  | b   | | ||||||
|  | 
 | ||||||
|  |   	Scenario: Nearest - diagonal 1 | ||||||
|  |   		Given the node map | ||||||
|  |  		 | 8 |   | 4 |   |   |   | | ||||||
|  |  		 |   | a |   | 5 |   |   | | ||||||
|  |  		 | 0 |   | x |   | 6 |   | | ||||||
|  |  		 |   | 1 |   | y |   | 7 | | ||||||
|  |  		 |   |   | 2 |   | b |   | | ||||||
|  |  		 |   |   |   | 3 |   | 9 | | ||||||
|  | 
 | ||||||
|  |   		And the ways | ||||||
|  |   		 | nodes | | ||||||
|  |   		 | ab    | | ||||||
|  | 
 | ||||||
|  |   		When I request nearest I should get | ||||||
|  |   		 | in | out | | ||||||
|  |   		 | 0  | a   | | ||||||
|  |   		 | 1  | x   | | ||||||
|  |   		 | 2  | y   | | ||||||
|  |   		 | 3  | b   | | ||||||
|  |   		 | 4  | a   | | ||||||
|  |   		 | 5  | x   | | ||||||
|  |   		 | 6  | y   | | ||||||
|  |   		 | 7  | b   | | ||||||
|  |   		 | 8  | a   | | ||||||
|  |   		 | 9  | b   | | ||||||
|  | 
 | ||||||
|  |    	Scenario: Nearest - diagonal 2 | ||||||
|  |    		Given the node map | ||||||
|  |   		 |   |   |   | 3 |   | 9 | | ||||||
|  |   		 |   |   | 2 |   | b |   | | ||||||
|  |   		 |   | 1 |   | y |   | 7 | | ||||||
|  |   		 | 0 |   | x |   | 6 |   | | ||||||
|  |   		 |   | a |   | 5 |   |   | | ||||||
|  |   		 | 8 |   | 4 |   |   |   | | ||||||
|  | 
 | ||||||
|  |    		And the ways | ||||||
|  |    		 | nodes | | ||||||
|  |    		 | ab    | | ||||||
|  | 
 | ||||||
|  |    		When I request nearest I should get | ||||||
|  |    		 | in | out | | ||||||
|  |    		 | 0  | a   | | ||||||
|  |    		 | 1  | x   | | ||||||
|  |    		 | 2  | y   | | ||||||
|  |    		 | 3  | b   | | ||||||
|  |    		 | 4  | a   | | ||||||
|  |    		 | 5  | x   | | ||||||
|  |    		 | 6  | y   | | ||||||
|  |    		 | 7  | b   | | ||||||
|  |    		 | 8  | a   | | ||||||
|  |    		 | 9  | b   | | ||||||
| @ -6,6 +6,12 @@ Given /^a grid size of (\d+) meters$/ do |meters| | |||||||
|   set_grid_size meters |   set_grid_size meters | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
|  | Given /^the shortcuts$/ do |table| | ||||||
|  |   table.hashes.each do |row| | ||||||
|  |     shortcuts_hash[ row['key'] ] = row['value'] | ||||||
|  |   end | ||||||
|  | end | ||||||
|  | 
 | ||||||
| Given /^the node map$/ do |table| | Given /^the node map$/ do |table| | ||||||
|   table.raw.each_with_index do |row,ri| |   table.raw.each_with_index do |row,ri| | ||||||
|     row.each_with_index do |name,ci| |     row.each_with_index do |name,ci| | ||||||
| @ -90,14 +96,18 @@ Given /^the relations$/ do |table| | |||||||
|     relation = OSM::Relation.new make_osm_id, OSM_USER, OSM_TIMESTAMP |     relation = OSM::Relation.new make_osm_id, OSM_USER, OSM_TIMESTAMP | ||||||
|     row.each_pair do |key,value| |     row.each_pair do |key,value| | ||||||
|       if key =~ /^node:(.*)/ |       if key =~ /^node:(.*)/ | ||||||
|         raise "***invalid relation node member '#{value}', must be single character" unless value.size == 1 |         value.split(',').map { |v| v.strip }.each do |node_name| | ||||||
|         node = find_node_by_name(value) |           raise "***invalid relation node member '#{node_name}', must be single character" unless node_name.size == 1 | ||||||
|         raise "*** unknown relation node member '#{value}'" unless node |           node = find_node_by_name(node_name) | ||||||
|         relation << OSM::Member.new( 'node', node.id, $1 ) |           raise "*** unknown relation node member '#{node_name}'" unless node | ||||||
|  |           relation << OSM::Member.new( 'node', node.id, $1 ) | ||||||
|  |         end | ||||||
|       elsif key =~ /^way:(.*)/ |       elsif key =~ /^way:(.*)/ | ||||||
|         way = find_way_by_name(value) |         value.split(',').map { |v| v.strip }.each do |way_name| | ||||||
|         raise "*** unknown relation way member '#{value}'" unless way |           way = find_way_by_name(way_name) | ||||||
|         relation << OSM::Member.new( 'way', way.id, $1 ) |           raise "*** unknown relation way member '#{way_name}'" unless way | ||||||
|  |           relation << OSM::Member.new( 'way', way.id, $1 ) | ||||||
|  |         end | ||||||
|       elsif key =~ /^(.*):(.*)/ |       elsif key =~ /^(.*):(.*)/ | ||||||
|         raise "*** unknown relation member type '#{$1}', must be either 'node' or 'way'" |         raise "*** unknown relation member type '#{$1}', must be either 'node' or 'way'" | ||||||
|       else |       else | ||||||
|  | |||||||
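The relation step above now splits node:* and way:* cells on commas, trimming each name and adding one member per name, with the role taken from the column header. A tiny, self-contained illustration of the parsing (the cell value is made up for this example):

  cell = ' aj, bj '
  cell.split(',').map { |v| v.strip }   # => ["aj", "bj"]
  # Each name then becomes its own relation member with the same role, e.g. 'to'.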
							
								
								
									
features/step_definitions/nearest.rb | 51 (new file)
							| @ -0,0 +1,51 @@ | |||||||
|  | When /^I request nearest I should get$/ do |table| | ||||||
|  |   reprocess | ||||||
|  |   actual = [] | ||||||
|  |   OSRMLauncher.new do | ||||||
|  |     table.hashes.each_with_index do |row,ri| | ||||||
|  |       in_node = @name_node_hash[ row['in'] ] | ||||||
|  |       raise "*** unknown in-node '#{row['in']}" unless in_node | ||||||
|  | 
 | ||||||
|  |       out_node = @name_node_hash[ row['out'] ] | ||||||
|  |       raise "*** unknown out-node '#{row['out']}" unless out_node | ||||||
|  | 
 | ||||||
|  |       response = request_nearest("#{in_node.lat},#{in_node.lon}") | ||||||
|  |       if response.code == "200" && response.body.empty? == false | ||||||
|  |         json = JSON.parse response.body | ||||||
|  |         if json['status'] == 0 | ||||||
|  |           coord =  json['mapped_coordinate'] | ||||||
|  |         end | ||||||
|  |       end | ||||||
|  |        | ||||||
|  |       got = {'in' => row['in'], 'out' => coord } | ||||||
|  |        | ||||||
|  |       ok = true | ||||||
|  |       row.keys.each do |key| | ||||||
|  |         if key=='out' | ||||||
|  |           if FuzzyMatch.match_location coord, out_node | ||||||
|  |             got[key] = row[key] | ||||||
|  |           else | ||||||
|  |             row[key] = "#{row[key]} [#{out_node.lat},#{out_node.lon}]" | ||||||
|  |             ok = false | ||||||
|  |           end | ||||||
|  |         end | ||||||
|  |       end | ||||||
|  |        | ||||||
|  |       unless ok | ||||||
|  |         failed = { :attempt => 'nearest', :query => @query, :response => response } | ||||||
|  |         log_fail row,got,[failed] | ||||||
|  |       end | ||||||
|  |        | ||||||
|  |       actual << got | ||||||
|  |     end | ||||||
|  |   end | ||||||
|  |   table.routing_diff! actual | ||||||
|  | end | ||||||
|  | 
 | ||||||
|  | When /^I route (\d+) times I should get$/ do |n,table| | ||||||
|  |   ok = true | ||||||
|  |   n.to_i.times do | ||||||
|  |     ok = false unless step "I route I should get", table | ||||||
|  |   end | ||||||
|  |   ok | ||||||
|  | end | ||||||
| @ -16,18 +16,21 @@ Then /^routability should be$/ do |table| | |||||||
|           elsif direction == 'backw' || direction == 'bothw' |           elsif direction == 'backw' || direction == 'bothw' | ||||||
|             response = request_route("#{ORIGIN[1]},#{ORIGIN[0]+(3+WAY_SPACING*i)*@zoom}","#{ORIGIN[1]},#{ORIGIN[0]+(1+WAY_SPACING*i)*@zoom}") |             response = request_route("#{ORIGIN[1]},#{ORIGIN[0]+(3+WAY_SPACING*i)*@zoom}","#{ORIGIN[1]},#{ORIGIN[0]+(1+WAY_SPACING*i)*@zoom}") | ||||||
|           end |           end | ||||||
|  |           want = shortcuts_hash[row[direction]] || row[direction]     #expand shortcuts | ||||||
|           got[direction] = route_status response |           got[direction] = route_status response | ||||||
|           json = JSON.parse(response.body) |           json = JSON.parse(response.body) | ||||||
|           if got[direction].empty? == false |           if got[direction].empty? == false | ||||||
|             route = way_list json['route_instructions'] |             route = way_list json['route_instructions'] | ||||||
|             if route != "w#{i}" |             if route != "w#{i}" | ||||||
|               got[direction] = "testing w#{i}, but got #{route}!?" |               got[direction] = "testing w#{i}, but got #{route}!?" | ||||||
|             elsif row[direction] =~ /\d+s/ |             elsif want =~ /^\d+s/ | ||||||
|               time = json['route_summary']['total_time'] |               time = json['route_summary']['total_time'] | ||||||
|               got[direction] = "#{time}s" |               got[direction] = "#{time}s" | ||||||
|             end |             end | ||||||
|           end |           end | ||||||
|           if got[direction] != row[direction] |           if FuzzyMatch.match got[direction], want | ||||||
|  |             got[direction] = row[direction] | ||||||
|  |           else | ||||||
|             attempts << { :attempt => direction, :query => @query, :response => response } |             attempts << { :attempt => direction, :query => @query, :response => response } | ||||||
|           end |           end | ||||||
|         end |         end | ||||||
|  | |||||||
| @ -46,34 +46,17 @@ When /^I route I should get$/ do |table| | |||||||
|         if table.headers.include? 'turns' |         if table.headers.include? 'turns' | ||||||
|           got['turns'] = turns |           got['turns'] = turns | ||||||
|         end |         end | ||||||
|  |         if table.headers.include? '#'   # comment column | ||||||
|  |           got['#'] = row['#']           # copy value so it always match | ||||||
|  |         end | ||||||
|       end |       end | ||||||
|        |        | ||||||
|       ok = true |       ok = true | ||||||
|       row.keys.each do |key| |       row.keys.each do |key| | ||||||
|         if row[key].match /(.*)\s+~(.+)%$/        #percentage range: 100 ~5% |         if FuzzyMatch.match got[key], row[key] | ||||||
|           margin = 1 - $2.to_f*0.01 |           got[key] = row[key] | ||||||
|           from = $1.to_f*margin |  | ||||||
|           to = $1.to_f/margin |  | ||||||
|           if got[key].to_f >= from && got[key].to_f <= to |  | ||||||
|             got[key] = row[key] |  | ||||||
|           else |  | ||||||
|             ok = false |  | ||||||
|           end |  | ||||||
|         elsif row[key].match /(.*)\s+\+\-(.+)$/   #absolute range: 100 +-5 |  | ||||||
|             margin = $2.to_f |  | ||||||
|             from = $1.to_f-margin |  | ||||||
|             to = $1.to_f+margin |  | ||||||
|             if got[key].to_f >= from && got[key].to_f <= to |  | ||||||
|               got[key] = row[key] |  | ||||||
|             else |  | ||||||
|               ok = false |  | ||||||
|             end |  | ||||||
|         elsif row[key] =~ /^\/(.*)\/$/          #regex: /a,b,.*/ |  | ||||||
|           if got[key] =~ /#{$1}/ |  | ||||||
|             got[key] = row[key] |  | ||||||
|           end |  | ||||||
|         else |         else | ||||||
|           ok = row[key] == got[key] |           ok = false | ||||||
|         end |         end | ||||||
|       end |       end | ||||||
|        |        | ||||||
| @ -94,4 +77,4 @@ When /^I route (\d+) times I should get$/ do |n,table| | |||||||
|     ok = false unless step "I route I should get", table |     ok = false unless step "I route I should get", table | ||||||
|   end |   end | ||||||
|   ok |   ok | ||||||
| end | end | ||||||
|  | |||||||
| @ -13,7 +13,7 @@ OSM_TIMESTAMP = '2000-00-00T00:00:00Z' | |||||||
| DEFAULT_SPEEDPROFILE = 'bicycle' | DEFAULT_SPEEDPROFILE = 'bicycle' | ||||||
| WAY_SPACING = 100 | WAY_SPACING = 100 | ||||||
| DEFAULT_GRID_SIZE = 100   #meters | DEFAULT_GRID_SIZE = 100   #meters | ||||||
| 
 | PROFILES_PATH = '../profiles' | ||||||
| 
 | 
 | ||||||
| ORIGIN = [1,1] | ORIGIN = [1,1] | ||||||
| 
 | 
 | ||||||
| @ -71,8 +71,15 @@ def build_ways_from_table table | |||||||
|     way << node5 |     way << node5 | ||||||
|      |      | ||||||
|     tags = row.dup |     tags = row.dup | ||||||
|     tags.delete 'forw' |      | ||||||
|     tags.delete 'backw' |     # remove tags that describe expected test result | ||||||
|  |     tags.reject! do |k,v| | ||||||
|  |       k =~ /^forw\b/ ||  | ||||||
|  |       k =~ /^backw\b/ || | ||||||
|  |       k =~ /^bothw\b/ | ||||||
|  |     end | ||||||
|  |      | ||||||
|  |     ##remove empty tags | ||||||
|     tags.reject! { |k,v| v=='' } |     tags.reject! { |k,v| v=='' } | ||||||
|      |      | ||||||
|     # sort tag keys in the form of 'node/....' |     # sort tag keys in the form of 'node/....' | ||||||
| @ -198,14 +205,15 @@ def write_timestamp | |||||||
| end | end | ||||||
| 
 | 
 | ||||||
| def reprocess | def reprocess | ||||||
|  |   use_pbf = true | ||||||
|   Dir.chdir TEST_FOLDER do |   Dir.chdir TEST_FOLDER do | ||||||
|     write_osm |     write_osm | ||||||
|     write_timestamp |     write_timestamp | ||||||
|     convert_osm_to_pbf |     convert_osm_to_pbf if use_pbf | ||||||
|     unless extracted? |     unless extracted? | ||||||
|       log_preprocess_info |       log_preprocess_info | ||||||
|       log "== Extracting #{@osm_file}.osm...", :preprocess |       log "== Extracting #{@osm_file}.osm...", :preprocess | ||||||
|       unless system "../osrm-extract #{@osm_file}.osm.pbf 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} ../profiles/#{@profile}.lua" |       unless system "../osrm-extract #{@osm_file}.osm#{'.pbf' if use_pbf} 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} #{PROFILES_PATH}/#{@profile}.lua" | ||||||
|         log "*** Exited with code #{$?.exitstatus}.", :preprocess |         log "*** Exited with code #{$?.exitstatus}.", :preprocess | ||||||
|         raise ExtractError.new $?.exitstatus, "osrm-extract exited with code #{$?.exitstatus}." |         raise ExtractError.new $?.exitstatus, "osrm-extract exited with code #{$?.exitstatus}." | ||||||
|       end |       end | ||||||
| @ -214,7 +222,7 @@ def reprocess | |||||||
|     unless prepared? |     unless prepared? | ||||||
|       log_preprocess_info |       log_preprocess_info | ||||||
|       log "== Preparing #{@osm_file}.osm...", :preprocess |       log "== Preparing #{@osm_file}.osm...", :preprocess | ||||||
|       unless system "../osrm-prepare #{@osm_file}.osrm #{@osm_file}.osrm.restrictions 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} ../profiles/#{@profile}.lua" |       unless system "../osrm-prepare #{@osm_file}.osrm #{@osm_file}.osrm.restrictions 1>>#{PREPROCESS_LOG_FILE} 2>>#{PREPROCESS_LOG_FILE} #{PROFILES_PATH}/#{@profile}.lua" | ||||||
|         log "*** Exited with code #{$?.exitstatus}.", :preprocess |         log "*** Exited with code #{$?.exitstatus}.", :preprocess | ||||||
|         raise PrepareError.new $?.exitstatus, "osrm-prepare exited with code #{$?.exitstatus}." |         raise PrepareError.new $?.exitstatus, "osrm-prepare exited with code #{$?.exitstatus}." | ||||||
|       end  |       end  | ||||||
|  | |||||||
							
								
								
									
features/support/fuzzy.rb (new file, +30 lines)
							| @ -0,0 +1,30 @@ | |||||||
|  | 
 | ||||||
|  | class FuzzyMatch | ||||||
|  | 
 | ||||||
|  |   def self.match got, want | ||||||
|  |     if got == want | ||||||
|  |       return true | ||||||
|  |     elsif want.match /(.*)\s+~(.+)%$/       #percentage range: 100 ~5% | ||||||
|  |       margin = 1 - $2.to_f*0.01 | ||||||
|  |       from = $1.to_f*margin | ||||||
|  |       to = $1.to_f/margin | ||||||
|  |       return got.to_f >= from && got.to_f <= to | ||||||
|  |     elsif want.match /(.*)\s+\+\-(.+)$/    #absolute range: 100 +-5 | ||||||
|  |       margin = $2.to_f | ||||||
|  |       from = $1.to_f-margin | ||||||
|  |       to = $1.to_f+margin | ||||||
|  |       return got.to_f >= from && got.to_f <= to | ||||||
|  |     elsif want =~ /^\/(.*)\/$/             #regex: /a,b,.*/ | ||||||
|  |       return got =~ /#{$1}/ | ||||||
|  |     else | ||||||
|  |       return false | ||||||
|  |     end       | ||||||
|  |   end | ||||||
|  |    | ||||||
|  |   def self.match_location got, want | ||||||
|  |     match( got[0], "#{want.lat} ~0.002%" ) && | ||||||
|  |     match( got[1], "#{want.lon} ~0.002%" ) | ||||||
|  |   end | ||||||
|  |    | ||||||
|  | end | ||||||
|  | 
 | ||||||
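For orientation, a minimal sketch of how the new FuzzyMatch helper behaves for the wildcard forms used in the tables above; the input values are made up for illustration and are not taken from the test suite:

    FuzzyMatch.match '98',  '100 ~5%'      #=> true    (percentage range: 95 .. ~105.26)
    FuzzyMatch.match '103', '100 +-5'      #=> true    (absolute range: 95 .. 105)
    FuzzyMatch.match 'abc,def', '/abc,.*/' #=> truthy  (regex form)
    FuzzyMatch.match '42s', '42s'          #=> true    (plain equality)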
| @ -1,39 +1,47 @@ | |||||||
| require 'digest/sha1' | require 'digest/sha1' | ||||||
| 
 | 
 | ||||||
| def hash_of_file path | def hash_of_files paths | ||||||
|  |   paths = [paths] unless paths.is_a? Array | ||||||
|   hash = Digest::SHA1.new |   hash = Digest::SHA1.new | ||||||
|   open(path,'r') do |io| |   for path in paths do | ||||||
|     while !io.eof |     open(path,'r') do |io| | ||||||
|       buf = io.readpartial 1024 |       while !io.eof | ||||||
|       hash.update buf |         buf = io.readpartial 1024 | ||||||
|  |         hash.update buf | ||||||
|  |       end | ||||||
|     end |     end | ||||||
|   end |   end | ||||||
|   return hash.hexdigest |   return hash.hexdigest | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
| def profile_hash | def profile_hash | ||||||
|   @@profile_hashes ||= {} |   @@profile_hashes ||= {} | ||||||
|   @@profile_hashes[@profile] ||= hash_of_file "../profiles/#{@profile}.lua" |   @@profile_hashes[@profile] ||= hash_of_files "#{PROFILES_PATH}/#{@profile}.lua" | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| def osm_hash | def osm_hash | ||||||
|   @osm_hash ||= Digest::SHA1.hexdigest osm_str |   @osm_hash ||= Digest::SHA1.hexdigest osm_str | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
|  | def lua_lib_hash | ||||||
|  |   @lua_lib_hash ||= hash_of_files Dir.glob("../profiles/lib/*.lua") | ||||||
|  | end | ||||||
|  | 
 | ||||||
| def bin_extract_hash | def bin_extract_hash | ||||||
|   @@bin_extract_hash ||= hash_of_file '../osrm-extract' |   @@bin_extract_hash ||= hash_of_files '../osrm-extract' | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| def bin_prepare_hash | def bin_prepare_hash | ||||||
|   @@bin_prepare_hash ||= hash_of_file '../osrm-prepare' |   @@bin_prepare_hash ||= hash_of_files '../osrm-prepare' | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| def bin_routed_hash | def bin_routed_hash | ||||||
|   @@bin_routed_hash ||= hash_of_file '../osrm-routed' |   @@bin_routed_hash ||= hash_of_files '../osrm-routed' | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| #combine state of data, profile and binaries into a hash that identifies the exact test scenario | #combine state of data, profile and binaries into a hash that identifies the exact test scenario | ||||||
| def fingerprint | def fingerprint | ||||||
|   @fingerprint ||= Digest::SHA1.hexdigest "#{bin_extract_hash}-#{bin_prepare_hash}-#{bin_routed_hash}-#{profile_hash}-#{osm_hash}" |   @fingerprint ||= Digest::SHA1.hexdigest "#{bin_extract_hash}-#{bin_prepare_hash}-#{bin_routed_hash}-#{profile_hash}-#{lua_lib_hash}-#{osm_hash}" | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
|  | |||||||
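hash_of_files now accepts either a single path or an array of paths, so existing single-file callers keep working while lua_lib_hash can digest every profile library file in one go. A small sketch of the two call styles, with hypothetical file names:

    hash_of_files '../osrm-extract'                   # single path, wrapped into an array internally
    hash_of_files ['lib/a.lua', 'lib/b.lua']          # hypothetical paths; one combined digest over both
    hash_of_files Dir.glob('../profiles/lib/*.lua')   # as used by lua_lib_hash above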
							
								
								
									
features/support/nearest.rb (new file, +17 lines)
							| @ -0,0 +1,17 @@ | |||||||
|  | require 'net/http' | ||||||
|  | 
 | ||||||
|  | def request_nearest_url path | ||||||
|  |   @query = path | ||||||
|  |   uri = URI.parse "#{HOST}/#{path}" | ||||||
|  |   Timeout.timeout(REQUEST_TIMEOUT) do | ||||||
|  |     Net::HTTP.get_response uri | ||||||
|  |   end | ||||||
|  | rescue Errno::ECONNREFUSED => e | ||||||
|  |   raise "*** osrm-routed is not running." | ||||||
|  | rescue Timeout::Error | ||||||
|  |   raise "*** osrm-routed did not respond." | ||||||
|  | end | ||||||
|  | 
 | ||||||
|  | def request_nearest a | ||||||
|  |   request_nearest_url "nearest?loc=#{a}" | ||||||
|  | end | ||||||
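A quick usage sketch of the nearest helper; the coordinate is illustrative only:

    request_nearest '55.68,12.52'   # issues GET <HOST>/nearest?loc=55.68,12.52 against the running osrm-routed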
							
								
								
									
features/support/shortcuts.rb (new file, +3 lines)
							| @ -0,0 +1,3 @@ | |||||||
|  | def shortcuts_hash | ||||||
|  |   @shortcuts_hash ||= {} | ||||||
|  | end | ||||||
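shortcuts_hash starts out empty; the routability step above looks expected values up in it before falling back to the literal cell value, so scenarios or other support code can register named shortcuts. A minimal sketch with a made-up shortcut name, since this commit does not register any itself:

    shortcuts_hash['any_time'] = '/^\d+s/'     # hypothetical shortcut entry
    shortcuts_hash['any_time'] || 'any_time'   #=> '/^\d+s/'  (expanded)
    shortcuts_hash['42s'] || '42s'             #=> '42s'      (passed through unchanged)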
| @ -29,7 +29,7 @@ Feature: Distance calculation | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | distance | | 		 | from | to | route | distance | | ||||||
| 		 | a    | d  | abcde | 300m +-8 | | 		 | a    | d  | abcde | 300m +-2 | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Distance should equal sum of segments, rightwinded | 	Scenario: Distance should equal sum of segments, rightwinded | ||||||
| 		Given the node map | 		Given the node map | ||||||
| @ -43,7 +43,7 @@ Feature: Distance calculation | |||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | distance | | 		 | from | to | route | distance | | ||||||
| 		 | a    | d  | abcde | 300m +-8 | | 		 | a    | d  | abcde | 300m +-2 | | ||||||
| 
 | 
 | ||||||
| 	Scenario: 10m distances | 	Scenario: 10m distances | ||||||
| 		Given a grid size of 10 meters | 		Given a grid size of 10 meters | ||||||
|  | |||||||
							
								
								
									
features/testbot/duration.feature (new file, +42 lines)
							| @ -0,0 +1,42 @@ | |||||||
|  | @routing @testbot @routes @duration | ||||||
|  | Feature: Durations | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 		 | ||||||
|  |     Scenario: Duration of ways | ||||||
|  |     	Given the node map | ||||||
|  |     	 | a | b |  |   |   | f | | ||||||
|  |     	 |   |   |  | e |   |   | | ||||||
|  |     	 |   | c |  |   | d |   | | ||||||
|  | 	  | ||||||
|  |     	And the ways | ||||||
|  |     	 | nodes | highway | duration | | ||||||
|  |     	 | ab    | primary | 0:01     | | ||||||
|  |     	 | bc    | primary | 0:10     | | ||||||
|  |     	 | cd    | primary | 1:00     | | ||||||
|  |     	 | de    | primary | 10:00    | | ||||||
|  |     	 | ef    | primary | 01:02:03 | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route | distance | time       | | ||||||
|  |  		 | a    | b  | ab    | 100m +-1 | 60s +-1    | | ||||||
|  |  		 | b    | c  | bc    | 200m +-1 | 600s +-1   | | ||||||
|  |  		 | c    | d  | cd    | 300m +-1 | 3600s +-1  | | ||||||
|  |  		 | d    | e  | de    | 144m +-2 | 36000s +-1 | | ||||||
|  |  		 | e    | f  | ef    | 224m +-2 | 3723s +-1  | | ||||||
|  |      | ||||||
|  |     @todo | ||||||
|  |     Scenario: Partial duration of ways | ||||||
|  |     	Given the node map | ||||||
|  |     	 | a | b |  | c | | ||||||
|  | 
 | ||||||
|  |     	And the ways | ||||||
|  |     	 | nodes | highway | duration | | ||||||
|  |     	 | abc   | primary | 0:01     | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route | distance | time    | | ||||||
|  |  		 | a    | c  | abc   | 300m +-1 | 60s +-1 | | ||||||
|  |  		 | a    | b  | ab    | 100m +-1 | 20s +-1 | | ||||||
|  |  		 | b    | c  | bc    | 200m +-1 | 40s +-1 | | ||||||
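The expected times above follow the OSM duration convention: two colon-separated fields are hours:minutes, three are hours:minutes:seconds. A throwaway Ruby check of the arithmetic the table relies on (not the parser OSRM itself uses):

    def duration_to_seconds str
      h, m, s = str.split(':').map(&:to_i)
      s ||= 0
      h * 3600 + m * 60 + s
    end

    duration_to_seconds '0:01'       #=> 60      (ab)
    duration_to_seconds '10:00'      #=> 36000   (de)
    duration_to_seconds '01:02:03'   #=> 3723    (ef)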
| @ -1,5 +1,5 @@ | |||||||
| @routing @weight | @routing @fastest | ||||||
| Feature: Choosing route based on length, speed, etc | Feature: Choosing fastest route | ||||||
| 	 | 	 | ||||||
| 	Background: | 	Background: | ||||||
| 		Given the profile "testbot" | 		Given the profile "testbot" | ||||||
| @ -22,7 +22,7 @@ Feature: Choosing route based on length, speed, etc | |||||||
| 		 | x    | y  | xa,atb,by | | 		 | x    | y  | xa,atb,by | | ||||||
| 		 | y    | x  | by,atb,xa | | 		 | y    | x  | by,atb,xa | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Pick  the shortest travel time, even when it's longer | 	Scenario: Pick the fastest route, even when it's longer | ||||||
| 		Given the node map | 		Given the node map | ||||||
| 		 |   | p |   | | 		 |   | p |   | | ||||||
| 		 | a | s | b | | 		 | a | s | b | | ||||||
| @ -7,21 +7,61 @@ Feature: Testbot - Handle ferry routes | |||||||
| 	Scenario: Testbot - Ferry duration, single node | 	Scenario: Testbot - Ferry duration, single node | ||||||
| 		Given the node map | 		Given the node map | ||||||
| 		 | a | b | c | d | | 		 | a | b | c | d | | ||||||
| 		 |   |   | e | f | | 		 | e | f | g | h | | ||||||
| 		 |   |   | g | h | | 		 | i | j | k | l | | ||||||
| 		 |   |   | i | j | | 		 | m | n | o | p | | ||||||
| 
 | 		 | q | r | s | t | | ||||||
|  | 		  | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | highway | route | bicycle | duration | | 		 | nodes | highway | route | duration | | ||||||
| 		 | ab    | primary |       |         |          | | 		 | ab    | primary |       |          | | ||||||
| 		 | cd    | primary |       |         |          | | 		 | cd    | primary |       |          | | ||||||
| 		 | ef    | primary |       |         |          | | 		 | ef    | primary |       |          | | ||||||
| 		 | gh    | primary |       |         |          | | 		 | gh    | primary |       |          | | ||||||
| 		 | ij    | primary |       |         |          | | 		 | ij    | primary |       |          | | ||||||
| 		 | bc    |         | ferry | yes     | 0:01     | | 		 | kl    | primary |       |          | | ||||||
| 		 | be    |         | ferry | yes     | 0:10     | | 		 | mn    | primary |       |          | | ||||||
| 		 | bg    |         | ferry | yes     | 1:00     | | 		 | op    | primary |       |          | | ||||||
| 		 | bi    |         | ferry | yes     | 10:00    | | 		 | qr    | primary |       |          | | ||||||
|  | 		 | st    | primary |       |          | | ||||||
|  | 		 | bc    |         | ferry | 0:01     | | ||||||
|  | 		 | fg    |         | ferry | 0:10     | | ||||||
|  | 		 | jk    |         | ferry | 1:00     | | ||||||
|  | 		 | no    |         | ferry | 24:00    | | ||||||
|  | 		 | rs    |         | ferry | 96:00    | | ||||||
|  | 
 | ||||||
|  |         When I route I should get | ||||||
|  |          | from | to | route | time        | | ||||||
|  |          | b    | c  | bc    | 60s +-1     | | ||||||
|  |          | f    | g  | fg    | 600s +-1    | | ||||||
|  |          | j    | k  | jk    | 3600s +-1   | | ||||||
|  |          | n    | o  | no    | 86400s +-1  | | ||||||
|  |          | r    | s  | rs    | 345600s +-1 | | ||||||
|  |      | ||||||
|  |     @todo | ||||||
|  |  	Scenario: Testbot - Week long ferry routes | ||||||
|  |  		Given the node map | ||||||
|  |  		 | a | b | c | d | | ||||||
|  |  		 | e | f | g | h | | ||||||
|  |  		 | i | j | k | l | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | highway | route | duration | | ||||||
|  |  		 | ab    | primary |       |          | | ||||||
|  |  		 | cd    | primary |       |          | | ||||||
|  |  		 | ef    | primary |       |          | | ||||||
|  |  		 | gh    | primary |       |          | | ||||||
|  |  		 | ij    | primary |       |          | | ||||||
|  |  		 | kl    | primary |       |          | | ||||||
|  |  		 | bc    |         | ferry | 24:00    | | ||||||
|  |  		 | fg    |         | ferry | 168:00   | | ||||||
|  |  		 | jk    |         | ferry | 720:00   | | ||||||
|  | 
 | ||||||
|  |          When I route I should get | ||||||
|  |           | from | to | route | time        | | ||||||
|  |           | b    | c  | bc    | 86400s +-1  | | ||||||
|  |           | f    | g  | fg    | 604800s +-1 | | ||||||
|  |           | j    | k  | jk    | 259200s +-1 | | ||||||
| 
 | 
 | ||||||
| 	Scenario: Testbot - Ferry duration, multiple nodes | 	Scenario: Testbot - Ferry duration, multiple nodes | ||||||
| 		Given the node map | 		Given the node map | ||||||
| @ -29,33 +69,107 @@ Feature: Testbot - Handle ferry routes | |||||||
| 		  |   | a | b | c | d |   | | 		  |   | a | b | c | d |   | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | highway | route | bicycle | duration | | 		 | nodes | highway | route | duration | | ||||||
| 		 | xa    | primary |       |         |          | | 		 | xa    | primary |       |          | | ||||||
| 		 | yd    | primary |       |         |          | | 		 | yd    | primary |       |          | | ||||||
| 		 | abcd  |         | ferry | yes     | 1:00     | | 		 | ad    |         | ferry | 1:00     | | ||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | time      | | 		 | from | to | route | time      | | ||||||
| 		 | a    | d  | abcd  | 3600s +-1 | | 		 | a    | d  | ad    | 3600s +-1 | | ||||||
| 		 | d    | a  | abcd  | 3600s +-1 | | 		 | d    | a  | ad    | 3600s +-1 | | ||||||
|      |      | ||||||
|     @todo |     @todo | ||||||
| 	Scenario: Bike - Ferry duration, individual parts | 	Scenario: Testbot - Ferry duration, individual parts, fast | ||||||
|  |     Given a grid size of 10000 meters | ||||||
| 		Given the node map | 		Given the node map | ||||||
| 		  | x | y |  | z |  |  | v | | 		  | x | y |  | z |  |  | v | | ||||||
| 		  | a | b |  | c |  |  | d | | 		  | a | b |  | c |  |  | d | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | highway | route | bicycle | duration | | 		 | nodes | highway | route | duration | | ||||||
| 		 | xa    | primary |       |         |          | | 		 | xa    | primary |       |          | | ||||||
| 		 | yb    | primary |       |         |          | | 		 | yb    | primary |       |          | | ||||||
| 		 | zc    | primary |       |         |          | | 		 | zc    | primary |       |          | | ||||||
| 		 | vd    | primary |       |         |          | | 		 | vd    | primary |       |          | | ||||||
| 		 | abcd  |         | ferry | yes     | 1:00     | | 		 | abcd  |         | ferry | 0:06     | | ||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | time      | | 		 | from | to | route | time     | | ||||||
| 		 | a    | d  | abcd  | 3600s +-1 | | 		 | a    | d  | abcd  | 360s +-1 | | ||||||
| 		 | a    | b  | abcd  | 600s +-1  | | 		 | a    | b  | abcd  | 60s +-1  | | ||||||
| 		 | b    | c  | abcd  | 1200s +-1 | | 		 | b    | c  | abcd  | 120s +-1 | | ||||||
| 		 | c    | d  | abcd  | 1800s +-1 | | 		 | c    | d  | abcd  | 180s +-1 | | ||||||
|  |      | ||||||
|  |     @todo | ||||||
|  |  	Scenario: Testbot - Ferry duration, individual parts, slow | ||||||
|  |  		Given the node map | ||||||
|  |  		  | x | y |  | z |  |  | v | | ||||||
|  |  		  | a | b |  | c |  |  | d | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | highway | route | duration | | ||||||
|  |  		 | xa    | primary |       |          | | ||||||
|  |  		 | yb    | primary |       |          | | ||||||
|  |  		 | zc    | primary |       |          | | ||||||
|  |  		 | vd    | primary |       |          | | ||||||
|  |  		 | abcd  |         | ferry | 1:00     | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route | time      | | ||||||
|  |  		 | a    | d  | abcd  | 3600s ~1% | | ||||||
|  |  		 | a    | b  | abcd  | 600s ~1%  | | ||||||
|  |  		 | b    | c  | abcd  | 1200s ~1% | | ||||||
|  |  		 | c    | d  | abcd  | 1800s ~1% | | ||||||
|  |   | ||||||
|  |  	Scenario: Testbot - Ferry duration, connected routes | ||||||
|  |  		Given the node map | ||||||
|  |  		  | x |   |   |   | d |   |   |   | y | | ||||||
|  |  		  |   | a | b | c |   | e | f | g | t | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | highway | route | duration | | ||||||
|  |  		 | xa    | primary |       |          | | ||||||
|  |  		 | yg    | primary |       |          | | ||||||
|  |  		 | abcd  |         | ferry | 0:30     | | ||||||
|  |  		 | defg  |         | ferry | 0:30     | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route     | time      | | ||||||
|  |  		 | a    | g  | abcd,defg | 3600s +-1 | | ||||||
|  |  		 | g    | a  | defg,abcd | 3600s +-1 | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Testbot - Prefer road when faster than ferry | ||||||
|  |  		Given the node map | ||||||
|  |  		  | x | a | b | c |   | | ||||||
|  |  		  |   |   |   |   | d | | ||||||
|  |  		  | y | g | f | e |   | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | highway | route | duration | | ||||||
|  |  		 | xa    | primary |       |          | | ||||||
|  |  		 | yg    | primary |       |          | | ||||||
|  |  		 | xy    | primary |       |          | | ||||||
|  |  		 | abcd  |         | ferry | 0:01     | | ||||||
|  |  		 | defg  |         | ferry | 0:01     | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route    | time      | | ||||||
|  |  		 | a    | g  | xa,xy,yg | 60s +-25% | | ||||||
|  |  		 | g    | a  | yg,xy,xa | 60s +-25% | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Testbot - Long winding ferry route | ||||||
|  |  		Given the node map | ||||||
|  |  		  | x |   | b |   | d |   | f |   | y | | ||||||
|  |  		  |   | a |   | c |   | e |   | g |   | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes   | highway | route | duration | | ||||||
|  |  		 | xa      | primary |       |          | | ||||||
|  |  		 | yg      | primary |       |          | | ||||||
|  |  		 | abcdefg |         | ferry | 6:30     | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route   | time       | | ||||||
|  |  		 | a    | g  | abcdefg | 23400s +-1 | | ||||||
|  |  		 | g    | a  | abcdefg | 23400s +-1 | | ||||||
|  | |||||||
							
								
								
									
features/testbot/graph.feature (new file, +22 lines)
							| @ -0,0 +1,22 @@ | |||||||
|  | @routing @graph | ||||||
|  | Feature: Basic Routing | ||||||
|  | Test the input data described on https://github.com/DennisOSRM/Project-OSRM/wiki/Graph-representation
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	@smallest | ||||||
|  | 	Scenario: Graph transformation | ||||||
|  | 		Given the node map | ||||||
|  |         |   |   | d | | ||||||
|  |         | a | b | c | | ||||||
|  |         |   |   | e | | ||||||
|  | 	 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | | ||||||
|  | 		 | abc   | | ||||||
|  | 		 | dce   | | ||||||
|  |      | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route   | | ||||||
|  | 		 | a    | e  | abc,dce | | ||||||
							
								
								
									
features/testbot/impedance.feature (new file, +96 lines)
							| @ -0,0 +1,96 @@ | |||||||
|  | @routing @testbot @impedance @todo | ||||||
|  | Feature: Setting impedance and speed separately | ||||||
|  | These tests assume that the speed is not factored into the impedance by OSRM internally. | ||||||
|  | Instead, the speed can optionally be factored into the weight in the lua profile.
|  | 
 | ||||||
|  | Note: With the default grid size of 100m, the diagonals have a length of 141.42m
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Use impedance to pick route, even when longer/slower | ||||||
|  | 		Given the node map | ||||||
|  | 		 |   | s |   | t |   | u |   | v |   | | ||||||
|  | 		 | a |   | b |   | c |   | d |   | e | | ||||||
|  | 
 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | impedance | | ||||||
|  | 		 | ab    | 1.3    | | ||||||
|  | 		 | asb   | 1      | | ||||||
|  | 		 | bc    | 1.5    | | ||||||
|  | 		 | btc   | 1      | | ||||||
|  | 		 | cd    | 0.015  | | ||||||
|  | 		 | cud   | 0.010  | | ||||||
|  | 		 | de    | 150000 | | ||||||
|  | 		 | dve   | 100000 | | ||||||
|  | 
 | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route | distance | | ||||||
|  | 		 | a    | b  | ab    | 200m +-1 | | ||||||
|  | 		 | b    | a  | ab    | 200m +-1 | | ||||||
|  | 		 | b    | c  | btc   | 282m +-1 | | ||||||
|  | 		 | c    | b  | btc   | 282m +-1 | | ||||||
|  | 		 | c    | d  | cud   | 282m +-1 | | ||||||
|  | 		 | d    | c  | cud   | 282m +-1 | | ||||||
|  | 		 | d    | e  | dve   | 282m +-1 | | ||||||
|  | 		 | e    | d  | dve   | 282m +-1 | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Weight should default to 1 | ||||||
|  |  		Given the node map | ||||||
|  |  		 |   | s |   | t |   | | ||||||
|  |  		 | a |   | b |   | c | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | impedance | | ||||||
|  |  		 | ab    | 1.40   | | ||||||
|  |  		 | asb   |        | | ||||||
|  |  		 | bc    | 1.42   | | ||||||
|  |  		 | btc   |        | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route | | ||||||
|  |  		 | a    | b  | ab    | | ||||||
|  |  		 | b    | a  | ab    | | ||||||
|  |  		 | b    | c  | btc   | | ||||||
|  |  		 | c    | b  | btc   | | ||||||
|  | 
 | ||||||
|  |   	Scenario: Use both impedance and speed (multiplied) when picking route | ||||||
|  |   	OSRM should not factor speed into impedance internally. However, the profile can choose to do so, | ||||||
|  |   	and this test expects the testbot profile to do it.
|  |   		Given the node map | ||||||
|  |   		 |   | s |   | t |   | | ||||||
|  |   		 | a |   | b |   | c | | ||||||
|  | 
 | ||||||
|  |   		And the ways | ||||||
|  |   		 | nodes | impedance | highway   | | ||||||
|  |   		 | ab    | 2.80   | primary   | | ||||||
|  |   		 | asb   | 1      | secondary | | ||||||
|  |   		 | bc    | 2.84   | primary   | | ||||||
|  |   		 | btc   | 1      | secondary | | ||||||
|  | 
 | ||||||
|  |   		When I route I should get | ||||||
|  |   		 | from | to | route | | ||||||
|  |   		 | a    | b  | ab    | | ||||||
|  |   		 | b    | a  | ab    | | ||||||
|  |   		 | b    | c  | btc   | | ||||||
|  |   		 | c    | b  | btc   | | ||||||
|  | 
 | ||||||
|  |  	Scenario: Weight should influence neither speed nor travel time.  | ||||||
|  |  		Given the node map | ||||||
|  |  		 | a | b | c | | ||||||
|  |  		 | t |   |   | | ||||||
|  | 
 | ||||||
|  |  		And the ways | ||||||
|  |  		 | nodes | | ||||||
|  |  		 | ab    | | ||||||
|  |  		 | bc    | | ||||||
|  |  		 | at    | | ||||||
|  | 
 | ||||||
|  |  		When I route I should get | ||||||
|  |  		 | from | to | route | distance | time    | | ||||||
|  |  		 | a    | b  | ab    | 100m +-1 | 10s +-1 | | ||||||
|  |  		 | b    | a  | ab    | 100m +-1 | 10s +-1 | | ||||||
|  |  		 | b    | c  | bc    | 100m +-1 | 10s +-1 | | ||||||
|  |  		 | c    | b  | bc    | 100m +-1 | 10s +-1 | | ||||||
|  |  		 | a    | c  | ab,bc | 200m +-1 | 20s +-1 | | ||||||
|  |  		 | c    | a  | bc,ab | 200m +-1 | 20s +-1 | | ||||||
							
								
								
									
features/testbot/maxspeed.feature (new file, +52 lines)
							| @ -0,0 +1,52 @@ | |||||||
|  | @routing @maxspeed @testbot | ||||||
|  | Feature: Car - Max speed restrictions | ||||||
|  | 
 | ||||||
|  | 	Background: Use specific speeds | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Testbot - Respect maxspeeds when lower than the way type speed
|  | 		Given the node map | ||||||
|  | 		 | a | b | c | d | | ||||||
|  | 
 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | maxspeed | | ||||||
|  | 		 | ab    |          | | ||||||
|  | 		 | bc    | 24       | | ||||||
|  | 		 | cd    | 18       | | ||||||
|  | 
 | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route | time    | | ||||||
|  | 		 | a    | b  | ab    | 10s +-1 | | ||||||
|  | 		 | b    | a  | ab    | 10s +-1 | | ||||||
|  | 		 | b    | c  | bc    | 15s +-1 | | ||||||
|  | 		 | c    | b  | bc    | 15s +-1 | | ||||||
|  | 		 | c    | d  | cd    | 20s +-1 | | ||||||
|  | 		 | d    | c  | cd    | 20s +-1 | | ||||||
|  | 
 | ||||||
|  | 	Scenario: Testbot - Ignore maxspeed when higher than way speed | ||||||
|  | 		Given the node map | ||||||
|  | 		 | a | b | c | | ||||||
|  | 
 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | maxspeed | | ||||||
|  | 		 | ab    |          | | ||||||
|  | 		 | bc    | 200      | | ||||||
|  | 
 | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route | time    | | ||||||
|  | 		 | a    | b  | ab    | 10s +-1 | | ||||||
|  | 		 | b    | a  | ab    | 10s +-1 | | ||||||
|  | 		 | b    | c  | bc    | 10s +-1 | | ||||||
|  | 		 | c    | b  | bc    | 10s +-1 | | ||||||
|  | 
 | ||||||
|  |     @opposite | ||||||
|  |     Scenario: Testbot - Forward/backward maxspeed | ||||||
|  |      	Then routability should be | ||||||
|  |      	 | maxspeed | maxspeed:forward | maxspeed:backward | forw    | backw   | | ||||||
|  |      	 |          |                  |                   | 20s +-1 | 20s +-1 | | ||||||
|  |      	 | 18       |                  |                   | 40s +-1 | 40s +-1 | | ||||||
|  |      	 |          | 18               |                   | 40s +-1 | 20s +-1 | | ||||||
|  |      	 |          |                  | 18                | 20s +-1 | 40s +-1 | | ||||||
|  |      	 | 9        | 18               |                   | 40s +-1 | 80s +-1 | | ||||||
|  |      	 | 9        |                  | 18                | 80s +-1 | 40s +-1 | | ||||||
|  |      	 | 9        | 24               | 18                | 30s +-1 | 40s +-1 | | ||||||
							
								
								
									
features/testbot/opposite.feature (new file, +18 lines)
							| @ -0,0 +1,18 @@ | |||||||
|  | @routing @testbot @opposite | ||||||
|  | Feature: Separate settings for forward/backward direction | ||||||
|  | 	 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Testbot - Going against the flow | ||||||
|  | 		Given the node map | ||||||
|  | 		 | a | b | c | d | | ||||||
|  | 	 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | highway | | ||||||
|  | 		 | abcd  | river   | | ||||||
|  |      | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route | distance  | time | | ||||||
|  | 		 | a    | d  | abcd  | 300 +- 1m | 30s  | | ||||||
|  | 		 | d    | a  | abcd  | 300 +- 1m | 68s  | | ||||||
							
								
								
									
features/testbot/routes.feature (new file, +35 lines)
							| @ -0,0 +1,35 @@ | |||||||
|  | @routing @testbot @routes @todo | ||||||
|  | Feature: OSM Route Relation | ||||||
|  | 
 | ||||||
|  | 	Background: | ||||||
|  | 		Given the profile "testbot" | ||||||
|  | 	 | ||||||
|  | 	Scenario: Prioritize ways that are part of route relations | ||||||
|  | 	This scenario assumes that the testbot uses an impedance of 0.5 for ways that are part of 'testbot' routes. | ||||||
|  | 	 | ||||||
|  | 		Given the node map | ||||||
|  | 		 | s |  |  | t |  |  |   | | ||||||
|  | 		 | a |  |  | b |  |  | c | | ||||||
|  | 		 |   |  |  |   |  |  |   | | ||||||
|  | 		 |   |  |  | u |  |  | v | | ||||||
|  | 
 | ||||||
|  | 		And the ways | ||||||
|  | 		 | nodes | | ||||||
|  | 		 | ab    | | ||||||
|  | 		 | bc    | | ||||||
|  | 		 | as    | | ||||||
|  | 		 | stb   | | ||||||
|  | 		 | bu    | | ||||||
|  | 		 | uvc   | | ||||||
|  | 
 | ||||||
|  |    		And the relations | ||||||
|  | 		 | type  | route   | way:route | | ||||||
|  | 		 | route | testbot | as,stb    | | ||||||
|  | 		 | route | testbot | bu,uvc    | | ||||||
|  | 
 | ||||||
|  | 		When I route I should get | ||||||
|  | 		 | from | to | route  | distance | time    | | ||||||
|  | 		 | b    | c  | bc     | 300m +-1 | 30s +-1 | | ||||||
|  | 		 | c    | b  | bc     | 300m +-1 | 30s +-1 | | ||||||
|  | 		 | a    | b  | as,stb | 500m +-1 | 50s +-1 | | ||||||
|  | 		 | b    | a  | stb,as | 500m +-1 | 50s +-1 | | ||||||
| @ -97,19 +97,27 @@ Feature: Turn directions/codes | |||||||
| 		 | g | c | xg,xc | head,right,destination        | | 		 | g | c | xg,xc | head,right,destination        | | ||||||
| 		 | g | e | xg,xe | head,sharp_right,destination  | | 		 | g | e | xg,xe | head,sharp_right,destination  | | ||||||
| 	 | 	 | ||||||
| 	Scenario: Skadestuevej, København | 	Scenario: Turn instructions at high latitude | ||||||
| 	https://github.com/DennisOSRM/Project-OSRM/issues/532 | 	https://github.com/DennisOSRM/Project-OSRM/issues/532 | ||||||
| 		Given the node locations | 		Given the node locations | ||||||
| 		 | node | lat      | lon      | | 		 | node | lat      | lon      | | ||||||
| 		 | a    | 55.68679 | 12.52360 | | 		 | a    | 55.68740 | 12.52430 | | ||||||
| 		 | b    | 55.68745 | 12.52407 | | 		 | b    | 55.68745 | 12.52409 | | ||||||
| 		 | c    | 55.68720 | 12.52509 | | 		 | c    | 55.68711 | 12.52383 | | ||||||
|  | 		 | x    | -55.68740 | 12.52430 | | ||||||
|  | 		 | y    | -55.68745 | 12.52409 | | ||||||
|  | 		 | z    | -55.68711 | 12.52383 | | ||||||
| 
 | 
 | ||||||
| 		And the ways | 		And the ways | ||||||
| 		 | nodes | | 		 | nodes | | ||||||
| 		 | ab    | | 		 | ab    | | ||||||
| 		 | bc    | | 		 | bc    | | ||||||
|  | 		 | xy    | | ||||||
|  | 		 | yz    | | ||||||
| 
 | 
 | ||||||
| 		When I route I should get | 		When I route I should get | ||||||
| 		 | from | to | route | turns                  | | 		 | from | to | route | turns                  | | ||||||
| 		 | a    | c  | ab,bc | head,right,destination | | 		 | a    | c  | ab,bc | head,left,destination  | | ||||||
|  | 		 | c    | a  | bc,ab | head,right,destination | | ||||||
|  | 		 | x    | z  | xy,yz | head,right,destination  | | ||||||
|  | 		 | z    | x  | yz,xy | head,left,destination | | ||||||
|  | |||||||
							
								
								
									
profile.lua (99 changed lines)
							| @ -1,13 +1,15 @@ | |||||||
| -- Begin of globals | -- Begin of globals | ||||||
|  | require("lib/access") | ||||||
| 
 | 
 | ||||||
| barrier_whitelist = { ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true} | barrier_whitelist = { ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true, ["no"] = true} | ||||||
| access_tag_whitelist = { ["yes"] = true, ["motorcar"] = true, ["motor_vehicle"] = true, ["vehicle"] = true, ["permissive"] = true, ["designated"] = true  } | access_tag_whitelist = { ["yes"] = true, ["motorcar"] = true, ["motor_vehicle"] = true, ["vehicle"] = true, ["permissive"] = true, ["designated"] = true  } | ||||||
| access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestery"] = true } | access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestry"] = true } | ||||||
| access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | ||||||
| access_tags = { "motorcar", "motor_vehicle", "vehicle" } | access_tags = { "motorcar", "motor_vehicle", "vehicle" } | ||||||
| access_tags_hierachy = { "motorcar", "motor_vehicle", "vehicle", "access" } | access_tags_hierachy = { "motorcar", "motor_vehicle", "vehicle", "access" } | ||||||
| service_tag_restricted = { ["parking_aisle"] = true } | service_tag_restricted = { ["parking_aisle"] = true } | ||||||
| ignore_in_grid = { ["ferry"] = true } | ignore_in_grid = { ["ferry"] = true } | ||||||
|  | restriction_exception_tags = { "motorcar", "motor_vehicle", "vehicle" } | ||||||
| 
 | 
 | ||||||
| speed_profile = {  | speed_profile = {  | ||||||
|   ["motorway"] = 90,  |   ["motorway"] = 90,  | ||||||
| @ -26,6 +28,7 @@ speed_profile = { | |||||||
|   ["service"] = 15, |   ["service"] = 15, | ||||||
| --  ["track"] = 5, | --  ["track"] = 5, | ||||||
|   ["ferry"] = 5, |   ["ferry"] = 5, | ||||||
|  |   ["shuttle_train"] = 10, | ||||||
|   ["default"] = 50 |   ["default"] = 50 | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| @ -39,24 +42,9 @@ u_turn_penalty 			= 20 | |||||||
| 
 | 
 | ||||||
| -- End of globals | -- End of globals | ||||||
| 
 | 
 | ||||||
| --find first tag in access hierachy which is set | function get_exceptions(vector) | ||||||
| local function find_access_tag(source) | 	for i,v in ipairs(restriction_exception_tags) do  | ||||||
| 	for i,v in ipairs(access_tags_hierachy) do  | 		vector:Add(v) | ||||||
| 		if source.tags:Holds(v) then  |  | ||||||
| 			local tag = source.tags:Find(v) |  | ||||||
| 			if tag ~= '' then --and tag ~= "" then |  | ||||||
| 				return tag |  | ||||||
| 			end |  | ||||||
| 		end |  | ||||||
| 	end |  | ||||||
| 	return nil |  | ||||||
| end |  | ||||||
| 
 |  | ||||||
| local function find_in_keyvals(keyvals, tag) |  | ||||||
| 	if keyvals:Holds(tag) then |  | ||||||
| 		return keyvals:Find(tag) |  | ||||||
| 	else |  | ||||||
| 		return nil |  | ||||||
| 	end | 	end | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| @ -64,7 +52,7 @@ local function parse_maxspeed(source) | |||||||
| 	if source == nil then | 	if source == nil then | ||||||
| 		return 0 | 		return 0 | ||||||
| 	end | 	end | ||||||
| 	local n = tonumber(source) | 	local n = tonumber(source:match("%d*")) | ||||||
| 	if n == nil then | 	if n == nil then | ||||||
| 		n = 0 | 		n = 0 | ||||||
| 	end | 	end | ||||||
| @ -76,7 +64,7 @@ end | |||||||
| 
 | 
 | ||||||
| function node_function (node) | function node_function (node) | ||||||
|   local barrier = node.tags:Find ("barrier") |   local barrier = node.tags:Find ("barrier") | ||||||
|   local access = find_access_tag(node) |   local access = Access.find_access_tag(node, access_tags_hierachy) | ||||||
|   local traffic_signal = node.tags:Find("highway") |   local traffic_signal = node.tags:Find("highway") | ||||||
|    |    | ||||||
|   --flag node if it carries a traffic light |   --flag node if it carries a traffic light | ||||||
| @ -101,13 +89,7 @@ function node_function (node) | |||||||
| end | end | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| function way_function (way, numberOfNodesInWay) | function way_function (way) | ||||||
| 
 |  | ||||||
|   -- A way must have two nodes or more |  | ||||||
|   if(numberOfNodesInWay < 2) then |  | ||||||
|     return 0; |  | ||||||
|   end |  | ||||||
|    |  | ||||||
|   -- First, get the properties of each way that we come across |   -- First, get the properties of each way that we come across | ||||||
|     local highway = way.tags:Find("highway") |     local highway = way.tags:Find("highway") | ||||||
|     local name = way.tags:Find("name") |     local name = way.tags:Find("name") | ||||||
| @ -115,13 +97,15 @@ function way_function (way, numberOfNodesInWay) | |||||||
|     local junction = way.tags:Find("junction") |     local junction = way.tags:Find("junction") | ||||||
|     local route = way.tags:Find("route") |     local route = way.tags:Find("route") | ||||||
|     local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) |     local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) | ||||||
|  |     local maxspeed_forward = tonumber(way.tags:Find( "maxspeed:forward")) | ||||||
|  |     local maxspeed_backward = tonumber(way.tags:Find( "maxspeed:backward")) | ||||||
|     local barrier = way.tags:Find("barrier") |     local barrier = way.tags:Find("barrier") | ||||||
|     local oneway = way.tags:Find("oneway") |     local oneway = way.tags:Find("oneway") | ||||||
|     local cycleway = way.tags:Find("cycleway") |     local cycleway = way.tags:Find("cycleway") | ||||||
|     local duration  = way.tags:Find("duration") |     local duration  = way.tags:Find("duration") | ||||||
|     local service  = way.tags:Find("service") |     local service  = way.tags:Find("service") | ||||||
|     local area = way.tags:Find("area") |     local area = way.tags:Find("area") | ||||||
|     local access = find_access_tag(way) |     local access = Access.find_access_tag(way, access_tags_hierachy) | ||||||
| 
 | 
 | ||||||
|   -- Second, parse the way according to these properties |   -- Second, parse the way according to these properties | ||||||
| 
 | 
 | ||||||
| @ -148,29 +132,31 @@ function way_function (way, numberOfNodesInWay) | |||||||
| 	end | 	end | ||||||
| 
 | 
 | ||||||
|   -- Handling ferries and piers |   -- Handling ferries and piers | ||||||
|     if (speed_profile[route] ~= nil and speed_profile[route] > 0) |   if (speed_profile[route] ~= nil and speed_profile[route] > 0) then | ||||||
|     then |    if durationIsValid(duration) then | ||||||
|       if durationIsValid(duration) then |     way.duration = math.max( parseDuration(duration), 1 ); | ||||||
| 	    way.speed = math.max( parseDuration(duration) / math.max(1, numberOfNodesInWay-1) ); |    end | ||||||
|         way.is_duration_set = true |    way.direction = Way.bidirectional | ||||||
|       end |    if speed_profile[route] ~= nil then | ||||||
|       way.direction = Way.bidirectional |     highway = route; | ||||||
|       if speed_profile[route] ~= nil then |    end | ||||||
|          highway = route; |    if tonumber(way.duration) < 0 then | ||||||
|       end |     way.speed = speed_profile[highway] | ||||||
|       if not way.is_duration_set then |    end | ||||||
|         way.speed = speed_profile[highway] |   end | ||||||
|       end |  | ||||||
|     end |  | ||||||
|      |      | ||||||
|   -- Set the avg speed on the way if it is accessible by road class |   -- Set the avg speed on the way if it is accessible by road class | ||||||
|     if (speed_profile[highway] ~= nil and way.speed == -1 ) then  |   if (speed_profile[highway] ~= nil and way.speed == -1 ) then | ||||||
|       if 0 == maxspeed then |   if maxspeed > speed_profile[highway] then | ||||||
|         maxspeed = math.huge |    way.speed = maxspeed | ||||||
|       end |   else | ||||||
|       way.speed = math.min(speed_profile[highway], maxspeed) |    if 0 == maxspeed then | ||||||
|  |     maxspeed = math.huge | ||||||
|  |    end | ||||||
|  |    way.speed = math.min(speed_profile[highway], maxspeed) | ||||||
|     end |     end | ||||||
|      |   end | ||||||
|  | 
 | ||||||
|   -- Set the avg speed on ways that are marked accessible |   -- Set the avg speed on ways that are marked accessible | ||||||
|     if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then |     if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then | ||||||
|       if 0 == maxspeed then |       if 0 == maxspeed then | ||||||
| @ -203,7 +189,18 @@ function way_function (way, numberOfNodesInWay) | |||||||
|     else |     else | ||||||
|       way.direction = Way.bidirectional |       way.direction = Way.bidirectional | ||||||
|     end |     end | ||||||
|      | 
 | ||||||
|  |   -- Override speed settings if explicit forward/backward maxspeeds are given | ||||||
|  |     if maxspeed_forward ~= nil and maxspeed_forward > 0 then | ||||||
|  | 	if Way.bidirectional == way.direction then | ||||||
|  |           way.backward_speed = way.speed | ||||||
|  |         end | ||||||
|  |         way.speed = maxspeed_forward | ||||||
|  |     end | ||||||
|  |     if maxspeed_backward ~= nil and maxspeed_backward > 0 then | ||||||
|  |       way.backward_speed = maxspeed_backward | ||||||
|  |     end | ||||||
|  | 
 | ||||||
|   -- Override general direction settings of there is a specific one for our mode of travel |   -- Override general direction settings of there is a specific one for our mode of travel | ||||||
|    |    | ||||||
|     if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then |     if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then | ||||||
|  | |||||||
| @ -1,39 +1,41 @@ | |||||||
|  | require("lib/access") | ||||||
|  | 
 | ||||||
| -- Begin of globals | -- Begin of globals | ||||||
| barrier_whitelist = { [""] = true, ["cycle_barrier"] = true, ["bollard"] = true, ["entrance"] = true, ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true} | barrier_whitelist = { [""] = true, ["cycle_barrier"] = true, ["bollard"] = true, ["entrance"] = true, ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true, ["no"] = true} | ||||||
| access_tag_whitelist = { ["yes"] = true, ["permissive"] = true, ["designated"] = true	} | access_tag_whitelist = { ["yes"] = true, ["permissive"] = true, ["designated"] = true	} | ||||||
| access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestery"] = true } | access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestery"] = true } | ||||||
| access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | ||||||
| access_tags_hierachy = { "bicycle", "vehicle", "access" } | access_tags_hierachy = { "bicycle", "vehicle", "access" } | ||||||
| cycleway_tags = {["track"]=true,["lane"]=true,["opposite"]=true,["opposite_lane"]=true,["opposite_track"]=true,["share_busway"]=true,["sharrow"]=true,["shared"]=true } | cycleway_tags = {["track"]=true,["lane"]=true,["opposite"]=true,["opposite_lane"]=true,["opposite_track"]=true,["share_busway"]=true,["sharrow"]=true,["shared"]=true } | ||||||
| service_tag_restricted = { ["parking_aisle"] = true } | service_tag_restricted = { ["parking_aisle"] = true } | ||||||
|  | restriction_exception_tags = { "bicycle", "vehicle", "access" } | ||||||
| 
 | 
 | ||||||
| default_speed = 16 | default_speed = 15 | ||||||
| 
 | 
 | ||||||
| main_speeds = {  | walking_speed = 6 | ||||||
| 	["cycleway"] = 18, | 
 | ||||||
| 	["primary"] = 17, | bicycle_speeds = {  | ||||||
| 	["primary_link"] = 17, | 	["cycleway"] = default_speed, | ||||||
| 	["secondary"] = 18, | 	["primary"] = default_speed, | ||||||
| 	["secondary_link"] = 18, | 	["primary_link"] = default_speed, | ||||||
| 	["tertiary"] = 18, | 	["secondary"] = default_speed, | ||||||
| 	["tertiary_link"] = 18, | 	["secondary_link"] = default_speed, | ||||||
| 	["residential"] = 18, | 	["tertiary"] = default_speed, | ||||||
| 	["unclassified"] = 16, | 	["tertiary_link"] = default_speed, | ||||||
| 	["living_street"] = 16, | 	["residential"] = default_speed, | ||||||
| 	["road"] = 16, | 	["unclassified"] = default_speed, | ||||||
| 	["service"] = 16, | 	["living_street"] = default_speed, | ||||||
| 	["track"] = 13, | 	["road"] = default_speed, | ||||||
| 	["path"] = 13, | 	["service"] = default_speed, | ||||||
| 	["footway"] = 12, | 	["track"] = 12, | ||||||
| 	["pedestrian"] = 12, | 	["path"] = 12 | ||||||
| 	["pier"] = 12, | 	--["footway"] = 12, | ||||||
| 	["steps"] = 2 | 	--["pedestrian"] = 12, | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| pedestrian_speeds = {  | pedestrian_speeds = {  | ||||||
| 	["footway"] = 5, | 	["footway"] = walking_speed, | ||||||
| 	["pedestrian"] = 5, | 	["pedestrian"] = walking_speed, | ||||||
| 	["pier"] = 5, |  | ||||||
| 	["steps"] = 2 | 	["steps"] = 2 | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| @ -47,7 +49,7 @@ railway_speeds = { | |||||||
| } | } | ||||||
| 
 | 
 | ||||||
| platform_speeds = {  | platform_speeds = {  | ||||||
| 	["platform"] = 5 | 	["platform"] = walking_speed | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| amenity_speeds = {  | amenity_speeds = {  | ||||||
| @ -55,6 +57,10 @@ amenity_speeds = { | |||||||
| 	["parking_entrance"] = 10 | 	["parking_entrance"] = 10 | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | man_made_speeds = {  | ||||||
|  | 	["pier"] = walking_speed | ||||||
|  | } | ||||||
|  | 
 | ||||||
| route_speeds = {  | route_speeds = {  | ||||||
| 	["ferry"] = 5 | 	["ferry"] = 5 | ||||||
| } | } | ||||||
| @ -64,25 +70,22 @@ obey_oneway 			= true | |||||||
| obey_bollards 			= false | obey_bollards 			= false | ||||||
| use_restrictions 		= true | use_restrictions 		= true | ||||||
| ignore_areas 			= true -- future feature | ignore_areas 			= true -- future feature | ||||||
| traffic_signal_penalty 	= 2 | traffic_signal_penalty 	= 5 | ||||||
| u_turn_penalty 			= 20 | u_turn_penalty 			= 20 | ||||||
| 
 | use_turn_restrictions   = false | ||||||
|  | turn_penalty 			= 60 | ||||||
|  | turn_bias               = 1.4 | ||||||
| -- End of globals | -- End of globals | ||||||
| 
 | 
 | ||||||
| --find first tag in access hierachy which is set | function get_exceptions(vector) | ||||||
| function find_access_tag(source) | 	for i,v in ipairs(restriction_exception_tags) do  | ||||||
| 	for i,v in ipairs(access_tags_hierachy) do  | 		vector:Add(v) | ||||||
| 		local tag = source.tags:Find(v) |  | ||||||
| 		if tag ~= '' then --and tag ~= "" then |  | ||||||
| 			return tag |  | ||||||
| 		end |  | ||||||
| 	end | 	end | ||||||
| 	return nil |  | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| function node_function (node) | function node_function (node) | ||||||
| 	local barrier = node.tags:Find ("barrier") | 	local barrier = node.tags:Find ("barrier") | ||||||
| 	local access = find_access_tag(node) | 	local access = Access.find_access_tag(node, access_tags_hierachy) | ||||||
| 	local traffic_signal = node.tags:Find("highway") | 	local traffic_signal = node.tags:Find("highway") | ||||||
| 	 | 	 | ||||||
| 	-- flag node if it carries a traffic light	 | 	-- flag node if it carries a traffic light	 | ||||||
| @ -91,7 +94,7 @@ function node_function (node) | |||||||
| 	end | 	end | ||||||
| 	 | 	 | ||||||
| 	-- parse access and barrier tags | 	-- parse access and barrier tags | ||||||
| 	if access  and access ~= "" then | 	if access and access ~= "" then | ||||||
| 		if access_tag_blacklist[access] then | 		if access_tag_blacklist[access] then | ||||||
| 			node.bollard = true | 			node.bollard = true | ||||||
| 		else | 		else | ||||||
| @ -108,22 +111,43 @@ function node_function (node) | |||||||
| 	return 1 | 	return 1 | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| function way_function (way, numberOfNodesInWay) | function way_function (way) | ||||||
| 	-- A way must have two nodes or more | 	-- initial routability check, filters out buildings, boundaries, etc | ||||||
| 	if(numberOfNodesInWay < 2) then |  | ||||||
| 		return 0; |  | ||||||
| 	end |  | ||||||
| 	 |  | ||||||
| 	-- First, get the properties of each way that we come across |  | ||||||
| 	local highway = way.tags:Find("highway") | 	local highway = way.tags:Find("highway") | ||||||
|  | 	local route = way.tags:Find("route") | ||||||
|  | 	local man_made = way.tags:Find("man_made") | ||||||
|  | 	local railway = way.tags:Find("railway") | ||||||
|  | 	local amenity = way.tags:Find("amenity") | ||||||
|  | 	local public_transport = way.tags:Find("public_transport") | ||||||
|  |     if (not highway or highway == '') and  | ||||||
|  | 		(not route or route == '') and  | ||||||
|  | 		(not railway or railway=='') and  | ||||||
|  | 		(not amenity or amenity=='') and | ||||||
|  | 		(not man_made or man_made=='') and | ||||||
|  |     	(not public_transport or public_transport=='') | ||||||
|  |     	then | ||||||
|  |     	return 0 | ||||||
|  |     end | ||||||
|  |      | ||||||
|  |     -- don't route on ways or railways that are still under construction | ||||||
|  |     if highway=='construction' or railway=='construction' then | ||||||
|  |         return 0 | ||||||
|  |     end | ||||||
|  |      | ||||||
|  | 	-- access | ||||||
|  |  	local access = Access.find_access_tag(way, access_tags_hierachy) | ||||||
|  |     if access_tag_blacklist[access] then | ||||||
|  | 		return 0 | ||||||
|  |     end | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 	-- other tags | ||||||
| 	local name = way.tags:Find("name") | 	local name = way.tags:Find("name") | ||||||
| 	local ref = way.tags:Find("ref") | 	local ref = way.tags:Find("ref") | ||||||
| 	local junction = way.tags:Find("junction") | 	local junction = way.tags:Find("junction") | ||||||
| 	local route = way.tags:Find("route") |  | ||||||
| 	local railway = way.tags:Find("railway") |  | ||||||
| 	local public_transport = way.tags:Find("public_transport") |  | ||||||
| 	local maxspeed = parseMaxspeed(way.tags:Find ( "maxspeed") ) | 	local maxspeed = parseMaxspeed(way.tags:Find ( "maxspeed") ) | ||||||
| 	local man_made = way.tags:Find("man_made") | 	local maxspeed_forward = parseMaxspeed(way.tags:Find( "maxspeed:forward")) | ||||||
|  | 	local maxspeed_backward = parseMaxspeed(way.tags:Find( "maxspeed:backward")) | ||||||
| 	local barrier = way.tags:Find("barrier") | 	local barrier = way.tags:Find("barrier") | ||||||
| 	local oneway = way.tags:Find("oneway") | 	local oneway = way.tags:Find("oneway") | ||||||
| 	local onewayClass = way.tags:Find("oneway:bicycle") | 	local onewayClass = way.tags:Find("oneway:bicycle") | ||||||
| @ -133,23 +157,7 @@ function way_function (way, numberOfNodesInWay) | |||||||
| 	local duration	= way.tags:Find("duration") | 	local duration	= way.tags:Find("duration") | ||||||
| 	local service	= way.tags:Find("service") | 	local service	= way.tags:Find("service") | ||||||
| 	local area = way.tags:Find("area") | 	local area = way.tags:Find("area") | ||||||
| 	local amenity = way.tags:Find("amenity") | 	local foot = way.tags:Find("foot") | ||||||
| 	local access = find_access_tag(way) |  | ||||||
| 	 |  | ||||||
| 	-- initial routability check, filters out buildings, boundaries, etc |  | ||||||
|     if (not highway or highway == '') and  |  | ||||||
| 		(not route or route == '') and  |  | ||||||
| 		(not railway or railway=='') and  |  | ||||||
| 		(not amenity or amenity=='') and |  | ||||||
|     	(not public_transport or public_transport=='') |  | ||||||
|     	then |  | ||||||
|     	return 0 |  | ||||||
|     end |  | ||||||
| 		 |  | ||||||
|  	-- access |  | ||||||
|     if access_tag_blacklist[access] then |  | ||||||
| 		return 0 |  | ||||||
|     end |  | ||||||
| 
 | 
 | ||||||
| 	-- name	 | 	-- name	 | ||||||
| 	if "" ~= ref then | 	if "" ~= ref then | ||||||
| @ -160,13 +168,13 @@ function way_function (way, numberOfNodesInWay) | |||||||
| 		way.name = highway		-- if no name exists, use way type | 		way.name = highway		-- if no name exists, use way type | ||||||
| 	end | 	end | ||||||
| 	 | 	 | ||||||
|  | 	-- speed | ||||||
|     if route_speeds[route] then |     if route_speeds[route] then | ||||||
| 		-- ferries | 		-- ferries (doesn't cover routes tagged using relations) | ||||||
| 		way.direction = Way.bidirectional | 		way.direction = Way.bidirectional | ||||||
| 		way.ignore_in_grid = true | 		way.ignore_in_grid = true | ||||||
| 		if durationIsValid(duration) then | 		if durationIsValid(duration) then | ||||||
| 			way.speed = math.max( parseDuration(duration) / math.max(1, numberOfNodesInWay-1) ) | 			way.duration = math.max( 1, parseDuration(duration) ) | ||||||
| 		 	way.is_duration_set = true |  | ||||||
| 		else | 		else | ||||||
| 		 	way.speed = route_speeds[route] | 		 	way.speed = route_speeds[route] | ||||||
| 		end | 		end | ||||||
| @ -182,34 +190,32 @@ function way_function (way, numberOfNodesInWay) | |||||||
| 			way.speed = railway_speeds[railway]		 | 			way.speed = railway_speeds[railway]		 | ||||||
| 			way.direction = Way.bidirectional | 			way.direction = Way.bidirectional | ||||||
| 		end | 		end | ||||||
| 	elseif pedestrian_speeds[highway] and main_speeds[highway] then |  | ||||||
| 		-- pedestrian areas |  | ||||||
| 		if access_tag_whitelist[access] then |  | ||||||
| 			way.speed = main_speeds[highway]		-- biking  |  | ||||||
| 		else |  | ||||||
| 			way.speed = pedestrian_speeds[highway]	-- pushing bikes |  | ||||||
| 		end |  | ||||||
| 	elseif amenity and amenity_speeds[amenity] then | 	elseif amenity and amenity_speeds[amenity] then | ||||||
| 		-- parking areas | 		-- parking areas | ||||||
| 		way.speed = amenity_speeds[amenity] | 		way.speed = amenity_speeds[amenity] | ||||||
| 	else | 	elseif bicycle_speeds[highway] then | ||||||
| 		-- regular ways | 		-- regular ways | ||||||
| 		if main_speeds[highway] then  |       	way.speed = bicycle_speeds[highway] | ||||||
| 	      	way.speed = main_speeds[highway] | 	elseif access and access_tag_whitelist[access] then | ||||||
| 	    elseif main_speeds[man_made] then  | 	    -- unknown way, but valid access tag | ||||||
| 			way.speed = main_speeds[man_made] | 		way.speed = default_speed | ||||||
| 		elseif access_tag_whitelist[access] then | 	else | ||||||
| 			way.speed = default_speed | 	    -- biking not allowed, maybe we can push our bike? | ||||||
| 		end | 	    -- essentially requires pedestrian profiling, for example foot=no mean we can't push a bike | ||||||
| 	end |         -- TODO: if we can push, the way should be marked as pedestrion mode, but there's no way to do it yet from lua.. | ||||||
| 	 |         if foot ~= 'no' then | ||||||
| 	-- maxspeed | 	        if pedestrian_speeds[highway] then | ||||||
| 	if take_minimum_of_speeds then | 	            -- pedestrian-only ways and areas | ||||||
| 		if maxspeed and maxspeed>0 then |         		way.speed = pedestrian_speeds[highway] | ||||||
| 			way.speed = math.min(way.speed, maxspeed) |         	elseif man_made and man_made_speeds[man_made] then | ||||||
| 		end |             	-- man made structures | ||||||
| 	end |             	way.speed = man_made_speeds[man_made] | ||||||
| 	 |             elseif foot == 'yes' then | ||||||
|  |                 way.speed = walking_speed | ||||||
|  |             end | ||||||
|  |         end | ||||||
|  |     end | ||||||
|  | 		 | ||||||
| 	-- direction | 	-- direction | ||||||
| 	way.direction = Way.bidirectional | 	way.direction = Way.bidirectional | ||||||
| 	local impliedOneway = false | 	local impliedOneway = false | ||||||
| @ -252,15 +258,65 @@ function way_function (way, numberOfNodesInWay) | |||||||
| 		way.direction = Way.oneway | 		way.direction = Way.oneway | ||||||
| 	end | 	end | ||||||
| 	 | 	 | ||||||
|  | 	-- pushing bikes | ||||||
|  | 	if bicycle_speeds[highway] or pedestrian_speeds[highway] then | ||||||
|  | 	    if foot ~= 'no' then | ||||||
|  | 	        if junction ~= "roundabout" then | ||||||
|  |             	if way.direction == Way.oneway then | ||||||
|  |             	    way.backward_speed = walking_speed | ||||||
|  |                 elseif way.direction == Way.opposite then | ||||||
|  |                     way.backward_speed = walking_speed | ||||||
|  |                     way.speed = way.speed | ||||||
|  |             	end | ||||||
|  |             end | ||||||
|  |         end | ||||||
|  |         if way.backward_speed == way.speed then | ||||||
|  |             -- TODO: no way yet to mark a way as pedestrian mode if forward/backward speeds are equal | ||||||
|  |             way.direction = Way.bidirectional | ||||||
|  |         end | ||||||
|  |     end | ||||||
|  | 
 | ||||||
|  | 	 | ||||||
| 	-- cycleways | 	-- cycleways | ||||||
| 	if cycleway and cycleway_tags[cycleway] then | 	if cycleway and cycleway_tags[cycleway] then | ||||||
| 		way.speed = main_speeds["cycleway"] | 		way.speed = bicycle_speeds["cycleway"] | ||||||
| 	elseif cycleway_left and cycleway_tags[cycleway_left] then | 	elseif cycleway_left and cycleway_tags[cycleway_left] then | ||||||
| 		way.speed = main_speeds["cycleway"] | 		way.speed = bicycle_speeds["cycleway"] | ||||||
| 	elseif cycleway_right and cycleway_tags[cycleway_right] then | 	elseif cycleway_right and cycleway_tags[cycleway_right] then | ||||||
| 		way.speed = main_speeds["cycleway"] | 		way.speed = bicycle_speeds["cycleway"] | ||||||
| 	end | 	end | ||||||
| 
 | 
 | ||||||
|  | 	-- maxspeed | ||||||
|  | 	-- TODO: maxspeed of backward direction | ||||||
|  | 	if take_minimum_of_speeds then | ||||||
|  | 		if maxspeed and maxspeed>0 then | ||||||
|  | 			way.speed = math.min(way.speed, maxspeed) | ||||||
|  | 		end | ||||||
|  | 	end | ||||||
|  | 
 | ||||||
|  |   -- Override speed settings if explicit forward/backward maxspeeds are given | ||||||
|  |     if maxspeed_forward ~= nil and maxspeed_forward > 0 then | ||||||
|  | 	if Way.bidirectional == way.direction then | ||||||
|  |           way.backward_speed = way.speed | ||||||
|  |         end | ||||||
|  |         way.speed = maxspeed_forward | ||||||
|  |     end | ||||||
|  |     if maxspeed_backward ~= nil and maxspeed_backward > 0 then | ||||||
|  |       way.backward_speed = maxspeed_backward | ||||||
|  |     end | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 	 | ||||||
| 	way.type = 1 | 	way.type = 1 | ||||||
| 	return 1 | 	return 1 | ||||||
| end | end | ||||||
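To illustrate the maxspeed:forward/maxspeed:backward override added to way_function above, here is a minimal standalone Lua sketch; the plain table, the tag values and the base speed are stand-ins, not taken from the profile:

    -- minimal sketch of the forward/backward maxspeed override (values are made up)
    local way = { speed = 15, direction = "bidirectional" }
    local maxspeed_forward, maxspeed_backward = 30, 10

    if maxspeed_forward ~= nil and maxspeed_forward > 0 then
        if way.direction == "bidirectional" then
            way.backward_speed = way.speed   -- keep the previous speed for the reverse direction
        end
        way.speed = maxspeed_forward
    end
    if maxspeed_backward ~= nil and maxspeed_backward > 0 then
        way.backward_speed = maxspeed_backward
    end

    print(way.speed, way.backward_speed)     -- 30   10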
|  | 
 | ||||||
|  | function turn_function (angle) | ||||||
|  |     -- compute turn penalty as angle^2, with a left/right bias | ||||||
|  |     local k = turn_penalty/(90.0*90.0) | ||||||
|  | 	if angle>=0 then | ||||||
|  | 	    return angle*angle*k/turn_bias | ||||||
|  | 	else | ||||||
|  | 	    return angle*angle*k*turn_bias | ||||||
|  |     end | ||||||
|  | end | ||||||
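The quadratic penalty above grows with the square of the turn angle and is skewed by turn_bias, so in right-hand traffic a right turn costs less than a left turn of the same angle. A standalone sketch with assumed constants (turn_penalty = 60 and turn_bias = 1.4 are illustrative, not read from this commit):

    -- standalone sketch of the quadratic, biased turn penalty
    local turn_penalty = 60    -- assumed value, not taken from this commit
    local turn_bias    = 1.4   -- assumed value, not taken from this commit

    local function turn_cost(angle)
        local k = turn_penalty / (90.0 * 90.0)
        if angle >= 0 then
            return angle * angle * k / turn_bias   -- right turns: cheaper
        else
            return angle * angle * k * turn_bias   -- left turns: more expensive
        end
    end

    print(turn_cost(90))    -- ~42.9 for a 90 degree right turn
    print(turn_cost(-90))   -- 84.0 for a 90 degree left turn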
|  | |||||||
| @ -1,6 +1,7 @@ | |||||||
| -- Begin of globals | -- Begin of globals | ||||||
|  | require("lib/access") | ||||||
| 
 | 
 | ||||||
| barrier_whitelist = { ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true} | barrier_whitelist = { ["cattle_grid"] = true, ["border_control"] = true, ["toll_booth"] = true, ["sally_port"] = true, ["gate"] = true, ["no"] = true} | ||||||
| access_tag_whitelist = { ["yes"] = true, ["motorcar"] = true, ["motor_vehicle"] = true, ["vehicle"] = true, ["permissive"] = true, ["designated"] = true  } | access_tag_whitelist = { ["yes"] = true, ["motorcar"] = true, ["motor_vehicle"] = true, ["vehicle"] = true, ["permissive"] = true, ["designated"] = true  } | ||||||
| access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestry"] = true } | access_tag_blacklist = { ["no"] = true, ["private"] = true, ["agricultural"] = true, ["forestry"] = true } | ||||||
| access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | ||||||
| @ -8,6 +9,7 @@ access_tags = { "motorcar", "motor_vehicle", "vehicle" } | |||||||
| access_tags_hierachy = { "motorcar", "motor_vehicle", "vehicle", "access" } | access_tags_hierachy = { "motorcar", "motor_vehicle", "vehicle", "access" } | ||||||
| service_tag_restricted = { ["parking_aisle"] = true } | service_tag_restricted = { ["parking_aisle"] = true } | ||||||
| ignore_in_grid = { ["ferry"] = true } | ignore_in_grid = { ["ferry"] = true } | ||||||
|  | restriction_exception_tags = { "motorcar", "motor_vehicle", "vehicle" } | ||||||
| 
 | 
 | ||||||
| speed_profile = {  | speed_profile = {  | ||||||
|   ["motorway"] = 90,  |   ["motorway"] = 90,  | ||||||
| @ -26,6 +28,7 @@ speed_profile = { | |||||||
|   ["service"] = 15, |   ["service"] = 15, | ||||||
| --  ["track"] = 5, | --  ["track"] = 5, | ||||||
|   ["ferry"] = 5, |   ["ferry"] = 5, | ||||||
|  |   ["shuttle_train"] = 10, | ||||||
|   ["default"] = 50 |   ["default"] = 50 | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| @ -39,24 +42,9 @@ u_turn_penalty 			= 20 | |||||||
| 
 | 
 | ||||||
| -- End of globals | -- End of globals | ||||||
| 
 | 
 | ||||||
| --find first tag in access hierachy which is set | function get_exceptions(vector) | ||||||
| local function find_access_tag(source) | 	for i,v in ipairs(restriction_exception_tags) do  | ||||||
| 	for i,v in ipairs(access_tags_hierachy) do  | 		vector:Add(v) | ||||||
| 		if source.tags:Holds(v) then  |  | ||||||
| 			local tag = source.tags:Find(v) |  | ||||||
| 			if tag ~= '' then --and tag ~= "" then |  | ||||||
| 				return tag |  | ||||||
| 			end |  | ||||||
| 		end |  | ||||||
| 	end |  | ||||||
| 	return nil |  | ||||||
| end |  | ||||||
| 
 |  | ||||||
| local function find_in_keyvals(keyvals, tag) |  | ||||||
| 	if keyvals:Holds(tag) then |  | ||||||
| 		return keyvals:Find(tag) |  | ||||||
| 	else |  | ||||||
| 		return nil |  | ||||||
| 	end | 	end | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| @ -64,7 +52,7 @@ local function parse_maxspeed(source) | |||||||
| 	if source == nil then | 	if source == nil then | ||||||
| 		return 0 | 		return 0 | ||||||
| 	end | 	end | ||||||
| 	local n = tonumber(source) | 	local n = tonumber(source:match("%d*")) | ||||||
| 	if n == nil then | 	if n == nil then | ||||||
| 		n = 0 | 		n = 0 | ||||||
| 	end | 	end | ||||||
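The switch from tonumber(source) to tonumber(source:match("%d*")) makes the maxspeed parser tolerant of unit suffixes by keeping only the leading digits. A simplified sketch of the changed behaviour (the return value here is a plain number; any further normalisation the real function does is not shown in this hunk):

    -- simplified sketch of the new maxspeed parsing (leading digits only)
    local function parse_maxspeed(source)
        if source == nil then
            return 0
        end
        local n = tonumber(source:match("%d*"))   -- keep only the leading digits
        if n == nil then
            n = 0
        end
        return n
    end

    print(parse_maxspeed("50"))       -- 50
    print(parse_maxspeed("30 mph"))   -- 30 (the unit is dropped, not converted)
    print(parse_maxspeed("walk"))     -- 0
    print(parse_maxspeed(nil))        -- 0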
| @ -76,7 +64,7 @@ end | |||||||
| 
 | 
 | ||||||
| function node_function (node) | function node_function (node) | ||||||
|   local barrier = node.tags:Find ("barrier") |   local barrier = node.tags:Find ("barrier") | ||||||
|   local access = find_access_tag(node) |   local access = Access.find_access_tag(node, access_tags_hierachy) | ||||||
|   local traffic_signal = node.tags:Find("highway") |   local traffic_signal = node.tags:Find("highway") | ||||||
|    |    | ||||||
|   --flag node if it carries a traffic light |   --flag node if it carries a traffic light | ||||||
| @ -101,13 +89,7 @@ function node_function (node) | |||||||
| end | end | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| function way_function (way, numberOfNodesInWay) | function way_function (way) | ||||||
| 
 |  | ||||||
|   -- A way must have two nodes or more |  | ||||||
|   if(numberOfNodesInWay < 2) then |  | ||||||
|     return 0; |  | ||||||
|   end |  | ||||||
|    |  | ||||||
|   -- First, get the properties of each way that we come across |   -- First, get the properties of each way that we come across | ||||||
|     local highway = way.tags:Find("highway") |     local highway = way.tags:Find("highway") | ||||||
|     local name = way.tags:Find("name") |     local name = way.tags:Find("name") | ||||||
| @ -115,13 +97,15 @@ function way_function (way, numberOfNodesInWay) | |||||||
|     local junction = way.tags:Find("junction") |     local junction = way.tags:Find("junction") | ||||||
|     local route = way.tags:Find("route") |     local route = way.tags:Find("route") | ||||||
|     local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) |     local maxspeed = parse_maxspeed(way.tags:Find ( "maxspeed") ) | ||||||
|  |     local maxspeed_forward = tonumber(way.tags:Find( "maxspeed:forward")) | ||||||
|  |     local maxspeed_backward = tonumber(way.tags:Find( "maxspeed:backward")) | ||||||
|     local barrier = way.tags:Find("barrier") |     local barrier = way.tags:Find("barrier") | ||||||
|     local oneway = way.tags:Find("oneway") |     local oneway = way.tags:Find("oneway") | ||||||
|     local cycleway = way.tags:Find("cycleway") |     local cycleway = way.tags:Find("cycleway") | ||||||
|     local duration  = way.tags:Find("duration") |     local duration  = way.tags:Find("duration") | ||||||
|     local service  = way.tags:Find("service") |     local service  = way.tags:Find("service") | ||||||
|     local area = way.tags:Find("area") |     local area = way.tags:Find("area") | ||||||
|     local access = find_access_tag(way) |     local access = Access.find_access_tag(way, access_tags_hierachy) | ||||||
| 
 | 
 | ||||||
|   -- Second, parse the way according to these properties |   -- Second, parse the way according to these properties | ||||||
| 
 | 
 | ||||||
| @ -148,29 +132,31 @@ function way_function (way, numberOfNodesInWay) | |||||||
| 	end | 	end | ||||||
| 
 | 
 | ||||||
|   -- Handling ferries and piers |   -- Handling ferries and piers | ||||||
|     if (speed_profile[route] ~= nil and speed_profile[route] > 0) |   if (speed_profile[route] ~= nil and speed_profile[route] > 0) then | ||||||
|     then |    if durationIsValid(duration) then | ||||||
|       if durationIsValid(duration) then |     way.duration = math.max( parseDuration(duration), 1 ); | ||||||
| 	    way.speed = math.max( parseDuration(duration) / math.max(1, numberOfNodesInWay-1) ); |    end | ||||||
|         way.is_duration_set = true |    way.direction = Way.bidirectional | ||||||
|       end |    if speed_profile[route] ~= nil then | ||||||
|       way.direction = Way.bidirectional |     highway = route; | ||||||
|       if speed_profile[route] ~= nil then |    end | ||||||
|          highway = route; |    if tonumber(way.duration) < 0 then | ||||||
|       end |     way.speed = speed_profile[highway] | ||||||
|       if not way.is_duration_set then |    end | ||||||
|         way.speed = speed_profile[highway] |   end | ||||||
|       end |  | ||||||
|     end |  | ||||||
|      |      | ||||||
|   -- Set the avg speed on the way if it is accessible by road class |   -- Set the avg speed on the way if it is accessible by road class | ||||||
|     if (speed_profile[highway] ~= nil and way.speed == -1 ) then  |   if (speed_profile[highway] ~= nil and way.speed == -1 ) then | ||||||
|       if 0 == maxspeed then |   if maxspeed > speed_profile[highway] then | ||||||
|         maxspeed = math.huge |    way.speed = maxspeed | ||||||
|       end |   else | ||||||
|       way.speed = math.min(speed_profile[highway], maxspeed) |    if 0 == maxspeed then | ||||||
|  |     maxspeed = math.huge | ||||||
|  |    end | ||||||
|  |    way.speed = math.min(speed_profile[highway], maxspeed) | ||||||
|     end |     end | ||||||
|      |   end | ||||||
|  | 
 | ||||||
|   -- Set the avg speed on ways that are marked accessible |   -- Set the avg speed on ways that are marked accessible | ||||||
|     if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then |     if "" ~= highway and access_tag_whitelist[access] and way.speed == -1 then | ||||||
|       if 0 == maxspeed then |       if 0 == maxspeed then | ||||||
| @ -203,7 +189,18 @@ function way_function (way, numberOfNodesInWay) | |||||||
|     else |     else | ||||||
|       way.direction = Way.bidirectional |       way.direction = Way.bidirectional | ||||||
|     end |     end | ||||||
|      | 
 | ||||||
|  |   -- Override speed settings if explicit forward/backward maxspeeds are given | ||||||
|  |     if maxspeed_forward ~= nil and maxspeed_forward > 0 then | ||||||
|  | 	if Way.bidirectional == way.direction then | ||||||
|  |           way.backward_speed = way.speed | ||||||
|  |         end | ||||||
|  |         way.speed = maxspeed_forward | ||||||
|  |     end | ||||||
|  |     if maxspeed_backward ~= nil and maxspeed_backward > 0 then | ||||||
|  |       way.backward_speed = maxspeed_backward | ||||||
|  |     end | ||||||
|  | 
 | ||||||
|   -- Override general direction settings if there is a specific one for our mode of travel |   -- Override general direction settings if there is a specific one for our mode of travel | ||||||
|    |    | ||||||
|     if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then |     if ignore_in_grid[highway] ~= nil and ignore_in_grid[highway] then | ||||||
|  | |||||||
| @ -9,6 +9,7 @@ access_tag_restricted = { ["destination"] = true, ["delivery"] = true } | |||||||
| access_tags = { "foot" } | access_tags = { "foot" } | ||||||
| service_tag_restricted = { ["parking_aisle"] = true } | service_tag_restricted = { ["parking_aisle"] = true } | ||||||
| ignore_in_grid = { ["ferry"] = true } | ignore_in_grid = { ["ferry"] = true } | ||||||
|  | restriction_exception_tags = { "foot" } | ||||||
| 
 | 
 | ||||||
| speed_profile = {  | speed_profile = {  | ||||||
|   ["primary"] = 5, |   ["primary"] = 5, | ||||||
| @ -40,9 +41,15 @@ use_restrictions 		= false | |||||||
| ignore_areas 			= true -- future feature | ignore_areas 			= true -- future feature | ||||||
| traffic_signal_penalty 	= 2 | traffic_signal_penalty 	= 2 | ||||||
| u_turn_penalty 			= 2 | u_turn_penalty 			= 2 | ||||||
| 
 | use_turn_restrictions   = false | ||||||
| -- End of globals | -- End of globals | ||||||
| 
 | 
 | ||||||
|  | function get_exceptions(vector) | ||||||
|  | 	for i,v in ipairs(restriction_exception_tags) do  | ||||||
|  | 		vector:Add(v) | ||||||
|  | 	end | ||||||
|  | end | ||||||
|  | 
 | ||||||
| function node_function (node) | function node_function (node) | ||||||
|   local barrier = node.tags:Find ("barrier") |   local barrier = node.tags:Find ("barrier") | ||||||
|   local access = node.tags:Find ("access") |   local access = node.tags:Find ("access") | ||||||
| @ -68,13 +75,8 @@ function node_function (node) | |||||||
|   return 1 |   return 1 | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| function way_function (way, numberOfNodesInWay) | function way_function (way) | ||||||
| 
 | 
 | ||||||
|   -- A way must have two nodes or more |  | ||||||
|   if(numberOfNodesInWay < 2) then |  | ||||||
|     return 0; |  | ||||||
|   end |  | ||||||
|    |  | ||||||
|   -- First, get the properties of each way that we come across |   -- First, get the properties of each way that we come across | ||||||
|     local highway = way.tags:Find("highway") |     local highway = way.tags:Find("highway") | ||||||
|     local name = way.tags:Find("name") |     local name = way.tags:Find("name") | ||||||
| @ -145,10 +147,7 @@ function way_function (way, numberOfNodesInWay) | |||||||
|      |      | ||||||
|   -- Set the avg speed on the way if it is accessible by road class |   -- Set the avg speed on the way if it is accessible by road class | ||||||
|     if (speed_profile[highway] ~= nil and way.speed == -1 ) then  |     if (speed_profile[highway] ~= nil and way.speed == -1 ) then  | ||||||
|       if (0 < maxspeed and not take_minimum_of_speeds) or (maxspeed == 0) then |       way.speed = speed_profile[highway] | ||||||
|         maxspeed = math.huge |  | ||||||
|       end |  | ||||||
|       way.speed = math.min(speed_profile[highway], maxspeed) |  | ||||||
|     end |     end | ||||||
|      |      | ||||||
|   -- Set the avg speed on ways that are marked accessible |   -- Set the avg speed on ways that are marked accessible | ||||||
|  | |||||||
							
								
								
									
13  profiles/lib/access.lua  Normal file
							| @ -0,0 +1,13 @@ | |||||||
|  | local ipairs = ipairs | ||||||
|  | 
 | ||||||
|  | module "Access" | ||||||
|  | 
 | ||||||
|  | function find_access_tag(source,access_tags_hierachy) | ||||||
|  |     for i,v in ipairs(access_tags_hierachy) do  | ||||||
|  |         local tag = source.tags:Find(v) | ||||||
|  |         if tag ~= '' then | ||||||
|  |             return tag | ||||||
|  |         end | ||||||
|  |     end | ||||||
|  |     return nil | ||||||
|  | end | ||||||
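The new shared helper returns the value of the first tag in the hierarchy that is set on the element. A hedged usage sketch, assuming the module has been loaded via require("lib/access"); the stub element below only mimics the tags:Find(key) interface the profiles rely on and is not part of the real bindings:

    -- stand-in element whose tags object mimics the Find(key) interface used by the profiles
    local node = {
        tags = {
            values = { bicycle = "no", access = "yes" },
            Find = function(self, key) return self.values[key] or '' end
        }
    }

    -- the first non-empty tag in the hierarchy wins: "bicycle" here
    print(Access.find_access_tag(node, { "bicycle", "vehicle", "access" }))   -- no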
| @ -23,6 +23,18 @@ ignore_areas 			= true	-- future feature | |||||||
| traffic_signal_penalty 	= 7		-- seconds | traffic_signal_penalty 	= 7		-- seconds | ||||||
| u_turn_penalty 			= 20 | u_turn_penalty 			= 20 | ||||||
| 
 | 
 | ||||||
|  | function limit_speed(speed, limits) | ||||||
|  |     -- don't use ipairs(), since it stops at the first nil value | ||||||
|  |     for i=1, #limits do | ||||||
|  |         local limit = limits[i] | ||||||
|  |         if limit ~= nil and limit > 0 then | ||||||
|  |             if limit < speed then | ||||||
|  |                 return limit        -- stop at the first speed limit that's smaller than speed | ||||||
|  |             end | ||||||
|  |         end | ||||||
|  |     end | ||||||
|  |     return speed | ||||||
|  | end | ||||||
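The helper above walks a list of candidate limits in priority order and returns the first positive one that is lower than the current speed; zero and nil entries are passed over. An illustrative call site (the values and the call itself are assumptions, not part of this hunk):

    -- cap a speed by the first applicable limit; zero/nil entries are skipped
    local speed = 90
    local maxspeed_forward, maxspeed = 0, 50   -- assumed tag values

    speed = limit_speed(speed, { maxspeed_forward, maxspeed })
    print(speed)   -- 50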
| 
 | 
 | ||||||
| function node_function (node) | function node_function (node) | ||||||
| 	local traffic_signal = node.tags:Find("highway") | 	local traffic_signal = node.tags:Find("highway") | ||||||
| @ -34,26 +46,50 @@ function node_function (node) | |||||||
| 	return 1 | 	return 1 | ||||||
| end | end | ||||||
| 
 | 
 | ||||||
| function way_function (way, numberOfNodesInWay) | function way_function (way) | ||||||
| 	-- A way must have two nodes or more |  | ||||||
| 	if(numberOfNodesInWay < 2) then |  | ||||||
| 		return 0; |  | ||||||
| 	end |  | ||||||
| 	 |  | ||||||
| 	local highway = way.tags:Find("highway") | 	local highway = way.tags:Find("highway") | ||||||
| 	local name = way.tags:Find("name") | 	local name = way.tags:Find("name") | ||||||
| 	local oneway = way.tags:Find("oneway") | 	local oneway = way.tags:Find("oneway") | ||||||
| 	local route = way.tags:Find("route") | 	local route = way.tags:Find("route") | ||||||
| 	local duration = way.tags:Find("duration") | 	local duration = way.tags:Find("duration") | ||||||
|  |     local maxspeed = tonumber(way.tags:Find ( "maxspeed")) | ||||||
|  |     local maxspeed_forward = tonumber(way.tags:Find( "maxspeed:forward")) | ||||||
|  |     local maxspeed_backward = tonumber(way.tags:Find( "maxspeed:backward")) | ||||||
| 	 | 	 | ||||||
| 	way.name = name | 	way.name = name | ||||||
| 
 | 
 | ||||||
|   	if route ~= nil and durationIsValid(duration) then |   	if route ~= nil and durationIsValid(duration) then | ||||||
| 		way.ignore_in_grid = true | 		way.duration = math.max( 1, parseDuration(duration) ) | ||||||
| 		way.speed = math.max( 1, parseDuration(duration) / math.max(1, numberOfNodesInWay-1) ) |  | ||||||
| 	 	way.is_duration_set = true |  | ||||||
| 	else | 	else | ||||||
| 		way.speed = speed_profile[highway] or speed_profile['default'] | 	    local speed_forw = speed_profile[highway] or speed_profile['default'] | ||||||
|  | 	    local speed_back = speed_forw | ||||||
|  | 
 | ||||||
|  |     	if highway == "river" then | ||||||
|  |     		local temp_speed = speed_forw; | ||||||
|  |     		speed_forw = temp_speed*1.5 | ||||||
|  |     		speed_back = temp_speed/1.5 | ||||||
|  |    	end | ||||||
|  |             	 | ||||||
|  |         if maxspeed_forward ~= nil and maxspeed_forward > 0 then | ||||||
|  | 			speed_forw = maxspeed_forward | ||||||
|  | 		else | ||||||
|  | 			if maxspeed ~= nil and maxspeed > 0 and speed_forw > maxspeed then | ||||||
|  | 				speed_forw = maxspeed | ||||||
|  | 			end | ||||||
|  | 		end | ||||||
|  | 		 | ||||||
|  | 		if maxspeed_backward ~= nil and maxspeed_backward > 0 then | ||||||
|  | 			speed_back = maxspeed_backward | ||||||
|  | 		else | ||||||
|  | 			if maxspeed ~=nil and maxspeed > 0 and speed_back > maxspeed then | ||||||
|  | 				speed_back = maxspeed | ||||||
|  | 			end | ||||||
|  | 		end | ||||||
|  |          | ||||||
|  |         way.speed = speed_forw | ||||||
|  |         if speed_back ~= speed_forw then | ||||||
|  |             way.backward_speed = speed_back | ||||||
|  |         end | ||||||
| 	end | 	end | ||||||
| 	 | 	 | ||||||
| 	if oneway == "no" or oneway == "0" or oneway == "false" then | 	if oneway == "no" or oneway == "0" or oneway == "false" then | ||||||
|  | |||||||
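The river handling above makes the speed asymmetric around the base value: 1.5 times faster with the current, 1.5 times slower against it. Worked through with an assumed base speed of 24:

    -- numeric sketch of the river asymmetry (base speed is an assumption)
    local base = 24                   -- assumed base speed for the way
    local speed_forw = base * 1.5     -- 36: with the current (downstream)
    local speed_back = base / 1.5     -- 16: against the current (upstream)
    print(speed_forw, speed_back)     -- 36   16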
							
								
								
									
8  profiles/turnbot.lua  Normal file
							| @ -0,0 +1,8 @@ | |||||||
|  | -- Testbot, with turn penalty | ||||||
|  | -- Used for testing turn penalties | ||||||
|  | 
 | ||||||
|  | require 'testbot' | ||||||
|  | 
 | ||||||
|  | function turn_function (angle) | ||||||
|  |     return 200*math.abs(angle)/180 -- penalty  | ||||||
|  | end | ||||||
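For reference, the linear penalty above grows from 0 for going straight to 200 for a full U-turn. A minimal standalone check of the formula:

    -- the turnbot penalty for a few angles (degrees)
    local function turn_function(angle)
        return 200 * math.abs(angle) / 180
    end

    print(turn_function(0))      -- 0: going straight is free
    print(turn_function(90))     -- ~111 is wrong to expect here; 200*90/180 = 100
    print(turn_function(-180))   -- 200: a full U-turn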
| @ -1 +1 @@ | |||||||
| disk=/tmp/stxxl,1,syscall | disk=/tmp/stxxl,10,syscall | ||||||
|  | |||||||
Some files were not shown because too many files have changed in this diff