Rewrite cucumber test suite in JS

Lauren Budorick 2016-03-04 12:11:05 -08:00
parent ea027a7cc1
commit c8bb50497b
70 changed files with 2889 additions and 2368 deletions

28
.eslintrc Normal file

@ -0,0 +1,28 @@
{
"rules": {
"indent": [
2,
4
],
"quotes": [
1,
"single"
],
"linebreak-style": [
2,
"unix"
],
"semi": [
2,
"always"
],
"no-console": [
1
]
},
"env": {
"es6": true,
"node": true
},
"extends": "eslint:recommended"
}
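The severities in this config use ESLint's standard 0/1/2 scale (off/warn/error): 4-space indentation, Unix line endings and mandatory semicolons are errors, while single quotes and no-console are only warnings. A minimal snippet in the style this config accepts, for illustration only (not part of the commit):

var path = require('path');

// 4-space indent, single quotes and trailing semicolons, per the rules above
function cacheDir(base) {
    return path.join(base, 'test', 'cache');
}

module.exports = cacheDir;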

5
.gitignore vendored

@ -73,7 +73,12 @@ stxxl.errlog
 ###################
 /sandbox/
+# Test related files #
+######################
 /test/profile.lua
+/test/cache
+/test/speeds.csv
+node_modules
 # Deprecated config file #
 ##########################

.travis.yml

@ -26,7 +26,7 @@ matrix:
 addons: &gcc5
 apt:
 sources: ['ubuntu-toolchain-r-test']
-packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='g++-5' BUILD_TYPE='Debug'
 - os: linux
@ -34,7 +34,7 @@ matrix:
 addons: &gcc48
 apt:
 sources: ['ubuntu-toolchain-r-test']
-packages: ['g++-4.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['g++-4.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='g++-4.8' BUILD_TYPE='Debug'
 - os: linux
@ -42,7 +42,7 @@ matrix:
 addons: &clang38
 apt:
 sources: ['llvm-toolchain-precise', 'ubuntu-toolchain-r-test']
-packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='clang++-3.8' BUILD_TYPE='Debug' RUN_CLANG_FORMAT=ON
 - os: osx
@ -56,7 +56,7 @@ matrix:
 addons: &gcc5
 apt:
 sources: ['ubuntu-toolchain-r-test']
-packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='g++-5' BUILD_TYPE='Release'
 - os: linux
@ -64,7 +64,7 @@ matrix:
 addons: &gcc48
 apt:
 sources: ['ubuntu-toolchain-r-test']
-packages: ['g++-4.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['g++-4.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='g++-4.8' BUILD_TYPE='Release'
 - os: linux
@ -72,7 +72,7 @@ matrix:
 addons: &clang38
 apt:
 sources: ['llvm-toolchain-precise', 'ubuntu-toolchain-r-test']
-packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='clang++-3.8' BUILD_TYPE='Release'
 - os: osx
@ -86,7 +86,7 @@ matrix:
 addons: &gcc5
 apt:
 sources: ['ubuntu-toolchain-r-test']
-packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['g++-5', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='g++-5' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON
 - os: linux
@ -94,7 +94,7 @@ matrix:
 addons: &clang38
 apt:
 sources: ['llvm-toolchain-precise', 'ubuntu-toolchain-r-test']
-packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'rubygems-integration', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
+packages: ['clang-3.8', 'libbz2-dev', 'libstxxl-dev', 'libstxxl1', 'libxml2-dev', 'libzip-dev', 'lua5.1', 'liblua5.1-0-dev', 'libtbb-dev', 'libgdal-dev', 'libluabind-dev', 'libboost-all-dev']
 env: COMPILER='clang++-3.8' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON
@ -121,8 +121,11 @@ matrix:
 # compiler: clang
 # env: COMPILER='clang++' BUILD_TYPE='Release' BUILD_SHARED_LIBS=ON
+before_install:
+- source ./scripts/install_node.sh 4
 install:
+- npm install
 - DEPS_DIR="${TRAVIS_BUILD_DIR}/deps"
 - mkdir -p ${DEPS_DIR} && cd ${DEPS_DIR}
 - |
@ -142,12 +145,9 @@ before_script:
 if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then
 ./scripts/check_taginfo.py taginfo.json profiles/car.lua
 fi
-- rvm use 1.9.3
-- gem install bundler
-- bundle install
 - mkdir build && pushd build
 - export CXX=${COMPILER}
-- export OSRM_PORT=5000 OSRM_TIMEOUT=60
+- export OSRM_PORT=5000 OSRM_TIMEOUT=6000
 - cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} -DBUILD_TOOLS=1 -DENABLE_CCACHE=0
 script:
@ -163,7 +163,7 @@ script:
 - ./engine-tests
 - ./util-tests
 - popd
-- cucumber -p verify
+- npm test
 - make -C test/data
 - mkdir example/build && pushd example/build
 - cmake ..

Gemfile

@ -1,7 +0,0 @@
source "http://rubygems.org"
gem "cucumber"
gem "rake"
gem "osmlib-base"
gem "sys-proctable"
gem "rspec-expectations"

Gemfile.lock

@ -1,35 +0,0 @@
GEM
remote: http://rubygems.org/
specs:
builder (3.2.2)
cucumber (2.0.0)
builder (>= 2.1.2)
cucumber-core (~> 1.1.3)
diff-lcs (>= 1.1.3)
gherkin (~> 2.12)
multi_json (>= 1.7.5, < 2.0)
multi_test (>= 0.1.2)
cucumber-core (1.1.3)
gherkin (~> 2.12.0)
diff-lcs (1.2.5)
gherkin (2.12.2)
multi_json (~> 1.3)
multi_json (1.11.0)
multi_test (0.1.2)
osmlib-base (0.1.4)
rake (10.4.2)
rspec-expectations (3.2.1)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.2.0)
rspec-support (3.2.2)
sys-proctable (0.9.8)
PLATFORMS
ruby
DEPENDENCIES
cucumber
osmlib-base
rake
rspec-expectations
sys-proctable

190
Rakefile

@ -1,190 +0,0 @@
require 'OSM/StreamParser'
require 'socket'
require 'digest/sha1'
require 'cucumber/rake/task'
require 'sys/proctable'
BUILD_FOLDER = 'build'
DATA_FOLDER = 'sandbox'
PROFILE = 'bicycle'
OSRM_PORT = 5000
PROFILES_FOLDER = '../profiles'
Cucumber::Rake::Task.new do |t|
t.cucumber_opts = %w{--format pretty}
end
areas = {
:kbh => { :country => 'denmark', :bbox => 'top=55.6972 left=12.5222 right=12.624 bottom=55.6376' },
:frd => { :country => 'denmark', :bbox => 'top=55.7007 left=12.4765 bottom=55.6576 right=12.5698' },
:regh => { :country => 'denmark', :bbox => 'top=56.164 left=11.792 bottom=55.403 right=12.731' },
:denmark => { :country => 'denmark', :bbox => nil },
:skaane => { :country => 'sweden', :bbox => 'top=56.55 left=12.4 bottom=55.3 right=14.6' }
}
osm_data_area_name = ARGV[1] ? ARGV[1].to_s.to_sym : :kbh
raise "Unknown data area." unless areas[osm_data_area_name]
osm_data_country = areas[osm_data_area_name][:country]
osm_data_area_bbox = areas[osm_data_area_name][:bbox]
task osm_data_area_name.to_sym {} #define empty task to prevent rake from whining. will break if area has same name as a task
def each_process name, &block
Sys::ProcTable.ps do |process|
if process.comm.strip == name.strip && process.state != 'zombie'
yield process.pid.to_i, process.state.strip
end
end
end
def up?
find_pid('osrm-routed') != nil
end
def find_pid name
each_process(name) { |pid,state| return pid.to_i }
return nil
end
def wait_for_shutdown name
timeout = 10
(timeout*10).times do
return if find_pid(name) == nil
sleep 0.1
end
raise "*** Could not terminate #{name}."
end
desc "Rebuild and run tests."
task :default => [:build]
desc "Build using CMake."
task :build do
if Dir.exists? BUILD_FOLDER
Dir.chdir BUILD_FOLDER do
system "make"
end
else
system "mkdir build; cd build; cmake ..; make"
end
end
desc "Setup config files."
task :setup do
end
desc "Download OSM data."
task :download do
Dir.mkdir "#{DATA_FOLDER}" unless File.exist? "#{DATA_FOLDER}"
puts "Downloading..."
puts "curl http://download.geofabrik.de/europe/#{osm_data_country}-latest.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf"
raise "Error while downloading data." unless system "curl http://download.geofabrik.de/europe/#{osm_data_country}-latest.osm.pbf -o #{DATA_FOLDER}/#{osm_data_country}.osm.pbf"
if osm_data_area_bbox
puts "Cropping and converting to protobuffer..."
raise "Error while cropping data." unless system "osmosis --read-pbf file=#{DATA_FOLDER}/#{osm_data_country}.osm.pbf --bounding-box #{osm_data_area_bbox} --write-pbf file=#{DATA_FOLDER}/#{osm_data_area_name}.osm.pbf omitmetadata=true"
end
end
desc "Crop OSM data"
task :crop do
if osm_data_area_bbox
raise "Error while cropping data." unless system "osmosis --read-pbf file=#{DATA_FOLDER}/#{osm_data_country}.osm.pbf --bounding-box #{osm_data_area_bbox} --write-pbf file=#{DATA_FOLDER}/#{osm_data_area_name}.osm.pbf omitmetadata=true"
end
end
desc "Reprocess OSM data."
task :process => [:extract,:contract] do
end
desc "Extract OSM data."
task :extract do
Dir.chdir DATA_FOLDER do
raise "Error while extracting data." unless system "../#{BUILD_FOLDER}/osrm-extract #{osm_data_area_name}.osm.pbf --profile ../profiles/#{PROFILE}.lua"
end
end
desc "Contract OSM data."
task :contract do
Dir.chdir DATA_FOLDER do
raise "Error while contracting data." unless system "../#{BUILD_FOLDER}/osrm-contract #{osm_data_area_name}.osrm"
end
end
desc "Delete preprocessing files."
task :clean do
File.delete *Dir.glob("#{DATA_FOLDER}/*.osrm")
File.delete *Dir.glob("#{DATA_FOLDER}/*.osrm.*")
end
desc "Run all cucumber test"
task :test do
system "cucumber"
puts
end
desc "Run the routing server in the terminal. Press Ctrl-C to stop."
task :run do
Dir.chdir DATA_FOLDER do
system "../#{BUILD_FOLDER}/osrm-routed #{osm_data_area_name}.osrm --port #{OSRM_PORT}"
end
end
desc "Launch the routing server in the background. Use rake:down to stop it."
task :up do
Dir.chdir DATA_FOLDER do
abort("Already up.") if up?
pipe = IO.popen("../#{BUILD_FOLDER}/osrm-routed #{osm_data_area_name}.osrm --port #{OSRM_PORT} 1>>osrm-routed.log 2>>osrm-routed.log")
timeout = 5
(timeout*10).times do
begin
socket = TCPSocket.new('localhost', OSRM_PORT)
socket.puts 'ping'
rescue Errno::ECONNREFUSED
sleep 0.1
end
end
end
end
desc "Stop the routing server."
task :down do
pid = find_pid 'osrm-routed'
if pid
Process.kill 'TERM', pid
else
puts "Already down."
end
end
desc "Kill all osrm-extract, osrm-contract and osrm-routed processes."
task :kill do
each_process('osrm-routed') { |pid,state| Process.kill 'KILL', pid }
each_process('osrm-contract') { |pid,state| Process.kill 'KILL', pid }
each_process('osrm-extract') { |pid,state| Process.kill 'KILL', pid }
wait_for_shutdown 'osrm-routed'
wait_for_shutdown 'osrm-contract'
wait_for_shutdown 'osrm-extract'
end
desc "Get PIDs of all osrm-extract, osrm-contract and osrm-routed processes."
task :pid do
each_process 'osrm-routed' do |pid,state|
puts "#{pid}\t#{state}"
end
end
desc "Stop, reprocess and restart."
task :update => [:down,:process,:up] do
end
desc "Remove test cache files."
task :sweep do
system "rm test/cache/*"
end

config/cucumber.yml

@ -1,9 +0,0 @@
# config/cucumber.yml
##YAML Template
---
default: --require features --tags ~@todo --tags ~@bug --tag ~@stress
verify: --require features --tags ~@todo --tags ~@bug --tags ~@stress -f progress
jenkins: --require features --tags ~@todo --tags ~@bug --tags ~@stress --tags ~@options -f progress
bugs: --require features --tags @bug
todo: --require features --tags @todo
all: --require features

11
cucumber.js Normal file

@ -0,0 +1,11 @@
module.exports = {
default: '--require features --tags ~@todo --tags ~@bug --tag ~@stress',
verify: '--require features --tags ~@todo --tags ~@bug --tags ~@stress -f progress',
jenkins: '--require features --tags ~@todo --tags ~@bug --tags ~@stress --tags ~@options -f progress',
bugs: '--require features --tags @bug',
todo: '--require features --tags @todo',
all: '--require features'
}
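These profiles are a one-to-one port of the deleted config/cucumber.yml and are selected with cucumber-js's -p flag, e.g. (illustrative invocation, assuming a local install):

node_modules/.bin/cucumber-js -p verify

which is presumably what the npm test entry used by the new .travis.yml resolves to; the package.json that maps npm test to it is not shown in this section.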

View File

@ -4,7 +4,7 @@ Feature: Bicycle - Handle movable bridge
 Background:
 Given the profile "bicycle"
-Scenario: Car - Use a ferry route
+Scenario: Bicycle - Use a ferry route
 Given the node map
 | a | b | c | | |
 | | | d | | |
@ -27,7 +27,7 @@ Feature: Bicycle - Handle movable bridge
 | c | f | cde,efg | 5,1 |
 | c | g | cde,efg | 5,1 |
-Scenario: Car - Properly handle durations
+Scenario: Bicycle - Properly handle durations
 Given the node map
 | a | b | c | | |
 | | | d | | |

View File

@ -9,7 +9,7 @@ Feature: Bike - Mode flag
 Background:
 Given the profile "bicycle"
 Scenario: Bike - Mode when using a ferry
 Given the node map
 | a | b | |

View File

@ -28,6 +28,7 @@ Feature: Raster - weights
 0 0 0 250
 0 0 0 0
 """
+And the data has been saved to disk
 Scenario: Weighting not based on raster sources
 Given the profile "testbot"

View File

@ -0,0 +1,273 @@
var util = require('util');
var path = require('path');
var fs = require('fs');
var d3 = require('d3-queue');
var OSM = require('../support/build_osm');
module.exports = function () {
this.Given(/^the profile "([^"]*)"$/, (profile, callback) => {
this.setProfile(profile, callback);
});
this.Given(/^the extract extra arguments "(.*?)"$/, (args, callback) => {
this.setExtractArgs(args);
callback();
});
this.Given(/^the contract extra arguments "(.*?)"$/, (args, callback) => {
this.setContractArgs(args);
callback();
});
this.Given(/^a grid size of (\d+) meters$/, (meters, callback) => {
this.setGridSize(meters);
callback();
});
this.Given(/^the origin ([-+]?[0-9]*\.?[0-9]+),([-+]?[0-9]*\.?[0-9]+)$/, (lat, lon, callback) => {
this.setOrigin([parseFloat(lon), parseFloat(lat)]);
callback();
});
this.Given(/^the shortcuts$/, (table, callback) => {
var q = d3.queue();
var addShortcut = (row, cb) => {
this.shortcutsHash[row.key] = row.value;
cb();
};
table.hashes().forEach((row) => {
q.defer(addShortcut, row);
});
q.awaitAll(callback);
});
this.Given(/^the node map$/, (table, callback) => {
var q = d3.queue();
var addNode = (name, ri, ci, cb) => {
if (name) {
if (name.length !== 1) throw new Error(util.format('*** invalid node name %s, must be a single character', name));
if (!name.match(/[a-z0-9]/)) throw new Error(util.format('*** invalid node name %s, must be alphanumeric', name));
var lonLat;
if (name.match(/[a-z]/)) {
if (this.nameNodeHash[name]) throw new Error(util.format('*** duplicate node %s', name));
lonLat = this.tableCoordToLonLat(ci, ri);
this.addOSMNode(name, lonLat[0], lonLat[1], null);
} else {
if (this.locationHash[name]) throw new Error(util.format('*** duplicate node %s', name));
lonLat = this.tableCoordToLonLat(ci, ri);
this.addLocation(name, lonLat[0], lonLat[1], null);
}
cb();
}
else cb();
};
table.raw().forEach((row, ri) => {
row.forEach((name, ci) => {
q.defer(addNode, name, ri, ci);
});
});
q.awaitAll(callback);
});
this.Given(/^the node locations$/, (table, callback) => {
var q = d3.queue();
var addNodeLocations = (row, cb) => {
var name = row.node;
if (this.findNodeByName(name)) throw new Error(util.format('*** duplicate node %s', name));
if (name.match(/[a-z]/)) {
var id = row.id && parseInt(row.id);
this.addOSMNode(name, row.lon, row.lat, id);
} else {
this.addLocation(name, row.lon, row.lat);
}
cb();
};
table.hashes().forEach((row) => q.defer(addNodeLocations, row));
q.awaitAll(callback);
});
this.Given(/^the nodes$/, (table, callback) => {
var q = d3.queue();
var addNode = (row, cb) => {
var name = row.node,
node = this.findNodeByName(name);
delete row.node;
if (!node) throw new Error(util.format('*** unknown node %s', name));
for (var key in row) {
node.addTag(key, row[key]);
}
cb();
};
table.hashes().forEach((row) => q.defer(addNode, row));
q.awaitAll(callback);
});
this.Given(/^the ways$/, (table, callback) => {
if (this.osm_str) throw new Error('*** Map data already defined - did you pass an input file in this scenario?');
var q = d3.queue();
var addWay = (row, cb) => {
var way = new OSM.Way(this.makeOSMId(), this.OSM_USER, this.OSM_TIMESTAMP, this.OSM_UID);
var nodes = row.nodes;
if (this.nameWayHash[nodes]) throw new Error(util.format('*** duplicate way %s', nodes));
for (var i=0; i<nodes.length; i++) {
var c = nodes[i];
if (!c.match(/[a-z]/)) throw new Error(util.format('*** ways can only use names a-z (%s)', c));
var node = this.findNodeByName(c);
if (!node) throw new Error(util.format('*** unknown node %s', c));
way.addNode(node);
}
var tags = {
highway: 'primary'
};
for (var key in row) {
tags[key] = row[key];
}
delete tags.nodes;
if (row.highway === '(nil)') delete tags.highway;
if (row.name === undefined)
tags.name = nodes;
else if (row.name === '""' || row.name === "''") // eslint-disable-line quotes
tags.name = '';
else if (row.name === '' || row.name === '(nil)')
delete tags.name;
else
tags.name = row.name;
way.setTags(tags);
this.OSMDB.addWay(way);
this.nameWayHash[nodes] = way;
cb();
};
table.hashes().forEach((row) => q.defer(addWay, row));
q.awaitAll(callback);
});
this.Given(/^the relations$/, (table, callback) => {
if (this.osm_str) throw new Error('*** Map data already defined - did you pass an input file in this scenario?');
var q = d3.queue();
var addRelation = (row, cb) => {
var relation = new OSM.Relation(this.makeOSMId(), this.OSM_USER, this.OSM_TIMESTAMP, this.OSM_UID);
for (var key in row) {
var isNode = key.match(/^node:(.*)/),
isWay = key.match(/^way:(.*)/),
isColonSeparated = key.match(/^(.*):(.*)/);
if (isNode) {
row[key].split(',').map(function(v) { return v.trim(); }).forEach((nodeName) => {
if (nodeName.length !== 1) throw new Error(util.format('*** invalid relation node member "%s"', nodeName));
var node = this.findNodeByName(nodeName);
if (!node) throw new Error(util.format('*** unknown relation node member "%s"', nodeName));
relation.addMember('node', node.id, isNode[1]);
});
} else if (isWay) {
row[key].split(',').map(function(v) { return v.trim(); }).forEach((wayName) => {
var way = this.findWayByName(wayName);
if (!way) throw new Error(util.format('*** unknown relation way member "%s"', wayName));
relation.addMember('way', way.id, isWay[1]);
});
} else if (isColonSeparated && isColonSeparated[1] !== 'restriction') {
throw new Error(util.format('*** unknown relation member type "%s:%s", must be either "node" or "way"', isColonSeparated[1], isColonSeparated[2]));
} else {
relation.addTag(key, row[key]);
}
}
relation.uid = this.OSM_UID;
this.OSMDB.addRelation(relation);
cb();
};
table.hashes().forEach((row) => q.defer(addRelation, row));
q.awaitAll(callback);
});
this.Given(/^the input file ([^"]*)$/, (file, callback) => {
if (path.extname(file) !== '.osm') throw new Error('*** Input file must be in .osm format');
fs.readFile(file, 'utf8', (err, data) => {
if (!err) this.osm_str = data.toString();
callback(err);
});
});
this.Given(/^the raster source$/, (data, callback) => {
fs.writeFile(path.resolve(this.TEST_FOLDER, 'rastersource.asc'), data, callback);
});
this.Given(/^the speed file$/, (data, callback) => {
fs.writeFile(path.resolve(this.TEST_FOLDER, 'speeds.csv'), data, callback);
});
this.Given(/^the data has been saved to disk$/, (callback) => {
try {
this.reprocess(callback);
} catch(e) {
this.processError = e;
callback(e);
}
});
this.Given(/^the data has been extracted$/, (callback) => {
this.writeAndExtract((err) => {
if (err) this.processError = err;
callback();
});
});
this.Given(/^the data has been contracted$/, (callback) => {
this.reprocess((err) => {
if (err) this.processError = err;
callback();
});
});
this.Given(/^osrm\-routed is stopped$/, (callback) => {
this.OSRMLoader.shutdown((err) => {
if (err) this.processError = err;
callback();
});
});
this.Given(/^data is loaded directly/, () => {
this.loadMethod = 'directly';
});
this.Given(/^data is loaded with datastore$/, () => {
this.loadMethod = 'datastore';
});
this.Given(/^the HTTP method "([^"]*)"$/, (method, callback) => {
this.httpMethod = method;
callback();
});
};
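Nearly every step above follows the same d3-queue pattern: each table row is deferred as an asynchronous task, and the Cucumber callback only fires once awaitAll reports that all tasks have completed. A self-contained sketch of that pattern (the function and variable names here are stand-ins, not identifiers from this commit):

var d3 = require('d3-queue');

// Run processRow(row, cb) for every row, then call done(err, results) once.
function forEachRow(rows, processRow, done) {
    var q = d3.queue();
    rows.forEach(function (row) {
        q.defer(processRow, row); // extra defer() arguments are passed before the node-style callback
    });
    q.awaitAll(done);
}

This is why the step bodies accept a Cucumber callback and hand it straight to q.awaitAll instead of invoking it themselves.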

View File

@ -1,202 +0,0 @@
Given /^the profile "([^"]*)"$/ do |profile|
set_profile profile
end
Given(/^the import format "(.*?)"$/) do |format|
set_input_format format
end
Given /^the extract extra arguments "(.*?)"$/ do |args|
set_extract_args args
end
Given /^the contract extra arguments "(.*?)"$/ do |args|
set_contract_args args
end
Given /^a grid size of (\d+) meters$/ do |meters|
set_grid_size meters
end
Given /^the origin ([-+]?[0-9]*\.?[0-9]+),([-+]?[0-9]*\.?[0-9]+)$/ do |lat,lon|
set_origin [lon.to_f,lat.to_f]
end
Given /^the shortcuts$/ do |table|
table.hashes.each do |row|
shortcuts_hash[ row['key'] ] = row['value']
end
end
Given /^the node map$/ do |table|
table.raw.each_with_index do |row,ri|
row.each_with_index do |name,ci|
unless name.empty?
raise "*** node invalid name '#{name}', must be single characters" unless name.size == 1
raise "*** invalid node name '#{name}', must me alphanumeric" unless name.match /[a-z0-9]/
if name.match /[a-z]/
raise "*** duplicate node '#{name}'" if name_node_hash[name]
add_osm_node name, *table_coord_to_lonlat(ci,ri), nil
else
raise "*** duplicate node '#{name}'" if location_hash[name]
add_location name, *table_coord_to_lonlat(ci,ri)
end
end
end
end
end
Given /^the node locations$/ do |table|
table.hashes.each do |row|
name = row['node']
raise "*** duplicate node '#{name}'" if find_node_by_name name
if name.match /[a-z]/
id = row['id']
id = id.to_i if id
add_osm_node name, row['lon'].to_f, row['lat'].to_f, id
else
add_location name, row['lon'].to_f, row['lat'].to_f
end
end
end
Given /^the nodes$/ do |table|
table.hashes.each do |row|
name = row.delete 'node'
node = find_node_by_name(name)
raise "*** unknown node '#{c}'" unless node
node << row
end
end
Given /^the ways$/ do |table|
raise "*** Map data already defined - did you pass an input file in this scenaria?" if @osm_str
table.hashes.each do |row|
way = OSM::Way.new make_osm_id, OSM_USER, OSM_TIMESTAMP
way.uid = OSM_UID
nodes = row.delete 'nodes'
raise "*** duplicate way '#{nodes}'" if name_way_hash[nodes]
nodes.each_char do |c|
raise "*** ways can only use names a-z, '#{name}'" unless c.match /[a-z]/
node = find_node_by_name(c)
raise "*** unknown node '#{c}'" unless node
way << node
end
defaults = { 'highway' => 'primary' }
tags = defaults.merge(row)
if row['highway'] == '(nil)'
tags.delete 'highway'
end
if row['name'] == nil
tags['name'] = nodes
elsif (row['name'] == '""') || (row['name'] == "''")
tags['name'] = ''
elsif row['name'] == '' || row['name'] == '(nil)'
tags.delete 'name'
else
tags['name'] = row['name']
end
way << tags
osm_db << way
name_way_hash[nodes] = way
end
end
Given /^the relations$/ do |table|
raise "*** Map data already defined - did you pass an input file in this scenaria?" if @osm_str
table.hashes.each do |row|
relation = OSM::Relation.new make_osm_id, OSM_USER, OSM_TIMESTAMP
row.each_pair do |key,value|
if key =~ /^node:(.*)/
value.split(',').map { |v| v.strip }.each do |node_name|
raise "***invalid relation node member '#{node_name}', must be single character" unless node_name.size == 1
node = find_node_by_name(node_name)
raise "*** unknown relation node member '#{node_name}'" unless node
relation << OSM::Member.new( 'node', node.id, $1 )
end
elsif key =~ /^way:(.*)/
value.split(',').map { |v| v.strip }.each do |way_name|
way = find_way_by_name(way_name)
raise "*** unknown relation way member '#{way_name}'" unless way
relation << OSM::Member.new( 'way', way.id, $1 )
end
elsif key =~ /^(.*):(.*)/ && "#{$1}" != 'restriction'
raise "*** unknown relation member type '#{$1}:#{$2}', must be either 'node' or 'way'"
else
relation << { key => value }
end
end
relation.uid = OSM_UID
osm_db << relation
end
end
Given /^the defaults$/ do
end
Given /^the input file ([^"]*)$/ do |file|
raise "*** Input file must in .osm format" unless File.extname(file)=='.osm'
@osm_str = File.read file
end
Given /^the raster source$/ do |data|
Dir.chdir TEST_FOLDER do
File.open("rastersource.asc", "w") {|f| f.write(data)}
end
end
Given /^the speed file$/ do |data|
Dir.chdir TEST_FOLDER do
File.open("speeds.csv", "w") {|f| f.write(data)}
end
end
Given /^the data has been saved to disk$/ do
begin
write_input_data
rescue OSRMError => e
@process_error = e
end
end
Given /^the data has been extracted$/ do
begin
write_input_data
extract_data unless extracted?
rescue OSRMError => e
@process_error = e
end
end
Given /^the data has been contracted$/ do
begin
reprocess
rescue OSRMError => e
@process_error = e
end
end
Given /^osrm\-routed is stopped$/ do
begin
OSRMLoader.shutdown
rescue OSRMError => e
@process_error = e
end
end
Given /^data is loaded directly/ do
@load_method = 'directly'
end
Given /^data is loaded with datastore$/ do
@load_method = 'datastore'
end
Given /^the HTTP method "([^"]*)"$/ do |method|
@http_method = method
end

View File

@ -0,0 +1,82 @@
var util = require('util');
module.exports = function () {
this.When(/^I request a travel time matrix I should get$/, (table, callback) => {
var NO_ROUTE = 2147483647; // MAX_INT
var tableRows = table.raw();
if (tableRows[0][0] !== '') throw new Error('*** Top-left cell of matrix table must be empty');
var waypoints = [],
columnHeaders = tableRows[0].slice(1),
rowHeaders = tableRows.map((h) => h[0]).slice(1),
symmetric = columnHeaders.every((ele, i) => ele === rowHeaders[i]);
if (symmetric) {
columnHeaders.forEach((nodeName) => {
var node = this.findNodeByName(nodeName);
if (!node) throw new Error(util.format('*** unknown node "%s"', nodeName));
waypoints.push({ coord: node, type: 'loc' });
});
} else {
columnHeaders.forEach((nodeName) => {
var node = this.findNodeByName(nodeName);
if (!node) throw new Error(util.format('*** unknown node "%s"', nodeName));
waypoints.push({ coord: node, type: 'dst' });
});
rowHeaders.forEach((nodeName) => {
var node = this.findNodeByName(nodeName);
if (!node) throw new Error(util.format('*** unknown node "%s"', nodeName));
waypoints.push({ coord: node, type: 'src' });
});
}
var actual = [];
actual.push(table.headers);
this.reprocessAndLoadData(() => {
// compute matrix
var params = this.queryParams;
this.requestTable(waypoints, params, (err, response) => {
if (err) return callback(err);
if (!response.body.length) return callback(new Error('Invalid response body'));
var jsonResult = JSON.parse(response.body),
result = jsonResult['distance_table'].map((row) => {
var hashes = {};
row.forEach((c, j) => {
hashes[tableRows[0][j+1]] = c;
});
return hashes;
});
var testRow = (row, ri, cb) => {
var ok = true;
for (var k in result[ri]) {
if (this.FuzzyMatch.match(result[ri][k], row[k])) {
result[ri][k] = row[k];
} else if (row[k] === '' && result[ri][k] === NO_ROUTE) {
result[ri][k] = '';
} else {
result[ri][k] = result[ri][k].toString();
ok = false;
}
}
if (!ok) {
var failed = { attempt: 'distance_matrix', query: this.query, response: response };
this.logFail(row, result[ri], [failed]);
}
result[ri][''] = row[''];
cb(null, result[ri]);
};
this.processRowsAndDiff(table, testRow, callback);
});
});
});
};

View File

@ -1,66 +0,0 @@
When /^I request a travel time matrix I should get$/ do |table|
no_route = 2147483647 # MAX_INT
raise "*** Top-left cell of matrix table must be empty" unless table.headers[0]==""
waypoints = []
column_headers = table.headers[1..-1]
row_headers = table.rows.map { |h| h.first }
symmetric = Set.new(column_headers) == Set.new(row_headers)
if symmetric then
column_headers.each do |node_name|
node = find_node_by_name(node_name)
raise "*** unknown node '#{node_name}" unless node
waypoints << {:coord => node, :type => "loc"}
end
else
column_headers.each do |node_name|
node = find_node_by_name(node_name)
raise "*** unknown node '#{node_name}" unless node
waypoints << {:coord => node, :type => "dst"}
end
row_headers.each do |node_name|
node = find_node_by_name(node_name)
raise "*** unknown node '#{node_name}" unless node
waypoints << {:coord => node, :type => "src"}
end
end
reprocess
actual = []
actual << table.headers
OSRMLoader.load(self,"#{contracted_file}.osrm") do
# compute matrix
params = @query_params
response = request_table waypoints, params
if response.body.empty? == false
json_result = JSON.parse response.body
result = json_result["distance_table"]
end
# compare actual and expected result, one row at a time
table.rows.each_with_index do |row,ri|
# fuzzy match
ok = true
0.upto(result[ri].size-1) do |i|
if FuzzyMatch.match result[ri][i], row[i+1]
result[ri][i] = row[i+1]
elsif row[i+1]=="" and result[ri][i]==no_route
result[ri][i] = ""
else
result[ri][i] = result[ri][i].to_s
ok = false
end
end
# add row header
r = [row[0],result[ri]].flatten
# store row for comparison
actual << r
end
end
table.diff! actual
end

View File

@ -0,0 +1,30 @@
var util = require('util');
module.exports = function () {
this.Before((scenario, callback) => {
this.scenarioTitle = scenario.getName();
this.loadMethod = this.DEFAULT_LOAD_METHOD;
this.queryParams = [];
var d = new Date();
this.scenarioTime = util.format('%d-%d-%dT%s:%s:%sZ', d.getFullYear(), d.getMonth()+1, d.getDate(), d.getHours(), d.getMinutes(), d.getSeconds());
this.resetData();
this.hasLoggedPreprocessInfo = false;
this.hasLoggedScenarioInfo = false;
this.setGridSize(this.DEFAULT_GRID_SIZE);
this.setOrigin(this.DEFAULT_ORIGIN);
callback();
});
this.Before('@ignore-platform-windows', () => {
this.skipThisScenario();
});
this.Before('@ignore-platform-unix', () => {
this.skipThisScenario();
});
this.Before('@ignore-platform-mac', () => {
this.skipThisScenario();
});
};

View File

@ -1,11 +0,0 @@
Before '@ignore-platform-windows' do
skip_this_scenario
end
Before '@ignore-platform-unix' do
skip_this_scenario
end
Before '@ignore-platform-mac' do
skip_this_scenario
end

View File

@ -0,0 +1,174 @@
var util = require('util');
var d3 = require('d3-queue');
module.exports = function () {
this.When(/^I match I should get$/, (table, callback) => {
var got;
this.reprocessAndLoadData(() => {
var testRow = (row, ri, cb) => {
var afterRequest = (err, res) => {
if (err) return cb(err);
var json;
var headers = new Set(table.raw()[0]);
if (res.body.length) {
json = JSON.parse(res.body);
}
if (headers.has('status')) {
got.status = json.status.toString();
}
if (headers.has('message')) {
got.message = json.status_message;
}
if (headers.has('#')) {
// comment column
got['#'] = row['#'];
}
var subMatchings = [],
turns = '',
route = '',
duration = '';
if (res.statusCode === 200) {
if (headers.has('matchings')) {
subMatchings = json.matchings.filter(m => !!m).map(sub => sub.matched_points);
}
if (headers.has('turns')) {
if (json.matchings.length != 1) throw new Error('*** Checking turns only supported for matchings with one subtrace');
turns = this.turnList(json.matchings[0].instructions);
}
if (headers.has('route')) {
if (json.matchings.length != 1) throw new Error('*** Checking route only supported for matchings with one subtrace');
route = this.wayList(json.matchings[0].instructions);
}
if (headers.has('duration')) {
if (json.matchings.length != 1) throw new Error('*** Checking duration only supported for matchings with one subtrace');
duration = json.matchings[0].route_summary.total_time;
}
}
if (headers.has('turns')) {
got.turns = turns;
}
if (headers.has('route')) {
got.route = route;
}
if (headers.has('duration')) {
got.duration = duration.toString();
}
var ok = true;
var encodedResult = '',
extendedTarget = '';
var q = d3.queue();
var testSubMatching = (sub, si, scb) => {
if (si >= subMatchings.length) {
ok = false;
q.abort();
scb();
} else {
var sq = d3.queue();
var testSubNode = (ni, ncb) => {
var node = this.findNodeByName(sub[ni]),
outNode = subMatchings[si][ni];
if (this.FuzzyMatch.matchLocation(outNode, node)) {
encodedResult += sub[ni];
extendedTarget += sub[ni];
} else {
encodedResult += util.format('? [%s,%s]', outNode[0], outNode[1]);
extendedTarget += util.format('%s [%d,%d]', sub[ni], node.lat, node.lon);
ok = false;
}
ncb();
};
for (var i=0; i<sub.length; i++) {
sq.defer(testSubNode, i);
}
sq.awaitAll(scb);
}
};
row.matchings.split(',').forEach((sub, si) => {
q.defer(testSubMatching, sub, si);
});
q.awaitAll(() => {
if (ok) {
if (headers.has('matchings')) {
got.matchings = row.matchings;
}
if (headers.has('timestamps')) {
got.timestamps = row.timestamps;
}
} else {
got.matchings = encodedResult;
row.matchings = extendedTarget;
this.logFail(row, got, { matching: { query: this.query, response: res } });
}
cb(null, got);
});
};
if (row.request) {
got = {};
got.request = row.request;
this.requestUrl(row.request, afterRequest);
} else {
var params = this.queryParams;
got = {};
for (var k in row) {
var match = k.match(/param:(.*)/);
if (match) {
if (row[k] === '(nil)') {
params[match[1]] = null;
} else if (row[k]) {
params[match[1]] = [row[k]];
}
got[k] = row[k];
}
}
var trace = [],
timestamps = [];
if (row.trace) {
for (var i=0; i<row.trace.length; i++) {
var n = row.trace[i],
node = this.findNodeByName(n);
if (!node) throw new Error(util.format('*** unknown waypoint node "%s"', n));
trace.push(node);
}
if (row.timestamps) {
timestamps = row.timestamps.split(' ').filter(s => !!s).map(t => parseInt(t));
}
got.trace = row.trace;
this.requestMatching(trace, timestamps, params, afterRequest);
} else {
throw new Error('*** no trace');
}
}
};
this.processRowsAndDiff(table, testRow, callback);
});
});
};

View File

@ -1,124 +0,0 @@
When /^I match I should get$/ do |table|
reprocess
actual = []
OSRMLoader.load(self,"#{contracted_file}.osrm") do
table.hashes.each_with_index do |row,ri|
if row['request']
got = {'request' => row['request'] }
response = request_url row['request']
else
params = @query_params
got = {}
row.each_pair do |k,v|
if k =~ /param:(.*)/
if v=='(nil)'
params[$1]=nil
elsif v!=nil
params[$1]=[v]
end
got[k]=v
end
end
trace = []
timestamps = []
if row['trace']
row['trace'].each_char do |n|
node = find_node_by_name(n.strip)
raise "*** unknown waypoint node '#{n.strip}" unless node
trace << node
end
if row['timestamps']
timestamps = row['timestamps'].split(" ").compact.map { |t| t.to_i}
end
got = got.merge({'trace' => row['trace'] })
response = request_matching trace, timestamps, params
else
raise "*** no trace"
end
end
if response.body.empty? == false
json = JSON.parse response.body
end
if table.headers.include? 'status'
got['status'] = json['status'].to_s
end
if table.headers.include? 'message'
got['message'] = json['status_message']
end
if table.headers.include? '#' # comment column
got['#'] = row['#'] # copy value so it always match
end
sub_matchings = []
turns = ''
route = ''
duration = ''
if response.code == "200"
if table.headers.include? 'matchings'
sub_matchings = json['matchings'].compact.map { |sub| sub['matched_points']}
end
if table.headers.include? 'turns'
raise "*** Checking turns only support for matchings with one subtrace" unless json['matchings'].size == 1
turns = turn_list json['matchings'][0]['instructions']
end
if table.headers.include? 'route'
raise "*** Checking route only support for matchings with one subtrace" unless json['matchings'].size == 1
route = way_list json['matchings'][0]['instructions']
if table.headers.include? 'duration'
raise "*** Checking duration only support for matchings with one subtrace" unless json['matchings'].size == 1
duration = json['matchings'][0]['route_summary']['total_time']
end
end
end
if table.headers.include? 'turns'
got['turns'] = turns
end
if table.headers.include? 'route'
got['route'] = route
end
if table.headers.include? 'duration'
got['duration'] = duration.to_s
end
ok = true
encoded_result = ""
extended_target = ""
row['matchings'].split(',').each_with_index do |sub, sub_idx|
if sub_idx >= sub_matchings.length
ok = false
break
end
sub.length.times do |node_idx|
node = find_node_by_name(sub[node_idx])
out_node = sub_matchings[sub_idx][node_idx]
if FuzzyMatch.match_location out_node, node
encoded_result += sub[node_idx]
extended_target += sub[node_idx]
else
encoded_result += "? [#{out_node[0]},#{out_node[1]}]"
extended_target += "#{sub[node_idx]} [#{node.lat},#{node.lon}]"
ok = false
end
end
end
if ok
if table.headers.include? 'matchings'
got['matchings'] = row['matchings']
end
if table.headers.include? 'timestamps'
got['timestamps'] = row['timestamps']
end
else
got['matchings'] = encoded_result
row['matchings'] = extended_target
log_fail row,got, { 'matching' => {:query => @query, :response => response} }
end
actual << got
end
end
table.diff! actual
end

View File

@ -0,0 +1,53 @@
var util = require('util');
module.exports = function () {
this.When(/^I request nearest I should get$/, (table, callback) => {
this.reprocessAndLoadData(() => {
var testRow = (row, ri, cb) => {
var inNode = this.findNodeByName(row.in);
if (!inNode) throw new Error(util.format('*** unknown in-node "%s"', row.in));
var outNode = this.findNodeByName(row.out);
if (!outNode) throw new Error(util.format('*** unknown out-node "%s"', row.out));
this.requestNearest(inNode, this.queryParams, (err, response) => {
if (err) return cb(err);
var coord;
if (response.statusCode === 200 && response.body.length) {
var json = JSON.parse(response.body);
coord = json.mapped_coordinate;
var got = { in: row.in, out: row.out };
var ok = true;
Object.keys(row).forEach((key) => {
if (key === 'out') {
if (this.FuzzyMatch.matchLocation(coord, outNode)) {
got[key] = row[key];
} else {
row[key] = util.format('%s [%d,%d]', row[key], outNode.lat, outNode.lon);
ok = false;
}
}
});
if (!ok) {
var failed = { attempt: 'nearest', query: this.query, response: response };
this.logFail(row, got, [failed]);
}
cb(null, got);
}
else {
cb();
}
});
};
this.processRowsAndDiff(table, testRow, callback);
});
});
};

View File

@ -1,51 +0,0 @@
When /^I request nearest I should get$/ do |table|
reprocess
actual = []
OSRMLoader.load(self,"#{contracted_file}.osrm") do
table.hashes.each_with_index do |row,ri|
in_node = find_node_by_name row['in']
raise "*** unknown in-node '#{row['in']}" unless in_node
out_node = find_node_by_name row['out']
raise "*** unknown out-node '#{row['out']}" unless out_node
response = request_nearest in_node, @query_params
if response.code == "200" && response.body.empty? == false
json = JSON.parse response.body
if json['status'] == 200
coord = json['mapped_coordinate']
end
end
got = {'in' => row['in'], 'out' => coord }
ok = true
row.keys.each do |key|
if key=='out'
if FuzzyMatch.match_location coord, out_node
got[key] = row[key]
else
row[key] = "#{row[key]} [#{out_node.lat},#{out_node.lon}]"
ok = false
end
end
end
unless ok
failed = { :attempt => 'nearest', :query => @query, :response => response }
log_fail row,got,[failed]
end
actual << got
end
end
table.diff! actual
end
When /^I request nearest (\d+) times I should get$/ do |n,table|
ok = true
n.to_i.times do
ok = false unless step "I request nearest I should get", table
end
ok
end

View File

@ -0,0 +1,69 @@
var assert = require('assert');
module.exports = function () {
this.When(/^I run "osrm\-routed\s?(.*?)"$/, { timeout: this.SHUTDOWN_TIMEOUT }, (options, callback) => {
this.runBin('osrm-routed', options, () => {
callback();
});
});
this.When(/^I run "osrm\-extract\s?(.*?)"$/, (options, callback) => {
this.runBin('osrm-extract', options, () => {
callback();
});
});
this.When(/^I run "osrm\-contract\s?(.*?)"$/, (options, callback) => {
this.runBin('osrm-contract', options, () => {
callback();
});
});
this.When(/^I run "osrm\-datastore\s?(.*?)"$/, (options, callback) => {
this.runBin('osrm-datastore', options, () => {
callback();
});
});
this.Then(/^it should exit with code (\d+)$/, (code) => {
assert.equal(this.exitCode, parseInt(code));
});
this.Then(/^stdout should contain "(.*?)"$/, (str) => {
assert.ok(this.stdout.indexOf(str) > -1);
});
this.Then(/^stderr should contain "(.*?)"$/, (str) => {
assert.ok(this.stderr.indexOf(str) > -1);
});
this.Then(/^stdout should contain \/(.*)\/$/, (regexStr) => {
var re = new RegExp(regexStr);
assert.ok(this.stdout.match(re));
});
this.Then(/^stderr should contain \/(.*)\/$/, (regexStr) => {
var re = new RegExp(regexStr);
assert.ok(this.stderr.match(re));
});
this.Then(/^stdout should be empty$/, () => {
assert.equal(this.stdout.trim(), '');
});
this.Then(/^stderr should be empty$/, () => {
assert.equal(this.stderr.trim(), '');
});
this.Then(/^stdout should contain (\d+) lines?$/, (lines) => {
assert.equal(this.stdout.split('\n').length - 1, parseInt(lines));
});
this.Given(/^the query options$/, (table, callback) => {
table.raw().forEach((tuple) => {
this.queryParams.push(tuple);
});
callback();
});
};

View File

@ -1,57 +0,0 @@
When(/^I run "osrm\-routed\s?(.*?)"$/) do |options|
begin
Timeout.timeout(SHUTDOWN_TIMEOUT) { run_bin 'osrm-routed', options }
rescue Timeout::Error
raise "*** osrm-routed didn't quit. Maybe the --trial option wasn't used?"
end
end
When(/^I run "osrm\-extract\s?(.*?)"$/) do |options|
run_bin 'osrm-extract', options
end
When(/^I run "osrm\-contract\s?(.*?)"$/) do |options|
run_bin 'osrm-contract', options
end
When(/^I run "osrm\-datastore\s?(.*?)"$/) do |options|
run_bin 'osrm-datastore', options
end
Then /^it should exit with code (\d+)$/ do |code|
expect(@exit_code).to eq( code.to_i )
end
Then /^stdout should contain "(.*?)"$/ do |str|
expect(@stdout).to include(str)
end
Then /^stderr should contain "(.*?)"$/ do |str|
expect(@stderr).to include(str)
end
Then(/^stdout should contain \/(.*)\/$/) do |regex_str|
regex = Regexp.new regex_str
expect(@stdout).to match( regex )
end
Then(/^stderr should contain \/(.*)\/$/) do |regex_str|
regex = Regexp.new regex_str
expect(@stderr).to match( regex )
end
Then /^stdout should be empty$/ do
expect(@stdout).to eq("")
end
Then /^stderr should be empty$/ do
expect(@stderr).to eq("")
end
Then /^stdout should contain (\d+) lines?$/ do |lines|
expect(@stdout.lines.count).to eq( lines.to_i )
end
Given (/^the query options$/) do |table|
table.rows_hash.each { |k,v| @query_params << [k, v] }
end

View File

@ -0,0 +1,60 @@
var assert = require('assert');
module.exports = function () {
this.When(/^I request \/(.*)$/, (path, callback) => {
this.reprocessAndLoadData(() => {
this.requestPath(path, [], (err, res, body) => {
this.response = res;
callback(err, res, body);
});
});
});
this.Then(/^I should get a response/, () => {
this.ShouldGetAResponse();
});
this.Then(/^response should be valid JSON$/, (callback) => {
this.ShouldBeValidJSON(callback);
});
this.Then(/^response should be well-formed$/, () => {
this.ShouldBeWellFormed();
});
this.Then(/^status code should be (\d+)$/, (code, callback) => {
try {
this.json = JSON.parse(this.response.body);
} catch(e) {
return callback(e);
}
assert.equal(this.json.status, parseInt(code));
callback();
});
this.Then(/^status message should be "(.*?)"$/, (message, callback) => {
try {
this.json = JSON.parse(this.response.body);
} catch(e) {
return callback(e);
}
assert.equal(this.json.status_message, message);
callback();
});
this.Then(/^response should be a well-formed route$/, () => {
this.ShouldBeWellFormed();
assert.equal(typeof this.json.status_message, 'string');
assert.equal(typeof this.json.route_summary, 'object');
assert.equal(typeof this.json.route_geometry, 'string');
assert.ok(Array.isArray(this.json.route_instructions));
assert.ok(Array.isArray(this.json.via_points));
assert.ok(Array.isArray(this.json.via_indices));
});
this.Then(/^"([^"]*)" should return code (\d+)$/, (binary, code) => {
assert.ok(this.processError instanceof this.OSRMError);
assert.equal(this.processError.process, binary);
assert.equal(parseInt(this.processError.code), parseInt(code));
});
};

View File

@ -1,46 +0,0 @@
When /^I request \/(.*)$/ do |path|
reprocess
OSRMLoader.load(self,"#{contracted_file}.osrm") do
@response = request_path path, []
end
end
Then /^I should get a response/ do
expect(@response.code).to eq("200")
expect(@response.body).not_to eq(nil)
expect(@response.body).not_to eq('')
end
Then /^response should be valid JSON$/ do
@json = JSON.parse @response.body
end
Then /^response should be well-formed$/ do
expect(@json['status'].class).to eq(Fixnum)
end
Then /^status code should be (\d+)$/ do |code|
@json = JSON.parse @response.body
expect(@json['status']).to eq(code.to_i)
end
Then /^status message should be "(.*?)"$/ do |message|
@json = JSON.parse @response.body
expect(@json['status_message']).to eq(message)
end
Then /^response should be a well-formed route$/ do
step "response should be well-formed"
expect(@json['status_message'].class).to eq(String)
expect(@json['route_summary'].class).to eq(Hash)
expect(@json['route_geometry'].class).to eq(String)
expect(@json['route_instructions'].class).to eq(Array)
expect(@json['via_points'].class).to eq(Array)
expect(@json['via_indices'].class).to eq(Array)
end
Then /^"([^"]*)" should return code (\d+)$/ do |binary, code|
expect(@process_error.is_a?(OSRMError)).to eq(true)
expect(@process_error.process).to eq(binary)
expect(@process_error.code.to_i).to eq(code.to_i)
end

View File

@ -0,0 +1,110 @@
var util = require('util');
var d3 = require('d3-queue');
var classes = require('../support/data_classes');
module.exports = function () {
this.Then(/^routability should be$/, (table, callback) => {
this.buildWaysFromTable(table, () => {
var directions = ['forw','backw','bothw'];
if (!directions.some(k => !!table.hashes()[0].hasOwnProperty(k))) {
throw new Error('*** routability table must contain either "forw", "backw" or "bothw" column');
}
this.reprocessAndLoadData(() => {
var testRow = (row, i, cb) => {
var outputRow = row;
testRoutabilityRow(i, (err, result) => {
if (err) return cb(err);
directions.filter(d => !!table.hashes()[0][d]).forEach((direction) => {
var want = this.shortcutsHash[row[direction]] || row[direction];
switch (true) {
case '' === want:
case 'x' === want:
outputRow[direction] = result[direction].status ?
result[direction].status.toString() : '';
break;
case /^\d+s/.test(want):
outputRow[direction] = result[direction].time ? util.format('%ds', result[direction].time) : '';
break;
case /^\d+ km\/h/.test(want):
outputRow[direction] = result[direction].speed ? util.format('%d km/h', result[direction].speed) : '';
break;
default:
throw new Error(util.format('*** Unknown expectation format: %s', want));
}
if (this.FuzzyMatch.match(outputRow[direction], want)) {
outputRow[direction] = row[direction];
}
});
if (outputRow != row) {
this.logFail(row, outputRow, result);
}
cb(null, outputRow);
});
};
this.processRowsAndDiff(table, testRow, callback);
});
});
});
var testRoutabilityRow = (i, cb) => {
var result = {};
var testDirection = (dir, callback) => {
var a = new classes.Location(this.origin[0] + (1+this.WAY_SPACING*i) * this.zoom, this.origin[1]),
b = new classes.Location(this.origin[0] + (3+this.WAY_SPACING*i) * this.zoom, this.origin[1]),
r = {};
r.which = dir;
this.requestRoute((dir === 'forw' ? [a, b] : [b, a]), [], this.queryParams, (err, res, body) => {
if (err) return callback(err);
r.query = this.query;
r.json = JSON.parse(body);
r.status = r.json.status === 200 ? 'x' : null;
if (r.status) {
r.route = this.wayList(r.json.route_instructions);
if (r.route === util.format('w%d', i)) {
r.time = r.json.route_summary.total_time;
r.distance = r.json.route_summary.total_distance;
r.speed = r.time > 0 ? parseInt(3.6 * r.distance / r.time) : null;
} else {
r.status = null;
}
}
callback(null, r);
});
};
d3.queue()
.defer(testDirection, 'forw')
.defer(testDirection, 'backw')
.awaitAll((err, res) => {
if (err) return cb(err);
// check if forw and backw returned the same values
res.forEach((dirRes) => {
var which = dirRes.which;
delete dirRes.which;
result[which] = dirRes;
});
result.bothw = {};
['status', 'time', 'distance', 'speed'].forEach((key) => {
if (result.forw[key] === result.backw[key]) {
result.bothw[key] = result.forw[key];
} else {
result.bothw[key] = 'diff';
}
});
cb(null, result);
});
};
};

View File

@ -1,78 +0,0 @@
def test_routability_row i
result = {}
['forw','backw'].each do |direction|
a = Location.new @origin[0]+(1+WAY_SPACING*i)*@zoom, @origin[1]
b = Location.new @origin[0]+(3+WAY_SPACING*i)*@zoom, @origin[1]
r = {}
r[:response] = request_route (direction=='forw' ? [a,b] : [b,a]), [], @query_params
r[:query] = @query
r[:json] = JSON.parse(r[:response].body)
r[:status] = (route_status r[:response]) == 200 ? 'x' : nil
if r[:status] then
r[:route] = way_list r[:json]['route_instructions']
if r[:route]=="w#{i}"
r[:time] = r[:json]['route_summary']['total_time']
r[:distance] = r[:json]['route_summary']['total_distance']
r[:speed] = r[:time]>0 ? (3.6*r[:distance]/r[:time]).to_i : nil
else
# if we hit the wrong way segment, we assume it's
# because the one we tested was not unroutable
r[:status] = nil
end
end
result[direction] = r
end
# check if forw and backw returned the same values
result['bothw'] = {}
[:status,:time,:distance,:speed].each do |key|
if result['forw'][key] == result['backw'][key]
result['bothw'][key] = result['forw'][key]
else
result['bothw'][key] = 'diff'
end
end
result
end
Then /^routability should be$/ do |table|
build_ways_from_table table
reprocess
actual = []
if table.headers&["forw","backw","bothw"] == []
raise "*** routability tabel must contain either 'forw', 'backw' or 'bothw' column"
end
OSRMLoader.load(self,"#{contracted_file}.osrm") do
table.hashes.each_with_index do |row,i|
output_row = row.dup
attempts = []
result = test_routability_row i
directions = ['forw','backw','bothw']
(directions & table.headers).each do |direction|
want = shortcuts_hash[row[direction]] || row[direction] #expand shortcuts
case want
when '', 'x'
output_row[direction] = result[direction][:status] ? result[direction][:status].to_s : ''
when /^\d+s/
output_row[direction] = result[direction][:time] ? "#{result[direction][:time]}s" : ''
when /^\d+ km\/h/
output_row[direction] = result[direction][:speed] ? "#{result[direction][:speed]} km/h" : ''
else
raise "*** Unknown expectation format: #{want}"
end
if FuzzyMatch.match output_row[direction], want
output_row[direction] = row[direction]
end
end
if output_row != row
log_fail row,output_row,result
end
actual << output_row
end
end
table.diff! actual
end

View File

@ -0,0 +1,16 @@
var d3 = require('d3-queue');
module.exports = function () {
this.When(/^I route I should get$/, this.WhenIRouteIShouldGet);
// This is used to route 100 times; timeout for entire step is therefore set to 100 * STRESS_TIMEOUT
this.When(/^I route (\d+) times I should get$/, { timeout: 30000 }, (n, table, callback) => {
var q = d3.queue(1);
for (var i=0; i<n; i++) {
q.defer(this.WhenIRouteIShouldGet, table);
}
q.awaitAll(callback);
});
};

View File

@ -1,165 +0,0 @@
When /^I route I should get$/ do |table|
reprocess
actual = []
OSRMLoader.load(self,"#{contracted_file}.osrm") do
table.hashes.each_with_index do |row,ri|
if row['request']
got = {'request' => row['request'] }
response = request_url row['request']
else
default_params = @query_params
user_params = []
got = {}
row.each_pair do |k,v|
if k =~ /param:(.*)/
if v=='(nil)'
user_params << [$1, nil]
elsif v!=nil
user_params << [$1, v]
end
got[k]=v
end
end
params = overwrite_params default_params, user_params
waypoints = []
bearings = []
if row['bearings']
got['bearings'] = row['bearings']
bearings = row['bearings'].split(' ').compact
end
if row['from'] and row['to']
node = find_node_by_name(row['from'])
raise "*** unknown from-node '#{row['from']}" unless node
waypoints << node
node = find_node_by_name(row['to'])
raise "*** unknown to-node '#{row['to']}" unless node
waypoints << node
got = got.merge({'from' => row['from'], 'to' => row['to'] })
response = request_route waypoints, bearings, params
elsif row['waypoints']
row['waypoints'].split(',').each do |n|
node = find_node_by_name(n.strip)
raise "*** unknown waypoint node '#{n.strip}" unless node
waypoints << node
end
got = got.merge({'waypoints' => row['waypoints'] })
response = request_route waypoints, bearings, params
else
raise "*** no waypoints"
end
end
if response.body.empty? == false
json = JSON.parse response.body
end
if response.body.empty? == false
if json['status'] == 200
instructions = way_list json['route_instructions']
bearings = bearing_list json['route_instructions']
compasses = compass_list json['route_instructions']
turns = turn_list json['route_instructions']
modes = mode_list json['route_instructions']
times = time_list json['route_instructions']
distances = distance_list json['route_instructions']
end
end
if table.headers.include? 'status'
got['status'] = json['status'].to_s
end
if table.headers.include? 'message'
got['message'] = json['status_message']
end
if table.headers.include? '#' # comment column
got['#'] = row['#'] # copy value so it always match
end
if table.headers.include? 'start'
got['start'] = instructions ? json['route_summary']['start_point'] : nil
end
if table.headers.include? 'end'
got['end'] = instructions ? json['route_summary']['end_point'] : nil
end
if table.headers.include? 'geometry'
got['geometry'] = json['route_geometry']
end
if table.headers.include? 'route'
got['route'] = (instructions || '').strip
if table.headers.include?('alternative')
got['alternative'] =
if json['found_alternative']
way_list json['alternative_instructions'].first
else
""
end
end
if table.headers.include?('distance')
if row['distance']!=''
raise "*** Distance must be specied in meters. (ex: 250m)" unless row['distance'] =~ /\d+m/
end
got['distance'] = instructions ? "#{json['route_summary']['total_distance'].to_s}m" : ''
end
if table.headers.include?('time')
raise "*** Time must be specied in seconds. (ex: 60s)" unless row['time'] =~ /\d+s/
got['time'] = instructions ? "#{json['route_summary']['total_time'].to_s}s" : ''
end
if table.headers.include?('speed')
if row['speed'] != '' && instructions
raise "*** Speed must be specied in km/h. (ex: 50 km/h)" unless row['speed'] =~ /\d+ km\/h/
time = json['route_summary']['total_time']
distance = json['route_summary']['total_distance']
speed = time>0 ? (3.6*distance/time).round : nil
got['speed'] = "#{speed} km/h"
else
got['speed'] = ''
end
end
if table.headers.include? 'bearing'
got['bearing'] = instructions ? bearings : ''
end
if table.headers.include? 'compass'
got['compass'] = instructions ? compasses : ''
end
if table.headers.include? 'turns'
got['turns'] = instructions ? turns : ''
end
if table.headers.include? 'modes'
got['modes'] = instructions ? modes : ''
end
if table.headers.include? 'times'
got['times'] = instructions ? times : ''
end
if table.headers.include? 'distances'
got['distances'] = instructions ? distances : ''
end
end
ok = true
row.keys.each do |key|
if FuzzyMatch.match got[key], row[key]
got[key] = row[key]
else
ok = false
end
end
unless ok
log_fail row,got, { 'route' => {:query => @query, :response => response} }
end
actual << got
end
end
table.diff! actual
end
When /^I route (\d+) times I should get$/ do |n,table|
ok = true
n.to_i.times do
ok = false unless step "I route I should get", table
end
ok
end

View File

@ -0,0 +1,13 @@
var assert = require('assert');
module.exports = function () {
this.Then(/^I should get a valid timestamp/, (callback) => {
this.ShouldGetAResponse();
this.ShouldBeValidJSON((err) => {
this.ShouldBeWellFormed();
assert.equal(typeof this.json.timestamp, 'string');
assert.equal(this.json.timestamp, '2000-01-01T00:00:00Z');
callback(err);
});
});
};

View File

@ -1,7 +0,0 @@
Then /^I should get a valid timestamp/ do
step "I should get a response"
step "response should be valid JSON"
step "response should be well-formed"
expect(@json['timestamp'].class).to eq(String)
expect(@json['timestamp']).to eq("2000-01-01T00:00:00Z")
end

View File

@ -0,0 +1,136 @@
var util = require('util');
module.exports = function () {
this.When(/^I plan a trip I should get$/, (table, callback) => {
var got;
this.reprocessAndLoadData(() => {
var testRow = (row, ri, cb) => {
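// each row either issues the raw request from the 'request' column or builds a trip query
// from the named nodes; afterRequest then checks status, trips and via_points against the expected values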
var afterRequest = (err, res) => {
if (err) return cb(err);
var headers = new Set(table.raw()[0]);
for (var k in row) {
var match = k.match(/param:(.*)/);
if (match) {
if (row[k] === '(nil)') {
params[match[1]] = null;
} else if (row[k]) {
params[match[1]] = [row[k]];
}
got[k] = row[k];
}
}
var json;
if (res.body.length) {
json = JSON.parse(res.body);
}
if (headers.has('status')) {
got.status = json.status.toString();
}
if (headers.has('message')) {
got.message = json.status_message;
}
if (headers.has('#')) {
// comment column
got['#'] = row['#'];
}
var subTrips;
if (res.statusCode === 200) {
if (headers.has('trips')) {
subTrips = json.trips.filter(t => !!t).map(sub => sub.via_points);
}
}
var ok = true,
encodedResult = '',
extendedTarget = '';
row.trips.split(',').forEach((sub, si) => {
if (si >= subTrips.length) {
ok = false;
} else {
ok = false;
// TODO: Check all rotations of the round trip
for (var ni=0; ni<sub.length; ni++) {
var node = this.findNodeByName(sub[ni]),
outNode = subTrips[si][ni];
if (this.FuzzyMatch.matchLocation(outNode, node)) {
encodedResult += sub[ni];
extendedTarget += sub[ni];
ok = true;
} else {
encodedResult += util.format('? [%s,%s]', outNode[0], outNode[1]);
extendedTarget += util.format('%s [%d,%d]', sub[ni], node.lat, node.lon);
}
}
}
});
if (ok) {
got.trips = row.trips;
got.via_points = row.via_points;
} else {
got.trips = encodedResult;
row.trips = extendedTarget;
this.logFail(row, got, { trip: { query: this.query, response: res }});
}
ok = true;
for (var key in row) {
if (this.FuzzyMatch.match(got[key], row[key])) {
got[key] = row[key];
} else {
ok = false;
}
}
if (!ok) {
this.logFail(row, got, { trip: { query: this.query, response: res }});
}
cb(null, got);
};
if (row.request) {
got.request = row.request;
this.requestUrl(row.request, afterRequest);
} else {
var params = this.queryParams,
waypoints = [];
if (row.from && row.to) {
var fromNode = this.findNodeByName(row.from);
if (!fromNode) throw new Error(util.format('*** unknown from-node "%s"', row.from));
waypoints.push(fromNode);
var toNode = this.findNodeByName(row.to);
if (!toNode) throw new Error(util.format('*** unknown to-node "%s"', row.to));
waypoints.push(toNode);
got = { from: row.from, to: row.to };
this.requestTrip(waypoints, params, afterRequest);
} else if (row.waypoints) {
row.waypoints.split(',').forEach((n) => {
var node = this.findNodeByName(n.trim());
if (!node) throw new Error(util.format('*** unknown waypoint node "%s"', n.trim()));
waypoints.push(node);
});
got = { waypoints: row.waypoints };
this.requestTrip(waypoints, params, afterRequest);
} else {
throw new Error('*** no waypoints');
}
}
};
this.processRowsAndDiff(table, testRow, callback);
});
});
};

View File

@ -1,121 +0,0 @@
When /^I plan a trip I should get$/ do |table|
reprocess
actual = []
OSRMLoader.load(self,"#{contracted_file}.osrm") do
table.hashes.each_with_index do |row,ri|
if row['request']
got = {'request' => row['request'] }
response = request_url row['request']
else
params = @query_params
waypoints = []
if row['from'] and row['to']
node = find_node_by_name(row['from'])
raise "*** unknown from-node '#{row['from']}" unless node
waypoints << node
node = find_node_by_name(row['to'])
raise "*** unknown to-node '#{row['to']}" unless node
waypoints << node
got = {'from' => row['from'], 'to' => row['to'] }
response = request_trip waypoints, params
elsif row['waypoints']
row['waypoints'].split(',').each do |n|
node = find_node_by_name(n.strip)
raise "*** unknown waypoint node '#{n.strip}" unless node
waypoints << node
end
got = {'waypoints' => row['waypoints'] }
response = request_trip waypoints, params
else
raise "*** no waypoints"
end
end
row.each_pair do |k,v|
if k =~ /param:(.*)/
if v=='(nil)'
params[$1]=nil
elsif v!=nil
params[$1]=[v]
end
got[k]=v
end
end
if response.body.empty? == false
json = JSON.parse response.body
end
if table.headers.include? 'status'
got['status'] = json['status'].to_s
end
if table.headers.include? 'message'
got['message'] = json['status_message']
end
if table.headers.include? '#' # comment column
got['#'] = row['#'] # copy value so it always match
end
if response.code == "200"
if table.headers.include? 'trips'
sub_trips = json['trips'].compact.map { |sub| sub['via_points']}
end
end
######################
ok = true
encoded_result = ""
extended_target = ""
row['trips'].split(',').each_with_index do |sub, sub_idx|
if sub_idx >= sub_trips.length
ok = false
break
end
ok = false;
#TODO: Check all rotations of the round trip
sub.length.times do |node_idx|
node = find_node_by_name(sub[node_idx])
out_node = sub_trips[sub_idx][node_idx]
if FuzzyMatch.match_location out_node, node
encoded_result += sub[node_idx]
extended_target += sub[node_idx]
ok = true
else
encoded_result += "? [#{out_node[0]},#{out_node[1]}]"
extended_target += "#{sub[node_idx]} [#{node.lat},#{node.lon}]"
end
end
end
if ok
got['trips'] = row['trips']
got['via_points'] = row['via_points']
else
got['trips'] = encoded_result
row['trips'] = extended_target
log_fail row,got, { 'trip' => {:query => @query, :response => response} }
end
ok = true
row.keys.each do |key|
if FuzzyMatch.match got[key], row[key]
got[key] = row[key]
else
ok = false
end
end
unless ok
log_fail row,got, { 'trip' => {:query => @query, :response => response} }
end
actual << got
end
end
table.diff! actual
end

View File

@ -0,0 +1,160 @@
'use strict';
var builder = require('xmlbuilder');
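// minimal in-memory OSM database used by the tests: collects nodes, ways and relations
// and serializes them to OSM XML with xmlbuilder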
class DB {
constructor () {
this.nodes = new Array();
this.ways = new Array();
this.relations = new Array();
}
addNode (node) {
this.nodes.push(node);
}
addWay (way) {
this.ways.push(way);
}
addRelation (relation) {
this.relations.push(relation);
}
clear () {
this.nodes = [];
this.ways = [];
this.relations = [];
}
toXML (callback) {
var xml = builder.create('osm', {'encoding':'UTF-8'});
xml.att('generator', 'osrm-test')
.att('version', '0.6');
this.nodes.forEach((n) => {
var node = xml.ele('node', {
id: n.id,
version: 1,
uid: n.OSM_UID,
user: n.OSM_USER,
timestamp: n.OSM_TIMESTAMP,
lon: n.lon,
lat: n.lat
});
for (var k in n.tags) {
node.ele('tag')
.att('k', k)
.att('v', n.tags[k]);
}
});
this.ways.forEach((w) => {
var way = xml.ele('way', {
id: w.id,
version: 1,
uid: w.OSM_UID,
user: w.OSM_USER,
timestamp: w.OSM_TIMESTAMP
});
w.nodes.forEach((k) => {
way.ele('nd')
.att('ref', k.id);
});
for (var k in w.tags) {
way.ele('tag')
.att('k', k)
.att('v', w.tags[k]);
}
});
this.relations.forEach((r) => {
var relation = xml.ele('relation', {
id: r.id,
user: r.OSM_USER,
timestamp: r.OSM_TIMESTAMP,
uid: r.OSM_UID
});
r.members.forEach((m) => {
relation.ele('member', {
type: m.type,
ref: m.id,
role: m.role
});
});
for (var k in r.tags) {
relation.ele('tag')
.att('k', k)
.att('v', r.tags[k]);
}
});
callback(xml.end({ pretty: true, indent: ' ' }));
}
}
class Node {
constructor (id, OSM_USER, OSM_TIMESTAMP, OSM_UID, lon, lat, tags) {
this.id = id;
this.OSM_USER = OSM_USER;
this.OSM_TIMESTAMP = OSM_TIMESTAMP;
this.OSM_UID = OSM_UID;
this.lon = lon;
this.lat = lat;
this.tags = tags;
}
addTag (k, v) {
this.tags[k] = v;
}
}
class Way {
constructor (id, OSM_USER, OSM_TIMESTAMP, OSM_UID) {
this.id = id;
this.OSM_USER = OSM_USER;
this.OSM_TIMESTAMP = OSM_TIMESTAMP;
this.OSM_UID = OSM_UID;
this.tags = {};
this.nodes = [];
}
addNode (node) {
this.nodes.push(node);
}
setTags (tags) {
this.tags = tags;
}
}
class Relation {
constructor (id, OSM_USER, OSM_TIMESTAMP, OSM_UID) {
this.id = id;
this.OSM_USER = OSM_USER;
this.OSM_TIMESTAMP = OSM_TIMESTAMP;
this.OSM_UID = OSM_UID;
this.members = [];
this.tags = {};
}
addMember (memberType, id, role) {
this.members.push({type: memberType, id: id, role: role});
}
addTag (k, v) {
this.tags[k] = v;
}
}
module.exports = {
DB: DB,
Node: Node,
Way: Way,
Relation: Relation
};

115
features/support/config.js Normal file
View File

@ -0,0 +1,115 @@
var fs = require('fs');
var path = require('path');
var util = require('util');
var d3 = require('d3-queue');
var OSM = require('./build_osm');
var classes = require('./data_classes');
module.exports = function () {
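// hashes the active profile, the Lua library files and the osrm binaries, and combines them
// into fingerprints used to name (and reuse) cached test data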
this.initializeOptions = (callback) => {
this.profile = this.profile || this.DEFAULT_SPEEDPROFILE;
this.OSMDB = this.OSMDB || new OSM.DB();
this.nameNodeHash = this.nameNodeHash || {};
this.locationHash = this.locationHash || {};
this.nameWayHash = this.nameWayHash || {};
this.osmData = new classes.osmData(this);
this.STRESS_TIMEOUT = 300;
this.OSRMLoader = this._OSRMLoader();
this.PREPROCESS_LOG_FILE = path.resolve(this.TEST_FOLDER, 'preprocessing.log');
this.LOG_FILE = path.resolve(this.TEST_FOLDER, 'fail.log');
this.HOST = 'http://127.0.0.1:' + this.OSRM_PORT;
this.DESTINATION_REACHED = 15; // OSRM instruction code
this.shortcutsHash = this.shortcutsHash || {};
var hashLuaLib = (cb) => {
fs.readdir(path.normalize(this.PROFILES_PATH + '/lib/'), (err, files) => {
if (err) return cb(err);
var luaFiles = files.filter(f => !!f.match(/\.lua$/)).map(f => path.normalize(this.PROFILES_PATH + '/lib/' + f));
this.hashOfFiles(luaFiles, hash => {
this.luaLibHash = hash;
cb();
});
});
};
var hashProfile = (cb) => {
this.hashProfile((hash) => {
this.profileHash = hash;
cb();
});
};
var hashExtract = (cb) => {
this.hashOfFiles(util.format('%s/osrm-extract%s', this.BIN_PATH, this.EXE), (hash) => {
this.binExtractHash = hash;
cb();
});
};
var hashContract = (cb) => {
this.hashOfFiles(util.format('%s/osrm-contract%s', this.BIN_PATH, this.EXE), (hash) => {
this.binContractHash = hash;
this.fingerprintContract = this.hashString(this.binContractHash);
cb();
});
};
var hashRouted = (cb) => {
this.hashOfFiles(util.format('%s/osrm-routed%s', this.BIN_PATH, this.EXE), (hash) => {
this.binRoutedHash = hash;
this.fingerprintRoute = this.hashString(this.binRoutedHash);
cb();
});
};
d3.queue()
.defer(hashLuaLib)
.defer(hashProfile)
.defer(hashExtract)
.defer(hashContract)
.defer(hashRouted)
.awaitAll(() => {
this.fingerprintExtract = this.hashString([this.profileHash, this.luaLibHash, this.binExtractHash].join('-'));
this.AfterConfiguration(() => {
callback();
});
});
};
this.setProfileBasedHashes = () => {
this.fingerprintExtract = this.hashString([this.profileHash, this.luaLibHash, this.binExtractHash].join('-'));
this.fingerprintContract = this.hashString(this.binContractHash);
};
this.setProfile = (profile, cb) => {
var lastProfile = this.profile;
if (profile !== lastProfile) {
this.profile = profile;
this.hashProfile((hash) => {
this.profileHash = hash;
this.setProfileBasedHashes();
cb();
});
} else cb();
};
this.setExtractArgs = (args) => {
this.extractArgs = args;
};
this.setContractArgs = (args) => {
this.contractArgs = args;
};
};

View File

@ -1,20 +0,0 @@
def profile
@profile ||= reset_profile
end
def reset_profile
@profile = nil
set_profile DEFAULT_SPEEDPROFILE
end
def set_profile profile
@profile = profile
end
def set_extract_args args
@extract_args = args
end
def set_contract_args args
@contract_args = args
end

339
features/support/data.js Normal file
View File

@ -0,0 +1,339 @@
var fs = require('fs');
var path = require('path');
var util = require('util');
var exec = require('child_process').exec;
var d3 = require('d3-queue');
var OSM = require('./build_osm');
var classes = require('./data_classes');
module.exports = function () {
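// helpers for generating test OSM data from cucumber tables and for running
// osrm-extract / osrm-contract on the cached files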
this.setGridSize = (meters) => {
// the constant is calculated (with BigDecimal) as: 1.0/(DEG_TO_RAD*EARTH_RADIUS_IN_METERS)
// see ApproximateDistance() in ExtractorStructs.h
// it's only accurate when measuring along the equator, or going exactly north-south
this.zoom = parseFloat(meters) * 0.8990679362704610899694577444566908445396483347536032203503E-5;
};
this.setOrigin = (origin) => {
this.origin = origin;
};
this.buildWaysFromTable = (table, callback) => {
// add one unconnected way for each row
var buildRow = (row, ri, cb) => {
// comments ported directly from ruby suite:
// NOTE: currently osrm crashes when processing an isolated oneway with just 2 nodes, so we use 4 edges
// this is related to the fact that a oneway dead-end street doesn't make a lot of sense
// if we stack ways on different x coordinates, routability tests get messed up, because osrm might pick a neighboring way if the one test can't be used.
// instead we place all lines as a string on the same y coordinate. this prevents using neighboring ways.
// add some nodes
var makeFakeNode = (namePrefix, offset) => {
return new OSM.Node(this.makeOSMId(), this.OSM_USER, this.OSM_TIMESTAMP,
this.OSM_UID, this.origin[0]+(offset + this.WAY_SPACING * ri) * this.zoom,
this.origin[1], {name: util.format('%s%d', namePrefix, ri)});
};
var nodes = ['a','b','c','d','e'].map((l, i) => makeFakeNode(l, i));
nodes.forEach(node => { this.OSMDB.addNode(node); });
// ...with a way between them
var way = new OSM.Way(this.makeOSMId(), this.OSM_USER, this.OSM_TIMESTAMP, this.OSM_UID);
nodes.forEach(node => { way.addNode(node); });
// remove tags that describe expected test result, reject empty tags
var tags = {};
for (var rkey in row) {
if (!rkey.match(/^forw\b/) &&
!rkey.match(/^backw\b/) &&
!rkey.match(/^bothw\b/) &&
row[rkey].length)
tags[rkey] = row[rkey];
}
var wayTags = { highway: 'primary' },
nodeTags = {};
for (var key in tags) {
var nodeMatch = key.match(/node\/(.*)/);
if (nodeMatch) {
if (tags[key] === '(nil)') {
delete nodeTags[key];
} else {
nodeTags[nodeMatch[1]] = tags[key];
}
} else {
if (tags[key] === '(nil)') {
delete wayTags[key];
} else {
wayTags[key] = tags[key];
}
}
}
wayTags.name = util.format('w%d', ri);
way.setTags(wayTags);
this.OSMDB.addWay(way);
for (var k in nodeTags) {
nodes[2].addTag(k, nodeTags[k]);
}
cb();
};
var q = d3.queue();
table.hashes().forEach((row, ri) => {
q.defer(buildRow, row, ri);
});
q.awaitAll(callback);
};
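// whole numbers get a decimal point (1 -> '1.0') so coordinates always serialize as decimals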
var ensureDecimal = (i) => {
if (parseInt(i) === i) return i.toFixed(1);
else return i;
};
this.tableCoordToLonLat = (ci, ri) => {
return [this.origin[0] + ci * this.zoom, this.origin[1] - ri * this.zoom].map(ensureDecimal);
};
this.addOSMNode = (name, lon, lat, id) => {
id = id || this.makeOSMId();
var node = new OSM.Node(id, this.OSM_USER, this.OSM_TIMESTAMP, this.OSM_UID, lon, lat, {name: name});
this.OSMDB.addNode(node);
this.nameNodeHash[name] = node;
};
this.addLocation = (name, lon, lat) => {
this.locationHash[name] = new classes.Location(lon, lat);
};
this.findNodeByName = (s) => {
if (s.length !== 1) throw new Error(util.format('*** invalid node name "%s", must be a single character', s));
if (!s.match(/[a-z0-9]/)) throw new Error(util.format('*** invalid node name "%s", must be alphanumeric', s));
var fromNode;
if (s.match(/[a-z]/)) {
fromNode = this.nameNodeHash[s.toString()];
} else {
fromNode = this.locationHash[s.toString()];
}
return fromNode;
};
this.findWayByName = (s) => {
return this.nameWayHash[s.toString()] || this.nameWayHash[s.toString().split('').reverse().join('')];
};
this.resetData = () => {
this.resetOSM();
};
this.makeOSMId = () => {
this.osmID = this.osmID + 1;
return this.osmID;
};
this.resetOSM = () => {
this.OSMDB.clear();
this.osmData.reset();
this.nameNodeHash = {};
this.locationHash = {};
this.nameWayHash = {};
this.osmID = 0;
};
this.writeOSM = (callback) => {
fs.exists(this.DATA_FOLDER, (exists) => {
var mkDirFn = exists ? (cb) => { cb(); } : fs.mkdir.bind(fs.mkdir, this.DATA_FOLDER);
mkDirFn((err) => {
if (err) return callback(err);
var osmPath = path.resolve(this.DATA_FOLDER, util.format('%s.osm', this.osmData.osmFile));
fs.exists(osmPath, (exists) => {
if (!exists) fs.writeFile(osmPath, this.osmData.str, callback);
else callback();
});
});
});
};
this.isExtracted = (callback) => {
fs.exists(util.format('%s.osrm', this.osmData.extractedFile), (core) => {
if (!core) return callback(false);
fs.exists(util.format('%s.osrm.names', this.osmData.extractedFile), (names) => {
if (!names) return callback(false);
fs.exists(util.format('%s.osrm.restrictions', this.osmData.extractedFile), (restrictions) => {
return callback(restrictions);
});
});
});
};
this.isContracted = (callback) => {
fs.exists(util.format('%s.osrm.hsgr', this.osmData.contractedFile), callback);
};
this.writeTimestamp = (callback) => {
fs.writeFile(util.format('%s.osrm.timestamp', this.osmData.contractedFile), this.OSM_TIMESTAMP, callback);
};
this.writeInputData = (callback) => {
this.writeOSM((err) => {
if (err) return callback(err);
this.writeTimestamp(callback);
});
};
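// runs osrm-extract on the generated .osm file and renames the output to the fingerprinted extracted name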
this.extractData = (callback) => {
this.logPreprocessInfo();
this.log(util.format('== Extracting %s.osm...', this.osmData.osmFile), 'preprocess');
var cmd = util.format('%s%s/osrm-extract %s.osm %s --profile %s/%s.lua >>%s 2>&1',
this.LOAD_LIBRARIES, this.BIN_PATH, this.osmData.osmFile, this.extractArgs || '', this.PROFILES_PATH, this.profile, this.PREPROCESS_LOG_FILE);
this.log(cmd);
process.chdir(this.TEST_FOLDER);
exec(cmd, (err) => {
if (err) {
this.log(util.format('*** Exited with code %d', err.code), 'preprocess');
return callback(this.ExtractError(err.code, util.format('osrm-extract exited with code %d', err.code)));
}
var q = d3.queue();
var rename = (file, cb) => {
this.log(util.format('Renaming %s.%s to %s.%s', this.osmData.osmFile, file, this.osmData.extractedFile, file), 'preprocess');
fs.rename([this.osmData.osmFile, file].join('.'), [this.osmData.extractedFile, file].join('.'), (err) => {
if (err) return cb(this.FileError(null, 'failed to rename data file after extracting'));
cb();
});
};
var renameIfExists = (file, cb) => {
fs.stat([this.osmData.osmFile, file].join('.'), (doesNotExistErr, exists) => {
if (exists) rename(file, cb);
else cb();
});
};
['osrm','osrm.names','osrm.restrictions','osrm.ebg','osrm.enw','osrm.edges','osrm.fileIndex','osrm.geometry','osrm.nodes','osrm.ramIndex'].forEach(file => {
q.defer(rename, file);
});
['osrm.edge_segment_lookup','osrm.edge_penalties'].forEach(file => {
q.defer(renameIfExists, file);
});
q.awaitAll((err) => {
this.log('Finished extracting ' + this.osmData.extractedFile, 'preprocess');
process.chdir('../');
callback(err);
});
});
};
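// runs osrm-contract on the extracted data, renaming or copying the results to the fingerprinted contracted name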
this.contractData = (callback) => {
this.logPreprocessInfo();
this.log(util.format('== Contracting %s.osm...', this.osmData.extractedFile), 'preprocess');
var cmd = util.format('%s%s/osrm-contract %s %s.osrm >>%s 2>&1',
this.LOAD_LIBRARIES, this.BIN_PATH, this.contractArgs || '', this.osmData.extractedFile, this.PREPROCESS_LOG_FILE);
this.log(cmd);
process.chdir(this.TEST_FOLDER);
exec(cmd, (err) => {
if (err) {
this.log(util.format('*** Exited with code %d', err.code), 'preprocess');
return callback(this.ContractError(err.code, util.format('osrm-contract exited with code %d', err.code)));
}
var rename = (file, cb) => {
this.log(util.format('Renaming %s.%s to %s.%s', this.osmData.extractedFile, file, this.osmData.contractedFile, file), 'preprocess');
fs.rename([this.osmData.extractedFile, file].join('.'), [this.osmData.contractedFile, file].join('.'), (err) => {
if (err) return cb(this.FileError(null, 'failed to rename data file after contracting.'));
cb();
});
};
var copy = (file, cb) => {
this.log(util.format('Copying %s.%s to %s.%s', this.osmData.extractedFile, file, this.osmData.contractedFile, file), 'preprocess');
fs.createReadStream([this.osmData.extractedFile, file].join('.'))
.pipe(fs.createWriteStream([this.osmData.contractedFile, file].join('.'))
.on('finish', cb)
)
.on('error', () => {
return cb(this.FileError(null, 'failed to copy data after contracting.'));
});
};
var q = d3.queue();
['osrm.hsgr','osrm.fileIndex','osrm.geometry','osrm.nodes','osrm.ramIndex','osrm.core','osrm.edges'].forEach((file) => {
q.defer(rename, file);
});
['osrm.names','osrm.restrictions','osrm'].forEach((file) => {
q.defer(copy, file);
});
q.awaitAll((err) => {
this.log('Finished contracting ' + this.osmData.contractedFile, 'preprocess');
process.chdir('../');
callback(err);
});
});
};
var noop = (cb) => cb();
this.reprocess = (callback) => {
this.writeAndExtract((e) => {
if (e) return callback(e);
this.isContracted((isContracted) => {
var contractFn = isContracted ? noop : this.contractData;
if (isContracted) this.log('Already contracted ' + this.osmData.contractedFile, 'preprocess');
contractFn((e) => {
if (e) return callback(e);
this.logPreprocessDone();
callback();
});
});
});
};
this.writeAndExtract = (callback) => {
this.osmData.populate(() => {
this.writeInputData((e) => {
if (e) return callback(e);
this.isExtracted((isExtracted) => {
var extractFn = isExtracted ? noop : this.extractData;
if (isExtracted) this.log('Already extracted ' + this.osmData.extractedFile, 'preprocess');
extractFn((e) => {
callback(e);
});
});
});
});
};
this.reprocessAndLoadData = (callback) => {
this.reprocess(() => {
this.OSRMLoader.load(util.format('%s.osrm', this.osmData.contractedFile), callback);
});
};
this.processRowsAndDiff = (table, fn, callback) => {
var q = d3.queue(1);
table.hashes().forEach((row, i) => q.defer(fn, row, i));
q.awaitAll((err, actual) => {
if (err) return callback(err);
this.diffTables(table, actual, {}, callback);
});
};
};

View File

@ -1,321 +0,0 @@
require 'OSM/objects' #osmlib gem
require 'OSM/Database'
require 'builder'
require 'fileutils'
class Location
attr_accessor :lon,:lat
def initialize lon,lat
@lat = lat
@lon = lon
end
end
def set_input_format format
raise '*** Input format must be either "osm" or "pbf"' unless ['pbf','osm'].include? format.to_s
@input_format = format.to_s
end
def input_format
@input_format || DEFAULT_INPUT_FORMAT
end
def sanitized_scenario_title
@sanitized_scenario_title ||= @scenario_title.to_s.gsub /[^0-9A-Za-z.\-]/, '_'
end
def set_grid_size meters
#the constant is calculated (with BigDecimal) as: 1.0/(DEG_TO_RAD*EARTH_RADIUS_IN_METERS)
#see ApproximateDistance() in ExtractorStructs.h
#it's only accurate when measuring along the equator, or going exactly north-south
@zoom = meters.to_f*0.8990679362704610899694577444566908445396483347536032203503E-5
end
def set_origin origin
@origin = origin
end
def build_ways_from_table table
#add one unconnected way for each row
table.hashes.each_with_index do |row,ri|
#NOTE:
#currently osrm crashes when processing an isolated oneway with just 2 nodes, so we use 4 edges
#this is related to the fact that a oneway dead-end street doesn't make a lot of sense
#if we stack ways on different x coordinates, routability tests get messed up, because osrm might pick a neighboring way if the one test can't be used.
#instead we place all lines as a string on the same y coordinate. this prevents using neighboring ways.
#a few nodes...
node1 = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, @origin[0]+(0+WAY_SPACING*ri)*@zoom, @origin[1]
node2 = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, @origin[0]+(1+WAY_SPACING*ri)*@zoom, @origin[1]
node3 = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, @origin[0]+(2+WAY_SPACING*ri)*@zoom, @origin[1]
node4 = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, @origin[0]+(3+WAY_SPACING*ri)*@zoom, @origin[1]
node5 = OSM::Node.new make_osm_id, OSM_USER, OSM_TIMESTAMP, @origin[0]+(4+WAY_SPACING*ri)*@zoom, @origin[1]
node1.uid = OSM_UID
node2.uid = OSM_UID
node3.uid = OSM_UID
node4.uid = OSM_UID
node5.uid = OSM_UID
node1 << { :name => "a#{ri}" }
node2 << { :name => "b#{ri}" }
node3 << { :name => "c#{ri}" }
node4 << { :name => "d#{ri}" }
node5 << { :name => "e#{ri}" }
osm_db << node1
osm_db << node2
osm_db << node3
osm_db << node4
osm_db << node5
#...with a way between them
way = OSM::Way.new make_osm_id, OSM_USER, OSM_TIMESTAMP
way.uid = OSM_UID
way << node1
way << node2
way << node3
way << node4
way << node5
tags = row.dup
# remove tags that describe expected test result
tags.reject! do |k,v|
k =~ /^forw\b/ ||
k =~ /^backw\b/ ||
k =~ /^bothw\b/
end
##remove empty tags
tags.reject! { |k,v| v=='' }
# sort tag keys in the form of 'node/....'
way_tags = { 'highway' => 'primary' }
node_tags = {}
tags.each_pair do |k,v|
if k =~ /node\/(.*)/
if v=='(nil)'
node_tags.delete k
else
node_tags[$1] = v
end
else
if v=='(nil)'
way_tags.delete k
else
way_tags[k] = v
end
end
end
way_tags['name'] = "w#{ri}"
way << way_tags
node3 << node_tags
osm_db << way
end
end
def table_coord_to_lonlat ci,ri
[@origin[0]+ci*@zoom, @origin[1]-ri*@zoom]
end
def add_osm_node name,lon,lat,id
id = make_osm_id if id == nil
node = OSM::Node.new id, OSM_USER, OSM_TIMESTAMP, lon, lat
node << { :name => name }
node.uid = OSM_UID
osm_db << node
name_node_hash[name] = node
end
def add_location name,lon,lat
location_hash[name] = Location.new(lon,lat)
end
def find_node_by_name s
raise "***invalid node name '#{s}', must be single characters" unless s.size == 1
raise "*** invalid node name '#{s}', must be alphanumeric" unless s.match /[a-z0-9]/
if s.match /[a-z]/
from_node = name_node_hash[ s.to_s ]
else
from_node = location_hash[ s.to_s ]
end
end
def find_way_by_name s
name_way_hash[s.to_s] || name_way_hash[s.to_s.reverse]
end
def reset_data
Dir.chdir TEST_FOLDER do
#clear_log
#clear_data_files
end
reset_profile
reset_osm
@fingerprint_osm = nil
@fingerprint_extract = nil
@fingerprint_prepare = nil
@fingerprint_route = nil
end
def make_osm_id
@osm_id = @osm_id+1
end
def reset_osm
osm_db.clear
name_node_hash.clear
location_hash.clear
name_way_hash.clear
@osm_str = nil
@osm_hash = nil
@osm_id = 0
end
def clear_data_files
File.delete *Dir.glob("#{DATA_FOLDER}/test.*")
end
def clear_log
File.delete *Dir.glob("*.log")
end
def osm_db
@osm_db ||= OSM::Database.new
end
def name_node_hash
@name_node_hash ||= {}
end
def location_hash
@location_hash ||= {}
end
def name_way_hash
@name_way_hash ||= {}
end
def osm_str
return @osm_str if @osm_str
@osm_str = ''
doc = Builder::XmlMarkup.new :indent => 2, :target => @osm_str
doc.instruct!
osm_db.to_xml doc, OSM_GENERATOR
@osm_str
end
def osm_file
@osm_file ||= "#{DATA_FOLDER}/#{fingerprint_osm}"
end
def extracted_file
@extracted_file ||= "#{osm_file}_#{fingerprint_extract}"
end
def contracted_file
@contracted_file ||= "#{osm_file}_#{fingerprint_extract}_#{fingerprint_prepare}"
end
def write_osm
Dir.mkdir DATA_FOLDER unless File.exist? DATA_FOLDER
unless File.exist?("#{osm_file}.osm")
File.open( "#{osm_file}.osm", 'w') {|f| f.write(osm_str) }
end
end
def extracted?
Dir.chdir TEST_FOLDER do
File.exist?("#{extracted_file}.osrm") &&
File.exist?("#{extracted_file}.osrm.names") &&
File.exist?("#{extracted_file}.osrm.restrictions")
end
end
def contracted?
Dir.chdir TEST_FOLDER do
File.exist?("#{contracted_file}.osrm.hsgr")
end
end
def write_timestamp
File.open( "#{contracted_file}.osrm.timestamp", 'w') {|f| f.write(OSM_TIMESTAMP) }
end
def write_input_data
Dir.chdir TEST_FOLDER do
write_osm
write_timestamp
end
end
def extract_data
Dir.chdir TEST_FOLDER do
log_preprocess_info
log "== Extracting #{osm_file}.osm...", :preprocess
log "#{LOAD_LIBRARIES}#{BIN_PATH}/osrm-extract #{osm_file}.osm #{@extract_args} --profile #{PROFILES_PATH}/#{@profile}.lua >>#{PREPROCESS_LOG_FILE} 2>&1"
unless system "#{LOAD_LIBRARIES}#{BIN_PATH}/osrm-extract #{osm_file}.osm #{@extract_args} --profile #{PROFILES_PATH}/#{@profile}.lua >>#{PREPROCESS_LOG_FILE} 2>&1"
log "*** Exited with code #{$?.exitstatus}.", :preprocess
raise ExtractError.new $?.exitstatus, "osrm-extract exited with code #{$?.exitstatus}."
end
begin
["osrm","osrm.names","osrm.restrictions","osrm.ebg","osrm.enw","osrm.edges","osrm.fileIndex","osrm.geometry","osrm.nodes","osrm.ramIndex"].each do |file|
log "Renaming #{osm_file}.#{file} to #{extracted_file}.#{file}", :preprocess
File.rename "#{osm_file}.#{file}", "#{extracted_file}.#{file}"
end
rescue Exception => e
raise FileError.new nil, "failed to rename data file after extracting."
end
begin
["osrm.edge_segment_lookup","osrm.edge_penalties"].each do |file|
if File.exists?("#{osm_file}.#{file}")
log "Renaming #{osm_file}.#{file} to #{extracted_file}.#{file}", :preprocess
File.rename "#{osm_file}.#{file}", "#{extracted_file}.#{file}"
end
end
rescue Exception => e
raise FileError.new nil, "failed to rename data file after extracting."
end
end
end
def prepare_data
Dir.chdir TEST_FOLDER do
log_preprocess_info
log "== Preparing #{extracted_file}.osm...", :preprocess
log "#{LOAD_LIBRARIES}#{BIN_PATH}/osrm-contract #{@contract_args} #{extracted_file}.osrm >>#{PREPROCESS_LOG_FILE} 2>&1"
unless system "#{LOAD_LIBRARIES}#{BIN_PATH}/osrm-contract #{@contract_args} #{extracted_file}.osrm >>#{PREPROCESS_LOG_FILE} 2>&1"
log "*** Exited with code #{$?.exitstatus}.", :preprocess
raise PrepareError.new $?.exitstatus, "osrm-contract exited with code #{$?.exitstatus}."
end
begin
["osrm.hsgr","osrm.fileIndex","osrm.geometry","osrm.nodes","osrm.ramIndex","osrm.core","osrm.edges"].each do |file|
log "Renaming #{extracted_file}.#{file} to #{contracted_file}.#{file}", :preprocess
File.rename "#{extracted_file}.#{file}", "#{contracted_file}.#{file}"
end
rescue Exception => e
raise FileError.new nil, "failed to rename data file after preparing."
end
begin
["osrm.names","osrm.restrictions","osrm"].each do |file|
log "Copying #{extracted_file}.#{file} to #{contracted_file}.#{file}", :preprocess
FileUtils.cp "#{extracted_file}.#{file}", "#{contracted_file}.#{file}"
end
rescue Exception => e
raise FileError.new nil, "failed to copy data file after preparing."
end
log '', :preprocess
end
end
def reprocess
write_input_data
extract_data unless extracted?
prepare_data unless contracted?
log_preprocess_done
end

View File

@ -0,0 +1,85 @@
'use strict';
var util = require('util');
var path = require('path');
module.exports = {
Location: class {
constructor (lon, lat) {
this.lon = lon;
this.lat = lat;
}
},
osmData: class {
constructor (scope) {
this.scope = scope;
this.str = null;
this.hash = null;
this.fingerprintOSM = null;
this.osmFile = null;
this.extractedFile = null;
this.contractedFile = null;
}
populate (callback) {
this.scope.OSMDB.toXML((str) => {
this.str = str;
this.hash = this.scope.hashString(str);
this.fingerprintOSM = this.scope.hashString(this.hash);
this.osmFile = path.resolve(this.scope.DATA_FOLDER, this.fingerprintOSM);
this.extractedFile = path.resolve([this.osmFile, this.scope.fingerprintExtract].join('_'));
this.contractedFile = path.resolve([this.osmFile, this.scope.fingerprintExtract, this.scope.fingerprintContract].join('_'));
callback();
});
}
reset () {
this.str = null;
this.hash = null;
this.fingerprintOSM = null;
this.osmFile = null;
this.extractedFile = null;
this.contractedFile = null;
}
},
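// fuzzy comparison used when diffing tables: exact match, percentage range (100 ~5%),
// absolute range (100 +-5) or regex (/.../)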
FuzzyMatch: class {
match (got, want) {
var matchPercent = want.match(/(.*)\s+~(.+)%$/),
matchAbs = want.match(/(.*)\s+\+\-(.+)$/),
matchRe = want.match(/^\/(.*)\/$/);
if (got === want) {
return true;
} else if (matchPercent) { // percentage range: 100 ~ 5%
var target = parseFloat(matchPercent[1]),
percentage = parseFloat(matchPercent[2]);
if (target === 0) {
return true;
} else {
var ratio = Math.abs(1 - parseFloat(got) / target);
return 100 * ratio < percentage;
}
} else if (matchAbs) { // absolute range: 100 +-5
var margin = parseFloat(matchAbs[2]),
fromR = parseFloat(matchAbs[1]) - margin,
toR = parseFloat(matchAbs[1]) + margin;
return parseFloat(got) >= fromR && parseFloat(got) <= toR;
} else if (matchRe) { // regex: /a,b,.*/
return got.match(matchRe[1]);
} else {
return false;
}
}
matchLocation (got, want) {
return this.match(got[0], util.format('%d ~0.0025%', want.lat)) &&
this.match(got[1], util.format('%d ~0.0025%', want.lon));
}
}
};

125
features/support/env.js Normal file
View File

@ -0,0 +1,125 @@
var path = require('path');
var util = require('util');
var fs = require('fs');
var exec = require('child_process').exec;
var d3 = require('d3-queue');
module.exports = function () {
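// test environment constants: ports, timeouts, folder layout and platform specifics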
this.initializeEnv = (callback) => {
this.DEFAULT_PORT = 5000;
this.DEFAULT_TIMEOUT = 2000;
this.setDefaultTimeout(this.DEFAULT_TIMEOUT);
this.ROOT_FOLDER = process.cwd();
this.OSM_USER = 'osrm';
this.OSM_GENERATOR = 'osrm-test';
this.OSM_UID = 1;
this.TEST_FOLDER = path.resolve(this.ROOT_FOLDER, 'test');
this.DATA_FOLDER = path.resolve(this.TEST_FOLDER, 'cache');
this.OSM_TIMESTAMP = '2000-01-01T00:00:00Z';
this.DEFAULT_SPEEDPROFILE = 'bicycle';
this.WAY_SPACING = 100;
this.DEFAULT_GRID_SIZE = 100; // meters
this.PROFILES_PATH = path.resolve(this.ROOT_FOLDER, 'profiles');
this.FIXTURES_PATH = path.resolve(this.ROOT_FOLDER, 'unit_tests/fixtures');
this.BIN_PATH = path.resolve(this.ROOT_FOLDER, 'build');
this.DEFAULT_INPUT_FORMAT = 'osm';
this.DEFAULT_ORIGIN = [1,1];
this.LAUNCH_TIMEOUT = 1000;
this.SHUTDOWN_TIMEOUT = 10000;
this.DEFAULT_LOAD_METHOD = 'datastore';
this.OSRM_ROUTED_LOG_FILE = path.resolve(this.TEST_FOLDER, 'osrm-routed.log');
this.ERROR_LOG_FILE = path.resolve(this.TEST_FOLDER, 'error.log');
// OS X shim to ensure shared libraries from custom locations can be loaded
// This is needed in OS X >= 10.11 because DYLD_LIBRARY_PATH is blocked
// https://forums.developer.apple.com/thread/9233
this.LOAD_LIBRARIES = process.env.OSRM_SHARED_LIBRARY_PATH ? util.format('DYLD_LIBRARY_PATH=%s ', process.env.OSRM_SHARED_LIBRARY_PATH) : '';
// TODO make sure this works on win
// process.platform is 'win32' on Windows
if (process.platform.match(/^win/)) {
this.TERMSIGNAL = 9;
this.EXE = '.exe';
this.QQ = '"';
} else {
this.TERMSIGNAL = 'SIGTERM';
this.EXE = '';
this.QQ = '';
}
// eslint-disable-next-line no-console
console.info(util.format('Node version %s', process.version));
if (parseInt(process.version.match(/v(\d+)/)[1]) < 4) throw new Error('*** Please upgrade to Node 4 or later to run the OSRM cucumber tests');
if (process.env.OSRM_PORT) {
this.OSRM_PORT = parseInt(process.env.OSRM_PORT);
// eslint-disable-next-line no-console
console.info(util.format('Port set to %d', this.OSRM_PORT));
} else {
this.OSRM_PORT = this.DEFAULT_PORT;
// eslint-disable-next-line no-console
console.info(util.format('Using default port %d', this.OSRM_PORT));
}
if (process.env.OSRM_TIMEOUT) {
this.OSRM_TIMEOUT = parseInt(process.env.OSRM_TIMEOUT);
// eslint-disable-next-line no-console
console.info(util.format('Timeout set to %d', this.OSRM_TIMEOUT));
} else {
this.OSRM_TIMEOUT = this.DEFAULT_TIMEOUT;
// eslint-disable-next-line no-console
console.info(util.format('Using default timeout %d', this.OSRM_TIMEOUT));
}
fs.exists(this.TEST_FOLDER, (exists) => {
if (!exists) throw new Error(util.format('*** Test folder %s doesn\'t exist.', this.TEST_FOLDER));
callback();
});
};
this.verifyOSRMIsNotRunning = () => {
if (this.OSRMLoader.up()) {
throw new Error('*** osrm-routed is already running.');
}
};
this.verifyExistenceOfBinaries = (callback) => {
var verify = (bin, cb) => {
var binPath = path.resolve(util.format('%s/%s%s', this.BIN_PATH, bin, this.EXE));
fs.exists(binPath, (exists) => {
if (!exists) throw new Error(util.format('%s is missing. Build failed?', binPath));
var helpPath = util.format('%s%s --help > /dev/null 2>&1', this.LOAD_LIBRARIES, binPath);
exec(helpPath, (err) => {
if (err) {
this.log(util.format('*** Exited with code %d', err.code), 'preprocess');
throw new Error(util.format('*** %s exited with code %d', helpPath, err.code));
}
cb();
});
});
};
var q = d3.queue();
['osrm-extract', 'osrm-contract', 'osrm-routed'].forEach(bin => { q.defer(verify, bin); });
q.awaitAll(() => {
callback();
});
};
this.AfterConfiguration = (callback) => {
this.clearLogFiles(() => {
this.verifyOSRMIsNotRunning();
this.verifyExistenceOfBinaries(() => {
callback();
});
});
};
process.on('exit', () => {
if (this.OSRMLoader.loader) this.OSRMLoader.shutdown(() => {});
});
process.on('SIGINT', () => {
process.exit(2);
// TODO need to handle for windows??
});
};

View File

@ -1,111 +0,0 @@
require 'rspec/expectations'
DEFAULT_PORT = 5000
DEFAULT_TIMEOUT = 2
ROOT_FOLDER = Dir.pwd
OSM_USER = 'osrm'
OSM_GENERATOR = 'osrm-test'
OSM_UID = 1
TEST_FOLDER = File.join ROOT_FOLDER, 'test'
DATA_FOLDER = 'cache'
OSM_TIMESTAMP = '2000-01-01T00:00:00Z'
DEFAULT_SPEEDPROFILE = 'bicycle'
WAY_SPACING = 100
DEFAULT_GRID_SIZE = 100 #meters
PROFILES_PATH = File.join ROOT_FOLDER, 'profiles'
FIXTURES_PATH = File.join ROOT_FOLDER, 'unit_tests/fixtures'
BIN_PATH = File.join ROOT_FOLDER, 'build'
DEFAULT_INPUT_FORMAT = 'osm'
DEFAULT_ORIGIN = [1,1]
LAUNCH_TIMEOUT = 1
SHUTDOWN_TIMEOUT = 10
DEFAULT_LOAD_METHOD = 'datastore'
OSRM_ROUTED_LOG_FILE = 'osrm-routed.log'
# OS X shim to ensure shared libraries from custom locations can be loaded
# This is needed in OS X >= 10.11 because DYLD_LIBRARY_PATH is blocked
# https://forums.developer.apple.com/thread/9233
if ENV['OSRM_SHARED_LIBRARY_PATH']
LOAD_LIBRARIES="DYLD_LIBRARY_PATH=#{ENV['OSRM_SHARED_LIBRARY_PATH']} "
else
LOAD_LIBRARIES=""
end
if ENV['OS']=~/Windows.*/ then
TERMSIGNAL=9
else
TERMSIGNAL='TERM'
end
def log_time_and_run cmd
log_time cmd
`#{cmd}`
end
def log_time cmd
puts "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S:%L')}] #{cmd}"
end
puts "Ruby version #{RUBY_VERSION}"
unless RUBY_VERSION.to_f >= 1.9
raise "*** Please upgrade to Ruby 1.9.x to run the OSRM cucumber tests"
end
if ENV["OSRM_PORT"]
OSRM_PORT = ENV["OSRM_PORT"].to_i
puts "Port set to #{OSRM_PORT}"
else
OSRM_PORT = DEFAULT_PORT
puts "Using default port #{OSRM_PORT}"
end
if ENV["OSRM_TIMEOUT"]
OSRM_TIMEOUT = ENV["OSRM_TIMEOUT"].to_i
puts "Timeout set to #{OSRM_TIMEOUT}"
else
OSRM_TIMEOUT = DEFAULT_TIMEOUT
puts "Using default timeout #{OSRM_TIMEOUT}"
end
unless File.exists? TEST_FOLDER
raise "*** Test folder #{TEST_FOLDER} doesn't exist."
end
def verify_osrm_is_not_running
if OSRMLoader::OSRMBaseLoader.new.osrm_up?
raise "*** osrm-routed is already running."
end
end
def verify_existance_of_binaries
["osrm-extract", "osrm-contract", "osrm-routed"].each do |bin|
unless File.exists? "#{BIN_PATH}/#{bin}#{EXE}"
raise "*** #{BIN_PATH}/#{bin}#{EXE} is missing. Build failed?"
end
unless system "#{LOAD_LIBRARIES}#{BIN_PATH}/#{bin}#{EXE} --help > /dev/null 2>&1"
log "*** Exited with code #{$?.exitstatus}.", :preprocess
raise "*** #{LOAD_LIBRARIES}#{BIN_PATH}/#{bin}#{EXE} --help exited with code #{$?.exitstatus}."
end
end
end
if ENV['OS']=~/Windows.*/ then
EXE='.exe'
QQ='"'
else
EXE=''
QQ=''
end
AfterConfiguration do |config|
clear_log_files
verify_osrm_is_not_running
verify_existance_of_binaries
end
at_exit do
OSRMLoader::OSRMBaseLoader.new.shutdown
end

View File

@ -0,0 +1,130 @@
'use strict';
var util = require('util');
var fs = require('fs');
var path = require('path');
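// base error that records which process failed and which log file to tail for details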
var OSRMError = class extends Error {
constructor (process, code, msg, log, lines) {
super(msg);
this.process = process;
this.code = code;
this.msg = msg;
this.lines = lines;
this.log = log;
}
extract (callback) {
this.logTail(this.log, this.lines, callback);
}
toString (callback) {
this.extract((tail) => {
callback(util.format('*** %s\nLast %d lines from %s:\n%s\n', this.msg, this.lines, this.log, tail));
});
}
logTail (logPath, n, callback) {
var expanded = path.resolve(this.TEST_FOLDER, logPath);
fs.exists(expanded, (exists) => {
if (exists) {
fs.readFile(expanded, (err, data) => {
var lines = data.trim().split('\n');
callback(lines
.slice(lines.length - n)
.map(line => util.format(' %s', line))
.join('\n'));
});
} else {
callback(util.format('File %s does not exist!', expanded));
}
});
}
};
var unescapeStr = (str) => str.replace(/\\\|/g, '\|').replace(/\\\\/g, '\\');
module.exports = {
OSRMError: OSRMError,
FileError: class extends OSRMError {
constructor (logFile, code, msg) {
super ('fileutil', code, msg, logFile, 5);
}
},
LaunchError: class extends OSRMError {
constructor (logFile, launchProcess, code, msg) {
super (launchProcess, code, msg, logFile, 5);
}
},
ExtractError: class extends OSRMError {
constructor (logFile, code, msg) {
super('osrm-extract', code, msg, logFile, 3);
}
},
ContractError: class extends OSRMError {
constructor (logFile, code, msg) {
super('osrm-contract', code, msg, logFile, 3);
}
},
RoutedError: class extends OSRMError {
constructor (logFile, msg) {
super('osrm-routed', null, msg, logFile, 3);
}
},
TableDiffError: class extends Error {
constructor (expected, actual) {
super();
this.headers = expected.raw()[0];
this.expected = expected.hashes();
this.actual = actual;
this.diff = [];
this.hasErrors = false;
var good = 0, bad = 0;
this.expected.forEach((row, i) => {
var rowError = false;
for (var j in row) {
if (unescapeStr(row[j]) != actual[i][j]) {
rowError = true;
this.hasErrors = true;
break;
}
}
if (rowError) {
bad++;
this.diff.push(Object.assign({}, row, {status: 'undefined'}));
this.diff.push(Object.assign({}, actual[i], {status: 'comment'}));
} else {
good++;
this.diff.push(row);
}
});
}
get string () {
if (!this.hasErrors) return null;
var s = ['Tables were not identical:'];
s.push(this.headers.map(key => ' ' + key).join(' | '));
this.diff.forEach((row) => {
var rowString = '| ';
this.headers.forEach((header) => {
if (!row.status) rowString += ' ' + row[header] + ' | ';
else if (row.status === 'undefined') rowString += '(-) ' + row[header] + ' | ';
else rowString += '(+) ' + row[header] + ' | ';
});
s.push(rowString);
});
return s.join('\n') + '\nTODO this is a temp workaround waiting for https://github.com/cucumber/cucumber-js/issues/534';
}
}
};

View File

@ -0,0 +1,15 @@
var exceptions = require('./exception_classes');
module.exports = function () {
this.OSRMError = exceptions.OSRMError;
this.FileError = (code, msg) => new (exceptions.FileError.bind(exceptions.FileError, this.PREPROCESS_LOG_FILE))(code, msg);
this.LaunchError = (code, launchProcess, msg) => new (exceptions.LaunchError.bind(exceptions.LaunchError, this.ERROR_LOG_FILE))(code, launchProcess, msg);
this.ExtractError = (code, msg) => new (exceptions.ExtractError.bind(exceptions.ExtractError, this.PREPROCESS_LOG_FILE))(code, msg);
this.ContractError = (code, msg) => new (exceptions.ContractError.bind(exceptions.ContractError, this.PREPROCESS_LOG_FILE))(code, msg);
this.RoutedError = (msg) => new (exceptions.RoutedError.bind(exceptions.RoutedError, this.OSRM_ROUTED_LOG_FILE))(msg);
};

View File

@ -1,56 +0,0 @@
class OSRMError < StandardError
attr_accessor :msg, :code, :process
def initialize process, code, msg, log, lines
@process = process
@code = code
@msg = msg
@lines = lines
@log = log
@extract = log_tail @log, @lines
end
def to_s
"*** #{@msg}\nLast #{@lines} lines from #{@log}:\n#{@extract}\n"
end
private
def log_tail path, n
Dir.chdir TEST_FOLDER do
expanded = File.expand_path path
if File.exists? expanded
File.open(expanded) do |f|
return f.tail(n).map { |line| " #{line}" }.join "\n"
end
else
return "File '#{expanded} does not exist!"
end
end
end
end
class FileError < OSRMError
def initialize code, msg
super 'fileutil', code, msg, PREPROCESS_LOG_FILE, 5
end
end
class ExtractError < OSRMError
def initialize code, msg
super 'osrm-extract', code, msg, PREPROCESS_LOG_FILE, 3
end
end
class PrepareError < OSRMError
def initialize code, msg
super 'osrm-contract', code, msg, PREPROCESS_LOG_FILE, 3
end
end
class RoutedError < OSRMError
def initialize msg
super 'osrm-routed', nil, msg, OSRM_ROUTED_LOG_FILE, 3
end
end

View File

@ -1,34 +0,0 @@
class File
# read last n lines of a file (trailing newlines are ignored)
def tail(n)
return [] if size==0
buffer = 1024
str = nil
if size>buffer
chunks = []
lines = 0
idx = size
begin
idx -= buffer # rewind
if idx<0
buffer += idx # adjust last read to avoid negative index
idx = 0
end
seek(idx)
chunk = read(buffer)
chunk.gsub!(/\n+\Z/,"") if chunks.empty? # strip newlines from end of file (first chunk)
lines += chunk.count("\n") # update total lines found
chunks.unshift chunk # prepend
end while lines<(n) && idx>0 # stop when enough lines found or no more to read
str = chunks.join('')
else
str = read(buffer)
end
# return last n lines of str
lines = str.split("\n")
lines.size>=n ? lines[-n,n] : lines
end
end

View File

@ -0,0 +1,5 @@
var classes = require('./data_classes');
module.exports = function() {
this.FuzzyMatch = new classes.FuzzyMatch();
};

View File

@ -1,32 +0,0 @@
class FuzzyMatch
def self.match got, want
if got == want
return true
elsif want.match /(.*)\s+~(.+)%$/ #percentage range: 100 ~5%
target = $1.to_f
percentage = $2.to_f
if target==0
return true
else
ratio = (1-(got.to_f / target)).abs;
return 100*ratio < percentage;
end
elsif want.match /(.*)\s+\+\-(.+)$/ #absolute range: 100 +-5
margin = $2.to_f
from = $1.to_f-margin
to = $1.to_f+margin
return got.to_f >= from && got.to_f <= to
elsif want =~ /^\/(.*)\/$/ #regex: /a,b,.*/
return got =~ /#{$1}/
else
return false
end
end
def self.match_location got, want
match( got[0], "#{want.lat} ~0.0025%" ) &&
match( got[1], "#{want.lon} ~0.0025%" )
end
end

37
features/support/hash.js Normal file
View File

@ -0,0 +1,37 @@
var fs = require('fs');
var path = require('path');
var crypto = require('crypto');
var d3 = require('d3-queue');
module.exports = function () {
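// SHA1 helpers used to fingerprint profiles, Lua libraries and binaries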
this.hashOfFiles = (paths, cb) => {
paths = Array.isArray(paths) ? paths : [paths];
var shasum = crypto.createHash('sha1');
var q = d3.queue(1);
var addFile = (path, cb) => {
fs.readFile(path, (err, data) => {
shasum.update(data);
cb(err);
});
};
paths.forEach(path => { q.defer(addFile, path); });
q.awaitAll(err => {
if (err) throw new Error('*** Error reading files:', err);
cb(shasum.digest('hex'));
});
};
this.hashProfile = (cb) => {
this.hashOfFiles(path.resolve(this.PROFILES_PATH, this.profile + '.lua'), cb);
};
this.hashString = (str) => {
return crypto.createHash('sha1').update(str).digest('hex');
};
return this;
};

View File

@ -1,63 +0,0 @@
require 'digest/sha1'
bin_extract_hash = nil
profile_hashes = nil
def hash_of_files paths
paths = [paths] unless paths.is_a? Array
hash = Digest::SHA1.new
for path in paths do
open(path,'rb') do |io|
while !io.eof
buf = io.readpartial 1024
hash.update buf
end
end
end
return hash.hexdigest
end
def profile_hash
profile_hashes ||= {}
profile_hashes[@profile] ||= hash_of_files "#{PROFILES_PATH}/#{@profile}.lua"
end
def osm_hash
@osm_hash ||= Digest::SHA1.hexdigest osm_str
end
def lua_lib_hash
@lua_lib_hash ||= hash_of_files Dir.glob("../profiles/lib/*.lua")
end
def bin_extract_hash
@bin_extract_hash ||= hash_of_files "#{BIN_PATH}/osrm-extract#{EXE}"
@bin_extract_hash
end
def bin_prepare_hash
@bin_prepare_hash ||= hash_of_files "#{BIN_PATH}/osrm-contract#{EXE}"
end
def bin_routed_hash
@bin_routed_hash ||= hash_of_files "#{BIN_PATH}/osrm-routed#{EXE}"
end
# combine state of data, profile and binaries into a hashes that identifies
# the exact test situation at different stages, so we can later skip steps when possible.
def fingerprint_osm
@fingerprint_osm ||= Digest::SHA1.hexdigest "#{osm_hash}"
end
def fingerprint_extract
@fingerprint_extract ||= Digest::SHA1.hexdigest "#{profile_hash}-#{lua_lib_hash}-#{bin_extract_hash}"
end
def fingerprint_prepare
@fingerprint_prepare ||= Digest::SHA1.hexdigest "#{bin_prepare_hash}"
end
def fingerprint_route
@fingerprint_route ||= Digest::SHA1.hexdigest "#{bin_routed_hash}"
end

37
features/support/hooks.js Normal file
View File

@ -0,0 +1,37 @@
var util = require('util');
module.exports = function () {
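// cucumber lifecycle hooks: set up the environment once, reset OSM data and options before
// each scenario, and shut down osrm-routed after scenarios that load data directly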
this.BeforeFeatures((features, callback) => {
this.pid = null;
this.initializeEnv(() => {
this.initializeOptions(callback);
});
});
this.Before((scenario, callback) => {
this.scenarioTitle = scenario.getName();
this.loadMethod = this.DEFAULT_LOAD_METHOD;
this.queryParams = [];
var d = new Date();
this.scenarioTime = util.format('%d-%d-%dT%s:%s:%sZ', d.getFullYear(), d.getMonth()+1, d.getDate(), d.getHours(), d.getMinutes(), d.getSeconds());
this.resetData();
this.hasLoggedPreprocessInfo = false;
this.hasLoggedScenarioInfo = false;
this.setGridSize(this.DEFAULT_GRID_SIZE);
this.setOrigin(this.DEFAULT_ORIGIN);
callback();
});
this.After((scenario, callback) => {
this.setExtractArgs('');
this.setContractArgs('');
if (this.loadMethod === 'directly' && !!this.OSRMLoader.loader) this.OSRMLoader.shutdown(callback);
else callback();
});
this.Around('@stress', (scenario, callback) => {
// TODO implement stress timeout? Around support is being dropped in cucumber-js anyway
callback();
});
};

View File

@ -1,35 +0,0 @@
STRESS_TIMEOUT = 300
Before do |scenario|
# fetch scenario and feature name, so we can use it in log files if needed
case scenario
when Cucumber::RunningTestCase::Scenario
@feature_name = scenario.feature.name
@scenario_title = scenario.name
when Cucumber::RunningTestCase::ExampleRow
@feature_name = scenario.scenario_outline.feature.name
@scenario_title = scenario.scenario_outline.name
end
@load_method = DEFAULT_LOAD_METHOD
@query_params = []
@scenario_time = Time.now.strftime("%Y-%m-%dT%H:%m:%SZ")
reset_data
@has_logged_preprocess_info = false
@has_logged_scenario_info = false
set_grid_size DEFAULT_GRID_SIZE
set_origin DEFAULT_ORIGIN
end
Around('@stress') do |scenario, block|
Timeout.timeout(STRESS_TIMEOUT) do
block.call
end
end
After do
end

47
features/support/http.js Normal file
View File

@ -0,0 +1,47 @@
var Timeout = require('node-timeout');
var request = require('request');
module.exports = function () {
// Converts an array [["param","val1"], ["param","val2"]] into param=val1&param=val2
this.paramsToString = (params) => {
var kvPairs = params.map((kv) => kv[0].toString() + '=' + kv[1].toString());
var url = kvPairs.length ? kvPairs.join('&') : '';
return url.trim();
};
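// issues the request (GET by default, POST when httpMethod is set) with a timeout
// and maps connection failures to RoutedError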
this.sendRequest = (baseUri, parameters, callback) => {
var limit = Timeout(this.OSRM_TIMEOUT, { err: { statusCode: 408 } });
var runRequest = (cb) => {
var params = this.paramsToString(parameters);
this.query = baseUri + (params.length ? '?' + params : '');
var options = this.httpMethod === 'POST' ? {
method: 'POST',
body: params,
url: baseUri
} : this.query;
request(options, (err, res, body) => {
if (err && err.code === 'ECONNREFUSED') {
throw new Error('*** osrm-routed is not running.');
} else if (err && err.statusCode === 408) {
throw new Error();
}
return cb(err, res, body);
});
};
runRequest(limit((err, res, body) => {
if (err) {
if (err.statusCode === 408)
return callback(this.RoutedError('*** osrm-routed did not respond'));
else if (err.code === 'ECONNREFUSED')
return callback(this.RoutedError('*** osrm-routed is not running'));
}
return callback(err, res, body);
}));
};
};

View File

@ -1,33 +0,0 @@
require 'net/http'
# Converts an array [["param","val1"], ["param","val2"]] into param=val1&param=val2
def params_to_string params
kv_pairs = params.map { |kv| kv[0].to_s + "=" + kv[1].to_s }
url = kv_pairs.size > 0 ? kv_pairs.join("&") : ""
return url
end
def send_request base_uri, parameters
Timeout.timeout(OSRM_TIMEOUT) do
uri_string = base_uri
params = params_to_string(parameters)
if not params.eql? ""
uri_string = uri_string + "?" + params
end
uri = URI.parse(uri_string)
@query = uri.to_s
if @http_method.eql? "POST"
Net::HTTP.start(uri.hostname, uri.port) do |http|
req = Net::HTTP::Post.new(uri.path)
req.body = params_to_string parameters
response = http.request(req)
end
else
response = Net::HTTP.get_response uri
end
end
rescue Errno::ECONNREFUSED => e
raise "*** osrm-routed is not running."
rescue Timeout::Error
raise "*** osrm-routed did not respond."
end

View File

@ -0,0 +1,5 @@
var launchClasses = require('./launch_classes');
module.exports = function () {
this._OSRMLoader = () => new (launchClasses._OSRMLoader.bind(launchClasses._OSRMLoader, this))();
};

View File

@ -1,137 +0,0 @@
require 'socket'
require 'open3'
require 'json'
# Only one instance of osrm-routed is ever launched, to avoid collisions.
# The default is to keep osrm-routed running and load data with datastore.
# However, osrm-routed is shut down and relaunched for each scenario that
# loads data directly.
class OSRMLoader
class OSRMBaseLoader
@@pid = nil
def launch
Timeout.timeout(LAUNCH_TIMEOUT) do
osrm_up
wait_for_connection
end
rescue Timeout::Error
raise RoutedError.new "Launching osrm-routed timed out."
end
def shutdown
Timeout.timeout(SHUTDOWN_TIMEOUT) do
osrm_down
end
rescue Timeout::Error
kill
raise RoutedError.new "Shutting down osrm-routed timed out."
end
def osrm_up?
if @@pid
begin
if Process.waitpid(@@pid, Process::WNOHANG) then
false
else
true
end
rescue Errno::ESRCH, Errno::ECHILD
false
end
end
end
def osrm_down
if @@pid
Process.kill TERMSIGNAL, @@pid
wait_for_shutdown
@@pid = nil
end
end
def kill
if @@pid
Process.kill 'KILL', @@pid
end
end
def wait_for_connection
while true
begin
socket = TCPSocket.new('127.0.0.1', OSRM_PORT)
return
rescue Errno::ECONNREFUSED
sleep 0.1
end
end
end
def wait_for_shutdown
while osrm_up?
sleep 0.01
end
end
end
# loading data directly when launching osrm-routed:
# under this scheme, osrm-routed is launched and shut down for each scenario,
# and osrm-datastore is not used
class OSRMDirectLoader < OSRMBaseLoader
def load world, input_file, &block
@world = world
@input_file = input_file
Dir.chdir TEST_FOLDER do
shutdown
launch
yield
shutdown
end
end
def osrm_up
return if @@pid
@@pid = Process.spawn("#{LOAD_LIBRARIES}#{BIN_PATH}/osrm-routed #{@input_file} --port #{OSRM_PORT}",:out=>OSRM_ROUTED_LOG_FILE, :err=>OSRM_ROUTED_LOG_FILE)
Process.detach(@@pid) # avoid zombie processes
end
end
# loading data with osrm-datastore:
# under this scheme, osrm-routed is launched once and kept running for all scenarios,
# and osrm-datastore is used to load data for each scenario
class OSRMDatastoreLoader < OSRMBaseLoader
def load world, input_file, &block
@world = world
@input_file = input_file
Dir.chdir TEST_FOLDER do
load_data
launch unless @@pid
yield
end
end
def load_data
run_bin "osrm-datastore", @input_file
end
def osrm_up
return if osrm_up?
@@pid = Process.spawn("#{LOAD_LIBRARIES}#{BIN_PATH}/osrm-routed --shared-memory=1 --port #{OSRM_PORT}",:out=>OSRM_ROUTED_LOG_FILE, :err=>OSRM_ROUTED_LOG_FILE)
Process.detach(@@pid) # avoid zombie processes
end
end
def self.load world, input_file, &block
method = world.instance_variable_get "@load_method"
if method == 'datastore'
OSRMDatastoreLoader.new.load world, input_file, &block
elsif method == 'directly'
OSRMDirectLoader.new.load world, input_file, &block
else
raise "*** Unknown load method '#{method}'"
end
end
end

View File

@ -0,0 +1,163 @@
'use strict';
var fs = require('fs');
var net = require('net');
var spawn = require('child_process').spawn;
var util = require('util');
var Timeout = require('node-timeout');
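// Only one instance of osrm-routed is ever launched, to avoid collisions.
// The default is to keep osrm-routed running and load data with osrm-datastore;
// when a scenario loads data directly, osrm-routed is shut down and relaunched instead.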
var OSRMBaseLoader = class {
constructor (scope) {
this.scope = scope;
}
launch (callback) {
var limit = Timeout(this.scope.LAUNCH_TIMEOUT, { err: this.scope.RoutedError('Launching osrm-routed timed out.') });
var runLaunch = (cb) => {
this.osrmUp(() => {
this.waitForConnection(cb);
});
};
runLaunch(limit((e) => { if (e) return callback(e); callback(); }));
}
shutdown (callback) {
var limit = Timeout(this.scope.SHUTDOWN_TIMEOUT, { err: this.scope.RoutedError('Shutting down osrm-routed timed out.')});
var runShutdown = (cb) => {
this.osrmDown(cb);
};
runShutdown(limit((e) => { if (e) return callback(e); callback(); }));
}
osrmIsRunning () {
return !!this.scope.pid && this.child && !this.child.killed;
}
osrmDown (callback) {
if (this.scope.pid) {
process.kill(this.scope.pid, this.scope.TERMSIGNAL);
this.waitForShutdown(callback);
this.scope.pid = null;
} else callback(true);
}
waitForConnection (callback) {
net.connect({
port: this.scope.OSRM_PORT,
host: '127.0.0.1'
})
.on('connect', () => {
callback();
})
.on('error', () => {
// not connected yet; keep retrying until the launch timeout fires
setTimeout(() => {
this.waitForConnection(callback);
}, 100);
});
}
waitForShutdown (callback) {
// poll until the child process has actually gone away
var check = () => {
if (!this.osrmIsRunning()) return callback();
setTimeout(check, 100);
};
check();
}
};
var OSRMDirectLoader = class extends OSRMBaseLoader {
constructor (scope) {
super(scope);
}
load (inputFile, callback) {
this.inputFile = inputFile;
this.shutdown(() => {
this.launch(callback);
});
}
osrmUp (callback) {
if (this.scope.pid) return callback();
var writeToLog = (data) => {
fs.appendFile(this.scope.OSRM_ROUTED_LOG_FILE, data, (err) => { if (err) throw err; });
};
var child = spawn(util.format('%s%s/osrm-routed', this.scope.LOAD_LIBRARIES, this.scope.BIN_PATH), [this.inputFile, util.format('-p%d', this.scope.OSRM_PORT)], {detached: true});
this.child = child;
this.scope.pid = child.pid;
child.stdout.on('data', writeToLog);
child.stderr.on('data', writeToLog);
callback();
}
};
var OSRMDatastoreLoader = class extends OSRMBaseLoader {
constructor (scope) {
super(scope);
}
load (inputFile, callback) {
this.inputFile = inputFile;
this.loadData((err) => {
if (err) return callback(err);
if (!this.scope.pid) return this.launch(callback);
else callback();
});
}
loadData (callback) {
this.scope.runBin('osrm-datastore', this.inputFile, (err) => {
if (err) return callback(new this.scope.LaunchError(this.scope.exitCode, 'datastore', err));
callback();
});
}
osrmUp (callback) {
if (this.scope.pid) return callback();
var writeToLog = (data) => {
fs.appendFile(this.scope.OSRM_ROUTED_LOG_FILE, data, (err) => { if (err) throw err; });
};
var child = spawn(util.format('%s%s/osrm-routed', this.scope.LOAD_LIBRARIES, this.scope.BIN_PATH), ['--shared-memory=1', util.format('-p%d', this.scope.OSRM_PORT)], {detached: true});
this.child = child;
this.scope.pid = child.pid;
child.stdout.on('data', writeToLog);
child.stderr.on('data', writeToLog);
callback();
}
};
module.exports = {
_OSRMLoader: class {
constructor (scope) {
this.scope = scope;
this.loader = null;
}
load (inputFile, callback) {
var method = this.scope.loadMethod;
if (method === 'datastore') {
this.loader = new OSRMDatastoreLoader(this.scope);
this.loader.load(inputFile, callback);
} else if (method === 'directly') {
this.loader = new OSRMDirectLoader(this.scope);
this.loader.load(inputFile, callback);
} else {
throw new Error('*** Unknown load method ' + method);
}
}
shutdown (callback) {
this.loader.shutdown(callback);
}
up () {
return this.loader ? this.loader.osrmIsRunning() : false;
}
}
};
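// Usage sketch (hypothetical hook code, not part of this commit): the scope passed in is
// assumed to be the cucumber world, exposing loadMethod, pid, BIN_PATH, OSRM_PORT, the log
// file paths and the error classes used above.
//
//   var OSRMLoader = new (require('./osrm_loader')._OSRMLoader)(this);
//   OSRMLoader.load('cache/abc123/berlin.osrm', (err) => {
//       if (err) throw err;
//       // osrm-routed is now reachable on OSRM_PORT; run the scenario, then OSRMLoader.shutdown(done)
//   });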

90
features/support/log.js Normal file
View File

@ -0,0 +1,90 @@
var fs = require('fs');
module.exports = function () {
this.clearLogFiles = (callback) => {
// emptying existing files, rather than deleting and writing new ones, makes it
// easier to use tail -f from the command line
fs.writeFile(this.OSRM_ROUTED_LOG_FILE, '', err => {
if (err) throw err;
fs.writeFile(this.PREPROCESS_LOG_FILE, '', err => {
if (err) throw err;
fs.writeFile(this.LOG_FILE, '', err => {
if (err) throw err;
callback();
});
});
});
};
var log = this.log = (s, type) => {
s = s || '';
type = type || null;
var file = type === 'preprocess' ? this.PREPROCESS_LOG_FILE : this.LOG_FILE;
fs.appendFile(file, s + '\n', err => {
if (err) throw err;
});
};
this.logScenarioFailInfo = () => {
if (this.hasLoggedScenarioInfo) return;
log('=========================================');
log('Failed scenario: ' + this.scenarioTitle);
log('Time: ' + this.scenarioTime);
log('Fingerprint osm stage: ' + this.osmData.fingerprintOSM);
log('Fingerprint extract stage: ' + this.fingerprintExtract);
log('Fingerprint contract stage: ' + this.fingerprintContract);
log('Fingerprint route stage: ' + this.fingerprintRoute);
log('Profile: ' + this.profile);
log();
log('```xml'); // so output can be posted directly to github comment fields
log(this.osmData.str.trim());
log('```');
log();
log();
this.hasLoggedScenarioInfo = true;
};
this.logFail = (expected, got, attempts) => {
this.logScenarioFailInfo();
log('== ');
log('Expected: ' + JSON.stringify(expected));
log('Got: ' + JSON.stringify(got));
log();
['route','forw','backw'].forEach((direction) => {
if (attempts[direction]) {
log('Direction: ' + direction);
log('Query: ' + attempts[direction].query);
log('Response: ' + attempts[direction].response.body);
log();
}
});
};
this.logPreprocessInfo = () => {
if (this.hasLoggedPreprocessInfo) return;
log('=========================================', 'preprocess');
log('Preprocessing data for scenario: ' + this.scenarioTitle, 'preprocess');
log('Time: ' + this.scenarioTime, 'preprocess');
log('', 'preprocess');
log('== OSM data:', 'preprocess');
log('```xml', 'preprocess'); // so output can be posted directly to github comment fields
log(this.osmData.str, 'preprocess');
log('```', 'preprocess');
log('', 'preprocess');
log('== Profile:', 'preprocess');
log(this.profile, 'preprocess');
log('', 'preprocess');
this.hasLoggedPreprocessInfo = true;
};
this.logPreprocess = (str) => {
this.logPreprocessInfo();
log(str, 'preprocess');
};
this.logPreprocessDone = () => {
log('Done with preprocessing at ' + new Date(), 'preprocess');
};
};

View File

@ -1,88 +0,0 @@
# logging
PREPROCESS_LOG_FILE = 'preprocessing.log'
LOG_FILE = 'fail.log'
def clear_log_files
Dir.chdir TEST_FOLDER do
# emptying existing files, rather than deleting and writing new ones makes it
# easier to use tail -f from the command line
`echo '' > #{OSRM_ROUTED_LOG_FILE}`
`echo '' > #{PREPROCESS_LOG_FILE}`
`echo '' > #{LOG_FILE}`
end
end
def log s='', type=nil
if type == :preprocess
file = PREPROCESS_LOG_FILE
else
file = LOG_FILE
end
File.open(file, 'a') {|f| f.write("#{s}\n") }
end
def log_scenario_fail_info
return if @has_logged_scenario_info
log "========================================="
log "Failed scenario: #{@scenario_title}"
log "Time: #{@scenario_time}"
log "Fingerprint osm stage: #{@fingerprint_osm}"
log "Fingerprint extract stage: #{@fingerprint_extract}"
log "Fingerprint prepare stage: #{@fingerprint_prepare}"
log "Fingerprint route stage: #{@fingerprint_route}"
log "Profile: #{@profile}"
log
log '```xml' #so output can be posted directly to github comment fields
log osm_str.strip
log '```'
log
log
@has_logged_scenario_info = true
end
def log_fail expected,got,attempts
return
log_scenario_fail_info
log "== "
log "Expected: #{expected}"
log "Got: #{got}"
log
['route','forw','backw'].each do |direction|
if attempts[direction]
attempts[direction]
log "Direction: #{direction}"
log "Query: #{attempts[direction][:query]}"
log "Response: #{attempts[direction][:response].body}"
log
end
end
end
def log_preprocess_info
return if @has_logged_preprocess_info
log "=========================================", :preprocess
log "Preprocessing data for scenario: #{@scenario_title}", :preprocess
log "Time: #{@scenario_time}", :preprocess
log '', :preprocess
log "== OSM data:", :preprocess
log '```xml', :preprocess #so output can be posted directly to github comment fields
log osm_str, :preprocess
log '```', :preprocess
log '', :preprocess
log "== Profile:", :preprocess
log @profile, :preprocess
log '', :preprocess
@has_logged_preprocess_info = true
end
def log_preprocess str
log_preprocess_info
log str, :preprocess
end
def log_preprocess_done
end

View File

@ -1,25 +0,0 @@
require 'OSM/StreamParser'
locations = nil
class OSMTestParserCallbacks < OSM::Callbacks
locations = nil
def self.locations
if locations
locations
else
#parse the test file, so we can later reference nodes and ways by name in tests
locations = {}
file = 'test/data/test.osm'
callbacks = OSMTestParserCallbacks.new
parser = OSM::StreamParser.new(:filename => file, :callbacks => callbacks)
parser.parse
puts locations
end
end
def node(node)
locations[node.name] = [node.lat,node.lon]
end
end

View File

@ -1,14 +0,0 @@
#monkey-patch osmlib to fix a bug
module OSM
class Way
def to_xml(xml)
xml.way(attributes) do
nodes.each do |node|
xml.nd(:ref => node)
end
tags.to_xml(xml)
end
end
end
end

160
features/support/route.js Normal file
View File

@ -0,0 +1,160 @@
var Timeout = require('node-timeout');
var request = require('request');
module.exports = function () {
this.requestPath = (service, params, callback) => {
var uri = [this.HOST, service].join('/');
return this.sendRequest(uri, params, callback);
};
this.requestUrl = (path, callback) => {
var uri = this.query = [this.HOST, path].join('/'),
limit = Timeout(this.OSRM_TIMEOUT, { err: { statusCode: 408 } });
function runRequest (cb) {
request(uri, cb);
}
runRequest(limit((err, res, body) => {
if (err) {
if (err.statusCode === 408) return callback(this.RoutedError('*** osrm-routed did not respond'));
else if (err.code === 'ECONNREFUSED')
return callback(this.RoutedError('*** osrm-routed is not running'));
else return callback(err);
} else
return callback(err, res, body);
}));
};
// Overwrites the default values in defaults
// e.g. [[a, 1], [b, 2]], [[a, 5], [d, 10]] => [[a, 5], [b, 2], [d, 10]]
this.overwriteParams = (defaults, other) => {
var merged = {};
var overwrite = (o) => {
merged[o[0]] = o[1];
};
defaults.forEach(overwrite);
other.forEach(overwrite);
return Object.keys(merged).map((key) => [key, merged[key]]);
};
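// Worked example (values are illustrative only):
//   overwriteParams([['a', 1], ['b', 2]], [['a', 5], ['d', 10]])
//   => [['a', 5], ['b', 2], ['d', 10]]
// Later pairs win and key order follows first appearance, matching the comment above.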
var encodeWaypoints = (waypoints) => {
return waypoints.map(w => ['loc', [w.lat, w.lon].join(',')]);
};
this.requestRoute = (waypoints, bearings, userParams, callback) => {
if (bearings.length && bearings.length !== waypoints.length) throw new Error('*** number of bearings does not equal the number of waypoints');
var defaults = [['output','json'], ['instructions','true'], ['alt',false]],
params = this.overwriteParams(defaults, userParams),
encodedWaypoints = encodeWaypoints(waypoints);
if (bearings.length) {
var encodedBearings = bearings.map(b => ['b', b.toString()]);
params = Array.prototype.concat.apply(params, encodedWaypoints.map((o, i) => [o, encodedBearings[i]]));
} else {
params = params.concat(encodedWaypoints);
}
return this.requestPath('viaroute', params, callback);
};
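// For instance (hypothetical waypoints, no user params), two waypoints with bearings 45 and 90
// yield a parameter list along the lines of:
//   [['output','json'], ['instructions','true'], ['alt',false],
//    ['loc','1,1'], ['b','45'], ['loc','2,2'], ['b','90']]
// i.e. each waypoint is immediately followed by its bearing.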
this.requestNearest = (node, userParams, callback) => {
var defaults = [['output', 'json']],
params = this.overwriteParams(defaults, userParams);
params.push(['loc', [node.lat, node.lon].join(',')]);
return this.requestPath('nearest', params, callback);
};
this.requestTable = (waypoints, userParams, callback) => {
var defaults = [['output', 'json']],
params = this.overwriteParams(defaults, userParams);
params = params.concat(waypoints.map(w => [w.type, [w.coord.lat, w.coord.lon].join(',')]));
return this.requestPath('table', params, callback);
};
this.requestTrip = (waypoints, userParams, callback) => {
var defaults = [['output', 'json']],
params = this.overwriteParams(defaults, userParams);
params = params.concat(encodeWaypoints(waypoints));
return this.requestPath('trip', params, callback);
};
this.requestMatching = (waypoints, timestamps, userParams, callback) => {
var defaults = [['output', 'json']],
params = this.overwriteParams(defaults, userParams);
var encodedWaypoints = encodeWaypoints(waypoints);
if (timestamps.length) {
var encodedTimestamps = timestamps.map(t => ['t', t.toString()]);
params = Array.prototype.concat.apply(params, encodedWaypoints.map((o, i) => [o, encodedTimestamps[i]]));
} else {
params = params.concat(encodedWaypoints);
}
return this.requestPath('match', params, callback);
};
this.extractInstructionList = (instructions, index, postfix) => {
postfix = postfix || null;
if (instructions) {
return instructions.filter(r => r[0].toString() !== this.DESTINATION_REACHED.toString())
.map(r => r[index])
.map(r => (isNaN(parseInt(r)) && (!r || r == '')) ? '""' : '' + r + (postfix || ''))
.join(',');
}
};
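// Sketch with hypothetical viaroute instruction rows, assuming index 1 holds the street name
// ([instruction-code, street-name, length, ...]):
//   extractInstructionList([['10','A St',250], ['1','B St',100], ['15','',0]], 1)
//   => 'A St,B St'   // the DESTINATION_REACHED (15) row is dropped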
this.wayList = (instructions) => {
return this.extractInstructionList(instructions, 1);
};
this.compassList = (instructions) => {
return this.extractInstructionList(instructions, 6);
};
this.bearingList = (instructions) => {
return this.extractInstructionList(instructions, 7);
};
this.turnList = (instructions) => {
var types = {
'0': 'none',
'1': 'straight',
'2': 'slight_right',
'3': 'right',
'4': 'sharp_right',
'5': 'u_turn',
'6': 'sharp_left',
'7': 'left',
'8': 'slight_left',
'9': 'via',
'10': 'head',
'11': 'enter_roundabout',
'12': 'leave_roundabout',
'13': 'stay_roundabout',
'14': 'start_end_of_street',
'15': 'destination',
'16': 'name_changes',
'17': 'enter_contraflow',
'18': 'leave_contraflow'
};
// replace instructions codes with strings, e.g. '11-3' gets converted to 'enter_roundabout-3'
return instructions ? instructions.map(r => r[0].toString().replace(/^(\d*)/, (match, num) => types[num])).join(',') : instructions;
};
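// e.g. hypothetical rows [['11-3'], ['1'], ['15']] render as
// 'enter_roundabout-3,straight,destination'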
this.modeList = (instructions) => {
return this.extractInstructionList(instructions, 8);
};
this.timeList = (instructions) => {
return this.extractInstructionList(instructions, 4, 's');
};
this.distanceList = (instructions) => {
return this.extractInstructionList(instructions, 2, 'm');
};
};

View File

@ -1,182 +0,0 @@
require 'net/http'
HOST = "http://127.0.0.1:#{OSRM_PORT}"
DESTINATION_REACHED = 15 #OSRM instruction code
def request_path service, params
uri = "#{HOST}/" + service
response = send_request uri, params
return response
end
def request_url path
uri = URI.parse"#{HOST}/#{path}"
@query = uri.to_s
Timeout.timeout(OSRM_TIMEOUT) do
Net::HTTP.get_response uri
end
rescue Errno::ECONNREFUSED => e
raise "*** osrm-routed is not running."
rescue Timeout::Error
raise "*** osrm-routed did not respond."
end
# Overwrites the default values in defaults.
# e.g. [[a, 1], [b, 2]], [[a, 5], [d, 10]] => [[a, 5], [b, 2], [d, 10]]
def overwrite_params defaults, other
merged = []
defaults.each do |k,v|
idx = other.index { |p| p[0] == k }
if idx == nil then
merged << [k, v]
else
merged << [k, other[idx][1]]
end
end
other.each do |k,v|
if merged.index { |pair| pair[0] == k} == nil then
merged << [k, v]
end
end
return merged
end
def request_route waypoints, bearings, user_params
raise "*** number of bearings does not equal the number of waypoints" unless bearings.size == 0 || bearings.size == waypoints.size
defaults = [['output','json'], ['instructions',true], ['alt',false]]
params = overwrite_params defaults, user_params
encoded_waypoint = waypoints.map{ |w| ["loc","#{w.lat},#{w.lon}"] }
if bearings.size > 0
encoded_bearings = bearings.map { |b| ["b", b.to_s]}
params = params.concat encoded_waypoint.zip(encoded_bearings).flatten! 1
else
params = params.concat encoded_waypoint
end
return request_path "viaroute", params
end
def request_nearest node, user_params
defaults = [['output', 'json']]
params = overwrite_params defaults, user_params
params << ["loc", "#{node.lat},#{node.lon}"]
return request_path "nearest", params
end
def request_table waypoints, user_params
defaults = [['output', 'json']]
params = overwrite_params defaults, user_params
params = params.concat waypoints.map{ |w| [w[:type],"#{w[:coord].lat},#{w[:coord].lon}"] }
return request_path "table", params
end
def request_trip waypoints, user_params
defaults = [['output', 'json']]
params = overwrite_params defaults, user_params
params = params.concat waypoints.map{ |w| ["loc","#{w.lat},#{w.lon}"] }
return request_path "trip", params
end
def request_matching waypoints, timestamps, user_params
defaults = [['output', 'json']]
params = overwrite_params defaults, user_params
encoded_waypoint = waypoints.map{ |w| ["loc","#{w.lat},#{w.lon}"] }
if timestamps.size > 0
encoded_timestamps = timestamps.map { |t| ["t", t.to_s]}
params = params.concat encoded_waypoint.zip(encoded_timestamps).flatten! 1
else
params = params.concat encoded_waypoint
end
return request_path "match", params
end
def got_route? response
if response.code == "200" && !response.body.empty?
json = JSON.parse response.body
if json['status'] == 200
return way_list( json['route_instructions']).empty? == false
end
end
return false
end
def route_status response
if response.code == "200" && !response.body.empty?
json = JSON.parse response.body
return json['status']
else
"HTTP #{response.code}"
end
end
def extract_instruction_list instructions, index, postfix=nil
if instructions
instructions.reject { |r| r[0].to_s=="#{DESTINATION_REACHED}" }.
map { |r| r[index] }.
map { |r| (r=="" || r==nil) ? '""' : "#{r}#{postfix}" }.
join(',')
end
end
def way_list instructions
extract_instruction_list instructions, 1
end
def compass_list instructions
extract_instruction_list instructions, 6
end
def bearing_list instructions
extract_instruction_list instructions, 7
end
def turn_list instructions
if instructions
types = {
0 => :none,
1 => :straight,
2 => :slight_right,
3 => :right,
4 => :sharp_right,
5 => :u_turn,
6 => :sharp_left,
7 => :left,
8 => :slight_left,
9 => :via,
10 => :head,
11 => :enter_roundabout,
12 => :leave_roundabout,
13 => :stay_roundabout,
14 => :start_end_of_street,
15 => :destination,
16 => :name_changes,
17 => :enter_contraflow,
18 => :leave_contraflow
}
# replace instructions codes with strings
# "11-3" (enter roundabout and leave a 3rd exit) gets converted to "enter_roundabout-3"
instructions.map do |r|
r[0].to_s.gsub(/^\d*/) do |match|
types[match.to_i].to_s
end
end.join(',')
end
end
def mode_list instructions
extract_instruction_list instructions, 8
end
def time_list instructions
extract_instruction_list instructions, 4, "s"
end
def distance_list instructions
extract_instruction_list instructions, 2, "m"
end

40
features/support/run.js Normal file
View File

@ -0,0 +1,40 @@
var fs = require('fs');
var util = require('util');
var exec = require('child_process').exec;
module.exports = function () {
this.runBin = (bin, options, callback) => {
var opts = options.slice();
if (opts.match('{osm_base}')) {
if (!this.osmData.osmFile) throw new Error('*** {osm_base} is missing');
opts = opts.replace('{osm_base}', this.osmData.osmFile);
}
if (opts.match('{extracted_base}')) {
if (!this.osmData.extractedFile) throw new Error('*** {extracted_base} is missing');
opts = opts.replace('{extracted_base}', this.osmData.extractedFile);
}
if (opts.match('{contracted_base}')) {
if (!this.osmData.contractedFile) throw new Error('*** {contracted_base} is missing');
opts = opts.replace('{contracted_base}', this.osmData.contractedFile);
}
if (opts.match('{profile}')) {
opts = opts.replace('{profile}', [this.PROFILES_PATH, this.profile + '.lua'].join('/'));
}
var cmd = util.format('%s%s%s/%s%s%s %s 2>%s', this.QQ, this.LOAD_LIBRARIES, this.BIN_PATH, bin, this.EXE, this.QQ, opts, this.ERROR_LOG_FILE);
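// With purely illustrative values (QQ='', LOAD_LIBRARIES='', BIN_PATH='../build', EXE='',
// profile 'testbot', PROFILES_PATH '../profiles', ERROR_LOG_FILE 'error.log'), a call like
//   runBin('osrm-extract', '{osm_base}.osm --profile {profile}', cb)
// expands cmd to roughly:
//   ../build/osrm-extract cache/<fingerprint>.osm --profile ../profiles/testbot.lua 2>error.log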
process.chdir(this.TEST_FOLDER);
exec(cmd, (err, stdout, stderr) => {
this.stdout = stdout.toString();
fs.readFile(this.ERROR_LOG_FILE, (e, data) => {
this.stderr = data ? data.toString() : '';
this.exitCode = err && err.code || 0;
process.chdir('../');
callback(err, stdout, stderr);
});
});
};
};

View File

@ -1,28 +0,0 @@
def run_bin bin, options
Dir.chdir TEST_FOLDER do
opt = options.dup
if opt.include? '{osm_base}'
raise "*** {osm_base} is missing" unless osm_file
opt.gsub! "{osm_base}", "#{osm_file}"
end
if opt.include? '{extracted_base}'
raise "*** {extracted_base} is missing" unless extracted_file
opt.gsub! "{extracted_base}", "#{extracted_file}"
end
if opt.include? '{contracted_base}'
raise "*** {contracted_base} is missing" unless contracted_file
opt.gsub! "{contracted_base}", "#{contracted_file}"
end
if opt.include? '{profile}'
opt.gsub! "{profile}", "#{PROFILES_PATH}/#{@profile}.lua"
end
cmd = "#{QQ}#{LOAD_LIBRARIES}#{BIN_PATH}/#{bin}#{EXE}#{QQ} #{opt} 2>error.log"
@stdout = `#{cmd}`
@stderr = File.read 'error.log'
@exit_code = $?.exitstatus
end
end

View File

@ -0,0 +1,203 @@
var util = require('util');
var assert = require('assert');
module.exports = function () {
this.ShouldGetAResponse = () => {
assert.equal(this.response.statusCode, 200);
assert.ok(this.response.body);
assert.ok(this.response.body.length);
};
this.ShouldBeValidJSON = (callback) => {
try {
this.json = JSON.parse(this.response.body);
callback();
} catch (e) {
callback(e);
}
};
this.ShouldBeWellFormed = () => {
assert.equal(typeof this.json.status, 'number');
};
this.WhenIRouteIShouldGet = (table, callback) => {
this.reprocessAndLoadData(() => {
var headers = new Set(table.raw()[0]);
var requestRow = (row, ri, cb) => {
var got,
json;
var afterRequest = (err, res, body) => {
if (err) return cb(err);
if (body && body.length) {
var instructions, bearings, compasses, turns, modes, times, distances;
json = JSON.parse(body);
var hasRoute = json.status === 200;
if (hasRoute) {
instructions = this.wayList(json.route_instructions);
bearings = this.bearingList(json.route_instructions);
compasses = this.compassList(json.route_instructions);
turns = this.turnList(json.route_instructions);
modes = this.modeList(json.route_instructions);
times = this.timeList(json.route_instructions);
distances = this.distanceList(json.route_instructions);
}
if (headers.has('status')) {
got.status = json.status.toString();
}
if (headers.has('message')) {
got.message = json.status_message;
}
if (headers.has('#')) {
// comment column
got['#'] = row['#'];
}
if (headers.has('start')) {
got.start = instructions ? json.route_summary.start_point : null;
}
if (headers.has('end')) {
got.end = instructions ? json.route_summary.end_point : null;
}
if (headers.has('geometry')) {
got.geometry = json.route_geometry;
}
if (headers.has('route')) {
got.route = (instructions || '').trim();
if (headers.has('alternative')) {
got.alternative = json.found_alternative ?
this.wayList(json.alternative_instructions[0]) : '';
}
var distance = hasRoute && json.route_summary.total_distance,
time = hasRoute && json.route_summary.total_time;
if (headers.has('distance')) {
if (row.distance.length) {
if (!row.distance.match(/\d+m/))
throw new Error('*** Distance must be specified in meters. (ex: 250m)');
got.distance = instructions ? util.format('%dm', distance) : '';
} else {
got.distance = '';
}
}
if (headers.has('time')) {
if (!row.time.match(/\d+s/))
throw new Error('*** Time must be specified in seconds. (ex: 60s)');
got.time = instructions ? util.format('%ds', time) : '';
}
if (headers.has('speed')) {
if (row.speed !== '' && instructions) {
if (!row.speed.match(/\d+ km\/h/))
throw new Error('*** Speed must be specified in km/h. (ex: 50 km/h)');
var speed = time > 0 ? Math.round(3.6*distance/time) : null;
got.speed = util.format('%d km/h', speed);
} else {
got.speed = '';
}
}
var putValue = (key, value) => {
if (headers.has(key)) got[key] = instructions ? value : '';
};
putValue('bearing', bearings);
putValue('compass', compasses);
putValue('turns', turns);
putValue('modes', modes);
putValue('times', times);
putValue('distances', distances);
}
var ok = true;
for (var key in row) {
if (this.FuzzyMatch.match(got[key], row[key])) {
got[key] = row[key];
} else {
ok = false;
}
}
if (!ok) {
this.logFail(row, got, { route: { query: this.query, response: res }});
}
cb(null, got);
} else {
cb(new Error('request failed to return valid body'));
}
};
if (headers.has('request')) {
got = { request: row.request };
this.requestUrl(row.request, afterRequest);
} else {
var defaultParams = this.queryParams;
var userParams = [];
got = {};
for (var k in row) {
var match = k.match(/param:(.*)/);
if (match) {
if (row[k] === '(nil)') {
userParams.push([match[1], null]);
} else if (row[k]) {
userParams.push([match[1], row[k]]);
}
got[k] = row[k];
}
}
var params = this.overwriteParams(defaultParams, userParams),
waypoints = [],
bearings = [];
if (row.bearings) {
got.bearings = row.bearings;
bearings = row.bearings.split(' ').filter(b => !!b);
}
if (row.from && row.to) {
var fromNode = this.findNodeByName(row.from);
if (!fromNode) throw new Error(util.format('*** unknown from-node "%s"', row.from));
waypoints.push(fromNode);
var toNode = this.findNodeByName(row.to);
if (!toNode) throw new Error(util.format('*** unknown to-node "%s"', row.to));
waypoints.push(toNode);
got.from = row.from;
got.to = row.to;
this.requestRoute(waypoints, bearings, params, afterRequest);
} else if (row.waypoints) {
row.waypoints.split(',').forEach((n) => {
var node = this.findNodeByName(n.trim());
if (!node) throw new Error(util.format('*** unknown waypoint node "%s"', n.trim()));
waypoints.push(node);
});
got.waypoints = row.waypoints;
this.requestRoute(waypoints, bearings, params, afterRequest);
} else {
throw new Error('*** no waypoints');
}
}
};
this.processRowsAndDiff(table, requestRow, callback);
});
};
};

View File

@ -1,3 +0,0 @@
def shortcuts_hash
@shortcuts_hash ||= {}
end

View File

@ -0,0 +1,11 @@
var DifferentError = require('./exception_classes').TableDiffError;
module.exports = function () {
this.diffTables = (expected, actual, options, callback) => {
// this is a temp workaround while waiting for https://github.com/cucumber/cucumber-js/issues/534
var error = new DifferentError(expected, actual);
return callback(error.string);
};
};

View File

@ -34,7 +34,7 @@ Feature: Ways of loading data
| s | t | st | | s | t | st |
| t | s | st | | t | s | st |
Scenario: Load data datstore - xy Scenario: Load data datastore - xy
Given data is loaded with datastore Given data is loaded with datastore
Given the node map Given the node map
| x | y | | x | y |

35
package.json Normal file
View File

@ -0,0 +1,35 @@
{
"name": "osrm-backend-test-suite",
"version": "0.0.0",
"private": true,
"description": "The Open Source Routing Machine is a high performance routing engine written in C++11 designed to run on OpenStreetMap data.",
"dependencies": {
"cucumber": "^0.9.4",
"d3-queue": "^2.0.3",
"node-timeout": "0.0.4",
"request": "^2.69.0",
"xmlbuilder": "^4.2.1"
},
"scripts": {
"lint": "eslint -c ./.eslintrc features/step_definitions/ features/support/",
"test": "npm run lint && ./node_modules/cucumber/bin/cucumber.js features/ -p verify",
"clean-test": "rm -rf test/cache",
"cucumber": "./node_modules/cucumber/bin/cucumber.js"
},
"repository": {
"type": "git",
"url": "https://github.com/Project-OSRM/osrm-backend.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/Project-OSRM/osrm-backend/issues"
},
"homepage": "https://github.com/Project-OSRM/osrm-backend",
"engines": {
"node": ">=4.0.0"
},
"devDependencies": {
"eslint": "^2.4.0"
}
}

9
scripts/install_node.sh Normal file
View File

@ -0,0 +1,9 @@
# Here we set up the node version on the fly. Currently only node 4, but this can be used for other versions if need be.
# This is done manually so that the build works the same on OS X.
rm -rf ~/.nvm/ && git clone --depth 1 --branch v0.30.1 https://github.com/creationix/nvm.git ~/.nvm
source ~/.nvm/nvm.sh
nvm install $1
nvm use $1
node --version
npm --version
which node
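# Usage sketch (the exact CI invocation is an assumption, not shown in this commit):
#   source ./scripts/install_node.sh 4
# Sourcing keeps nvm's PATH changes in the calling shell.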