From 97872e34d8c4389a8e0702ed4857bc47658dc0f1 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sat, 2 Nov 2024 11:23:06 +0100 Subject: [PATCH 01/35] Use std::string_view for key type in json::Object --- include/nodejs/json_v8_renderer.hpp | 2 +- include/util/json_container.hpp | 2 +- include/util/json_deep_compare.hpp | 10 +++++----- include/util/json_renderer.hpp | 8 ++++---- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/include/nodejs/json_v8_renderer.hpp b/include/nodejs/json_v8_renderer.hpp index 9572744c2..a6b424da6 100644 --- a/include/nodejs/json_v8_renderer.hpp +++ b/include/nodejs/json_v8_renderer.hpp @@ -30,7 +30,7 @@ struct V8Renderer { Napi::Value child; std::visit(V8Renderer(env, child), keyValue.second); - obj.Set(keyValue.first, child); + obj.Set(keyValue.first.data(), child); } out = obj; } diff --git a/include/util/json_container.hpp b/include/util/json_container.hpp index 14ca9d52f..728b6e3e9 100644 --- a/include/util/json_container.hpp +++ b/include/util/json_container.hpp @@ -104,7 +104,7 @@ using Value = std::variant; */ struct Object { - std::unordered_map values; + std::unordered_map values; }; /** diff --git a/include/util/json_deep_compare.hpp b/include/util/json_deep_compare.hpp index 24b226ca7..d57f9cde1 100644 --- a/include/util/json_deep_compare.hpp +++ b/include/util/json_deep_compare.hpp @@ -44,13 +44,13 @@ struct Comparator bool operator()(const Object &lhs, const Object &rhs) const { - std::set lhs_keys; + std::set lhs_keys; for (const auto &key_value : lhs.values) { lhs_keys.insert(key_value.first); } - std::set rhs_keys; + std::set rhs_keys; for (const auto &key_value : rhs.values) { rhs_keys.insert(key_value.first); @@ -60,7 +60,7 @@ struct Comparator { if (rhs_keys.find(key) == rhs_keys.end()) { - reason = rhs_path + " doesn't have key \"" + key + "\""; + reason = rhs_path + " doesn't have key \"" + std::string(key) + "\""; return false; } } @@ -69,7 +69,7 @@ struct Comparator { if (lhs_keys.find(key) == lhs_keys.end()) { - reason = lhs_path + " doesn't have key \"" + key + "\""; + reason = lhs_path + " doesn't have key \"" + std::string(key) + "\""; return false; } } @@ -82,7 +82,7 @@ struct Comparator const auto &rhs_child = rhs.values.find(key)->second; const auto &lhs_child = lhs.values.find(key)->second; auto is_same = - std::visit(Comparator(reason, lhs_path + "." + key, rhs_path + "." + key), + std::visit(Comparator(reason, lhs_path + "." + std::string(key), rhs_path + "." 
+ std::string(key)), lhs_child, rhs_child); if (!is_same) diff --git a/include/util/json_renderer.hpp b/include/util/json_renderer.hpp index d1adfcce6..bdc3dcae0 100644 --- a/include/util/json_renderer.hpp +++ b/include/util/json_renderer.hpp @@ -97,7 +97,7 @@ template struct Renderer void operator()(const Null &) { write<>("null"); } private: - void write(const std::string &str); + void write(std::string_view str); void write(const char *str, size_t size); void write(char ch); @@ -110,7 +110,7 @@ template struct Renderer Out &out; }; -template <> void Renderer>::write(const std::string &str) +template <> void Renderer>::write(std::string_view str) { out.insert(out.end(), str.begin(), str.end()); } @@ -122,7 +122,7 @@ template <> void Renderer>::write(const char *str, size_t size template <> void Renderer>::write(char ch) { out.push_back(ch); } -template <> void Renderer::write(const std::string &str) { out << str; } +template <> void Renderer::write(std::string_view str) { out << str; } template <> void Renderer::write(const char *str, size_t size) { @@ -131,7 +131,7 @@ template <> void Renderer::write(const char *str, size_t size) template <> void Renderer::write(char ch) { out << ch; } -template <> void Renderer::write(const std::string &str) { out += str; } +template <> void Renderer::write(std::string_view str) { out += str; } template <> void Renderer::write(const char *str, size_t size) { From a94368cc8744ed5cbddfe69f3eb41b0f3cdc35b1 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sat, 2 Nov 2024 11:24:19 +0100 Subject: [PATCH 02/35] Use std::string_view for key type in json::Object --- include/util/json_deep_compare.hpp | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/include/util/json_deep_compare.hpp b/include/util/json_deep_compare.hpp index d57f9cde1..438386e79 100644 --- a/include/util/json_deep_compare.hpp +++ b/include/util/json_deep_compare.hpp @@ -81,10 +81,11 @@ struct Comparator const auto &rhs_child = rhs.values.find(key)->second; const auto &lhs_child = lhs.values.find(key)->second; - auto is_same = - std::visit(Comparator(reason, lhs_path + "." + std::string(key), rhs_path + "." + std::string(key)), - lhs_child, - rhs_child); + auto is_same = std::visit(Comparator(reason, + lhs_path + "." + std::string(key), + rhs_path + "." 
+ std::string(key)), + lhs_child, + rhs_child); if (!is_same) { return false; From 918c33cb2dc4a4dcd8f07e323ce326cb67f4ef29 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sat, 2 Nov 2024 11:36:10 +0100 Subject: [PATCH 03/35] wip From 085f852315e066a48dac55c3527bece3d3ca8cf2 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sat, 2 Nov 2024 14:20:01 +0100 Subject: [PATCH 04/35] wip --- scripts/ci/run_benchmarks.sh | 110 +++++++++++++++++------------------ 1 file changed, 55 insertions(+), 55 deletions(-) diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 5b092471d..1f328a385 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -64,70 +64,70 @@ function run_benchmarks_for_folder { $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" echo "Running alias" $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" - echo "Running json-render-bench" - $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench" - echo "Running packedvector-bench" - $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" - echo "Running rtree-bench" - $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" + # echo "Running json-render-bench" + # $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench" + # echo "Running packedvector-bench" + # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" + # echo "Running rtree-bench" + # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" - cp -rf $OSM_PBF $FOLDER/data.osm.pbf + # cp -rf $OSM_PBF $FOLDER/data.osm.pbf - echo "Running osrm-extract" - measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" - echo "Running osrm-partition" - measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" - echo "Running osrm-customize" - measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-customize $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_customize.bench" - echo "Running osrm-contract" - measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-contract $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_contract.bench" + # echo "Running osrm-extract" + # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" + # echo "Running osrm-partition" + # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" + # echo "Running osrm-customize" + # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-customize $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_customize.bench" + # echo "Running osrm-contract" + # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-contract $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_contract.bench" - for ALGORITHM in ch mld; do - for BENCH in nearest table trip route match; do - echo "Running node $BENCH $ALGORITHM" - START=$(date +%s.%N) - node $SCRIPTS_FOLDER/scripts/ci/bench.js $FOLDER/lib/binding/node_osrm.node $FOLDER/data.osrm $ALGORITHM $BENCH $GPS_TRACES > 
"$RESULTS_FOLDER/node_${BENCH}_${ALGORITHM}.bench" 5 - END=$(date +%s.%N) - DIFF=$(echo "$END - $START" | bc) - echo "Took: ${DIFF}s" - done - done - - for ALGORITHM in ch mld; do - for BENCH in nearest table trip route match; do - echo "Running random $BENCH $ALGORITHM" - START=$(date +%s.%N) - $BENCHMARKS_FOLDER/bench "$FOLDER/data.osrm" $ALGORITHM $GPS_TRACES ${BENCH} > "$RESULTS_FOLDER/random_${BENCH}_${ALGORITHM}.bench" 5 || true - END=$(date +%s.%N) - DIFF=$(echo "$END - $START" | bc) - echo "Took: ${DIFF}s" - done - done + # for ALGORITHM in ch mld; do + # for BENCH in nearest table trip route match; do + # echo "Running node $BENCH $ALGORITHM" + # START=$(date +%s.%N) + # node $SCRIPTS_FOLDER/scripts/ci/bench.js $FOLDER/lib/binding/node_osrm.node $FOLDER/data.osrm $ALGORITHM $BENCH $GPS_TRACES > "$RESULTS_FOLDER/node_${BENCH}_${ALGORITHM}.bench" 5 + # END=$(date +%s.%N) + # DIFF=$(echo "$END - $START" | bc) + # echo "Took: ${DIFF}s" + # done + # done + + # for ALGORITHM in ch mld; do + # for BENCH in nearest table trip route match; do + # echo "Running random $BENCH $ALGORITHM" + # START=$(date +%s.%N) + # $BENCHMARKS_FOLDER/bench "$FOLDER/data.osrm" $ALGORITHM $GPS_TRACES ${BENCH} > "$RESULTS_FOLDER/random_${BENCH}_${ALGORITHM}.bench" 5 || true + # END=$(date +%s.%N) + # DIFF=$(echo "$END - $START" | bc) + # echo "Took: ${DIFF}s" + # done + # done - for ALGORITHM in ch mld; do - $BINARIES_FOLDER/osrm-routed --algorithm $ALGORITHM $FOLDER/data.osrm > /dev/null 2>&1 & - OSRM_ROUTED_PID=$! + # for ALGORITHM in ch mld; do + # $BINARIES_FOLDER/osrm-routed --algorithm $ALGORITHM $FOLDER/data.osrm > /dev/null 2>&1 & + # OSRM_ROUTED_PID=$! - # wait for osrm-routed to start - if ! curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" > /dev/null 2>&1; then - echo "osrm-routed failed to start for algorithm $ALGORITHM" - kill -9 $OSRM_ROUTED_PID - continue - fi + # # wait for osrm-routed to start + # if ! 
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" > /dev/null 2>&1; then + # echo "osrm-routed failed to start for algorithm $ALGORITHM" + # kill -9 $OSRM_ROUTED_PID + # continue + # fi - for METHOD in route nearest trip table match; do - echo "Running e2e benchmark for $METHOD $ALGORITHM" - START=$(date +%s.%N) - python3 $SCRIPTS_FOLDER/scripts/ci/e2e_benchmark.py --host http://localhost:5000 --method $METHOD --iterations 5 --num_requests 1000 --gps_traces_file_path $GPS_TRACES > $RESULTS_FOLDER/e2e_${METHOD}_${ALGORITHM}.bench - END=$(date +%s.%N) - DIFF=$(echo "$END - $START" | bc) - echo "Took: ${DIFF}s" - done + # for METHOD in route nearest trip table match; do + # echo "Running e2e benchmark for $METHOD $ALGORITHM" + # START=$(date +%s.%N) + # python3 $SCRIPTS_FOLDER/scripts/ci/e2e_benchmark.py --host http://localhost:5000 --method $METHOD --iterations 5 --num_requests 1000 --gps_traces_file_path $GPS_TRACES > $RESULTS_FOLDER/e2e_${METHOD}_${ALGORITHM}.bench + # END=$(date +%s.%N) + # DIFF=$(echo "$END - $START" | bc) + # echo "Took: ${DIFF}s" + # done - kill -9 $OSRM_ROUTED_PID - done + # kill -9 $OSRM_ROUTED_PID + # done } run_benchmarks_for_folder From 1ae0a6e2756b8e978c2d174e2def8598d2210624 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sat, 2 Nov 2024 14:38:34 +0100 Subject: [PATCH 05/35] wip --- scripts/ci/run_benchmarks.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 1f328a385..98d65774d 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -66,15 +66,15 @@ function run_benchmarks_for_folder { $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" # echo "Running json-render-bench" # $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench" - # echo "Running packedvector-bench" - # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" - # echo "Running rtree-bench" - # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" + echo "Running packedvector-bench" + $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" + echo "Running rtree-bench" + $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" - # cp -rf $OSM_PBF $FOLDER/data.osm.pbf + cp -rf $OSM_PBF $FOLDER/data.osm.pbf - # echo "Running osrm-extract" - # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" + echo "Running osrm-extract" + measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" # echo "Running osrm-partition" # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" # echo "Running osrm-customize" From ea1f2be3d42841e4569bf98c930c3760f6bb5de1 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 11:58:34 +0100 Subject: [PATCH 06/35] wip --- .github/workflows/osrm-backend.yml | 1192 ++++++++++++++-------------- scripts/ci/run_benchmarks.sh | 4 +- 2 files 
changed, 599 insertions(+), 597 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index abbe09245..36171df39 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -23,643 +23,643 @@ concurrency: cancel-in-progress: true jobs: - windows-release-node: - needs: format-taginfo-docs - runs-on: windows-2022 - continue-on-error: false - env: - BUILD_TYPE: Release - steps: - - uses: actions/checkout@v4 - - run: pip install "conan<2.0.0" - - run: conan --version - - run: cmake --version - - uses: actions/setup-node@v4 - with: - node-version: 18 - - run: node --version - - run: npm --version - - name: Prepare environment - shell: bash - run: | - PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - - run: npm install --ignore-scripts - - run: npm link --ignore-scripts - - name: Build - shell: bash - run: | - mkdir build - cd build - cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. - cmake --build . --config Release + # windows-release-node: + # needs: format-taginfo-docs + # runs-on: windows-2022 + # continue-on-error: false + # env: + # BUILD_TYPE: Release + # steps: + # - uses: actions/checkout@v4 + # - run: pip install "conan<2.0.0" + # - run: conan --version + # - run: cmake --version + # - uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - run: node --version + # - run: npm --version + # - name: Prepare environment + # shell: bash + # run: | + # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + # - run: npm install --ignore-scripts + # - run: npm link --ignore-scripts + # - name: Build + # shell: bash + # run: | + # mkdir build + # cd build + # cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. + # cmake --build . --config Release - # TODO: MSVC goes out of memory when building our tests - # - name: Run tests - # shell: bash - # run: | - # cd build - # cmake --build . --config Release --target tests - # # TODO: run tests - # - name: Run node tests - # shell: bash - # run: | - # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf + # # TODO: MSVC goes out of memory when building our tests + # # - name: Run tests + # # shell: bash + # # run: | + # # cd build + # # cmake --build . 
--config Release --target tests + # # # TODO: run tests + # # - name: Run node tests + # # shell: bash + # # run: | + # # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf - # mkdir -p test/data/ch - # cp test/data/monaco.osrm* test/data/ch/ - # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm + # # mkdir -p test/data/ch + # # cp test/data/monaco.osrm* test/data/ch/ + # # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm - # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm - # node test/nodejs/index.js - - name: Build Node package - shell: bash - run: ./scripts/ci/node_package.sh - - name: Publish Node package - if: ${{ env.PUBLISH == 'On' }} - uses: ncipollo/release-action@v1 - with: - allowUpdates: true - artifactErrorsFailBuild: true - artifacts: build/stage/**/*.tar.gz - omitBody: true - omitBodyDuringUpdate: true - omitName: true - omitNameDuringUpdate: true - replacesArtifacts: true - token: ${{ secrets.GITHUB_TOKEN }} + # # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm + # # node test/nodejs/index.js + # - name: Build Node package + # shell: bash + # run: ./scripts/ci/node_package.sh + # - name: Publish Node package + # if: ${{ env.PUBLISH == 'On' }} + # uses: ncipollo/release-action@v1 + # with: + # allowUpdates: true + # artifactErrorsFailBuild: true + # artifacts: build/stage/**/*.tar.gz + # omitBody: true + # omitBodyDuringUpdate: true + # omitName: true + # omitNameDuringUpdate: true + # replacesArtifacts: true + # token: ${{ secrets.GITHUB_TOKEN }} - format-taginfo-docs: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 18 - - name: Enable Node.js cache - uses: actions/cache@v4 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- - - name: Prepare environment - run: | - npm ci --ignore-scripts - clang-format-15 --version - - name: Run checks - run: | - ./scripts/check_taginfo.py taginfo.json profiles/car.lua - ./scripts/format.sh && ./scripts/error_on_dirty.sh - node ./scripts/validate_changelog.js - npm run docs && ./scripts/error_on_dirty.sh - npm audit --production + # format-taginfo-docs: + # runs-on: ubuntu-22.04 + # steps: + # - uses: actions/checkout@v4 + # - name: Use Node.js + # uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - name: Enable Node.js cache + # uses: actions/cache@v4 + # with: + # path: ~/.npm + # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + # restore-keys: | + # ${{ runner.os }}-node- + # - name: Prepare environment + # run: | + # npm ci --ignore-scripts + # clang-format-15 --version + # - name: Run checks + # run: | + # ./scripts/check_taginfo.py taginfo.json profiles/car.lua + # ./scripts/format.sh && ./scripts/error_on_dirty.sh + # node ./scripts/validate_changelog.js + # npm run docs && ./scripts/error_on_dirty.sh + # npm audit --production - docker-image-matrix: - strategy: - matrix: - docker-base-image: ["debian", "alpine"] - needs: format-taginfo-docs - runs-on: ubuntu-22.04 - continue-on-error: false - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Enable osm.pbf cache - uses: actions/cache@v4 - with: - path: berlin-latest.osm.pbf - key: v1-berlin-osm-pbf - restore-keys: | - v1-berlin-osm-pbf - - name: Docker build - run: | - docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . 
- - name: Test Docker image - run: | - if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then - wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf - fi - TAG=osrm-backend-local - # when `--memory-swap` value equals `--memory` it means container won't use swap - # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details - MEMORY_ARGS="--memory=1g --memory-swap=1g" - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson - if [ ! -s "${PWD}/berlin-latest.geojson" ] - then - >&2 echo "No berlin-latest.geojson found" - exit 1 - fi - # removing `.osrm.nbg` to check that whole pipeline works without it - rm -rf "${PWD}/berlin-latest.osrm.nbg" + # docker-image-matrix: + # strategy: + # matrix: + # docker-base-image: ["debian", "alpine"] + # needs: format-taginfo-docs + # runs-on: ubuntu-22.04 + # continue-on-error: false + # steps: + # - name: Check out the repo + # uses: actions/checkout@v4 + # - name: Enable osm.pbf cache + # uses: actions/cache@v4 + # with: + # path: berlin-latest.osm.pbf + # key: v1-berlin-osm-pbf + # restore-keys: | + # v1-berlin-osm-pbf + # - name: Docker build + # run: | + # docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . + # - name: Test Docker image + # run: | + # if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then + # wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf + # fi + # TAG=osrm-backend-local + # # when `--memory-swap` value equals `--memory` it means container won't use swap + # # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details + # MEMORY_ARGS="--memory=1g --memory-swap=1g" + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson + # if [ ! 
-s "${PWD}/berlin-latest.geojson" ] + # then + # >&2 echo "No berlin-latest.geojson found" + # exit 1 + # fi + # # removing `.osrm.nbg` to check that whole pipeline works without it + # rm -rf "${PWD}/berlin-latest.osrm.nbg" - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm - docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & - curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" - docker stop osrm-container + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm + # docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & + # curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" + # docker stop osrm-container - build-test-publish: - needs: format-taginfo-docs - strategy: - matrix: - include: - - name: gcc-13-debug-cov - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: gcc-13 - CUCUMBER_TIMEOUT: 20000 - CXXCOMPILER: g++-13 - ENABLE_COVERAGE: ON + # build-test-publish: + # needs: format-taginfo-docs + # strategy: + # matrix: + # include: + # - name: gcc-13-debug-cov + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: gcc-13 + # CUCUMBER_TIMEOUT: 20000 + # CXXCOMPILER: g++-13 + # ENABLE_COVERAGE: ON - - name: clang-18-debug-asan-ubsan - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CUCUMBER_TIMEOUT: 20000 - CXXCOMPILER: clang++-18 - ENABLE_SANITIZER: ON - TARGET_ARCH: x86_64-asan-ubsan - OSRM_CONNECTION_RETRIES: 10 - OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 + # - name: clang-18-debug-asan-ubsan + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CUCUMBER_TIMEOUT: 20000 + # CXXCOMPILER: clang++-18 + # ENABLE_SANITIZER: ON + # TARGET_ARCH: x86_64-asan-ubsan + # OSRM_CONNECTION_RETRIES: 10 + # OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 - - name: clang-18-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-18-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - - name: clang-18-debug - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-18-debug + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - - name: clang-18-debug-clang-tidy - 
continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_CLANG_TIDY: ON + # - name: clang-18-debug-clang-tidy + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_CLANG_TIDY: ON - - name: clang-17-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-17 - CXXCOMPILER: clang++-17 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-17-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-17 + # CXXCOMPILER: clang++-17 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - - name: clang-16-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-16-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - - name: conan-linux-debug-asan-ubsan - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - ENABLE_CONAN: ON - ENABLE_SANITIZER: ON - ENABLE_LTO: OFF + # - name: conan-linux-debug-asan-ubsan + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # ENABLE_CONAN: ON + # ENABLE_SANITIZER: ON + # ENABLE_LTO: OFF - - name: conan-linux-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - ENABLE_CONAN: ON - ENABLE_LTO: OFF + # - name: conan-linux-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # ENABLE_CONAN: ON + # ENABLE_LTO: OFF - - name: gcc-14-release - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-14 - CXXCOMPILER: g++-14 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + # - name: gcc-14-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-14 + # CXXCOMPILER: g++-14 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - name: gcc-13-release - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-13 - CXXCOMPILER: g++-13 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + # - name: gcc-13-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-13 + # CXXCOMPILER: g++-13 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - name: gcc-12-release - continue-on-error: false - node: 20 - runs-on: ubuntu-22.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-12 - CXXCOMPILER: g++-12 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + # - name: gcc-12-release + # continue-on-error: false + # node: 20 + 
# runs-on: ubuntu-22.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-12 + # CXXCOMPILER: g++-12 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - name: conan-linux-release-node - build_node_package: true - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TYPE: Release - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - ENABLE_CONAN: ON - NODE_PACKAGE_TESTS_ONLY: ON + # - name: conan-linux-release-node + # build_node_package: true + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TYPE: Release + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # ENABLE_CONAN: ON + # NODE_PACKAGE_TESTS_ONLY: ON - - name: conan-linux-debug-node - build_node_package: true - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TYPE: Debug - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - ENABLE_CONAN: ON - NODE_PACKAGE_TESTS_ONLY: ON + # - name: conan-linux-debug-node + # build_node_package: true + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TYPE: Debug + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # ENABLE_CONAN: ON + # NODE_PACKAGE_TESTS_ONLY: ON - - name: conan-macos-x64-release-node - build_node_package: true - continue-on-error: true - node: 20 - runs-on: macos-13 # x86_64 - BUILD_TYPE: Release - CCOMPILER: clang - CXXCOMPILER: clang++ - CUCUMBER_TIMEOUT: 60000 - ENABLE_ASSERTIONS: ON - ENABLE_CONAN: ON + # - name: conan-macos-x64-release-node + # build_node_package: true + # continue-on-error: true + # node: 20 + # runs-on: macos-13 # x86_64 + # BUILD_TYPE: Release + # CCOMPILER: clang + # CXXCOMPILER: clang++ + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_ASSERTIONS: ON + # ENABLE_CONAN: ON - - name: conan-macos-arm64-release-node - build_node_package: true - continue-on-error: true - node: 20 - runs-on: macos-14 # arm64 - BUILD_TYPE: Release - CCOMPILER: clang - CXXCOMPILER: clang++ - CUCUMBER_TIMEOUT: 60000 - ENABLE_ASSERTIONS: ON - ENABLE_CONAN: ON + # - name: conan-macos-arm64-release-node + # build_node_package: true + # continue-on-error: true + # node: 20 + # runs-on: macos-14 # arm64 + # BUILD_TYPE: Release + # CCOMPILER: clang + # CXXCOMPILER: clang++ + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_ASSERTIONS: ON + # ENABLE_CONAN: ON - name: ${{ matrix.name}} - continue-on-error: ${{ matrix.continue-on-error }} - runs-on: ${{ matrix.runs-on }} - env: - BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }} - BUILD_TYPE: ${{ matrix.BUILD_TYPE }} - BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }} - CCOMPILER: ${{ matrix.CCOMPILER }} - CFLAGS: ${{ matrix.CFLAGS }} - CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }} - CXXCOMPILER: ${{ matrix.CXXCOMPILER }} - CXXFLAGS: ${{ matrix.CXXFLAGS }} - ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }} - ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }} - ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }} - ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }} - ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }} - NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }} - TARGET_ARCH: ${{ matrix.TARGET_ARCH }} - OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }} - OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }} - ENABLE_LTO: ${{ matrix.ENABLE_LTO }} - steps: - - uses: actions/checkout@v4 - - name: Build machine architecture - run: uname -m - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node }} - - name: Enable Node.js cache - uses: actions/cache@v4 - with: - path: ~/.npm - key: ${{ runner.os 
}}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- - - name: Enable compiler cache - uses: actions/cache@v4 - with: - path: ~/.ccache - key: ccache-${{ matrix.name }}-${{ github.sha }} - restore-keys: | - ccache-${{ matrix.name }}- - - name: Enable Conan cache - uses: actions/cache@v4 - with: - path: ~/.conan - key: v9-conan-${{ matrix.name }}-${{ github.sha }} - restore-keys: | - v9-conan-${{ matrix.name }}- - - name: Enable test cache - uses: actions/cache@v4 - with: - path: ${{github.workspace}}/test/cache - key: v4-test-${{ matrix.name }}-${{ github.sha }} - restore-keys: | - v4-test-${{ matrix.name }}- - - name: Prepare environment - run: | - echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV - mkdir -p $HOME/.ccache + # name: ${{ matrix.name}} + # continue-on-error: ${{ matrix.continue-on-error }} + # runs-on: ${{ matrix.runs-on }} + # env: + # BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }} + # BUILD_TYPE: ${{ matrix.BUILD_TYPE }} + # BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }} + # CCOMPILER: ${{ matrix.CCOMPILER }} + # CFLAGS: ${{ matrix.CFLAGS }} + # CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }} + # CXXCOMPILER: ${{ matrix.CXXCOMPILER }} + # CXXFLAGS: ${{ matrix.CXXFLAGS }} + # ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }} + # ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }} + # ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }} + # ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }} + # ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }} + # NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }} + # TARGET_ARCH: ${{ matrix.TARGET_ARCH }} + # OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }} + # OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }} + # ENABLE_LTO: ${{ matrix.ENABLE_LTO }} + # steps: + # - uses: actions/checkout@v4 + # - name: Build machine architecture + # run: uname -m + # - name: Use Node.js + # uses: actions/setup-node@v4 + # with: + # node-version: ${{ matrix.node }} + # - name: Enable Node.js cache + # uses: actions/cache@v4 + # with: + # path: ~/.npm + # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + # restore-keys: | + # ${{ runner.os }}-node- + # - name: Enable compiler cache + # uses: actions/cache@v4 + # with: + # path: ~/.ccache + # key: ccache-${{ matrix.name }}-${{ github.sha }} + # restore-keys: | + # ccache-${{ matrix.name }}- + # - name: Enable Conan cache + # uses: actions/cache@v4 + # with: + # path: ~/.conan + # key: v9-conan-${{ matrix.name }}-${{ github.sha }} + # restore-keys: | + # v9-conan-${{ matrix.name }}- + # - name: Enable test cache + # uses: actions/cache@v4 + # with: + # path: ${{github.workspace}}/test/cache + # key: v4-test-${{ matrix.name }}-${{ github.sha }} + # restore-keys: | + # v4-test-${{ matrix.name }}- + # - name: Prepare environment + # run: | + # echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV + # mkdir -p $HOME/.ccache - PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV - echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV - if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then - # We can only set this after checkout once we know the workspace directory - echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV - echo 
"UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV - echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV - fi + # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + # echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV + # echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV + # if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then + # # We can only set this after checkout once we know the workspace directory + # echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV + # echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV + # echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV + # fi - if [[ "${RUNNER_OS}" == "Linux" ]]; then - echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV - elif [[ "${RUNNER_OS}" == "macOS" ]]; then - echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV - fi - # See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041 - # We need it to be able to access system folders while restoring cached Boost below - - name: Give tar root ownership - if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar - - name: Cache Boost - if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - id: cache-boost - uses: actions/cache@v4 - with: - path: | - /usr/local/include/boost - /usr/local/lib/libboost* - key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} - restore-keys: | - v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV + # elif [[ "${RUNNER_OS}" == "macOS" ]]; then + # echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV + # fi + # # See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041 + # # We need it to be able to access system folders while restoring cached Boost below + # - name: Give tar root ownership + # if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + # run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar + # - name: Cache Boost + # if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + # id: cache-boost + # uses: actions/cache@v4 + # with: + # path: | + # /usr/local/include/boost + # /usr/local/lib/libboost* + # key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} + # restore-keys: | + # v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} - - name: Install Boost - if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - run: | - BOOST_VERSION="1.85.0" - BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}" - wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz - tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz - cd boost_${BOOST_VERSION_UNDERSCORE} - sudo ./bootstrap.sh - sudo ./b2 install - cd .. 
- sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}* + # - name: Install Boost + # if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + # run: | + # BOOST_VERSION="1.85.0" + # BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}" + # wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + # tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + # cd boost_${BOOST_VERSION_UNDERSCORE} + # sudo ./bootstrap.sh + # sudo ./b2 install + # cd .. + # sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}* - - name: Install dev dependencies - run: | - python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages + # - name: Install dev dependencies + # run: | + # python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages - # workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499 - # and that's why CI cannot find conan executable installed above - if [[ "${RUNNER_OS}" == "macOS" ]]; then - echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH - fi + # # workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499 + # # and that's why CI cannot find conan executable installed above + # if [[ "${RUNNER_OS}" == "macOS" ]]; then + # echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH + # fi - # ccache - if [[ "${RUNNER_OS}" == "Linux" ]]; then - sudo apt-get update -y && sudo apt-get install ccache - elif [[ "${RUNNER_OS}" == "macOS" ]]; then - brew install ccache - fi + # # ccache + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # sudo apt-get update -y && sudo apt-get install ccache + # elif [[ "${RUNNER_OS}" == "macOS" ]]; then + # brew install ccache + # fi - # Linux dev packages - if [ "${ENABLE_CONAN}" != "ON" ]; then - sudo apt-get update -y - sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev - if [[ "${CCOMPILER}" != clang-* ]]; then - sudo apt-get install -y ${CXXCOMPILER} - fi - if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then - sudo apt-get install -y lcov - fi - fi + # # Linux dev packages + # if [ "${ENABLE_CONAN}" != "ON" ]; then + # sudo apt-get update -y + # sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev + # if [[ "${CCOMPILER}" != clang-* ]]; then + # sudo apt-get install -y ${CXXCOMPILER} + # fi + # if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then + # sudo apt-get install -y lcov + # fi + # fi - # TBB - TBB_VERSION=2021.12.0 - if [[ "${RUNNER_OS}" == "Linux" ]]; then - TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz" - elif [[ "${RUNNER_OS}" == "macOS" ]]; then - TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz" - fi - wget --tries 5 ${TBB_URL} -O onetbb.tgz - tar zxvf onetbb.tgz - sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/ - sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. 
/usr/local/include/ - - name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18 - if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }} - run: | - sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq + # # TBB + # TBB_VERSION=2021.12.0 + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz" + # elif [[ "${RUNNER_OS}" == "macOS" ]]; then + # TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz" + # fi + # wget --tries 5 ${TBB_URL} -O onetbb.tgz + # tar zxvf onetbb.tgz + # sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/ + # sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/ + # - name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18 + # if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }} + # run: | + # sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq - conan config init - yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml" - - name: Add Apple-clang 16 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Apple-clang 16 - if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.runs-on == 'macos-14' }} - run: | - sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_darwin_arm64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq + # conan config init + # yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml" + # - name: Add Apple-clang 16 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Apple-clang 16 + # if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.runs-on == 'macos-14' }} + # run: | + # sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_darwin_arm64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq - conan config init - yq eval '.compiler.apple-clang.version += ["16.0"]' -i "$HOME/.conan/settings.yml" - - name: Prepare build - run: | - mkdir ${OSRM_BUILD_DIR} - ccache --max-size=256M - npm ci --ignore-scripts - if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then - lcov --directory . --zerocounters # clean cached files - fi - echo "CC=${CCOMPILER}" >> $GITHUB_ENV - echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV - if [[ "${RUNNER_OS}" == "macOS" ]]; then - # missing from GCC path, needed for conan builds of libiconv, for example. - sudo xcode-select --switch /Library/Developer/CommandLineTools - echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV - echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV - fi + # conan config init + # yq eval '.compiler.apple-clang.version += ["16.0"]' -i "$HOME/.conan/settings.yml" + # - name: Prepare build + # run: | + # mkdir ${OSRM_BUILD_DIR} + # ccache --max-size=256M + # npm ci --ignore-scripts + # if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then + # lcov --directory . --zerocounters # clean cached files + # fi + # echo "CC=${CCOMPILER}" >> $GITHUB_ENV + # echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV + # if [[ "${RUNNER_OS}" == "macOS" ]]; then + # # missing from GCC path, needed for conan builds of libiconv, for example. 
+ # sudo xcode-select --switch /Library/Developer/CommandLineTools + # echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV + # echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV + # fi - - name: Build and install OSRM - run: | - echo "Using ${JOBS} jobs" - pushd ${OSRM_BUILD_DIR} + # - name: Build and install OSRM + # run: | + # echo "Using ${JOBS} jobs" + # pushd ${OSRM_BUILD_DIR} - ccache --zero-stats - cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ - -DENABLE_CONAN=${ENABLE_CONAN:-OFF} \ - -DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \ - -DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \ - -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \ - -DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \ - -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \ - -DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \ - -DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \ - -DENABLE_CCACHE=ON \ - -DENABLE_LTO=${ENABLE_LTO:-ON} \ - -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} - make --jobs=${JOBS} + # ccache --zero-stats + # cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ + # -DENABLE_CONAN=${ENABLE_CONAN:-OFF} \ + # -DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \ + # -DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \ + # -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \ + # -DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \ + # -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \ + # -DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \ + # -DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \ + # -DENABLE_CCACHE=ON \ + # -DENABLE_LTO=${ENABLE_LTO:-ON} \ + # -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} + # make --jobs=${JOBS} - if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then - make tests --jobs=${JOBS} - make benchmarks --jobs=${JOBS} + # if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then + # make tests --jobs=${JOBS} + # make benchmarks --jobs=${JOBS} - sudo make install - if [[ "${RUNNER_OS}" == "Linux" ]]; then - echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV - fi - echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV - fi - popd - - name: Build example - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} - run: | - mkdir example/build && pushd example/build - cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} - make --jobs=${JOBS} - popd - - name: Run all tests - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} - run: | - make -C test/data benchmark + # sudo make install + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV + # fi + # echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV + # fi + # popd + # - name: Build example + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} + # run: | + # mkdir example/build && pushd example/build + # cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} + # make --jobs=${JOBS} + # popd + # - name: Run all tests + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} + # run: | + # make -C test/data benchmark - # macOS SIP strips the linker path. Reset this inside the running shell - export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }} - ./example/build/osrm-example test/data/mld/monaco.osrm + # # macOS SIP strips the linker path. 
Reset this inside the running shell + # export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }} + # ./example/build/osrm-example test/data/mld/monaco.osrm - # All tests assume to be run from the build directory - pushd ${OSRM_BUILD_DIR} - for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done - if [ -z "${ENABLE_SANITIZER}" ]; then - npm run nodejs-tests - fi - popd - npm test + # # All tests assume to be run from the build directory + # pushd ${OSRM_BUILD_DIR} + # for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done + # if [ -z "${ENABLE_SANITIZER}" ]; then + # npm run nodejs-tests + # fi + # popd + # npm test - - name: Use Node 18 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - uses: actions/setup-node@v4 - with: - node-version: 18 - - name: Run Node package tests on Node 18 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - run: | - node --version - npm run nodejs-tests - - name: Use Node 20 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - uses: actions/setup-node@v4 - with: - node-version: 20 - - name: Run Node package tests on Node 20 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - run: | - node --version - npm run nodejs-tests - - name: Use Node latest - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - uses: actions/setup-node@v4 - with: - node-version: latest - - name: Run Node package tests on Node-latest - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - run: | - node --version - npm run nodejs-tests + # - name: Use Node 18 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - name: Run Node package tests on Node 18 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # run: | + # node --version + # npm run nodejs-tests + # - name: Use Node 20 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # uses: actions/setup-node@v4 + # with: + # node-version: 20 + # - name: Run Node package tests on Node 20 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # run: | + # node --version + # npm run nodejs-tests + # - name: Use Node latest + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # uses: actions/setup-node@v4 + # with: + # node-version: latest + # - name: Run Node package tests on Node-latest + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # run: | + # node --version + # npm run nodejs-tests - - name: Upload test logs - uses: actions/upload-artifact@v4 - if: failure() - with: - name: logs - path: test/logs/ + # - name: Upload test logs + # uses: actions/upload-artifact@v4 + # if: failure() + # with: + # name: logs + # path: test/logs/ - # - name: Generate code coverage - # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} - # run: | - # lcov --directory . --capture --output-file coverage.info # capture coverage info - # lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system - # lcov --list coverage.info #debug info + # # - name: Generate code coverage + # # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} + # # run: | + # # lcov --directory . 
--capture --output-file coverage.info # capture coverage info + # # lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system + # # lcov --list coverage.info #debug info - # # Uploading report to CodeCov - # - name: Upload code coverage - # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} - # uses: codecov/codecov-action@v4 - # with: - # files: coverage.info - # name: codecov-osrm-backend - # fail_ci_if_error: true - # verbose: true - - name: Build Node package - if: ${{ matrix.build_node_package }} - run: ./scripts/ci/node_package.sh - - name: Publish Node package - if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }} - uses: ncipollo/release-action@v1 - with: - allowUpdates: true - artifactErrorsFailBuild: true - artifacts: build/stage/**/*.tar.gz - omitBody: true - omitBodyDuringUpdate: true - omitName: true - omitNameDuringUpdate: true - replacesArtifacts: true - token: ${{ secrets.GITHUB_TOKEN }} - - name: Show CCache statistics - run: | - ccache -p - ccache -s + # # # Uploading report to CodeCov + # # - name: Upload code coverage + # # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} + # # uses: codecov/codecov-action@v4 + # # with: + # # files: coverage.info + # # name: codecov-osrm-backend + # # fail_ci_if_error: true + # # verbose: true + # - name: Build Node package + # if: ${{ matrix.build_node_package }} + # run: ./scripts/ci/node_package.sh + # - name: Publish Node package + # if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }} + # uses: ncipollo/release-action@v1 + # with: + # allowUpdates: true + # artifactErrorsFailBuild: true + # artifacts: build/stage/**/*.tar.gz + # omitBody: true + # omitBodyDuringUpdate: true + # omitName: true + # omitNameDuringUpdate: true + # replacesArtifacts: true + # token: ${{ secrets.GITHUB_TOKEN }} + # - name: Show CCache statistics + # run: | + # ccache -p + # ccache -s benchmarks: if: github.event_name == 'pull_request' - needs: [format-taginfo-docs] + # needs: [format-taginfo-docs] runs-on: self-hosted env: CCOMPILER: clang-16 @@ -749,6 +749,8 @@ jobs: cp -rf pr/test/data ~/benchmarks/test/data cp -rf pr/profiles ~/benchmarks/profiles + ls ~/benchmarks/test/data + sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks sudo cset shield --reset @@ -778,9 +780,9 @@ jobs: ccache -p ccache -s - ci-complete: - runs-on: ubuntu-22.04 - needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] - steps: - - run: echo "CI complete" + # ci-complete: + # runs-on: ubuntu-22.04 + # needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] + # steps: + # - run: echo "CI complete" diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 98d65774d..8ab597c94 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -64,8 +64,8 @@ function run_benchmarks_for_folder { $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" echo "Running alias" $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" - # echo "Running json-render-bench" - # $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench" + echo "Running json-render-bench" + $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" #> "$RESULTS_FOLDER/json-render.bench" echo "Running packedvector-bench" 
$BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" echo "Running rtree-bench" From 668dafc242a87971d1cee57bb63e8b69e645548f Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:06:03 +0100 Subject: [PATCH 07/35] wip From bd3d409f035ffcfae224992b9c3a1ee2b79f08da Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:18:57 +0100 Subject: [PATCH 08/35] wip --- .github/workflows/osrm-backend.yml | 58 +++++++++++++++--------------- scripts/ci/run_benchmarks.sh | 36 +++++++++---------- src/benchmarks/json_render.cpp | 3 ++ 3 files changed, 50 insertions(+), 47 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 36171df39..540498dfa 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -719,18 +719,18 @@ jobs: make -j$(nproc) benchmarks cd .. make -C test/data - - name: Build PR Branch - run: | - cd pr - npm ci --ignore-scripts - cd .. - mkdir -p pr/build - cd pr/build - cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - make -j$(nproc) - make -j$(nproc) benchmarks - cd .. - make -C test/data + # - name: Build PR Branch + # run: | + # cd pr + # npm ci --ignore-scripts + # cd .. + # mkdir -p pr/build + # cd pr/build + # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + # make -j$(nproc) + # make -j$(nproc) benchmarks + # cd .. + # make -C test/data # we run benchmarks in tmpfs to avoid impact of disk IO - name: Create folder for tmpfs run: | @@ -738,22 +738,22 @@ jobs: sudo umount ~/benchmarks | true rm -rf ~/benchmarks mkdir -p ~/benchmarks - # see https://llvm.org/docs/Benchmarking.html - - name: Run PR Benchmarks - run: | - sudo cset shield -c 2-3 -k on - sudo mount -t tmpfs -o size=4g none ~/benchmarks - cp -rf pr/build ~/benchmarks/build - cp -rf pr/lib ~/benchmarks/lib - mkdir -p ~/benchmarks/test - cp -rf pr/test/data ~/benchmarks/test/data - cp -rf pr/profiles ~/benchmarks/profiles + # # see https://llvm.org/docs/Benchmarking.html + # - name: Run PR Benchmarks + # run: | + # sudo cset shield -c 2-3 -k on + # sudo mount -t tmpfs -o size=4g none ~/benchmarks + # cp -rf pr/build ~/benchmarks/build + # cp -rf pr/lib ~/benchmarks/lib + # mkdir -p ~/benchmarks/test + # cp -rf pr/test/data ~/benchmarks/test/data + # cp -rf pr/profiles ~/benchmarks/profiles - ls ~/benchmarks/test/data + # ls ~/benchmarks/test/data - sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv - sudo umount ~/benchmarks - sudo cset shield --reset + # sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + # sudo umount ~/benchmarks + # sudo cset shield --reset - name: Run Base Benchmarks run: | sudo cset shield -c 2-3 -k on @@ -772,9 +772,9 @@ jobs: sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks sudo cset shield --reset - - name: Post Benchmark Results - run: | - python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results + # - name: Post Benchmark Results + # run: | + # python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results - name: Show CCache statistics run: | ccache -p diff 
--git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 8ab597c94..cec573563 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -54,28 +54,28 @@ function run_benchmarks_for_folder { BENCHMARKS_FOLDER="$BINARIES_FOLDER/src/benchmarks" - echo "Running match-bench MLD" - $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench" - echo "Running match-bench CH" - $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench" - echo "Running route-bench MLD" - $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench" - echo "Running route-bench CH" - $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" - echo "Running alias" - $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" - echo "Running json-render-bench" + # echo "Running match-bench MLD" + # $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench" + # echo "Running match-bench CH" + # $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench" + # echo "Running route-bench MLD" + # $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench" + # echo "Running route-bench CH" + # $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" + # echo "Running alias" + # $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" + # echo "Running json-render-bench" $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" #> "$RESULTS_FOLDER/json-render.bench" echo "Running packedvector-bench" - $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" - echo "Running rtree-bench" - $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" + # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" + # echo "Running rtree-bench" + # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" - cp -rf $OSM_PBF $FOLDER/data.osm.pbf + # cp -rf $OSM_PBF $FOLDER/data.osm.pbf - echo "Running osrm-extract" - measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" - # echo "Running osrm-partition" + # echo "Running osrm-extract" + # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" + # # echo "Running osrm-partition" # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" # echo "Running osrm-customize" # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-customize $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_customize.bench" diff --git a/src/benchmarks/json_render.cpp b/src/benchmarks/json_render.cpp index d2c00b51f..d2793f748 100644 --- a/src/benchmarks/json_render.cpp +++ b/src/benchmarks/json_render.cpp @@ -99,7 +99,10 @@ int main(int argc, char **argv) return EXIT_FAILURE; } + std::cerr << "before: " << argv[1] << std::endl; + const auto obj = load(argv[1]); + 
std::cerr << "after: " << argv[1] << std::endl; TIMER_START(string); std::string out_str; From 5581ac16bd44dc7cbd731e051291acd2da21ea8f Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:19:36 +0100 Subject: [PATCH 09/35] wip --- .github/workflows/osrm-backend.yml | 90 +++++++++++++++--------------- 1 file changed, 45 insertions(+), 45 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 540498dfa..c23b485e2 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -707,71 +707,71 @@ jobs: with: ref: ${{ github.event.pull_request.base.ref }} path: base - - name: Build Base Branch - run: | - cd base - npm ci --ignore-scripts - cd .. - mkdir base/build - cd base/build - cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - make -j$(nproc) - make -j$(nproc) benchmarks - cd .. - make -C test/data - # - name: Build PR Branch + # - name: Build Base Branch # run: | - # cd pr + # cd base # npm ci --ignore-scripts # cd .. - # mkdir -p pr/build - # cd pr/build + # mkdir base/build + # cd base/build # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - # make -j$(nproc) + # make -j$(nproc) # make -j$(nproc) benchmarks # cd .. # make -C test/data - # we run benchmarks in tmpfs to avoid impact of disk IO + - name: Build PR Branch + run: | + cd pr + npm ci --ignore-scripts + cd .. + mkdir -p pr/build + cd pr/build + cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + make -j$(nproc) + make -j$(nproc) benchmarks + cd .. + make -C test/data + we run benchmarks in tmpfs to avoid impact of disk IO - name: Create folder for tmpfs run: | # if by any chance it was mounted before(e.g. due to previous job failed), unmount it sudo umount ~/benchmarks | true rm -rf ~/benchmarks mkdir -p ~/benchmarks - # # see https://llvm.org/docs/Benchmarking.html - # - name: Run PR Benchmarks - # run: | - # sudo cset shield -c 2-3 -k on - # sudo mount -t tmpfs -o size=4g none ~/benchmarks - # cp -rf pr/build ~/benchmarks/build - # cp -rf pr/lib ~/benchmarks/lib - # mkdir -p ~/benchmarks/test - # cp -rf pr/test/data ~/benchmarks/test/data - # cp -rf pr/profiles ~/benchmarks/profiles - - # ls ~/benchmarks/test/data - - # sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv - # sudo umount ~/benchmarks - # sudo cset shield --reset - - name: Run Base Benchmarks + # see https://llvm.org/docs/Benchmarking.html + - name: Run PR Benchmarks run: | sudo cset shield -c 2-3 -k on sudo mount -t tmpfs -o size=4g none ~/benchmarks - cp -rf base/build ~/benchmarks/build - cp -rf base/lib ~/benchmarks/lib + cp -rf pr/build ~/benchmarks/build + cp -rf pr/lib ~/benchmarks/lib mkdir -p ~/benchmarks/test - cp -rf base/test/data ~/benchmarks/test/data - cp -rf base/profiles ~/benchmarks/profiles + cp -rf pr/test/data ~/benchmarks/test/data + cp -rf pr/profiles ~/benchmarks/profiles - # TODO: remove it when base branch will have this file at needed location - if [ ! 
-f ~/benchmarks/test/data/portugal_to_korea.json ]; then - cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json - fi - # we intentionally use scripts from PR branch to be able to update them and see results in the same PR - sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + ls ~/benchmarks/test/data + + sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks sudo cset shield --reset + # - name: Run Base Benchmarks + # run: | + # sudo cset shield -c 2-3 -k on + # sudo mount -t tmpfs -o size=4g none ~/benchmarks + # cp -rf base/build ~/benchmarks/build + # cp -rf base/lib ~/benchmarks/lib + # mkdir -p ~/benchmarks/test + # cp -rf base/test/data ~/benchmarks/test/data + # cp -rf base/profiles ~/benchmarks/profiles + + # # TODO: remove it when base branch will have this file at needed location + # if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then + # cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json + # fi + # # we intentionally use scripts from PR branch to be able to update them and see results in the same PR + # sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + # sudo umount ~/benchmarks + # sudo cset shield --reset # - name: Post Benchmark Results # run: | # python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results From c3e9e12fc3a00b62953fb884d3af1dc49b5b204e Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:21:12 +0100 Subject: [PATCH 10/35] wip --- .github/workflows/osrm-backend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index c23b485e2..60c7a725d 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -731,7 +731,7 @@ jobs: make -j$(nproc) benchmarks cd .. make -C test/data - we run benchmarks in tmpfs to avoid impact of disk IO + # we run benchmarks in tmpfs to avoid impact of disk IO - name: Create folder for tmpfs run: | # if by any chance it was mounted before(e.g. 
due to previous job failed), unmount it From 09020115d2a893aa7c7277d70e27e4602132ebef Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:41:33 +0100 Subject: [PATCH 11/35] wip --- include/util/json_container.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/util/json_container.hpp b/include/util/json_container.hpp index 728b6e3e9..14ca9d52f 100644 --- a/include/util/json_container.hpp +++ b/include/util/json_container.hpp @@ -104,7 +104,7 @@ using Value = std::variant; */ struct Object { - std::unordered_map values; + std::unordered_map values; }; /** From 761883e8d6d4dc59d5dd4ce5bab2e90779778219 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:47:34 +0100 Subject: [PATCH 12/35] wip --- scripts/ci/run_benchmarks.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index cec573563..42093036d 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -71,10 +71,10 @@ function run_benchmarks_for_folder { # echo "Running rtree-bench" # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" - # cp -rf $OSM_PBF $FOLDER/data.osm.pbf + cp -rf $OSM_PBF $FOLDER/data.osm.pbf - # echo "Running osrm-extract" - # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" + echo "Running osrm-extract" + measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" # # echo "Running osrm-partition" # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" # echo "Running osrm-customize" From 7f70f9068a79c9dd499870b0e5cd73be0f78e86c Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 12:52:53 +0100 Subject: [PATCH 13/35] wip --- scripts/ci/run_benchmarks.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 42093036d..dac04a8b9 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -74,7 +74,7 @@ function run_benchmarks_for_folder { cp -rf $OSM_PBF $FOLDER/data.osm.pbf echo "Running osrm-extract" - measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" + "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" # # echo "Running osrm-partition" # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" # echo "Running osrm-customize" From 5b69c0a7f233e03b1c63cb42c8764632c7f35b42 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:01:33 +0100 Subject: [PATCH 14/35] wip --- scripts/ci/run_benchmarks.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index dac04a8b9..42093036d 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -74,7 +74,7 @@ function run_benchmarks_for_folder { cp -rf $OSM_PBF $FOLDER/data.osm.pbf echo "Running osrm-extract" - "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" 
"$RESULTS_FOLDER/osrm_extract.bench" + measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" # # echo "Running osrm-partition" # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" # echo "Running osrm-customize" From d5ab66496c5858b5edc163a5f82ad6ed33e07431 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:07:28 +0100 Subject: [PATCH 15/35] wip --- include/util/json_container.hpp | 2 +- scripts/ci/run_benchmarks.sh | 118 ++++++++++++++++---------------- src/benchmarks/json_render.cpp | 9 ++- 3 files changed, 68 insertions(+), 61 deletions(-) diff --git a/include/util/json_container.hpp b/include/util/json_container.hpp index 14ca9d52f..728b6e3e9 100644 --- a/include/util/json_container.hpp +++ b/include/util/json_container.hpp @@ -104,7 +104,7 @@ using Value = std::variant; */ struct Object { - std::unordered_map values; + std::unordered_map values; }; /** diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 42093036d..f639528b4 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -54,80 +54,80 @@ function run_benchmarks_for_folder { BENCHMARKS_FOLDER="$BINARIES_FOLDER/src/benchmarks" - # echo "Running match-bench MLD" - # $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench" - # echo "Running match-bench CH" - # $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench" - # echo "Running route-bench MLD" - # $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench" - # echo "Running route-bench CH" - # $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" - # echo "Running alias" - # $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" - # echo "Running json-render-bench" + echo "Running match-bench MLD" + $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench" + echo "Running match-bench CH" + $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench" + echo "Running route-bench MLD" + $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench" + echo "Running route-bench CH" + $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" + echo "Running alias" + $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" + echo "Running json-render-bench" $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" #> "$RESULTS_FOLDER/json-render.bench" echo "Running packedvector-bench" - # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" - # echo "Running rtree-bench" - # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" + $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" + echo "Running rtree-bench" + $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" cp -rf $OSM_PBF $FOLDER/data.osm.pbf echo "Running osrm-extract" 
measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-extract -p $FOLDER/profiles/car.lua $FOLDER/data.osm.pbf" "$RESULTS_FOLDER/osrm_extract.bench" - # # echo "Running osrm-partition" - # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" - # echo "Running osrm-customize" - # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-customize $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_customize.bench" - # echo "Running osrm-contract" - # measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-contract $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_contract.bench" + echo "Running osrm-partition" + measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-partition $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_partition.bench" + echo "Running osrm-customize" + measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-customize $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_customize.bench" + echo "Running osrm-contract" + measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-contract $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_contract.bench" - # for ALGORITHM in ch mld; do - # for BENCH in nearest table trip route match; do - # echo "Running node $BENCH $ALGORITHM" - # START=$(date +%s.%N) - # node $SCRIPTS_FOLDER/scripts/ci/bench.js $FOLDER/lib/binding/node_osrm.node $FOLDER/data.osrm $ALGORITHM $BENCH $GPS_TRACES > "$RESULTS_FOLDER/node_${BENCH}_${ALGORITHM}.bench" 5 - # END=$(date +%s.%N) - # DIFF=$(echo "$END - $START" | bc) - # echo "Took: ${DIFF}s" - # done - # done + for ALGORITHM in ch mld; do + for BENCH in nearest table trip route match; do + echo "Running node $BENCH $ALGORITHM" + START=$(date +%s.%N) + node $SCRIPTS_FOLDER/scripts/ci/bench.js $FOLDER/lib/binding/node_osrm.node $FOLDER/data.osrm $ALGORITHM $BENCH $GPS_TRACES > "$RESULTS_FOLDER/node_${BENCH}_${ALGORITHM}.bench" 5 + END=$(date +%s.%N) + DIFF=$(echo "$END - $START" | bc) + echo "Took: ${DIFF}s" + done + done - # for ALGORITHM in ch mld; do - # for BENCH in nearest table trip route match; do - # echo "Running random $BENCH $ALGORITHM" - # START=$(date +%s.%N) - # $BENCHMARKS_FOLDER/bench "$FOLDER/data.osrm" $ALGORITHM $GPS_TRACES ${BENCH} > "$RESULTS_FOLDER/random_${BENCH}_${ALGORITHM}.bench" 5 || true - # END=$(date +%s.%N) - # DIFF=$(echo "$END - $START" | bc) - # echo "Took: ${DIFF}s" - # done - # done + for ALGORITHM in ch mld; do + for BENCH in nearest table trip route match; do + echo "Running random $BENCH $ALGORITHM" + START=$(date +%s.%N) + $BENCHMARKS_FOLDER/bench "$FOLDER/data.osrm" $ALGORITHM $GPS_TRACES ${BENCH} > "$RESULTS_FOLDER/random_${BENCH}_${ALGORITHM}.bench" 5 || true + END=$(date +%s.%N) + DIFF=$(echo "$END - $START" | bc) + echo "Took: ${DIFF}s" + done + done - # for ALGORITHM in ch mld; do - # $BINARIES_FOLDER/osrm-routed --algorithm $ALGORITHM $FOLDER/data.osrm > /dev/null 2>&1 & - # OSRM_ROUTED_PID=$! + for ALGORITHM in ch mld; do + $BINARIES_FOLDER/osrm-routed --algorithm $ALGORITHM $FOLDER/data.osrm > /dev/null 2>&1 & + OSRM_ROUTED_PID=$! - # # wait for osrm-routed to start - # if ! curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" > /dev/null 2>&1; then - # echo "osrm-routed failed to start for algorithm $ALGORITHM" - # kill -9 $OSRM_ROUTED_PID - # continue - # fi + # wait for osrm-routed to start + if ! 
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" > /dev/null 2>&1; then + echo "osrm-routed failed to start for algorithm $ALGORITHM" + kill -9 $OSRM_ROUTED_PID + continue + fi - # for METHOD in route nearest trip table match; do - # echo "Running e2e benchmark for $METHOD $ALGORITHM" - # START=$(date +%s.%N) - # python3 $SCRIPTS_FOLDER/scripts/ci/e2e_benchmark.py --host http://localhost:5000 --method $METHOD --iterations 5 --num_requests 1000 --gps_traces_file_path $GPS_TRACES > $RESULTS_FOLDER/e2e_${METHOD}_${ALGORITHM}.bench - # END=$(date +%s.%N) - # DIFF=$(echo "$END - $START" | bc) - # echo "Took: ${DIFF}s" - # done + for METHOD in route nearest trip table match; do + echo "Running e2e benchmark for $METHOD $ALGORITHM" + START=$(date +%s.%N) + python3 $SCRIPTS_FOLDER/scripts/ci/e2e_benchmark.py --host http://localhost:5000 --method $METHOD --iterations 5 --num_requests 1000 --gps_traces_file_path $GPS_TRACES > $RESULTS_FOLDER/e2e_${METHOD}_${ALGORITHM}.bench + END=$(date +%s.%N) + DIFF=$(echo "$END - $START" | bc) + echo "Took: ${DIFF}s" + done - # kill -9 $OSRM_ROUTED_PID - # done + kill -9 $OSRM_ROUTED_PID + done } run_benchmarks_for_folder diff --git a/src/benchmarks/json_render.cpp b/src/benchmarks/json_render.cpp index d2793f748..2f6f2a8ea 100644 --- a/src/benchmarks/json_render.cpp +++ b/src/benchmarks/json_render.cpp @@ -9,12 +9,17 @@ #include #include #include +#include using namespace osrm; namespace { +// we use std::string_view as a key in the object, so since here we have dynamic keys we have to "hold" them somewhere +// okay for tests... +static std::unordered_set gKeysHolder; + void convert(const rapidjson::Value &value, json::Value &result) { if (value.IsString()) @@ -32,7 +37,8 @@ void convert(const rapidjson::Value &value, json::Value &result) { json::Value member; convert(itr->value, member); - object.values.emplace(itr->name.GetString(), std::move(member)); + auto keyItr = gKeysHolder.emplace(itr->name.GetString()).first; + object.values.emplace(*keyItr, std::move(member));; } result = std::move(object); } @@ -125,6 +131,7 @@ int main(int argc, char **argv) if (std::string{out_vec.begin(), out_vec.end()} != out_str || out_str != out_ss_str) { + std::cerr << "Vector/string results are not equal\n"; throw std::logic_error("Vector/stringstream/string results are not equal"); } return EXIT_SUCCESS; From 1bf831a318964b946b68f129177d33597160f822 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:15:59 +0100 Subject: [PATCH 16/35] wip --- .github/workflows/osrm-backend.yml | 2 -- scripts/ci/run_benchmarks.sh | 10 +++++----- src/benchmarks/json_render.cpp | 3 --- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 60c7a725d..70da8a292 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -749,8 +749,6 @@ jobs: cp -rf pr/test/data ~/benchmarks/test/data cp -rf pr/profiles ~/benchmarks/profiles - ls ~/benchmarks/test/data - sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks sudo cset shield --reset diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index f639528b4..5921620db 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -65,11 +65,11 @@ 
function run_benchmarks_for_folder { echo "Running alias" $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" echo "Running json-render-bench" - $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" #> "$RESULTS_FOLDER/json-render.bench" - echo "Running packedvector-bench" - $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" - echo "Running rtree-bench" - $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" + $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench" + # echo "Running packedvector-bench" + # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" + # echo "Running rtree-bench" + # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" cp -rf $OSM_PBF $FOLDER/data.osm.pbf diff --git a/src/benchmarks/json_render.cpp b/src/benchmarks/json_render.cpp index 2f6f2a8ea..d4d40cdc3 100644 --- a/src/benchmarks/json_render.cpp +++ b/src/benchmarks/json_render.cpp @@ -105,10 +105,7 @@ int main(int argc, char **argv) return EXIT_FAILURE; } - std::cerr << "before: " << argv[1] << std::endl; - const auto obj = load(argv[1]); - std::cerr << "after: " << argv[1] << std::endl; TIMER_START(string); std::string out_str; From b9d095e644cf0808ed891417d63c60fd03acb7be Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:24:12 +0100 Subject: [PATCH 17/35] wip --- .github/workflows/osrm-backend.yml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 70da8a292..cf1b792d4 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -719,18 +719,18 @@ jobs: # make -j$(nproc) benchmarks # cd .. # make -C test/data - - name: Build PR Branch - run: | - cd pr - npm ci --ignore-scripts - cd .. - mkdir -p pr/build - cd pr/build - cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - make -j$(nproc) - make -j$(nproc) benchmarks - cd .. - make -C test/data + # - name: Build PR Branch + # run: | + # cd pr + # npm ci --ignore-scripts + # cd .. + # mkdir -p pr/build + # cd pr/build + # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + # make -j$(nproc) + # make -j$(nproc) benchmarks + # cd .. + # make -C test/data # we run benchmarks in tmpfs to avoid impact of disk IO - name: Create folder for tmpfs run: | From c46f92b6c39bc72f5a594919e41f712127253922 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:28:56 +0100 Subject: [PATCH 18/35] wip --- .github/workflows/osrm-backend.yml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index cf1b792d4..70da8a292 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -719,18 +719,18 @@ jobs: # make -j$(nproc) benchmarks # cd .. # make -C test/data - # - name: Build PR Branch - # run: | - # cd pr - # npm ci --ignore-scripts - # cd .. - # mkdir -p pr/build - # cd pr/build - # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. 
- # make -j$(nproc) - # make -j$(nproc) benchmarks - # cd .. - # make -C test/data + - name: Build PR Branch + run: | + cd pr + npm ci --ignore-scripts + cd .. + mkdir -p pr/build + cd pr/build + cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + make -j$(nproc) + make -j$(nproc) benchmarks + cd .. + make -C test/data # we run benchmarks in tmpfs to avoid impact of disk IO - name: Create folder for tmpfs run: | From 9c6a78e86575f1442c52a7ed24bd5db4e62c6bbe Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:36:17 +0100 Subject: [PATCH 19/35] wip From cf4f04d08f7302b8d86abc9e0c9c3dccb0e95cf8 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 13:56:55 +0100 Subject: [PATCH 20/35] wip --- .github/workflows/osrm-backend.yml | 64 +++++++++++++++--------------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 70da8a292..72618fad0 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -707,18 +707,18 @@ jobs: with: ref: ${{ github.event.pull_request.base.ref }} path: base - # - name: Build Base Branch - # run: | - # cd base - # npm ci --ignore-scripts - # cd .. - # mkdir base/build - # cd base/build - # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - # make -j$(nproc) - # make -j$(nproc) benchmarks - # cd .. - # make -C test/data + - name: Build Base Branch + run: | + cd base + npm ci --ignore-scripts + cd .. + mkdir base/build + cd base/build + cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + make -j$(nproc) + make -j$(nproc) benchmarks + cd .. + make -C test/data - name: Build PR Branch run: | cd pr @@ -752,27 +752,27 @@ jobs: sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks sudo cset shield --reset - # - name: Run Base Benchmarks - # run: | - # sudo cset shield -c 2-3 -k on - # sudo mount -t tmpfs -o size=4g none ~/benchmarks - # cp -rf base/build ~/benchmarks/build - # cp -rf base/lib ~/benchmarks/lib - # mkdir -p ~/benchmarks/test - # cp -rf base/test/data ~/benchmarks/test/data - # cp -rf base/profiles ~/benchmarks/profiles + - name: Run Base Benchmarks + run: | + sudo cset shield -c 2-3 -k on + sudo mount -t tmpfs -o size=4g none ~/benchmarks + cp -rf base/build ~/benchmarks/build + cp -rf base/lib ~/benchmarks/lib + mkdir -p ~/benchmarks/test + cp -rf base/test/data ~/benchmarks/test/data + cp -rf base/profiles ~/benchmarks/profiles - # # TODO: remove it when base branch will have this file at needed location - # if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then - # cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json - # fi - # # we intentionally use scripts from PR branch to be able to update them and see results in the same PR - # sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv - # sudo umount ~/benchmarks - # sudo cset shield --reset - # - name: Post Benchmark Results - # run: | - # python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results + # TODO: remove it when base branch will have this file at needed location + if [ ! 
-f ~/benchmarks/test/data/portugal_to_korea.json ]; then + cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json + fi + # we intentionally use scripts from PR branch to be able to update them and see results in the same PR + sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + sudo umount ~/benchmarks + sudo cset shield --reset + - name: Post Benchmark Results + run: | + python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results - name: Show CCache statistics run: | ccache -p From 69777e2784117b708d09d2c1754df47552be3df0 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 14:56:31 +0100 Subject: [PATCH 21/35] wip --- .github/workflows/osrm-backend.yml | 1180 ++++++++++++++-------------- scripts/ci/run_benchmarks.sh | 8 +- 2 files changed, 594 insertions(+), 594 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 72618fad0..6b6f66599 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -23,643 +23,643 @@ concurrency: cancel-in-progress: true jobs: - # windows-release-node: - # needs: format-taginfo-docs - # runs-on: windows-2022 - # continue-on-error: false - # env: - # BUILD_TYPE: Release - # steps: - # - uses: actions/checkout@v4 - # - run: pip install "conan<2.0.0" - # - run: conan --version - # - run: cmake --version - # - uses: actions/setup-node@v4 - # with: - # node-version: 18 - # - run: node --version - # - run: npm --version - # - name: Prepare environment - # shell: bash - # run: | - # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - # - run: npm install --ignore-scripts - # - run: npm link --ignore-scripts - # - name: Build - # shell: bash - # run: | - # mkdir build - # cd build - # cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. - # cmake --build . --config Release + windows-release-node: + needs: format-taginfo-docs + runs-on: windows-2022 + continue-on-error: false + env: + BUILD_TYPE: Release + steps: + - uses: actions/checkout@v4 + - run: pip install "conan<2.0.0" + - run: conan --version + - run: cmake --version + - uses: actions/setup-node@v4 + with: + node-version: 18 + - run: node --version + - run: npm --version + - name: Prepare environment + shell: bash + run: | + PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + - run: npm install --ignore-scripts + - run: npm link --ignore-scripts + - name: Build + shell: bash + run: | + mkdir build + cd build + cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. + cmake --build . --config Release - # # TODO: MSVC goes out of memory when building our tests - # # - name: Run tests - # # shell: bash - # # run: | - # # cd build - # # cmake --build . 
--config Release --target tests - # # # TODO: run tests - # # - name: Run node tests - # # shell: bash - # # run: | - # # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf + # TODO: MSVC goes out of memory when building our tests + # - name: Run tests + # shell: bash + # run: | + # cd build + # cmake --build . --config Release --target tests + # # TODO: run tests + # - name: Run node tests + # shell: bash + # run: | + # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf - # # mkdir -p test/data/ch - # # cp test/data/monaco.osrm* test/data/ch/ - # # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm + # mkdir -p test/data/ch + # cp test/data/monaco.osrm* test/data/ch/ + # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm - # # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm - # # node test/nodejs/index.js - # - name: Build Node package - # shell: bash - # run: ./scripts/ci/node_package.sh - # - name: Publish Node package - # if: ${{ env.PUBLISH == 'On' }} - # uses: ncipollo/release-action@v1 - # with: - # allowUpdates: true - # artifactErrorsFailBuild: true - # artifacts: build/stage/**/*.tar.gz - # omitBody: true - # omitBodyDuringUpdate: true - # omitName: true - # omitNameDuringUpdate: true - # replacesArtifacts: true - # token: ${{ secrets.GITHUB_TOKEN }} + # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm + # node test/nodejs/index.js + - name: Build Node package + shell: bash + run: ./scripts/ci/node_package.sh + - name: Publish Node package + if: ${{ env.PUBLISH == 'On' }} + uses: ncipollo/release-action@v1 + with: + allowUpdates: true + artifactErrorsFailBuild: true + artifacts: build/stage/**/*.tar.gz + omitBody: true + omitBodyDuringUpdate: true + omitName: true + omitNameDuringUpdate: true + replacesArtifacts: true + token: ${{ secrets.GITHUB_TOKEN }} - # format-taginfo-docs: - # runs-on: ubuntu-22.04 - # steps: - # - uses: actions/checkout@v4 - # - name: Use Node.js - # uses: actions/setup-node@v4 - # with: - # node-version: 18 - # - name: Enable Node.js cache - # uses: actions/cache@v4 - # with: - # path: ~/.npm - # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - # restore-keys: | - # ${{ runner.os }}-node- - # - name: Prepare environment - # run: | - # npm ci --ignore-scripts - # clang-format-15 --version - # - name: Run checks - # run: | - # ./scripts/check_taginfo.py taginfo.json profiles/car.lua - # ./scripts/format.sh && ./scripts/error_on_dirty.sh - # node ./scripts/validate_changelog.js - # npm run docs && ./scripts/error_on_dirty.sh - # npm audit --production + format-taginfo-docs: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: 18 + - name: Enable Node.js cache + uses: actions/cache@v4 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + - name: Prepare environment + run: | + npm ci --ignore-scripts + clang-format-15 --version + - name: Run checks + run: | + ./scripts/check_taginfo.py taginfo.json profiles/car.lua + ./scripts/format.sh && ./scripts/error_on_dirty.sh + node ./scripts/validate_changelog.js + npm run docs && ./scripts/error_on_dirty.sh + npm audit --production - # docker-image-matrix: - # strategy: - # matrix: - # docker-base-image: ["debian", "alpine"] - # needs: format-taginfo-docs - # runs-on: ubuntu-22.04 - # continue-on-error: false - # steps: - # - name: Check out the repo 
- # uses: actions/checkout@v4 - # - name: Enable osm.pbf cache - # uses: actions/cache@v4 - # with: - # path: berlin-latest.osm.pbf - # key: v1-berlin-osm-pbf - # restore-keys: | - # v1-berlin-osm-pbf - # - name: Docker build - # run: | - # docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . - # - name: Test Docker image - # run: | - # if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then - # wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf - # fi - # TAG=osrm-backend-local - # # when `--memory-swap` value equals `--memory` it means container won't use swap - # # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details - # MEMORY_ARGS="--memory=1g --memory-swap=1g" - # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf - # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson - # if [ ! -s "${PWD}/berlin-latest.geojson" ] - # then - # >&2 echo "No berlin-latest.geojson found" - # exit 1 - # fi - # # removing `.osrm.nbg` to check that whole pipeline works without it - # rm -rf "${PWD}/berlin-latest.osrm.nbg" + docker-image-matrix: + strategy: + matrix: + docker-base-image: ["debian", "alpine"] + needs: format-taginfo-docs + runs-on: ubuntu-22.04 + continue-on-error: false + steps: + - name: Check out the repo + uses: actions/checkout@v4 + - name: Enable osm.pbf cache + uses: actions/cache@v4 + with: + path: berlin-latest.osm.pbf + key: v1-berlin-osm-pbf + restore-keys: | + v1-berlin-osm-pbf + - name: Docker build + run: | + docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . + - name: Test Docker image + run: | + if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then + wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf + fi + TAG=osrm-backend-local + # when `--memory-swap` value equals `--memory` it means container won't use swap + # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details + MEMORY_ARGS="--memory=1g --memory-swap=1g" + docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf + docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson + if [ ! 
-s "${PWD}/berlin-latest.geojson" ] + then + >&2 echo "No berlin-latest.geojson found" + exit 1 + fi + # removing `.osrm.nbg` to check that whole pipeline works without it + rm -rf "${PWD}/berlin-latest.osrm.nbg" - # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm - # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm - # docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & - # curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" - # docker stop osrm-container + docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm + docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm + docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & + curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" + docker stop osrm-container - # build-test-publish: - # needs: format-taginfo-docs - # strategy: - # matrix: - # include: - # - name: gcc-13-debug-cov - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Debug - # CCOMPILER: gcc-13 - # CUCUMBER_TIMEOUT: 20000 - # CXXCOMPILER: g++-13 - # ENABLE_COVERAGE: ON + build-test-publish: + needs: format-taginfo-docs + strategy: + matrix: + include: + - name: gcc-13-debug-cov + continue-on-error: false + node: 20 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Debug + CCOMPILER: gcc-13 + CUCUMBER_TIMEOUT: 20000 + CXXCOMPILER: g++-13 + ENABLE_COVERAGE: ON - # - name: clang-18-debug-asan-ubsan - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Debug - # CCOMPILER: clang-18 - # CUCUMBER_TIMEOUT: 20000 - # CXXCOMPILER: clang++-18 - # ENABLE_SANITIZER: ON - # TARGET_ARCH: x86_64-asan-ubsan - # OSRM_CONNECTION_RETRIES: 10 - # OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 + - name: clang-18-debug-asan-ubsan + continue-on-error: false + node: 20 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Debug + CCOMPILER: clang-18 + CUCUMBER_TIMEOUT: 20000 + CXXCOMPILER: clang++-18 + ENABLE_SANITIZER: ON + TARGET_ARCH: x86_64-asan-ubsan + OSRM_CONNECTION_RETRIES: 10 + OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 - # - name: clang-18-release - # continue-on-error: false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: clang-18 - # CXXCOMPILER: clang++-18 - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_LTO: OFF + - name: clang-18-release + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: clang-18 + CXXCOMPILER: clang++-18 + CUCUMBER_TIMEOUT: 60000 + ENABLE_LTO: OFF - # - name: clang-18-debug - # continue-on-error: false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Debug - # CCOMPILER: clang-18 - # CXXCOMPILER: clang++-18 - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_LTO: OFF + - name: clang-18-debug + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Debug + CCOMPILER: clang-18 + CXXCOMPILER: clang++-18 + CUCUMBER_TIMEOUT: 60000 + ENABLE_LTO: OFF - # - name: clang-18-debug-clang-tidy - # continue-on-error: 
false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Debug - # CCOMPILER: clang-18 - # CXXCOMPILER: clang++-18 - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_CLANG_TIDY: ON + - name: clang-18-debug-clang-tidy + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Debug + CCOMPILER: clang-18 + CXXCOMPILER: clang++-18 + CUCUMBER_TIMEOUT: 60000 + ENABLE_CLANG_TIDY: ON - # - name: clang-17-release - # continue-on-error: false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: clang-17 - # CXXCOMPILER: clang++-17 - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_LTO: OFF + - name: clang-17-release + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: clang-17 + CXXCOMPILER: clang++-17 + CUCUMBER_TIMEOUT: 60000 + ENABLE_LTO: OFF - # - name: clang-16-release - # continue-on-error: false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: clang-16 - # CXXCOMPILER: clang++-16 - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_LTO: OFF + - name: clang-16-release + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: clang-16 + CXXCOMPILER: clang++-16 + CUCUMBER_TIMEOUT: 60000 + ENABLE_LTO: OFF - # - name: conan-linux-debug-asan-ubsan - # continue-on-error: false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: clang-18 - # CXXCOMPILER: clang++-18 - # ENABLE_CONAN: ON - # ENABLE_SANITIZER: ON - # ENABLE_LTO: OFF + - name: conan-linux-debug-asan-ubsan + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: clang-18 + CXXCOMPILER: clang++-18 + ENABLE_CONAN: ON + ENABLE_SANITIZER: ON + ENABLE_LTO: OFF - # - name: conan-linux-release - # continue-on-error: false - # node: 18 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: clang-18 - # CXXCOMPILER: clang++-18 - # ENABLE_CONAN: ON - # ENABLE_LTO: OFF + - name: conan-linux-release + continue-on-error: false + node: 18 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: clang-18 + CXXCOMPILER: clang++-18 + ENABLE_CONAN: ON + ENABLE_LTO: OFF - # - name: gcc-14-release - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: gcc-14 - # CXXCOMPILER: g++-14 - # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + - name: gcc-14-release + continue-on-error: false + node: 20 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: gcc-14 + CXXCOMPILER: g++-14 + CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - # - name: gcc-13-release - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-24.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: gcc-13 - # CXXCOMPILER: g++-13 - # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + - name: gcc-13-release + continue-on-error: false + node: 20 + runs-on: ubuntu-24.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: gcc-13 + CXXCOMPILER: g++-13 + CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - # - name: gcc-12-release - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-22.04 - # BUILD_TOOLS: ON - # BUILD_TYPE: Release - # CCOMPILER: gcc-12 - # CXXCOMPILER: g++-12 - # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + - name: gcc-12-release + continue-on-error: false + node: 20 + runs-on: 
ubuntu-22.04 + BUILD_TOOLS: ON + BUILD_TYPE: Release + CCOMPILER: gcc-12 + CXXCOMPILER: g++-12 + CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - # - name: conan-linux-release-node - # build_node_package: true - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-24.04 - # BUILD_TYPE: Release - # CCOMPILER: clang-16 - # CXXCOMPILER: clang++-16 - # ENABLE_CONAN: ON - # NODE_PACKAGE_TESTS_ONLY: ON + - name: conan-linux-release-node + build_node_package: true + continue-on-error: false + node: 20 + runs-on: ubuntu-24.04 + BUILD_TYPE: Release + CCOMPILER: clang-16 + CXXCOMPILER: clang++-16 + ENABLE_CONAN: ON + NODE_PACKAGE_TESTS_ONLY: ON - # - name: conan-linux-debug-node - # build_node_package: true - # continue-on-error: false - # node: 20 - # runs-on: ubuntu-24.04 - # BUILD_TYPE: Debug - # CCOMPILER: clang-16 - # CXXCOMPILER: clang++-16 - # ENABLE_CONAN: ON - # NODE_PACKAGE_TESTS_ONLY: ON + - name: conan-linux-debug-node + build_node_package: true + continue-on-error: false + node: 20 + runs-on: ubuntu-24.04 + BUILD_TYPE: Debug + CCOMPILER: clang-16 + CXXCOMPILER: clang++-16 + ENABLE_CONAN: ON + NODE_PACKAGE_TESTS_ONLY: ON - # - name: conan-macos-x64-release-node - # build_node_package: true - # continue-on-error: true - # node: 20 - # runs-on: macos-13 # x86_64 - # BUILD_TYPE: Release - # CCOMPILER: clang - # CXXCOMPILER: clang++ - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_ASSERTIONS: ON - # ENABLE_CONAN: ON + - name: conan-macos-x64-release-node + build_node_package: true + continue-on-error: true + node: 20 + runs-on: macos-13 # x86_64 + BUILD_TYPE: Release + CCOMPILER: clang + CXXCOMPILER: clang++ + CUCUMBER_TIMEOUT: 60000 + ENABLE_ASSERTIONS: ON + ENABLE_CONAN: ON - # - name: conan-macos-arm64-release-node - # build_node_package: true - # continue-on-error: true - # node: 20 - # runs-on: macos-14 # arm64 - # BUILD_TYPE: Release - # CCOMPILER: clang - # CXXCOMPILER: clang++ - # CUCUMBER_TIMEOUT: 60000 - # ENABLE_ASSERTIONS: ON - # ENABLE_CONAN: ON + - name: conan-macos-arm64-release-node + build_node_package: true + continue-on-error: true + node: 20 + runs-on: macos-14 # arm64 + BUILD_TYPE: Release + CCOMPILER: clang + CXXCOMPILER: clang++ + CUCUMBER_TIMEOUT: 60000 + ENABLE_ASSERTIONS: ON + ENABLE_CONAN: ON - # name: ${{ matrix.name}} - # continue-on-error: ${{ matrix.continue-on-error }} - # runs-on: ${{ matrix.runs-on }} - # env: - # BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }} - # BUILD_TYPE: ${{ matrix.BUILD_TYPE }} - # BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }} - # CCOMPILER: ${{ matrix.CCOMPILER }} - # CFLAGS: ${{ matrix.CFLAGS }} - # CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }} - # CXXCOMPILER: ${{ matrix.CXXCOMPILER }} - # CXXFLAGS: ${{ matrix.CXXFLAGS }} - # ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }} - # ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }} - # ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }} - # ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }} - # ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }} - # NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }} - # TARGET_ARCH: ${{ matrix.TARGET_ARCH }} - # OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }} - # OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }} - # ENABLE_LTO: ${{ matrix.ENABLE_LTO }} - # steps: - # - uses: actions/checkout@v4 - # - name: Build machine architecture - # run: uname -m - # - name: Use Node.js - # uses: actions/setup-node@v4 - # with: - # node-version: ${{ matrix.node }} - # - name: Enable Node.js cache - # uses: actions/cache@v4 - 
# with: - # path: ~/.npm - # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - # restore-keys: | - # ${{ runner.os }}-node- - # - name: Enable compiler cache - # uses: actions/cache@v4 - # with: - # path: ~/.ccache - # key: ccache-${{ matrix.name }}-${{ github.sha }} - # restore-keys: | - # ccache-${{ matrix.name }}- - # - name: Enable Conan cache - # uses: actions/cache@v4 - # with: - # path: ~/.conan - # key: v9-conan-${{ matrix.name }}-${{ github.sha }} - # restore-keys: | - # v9-conan-${{ matrix.name }}- - # - name: Enable test cache - # uses: actions/cache@v4 - # with: - # path: ${{github.workspace}}/test/cache - # key: v4-test-${{ matrix.name }}-${{ github.sha }} - # restore-keys: | - # v4-test-${{ matrix.name }}- - # - name: Prepare environment - # run: | - # echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV - # mkdir -p $HOME/.ccache + name: ${{ matrix.name}} + continue-on-error: ${{ matrix.continue-on-error }} + runs-on: ${{ matrix.runs-on }} + env: + BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }} + BUILD_TYPE: ${{ matrix.BUILD_TYPE }} + BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }} + CCOMPILER: ${{ matrix.CCOMPILER }} + CFLAGS: ${{ matrix.CFLAGS }} + CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }} + CXXCOMPILER: ${{ matrix.CXXCOMPILER }} + CXXFLAGS: ${{ matrix.CXXFLAGS }} + ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }} + ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }} + ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }} + ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }} + ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }} + NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }} + TARGET_ARCH: ${{ matrix.TARGET_ARCH }} + OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }} + OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }} + ENABLE_LTO: ${{ matrix.ENABLE_LTO }} + steps: + - uses: actions/checkout@v4 + - name: Build machine architecture + run: uname -m + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node }} + - name: Enable Node.js cache + uses: actions/cache@v4 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + - name: Enable compiler cache + uses: actions/cache@v4 + with: + path: ~/.ccache + key: ccache-${{ matrix.name }}-${{ github.sha }} + restore-keys: | + ccache-${{ matrix.name }}- + - name: Enable Conan cache + uses: actions/cache@v4 + with: + path: ~/.conan + key: v9-conan-${{ matrix.name }}-${{ github.sha }} + restore-keys: | + v9-conan-${{ matrix.name }}- + - name: Enable test cache + uses: actions/cache@v4 + with: + path: ${{github.workspace}}/test/cache + key: v4-test-${{ matrix.name }}-${{ github.sha }} + restore-keys: | + v4-test-${{ matrix.name }}- + - name: Prepare environment + run: | + echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV + mkdir -p $HOME/.ccache - # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - # echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV - # echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV - # if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then - # # We can only set this after checkout once we know the workspace directory - # echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV - # echo 
"UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV - # echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV - # fi + PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV + echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV + if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then + # We can only set this after checkout once we know the workspace directory + echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV + echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV + echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV + fi - # if [[ "${RUNNER_OS}" == "Linux" ]]; then - # echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV - # elif [[ "${RUNNER_OS}" == "macOS" ]]; then - # echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV - # fi - # # See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041 - # # We need it to be able to access system folders while restoring cached Boost below - # - name: Give tar root ownership - # if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - # run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar - # - name: Cache Boost - # if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - # id: cache-boost - # uses: actions/cache@v4 - # with: - # path: | - # /usr/local/include/boost - # /usr/local/lib/libboost* - # key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} - # restore-keys: | - # v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} + if [[ "${RUNNER_OS}" == "Linux" ]]; then + echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV + elif [[ "${RUNNER_OS}" == "macOS" ]]; then + echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV + fi + # See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041 + # We need it to be able to access system folders while restoring cached Boost below + - name: Give tar root ownership + if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar + - name: Cache Boost + if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + id: cache-boost + uses: actions/cache@v4 + with: + path: | + /usr/local/include/boost + /usr/local/lib/libboost* + key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} + restore-keys: | + v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} - # - name: Install Boost - # if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - # run: | - # BOOST_VERSION="1.85.0" - # BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}" - # wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz - # tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz - # cd boost_${BOOST_VERSION_UNDERSCORE} - # sudo ./bootstrap.sh - # sudo ./b2 install - # cd .. 
- # sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}* + - name: Install Boost + if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + run: | + BOOST_VERSION="1.85.0" + BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}" + wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + cd boost_${BOOST_VERSION_UNDERSCORE} + sudo ./bootstrap.sh + sudo ./b2 install + cd .. + sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}* - # - name: Install dev dependencies - # run: | - # python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages + - name: Install dev dependencies + run: | + python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages - # # workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499 - # # and that's why CI cannot find conan executable installed above - # if [[ "${RUNNER_OS}" == "macOS" ]]; then - # echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH - # fi + # workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499 + # and that's why CI cannot find conan executable installed above + if [[ "${RUNNER_OS}" == "macOS" ]]; then + echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH + fi - # # ccache - # if [[ "${RUNNER_OS}" == "Linux" ]]; then - # sudo apt-get update -y && sudo apt-get install ccache - # elif [[ "${RUNNER_OS}" == "macOS" ]]; then - # brew install ccache - # fi + # ccache + if [[ "${RUNNER_OS}" == "Linux" ]]; then + sudo apt-get update -y && sudo apt-get install ccache + elif [[ "${RUNNER_OS}" == "macOS" ]]; then + brew install ccache + fi - # # Linux dev packages - # if [ "${ENABLE_CONAN}" != "ON" ]; then - # sudo apt-get update -y - # sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev - # if [[ "${CCOMPILER}" != clang-* ]]; then - # sudo apt-get install -y ${CXXCOMPILER} - # fi - # if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then - # sudo apt-get install -y lcov - # fi - # fi + # Linux dev packages + if [ "${ENABLE_CONAN}" != "ON" ]; then + sudo apt-get update -y + sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev + if [[ "${CCOMPILER}" != clang-* ]]; then + sudo apt-get install -y ${CXXCOMPILER} + fi + if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then + sudo apt-get install -y lcov + fi + fi - # # TBB - # TBB_VERSION=2021.12.0 - # if [[ "${RUNNER_OS}" == "Linux" ]]; then - # TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz" - # elif [[ "${RUNNER_OS}" == "macOS" ]]; then - # TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz" - # fi - # wget --tries 5 ${TBB_URL} -O onetbb.tgz - # tar zxvf onetbb.tgz - # sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/ - # sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. 
/usr/local/include/ - # - name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18 - # if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }} - # run: | - # sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq + # TBB + TBB_VERSION=2021.12.0 + if [[ "${RUNNER_OS}" == "Linux" ]]; then + TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz" + elif [[ "${RUNNER_OS}" == "macOS" ]]; then + TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz" + fi + wget --tries 5 ${TBB_URL} -O onetbb.tgz + tar zxvf onetbb.tgz + sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/ + sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/ + - name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18 + if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }} + run: | + sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq - # conan config init - # yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml" - # - name: Add Apple-clang 16 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Apple-clang 16 - # if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.runs-on == 'macos-14' }} - # run: | - # sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_darwin_arm64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq + conan config init + yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml" + - name: Add Apple-clang 16 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Apple-clang 16 + if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.runs-on == 'macos-14' }} + run: | + sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_darwin_arm64 -O /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq - # conan config init - # yq eval '.compiler.apple-clang.version += ["16.0"]' -i "$HOME/.conan/settings.yml" - # - name: Prepare build - # run: | - # mkdir ${OSRM_BUILD_DIR} - # ccache --max-size=256M - # npm ci --ignore-scripts - # if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then - # lcov --directory . --zerocounters # clean cached files - # fi - # echo "CC=${CCOMPILER}" >> $GITHUB_ENV - # echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV - # if [[ "${RUNNER_OS}" == "macOS" ]]; then - # # missing from GCC path, needed for conan builds of libiconv, for example. - # sudo xcode-select --switch /Library/Developer/CommandLineTools - # echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV - # echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV - # fi + conan config init + yq eval '.compiler.apple-clang.version += ["16.0"]' -i "$HOME/.conan/settings.yml" + - name: Prepare build + run: | + mkdir ${OSRM_BUILD_DIR} + ccache --max-size=256M + npm ci --ignore-scripts + if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then + lcov --directory . --zerocounters # clean cached files + fi + echo "CC=${CCOMPILER}" >> $GITHUB_ENV + echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV + if [[ "${RUNNER_OS}" == "macOS" ]]; then + # missing from GCC path, needed for conan builds of libiconv, for example. 
+ sudo xcode-select --switch /Library/Developer/CommandLineTools + echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV + echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV + fi - # - name: Build and install OSRM - # run: | - # echo "Using ${JOBS} jobs" - # pushd ${OSRM_BUILD_DIR} + - name: Build and install OSRM + run: | + echo "Using ${JOBS} jobs" + pushd ${OSRM_BUILD_DIR} - # ccache --zero-stats - # cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ - # -DENABLE_CONAN=${ENABLE_CONAN:-OFF} \ - # -DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \ - # -DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \ - # -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \ - # -DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \ - # -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \ - # -DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \ - # -DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \ - # -DENABLE_CCACHE=ON \ - # -DENABLE_LTO=${ENABLE_LTO:-ON} \ - # -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} - # make --jobs=${JOBS} + ccache --zero-stats + cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ + -DENABLE_CONAN=${ENABLE_CONAN:-OFF} \ + -DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \ + -DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \ + -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \ + -DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \ + -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \ + -DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \ + -DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \ + -DENABLE_CCACHE=ON \ + -DENABLE_LTO=${ENABLE_LTO:-ON} \ + -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} + make --jobs=${JOBS} - # if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then - # make tests --jobs=${JOBS} - # make benchmarks --jobs=${JOBS} + if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then + make tests --jobs=${JOBS} + make benchmarks --jobs=${JOBS} - # sudo make install - # if [[ "${RUNNER_OS}" == "Linux" ]]; then - # echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV - # fi - # echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV - # fi - # popd - # - name: Build example - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} - # run: | - # mkdir example/build && pushd example/build - # cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} - # make --jobs=${JOBS} - # popd - # - name: Run all tests - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} - # run: | - # make -C test/data benchmark + sudo make install + if [[ "${RUNNER_OS}" == "Linux" ]]; then + echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV + fi + echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV + fi + popd + - name: Build example + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} + run: | + mkdir example/build && pushd example/build + cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} + make --jobs=${JOBS} + popd + - name: Run all tests + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} + run: | + make -C test/data benchmark - # # macOS SIP strips the linker path. Reset this inside the running shell - # export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }} - # ./example/build/osrm-example test/data/mld/monaco.osrm + # macOS SIP strips the linker path. 
Reset this inside the running shell + export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }} + ./example/build/osrm-example test/data/mld/monaco.osrm - # # All tests assume to be run from the build directory - # pushd ${OSRM_BUILD_DIR} - # for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done - # if [ -z "${ENABLE_SANITIZER}" ]; then - # npm run nodejs-tests - # fi - # popd - # npm test + # All tests assume to be run from the build directory + pushd ${OSRM_BUILD_DIR} + for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done + if [ -z "${ENABLE_SANITIZER}" ]; then + npm run nodejs-tests + fi + popd + npm test - # - name: Use Node 18 - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - # uses: actions/setup-node@v4 - # with: - # node-version: 18 - # - name: Run Node package tests on Node 18 - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - # run: | - # node --version - # npm run nodejs-tests - # - name: Use Node 20 - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - # uses: actions/setup-node@v4 - # with: - # node-version: 20 - # - name: Run Node package tests on Node 20 - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - # run: | - # node --version - # npm run nodejs-tests - # - name: Use Node latest - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - # uses: actions/setup-node@v4 - # with: - # node-version: latest - # - name: Run Node package tests on Node-latest - # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - # run: | - # node --version - # npm run nodejs-tests + - name: Use Node 18 + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + uses: actions/setup-node@v4 + with: + node-version: 18 + - name: Run Node package tests on Node 18 + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + run: | + node --version + npm run nodejs-tests + - name: Use Node 20 + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + uses: actions/setup-node@v4 + with: + node-version: 20 + - name: Run Node package tests on Node 20 + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + run: | + node --version + npm run nodejs-tests + - name: Use Node latest + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + uses: actions/setup-node@v4 + with: + node-version: latest + - name: Run Node package tests on Node-latest + if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + run: | + node --version + npm run nodejs-tests - # - name: Upload test logs - # uses: actions/upload-artifact@v4 - # if: failure() - # with: - # name: logs - # path: test/logs/ + - name: Upload test logs + uses: actions/upload-artifact@v4 + if: failure() + with: + name: logs + path: test/logs/ - # # - name: Generate code coverage - # # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} - # # run: | - # # lcov --directory . --capture --output-file coverage.info # capture coverage info - # # lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system - # # lcov --list coverage.info #debug info + # - name: Generate code coverage + # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} + # run: | + # lcov --directory . 
--capture --output-file coverage.info # capture coverage info + # lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system + # lcov --list coverage.info #debug info - # # # Uploading report to CodeCov - # # - name: Upload code coverage - # # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} - # # uses: codecov/codecov-action@v4 - # # with: - # # files: coverage.info - # # name: codecov-osrm-backend - # # fail_ci_if_error: true - # # verbose: true - # - name: Build Node package - # if: ${{ matrix.build_node_package }} - # run: ./scripts/ci/node_package.sh - # - name: Publish Node package - # if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }} - # uses: ncipollo/release-action@v1 - # with: - # allowUpdates: true - # artifactErrorsFailBuild: true - # artifacts: build/stage/**/*.tar.gz - # omitBody: true - # omitBodyDuringUpdate: true - # omitName: true - # omitNameDuringUpdate: true - # replacesArtifacts: true - # token: ${{ secrets.GITHUB_TOKEN }} - # - name: Show CCache statistics - # run: | - # ccache -p - # ccache -s + # # Uploading report to CodeCov + # - name: Upload code coverage + # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} + # uses: codecov/codecov-action@v4 + # with: + # files: coverage.info + # name: codecov-osrm-backend + # fail_ci_if_error: true + # verbose: true + - name: Build Node package + if: ${{ matrix.build_node_package }} + run: ./scripts/ci/node_package.sh + - name: Publish Node package + if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }} + uses: ncipollo/release-action@v1 + with: + allowUpdates: true + artifactErrorsFailBuild: true + artifacts: build/stage/**/*.tar.gz + omitBody: true + omitBodyDuringUpdate: true + omitName: true + omitNameDuringUpdate: true + replacesArtifacts: true + token: ${{ secrets.GITHUB_TOKEN }} + - name: Show CCache statistics + run: | + ccache -p + ccache -s benchmarks: if: github.event_name == 'pull_request' - # needs: [format-taginfo-docs] + needs: [format-taginfo-docs] runs-on: self-hosted env: CCOMPILER: clang-16 diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 5921620db..b763b83d6 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -66,10 +66,10 @@ function run_benchmarks_for_folder { $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" echo "Running json-render-bench" $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench" - # echo "Running packedvector-bench" - # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" - # echo "Running rtree-bench" - # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" + echo "Running packedvector-bench" + $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench" + echo "Running rtree-bench" + $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench" cp -rf $OSM_PBF $FOLDER/data.osm.pbf From 1b0a89fbd0809b1249de0ae368cdd4d5799392d3 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 14:57:50 +0100 Subject: [PATCH 22/35] wip --- .github/workflows/osrm-backend.yml | 10 +++++----- CHANGELOG.md | 1 + 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/osrm-backend.yml 
b/.github/workflows/osrm-backend.yml index 6b6f66599..abbe09245 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -778,9 +778,9 @@ jobs: ccache -p ccache -s - # ci-complete: - # runs-on: ubuntu-22.04 - # needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] - # steps: - # - run: echo "CI complete" + ci-complete: + runs-on: ubuntu-22.04 + needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] + steps: + - run: echo "CI complete" diff --git a/CHANGELOG.md b/CHANGELOG.md index 954dcc627..7eb545ffe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ - NodeJS: - CHANGED: Use node-api instead of NAN. [#6452](https://github.com/Project-OSRM/osrm-backend/pull/6452) - Misc: + - CHANGED: Use std::string_view for key type in json::Object. [#7062](https://github.com/Project-OSRM/osrm-backend/pull/7062) - CHANGED: Use thread_local instead of boost::thread_specific_ptr. [#6991](https://github.com/Project-OSRM/osrm-backend/pull/6991) - CHANGED: Bump flatbuffers to v24.3.25 version. [#6988](https://github.com/Project-OSRM/osrm-backend/pull/6988) - CHANGED: Add .reserve(...) to assembleGeometry function. [#6983](https://github.com/Project-OSRM/osrm-backend/pull/6983) From f0bb5faa6ab2aa13202b02b3b4a213cbf626c8ff Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 14:59:10 +0100 Subject: [PATCH 23/35] wip --- src/benchmarks/json_render.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/benchmarks/json_render.cpp b/src/benchmarks/json_render.cpp index d4d40cdc3..47dea88ca 100644 --- a/src/benchmarks/json_render.cpp +++ b/src/benchmarks/json_render.cpp @@ -16,8 +16,8 @@ using namespace osrm; namespace { -// we use std::string_view as a key in the object, so since here we have dynamic keys we have to "hold" them somewhere -// okay for tests... +// we use std::string_view as a key in the object, so since here we have dynamic keys we have to +// "hold" them somewhere okay for tests... static std::unordered_set gKeysHolder; void convert(const rapidjson::Value &value, json::Value &result) @@ -38,7 +38,7 @@ void convert(const rapidjson::Value &value, json::Value &result) json::Value member; convert(itr->value, member); auto keyItr = gKeysHolder.emplace(itr->name.GetString()).first; - object.values.emplace(*keyItr, std::move(member));; + object.values.emplace(*keyItr, std::move(member)); } result = std::move(object); } From c01685f38d2563a66080850799fb9120b74e3c07 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 15:12:21 +0100 Subject: [PATCH 24/35] Try to fix flaky tests --- unit_tests/library/extract.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index 342d672f7..9a1446169 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -6,6 +6,7 @@ #include #include +#include // utility class to redirect stderr so we can test it // inspired by https://stackoverflow.com/questions/5405016 @@ -31,14 +32,17 @@ BOOST_AUTO_TEST_SUITE(library_extract) BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) { + tbb::flow::graph g; osrm::ExtractorConfig config; config.requested_num_threads = std::thread::hardware_concurrency(); BOOST_CHECK_THROW(osrm::extract(config), std::exception); // including osrm::util::exception, osmium::io_error, etc. 
+ g.wait_for_all(); } BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) { + tbb::flow::graph g; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -46,6 +50,7 @@ BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) config.small_component_size = 1000; config.requested_num_threads = std::thread::hardware_concurrency(); BOOST_CHECK_NO_THROW(osrm::extract(config)); + g.wait_for_all(); } BOOST_AUTO_TEST_CASE(test_setup_runtime_error) From b65d8f1e34c20888fb7836638993e2da151a0c77 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 15:16:20 +0100 Subject: [PATCH 25/35] Try to fix flaky tests --- unit_tests/library/extract.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index 9a1446169..d84d5ee4d 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -5,8 +5,8 @@ #include "osrm/extractor_config.hpp" #include -#include #include +#include // utility class to redirect stderr so we can test it // inspired by https://stackoverflow.com/questions/5405016 From 832b576549d2d8b43a78425f1027c927cbb5826c Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 15:26:31 +0100 Subject: [PATCH 26/35] wip --- unit_tests/library/extract.cpp | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index d84d5ee4d..54ad828cb 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -5,6 +5,8 @@ #include "osrm/extractor_config.hpp" #include +#include +#include #include #include @@ -32,17 +34,17 @@ BOOST_AUTO_TEST_SUITE(library_extract) BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) { - tbb::flow::graph g; + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.requested_num_threads = std::thread::hardware_concurrency(); BOOST_CHECK_THROW(osrm::extract(config), std::exception); // including osrm::util::exception, osmium::io_error, etc. 
- g.wait_for_all(); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) { - tbb::flow::graph g; + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -50,11 +52,12 @@ BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) config.small_component_size = 1000; config.requested_num_threads = std::thread::hardware_concurrency(); BOOST_CHECK_NO_THROW(osrm::extract(config)); - g.wait_for_all(); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_CASE(test_setup_runtime_error) { + osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); From 74aab7b2ea384c5374150a60d76eb261e6c67658 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 17:12:50 +0100 Subject: [PATCH 27/35] Try to fix flaky tests --- unit_tests/library/extract.cpp | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index 54ad828cb..c5ff810e0 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -57,7 +57,7 @@ BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) BOOST_AUTO_TEST_CASE(test_setup_runtime_error) { - + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -76,10 +76,12 @@ BOOST_AUTO_TEST_CASE(test_setup_runtime_error) // issues since the output contains the full path to the file, which may change between systems BOOST_CHECK(boost::algorithm::contains(output.str(), "bad_setup.lua:6: attempt to compare number with nil")); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_CASE(test_way_runtime_error) { + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -98,10 +100,12 @@ BOOST_AUTO_TEST_CASE(test_way_runtime_error) // issues since the output contains the full path to the file, which may change between systems BOOST_CHECK(boost::algorithm::contains(output.str(), "bad_way.lua:41: attempt to compare number with nil")); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_CASE(test_node_runtime_error) { + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -120,10 +124,12 @@ BOOST_AUTO_TEST_CASE(test_node_runtime_error) // issues since the output contains the full path to the file, which may change between systems BOOST_CHECK(boost::algorithm::contains(output.str(), "bad_node.lua:36: attempt to compare number with nil")); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_CASE(test_segment_runtime_error) { + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -142,10 +148,12 @@ BOOST_AUTO_TEST_CASE(test_segment_runtime_error) // issues since the output contains the full path to the file, which may change between systems 
BOOST_CHECK(boost::algorithm::contains( output.str(), "bad_segment.lua:132: attempt to compare number with nil")); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_CASE(test_turn_runtime_error) { + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; osrm::ExtractorConfig config; config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); @@ -164,6 +172,7 @@ BOOST_AUTO_TEST_CASE(test_turn_runtime_error) // issues since the output contains the full path to the file, which may change between systems BOOST_CHECK(boost::algorithm::contains(output.str(), "bad_turn.lua:122: attempt to compare number with nil")); + oneapi::tbb::finalize(handle); } BOOST_AUTO_TEST_SUITE_END() From 1c2410b57d76f5149b869f7d5952fe8a564cab4b Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 17:20:28 +0100 Subject: [PATCH 28/35] Try to fix flaky tests --- .github/workflows/osrm-backend.yml | 818 +++++++++++++++-------------- 1 file changed, 410 insertions(+), 408 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index abbe09245..5f0b36d46 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -23,218 +23,218 @@ concurrency: cancel-in-progress: true jobs: - windows-release-node: - needs: format-taginfo-docs - runs-on: windows-2022 - continue-on-error: false - env: - BUILD_TYPE: Release - steps: - - uses: actions/checkout@v4 - - run: pip install "conan<2.0.0" - - run: conan --version - - run: cmake --version - - uses: actions/setup-node@v4 - with: - node-version: 18 - - run: node --version - - run: npm --version - - name: Prepare environment - shell: bash - run: | - PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - - run: npm install --ignore-scripts - - run: npm link --ignore-scripts - - name: Build - shell: bash - run: | - mkdir build - cd build - cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. - cmake --build . --config Release + # windows-release-node: + # needs: format-taginfo-docs + # runs-on: windows-2022 + # continue-on-error: false + # env: + # BUILD_TYPE: Release + # steps: + # - uses: actions/checkout@v4 + # - run: pip install "conan<2.0.0" + # - run: conan --version + # - run: cmake --version + # - uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - run: node --version + # - run: npm --version + # - name: Prepare environment + # shell: bash + # run: | + # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + # - run: npm install --ignore-scripts + # - run: npm link --ignore-scripts + # - name: Build + # shell: bash + # run: | + # mkdir build + # cd build + # cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. + # cmake --build . --config Release - # TODO: MSVC goes out of memory when building our tests - # - name: Run tests - # shell: bash - # run: | - # cd build - # cmake --build . 
--config Release --target tests - # # TODO: run tests - # - name: Run node tests - # shell: bash - # run: | - # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf + # # TODO: MSVC goes out of memory when building our tests + # # - name: Run tests + # # shell: bash + # # run: | + # # cd build + # # cmake --build . --config Release --target tests + # # # TODO: run tests + # # - name: Run node tests + # # shell: bash + # # run: | + # # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf - # mkdir -p test/data/ch - # cp test/data/monaco.osrm* test/data/ch/ - # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm + # # mkdir -p test/data/ch + # # cp test/data/monaco.osrm* test/data/ch/ + # # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm - # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm - # node test/nodejs/index.js - - name: Build Node package - shell: bash - run: ./scripts/ci/node_package.sh - - name: Publish Node package - if: ${{ env.PUBLISH == 'On' }} - uses: ncipollo/release-action@v1 - with: - allowUpdates: true - artifactErrorsFailBuild: true - artifacts: build/stage/**/*.tar.gz - omitBody: true - omitBodyDuringUpdate: true - omitName: true - omitNameDuringUpdate: true - replacesArtifacts: true - token: ${{ secrets.GITHUB_TOKEN }} + # # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm + # # node test/nodejs/index.js + # - name: Build Node package + # shell: bash + # run: ./scripts/ci/node_package.sh + # - name: Publish Node package + # if: ${{ env.PUBLISH == 'On' }} + # uses: ncipollo/release-action@v1 + # with: + # allowUpdates: true + # artifactErrorsFailBuild: true + # artifacts: build/stage/**/*.tar.gz + # omitBody: true + # omitBodyDuringUpdate: true + # omitName: true + # omitNameDuringUpdate: true + # replacesArtifacts: true + # token: ${{ secrets.GITHUB_TOKEN }} - format-taginfo-docs: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 18 - - name: Enable Node.js cache - uses: actions/cache@v4 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- - - name: Prepare environment - run: | - npm ci --ignore-scripts - clang-format-15 --version - - name: Run checks - run: | - ./scripts/check_taginfo.py taginfo.json profiles/car.lua - ./scripts/format.sh && ./scripts/error_on_dirty.sh - node ./scripts/validate_changelog.js - npm run docs && ./scripts/error_on_dirty.sh - npm audit --production + # format-taginfo-docs: + # runs-on: ubuntu-22.04 + # steps: + # - uses: actions/checkout@v4 + # - name: Use Node.js + # uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - name: Enable Node.js cache + # uses: actions/cache@v4 + # with: + # path: ~/.npm + # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + # restore-keys: | + # ${{ runner.os }}-node- + # - name: Prepare environment + # run: | + # npm ci --ignore-scripts + # clang-format-15 --version + # - name: Run checks + # run: | + # ./scripts/check_taginfo.py taginfo.json profiles/car.lua + # ./scripts/format.sh && ./scripts/error_on_dirty.sh + # node ./scripts/validate_changelog.js + # npm run docs && ./scripts/error_on_dirty.sh + # npm audit --production - docker-image-matrix: - strategy: - matrix: - docker-base-image: ["debian", "alpine"] - needs: format-taginfo-docs - runs-on: ubuntu-22.04 - continue-on-error: false - steps: - - name: Check out the repo - 
uses: actions/checkout@v4 - - name: Enable osm.pbf cache - uses: actions/cache@v4 - with: - path: berlin-latest.osm.pbf - key: v1-berlin-osm-pbf - restore-keys: | - v1-berlin-osm-pbf - - name: Docker build - run: | - docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . - - name: Test Docker image - run: | - if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then - wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf - fi - TAG=osrm-backend-local - # when `--memory-swap` value equals `--memory` it means container won't use swap - # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details - MEMORY_ARGS="--memory=1g --memory-swap=1g" - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson - if [ ! -s "${PWD}/berlin-latest.geojson" ] - then - >&2 echo "No berlin-latest.geojson found" - exit 1 - fi - # removing `.osrm.nbg` to check that whole pipeline works without it - rm -rf "${PWD}/berlin-latest.osrm.nbg" + # docker-image-matrix: + # strategy: + # matrix: + # docker-base-image: ["debian", "alpine"] + # needs: format-taginfo-docs + # runs-on: ubuntu-22.04 + # continue-on-error: false + # steps: + # - name: Check out the repo + # uses: actions/checkout@v4 + # - name: Enable osm.pbf cache + # uses: actions/cache@v4 + # with: + # path: berlin-latest.osm.pbf + # key: v1-berlin-osm-pbf + # restore-keys: | + # v1-berlin-osm-pbf + # - name: Docker build + # run: | + # docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . + # - name: Test Docker image + # run: | + # if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then + # wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf + # fi + # TAG=osrm-backend-local + # # when `--memory-swap` value equals `--memory` it means container won't use swap + # # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details + # MEMORY_ARGS="--memory=1g --memory-swap=1g" + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson + # if [ ! 
-s "${PWD}/berlin-latest.geojson" ] + # then + # >&2 echo "No berlin-latest.geojson found" + # exit 1 + # fi + # # removing `.osrm.nbg` to check that whole pipeline works without it + # rm -rf "${PWD}/berlin-latest.osrm.nbg" - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm - docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & - curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" - docker stop osrm-container + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm + # docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & + # curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" + # docker stop osrm-container build-test-publish: needs: format-taginfo-docs strategy: matrix: include: - - name: gcc-13-debug-cov - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: gcc-13 - CUCUMBER_TIMEOUT: 20000 - CXXCOMPILER: g++-13 - ENABLE_COVERAGE: ON + # - name: gcc-13-debug-cov + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: gcc-13 + # CUCUMBER_TIMEOUT: 20000 + # CXXCOMPILER: g++-13 + # ENABLE_COVERAGE: ON - - name: clang-18-debug-asan-ubsan - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CUCUMBER_TIMEOUT: 20000 - CXXCOMPILER: clang++-18 - ENABLE_SANITIZER: ON - TARGET_ARCH: x86_64-asan-ubsan - OSRM_CONNECTION_RETRIES: 10 - OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 + # - name: clang-18-debug-asan-ubsan + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CUCUMBER_TIMEOUT: 20000 + # CXXCOMPILER: clang++-18 + # ENABLE_SANITIZER: ON + # TARGET_ARCH: x86_64-asan-ubsan + # OSRM_CONNECTION_RETRIES: 10 + # OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 - - name: clang-18-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-18-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - - name: clang-18-debug - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-18-debug + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - - name: clang-18-debug-clang-tidy - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: 
clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_CLANG_TIDY: ON + # - name: clang-18-debug-clang-tidy + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_CLANG_TIDY: ON - - name: clang-17-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-17 - CXXCOMPILER: clang++-17 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF + # - name: clang-17-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-17 + # CXXCOMPILER: clang++-17 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF - name: clang-16-release continue-on-error: false @@ -247,104 +247,104 @@ jobs: CUCUMBER_TIMEOUT: 60000 ENABLE_LTO: OFF - - name: conan-linux-debug-asan-ubsan - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - ENABLE_CONAN: ON - ENABLE_SANITIZER: ON - ENABLE_LTO: OFF + # - name: conan-linux-debug-asan-ubsan + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # ENABLE_CONAN: ON + # ENABLE_SANITIZER: ON + # ENABLE_LTO: OFF - - name: conan-linux-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - ENABLE_CONAN: ON - ENABLE_LTO: OFF + # - name: conan-linux-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # ENABLE_CONAN: ON + # ENABLE_LTO: OFF - - name: gcc-14-release - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-14 - CXXCOMPILER: g++-14 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + # - name: gcc-14-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-14 + # CXXCOMPILER: g++-14 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - name: gcc-13-release - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-13 - CXXCOMPILER: g++-13 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + # - name: gcc-13-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-13 + # CXXCOMPILER: g++-13 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - name: gcc-12-release - continue-on-error: false - node: 20 - runs-on: ubuntu-22.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-12 - CXXCOMPILER: g++-12 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + # - name: gcc-12-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-22.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-12 + # CXXCOMPILER: g++-12 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - name: conan-linux-release-node - build_node_package: true - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TYPE: Release - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - ENABLE_CONAN: ON - NODE_PACKAGE_TESTS_ONLY: ON + # - name: conan-linux-release-node + # 
build_node_package: true + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TYPE: Release + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # ENABLE_CONAN: ON + # NODE_PACKAGE_TESTS_ONLY: ON - - name: conan-linux-debug-node - build_node_package: true - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TYPE: Debug - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - ENABLE_CONAN: ON - NODE_PACKAGE_TESTS_ONLY: ON + # - name: conan-linux-debug-node + # build_node_package: true + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TYPE: Debug + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # ENABLE_CONAN: ON + # NODE_PACKAGE_TESTS_ONLY: ON - - name: conan-macos-x64-release-node - build_node_package: true - continue-on-error: true - node: 20 - runs-on: macos-13 # x86_64 - BUILD_TYPE: Release - CCOMPILER: clang - CXXCOMPILER: clang++ - CUCUMBER_TIMEOUT: 60000 - ENABLE_ASSERTIONS: ON - ENABLE_CONAN: ON + # - name: conan-macos-x64-release-node + # build_node_package: true + # continue-on-error: true + # node: 20 + # runs-on: macos-13 # x86_64 + # BUILD_TYPE: Release + # CCOMPILER: clang + # CXXCOMPILER: clang++ + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_ASSERTIONS: ON + # ENABLE_CONAN: ON - - name: conan-macos-arm64-release-node - build_node_package: true - continue-on-error: true - node: 20 - runs-on: macos-14 # arm64 - BUILD_TYPE: Release - CCOMPILER: clang - CXXCOMPILER: clang++ - CUCUMBER_TIMEOUT: 60000 - ENABLE_ASSERTIONS: ON - ENABLE_CONAN: ON + # - name: conan-macos-arm64-release-node + # build_node_package: true + # continue-on-error: true + # node: 20 + # runs-on: macos-14 # arm64 + # BUILD_TYPE: Release + # CCOMPILER: clang + # CXXCOMPILER: clang++ + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_ASSERTIONS: ON + # ENABLE_CONAN: ON name: ${{ matrix.name}} continue-on-error: ${{ matrix.continue-on-error }} @@ -575,7 +575,9 @@ jobs: # All tests assume to be run from the build directory pushd ${OSRM_BUILD_DIR} - for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done + for i in $(seq 1 1000); do + ./unit_tests/library-tests + done if [ -z "${ENABLE_SANITIZER}" ]; then npm run nodejs-tests fi @@ -657,130 +659,130 @@ jobs: ccache -p ccache -s - benchmarks: - if: github.event_name == 'pull_request' - needs: [format-taginfo-docs] - runs-on: self-hosted - env: - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - CC: clang-16 - CXX: clang++-16 - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - PR_NUMBER: ${{ github.event.pull_request.number }} - GITHUB_REPOSITORY: ${{ github.repository }} - RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }} - steps: - - name: Checkout PR Branch - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - path: pr - - name: Activate virtualenv - run: | - python3 -m venv .venv - source .venv/bin/activate - echo PATH=$PATH >> $GITHUB_ENV - pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4" - - name: Prepare data - run: | - if [ "$RUN_BIG_BENCHMARK" = "true" ]; then - rm -rf ~/data.osm.pbf - wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet - gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv - else - if [ ! 
-f "~/data.osm.pbf" ]; then - wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf - else - echo "Using cached data.osm.pbf" - fi - gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv - fi - - name: Prepare environment - run: | - echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV - mkdir -p $HOME/.ccache - ccache --zero-stats - ccache --max-size=256M - - name: Checkout Base Branch - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.base.ref }} - path: base - - name: Build Base Branch - run: | - cd base - npm ci --ignore-scripts - cd .. - mkdir base/build - cd base/build - cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - make -j$(nproc) - make -j$(nproc) benchmarks - cd .. - make -C test/data - - name: Build PR Branch - run: | - cd pr - npm ci --ignore-scripts - cd .. - mkdir -p pr/build - cd pr/build - cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. - make -j$(nproc) - make -j$(nproc) benchmarks - cd .. - make -C test/data - # we run benchmarks in tmpfs to avoid impact of disk IO - - name: Create folder for tmpfs - run: | - # if by any chance it was mounted before(e.g. due to previous job failed), unmount it - sudo umount ~/benchmarks | true - rm -rf ~/benchmarks - mkdir -p ~/benchmarks - # see https://llvm.org/docs/Benchmarking.html - - name: Run PR Benchmarks - run: | - sudo cset shield -c 2-3 -k on - sudo mount -t tmpfs -o size=4g none ~/benchmarks - cp -rf pr/build ~/benchmarks/build - cp -rf pr/lib ~/benchmarks/lib - mkdir -p ~/benchmarks/test - cp -rf pr/test/data ~/benchmarks/test/data - cp -rf pr/profiles ~/benchmarks/profiles + # benchmarks: + # if: github.event_name == 'pull_request' + # needs: [format-taginfo-docs] + # runs-on: self-hosted + # env: + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # CC: clang-16 + # CXX: clang++-16 + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # PR_NUMBER: ${{ github.event.pull_request.number }} + # GITHUB_REPOSITORY: ${{ github.repository }} + # RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }} + # steps: + # - name: Checkout PR Branch + # uses: actions/checkout@v4 + # with: + # ref: ${{ github.head_ref }} + # path: pr + # - name: Activate virtualenv + # run: | + # python3 -m venv .venv + # source .venv/bin/activate + # echo PATH=$PATH >> $GITHUB_ENV + # pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4" + # - name: Prepare data + # run: | + # if [ "$RUN_BIG_BENCHMARK" = "true" ]; then + # rm -rf ~/data.osm.pbf + # wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet + # gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv + # else + # if [ ! -f "~/data.osm.pbf" ]; then + # wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf + # else + # echo "Using cached data.osm.pbf" + # fi + # gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv + # fi + # - name: Prepare environment + # run: | + # echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV + # mkdir -p $HOME/.ccache + # ccache --zero-stats + # ccache --max-size=256M + # - name: Checkout Base Branch + # uses: actions/checkout@v4 + # with: + # ref: ${{ github.event.pull_request.base.ref }} + # path: base + # - name: Build Base Branch + # run: | + # cd base + # npm ci --ignore-scripts + # cd .. 
+ # mkdir base/build + # cd base/build + # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + # make -j$(nproc) + # make -j$(nproc) benchmarks + # cd .. + # make -C test/data + # - name: Build PR Branch + # run: | + # cd pr + # npm ci --ignore-scripts + # cd .. + # mkdir -p pr/build + # cd pr/build + # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. + # make -j$(nproc) + # make -j$(nproc) benchmarks + # cd .. + # make -C test/data + # # we run benchmarks in tmpfs to avoid impact of disk IO + # - name: Create folder for tmpfs + # run: | + # # if by any chance it was mounted before(e.g. due to previous job failed), unmount it + # sudo umount ~/benchmarks | true + # rm -rf ~/benchmarks + # mkdir -p ~/benchmarks + # # see https://llvm.org/docs/Benchmarking.html + # - name: Run PR Benchmarks + # run: | + # sudo cset shield -c 2-3 -k on + # sudo mount -t tmpfs -o size=4g none ~/benchmarks + # cp -rf pr/build ~/benchmarks/build + # cp -rf pr/lib ~/benchmarks/lib + # mkdir -p ~/benchmarks/test + # cp -rf pr/test/data ~/benchmarks/test/data + # cp -rf pr/profiles ~/benchmarks/profiles - sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv - sudo umount ~/benchmarks - sudo cset shield --reset - - name: Run Base Benchmarks - run: | - sudo cset shield -c 2-3 -k on - sudo mount -t tmpfs -o size=4g none ~/benchmarks - cp -rf base/build ~/benchmarks/build - cp -rf base/lib ~/benchmarks/lib - mkdir -p ~/benchmarks/test - cp -rf base/test/data ~/benchmarks/test/data - cp -rf base/profiles ~/benchmarks/profiles + # sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + # sudo umount ~/benchmarks + # sudo cset shield --reset + # - name: Run Base Benchmarks + # run: | + # sudo cset shield -c 2-3 -k on + # sudo mount -t tmpfs -o size=4g none ~/benchmarks + # cp -rf base/build ~/benchmarks/build + # cp -rf base/lib ~/benchmarks/lib + # mkdir -p ~/benchmarks/test + # cp -rf base/test/data ~/benchmarks/test/data + # cp -rf base/profiles ~/benchmarks/profiles - # TODO: remove it when base branch will have this file at needed location - if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then - cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json - fi - # we intentionally use scripts from PR branch to be able to update them and see results in the same PR - sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv - sudo umount ~/benchmarks - sudo cset shield --reset - - name: Post Benchmark Results - run: | - python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results - - name: Show CCache statistics - run: | - ccache -p - ccache -s + # # TODO: remove it when base branch will have this file at needed location + # if [ ! 
-f ~/benchmarks/test/data/portugal_to_korea.json ]; then + # cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json + # fi + # # we intentionally use scripts from PR branch to be able to update them and see results in the same PR + # sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + # sudo umount ~/benchmarks + # sudo cset shield --reset + # - name: Post Benchmark Results + # run: | + # python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results + # - name: Show CCache statistics + # run: | + # ccache -p + # ccache -s - ci-complete: - runs-on: ubuntu-22.04 - needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] - steps: - - run: echo "CI complete" + # ci-complete: + # runs-on: ubuntu-22.04 + # needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] + # steps: + # - run: echo "CI complete" From baada3bd492a8c7c1b0dbc7914ce86e454788c2b Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 17:21:28 +0100 Subject: [PATCH 29/35] Try to fix flaky tests --- .github/workflows/osrm-backend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 5f0b36d46..ed277f79b 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -162,7 +162,7 @@ jobs: # docker stop osrm-container build-test-publish: - needs: format-taginfo-docs + #needs: format-taginfo-docs strategy: matrix: include: From 13c92c72d5135ab7e0eaaf9e2f0ac6c205034dac Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 17:25:02 +0100 Subject: [PATCH 30/35] Try to fix flaky tests --- .github/workflows/osrm-backend.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index ed277f79b..2be4acde3 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -575,8 +575,12 @@ jobs: # All tests assume to be run from the build directory pushd ${OSRM_BUILD_DIR} - for i in $(seq 1 1000); do - ./unit_tests/library-tests + for i in $(seq 1 100); do + echo "Iteration $i" + for test in ./unit_tests/*-tests; do + echo "Running $test" + $test + done done if [ -z "${ENABLE_SANITIZER}" ]; then npm run nodejs-tests From c4edaad85da0a918fb82dd050b81b5a557f88eec Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 17:29:41 +0100 Subject: [PATCH 31/35] Try to fix flaky tests --- unit_tests/library/extract.cpp | 242 ++++++++++++++++----------------- 1 file changed, 121 insertions(+), 121 deletions(-) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index c5ff810e0..ac637c7b7 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -32,147 +32,147 @@ class redirect_stderr BOOST_AUTO_TEST_SUITE(library_extract) -BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.requested_num_threads = std::thread::hardware_concurrency(); - BOOST_CHECK_THROW(osrm::extract(config), - std::exception); // including osrm::util::exception, osmium::io_error, etc. 
- oneapi::tbb::finalize(handle); -} +// BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_CHECK_THROW(osrm::extract(config), +// std::exception); // including osrm::util::exception, osmium::io_error, etc. +// oneapi::tbb::finalize(handle); +// } -BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; - config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); - config.profile_path = OSRM_TEST_DATA_DIR "/../../profiles/car.lua"; - config.small_component_size = 1000; - config.requested_num_threads = std::thread::hardware_concurrency(); - BOOST_CHECK_NO_THROW(osrm::extract(config)); - oneapi::tbb::finalize(handle); -} +// BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; +// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); +// config.profile_path = OSRM_TEST_DATA_DIR "/../../profiles/car.lua"; +// config.small_component_size = 1000; +// config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_CHECK_NO_THROW(osrm::extract(config)); +// oneapi::tbb::finalize(handle); +// } -BOOST_AUTO_TEST_CASE(test_setup_runtime_error) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; - config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); - config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_setup.lua"; - config.small_component_size = 1000; - config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_AUTO_TEST_CASE(test_setup_runtime_error) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; +// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); +// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_setup.lua"; +// config.small_component_size = 1000; +// config.requested_num_threads = std::thread::hardware_concurrency(); - std::stringstream output; +// std::stringstream output; - { - redirect_stderr redir(output.rdbuf()); - BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); - } +// { +// redirect_stderr redir(output.rdbuf()); +// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); +// } - // We just look for the line number, file name, and error message. This avoids portability - // issues since the output contains the full path to the file, which may change between systems - BOOST_CHECK(boost::algorithm::contains(output.str(), - "bad_setup.lua:6: attempt to compare number with nil")); - oneapi::tbb::finalize(handle); -} +// // We just look for the line number, file name, and error message. 
This avoids portability +// // issues since the output contains the full path to the file, which may change between systems +// BOOST_CHECK(boost::algorithm::contains(output.str(), +// "bad_setup.lua:6: attempt to compare number with nil")); +// oneapi::tbb::finalize(handle); +// } -BOOST_AUTO_TEST_CASE(test_way_runtime_error) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; - config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); - config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_way.lua"; - config.small_component_size = 1000; - config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_AUTO_TEST_CASE(test_way_runtime_error) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; +// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); +// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_way.lua"; +// config.small_component_size = 1000; +// config.requested_num_threads = std::thread::hardware_concurrency(); - std::stringstream output; +// std::stringstream output; - { - redirect_stderr redir(output.rdbuf()); - BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); - } +// { +// redirect_stderr redir(output.rdbuf()); +// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); +// } - // We just look for the line number, file name, and error message. This avoids portability - // issues since the output contains the full path to the file, which may change between systems - BOOST_CHECK(boost::algorithm::contains(output.str(), - "bad_way.lua:41: attempt to compare number with nil")); - oneapi::tbb::finalize(handle); -} +// // We just look for the line number, file name, and error message. This avoids portability +// // issues since the output contains the full path to the file, which may change between systems +// BOOST_CHECK(boost::algorithm::contains(output.str(), +// "bad_way.lua:41: attempt to compare number with nil")); +// oneapi::tbb::finalize(handle); +// } -BOOST_AUTO_TEST_CASE(test_node_runtime_error) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; - config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); - config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_node.lua"; - config.small_component_size = 1000; - config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_AUTO_TEST_CASE(test_node_runtime_error) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; +// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); +// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_node.lua"; +// config.small_component_size = 1000; +// config.requested_num_threads = std::thread::hardware_concurrency(); - std::stringstream output; +// std::stringstream output; - { - redirect_stderr redir(output.rdbuf()); - BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); - } +// { +// redirect_stderr redir(output.rdbuf()); +// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); +// } - // We just look for the line number, file name, and error message. 
This avoids portability - // issues since the output contains the full path to the file, which may change between systems - BOOST_CHECK(boost::algorithm::contains(output.str(), - "bad_node.lua:36: attempt to compare number with nil")); - oneapi::tbb::finalize(handle); -} +// // We just look for the line number, file name, and error message. This avoids portability +// // issues since the output contains the full path to the file, which may change between systems +// BOOST_CHECK(boost::algorithm::contains(output.str(), +// "bad_node.lua:36: attempt to compare number with nil")); +// oneapi::tbb::finalize(handle); +// } -BOOST_AUTO_TEST_CASE(test_segment_runtime_error) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; - config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); - config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_segment.lua"; - config.small_component_size = 1000; - config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_AUTO_TEST_CASE(test_segment_runtime_error) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; +// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); +// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_segment.lua"; +// config.small_component_size = 1000; +// config.requested_num_threads = std::thread::hardware_concurrency(); - std::stringstream output; +// std::stringstream output; - { - redirect_stderr redir(output.rdbuf()); - BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); - } +// { +// redirect_stderr redir(output.rdbuf()); +// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); +// } - // We just look for the line number, file name, and error message. This avoids portability - // issues since the output contains the full path to the file, which may change between systems - BOOST_CHECK(boost::algorithm::contains( - output.str(), "bad_segment.lua:132: attempt to compare number with nil")); - oneapi::tbb::finalize(handle); -} +// // We just look for the line number, file name, and error message. 
This avoids portability +// // issues since the output contains the full path to the file, which may change between systems +// BOOST_CHECK(boost::algorithm::contains( +// output.str(), "bad_segment.lua:132: attempt to compare number with nil")); +// oneapi::tbb::finalize(handle); +// } -BOOST_AUTO_TEST_CASE(test_turn_runtime_error) -{ - oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; - osrm::ExtractorConfig config; - config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; - config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); - config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_turn.lua"; - config.small_component_size = 1000; - config.requested_num_threads = std::thread::hardware_concurrency(); +// BOOST_AUTO_TEST_CASE(test_turn_runtime_error) +// { +// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; +// osrm::ExtractorConfig config; +// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; +// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); +// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_turn.lua"; +// config.small_component_size = 1000; +// config.requested_num_threads = std::thread::hardware_concurrency(); - std::stringstream output; +// std::stringstream output; - { - redirect_stderr redir(output.rdbuf()); - BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); - } +// { +// redirect_stderr redir(output.rdbuf()); +// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception); +// } - // We just look for the line number, file name, and error message. This avoids portability - // issues since the output contains the full path to the file, which may change between systems - BOOST_CHECK(boost::algorithm::contains(output.str(), - "bad_turn.lua:122: attempt to compare number with nil")); - oneapi::tbb::finalize(handle); -} +// // We just look for the line number, file name, and error message. 
This avoids portability +// // issues since the output contains the full path to the file, which may change between systems +// BOOST_CHECK(boost::algorithm::contains(output.str(), +// "bad_turn.lua:122: attempt to compare number with nil")); +// oneapi::tbb::finalize(handle); +// } BOOST_AUTO_TEST_SUITE_END() From 24e4068d0748a8b7aa7c77ed8e2859787165c5f7 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 18:22:56 +0100 Subject: [PATCH 32/35] Try to fix flaky tests --- unit_tests/library/extract.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index ac637c7b7..1bec22842 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -32,6 +32,11 @@ class redirect_stderr BOOST_AUTO_TEST_SUITE(library_extract) +BOOST_AUTO_TEST_CASE(dummy) +{ + BOOST_CHECK(true); +} + // BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) // { // oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; From 40446e416fb952f4f15da139cca30f87a4624688 Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 18:30:45 +0100 Subject: [PATCH 33/35] Try to fix flaky tests --- unit_tests/library/extract.cpp | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index 1bec22842..9135051ba 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -37,15 +37,15 @@ BOOST_AUTO_TEST_CASE(dummy) BOOST_CHECK(true); } -// BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) -// { -// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; -// osrm::ExtractorConfig config; -// config.requested_num_threads = std::thread::hardware_concurrency(); -// BOOST_CHECK_THROW(osrm::extract(config), -// std::exception); // including osrm::util::exception, osmium::io_error, etc. -// oneapi::tbb::finalize(handle); -// } +BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) +{ + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; + osrm::ExtractorConfig config; + config.requested_num_threads = std::thread::hardware_concurrency(); + BOOST_CHECK_THROW(osrm::extract(config), + std::exception); // including osrm::util::exception, osmium::io_error, etc. 
+ oneapi::tbb::finalize(handle); +} // BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) // { From 6f73d2b4e0bc81a01b4da1499e278b2ec6422bda Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 18:31:01 +0100 Subject: [PATCH 34/35] Try to fix flaky tests --- .github/workflows/osrm-backend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index 2be4acde3..c64f79286 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -575,7 +575,7 @@ jobs: # All tests assume to be run from the build directory pushd ${OSRM_BUILD_DIR} - for i in $(seq 1 100); do + for i in $(seq 1 1000); do echo "Iteration $i" for test in ./unit_tests/*-tests; do echo "Running $test" From 3d9f0c6549500c8868d6575df945be5ba4bc1afd Mon Sep 17 00:00:00 2001 From: Siarhei Fedartsou Date: Sun, 3 Nov 2024 18:51:42 +0100 Subject: [PATCH 35/35] Try to fix flaky tests --- .github/workflows/osrm-backend.yml | 2 +- unit_tests/library/extract.cpp | 24 ++++++++++++------------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index c64f79286..2be4acde3 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -575,7 +575,7 @@ jobs: # All tests assume to be run from the build directory pushd ${OSRM_BUILD_DIR} - for i in $(seq 1 1000); do + for i in $(seq 1 100); do echo "Iteration $i" for test in ./unit_tests/*-tests; do echo "Running $test" diff --git a/unit_tests/library/extract.cpp b/unit_tests/library/extract.cpp index 9135051ba..d996d099d 100644 --- a/unit_tests/library/extract.cpp +++ b/unit_tests/library/extract.cpp @@ -47,18 +47,18 @@ BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config) oneapi::tbb::finalize(handle); } -// BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) -// { -// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; -// osrm::ExtractorConfig config; -// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; -// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); -// config.profile_path = OSRM_TEST_DATA_DIR "/../../profiles/car.lua"; -// config.small_component_size = 1000; -// config.requested_num_threads = std::thread::hardware_concurrency(); -// BOOST_CHECK_NO_THROW(osrm::extract(config)); -// oneapi::tbb::finalize(handle); -// } +BOOST_AUTO_TEST_CASE(test_extract_with_valid_config) +{ + oneapi::tbb::task_scheduler_handle handle{tbb::attach{}}; + osrm::ExtractorConfig config; + config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf"; + config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf"); + config.profile_path = OSRM_TEST_DATA_DIR "/../../profiles/car.lua"; + config.small_component_size = 1000; + config.requested_num_threads = std::thread::hardware_concurrency(); + BOOST_CHECK_NO_THROW(osrm::extract(config)); + oneapi::tbb::finalize(handle); +} // BOOST_AUTO_TEST_CASE(test_setup_runtime_error) // {
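
Note on the recurring pattern in these test patches: each extract test brackets its work between oneapi::tbb::task_scheduler_handle handle{tbb::attach{}} and oneapi::tbb::finalize(handle). The handle lets finalize() block until oneTBB's worker threads have terminated, so a test cannot return (and the process cannot begin static destruction) while workers are still running, which is a classic source of the intermittent failures this series is chasing. The following is a minimal standalone sketch of that pattern; it is illustrative only (the file name, the parallel_for stand-in, and the iteration count are not from OSRM) and assumes a oneTBB release new enough to provide tbb::attach, roughly 2021.6 or later.

// flaky_shutdown_sketch.cpp (illustrative only, not part of the patch series)
// Pattern used by the extract tests: attach a task_scheduler_handle before doing
// any TBB work, then block in finalize() so every worker thread is joined before
// the function returns.
#include <oneapi/tbb/global_control.h>   // task_scheduler_handle, attach, finalize
#include <oneapi/tbb/parallel_for.h>
#include <atomic>
#include <iostream>

int main()
{
    oneapi::tbb::task_scheduler_handle handle{oneapi::tbb::attach{}};

    // Stand-in for the parallel work that osrm::extract() performs internally.
    std::atomic<long> sum{0};
    oneapi::tbb::parallel_for(0, 100000, [&](int i) { sum += i; });

    // Waits for the TBB worker threads to terminate. Without this, the caller can
    // return while workers are still alive and crash later, e.g. during static
    // destruction, which shows up as an intermittent test failure.
    oneapi::tbb::finalize(handle);

    std::cout << "sum = " << sum << '\n';
    return 0;
}

The workflow change that re-runs the unit tests in a loop (for i in $(seq 1 100); do ... done) is the complementary half of the same strategy: first make the intermittent shutdown failure reproducible in CI, then make teardown deterministic with the handle/finalize pair.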