Compare commits


36 Commits

Author SHA1 Message Date
Siarhei Fedartsou 3d9f0c6549 Try to fix flaky tests 2024-11-03 18:51:42 +01:00
Siarhei Fedartsou 21c9f5d17d Merge branch 'master' into sf-try-to-fix-flaky-tests 2024-11-03 18:32:04 +01:00
Siarhei Fedartsou 6f73d2b4e0 Try to fix flaky tests 2024-11-03 18:31:01 +01:00
Siarhei Fedartsou 40446e416f Try to fix flaky tests 2024-11-03 18:30:45 +01:00
Siarhei Fedartsou 24e4068d07 Try to fix flaky tests 2024-11-03 18:22:56 +01:00
Siarhei Fedartsou c4edaad85d Try to fix flaky tests 2024-11-03 17:29:41 +01:00
Siarhei Fedartsou 13c92c72d5 Try to fix flaky tests 2024-11-03 17:25:02 +01:00
Siarhei Fedartsou baada3bd49 Try to fix flaky tests 2024-11-03 17:21:28 +01:00
Siarhei Fedartsou 1c2410b57d Try to fix flaky tests 2024-11-03 17:20:28 +01:00
Siarhei Fedartsou 74aab7b2ea Try to fix flaky tests 2024-11-03 17:12:50 +01:00
Siarhei Fedartsou 832b576549 wip 2024-11-03 15:26:31 +01:00
Siarhei Fedartsou b65d8f1e34 Try to fix flaky tests 2024-11-03 15:16:20 +01:00
Siarhei Fedartsou c01685f38d Try to fix flaky tests 2024-11-03 15:12:21 +01:00
Siarhei Fedartsou f0bb5faa6a wip 2024-11-03 14:59:10 +01:00
Siarhei Fedartsou 1b0a89fbd0 wip 2024-11-03 14:57:50 +01:00
Siarhei Fedartsou 69777e2784 wip 2024-11-03 14:56:31 +01:00
Siarhei Fedartsou cf4f04d08f wip 2024-11-03 13:56:55 +01:00
Siarhei Fedartsou 9c6a78e865 wip 2024-11-03 13:36:17 +01:00
Siarhei Fedartsou c46f92b6c3 wip 2024-11-03 13:28:56 +01:00
Siarhei Fedartsou b9d095e644 wip 2024-11-03 13:24:12 +01:00
Siarhei Fedartsou 1bf831a318 wip 2024-11-03 13:15:59 +01:00
Siarhei Fedartsou d5ab66496c wip 2024-11-03 13:07:28 +01:00
Siarhei Fedartsou 5b69c0a7f2 wip 2024-11-03 13:01:33 +01:00
Siarhei Fedartsou 7f70f9068a wip 2024-11-03 12:52:53 +01:00
Siarhei Fedartsou 761883e8d6 wip 2024-11-03 12:47:34 +01:00
Siarhei Fedartsou 09020115d2 wip 2024-11-03 12:41:33 +01:00
Siarhei Fedartsou c3e9e12fc3 wip 2024-11-03 12:21:12 +01:00
Siarhei Fedartsou 5581ac16bd wip 2024-11-03 12:19:36 +01:00
Siarhei Fedartsou bd3d409f03 wip 2024-11-03 12:18:57 +01:00
Siarhei Fedartsou 668dafc242 wip 2024-11-03 12:06:03 +01:00
Siarhei Fedartsou ea1f2be3d4 wip 2024-11-03 11:58:34 +01:00
Siarhei Fedartsou 1ae0a6e275 wip 2024-11-02 14:38:34 +01:00
Siarhei Fedartsou 085f852315 wip 2024-11-02 14:20:01 +01:00
Siarhei Fedartsou 918c33cb2d wip 2024-11-02 11:36:10 +01:00
Siarhei Fedartsou a94368cc87 Use std::string_view for key type in json::Object 2024-11-02 11:24:19 +01:00
Siarhei Fedartsou 97872e34d8 Use std::string_view for key type in json::Object 2024-11-02 11:23:06 +01:00
7 changed files with 566 additions and 633 deletions
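
Two commits in the list (a94368cc87, 97872e34d8) switch json::Object to std::string_view keys; their diff is not among the seven files shown below. As background only, a hedged sketch of the usual way to get string_view-friendly keys without dangling storage is to keep std::string as the stored key and enable C++20 heterogeneous lookup. All names here are illustrative, not OSRM's actual definitions:

    #include <functional>
    #include <string>
    #include <string_view>
    #include <unordered_map>

    // Transparent hash so find() can take a std::string_view without
    // materializing a temporary std::string.
    struct StringHash
    {
        using is_transparent = void;
        std::size_t operator()(std::string_view sv) const noexcept
        {
            return std::hash<std::string_view>{}(sv);
        }
    };

    using Value = int; // stand-in for a real JSON value type

    struct Object
    {
        std::unordered_map<std::string, Value, StringHash, std::equal_to<>> values;
    };

    int main()
    {
        Object obj;
        obj.values.emplace("distance", 42);
        // Heterogeneous lookup (C++20): no allocation for the lookup key.
        auto it = obj.values.find(std::string_view{"distance"});
        return it != obj.values.end() ? 0 : 1;
    }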

View File

@ -1,65 +0,0 @@
name: Build and Publish Docker Image
on:
release:
types: [published, prereleased]
env:
IMAGE_NAME: openharbor/osrm-backend
jobs:
publish:
strategy:
matrix:
docker-base-image: ["debian", "alpine"]
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.IMAGE_NAME }}
- name: Docker meta - debug
id: metadebug
uses: docker/metadata-action@v4
with:
images: ${{ env.IMAGE_NAME }}
flavor: |
latest=true
suffix=-debug,onlatest=true
- name: Log in to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
- name: Build and push debug image
uses: docker/build-push-action@v4
with:
push: true
platforms: linux/amd64,linux/arm64,linux/ppc64le,linux/riscv64
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
tags: ${{ steps.metadebug.outputs.tags }}
build-args: |
DOCKER_TAG=${{ join(steps.metadebug.outputs.tags) }}-${{ matrix.docker-base-image }}
- name: Build and push normal image
uses: docker/build-push-action@v4
with:
push: true
platforms: linux/amd64,linux/arm64,linux/ppc64le,linux/riscv64
file: ./docker/Dockerfile-${{ matrix.docker-base-image }}
tags: ${{ steps.meta.outputs.tags }}
build-args: |
DOCKER_TAG=${{ join(steps.meta.outputs.tags) }}-${{ matrix.docker-base-image }}

View File

@ -23,218 +23,218 @@ concurrency:
cancel-in-progress: true
jobs:
windows-release-node:
needs: format-taginfo-docs
runs-on: windows-2022
continue-on-error: false
env:
BUILD_TYPE: Release
steps:
- uses: actions/checkout@v4
- run: pip install "conan<2.0.0"
- run: conan --version
- run: cmake --version
- uses: actions/setup-node@v4
with:
node-version: 18
- run: node --version
- run: npm --version
- name: Prepare environment
shell: bash
run: |
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
- run: npm install --ignore-scripts
- run: npm link --ignore-scripts
- name: Build
shell: bash
run: |
mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
cmake --build . --config Release
# windows-release-node:
# needs: format-taginfo-docs
# runs-on: windows-2022
# continue-on-error: false
# env:
# BUILD_TYPE: Release
# steps:
# - uses: actions/checkout@v4
# - run: pip install "conan<2.0.0"
# - run: conan --version
# - run: cmake --version
# - uses: actions/setup-node@v4
# with:
# node-version: 18
# - run: node --version
# - run: npm --version
# - name: Prepare environment
# shell: bash
# run: |
# PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
# echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
# - run: npm install --ignore-scripts
# - run: npm link --ignore-scripts
# - name: Build
# shell: bash
# run: |
# mkdir build
# cd build
# cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
# cmake --build . --config Release
# TODO: MSVC runs out of memory when building our tests
# - name: Run tests
# shell: bash
# run: |
# cd build
# cmake --build . --config Release --target tests
# # TODO: run tests
# - name: Run node tests
# shell: bash
# run: |
# ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
# # TODO: MSVC runs out of memory when building our tests
# # - name: Run tests
# # shell: bash
# # run: |
# # cd build
# # cmake --build . --config Release --target tests
# # # TODO: run tests
# # - name: Run node tests
# # shell: bash
# # run: |
# # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
# mkdir -p test/data/ch
# cp test/data/monaco.osrm* test/data/ch/
# ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
# # mkdir -p test/data/ch
# # cp test/data/monaco.osrm* test/data/ch/
# # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
# ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
# node test/nodejs/index.js
- name: Build Node package
shell: bash
run: ./scripts/ci/node_package.sh
- name: Publish Node package
if: ${{ env.PUBLISH == 'On' }}
uses: ncipollo/release-action@v1
with:
allowUpdates: true
artifactErrorsFailBuild: true
artifacts: build/stage/**/*.tar.gz
omitBody: true
omitBodyDuringUpdate: true
omitName: true
omitNameDuringUpdate: true
replacesArtifacts: true
token: ${{ secrets.GITHUB_TOKEN }}
# # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
# # node test/nodejs/index.js
# - name: Build Node package
# shell: bash
# run: ./scripts/ci/node_package.sh
# - name: Publish Node package
# if: ${{ env.PUBLISH == 'On' }}
# uses: ncipollo/release-action@v1
# with:
# allowUpdates: true
# artifactErrorsFailBuild: true
# artifacts: build/stage/**/*.tar.gz
# omitBody: true
# omitBodyDuringUpdate: true
# omitName: true
# omitNameDuringUpdate: true
# replacesArtifacts: true
# token: ${{ secrets.GITHUB_TOKEN }}
format-taginfo-docs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Enable Node.js cache
uses: actions/cache@v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Prepare environment
run: |
npm ci --ignore-scripts
clang-format-15 --version
- name: Run checks
run: |
./scripts/check_taginfo.py taginfo.json profiles/car.lua
./scripts/format.sh && ./scripts/error_on_dirty.sh
node ./scripts/validate_changelog.js
npm run docs && ./scripts/error_on_dirty.sh
npm audit --production
# format-taginfo-docs:
# runs-on: ubuntu-22.04
# steps:
# - uses: actions/checkout@v4
# - name: Use Node.js
# uses: actions/setup-node@v4
# with:
# node-version: 18
# - name: Enable Node.js cache
# uses: actions/cache@v4
# with:
# path: ~/.npm
# key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
# restore-keys: |
# ${{ runner.os }}-node-
# - name: Prepare environment
# run: |
# npm ci --ignore-scripts
# clang-format-15 --version
# - name: Run checks
# run: |
# ./scripts/check_taginfo.py taginfo.json profiles/car.lua
# ./scripts/format.sh && ./scripts/error_on_dirty.sh
# node ./scripts/validate_changelog.js
# npm run docs && ./scripts/error_on_dirty.sh
# npm audit --production
docker-image-matrix:
strategy:
matrix:
docker-base-image: ["debian", "alpine"]
needs: format-taginfo-docs
runs-on: ubuntu-22.04
continue-on-error: false
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Enable osm.pbf cache
uses: actions/cache@v4
with:
path: berlin-latest.osm.pbf
key: v1-berlin-osm-pbf
restore-keys: |
v1-berlin-osm-pbf
- name: Docker build
run: |
docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
- name: Test Docker image
run: |
if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
fi
TAG=osrm-backend-local
# when the `--memory-swap` value equals `--memory`, the container won't use swap
# see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
MEMORY_ARGS="--memory=1g --memory-swap=1g"
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
if [ ! -s "${PWD}/berlin-latest.geojson" ]
then
>&2 echo "No berlin-latest.geojson found"
exit 1
fi
# removing `.osrm.nbg` to check that whole pipeline works without it
rm -rf "${PWD}/berlin-latest.osrm.nbg"
# docker-image-matrix:
# strategy:
# matrix:
# docker-base-image: ["debian", "alpine"]
# needs: format-taginfo-docs
# runs-on: ubuntu-22.04
# continue-on-error: false
# steps:
# - name: Check out the repo
# uses: actions/checkout@v4
# - name: Enable osm.pbf cache
# uses: actions/cache@v4
# with:
# path: berlin-latest.osm.pbf
# key: v1-berlin-osm-pbf
# restore-keys: |
# v1-berlin-osm-pbf
# - name: Docker build
# run: |
# docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
# - name: Test Docker image
# run: |
# if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
# wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
# fi
# TAG=osrm-backend-local
# # when the `--memory-swap` value equals `--memory`, the container won't use swap
# # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
# MEMORY_ARGS="--memory=1g --memory-swap=1g"
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
# if [ ! -s "${PWD}/berlin-latest.geojson" ]
# then
# >&2 echo "No berlin-latest.geojson found"
# exit 1
# fi
# # removing `.osrm.nbg` to check that whole pipeline works without it
# rm -rf "${PWD}/berlin-latest.osrm.nbg"
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
docker stop osrm-container
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
# docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
# curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
# docker stop osrm-container
build-test-publish:
needs: format-taginfo-docs
#needs: format-taginfo-docs
strategy:
matrix:
include:
- name: gcc-13-debug-cov
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: gcc-13
CUCUMBER_TIMEOUT: 20000
CXXCOMPILER: g++-13
ENABLE_COVERAGE: ON
# - name: gcc-13-debug-cov
# continue-on-error: false
# node: 20
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Debug
# CCOMPILER: gcc-13
# CUCUMBER_TIMEOUT: 20000
# CXXCOMPILER: g++-13
# ENABLE_COVERAGE: ON
- name: clang-18-debug-asan-ubsan
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: clang-18
CUCUMBER_TIMEOUT: 20000
CXXCOMPILER: clang++-18
ENABLE_SANITIZER: ON
TARGET_ARCH: x86_64-asan-ubsan
OSRM_CONNECTION_RETRIES: 10
OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5
# - name: clang-18-debug-asan-ubsan
# continue-on-error: false
# node: 20
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Debug
# CCOMPILER: clang-18
# CUCUMBER_TIMEOUT: 20000
# CXXCOMPILER: clang++-18
# ENABLE_SANITIZER: ON
# TARGET_ARCH: x86_64-asan-ubsan
# OSRM_CONNECTION_RETRIES: 10
# OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5
- name: clang-18-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
# - name: clang-18-release
# continue-on-error: false
# node: 18
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: clang-18
# CXXCOMPILER: clang++-18
# CUCUMBER_TIMEOUT: 60000
# ENABLE_LTO: OFF
- name: clang-18-debug
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
# - name: clang-18-debug
# continue-on-error: false
# node: 18
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Debug
# CCOMPILER: clang-18
# CXXCOMPILER: clang++-18
# CUCUMBER_TIMEOUT: 60000
# ENABLE_LTO: OFF
- name: clang-18-debug-clang-tidy
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000
ENABLE_CLANG_TIDY: ON
# - name: clang-18-debug-clang-tidy
# continue-on-error: false
# node: 18
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Debug
# CCOMPILER: clang-18
# CXXCOMPILER: clang++-18
# CUCUMBER_TIMEOUT: 60000
# ENABLE_CLANG_TIDY: ON
- name: clang-17-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-17
CXXCOMPILER: clang++-17
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
# - name: clang-17-release
# continue-on-error: false
# node: 18
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: clang-17
# CXXCOMPILER: clang++-17
# CUCUMBER_TIMEOUT: 60000
# ENABLE_LTO: OFF
- name: clang-16-release
continue-on-error: false
@ -247,104 +247,104 @@ jobs:
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
- name: conan-linux-debug-asan-ubsan
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
ENABLE_CONAN: ON
ENABLE_SANITIZER: ON
ENABLE_LTO: OFF
# - name: conan-linux-debug-asan-ubsan
# continue-on-error: false
# node: 18
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: clang-18
# CXXCOMPILER: clang++-18
# ENABLE_CONAN: ON
# ENABLE_SANITIZER: ON
# ENABLE_LTO: OFF
- name: conan-linux-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
ENABLE_CONAN: ON
ENABLE_LTO: OFF
# - name: conan-linux-release
# continue-on-error: false
# node: 18
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: clang-18
# CXXCOMPILER: clang++-18
# ENABLE_CONAN: ON
# ENABLE_LTO: OFF
- name: gcc-14-release
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-14
CXXCOMPILER: g++-14
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
# - name: gcc-14-release
# continue-on-error: false
# node: 20
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: gcc-14
# CXXCOMPILER: g++-14
# CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-13-release
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-13
CXXCOMPILER: g++-13
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
# - name: gcc-13-release
# continue-on-error: false
# node: 20
# runs-on: ubuntu-24.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: gcc-13
# CXXCOMPILER: g++-13
# CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-12-release
continue-on-error: false
node: 20
runs-on: ubuntu-22.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-12
CXXCOMPILER: g++-12
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
# - name: gcc-12-release
# continue-on-error: false
# node: 20
# runs-on: ubuntu-22.04
# BUILD_TOOLS: ON
# BUILD_TYPE: Release
# CCOMPILER: gcc-12
# CXXCOMPILER: g++-12
# CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: conan-linux-release-node
build_node_package: true
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TYPE: Release
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
# - name: conan-linux-release-node
# build_node_package: true
# continue-on-error: false
# node: 20
# runs-on: ubuntu-24.04
# BUILD_TYPE: Release
# CCOMPILER: clang-16
# CXXCOMPILER: clang++-16
# ENABLE_CONAN: ON
# NODE_PACKAGE_TESTS_ONLY: ON
- name: conan-linux-debug-node
build_node_package: true
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TYPE: Debug
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
# - name: conan-linux-debug-node
# build_node_package: true
# continue-on-error: false
# node: 20
# runs-on: ubuntu-24.04
# BUILD_TYPE: Debug
# CCOMPILER: clang-16
# CXXCOMPILER: clang++-16
# ENABLE_CONAN: ON
# NODE_PACKAGE_TESTS_ONLY: ON
- name: conan-macos-x64-release-node
build_node_package: true
continue-on-error: true
node: 20
runs-on: macos-13 # x86_64
BUILD_TYPE: Release
CCOMPILER: clang
CXXCOMPILER: clang++
CUCUMBER_TIMEOUT: 60000
ENABLE_ASSERTIONS: ON
ENABLE_CONAN: ON
# - name: conan-macos-x64-release-node
# build_node_package: true
# continue-on-error: true
# node: 20
# runs-on: macos-13 # x86_64
# BUILD_TYPE: Release
# CCOMPILER: clang
# CXXCOMPILER: clang++
# CUCUMBER_TIMEOUT: 60000
# ENABLE_ASSERTIONS: ON
# ENABLE_CONAN: ON
- name: conan-macos-arm64-release-node
build_node_package: true
continue-on-error: true
node: 20
runs-on: macos-14 # arm64
BUILD_TYPE: Release
CCOMPILER: clang
CXXCOMPILER: clang++
CUCUMBER_TIMEOUT: 60000
ENABLE_ASSERTIONS: ON
ENABLE_CONAN: ON
# - name: conan-macos-arm64-release-node
# build_node_package: true
# continue-on-error: true
# node: 20
# runs-on: macos-14 # arm64
# BUILD_TYPE: Release
# CCOMPILER: clang
# CXXCOMPILER: clang++
# CUCUMBER_TIMEOUT: 60000
# ENABLE_ASSERTIONS: ON
# ENABLE_CONAN: ON
name: ${{ matrix.name}}
continue-on-error: ${{ matrix.continue-on-error }}
@ -446,14 +446,14 @@ jobs:
if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
run: |
BOOST_VERSION="1.85.0"
BOOST_VERSION_FLAVOR="${BOOST_VERSION}-b2-nodocs"
wget -q https://github.com/boostorg/boost/releases/download/boost-${BOOST_VERSION}/boost-${BOOST_VERSION_FLAVOR}.tar.gz
tar xzf boost-${BOOST_VERSION_FLAVOR}.tar.gz
cd boost-${BOOST_VERSION}
BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}"
wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz
tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz
cd boost_${BOOST_VERSION_UNDERSCORE}
sudo ./bootstrap.sh
sudo ./b2 install
cd ..
sudo rm -rf boost-${BOOST_VERSION}*
sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}*
- name: Install dev dependencies
run: |
@ -575,7 +575,13 @@ jobs:
# All tests assume to be run from the build directory
pushd ${OSRM_BUILD_DIR}
for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done
for i in $(seq 1 100); do
echo "Iteration $i"
for test in ./unit_tests/*-tests; do
echo "Running $test"
$test
done
done
if [ -z "${ENABLE_SANITIZER}" ]; then
npm run nodejs-tests
fi
@ -657,130 +663,130 @@ jobs:
ccache -p
ccache -s
benchmarks:
if: github.event_name == 'pull_request'
needs: [format-taginfo-docs]
runs-on: self-hosted
env:
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
CC: clang-16
CXX: clang++-16
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
GITHUB_REPOSITORY: ${{ github.repository }}
RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }}
steps:
- name: Checkout PR Branch
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
path: pr
- name: Activate virtualenv
run: |
python3 -m venv .venv
source .venv/bin/activate
echo PATH=$PATH >> $GITHUB_ENV
pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4"
- name: Prepare data
run: |
if [ "$RUN_BIG_BENCHMARK" = "true" ]; then
rm -rf ~/data.osm.pbf
wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet
gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv
else
if [ ! -f "~/data.osm.pbf" ]; then
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf
else
echo "Using cached data.osm.pbf"
fi
gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv
fi
- name: Prepare environment
run: |
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
mkdir -p $HOME/.ccache
ccache --zero-stats
ccache --max-size=256M
- name: Checkout Base Branch
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.base.ref }}
path: base
- name: Build Base Branch
run: |
cd base
npm ci --ignore-scripts
cd ..
mkdir base/build
cd base/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
make -j$(nproc)
make -j$(nproc) benchmarks
cd ..
make -C test/data
- name: Build PR Branch
run: |
cd pr
npm ci --ignore-scripts
cd ..
mkdir -p pr/build
cd pr/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
make -j$(nproc)
make -j$(nproc) benchmarks
cd ..
make -C test/data
# we run benchmarks in tmpfs to avoid impact of disk IO
- name: Create folder for tmpfs
run: |
# if by any chance it was mounted before (e.g. a previous job failed), unmount it
sudo umount ~/benchmarks || true
rm -rf ~/benchmarks
mkdir -p ~/benchmarks
# see https://llvm.org/docs/Benchmarking.html
- name: Run PR Benchmarks
run: |
sudo cset shield -c 2-3 -k on
sudo mount -t tmpfs -o size=4g none ~/benchmarks
cp -rf pr/build ~/benchmarks/build
cp -rf pr/lib ~/benchmarks/lib
mkdir -p ~/benchmarks/test
cp -rf pr/test/data ~/benchmarks/test/data
cp -rf pr/profiles ~/benchmarks/profiles
# benchmarks:
# if: github.event_name == 'pull_request'
# needs: [format-taginfo-docs]
# runs-on: self-hosted
# env:
# CCOMPILER: clang-16
# CXXCOMPILER: clang++-16
# CC: clang-16
# CXX: clang++-16
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# PR_NUMBER: ${{ github.event.pull_request.number }}
# GITHUB_REPOSITORY: ${{ github.repository }}
# RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }}
# steps:
# - name: Checkout PR Branch
# uses: actions/checkout@v4
# with:
# ref: ${{ github.head_ref }}
# path: pr
# - name: Activate virtualenv
# run: |
# python3 -m venv .venv
# source .venv/bin/activate
# echo PATH=$PATH >> $GITHUB_ENV
# pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4"
# - name: Prepare data
# run: |
# if [ "$RUN_BIG_BENCHMARK" = "true" ]; then
# rm -rf ~/data.osm.pbf
# wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet
# gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv
# else
# if [ ! -f "~/data.osm.pbf" ]; then
# wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf
# else
# echo "Using cached data.osm.pbf"
# fi
# gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv
# fi
# - name: Prepare environment
# run: |
# echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
# mkdir -p $HOME/.ccache
# ccache --zero-stats
# ccache --max-size=256M
# - name: Checkout Base Branch
# uses: actions/checkout@v4
# with:
# ref: ${{ github.event.pull_request.base.ref }}
# path: base
# - name: Build Base Branch
# run: |
# cd base
# npm ci --ignore-scripts
# cd ..
# mkdir base/build
# cd base/build
# cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
# make -j$(nproc)
# make -j$(nproc) benchmarks
# cd ..
# make -C test/data
# - name: Build PR Branch
# run: |
# cd pr
# npm ci --ignore-scripts
# cd ..
# mkdir -p pr/build
# cd pr/build
# cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
# make -j$(nproc)
# make -j$(nproc) benchmarks
# cd ..
# make -C test/data
# # we run benchmarks in tmpfs to avoid impact of disk IO
# - name: Create folder for tmpfs
# run: |
# # if by any chance it was mounted before (e.g. a previous job failed), unmount it
# sudo umount ~/benchmarks || true
# rm -rf ~/benchmarks
# mkdir -p ~/benchmarks
# # see https://llvm.org/docs/Benchmarking.html
# - name: Run PR Benchmarks
# run: |
# sudo cset shield -c 2-3 -k on
# sudo mount -t tmpfs -o size=4g none ~/benchmarks
# cp -rf pr/build ~/benchmarks/build
# cp -rf pr/lib ~/benchmarks/lib
# mkdir -p ~/benchmarks/test
# cp -rf pr/test/data ~/benchmarks/test/data
# cp -rf pr/profiles ~/benchmarks/profiles
sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
sudo umount ~/benchmarks
sudo cset shield --reset
- name: Run Base Benchmarks
run: |
sudo cset shield -c 2-3 -k on
sudo mount -t tmpfs -o size=4g none ~/benchmarks
cp -rf base/build ~/benchmarks/build
cp -rf base/lib ~/benchmarks/lib
mkdir -p ~/benchmarks/test
cp -rf base/test/data ~/benchmarks/test/data
cp -rf base/profiles ~/benchmarks/profiles
# sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
# sudo umount ~/benchmarks
# sudo cset shield --reset
# - name: Run Base Benchmarks
# run: |
# sudo cset shield -c 2-3 -k on
# sudo mount -t tmpfs -o size=4g none ~/benchmarks
# cp -rf base/build ~/benchmarks/build
# cp -rf base/lib ~/benchmarks/lib
# mkdir -p ~/benchmarks/test
# cp -rf base/test/data ~/benchmarks/test/data
# cp -rf base/profiles ~/benchmarks/profiles
# TODO: remove this once the base branch has this file at the needed location
if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then
cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json
fi
# we intentionally use scripts from PR branch to be able to update them and see results in the same PR
sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
sudo umount ~/benchmarks
sudo cset shield --reset
- name: Post Benchmark Results
run: |
python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
- name: Show CCache statistics
run: |
ccache -p
ccache -s
# # TODO: remove this once the base branch has this file at the needed location
# if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then
# cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json
# fi
# # we intentionally use scripts from PR branch to be able to update them and see results in the same PR
# sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
# sudo umount ~/benchmarks
# sudo cset shield --reset
# - name: Post Benchmark Results
# run: |
# python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
# - name: Show CCache statistics
# run: |
# ccache -p
# ccache -s
ci-complete:
runs-on: ubuntu-22.04
needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks]
steps:
- run: echo "CI complete"
# ci-complete:
# runs-on: ubuntu-22.04
# needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks]
# steps:
# - run: echo "CI complete"

View File

@ -56,9 +56,6 @@ endif()
if (POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif()
if (POLICY CMP0167)
cmake_policy(SET CMP0167 NEW)
endif()
project(OSRM C CXX)

View File

@ -1,34 +1,20 @@
FROM alpine:3.21.2 AS alpine-mimalloc
FROM alpine:3.20.0 as alpine-mimalloc
RUN apk update && \
apk upgrade && \
apk add --no-cache \
boost-iostreams \
boost-program_options \
boost-thread \
mimalloc
RUN apk add --no-cache mimalloc
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2
ENV MIMALLOC_LARGE_OS_PAGES=1
FROM alpine-mimalloc AS builder
FROM alpine-mimalloc as builder
ARG DOCKER_TAG
ARG BUILD_CONCURRENCY
RUN mkdir -p /src && mkdir -p /opt
RUN mkdir -p /src /opt && \
apk add --no-cache \
boost-dev \
boost-filesystem \
clang \
cmake \
expat-dev \
git \
libbz2 \
libxml2 \
lua5.4-dev \
make \
onetbb-dev
RUN apk add --no-cache \
cmake make git clang libbz2 libxml2 \
boost-dev boost-program_options boost-filesystem boost-iostreams boost-thread \
lua5.4-dev onetbb-dev expat-dev
COPY . /src
WORKDIR /src
@ -53,18 +39,16 @@ RUN NPROC=${BUILD_CONCURRENCY:-$(nproc)} && \
rm -rf /src
# Multistage build to reduce image size - https://docs.docker.com/build/building/multi-stage/#use-multi-stage-builds
# Multistage build to reduce image size - https://docs.docker.com/engine/userguide/eng-image/multistage-build/#use-multi-stage-builds
# Only the content below ends up in the image, this helps remove /src from the image (which is large)
FROM alpine-mimalloc AS runstage
FROM alpine-mimalloc as runstage
COPY --from=builder /usr/local /usr/local
COPY --from=builder /opt /opt
RUN apk add --no-cache \
boost-date_time \
expat \
lua5.4 \
onetbb && \
boost-program_options boost-date_time boost-iostreams boost-thread \
expat lua5.4 onetbb && \
ldconfig /usr/local/lib
RUN /usr/local/bin/osrm-extract --help && \
@ -76,4 +60,3 @@ RUN /usr/local/bin/osrm-extract --help && \
WORKDIR /opt
EXPOSE 5000

View File

@ -1,24 +1,21 @@
FROM debian:bookworm-slim AS builder
FROM debian:bookworm-slim as builder
ARG DOCKER_TAG
ARG BUILD_CONCURRENCY
RUN mkdir -p /src && mkdir -p /opt
RUN mkdir -p /src /opt && \
apt-get update && \
apt-get -y --no-install-recommends --no-install-suggests install \
ca-certificates \
cmake \
g++ \
gcc \
git \
libboost1.81-all-dev \
libbz2-dev \
liblua5.4-dev \
libtbb-dev \
libxml2-dev \
libzip-dev \
lua5.4 \
make \
pkg-config
RUN apt-get update && \
apt-get -y --no-install-recommends install ca-certificates cmake make git gcc g++ libbz2-dev libxml2-dev wget \
libzip-dev libboost1.81-all-dev lua5.4 liblua5.4-dev pkg-config -o APT::Install-Suggests=0 -o APT::Install-Recommends=0
RUN NPROC=${BUILD_CONCURRENCY:-$(nproc)} && \
ldconfig /usr/local/lib && \
git clone --branch v2021.12.0 --single-branch https://github.com/oneapi-src/oneTBB.git && \
cd oneTBB && \
mkdir build && \
cd build && \
cmake -DTBB_TEST=OFF -DCMAKE_BUILD_TYPE=Release .. && \
cmake --build . && \
cmake --install .
COPY . /src
WORKDIR /src
@ -44,24 +41,19 @@ RUN NPROC=${BUILD_CONCURRENCY:-$(nproc)} && \
rm -rf /src
# Multistage build to reduce image size - https://docs.docker.com/build/building/multi-stage/#use-multi-stage-builds
# Multistage build to reduce image size - https://docs.docker.com/engine/userguide/eng-image/multistage-build/#use-multi-stage-builds
# Only the content below ends up in the image, this helps remove /src from the image (which is large)
FROM debian:bookworm-slim AS runstage
FROM debian:bookworm-slim as runstage
COPY --from=builder /usr/local /usr/local
COPY --from=builder /opt /opt
RUN apt-get update && \
apt-get install -y --no-install-recommends --no-install-suggests \
expat \
libboost-date-time1.81.0 \
libboost-iostreams1.81.0 \
libboost-program-options1.81.0 \
libboost-thread1.81.0 \
liblua5.4-0 \
libtbb12 && \
apt-get install -y --no-install-recommends \
libboost-program-options1.81.0 libboost-date-time1.81.0 libboost-iostreams1.81.0 libboost-thread1.81.0 \
expat liblua5.4-0 && \
rm -rf /var/lib/apt/lists/* && \
# Add /usr/local/lib to ldconfig to allow loading libraries from there
# add /usr/local/lib to ldconfig to allow loading libraries from there
ldconfig /usr/local/lib
RUN /usr/local/bin/osrm-extract --help && \
@ -73,4 +65,3 @@ RUN /usr/local/bin/osrm-extract --help && \
WORKDIR /opt
EXPOSE 5000

View File

@ -6752,8 +6752,7 @@ namespace sol {
static_assert(std::is_constructible<T, Args&&...>::value, "T must be constructible with Args");
*this = nullopt;
new (static_cast<void*>(this)) optional(std::in_place, std::forward<Args>(args)...);
return **this;
this->construct(std::forward<Args>(args)...);
}
/// Swaps this optional with the other.

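The sol.hpp hunk above switches optional::emplace between placement-new of a whole optional over *this and the internal construct() helper. A minimal hand-rolled sketch of the construct-helper approach follows; all names are hypothetical, not sol2's actual internals:

    #include <new>
    #include <utility>

    // Hypothetical mini-optional, for illustration only.
    template <typename T>
    class simple_optional
    {
        alignas(T) unsigned char storage_[sizeof(T)];
        bool engaged_ = false;

        template <typename... Args>
        void construct(Args &&...args)
        {
            // Build the value in the raw buffer instead of placement-new'ing
            // an entire optional on top of *this.
            ::new (static_cast<void *>(storage_)) T(std::forward<Args>(args)...);
            engaged_ = true;
        }

        void reset()
        {
            if (engaged_)
            {
                reinterpret_cast<T *>(storage_)->~T();
                engaged_ = false;
            }
        }

      public:
        simple_optional() = default;
        ~simple_optional() { reset(); }

        template <typename... Args>
        T &emplace(Args &&...args)
        {
            reset();                                // drop any current value
            construct(std::forward<Args>(args)...); // construct the new one in place
            return *reinterpret_cast<T *>(storage_);
        }
    };

    int main()
    {
        simple_optional<int> o;
        return o.emplace(7) == 7 ? 0 : 1;
    }
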
View File

@ -5,6 +5,9 @@
#include "osrm/extractor_config.hpp"
#include <boost/algorithm/string.hpp>
#include <oneapi/tbb/global_control.h>
#include <oneapi/tbb/parallel_for.h>
#include <tbb/flow_graph.h>
#include <thread>
// utility class to redirect stderr so we can test it
@ -29,16 +32,24 @@ class redirect_stderr
BOOST_AUTO_TEST_SUITE(library_extract)
BOOST_AUTO_TEST_CASE(dummy)
{
BOOST_CHECK(true);
}
BOOST_AUTO_TEST_CASE(test_extract_with_invalid_config)
{
oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
osrm::ExtractorConfig config;
config.requested_num_threads = std::thread::hardware_concurrency();
BOOST_CHECK_THROW(osrm::extract(config),
std::exception); // including osrm::util::exception, osmium::io_error, etc.
oneapi::tbb::finalize(handle);
}
BOOST_AUTO_TEST_CASE(test_extract_with_valid_config)
{
oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
osrm::ExtractorConfig config;
config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
@ -46,116 +57,127 @@ BOOST_AUTO_TEST_CASE(test_extract_with_valid_config)
config.small_component_size = 1000;
config.requested_num_threads = std::thread::hardware_concurrency();
BOOST_CHECK_NO_THROW(osrm::extract(config));
oneapi::tbb::finalize(handle);
}
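
The pattern these tests adopt — attaching a task_scheduler_handle and calling finalize() before returning — makes each test block until TBB's worker threads have shut down, a plausible cure for shutdown races in flaky tests. A standalone sketch of the same pattern, assuming oneTBB 2021.6 or newer:

    #include <oneapi/tbb/global_control.h>
    #include <oneapi/tbb/parallel_for.h>

    int main()
    {
        // Attach a handle to the TBB runtime so we can wait on it later.
        oneapi::tbb::task_scheduler_handle handle{oneapi::tbb::attach{}};

        oneapi::tbb::parallel_for(0, 1000, [](int) { /* some parallel work */ });

        // Blocks until all worker threads have joined; without this, workers
        // may still be winding down when main() returns.
        oneapi::tbb::finalize(handle);
        return 0;
    }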
BOOST_AUTO_TEST_CASE(test_setup_runtime_error)
{
osrm::ExtractorConfig config;
config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_setup.lua";
config.small_component_size = 1000;
config.requested_num_threads = std::thread::hardware_concurrency();
// BOOST_AUTO_TEST_CASE(test_setup_runtime_error)
// {
// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
// osrm::ExtractorConfig config;
// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_setup.lua";
// config.small_component_size = 1000;
// config.requested_num_threads = std::thread::hardware_concurrency();
std::stringstream output;
// std::stringstream output;
{
redirect_stderr redir(output.rdbuf());
BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
}
// {
// redirect_stderr redir(output.rdbuf());
// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
// }
// We just look for the line number, file name, and error message. This avoids portability
// issues since the output contains the full path to the file, which may change between systems
BOOST_CHECK(boost::algorithm::contains(output.str(),
"bad_setup.lua:6: attempt to compare number with nil"));
}
// // We just look for the line number, file name, and error message. This avoids portability
// // issues since the output contains the full path to the file, which may change between systems
// BOOST_CHECK(boost::algorithm::contains(output.str(),
// "bad_setup.lua:6: attempt to compare number with nil"));
// oneapi::tbb::finalize(handle);
// }
BOOST_AUTO_TEST_CASE(test_way_runtime_error)
{
osrm::ExtractorConfig config;
config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_way.lua";
config.small_component_size = 1000;
config.requested_num_threads = std::thread::hardware_concurrency();
// BOOST_AUTO_TEST_CASE(test_way_runtime_error)
// {
// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
// osrm::ExtractorConfig config;
// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_way.lua";
// config.small_component_size = 1000;
// config.requested_num_threads = std::thread::hardware_concurrency();
std::stringstream output;
// std::stringstream output;
{
redirect_stderr redir(output.rdbuf());
BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
}
// {
// redirect_stderr redir(output.rdbuf());
// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
// }
// We just look for the line number, file name, and error message. This avoids portability
// issues since the output contains the full path to the file, which may change between systems
BOOST_CHECK(boost::algorithm::contains(output.str(),
"bad_way.lua:41: attempt to compare number with nil"));
}
// // We just look for the line number, file name, and error message. This avoids portability
// // issues since the output contains the full path to the file, which may change between systems
// BOOST_CHECK(boost::algorithm::contains(output.str(),
// "bad_way.lua:41: attempt to compare number with nil"));
// oneapi::tbb::finalize(handle);
// }
BOOST_AUTO_TEST_CASE(test_node_runtime_error)
{
osrm::ExtractorConfig config;
config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_node.lua";
config.small_component_size = 1000;
config.requested_num_threads = std::thread::hardware_concurrency();
// BOOST_AUTO_TEST_CASE(test_node_runtime_error)
// {
// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
// osrm::ExtractorConfig config;
// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_node.lua";
// config.small_component_size = 1000;
// config.requested_num_threads = std::thread::hardware_concurrency();
std::stringstream output;
// std::stringstream output;
{
redirect_stderr redir(output.rdbuf());
BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
}
// {
// redirect_stderr redir(output.rdbuf());
// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
// }
// We just look for the line number, file name, and error message. This avoids portability
// issues since the output contains the full path to the file, which may change between systems
BOOST_CHECK(boost::algorithm::contains(output.str(),
"bad_node.lua:36: attempt to compare number with nil"));
}
// // We just look for the line number, file name, and error message. This avoids portability
// // issues since the output contains the full path to the file, which may change between systems
// BOOST_CHECK(boost::algorithm::contains(output.str(),
// "bad_node.lua:36: attempt to compare number with nil"));
// oneapi::tbb::finalize(handle);
// }
BOOST_AUTO_TEST_CASE(test_segment_runtime_error)
{
osrm::ExtractorConfig config;
config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_segment.lua";
config.small_component_size = 1000;
config.requested_num_threads = std::thread::hardware_concurrency();
// BOOST_AUTO_TEST_CASE(test_segment_runtime_error)
// {
// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
// osrm::ExtractorConfig config;
// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_segment.lua";
// config.small_component_size = 1000;
// config.requested_num_threads = std::thread::hardware_concurrency();
std::stringstream output;
// std::stringstream output;
{
redirect_stderr redir(output.rdbuf());
BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
}
// {
// redirect_stderr redir(output.rdbuf());
// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
// }
// We just look for the line number, file name, and error message. This avoids portability
// issues since the output contains the full path to the file, which may change between systems
BOOST_CHECK(boost::algorithm::contains(
output.str(), "bad_segment.lua:132: attempt to compare number with nil"));
}
// // We just look for the line number, file name, and error message. This avoids portability
// // issues since the output contains the full path to the file, which may change between systems
// BOOST_CHECK(boost::algorithm::contains(
// output.str(), "bad_segment.lua:132: attempt to compare number with nil"));
// oneapi::tbb::finalize(handle);
// }
BOOST_AUTO_TEST_CASE(test_turn_runtime_error)
{
osrm::ExtractorConfig config;
config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_turn.lua";
config.small_component_size = 1000;
config.requested_num_threads = std::thread::hardware_concurrency();
// BOOST_AUTO_TEST_CASE(test_turn_runtime_error)
// {
// oneapi::tbb::task_scheduler_handle handle{tbb::attach{}};
// osrm::ExtractorConfig config;
// config.input_path = OSRM_TEST_DATA_DIR "/monaco.osm.pbf";
// config.UseDefaultOutputNames(OSRM_TEST_DATA_DIR "/monaco.osm.pbf");
// config.profile_path = OSRM_TEST_DATA_DIR "/profiles/bad_turn.lua";
// config.small_component_size = 1000;
// config.requested_num_threads = std::thread::hardware_concurrency();
std::stringstream output;
// std::stringstream output;
{
redirect_stderr redir(output.rdbuf());
BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
}
// {
// redirect_stderr redir(output.rdbuf());
// BOOST_CHECK_THROW(osrm::extract(config), osrm::util::exception);
// }
// We just look for the line number, file name, and error message. This avoids portability
// issues since the output contains the full path to the file, which may change between systems
BOOST_CHECK(boost::algorithm::contains(output.str(),
"bad_turn.lua:122: attempt to compare number with nil"));
}
// // We just look for the line number, file name, and error message. This avoids portability
// // issues since the output contains the full path to the file, which may change between systems
// BOOST_CHECK(boost::algorithm::contains(output.str(),
// "bad_turn.lua:122: attempt to compare number with nil"));
// oneapi::tbb::finalize(handle);
// }
BOOST_AUTO_TEST_SUITE_END()