Migrate to Conan 2.x

Siarhei Fedartsou 2024-09-29 10:09:25 +02:00
parent 4f1c62a768
commit 8983956fa1
2 changed files with 482 additions and 544 deletions
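At a glance, the migration replaces the Conan 1.x flow, in which cmake-conan resolved dependencies from inside CMakeLists.txt during configure, with the Conan 2.x client driven from the CI scripts. A minimal before/after sketch using the versions and flags that appear in the diff below; paths and build-directory layout are illustrative.

    # Conan 1.x (previous setup): pin the client below 2.0 and let
    # cmake-conan fetch dependencies during the CMake configure step.
    pip install "conan<2.0.0"
    cmake .. -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON

    # Conan 2.x (this commit): resolve dependencies up front, then point
    # CMake at the generated toolchain file.
    pip install conan==2.7.1
    conan install .. --output-folder=. --build=missing --settings compiler.cppstd=20
    cmake .. -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON \
        -DCMAKE_TOOLCHAIN_FILE=./conan_toolchain.cmake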


@@ -23,282 +23,282 @@ concurrency:
cancel-in-progress: true
jobs:
windows-release-node: # windows-release-node:
needs: format-taginfo-docs # needs: format-taginfo-docs
runs-on: windows-2022 # runs-on: windows-2022
continue-on-error: false # continue-on-error: false
env: # env:
BUILD_TYPE: Release # BUILD_TYPE: Release
steps: # steps:
- uses: actions/checkout@v4 # - uses: actions/checkout@v4
- run: pip install "conan<2.0.0" # - run: pip install "conan<2.0.0"
- run: conan --version # - run: conan --version
- run: cmake --version # - run: cmake --version
- uses: actions/setup-node@v4 # - uses: actions/setup-node@v4
with: # with:
node-version: 18 # node-version: 18
- run: node --version # - run: node --version
- run: npm --version # - run: npm --version
- name: Prepare environment # - name: Prepare environment
shell: bash
run: |
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
- run: npm install --ignore-scripts
- run: npm link --ignore-scripts
- name: Build
shell: bash
run: |
mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
cmake --build . --config Release
# TODO: MSVC goes out of memory when building our tests
# - name: Run tests
# shell: bash # shell: bash
# run: | # run: |
# PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
# echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
# - run: npm install --ignore-scripts
# - run: npm link --ignore-scripts
# - name: Build
# shell: bash
# run: |
# mkdir build
# cd build
# cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
# cmake --build . --config Release
# cd build
# cmake --build . --config Release --target tests
# # TODO: run tests
# - name: Run node tests
# # TODO: MSVC goes out of memory when building our tests
# # - name: Run tests
# # shell: bash
# # run: |
# # cd build
# # cmake --build . --config Release --target tests
# # # TODO: run tests
# # - name: Run node tests
# # shell: bash
# # run: |
# # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
# # mkdir -p test/data/ch
# # cp test/data/monaco.osrm* test/data/ch/
# # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
# # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
# # node test/nodejs/index.js
# - name: Build Node package
# shell: bash # shell: bash
# run: ./scripts/ci/node_package.sh
# - name: Publish Node package
# if: ${{ env.PUBLISH == 'On' }}
# uses: ncipollo/release-action@v1
# with:
# allowUpdates: true
# artifactErrorsFailBuild: true
# artifacts: build/stage/**/*.tar.gz
# omitBody: true
# omitBodyDuringUpdate: true
# omitName: true
# omitNameDuringUpdate: true
# replacesArtifacts: true
# token: ${{ secrets.GITHUB_TOKEN }}
# format-taginfo-docs:
# runs-on: ubuntu-22.04
# steps:
# - uses: actions/checkout@v4
# - name: Use Node.js
# uses: actions/setup-node@v4
# with:
# node-version: 18
# - name: Enable Node.js cache
# uses: actions/cache@v4
# with:
# path: ~/.npm
# key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
# restore-keys: |
# ${{ runner.os }}-node-
# - name: Prepare environment
# run: |
# ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
# run: |
# npm ci --ignore-scripts
# clang-format-15 --version
# - name: Run checks
# run: |
# ./scripts/check_taginfo.py taginfo.json profiles/car.lua
# ./scripts/format.sh && ./scripts/error_on_dirty.sh
# node ./scripts/validate_changelog.js
# npm run docs && ./scripts/error_on_dirty.sh
# npm audit --production
# mkdir -p test/data/ch
# cp test/data/monaco.osrm* test/data/ch/
# ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
# docker-image-matrix:
# strategy:
# matrix:
# docker-base-image: ["debian", "alpine"]
# needs: format-taginfo-docs
# runs-on: ubuntu-22.04
# continue-on-error: false
# steps:
# - name: Check out the repo
# uses: actions/checkout@v4
# - name: Enable osm.pbf cache
# uses: actions/cache@v4
# with:
# path: berlin-latest.osm.pbf
# key: v1-berlin-osm-pbf
# restore-keys: |
# v1-berlin-osm-pbf
# - name: Docker build
# run: |
# docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
# - name: Test Docker image
# run: |
# if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
# wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
# fi
# TAG=osrm-backend-local
# # when `--memory-swap` value equals `--memory` it means container won't use swap
# # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
# MEMORY_ARGS="--memory=1g --memory-swap=1g"
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
# if [ ! -s "${PWD}/berlin-latest.geojson" ]
# then
# >&2 echo "No berlin-latest.geojson found"
# exit 1
# fi
# # removing `.osrm.nbg` to check that whole pipeline works without it
# rm -rf "${PWD}/berlin-latest.osrm.nbg"
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
# docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
# docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
# curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
# docker stop osrm-container
# ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
# node test/nodejs/index.js
- name: Build Node package
shell: bash
run: ./scripts/ci/node_package.sh
- name: Publish Node package
if: ${{ env.PUBLISH == 'On' }}
uses: ncipollo/release-action@v1
with:
allowUpdates: true
artifactErrorsFailBuild: true
artifacts: build/stage/**/*.tar.gz
omitBody: true
omitBodyDuringUpdate: true
omitName: true
omitNameDuringUpdate: true
replacesArtifacts: true
token: ${{ secrets.GITHUB_TOKEN }}
format-taginfo-docs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Enable Node.js cache
uses: actions/cache@v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Prepare environment
run: |
npm ci --ignore-scripts
clang-format-15 --version
- name: Run checks
run: |
./scripts/check_taginfo.py taginfo.json profiles/car.lua
./scripts/format.sh && ./scripts/error_on_dirty.sh
node ./scripts/validate_changelog.js
npm run docs && ./scripts/error_on_dirty.sh
npm audit --production
docker-image-matrix:
strategy:
matrix:
docker-base-image: ["debian", "alpine"]
needs: format-taginfo-docs
runs-on: ubuntu-22.04
continue-on-error: false
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Enable osm.pbf cache
uses: actions/cache@v4
with:
path: berlin-latest.osm.pbf
key: v1-berlin-osm-pbf
restore-keys: |
v1-berlin-osm-pbf
- name: Docker build
run: |
docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
- name: Test Docker image
run: |
if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
fi
TAG=osrm-backend-local
# when `--memory-swap` value equals `--memory` it means container won't use swap
# see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
MEMORY_ARGS="--memory=1g --memory-swap=1g"
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
if [ ! -s "${PWD}/berlin-latest.geojson" ]
then
>&2 echo "No berlin-latest.geojson found"
exit 1
fi
# removing `.osrm.nbg` to check that whole pipeline works without it
rm -rf "${PWD}/berlin-latest.osrm.nbg"
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
docker stop osrm-container
build-test-publish:
needs: format-taginfo-docs
#needs: format-taginfo-docs
strategy:
matrix:
include:
- name: gcc-13-debug-cov # - name: gcc-13-debug-cov
continue-on-error: false # continue-on-error: false
node: 20 # node: 20
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Debug # BUILD_TYPE: Debug
CCOMPILER: gcc-13 # CCOMPILER: gcc-13
CUCUMBER_TIMEOUT: 20000 # CUCUMBER_TIMEOUT: 20000
CXXCOMPILER: g++-13 # CXXCOMPILER: g++-13
ENABLE_COVERAGE: ON # ENABLE_COVERAGE: ON
- name: clang-18-debug-asan-ubsan # - name: clang-18-debug-asan-ubsan
continue-on-error: false # continue-on-error: false
node: 20 # node: 20
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Debug # BUILD_TYPE: Debug
CCOMPILER: clang-18 # CCOMPILER: clang-18
CUCUMBER_TIMEOUT: 20000 # CUCUMBER_TIMEOUT: 20000
CXXCOMPILER: clang++-18 # CXXCOMPILER: clang++-18
ENABLE_SANITIZER: ON # ENABLE_SANITIZER: ON
TARGET_ARCH: x86_64-asan-ubsan # TARGET_ARCH: x86_64-asan-ubsan
OSRM_CONNECTION_RETRIES: 10 # OSRM_CONNECTION_RETRIES: 10
OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 # OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5
- name: clang-18-release # - name: clang-18-release
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang-18 # CCOMPILER: clang-18
CXXCOMPILER: clang++-18 # CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF # ENABLE_LTO: OFF
- name: clang-18-debug # - name: clang-18-debug
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Debug # BUILD_TYPE: Debug
CCOMPILER: clang-18 # CCOMPILER: clang-18
CXXCOMPILER: clang++-18 # CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF # ENABLE_LTO: OFF
- name: clang-18-debug-clang-tidy # - name: clang-18-debug-clang-tidy
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Debug # BUILD_TYPE: Debug
CCOMPILER: clang-18 # CCOMPILER: clang-18
CXXCOMPILER: clang++-18 # CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_CLANG_TIDY: ON # ENABLE_CLANG_TIDY: ON
- name: clang-17-release # - name: clang-17-release
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang-17 # CCOMPILER: clang-17
CXXCOMPILER: clang++-17 # CXXCOMPILER: clang++-17
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF # ENABLE_LTO: OFF
- name: clang-16-release # - name: clang-16-release
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang-16 # CCOMPILER: clang-16
CXXCOMPILER: clang++-16 # CXXCOMPILER: clang++-16
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF # ENABLE_LTO: OFF
- name: conan-linux-debug-asan-ubsan # - name: conan-linux-debug-asan-ubsan
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang-18 # CCOMPILER: clang-18
CXXCOMPILER: clang++-18 # CXXCOMPILER: clang++-18
ENABLE_CONAN: ON # ENABLE_CONAN: ON
ENABLE_SANITIZER: ON # ENABLE_SANITIZER: ON
ENABLE_LTO: OFF # ENABLE_LTO: OFF
- name: conan-linux-release # - name: conan-linux-release
continue-on-error: false # continue-on-error: false
node: 18 # node: 18
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang-18 # CCOMPILER: clang-18
CXXCOMPILER: clang++-18 # CXXCOMPILER: clang++-18
ENABLE_CONAN: ON # ENABLE_CONAN: ON
ENABLE_LTO: OFF # ENABLE_LTO: OFF
- name: gcc-14-release # - name: gcc-14-release
continue-on-error: false # continue-on-error: false
node: 20 # node: 20
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: gcc-14 # CCOMPILER: gcc-14
CXXCOMPILER: g++-14 # CXXCOMPILER: g++-14
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-13-release # - name: gcc-13-release
continue-on-error: false # continue-on-error: false
node: 20 # node: 20
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: gcc-13 # CCOMPILER: gcc-13
CXXCOMPILER: g++-13 # CXXCOMPILER: g++-13
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-12-release # - name: gcc-12-release
continue-on-error: false # continue-on-error: false
node: 20 # node: 20
runs-on: ubuntu-22.04 # runs-on: ubuntu-22.04
BUILD_TOOLS: ON # BUILD_TOOLS: ON
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: gcc-12 # CCOMPILER: gcc-12
CXXCOMPILER: g++-12 # CXXCOMPILER: g++-12
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: conan-linux-release-node
build_node_package: true
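The windows job's Prepare environment step above derives the PUBLISH flag by comparing the version declared in package.json with the git tag that triggered the run. A small standalone sketch of the same check, assuming it is executed from the repository root; in CI the result is appended to GITHUB_ENV rather than printed, and the version number shown is only an example.

    # Publish only when the pushed tag matches package.json,
    # e.g. refs/tags/v5.27.0 for "version": "5.27.0".
    PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
    if [[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]]; then
        echo "PUBLISH=On"
    else
        echo "PUBLISH=Off"
    fi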
@@ -311,40 +311,40 @@ jobs:
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
- name: conan-linux-debug-node # - name: conan-linux-debug-node
build_node_package: true # build_node_package: true
continue-on-error: false # continue-on-error: false
node: 20 # node: 20
runs-on: ubuntu-24.04 # runs-on: ubuntu-24.04
BUILD_TYPE: Debug # BUILD_TYPE: Debug
CCOMPILER: clang-16 # CCOMPILER: clang-16
CXXCOMPILER: clang++-16 # CXXCOMPILER: clang++-16
ENABLE_CONAN: ON # ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON # NODE_PACKAGE_TESTS_ONLY: ON
- name: conan-macos-x64-release-node # - name: conan-macos-x64-release-node
build_node_package: true # build_node_package: true
continue-on-error: true # continue-on-error: true
node: 20 # node: 20
runs-on: macos-13 # x86_64 # runs-on: macos-13 # x86_64
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang # CCOMPILER: clang
CXXCOMPILER: clang++ # CXXCOMPILER: clang++
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_ASSERTIONS: ON # ENABLE_ASSERTIONS: ON
ENABLE_CONAN: ON # ENABLE_CONAN: ON
- name: conan-macos-arm64-release-node # - name: conan-macos-arm64-release-node
build_node_package: true # build_node_package: true
continue-on-error: true # continue-on-error: true
node: 20 # node: 20
runs-on: macos-14 # arm64 # runs-on: macos-14 # arm64
BUILD_TYPE: Release # BUILD_TYPE: Release
CCOMPILER: clang # CCOMPILER: clang
CXXCOMPILER: clang++ # CXXCOMPILER: clang++
CUCUMBER_TIMEOUT: 60000 # CUCUMBER_TIMEOUT: 60000
ENABLE_ASSERTIONS: ON # ENABLE_ASSERTIONS: ON
ENABLE_CONAN: ON # ENABLE_CONAN: ON
name: ${{ matrix.name}}
continue-on-error: ${{ matrix.continue-on-error }}
@@ -524,6 +524,18 @@ jobs:
echo "Using ${JOBS} jobs"
pushd ${OSRM_BUILD_DIR}
python3 -m venv .venv
source .venv/bin/activate
python3 -m pip install conan==2.7.1
conan profile new default --detect
sed -i '' 's/compiler.cppstd=.*$/compiler.cppstd=20/' ~/.conan/profiles/default
if [[ "${ENABLE_CONAN}" == "ON" ]]; then
conan install .. --output-folder=. --build=missing --settings compiler.cppstd=20
fi
ccache --zero-stats
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DENABLE_CONAN=${ENABLE_CONAN:-OFF} \
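The lines added above bootstrap Conan 2 inside the build step: a virtualenv, a pinned conan==2.7.1, a default profile with cppstd forced to 20, and a conan install into the build directory. A minimal sketch of the same bootstrap using only documented Conan 2 commands, assuming a fresh environment; note that Conan 2 keeps its configuration under ~/.conan2 and creates the default profile with conan profile detect.

    # Conan 2 bootstrap sketch: isolated client, detected default profile,
    # pinned C++ standard, dependencies installed into the build folder.
    python3 -m venv .venv && source .venv/bin/activate
    python3 -m pip install conan==2.7.1
    conan profile detect --force       # writes ~/.conan2/profiles/default
    conan profile path default         # prints the profile path for inspection
    conan install .. --output-folder=. --build=missing -s compiler.cppstd=20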
@@ -536,7 +548,8 @@ jobs:
-DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \
-DENABLE_CCACHE=ON \
-DENABLE_LTO=${ENABLE_LTO:-ON} \
-DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR}
-DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} \
-DCMAKE_TOOLCHAIN_FILE=./conan_toolchain.cmake
make --jobs=${JOBS}
if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then
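The extra -DCMAKE_TOOLCHAIN_FILE flag points CMake at the conan_toolchain.cmake that conan install writes into the build folder, which is how the project's find_package() calls can resolve the Conan-provided packages. A condensed sketch of the resulting configure and build sequence, assuming the install step has already run in the current build directory:

    # Configure against the Conan-generated toolchain and build.
    cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE:-Release} \
        -DENABLE_CONAN=ON \
        -DCMAKE_TOOLCHAIN_FILE=./conan_toolchain.cmake
    make --jobs="$(nproc)"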
@@ -650,130 +663,130 @@ jobs:
ccache -p
ccache -s
benchmarks: # benchmarks:
if: github.event_name == 'pull_request' # if: github.event_name == 'pull_request'
needs: [format-taginfo-docs] # needs: [format-taginfo-docs]
runs-on: self-hosted # runs-on: self-hosted
env: # env:
CCOMPILER: clang-16 # CCOMPILER: clang-16
CXXCOMPILER: clang++-16 # CXXCOMPILER: clang++-16
CC: clang-16 # CC: clang-16
CXX: clang++-16 # CXX: clang++-16
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }} # PR_NUMBER: ${{ github.event.pull_request.number }}
GITHUB_REPOSITORY: ${{ github.repository }} # GITHUB_REPOSITORY: ${{ github.repository }}
RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }} # RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }}
steps: # steps:
- name: Checkout PR Branch # - name: Checkout PR Branch
uses: actions/checkout@v4 # uses: actions/checkout@v4
with: # with:
ref: ${{ github.head_ref }} # ref: ${{ github.head_ref }}
path: pr # path: pr
- name: Activate virtualenv # - name: Activate virtualenv
run: | # run: |
python3 -m venv .venv # python3 -m venv .venv
source .venv/bin/activate # source .venv/bin/activate
echo PATH=$PATH >> $GITHUB_ENV # echo PATH=$PATH >> $GITHUB_ENV
pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4" # pip install "conan<2.0.0" "requests==2.31.0" "numpy==1.26.4"
- name: Prepare data # - name: Prepare data
run: | # run: |
if [ "$RUN_BIG_BENCHMARK" = "true" ]; then # if [ "$RUN_BIG_BENCHMARK" = "true" ]; then
rm -rf ~/data.osm.pbf # rm -rf ~/data.osm.pbf
wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet # wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet
gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv # gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv
else # else
if [ ! -f "~/data.osm.pbf" ]; then # if [ ! -f "~/data.osm.pbf" ]; then
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf # wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf
else # else
echo "Using cached data.osm.pbf" # echo "Using cached data.osm.pbf"
fi # fi
gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv # gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv
fi # fi
- name: Prepare environment # - name: Prepare environment
run: | # run: |
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV # echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
mkdir -p $HOME/.ccache # mkdir -p $HOME/.ccache
ccache --zero-stats # ccache --zero-stats
ccache --max-size=256M # ccache --max-size=256M
- name: Checkout Base Branch # - name: Checkout Base Branch
uses: actions/checkout@v4 # uses: actions/checkout@v4
with: # with:
ref: ${{ github.event.pull_request.base.ref }} # ref: ${{ github.event.pull_request.base.ref }}
path: base # path: base
- name: Build Base Branch # - name: Build Base Branch
run: | # run: |
cd base # cd base
npm ci --ignore-scripts # npm ci --ignore-scripts
cd .. # cd ..
mkdir base/build # mkdir base/build
cd base/build # cd base/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
make -j$(nproc) # make -j$(nproc)
make -j$(nproc) benchmarks # make -j$(nproc) benchmarks
cd .. # cd ..
make -C test/data # make -C test/data
- name: Build PR Branch # - name: Build PR Branch
run: | # run: |
cd pr # cd pr
npm ci --ignore-scripts # npm ci --ignore-scripts
cd .. # cd ..
mkdir -p pr/build # mkdir -p pr/build
cd pr/build # cd pr/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON .. # cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
make -j$(nproc) # make -j$(nproc)
make -j$(nproc) benchmarks # make -j$(nproc) benchmarks
cd .. # cd ..
make -C test/data # make -C test/data
# we run benchmarks in tmpfs to avoid impact of disk IO # # we run benchmarks in tmpfs to avoid impact of disk IO
- name: Create folder for tmpfs # - name: Create folder for tmpfs
run: | # run: |
# if by any chance it was mounted before(e.g. due to previous job failed), unmount it # # if by any chance it was mounted before(e.g. due to previous job failed), unmount it
sudo umount ~/benchmarks | true # sudo umount ~/benchmarks | true
rm -rf ~/benchmarks # rm -rf ~/benchmarks
mkdir -p ~/benchmarks # mkdir -p ~/benchmarks
# see https://llvm.org/docs/Benchmarking.html # # see https://llvm.org/docs/Benchmarking.html
- name: Run PR Benchmarks # - name: Run PR Benchmarks
run: | # run: |
sudo cset shield -c 2-3 -k on # sudo cset shield -c 2-3 -k on
sudo mount -t tmpfs -o size=4g none ~/benchmarks # sudo mount -t tmpfs -o size=4g none ~/benchmarks
cp -rf pr/build ~/benchmarks/build # cp -rf pr/build ~/benchmarks/build
cp -rf pr/lib ~/benchmarks/lib # cp -rf pr/lib ~/benchmarks/lib
mkdir -p ~/benchmarks/test # mkdir -p ~/benchmarks/test
cp -rf pr/test/data ~/benchmarks/test/data # cp -rf pr/test/data ~/benchmarks/test/data
cp -rf pr/profiles ~/benchmarks/profiles # cp -rf pr/profiles ~/benchmarks/profiles
sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv # sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
sudo umount ~/benchmarks # sudo umount ~/benchmarks
sudo cset shield --reset # sudo cset shield --reset
- name: Run Base Benchmarks # - name: Run Base Benchmarks
run: | # run: |
sudo cset shield -c 2-3 -k on # sudo cset shield -c 2-3 -k on
sudo mount -t tmpfs -o size=4g none ~/benchmarks # sudo mount -t tmpfs -o size=4g none ~/benchmarks
cp -rf base/build ~/benchmarks/build # cp -rf base/build ~/benchmarks/build
cp -rf base/lib ~/benchmarks/lib # cp -rf base/lib ~/benchmarks/lib
mkdir -p ~/benchmarks/test # mkdir -p ~/benchmarks/test
cp -rf base/test/data ~/benchmarks/test/data # cp -rf base/test/data ~/benchmarks/test/data
cp -rf base/profiles ~/benchmarks/profiles # cp -rf base/profiles ~/benchmarks/profiles
# TODO: remove it when base branch will have this file at needed location # # TODO: remove it when base branch will have this file at needed location
if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then # if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then
cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json # cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json
fi # fi
# we intentionally use scripts from PR branch to be able to update them and see results in the same PR # # we intentionally use scripts from PR branch to be able to update them and see results in the same PR
sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv # sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
sudo umount ~/benchmarks # sudo umount ~/benchmarks
sudo cset shield --reset # sudo cset shield --reset
- name: Post Benchmark Results # - name: Post Benchmark Results
run: | # run: |
python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results # python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
- name: Show CCache statistics # - name: Show CCache statistics
run: | # run: |
ccache -p # ccache -p
ccache -s # ccache -s
ci-complete: # ci-complete:
runs-on: ubuntu-22.04 # runs-on: ubuntu-22.04
needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] # needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks]
steps: # steps:
- run: echo "CI complete" # - run: echo "CI complete"
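The commented-out benchmarks job isolates its measurements in two ways: cset shield reserves CPU cores 2-3 for the benchmark processes, and the binaries plus test data are copied onto a tmpfs mount so that disk I/O stays out of the timings (see the llvm.org benchmarking notes referenced in the job). A condensed sketch of that recipe, assuming the cpuset tooling is installed; the benchmark command itself is a placeholder.

    # Reserve cores 2-3; -k on keeps movable kernel threads off them as well.
    sudo cset shield -c 2-3 -k on
    # Keep the working set in RAM so disk I/O does not skew results.
    mkdir -p ~/benchmarks
    sudo mount -t tmpfs -o size=4g none ~/benchmarks
    cp -rf build lib profiles ~/benchmarks/
    # Run only on the shielded cores; replace with the real benchmark driver.
    sudo cset shield --exec -- ~/benchmarks/build/run-some-benchmark
    # Tear down.
    sudo umount ~/benchmarks
    sudo cset shield --reset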


@@ -22,7 +22,7 @@ if (NOT WIN32 AND NOT DEFINED ENV{OSRM_BUILD_DIR})
set(ENV{OSRM_BUILD_DIR} ${CMAKE_CURRENT_BINARY_DIR})
endif()
option(ENABLE_CONAN "Use conan for dependencies" OFF)
option(ENABLE_CCACHE "Speed up incremental rebuilds via ccache" ON)
option(BUILD_TOOLS "Build OSRM tools" OFF)
option(BUILD_PACKAGE "Build OSRM package" OFF)
@@ -321,125 +321,50 @@ if (MSVC)
add_definitions(-DBOOST_ALL_NO_LIB)
endif()
find_package(Boost 1.70 REQUIRED COMPONENTS ${BOOST_COMPONENTS})
add_dependency_includes(${Boost_INCLUDE_DIRS})
#message(FATAL_ERROR "Boost_INCLUDE_DIRS: ${Boost_INCLUDE_DIRS} Boost_LIBRARIES: ${Boost_LIBRARIES}")
set(BOOST_LIBRARIES ${Boost_LIBRARIES})
# set(BOOST_IN ${Boost_LIBRARIES})
find_package(TBB REQUIRED)
add_dependency_includes(${TBB_INCLUDE_DIR})
set(TBB_LIBRARIES ${TBB_LIBRARIES})
find_package(EXPAT REQUIRED)
add_dependency_includes(${EXPAT_INCLUDE_DIRS})
#message(FATAL_ERROR "EXPAT_INCLUDE_DIRS: ${expat_INCLUDE_DIRS} EXPAT_LIBRARIES: ${expat_LIBRARIES}")
set(EXPAT_LIBRARIES ${expat_LIBRARIES})
set(EXPAT_INCLUDE_DIRS ${expat_INCLUDE_DIRS})
find_package(BZip2 REQUIRED)
add_dependency_includes(${BZIP2_INCLUDE_DIR})
find_package(Lua 5.2 REQUIRED)
set(LUA_LIBRARIES lua::lua)
if (LUA_FOUND)
if(ENABLE_CONAN)
message(STATUS "Installing dependencies via Conan")
# Conan will generate Find*.cmake files to build directory, so we use them with the highest priority
list(INSERT CMAKE_MODULE_PATH 0 ${CMAKE_BINARY_DIR})
list(INSERT CMAKE_PREFIX_PATH 0 ${CMAKE_BINARY_DIR})
include(${CMAKE_CURRENT_LIST_DIR}/cmake/conan.cmake)
conan_check(REQUIRED)
set(CONAN_BOOST_VERSION "1.85.0@#14265ec82b25d91305bbb3b30d3357f8")
set(CONAN_BZIP2_VERSION "1.0.8@#d1b2d5816f25865acf978501dff1f897")
set(CONAN_EXPAT_VERSION "2.6.2@#2d385d0d50eb5561006a7ff9e356656b")
set(CONAN_LUA_VERSION "5.4.6@#658d6089093cf01992c2737ab2e96763")
set(CONAN_TBB_VERSION "2021.12.0@#e56e5b44be8d690530585dd3634c0106")
set(CONAN_SYSTEM_INCLUDES ON)
set(CONAN_ARGS
REQUIRES
"boost/${CONAN_BOOST_VERSION}"
"bzip2/${CONAN_BZIP2_VERSION}"
"expat/${CONAN_EXPAT_VERSION}"
"lua/${CONAN_LUA_VERSION}"
"onetbb/${CONAN_TBB_VERSION}"
BASIC_SETUP
GENERATORS cmake_find_package json # json generator generates a conanbuildinfo.json in the build folder so (non-CMake) projects can easily parse OSRM's dependencies
KEEP_RPATHS
NO_OUTPUT_DIRS
OPTIONS boost:without_stacktrace=True # Apple Silicon cross-compilation fails without it
BUILD missing
)
# Enable revisions in the conan config
execute_process(COMMAND ${CONAN_CMD} config set general.revisions_enabled=1 RESULT_VARIABLE RET_CODE)
if(NOT "${RET_CODE}" STREQUAL "0")
message(FATAL_ERROR "Error setting revisions for Conan: '${RET_CODE}'")
endif()
# explicitly say Conan to use x86 dependencies if build for x86 platforms (https://github.com/conan-io/cmake-conan/issues/141)
if(NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
conan_cmake_run("${CONAN_ARGS};ARCH;x86")
# cross-compilation for Apple Silicon
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64" AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL "x86_64")
conan_cmake_run("${CONAN_ARGS};ARCH;armv8")
else()
conan_cmake_run("${CONAN_ARGS}")
endif()
add_dependency_includes(${CONAN_INCLUDE_DIRS_BOOST})
add_dependency_includes(${CONAN_INCLUDE_DIRS_BZIP2})
add_dependency_includes(${CONAN_INCLUDE_DIRS_EXPAT})
add_dependency_includes(${CONAN_INCLUDE_DIRS_LUA})
add_dependency_includes(${CONAN_INCLUDE_DIRS_TBB})
set(Boost_USE_STATIC_LIBS ON)
find_package(Boost REQUIRED COMPONENTS ${BOOST_COMPONENTS})
set(Boost_DATE_TIME_LIBRARY "${Boost_date_time_LIB_TARGETS}")
set(Boost_PROGRAM_OPTIONS_LIBRARY "${Boost_program_options_LIB_TARGETS}")
set(Boost_IOSTREAMS_LIBRARY "${Boost_iostreams_LIB_TARGETS}")
set(Boost_THREAD_LIBRARY "${Boost_thread_LIB_TARGETS}")
set(Boost_ZLIB_LIBRARY "${Boost_zlib_LIB_TARGETS}")
set(Boost_UNIT_TEST_FRAMEWORK_LIBRARY "${Boost_unit_test_framework_LIB_TARGETS}")
find_package(BZip2 REQUIRED)
find_package(EXPAT REQUIRED)
find_package(lua REQUIRED)
set(LUA_LIBRARIES ${lua_LIBRARIES})
find_package(TBB REQUIRED)
# note: we avoid calling find_package(Osmium ...) here to ensure that the
# expat and bzip2 are used from conan rather than the system
include_directories(SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include)
else()
find_package(Boost 1.70 REQUIRED COMPONENTS ${BOOST_COMPONENTS})
add_dependency_includes(${Boost_INCLUDE_DIRS})
find_package(TBB REQUIRED)
add_dependency_includes(${TBB_INCLUDE_DIR})
set(TBB_LIBRARIES TBB::tbb)
find_package(EXPAT REQUIRED)
add_dependency_includes(${EXPAT_INCLUDE_DIRS})
find_package(BZip2 REQUIRED)
add_dependency_includes(${BZIP2_INCLUDE_DIR})
find_package(Lua 5.2 REQUIRED)
if (LUA_FOUND)
message(STATUS "Using Lua ${LUA_VERSION_STRING}")
endif()
add_dependency_includes(${LUA_INCLUDE_DIR})
# add a target to generate API documentation with Doxygen
find_package(Doxygen)
if(DOXYGEN_FOUND)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
add_custom_target(doc
${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating API documentation with Doxygen" VERBATIM
)
endif()
# note libosmium depends on expat and bzip2
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/cmake")
if(NOT OSMIUM_INCLUDE_DIR)
set(OSMIUM_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include")
endif()
find_package(Osmium REQUIRED COMPONENTS io)
include_directories(SYSTEM ${OSMIUM_INCLUDE_DIR})
endif()
add_dependency_includes(${lua_INCLUDE_DIRS})
# add a target to generate API documentation with Doxygen
find_package(Doxygen)
if(DOXYGEN_FOUND)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
add_custom_target(doc
${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating API documentation with Doxygen" VERBATIM
)
endif()
# note libosmium depends on expat and bzip2
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/cmake")
if(NOT OSMIUM_INCLUDE_DIR)
set(OSMIUM_INCLUDE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/third_party/libosmium/include")
endif()
find_package(Osmium REQUIRED COMPONENTS io)
include_directories(SYSTEM ${OSMIUM_INCLUDE_DIR})
# prefix compilation with ccache by default if available and on clang or gcc
if(ENABLE_CCACHE AND (CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU"))
find_program(CCACHE_FOUND ccache)
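With the cmake-conan branch removed, the find_package() calls above always run; when ENABLE_CONAN is OFF they have to be satisfied by system packages instead of Conan-provided ones. One possible way to provide them on Ubuntu, purely as a sketch; package names and available versions vary by distribution.

    # Hypothetical system-package route (Ubuntu). Boost >= 1.70, TBB, Expat,
    # BZip2 and Lua >= 5.2 are what the find_package() calls above look for.
    sudo apt-get update
    sudo apt-get install -y build-essential cmake \
        libboost-all-dev libtbb-dev libexpat1-dev libbz2-dev liblua5.4-dev
    cmake .. -DENABLE_CONAN=OFF -DCMAKE_BUILD_TYPE=Release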
@@ -464,15 +389,15 @@ add_dependency_defines(-DBOOST_PHOENIX_STL_TUPLE_H_)
add_definitions(${OSRM_DEFINES})
include_directories(SYSTEM ${DEPENDENCIES_INCLUDE_DIRS})
set(BOOST_BASE_LIBRARIES
${Boost_DATE_TIME_LIBRARY}
${Boost_IOSTREAMS_LIBRARY}
${Boost_THREAD_LIBRARY})
set(BOOST_ENGINE_LIBRARIES
${Boost_ZLIB_LIBRARY}
${Boost_REGEX_LIBRARY}
${BOOST_BASE_LIBRARIES})
# set(BOOST_LIBRARIES
# ${Boost_DATE_TIME_LIBRARY}
# ${Boost_IOSTREAMS_LIBRARY}
# ${Boost_THREAD_LIBRARY})
# set(BOOST_LIBRARIES
# ${Boost_ZLIB_LIBRARY}
# ${Boost_REGEX_LIBRARY}
# ${BOOST_LIBRARIES})
# Binaries
target_link_libraries(osrm-datastore osrm_store ${Boost_PROGRAM_OPTIONS_LIBRARY})
@@ -486,7 +411,7 @@ endif()
set(EXTRACTOR_LIBRARIES
${BZIP2_LIBRARIES}
${BOOST_BASE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${EXPAT_LIBRARIES}
${LUA_LIBRARIES}
@@ -495,53 +420,53 @@ set(EXTRACTOR_LIBRARIES
${ZLIB_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES})
set(GUIDANCE_LIBRARIES
${BOOST_BASE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${LUA_LIBRARIES}
${TBB_LIBRARIES}
${MAYBE_COVERAGE_LIBRARIES})
set(PARTITIONER_LIBRARIES
${BOOST_ENGINE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${TBB_LIBRARIES}
${MAYBE_RT_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES}
${ZLIB_LIBRARY})
set(CUSTOMIZER_LIBRARIES
${BOOST_ENGINE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${TBB_LIBRARIES}
${MAYBE_RT_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES})
set(UPDATER_LIBRARIES
${BOOST_BASE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${TBB_LIBRARIES}
${MAYBE_RT_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES}
${ZLIB_LIBRARY})
set(CONTRACTOR_LIBRARIES
${BOOST_BASE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${LUA_LIBRARIES}
${TBB_LIBRARIES}
${MAYBE_RT_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES})
set(ENGINE_LIBRARIES
${BOOST_ENGINE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${TBB_LIBRARIES}
${MAYBE_RT_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES}
${ZLIB_LIBRARY})
set(STORAGE_LIBRARIES
${BOOST_BASE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${TBB_LIBRARIES}
${MAYBE_RT_LIBRARY}
${MAYBE_COVERAGE_LIBRARIES})
set(UTIL_LIBRARIES
${BOOST_BASE_LIBRARIES}
${BOOST_LIBRARIES}
${CMAKE_THREAD_LIBS_INIT}
${TBB_LIBRARIES}
${MAYBE_COVERAGE_LIBRARIES}
@@ -558,13 +483,13 @@ target_link_libraries(osrm_store ${STORAGE_LIBRARIES})
# BUILD_COMPONENTS
add_executable(osrm-components src/tools/components.cpp $<TARGET_OBJECTS:MICROTAR> $<TARGET_OBJECTS:UTIL>)
target_link_libraries(osrm-components ${TBB_LIBRARIES} ${BOOST_BASE_LIBRARIES} ${UTIL_LIBRARIES})
target_link_libraries(osrm-components ${TBB_LIBRARIES} ${BOOST_LIBRARIES} ${UTIL_LIBRARIES})
install(TARGETS osrm-components DESTINATION bin)
if(BUILD_TOOLS)
message(STATUS "Activating OSRM internal tools")
add_executable(osrm-io-benchmark src/tools/io-benchmark.cpp $<TARGET_OBJECTS:UTIL>)
target_link_libraries(osrm-io-benchmark ${BOOST_BASE_LIBRARIES} ${TBB_LIBRARIES})
target_link_libraries(osrm-io-benchmark ${BOOST_LIBRARIES} ${TBB_LIBRARIES})
install(TARGETS osrm-io-benchmark DESTINATION bin)
endif()
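osrm-components above is linked unconditionally, while osrm-io-benchmark is only added when BUILD_TOOLS is ON. A short usage sketch from an already configured build directory; the install prefix is illustrative.

    # Enable and build the optional tool target alongside osrm-components.
    cmake .. -DBUILD_TOOLS=ON
    cmake --build . --target osrm-components osrm-io-benchmark
    # The install(TARGETS ... DESTINATION bin) rules place both under <prefix>/bin.
    cmake --install . --prefix "$PWD/stage"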