diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml
index e6dc8fca9..f1b28b310 100644
--- a/.github/workflows/osrm-backend.yml
+++ b/.github/workflows/osrm-backend.yml
@@ -253,7 +253,6 @@ jobs:
             BUILD_TYPE: Release
             CCOMPILER: gcc-13
             CXXCOMPILER: g++-13
-            ENABLE_BENCHMARKS: ON
             CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'

           - name: gcc-12-release
@@ -264,7 +263,6 @@ jobs:
             BUILD_TYPE: Release
             CCOMPILER: gcc-12
             CXXCOMPILER: g++-12
-            ENABLE_BENCHMARKS: ON
             CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'

           - name: gcc-11-release
@@ -275,7 +273,6 @@ jobs:
             BUILD_TYPE: Release
             CCOMPILER: gcc-11
             CXXCOMPILER: g++-11
-            ENABLE_BENCHMARKS: ON

           - name: conan-linux-release-node
             build_node_package: true
@@ -511,18 +508,6 @@ jobs:
fi
popd
npm test
- - name: Run benchmarks
- if: ${{ matrix.ENABLE_BENCHMARKS == 'ON' }}
- run: |
- pushd ${OSRM_BUILD_DIR}
- make --jobs=${JOBS} benchmarks
- ./src/benchmarks/alias-bench
- ./src/benchmarks/json-render-bench ../src/benchmarks/portugal_to_korea.json
- ./src/benchmarks/match-bench ../test/data/ch/monaco.osrm ch
- ./src/benchmarks/match-bench ../test/data/mld/monaco.osrm mld
- ./src/benchmarks/packedvector-bench
- ./src/benchmarks/rtree-bench ../test/data/monaco.osrm.ramIndex ../test/data/monaco.osrm.fileIndex ../test/data/monaco.osrm.nbg_nodes
- popd
- name: Use Node 18
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
@@ -595,8 +580,72 @@ jobs:
           replacesArtifacts: true
           token: ${{ secrets.GITHUB_TOKEN }}
+
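+  # Compares benchmark results between a PR and its base branch: both are
+  # checked out and built side by side, the same benchmark suite is run
+  # against each, and the results are posted to the PR description.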
+  benchmarks:
+    if: github.event_name == 'pull_request'
+    needs: [format-taginfo-docs]
+    runs-on: ubuntu-22.04
+    env:
+      CCOMPILER: clang-13
+      CXXCOMPILER: clang++-13
+      CC: clang-13
+      CXX: clang++-13
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      PR_NUMBER: ${{ github.event.pull_request.number }}
+      GITHUB_REPOSITORY: ${{ github.repository }}
+    steps:
+      - name: Enable compiler cache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: v1-ccache-benchmarks-${{ github.sha }}
+          restore-keys: |
+            v1-ccache-benchmarks-
+      - name: Enable Conan cache
+        uses: actions/cache@v3
+        with:
+          path: ~/.conan
+          key: v1-conan-benchmarks-${{ github.sha }}
+          restore-keys: |
+            v1-conan-benchmarks-
+      - name: Checkout PR Branch
+        uses: actions/checkout@v3
+        with:
+          ref: ${{ github.head_ref }}
+          path: pr
+      - run: python3 -m pip install "conan<2.0.0" "requests==2.31.0"
+      - name: Build PR Branch
+        run: |
+          mkdir -p pr/build
+          cd pr/build
+          cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release ..
+          make -j$(nproc)
+          make -j$(nproc) benchmarks
+          cd ..
+          make -C test/data
+      - name: Checkout Base Branch
+        uses: actions/checkout@v3
+        with:
+          ref: ${{ github.event.pull_request.base.ref }}
+          path: base
+      - name: Build Base Branch
+        run: |
+          mkdir -p base/build
+          cd base/build
+          cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release ..
+          make -j$(nproc)
+          make -j$(nproc) benchmarks
+          cd ..
+          make -C test/data
+      - name: Run Benchmarks
+        run: |
+          ./pr/scripts/ci/run_benchmarks.sh base pr
+      - name: Post Benchmark Results
+        run: |
+          python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
+
   ci-complete:
     runs-on: ubuntu-22.04
-    needs: [build-test-publish, docker-image, windows-release-node]
+    needs: [build-test-publish, docker-image, windows-release-node, benchmarks]
     steps:
       - run: echo "CI complete"
diff --git a/scripts/ci/post_benchmark_results.py b/scripts/ci/post_benchmark_results.py
new file mode 100644
index 000000000..3efd5fcad
--- /dev/null
+++ b/scripts/ci/post_benchmark_results.py
@@ -0,0 +1,96 @@
+import requests
+import os
+import re
+import sys
+import json
+
+GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
+REPO = os.getenv('GITHUB_REPOSITORY')
+PR_NUMBER = os.getenv('PR_NUMBER')
+
+REPO_OWNER, REPO_NAME = REPO.split('/')
+
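+# Render the collected results as a GitHub-flavored markdown table with one
+# row per benchmark and one column per branch.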
+def create_markdown_table(results):
+    results = sorted(results, key=lambda x: x['name'])
+    header = "| Benchmark | Base | PR |\n|-----------|------|----|"
+    rows = []
+    for result in results:
+        name = result['name']
+        # Benchmark output is multi-line; <br/> keeps each result in a single
+        # table cell. 'base' or 'pr' may be None if the benchmark exists on
+        # only one branch.
+        base = (result['base'] or '').replace('\n', '<br/>')
+        pr = (result['pr'] or '').replace('\n', '<br/>')
+        row = f"| {name} | {base} | {pr} |"
+        rows.append(row)
+    return f"{header}\n" + "\n".join(rows)
+
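+# Thin wrappers around the GitHub REST API pulls endpoint; both authenticate
+# with the workflow-provided GITHUB_TOKEN.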
+def get_pr_details(repo_owner, repo_name, pr_number):
+    url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/pulls/{pr_number}"
+    headers = {'Authorization': f'token {GITHUB_TOKEN}'}
+    response = requests.get(url, headers=headers)
+    response.raise_for_status()
+    return response.json()
+
+def update_pr_description(repo_owner, repo_name, pr_number, body):
+    url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/pulls/{pr_number}"
+    headers = {'Authorization': f'token {GITHUB_TOKEN}'}
+    data = {'body': body}
+    response = requests.patch(url, headers=headers, json=data)
+    response.raise_for_status()
+    return response.json()
+
+
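+# Read every *.bench file from both result folders and pair them up by file
+# name, so each benchmark ends up with a 'base' and a 'pr' value (either may
+# be None if the benchmark only exists on one branch).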
+def collect_benchmark_results(base_folder, pr_folder):
+    results = []
+    results_index = {}
+
+    for file in os.listdir(base_folder):
+        if not file.endswith('.bench'):
+            continue
+        with open(f"{base_folder}/{file}") as f:
+            result = f.read().strip()
+        results.append({'base': result, 'pr': None, 'name': os.path.splitext(file)[0]})
+        results_index[file] = len(results) - 1
+
+    for file in os.listdir(pr_folder):
+        if not file.endswith('.bench'):
+            continue
+        with open(f"{pr_folder}/{file}") as f:
+            result = f.read().strip()
+        if file in results_index:
+            results[results_index[file]]['pr'] = result
+        else:
+            results.append({'base': None, 'pr': result, 'name': os.path.splitext(file)[0]})
+
+    return results
+
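+# Collect both result sets, render them as a table, and insert or replace the
+# "Benchmark Results" section of the PR description.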
+def main():
+    if len(sys.argv) != 3:
+        print("Usage: python post_benchmark_results.py <base_folder> <pr_folder>")
+        sys.exit(1)
+
+    base_folder = sys.argv[1]
+    pr_folder = sys.argv[2]
+
+    benchmark_results = collect_benchmark_results(base_folder, pr_folder)
+
+    pr_details = get_pr_details(REPO_OWNER, REPO_NAME, PR_NUMBER)
+    # The body is null for PRs without a description.
+    pr_body = pr_details.get('body') or ''
+
+    markdown_table = create_markdown_table(benchmark_results)
+    # Sentinel comments delimit the section so that reruns replace it in
+    # place instead of appending a new copy each time.
+    new_benchmark_section = (
+        "<!-- BENCHMARK_RESULTS_START -->\n"
+        f"## Benchmark Results\n{markdown_table}\n"
+        "<!-- BENCHMARK_RESULTS_END -->"
+    )
+
+    section_pattern = r'<!-- BENCHMARK_RESULTS_START -->.*<!-- BENCHMARK_RESULTS_END -->'
+    if re.search(section_pattern, pr_body, re.DOTALL):
+        updated_body = re.sub(section_pattern, new_benchmark_section, pr_body, flags=re.DOTALL)
+    else:
+        updated_body = f"{pr_body}\n\n{new_benchmark_section}"
+
+    update_pr_description(REPO_OWNER, REPO_NAME, PR_NUMBER, updated_body)
+    print("PR description updated successfully.")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh
new file mode 100755
index 000000000..94cf57a57
--- /dev/null
+++ b/scripts/ci/run_benchmarks.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+set -euo pipefail
+
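+# Usage: run_benchmarks.sh <folder_a> <folder_b>
+# Runs each benchmark binary from <folder>/build/src/benchmarks and writes
+# one .bench file per benchmark into <folder>_results for later comparison.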
+function run_benchmarks_for_folder {
+    echo "Running benchmarks for $1"
+
+    FOLDER=$1
+    RESULTS_FOLDER=$2
+
+    mkdir -p "$RESULTS_FOLDER"
+
+    BENCHMARKS_FOLDER="$FOLDER/build/src/benchmarks"
+
+    "./$BENCHMARKS_FOLDER/match-bench" "./$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench"
+    "./$BENCHMARKS_FOLDER/match-bench" "./$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench"
+    "./$BENCHMARKS_FOLDER/alias-bench" > "$RESULTS_FOLDER/alias.bench"
+    "./$BENCHMARKS_FOLDER/json-render-bench" "./$FOLDER/src/benchmarks/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench"
+    "./$BENCHMARKS_FOLDER/packedvector-bench" > "$RESULTS_FOLDER/packedvector.bench"
+    "./$BENCHMARKS_FOLDER/rtree-bench" "./$FOLDER/test/data/monaco.osrm.ramIndex" "./$FOLDER/test/data/monaco.osrm.fileIndex" "./$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench"
+}
+
+run_benchmarks_for_folder "$1" "${1}_results"
+run_benchmarks_for_folder "$2" "${2}_results"
+
diff --git a/src/benchmarks/alias.cpp b/src/benchmarks/alias.cpp
index f8ca5dd12..f2eab6af8 100644
--- a/src/benchmarks/alias.cpp
+++ b/src/benchmarks/alias.cpp
@@ -64,7 +64,7 @@ int main(int, char **)
         return EXIT_FAILURE;
     }
     TIMER_STOP(aliased_u32);
-    util::Log() << "aliased u32: " << TIMER_MSEC(aliased_u32);
+    std::cout << "aliased u32: " << TIMER_MSEC(aliased_u32) << std::endl;

     TIMER_START(plain_u32);
     for (auto round : util::irange(0, num_rounds))
@@ -83,7 +83,7 @@ int main(int, char **)
         return EXIT_FAILURE;
     }
     TIMER_STOP(plain_u32);
-    util::Log() << "plain u32: " << TIMER_MSEC(plain_u32);
+    std::cout << "plain u32: " << TIMER_MSEC(plain_u32) << std::endl;

     TIMER_START(aliased_double);
     for (auto round : util::irange(0, num_rounds))
@@ -103,7 +103,7 @@ int main(int, char **)
         return EXIT_FAILURE;
     }
     TIMER_STOP(aliased_double);
-    util::Log() << "aliased double: " << TIMER_MSEC(aliased_double);
+    std::cout << "aliased double: " << TIMER_MSEC(aliased_double) << std::endl;

     TIMER_START(plain_double);
     for (auto round : util::irange(0, num_rounds))
@@ -123,5 +123,5 @@ int main(int, char **)
         return EXIT_FAILURE;
     }
     TIMER_STOP(plain_double);
-    util::Log() << "plain double: " << TIMER_MSEC(plain_double);
+    std::cout << "plain double: " << TIMER_MSEC(plain_double) << std::endl;
 }
diff --git a/src/benchmarks/packed_vector.cpp b/src/benchmarks/packed_vector.cpp
index ac51d1a68..62aa1634d 100644
--- a/src/benchmarks/packed_vector.cpp
+++ b/src/benchmarks/packed_vector.cpp
@@ -72,10 +72,10 @@ int main(int, char **)
     auto write_slowdown = result_packed.random_write_ms / result_plain.random_write_ms;
     auto read_slowdown = result_packed.random_read_ms / result_plain.random_read_ms;

-    util::Log() << "random write: std::vector " << result_plain.random_write_ms
-                << " ms, util::packed_vector " << result_packed.random_write_ms << " ms. "
-                << write_slowdown;
-    util::Log() << "random read: std::vector " << result_plain.random_read_ms
-                << " ms, util::packed_vector " << result_packed.random_read_ms << " ms. "
-                << read_slowdown;
+    std::cout << "random write:\nstd::vector " << result_plain.random_write_ms
+              << " ms\nutil::packed_vector " << result_packed.random_write_ms << " ms\n"
+              << "slowdown: " << write_slowdown << std::endl;
+    std::cout << "random read:\nstd::vector " << result_plain.random_read_ms
+              << " ms\nutil::packed_vector " << result_packed.random_read_ms << " ms\n"
+              << "slowdown: " << read_slowdown << std::endl;
 }
diff --git a/src/benchmarks/static_rtree.cpp b/src/benchmarks/static_rtree.cpp
index d2dd08fe5..eaa784ea8 100644
--- a/src/benchmarks/static_rtree.cpp
+++ b/src/benchmarks/static_rtree.cpp
@@ -36,8 +36,6 @@ void benchmarkQuery(const std::vector<util::Coordinate> &queries,
                     const std::string &name,
                     QueryT query)
 {
-    std::cout << "Running " << name << " with " << queries.size() << " coordinates: " << std::flush;
-
     TIMER_START(query);
     for (const auto &q : queries)
     {
@@ -46,11 +44,9 @@ void benchmarkQuery(const std::vector<util::Coordinate> &queries,
     }
     TIMER_STOP(query);

-    std::cout << "Took " << TIMER_SEC(query) << " seconds "
-              << "(" << TIMER_MSEC(query) << "ms"
-              << ") -> " << TIMER_MSEC(query) / queries.size() << " ms/query "
-              << "(" << TIMER_MSEC(query) << "ms"
-              << ")" << std::endl;
+    std::cout << name << ":\n"
+              << TIMER_MSEC(query) << "ms"
+              << " -> " << TIMER_MSEC(query) / queries.size() << " ms/query" << std::endl;
 }

 void benchmark(BenchStaticRTree &rtree, unsigned num_queries)
@@ -65,11 +61,10 @@ void benchmark(BenchStaticRTree &rtree, unsigned num_queries)
                              util::FixedLatitude{lat_udist(mt_rand)});
     }

+    benchmarkQuery(
+        queries, "1 result", [&rtree](const util::Coordinate &q) { return rtree.Nearest(q, 1); });
     benchmarkQuery(queries,
-                   "raw RTree queries (1 result)",
-                   [&rtree](const util::Coordinate &q) { return rtree.Nearest(q, 1); });
-    benchmarkQuery(queries,
-                   "raw RTree queries (10 results)",
+                   "10 results",
                    [&rtree](const util::Coordinate &q) { return rtree.Nearest(q, 10); });
 }

 } // namespace osrm::benchmarks