From 69cbb1046de994aaa614625067aaeed1027d9b7d Mon Sep 17 00:00:00 2001 From: Patrick Roy Date: Wed, 7 Jun 2023 12:06:00 +0100 Subject: [PATCH] test: print benchmark numbers at end of benchmark test This will allow us to keep track of the numbers Signed-off-by: Patrick Roy --- .buildkite/pipeline_pr_no_block.py | 2 +- src/vmm/benches/cpu_templates.rs | 2 +- .../performance/test_benchmarks.py | 13 +++++++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.buildkite/pipeline_pr_no_block.py b/.buildkite/pipeline_pr_no_block.py index 2a1471ad872..3782542b80b 100755 --- a/.buildkite/pipeline_pr_no_block.py +++ b/.buildkite/pipeline_pr_no_block.py @@ -26,7 +26,7 @@ optional_grp = group( "❓ Optional", - "./tools/devtool -y test -c 1-10 -m 0 -- ../tests/integration_tests/ -m no_block_pr", + "./tools/devtool -y test -c 1-10 -m 0 -- ../tests/integration_tests/ -m no_block_pr --log-cli-level=INFO", **defaults, ) diff --git a/src/vmm/benches/cpu_templates.rs b/src/vmm/benches/cpu_templates.rs index ed77cd30333..7fc752c8792 100644 --- a/src/vmm/benches/cpu_templates.rs +++ b/src/vmm/benches/cpu_templates.rs @@ -45,7 +45,7 @@ pub fn cpu_template_benchmark(c: &mut Criterion) { criterion_group! 
{ name = cpu_template_benches; - config = Criterion::default().sample_size(200).output_directory(Path::new("../../build/vmm_benchmark/cpu_templates")); + config = Criterion::default().sample_size(200); targets = cpu_template_benchmark } diff --git a/tests/integration_tests/performance/test_benchmarks.py b/tests/integration_tests/performance/test_benchmarks.py index ce234370b2c..e2102ea5338 100644 --- a/tests/integration_tests/performance/test_benchmarks.py +++ b/tests/integration_tests/performance/test_benchmarks.py @@ -2,6 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 """Optional benchmarks-do-not-regress test""" +import json +import logging import os import platform @@ -11,6 +13,7 @@ from host_tools.cargo_build import cargo TARGET_BRANCH = os.environ.get("BUILDKITE_PULL_REQUEST_BASE_BRANCH") or "main" +LOGGER = logging.getLogger(__name__) @pytest.mark.no_block_pr @@ -41,6 +44,16 @@ def test_no_regression_relative_to_target_branch(): if "Performance has regressed." in result ) + for benchmark in os.listdir("../build/cargo_target/criterion"): + with open( + f"../build/cargo_target/criterion/{benchmark}/new/estimates.json", + encoding="utf-8", + ) as file: + data = json.load(file) + average_ns = data["mean"]["point_estimate"] + + LOGGER.info("%s mean: %iµs", benchmark, average_ns / 1000) + # If this string is anywhere in stdout, then at least one of our benchmarks # is now performing worse with the PR changes. assert not regressions_only, "\n" + regressions_only