Add BLS With Taproot Member #23664

Workflow file for this run

name: ⚡️ Benchmarks
on:
  workflow_dispatch:
    inputs:
      repeats:
        description: "The number of times to execute each benchmark"
        type: number
        default: 1
  push:
    paths-ignore:
      - "**.md"
    branches:
      - "long_lived/**"
      - main
      - "release/**"
  release:
    types: [published]
  pull_request:
    paths-ignore:
      - "**.md"
    branches:
      - "**"
concurrency:
  group: ${{ github.event_name == 'pull_request' && format('{0}-{1}', github.workflow_ref, github.event.pull_request.number) || github.run_id }}
  cancel-in-progress: true
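# The setup job resolves the repeat count and a matching timeout once, and exposes
# them as job outputs for the benchmark job below.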
jobs:
  setup:
    name: Setup
    runs-on: ubuntu-latest
    timeout-minutes: 5
    outputs:
      repeats: ${{ steps.repeats.outputs.repeats }}
      timeout: ${{ steps.timeout.outputs.timeout }}
    steps:
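      # inputs.repeats is only populated on workflow_dispatch runs; on push, release,
      # and pull_request events it is empty, so this falls back to a single repeat.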
      - name: Calculate repeats
        id: repeats
        run: |
          echo "repeats=${{ inputs.repeats != '' && inputs.repeats || 1 }}" >> "$GITHUB_OUTPUT"
      - name: Calculate timeout
        id: timeout
        run: |
          echo "timeout=$(( ${{ steps.repeats.outputs.repeats }} * 20 ))" >> "$GITHUB_OUTPUT"
  build:
    name: Benchmarks
    runs-on: benchmark
    needs:
      - setup
    container:
      image: chianetwork/ubuntu-22.04-builder:latest
    timeout-minutes: ${{ fromJSON(needs.setup.outputs.timeout) }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
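    # CHIA_ROOT points into the .chia checkout below so the tests find the
    # pre-generated test blocks and plots from the test-cache repository.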
    env:
      CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet
      BLOCKS_AND_PLOTS_VERSION: 0.40.0
    steps:
      - name: Clean workspace
        uses: Chia-Network/actions/clean-workspace@main
      - name: Add safe git directory
        uses: Chia-Network/actions/git-mark-workspace-safe@main
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: chia-network/actions/cache-pip@main
        with:
          mode: poetry
      - name: Checkout test blocks and plots
        uses: actions/checkout@v4
        with:
          repository: "Chia-Network/test-cache"
          path: ".chia"
          ref: ${{ env.BLOCKS_AND_PLOTS_VERSION }}
          fetch-depth: 1
      - uses: ./.github/actions/install
        with:
          python-version: ${{ matrix.python-version }}
          development: true
          legacy_keyring: true
      - uses: chia-network/actions/activate-venv@main
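      # -n 0 disables pytest-xdist parallelism so timings are not skewed by concurrent
      # workers; -m benchmark selects only benchmark-marked tests, and the repo's
      # --benchmark-repeats option runs each one the requested number of times.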
      - name: pytest
        run: |
          pytest -n 0 --capture no -m benchmark -o 'junit_suite_name=benchmarks' --junitxml=junit-data/benchmarks.raw.xml --benchmark-repeats ${{ needs.setup.outputs.repeats }} chia/_tests/
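      # yq rewrites the raw JUnit XML into the normalized benchmarks.xml consumed by
      # the summary step below.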
      - name: Format JUnit data and prepare results
        if: always()
        run: |
          yq junit-data/benchmarks.raw.xml > junit-data/benchmarks.xml
      - name: Publish JUnit results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: junit-data
          path: junit-data/*
          if-no-files-found: error
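      # process_junit renders the benchmark timings as a Markdown table, linking each
      # entry to its source line at this commit, and appends it to the run summary.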
      - name: Add benchmark results to workflow summary
        if: always()
        run: |
          python -m chia._tests.process_junit --type benchmark --xml junit-data/benchmarks.xml --markdown --link-prefix ${{ github.event.repository.html_url }}/blob/${{ github.sha }}/ --link-line-separator \#L >> "$GITHUB_STEP_SUMMARY"