Skip to content

chore(deps): pin dependencies #105

chore(deps): pin dependencies

chore(deps): pin dependencies #105

Workflow file for this run

---
# Runs hyperfine-based performance benchmarks on pushes to main, or on PRs
# explicitly labeled `run-benchmark`, then records the result via
# github-action-benchmark (pushed to the `performance-benchmarks` branch on main).
name: Performance Benchmarks

on:
  push:
    branches: [ "main" ]
  pull_request:
    # `labeled` is required so adding the run-benchmark label (re)triggers the job.
    types: [opened, synchronize, reopened, labeled]
    branches: [ "main" ]

# `contents: write` lets the benchmark action auto-push results;
# `pull-requests: write` lets it comment on alert regressions.
permissions:
  contents: write
  pull-requests: write

jobs:
  benchmark:
    name: Execute & Evaluate
    # Condition: Run on main branch pushes, OR if the PR has the specific label
    if: >
      github.event_name == 'push' ||
      contains(github.event.pull_request.labels.*.name, 'run-benchmark')
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Install uv
        uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7
        with:
          enable-cache: true
          cache-dependency-glob: "uv.lock"
          python-version: "3.14"

      - name: Install hyperfine
        run: |
          sudo apt-get update
          sudo DEBIAN_FRONTEND=noninteractive apt-get install -y hyperfine

      - name: Install dependencies
        run: uv sync --group ci

      - name: Run rigorous benchmarks
        run: make test-benchmark-slower

      # Reshape hyperfine's JSON (benchmark.json) into the list-of-metrics
      # format that github-action-benchmark's customSmallerIsBetter tool expects.
      - name: Convert hyperfine output for benchmark action
        run: |
          python << 'PY'
          import json

          with open("benchmark.json") as f:
              data = json.load(f)

          # hyperfine's `mean` is in seconds; report milliseconds.
          mean_ms = data["results"][0]["mean"] * 1000
          converted = [{
              "name": "Protostar Initialization Latency",
              "unit": "ms",
              "value": round(mean_ms, 2),
          }]

          # Use a context manager so the file is flushed/closed before the
          # next step reads it (the original left the handle to be GC'd).
          with open("benchmark-gh.json", "w") as f:
              json.dump(converted, f)
          PY

      - name: Store benchmark result
        # NOTE(review): every other action above is pinned to a commit SHA;
        # this one is only pinned to the mutable `v1` tag. Pin it to a SHA
        # for consistency and supply-chain safety — confirm the correct SHA.
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: Protostar Initialization Latency
          tool: 'customSmallerIsBetter'
          output-file-path: benchmark-gh.json
          gh-pages-branch: 'performance-benchmarks'
          benchmark-data-dir-path: './'
          github-token: ${{ secrets.GITHUB_TOKEN }}
          # Fail the job and comment on the PR if the metric regresses past 150%
          # of the previous recorded value.
          fail-on-alert: true
          alert-threshold: '150%'
          comment-on-alert: true
          # Only persist results from trusted main-branch pushes, never from PRs.
          auto-push: ${{ github.event_name == 'push' }}