Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
0c9475b
Initial plan
Copilot Mar 25, 2026
429c9cc
ci: Replace pytest-benchmark with pytest-codspeed and improve benchmarks
Copilot Mar 25, 2026
6a5f2e9
Benchmark only process.run(), add RayConnector/RayProcess parametriza…
Copilot Mar 25, 2026
4dac1b5
Use codspeed runners
toby-coleman Mar 28, 2026
adfd12c
Add codspeed badge
toby-coleman Mar 29, 2026
6a427e5
Remove run-only tests and keep full lifecycle tests
toby-coleman Mar 31, 2026
ece386c
Merge remote-tracking branch 'origin/main' into copilot/improve-bench…
toby-coleman Mar 31, 2026
83eea99
Missing await
toby-coleman Apr 1, 2026
c663755
Try reinstated benchmark
toby-coleman Apr 6, 2026
f2254a1
Fix
toby-coleman Apr 6, 2026
48872a6
Reinstate setup
toby-coleman Apr 6, 2026
d0efaf5
Try marking asyncio
toby-coleman Apr 6, 2026
f8c7636
Retry
toby-coleman Apr 6, 2026
e2c6d53
Simple test
toby-coleman Apr 6, 2026
95dde5d
Note to reinstate run-only tests
toby-coleman Apr 6, 2026
85362c6
Reinstate run-only benchmark tests and update pytest-codspeed to >=4.4.0
Copilot Apr 17, 2026
f585742
Fix run-only benchmark: remove unused arg, add teardown for cleanup
Copilot Apr 17, 2026
787dc56
Support AARCH64
toby-coleman Apr 17, 2026
71910d3
Upgrade greenlet
toby-coleman Apr 17, 2026
6d1c54e
Merge remote-tracking branch 'origin/main' into copilot/improve-bench…
toby-coleman Apr 17, 2026
4c5223f
Fixup the run-only test
toby-coleman Apr 19, 2026
9356ef9
Quick test
toby-coleman Apr 19, 2026
0415516
Revert "Quick test"
toby-coleman Apr 19, 2026
29feb10
ci: use ubuntu-latest runner and default CodSpeed mode
Copilot Apr 19, 2026
f4ee50b
Set mode
toby-coleman Apr 19, 2026
76af665
fix: disable Ray dashboard in benchmarks to avoid MetricsHead timeout…
Copilot Apr 19, 2026
0f59fff
Fixup and reduce permissions
toby-coleman Apr 19, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 19 additions & 55 deletions .github/workflows/benchmarks.yaml
Original file line number Diff line number Diff line change
@@ -1,83 +1,47 @@
name: Benchmarks

on:
push:
branches:
- main
pull_request:
types:
- opened
- synchronize
# `workflow_dispatch` allows CodSpeed to trigger backtest
# performance analysis in order to generate initial data.
workflow_dispatch:

jobs:
benchmark:
name: Benchmark tests
name: Run benchmarks
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
strategy:
matrix:
python_version: [3.12]
steps:
- name: Checkout branch
- name: Checkout
uses: actions/checkout@v4
with:
path: pr

- name: Checkout main
uses: actions/checkout@v4
with:
ref: main
path: main

- name: Install python
uses: actions/setup-python@v5
with:
python-version: ${{matrix.python_version}}
python-version: "3.12"

- name: Install uv
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "main/uv.lock"

- name: Setup benchmarks
run: |
echo "BASE_SHA=$(echo ${{ github.event.pull_request.base.sha }} | cut -c1-8)" >> $GITHUB_ENV
echo "HEAD_SHA=$(echo ${{ github.event.pull_request.head.sha }} | cut -c1-8)" >> $GITHUB_ENV
echo "PR_COMMENT=$(mktemp)" >> $GITHUB_ENV

- name: Run benchmarks on PR
working-directory: ./pr
run: |
uv sync --group test
uv run pytest --benchmark-only --benchmark-save=pr

- name: Run benchmarks on main
working-directory: ./main
continue-on-error: true
run: |
uv sync --group test
uv run pytest --benchmark-only --benchmark-save=base
cache-dependency-glob: "uv.lock"

- name: Compare results
continue-on-error: false
run: |
uvx pytest-benchmark compare **/.benchmarks/**/*.json | tee cmp_results
- name: Install project
run: uv sync --group test

echo 'Benchmark comparison for [`${{ env.BASE_SHA }}`](${{ github.event.repository.html_url }}/commit/${{ github.event.pull_request.base.sha }}) (base) vs [`${{ env.HEAD_SHA }}`](${{ github.event.repository.html_url }}/commit/${{ github.event.pull_request.head.sha }}) (PR)' >> pr_comment
echo '```' >> pr_comment
cat cmp_results >> pr_comment
echo '```' >> pr_comment
cat pr_comment > ${{ env.PR_COMMENT }}

- name: Comment on PR
uses: actions/github-script@v7
- name: Run benchmarks
uses: CodSpeedHQ/action@v4
env:
RAY_ENABLE_UV_RUN_RUNTIME_ENV: 0
PLUGBOARD_IO_READ_TIMEOUT: 5.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: require('fs').readFileSync('${{ env.PR_COMMENT }}').toString()
});
mode: simulation
run: uv run pytest tests/benchmark/ --codspeed

2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
<img alt="CodeQL" src="https://github.com/plugboard-dev/plugboard/actions/workflows/github-code-scanning/codeql/badge.svg"></a>
<a href="https://codecov.io/gh/plugboard-dev/plugboard" >
<img src="https://codecov.io/gh/plugboard-dev/plugboard/graph/badge.svg?token=4LU4K6TOLQ"/></a>
<a href="https://codspeed.io/plugboard-dev/plugboard?utm_source=badge">
<img src="https://img.shields.io/endpoint?url=https://codspeed.io/badge.json" alt="CodSpeed Badge"/></a>
<br>
<a href="https://docs.plugboard.dev" alt="Documentation">
<img alt="Docs" src="https://github.com/plugboard-dev/plugboard/actions/workflows/docs.yaml/badge.svg"></a>
Expand Down
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ test = [
"optuna>=3.0,<5",
"pytest>=8.3,<10",
"pytest-asyncio>=1.0,<2",
"pytest-benchmark>=5.1.0",
"pytest-codspeed>=4.4.0",
"pytest-cases>=3.8,<4",
"pytest-env>=1.1,<2",
"pytest-rerunfailures>=15.0,<17",
Expand Down Expand Up @@ -117,6 +117,10 @@ fallback_version = "0.0.0"
[tool.uv]
package = true
default-groups = ["all"]
required-environments = [
# AARCH64 linux support required for Codspeed benchmarks
"sys_platform == 'linux' and platform_machine == 'aarch64'"
]

[tool.uv.workspace]
members = ["plugboard-schemas"]
Expand Down
17 changes: 17 additions & 0 deletions tests/benchmark/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
"""Configuration for benchmark tests."""

import typing as _t

import pytest
import ray


@pytest.fixture(scope="session")
def ray_ctx() -> _t.Iterator[None]:
    """Initialises and shuts down Ray for benchmarks.

    Session-scoped so the (expensive) Ray startup happens once for all
    benchmark tests. Dashboard is disabled to avoid MetricsHead timeout in CI.
    """
    # Resource limits mirror the CI runner; custom_hardware is a synthetic
    # resource presumably required by some benchmarked components — TODO confirm.
    ray.init(num_cpus=5, num_gpus=1, resources={"custom_hardware": 10}, include_dashboard=False)
    try:
        yield
    finally:
        # try/finally guarantees the cluster is torn down even if an exception
        # (e.g. GeneratorExit on interpreter shutdown) is thrown in at the yield.
        ray.shutdown()
83 changes: 63 additions & 20 deletions tests/benchmark/test_benchmarking.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,78 @@
"""Simple benchmark tests for Plugboard models."""
"""Benchmark tests for Plugboard processes."""

import asyncio
import pytest
from pytest_codspeed import BenchmarkFixture
import uvloop

from pytest_benchmark.fixture import BenchmarkFixture

from plugboard.connector import AsyncioConnector
from plugboard.process import LocalProcess, Process
from plugboard.connector import AsyncioConnector, Connector, RayConnector, ZMQConnector
from plugboard.process import LocalProcess, Process, RayProcess
from plugboard.schemas import ConnectorSpec
from tests.integration.test_process_with_components_run import A, B


# Number of iterations the source component runs per benchmark.
ITERS = 1000

# Connector/process pairings exercised by the benchmarks; each connector is
# benchmarked with the process implementation that supports it.
CONNECTOR_PROCESS_PARAMS = [
    (AsyncioConnector, LocalProcess),
    (ZMQConnector, LocalProcess),
    (RayConnector, RayProcess),
]
# Human-readable test ids matching CONNECTOR_PROCESS_PARAMS, in order.
CONNECTOR_PROCESS_IDS = ["asyncio", "zmq", "ray"]


def _build_process(connector_cls: type[Connector], process_cls: type[Process]) -> Process:
    """Build a process with the given connector and process class.

    Creates a three-component pipeline (A -> B -> B) wired with two
    connectors of the requested class, and returns an uninitialised
    process of the requested class.
    """
    comp_a = A(name="comp_a", iters=ITERS)
    comp_b1 = B(name="comp_b1", factor=1)
    comp_b2 = B(name="comp_b2", factor=2)
    components = [comp_a, comp_b1, comp_b2]
    connectors = [
        connector_cls(spec=ConnectorSpec(source="comp_a.out_1", target="comp_b1.in_1")),
        connector_cls(spec=ConnectorSpec(source="comp_b1.out_1", target="comp_b2.in_1")),
    ]
    return process_cls(components=components, connectors=connectors)


@pytest.mark.benchmark
@pytest.mark.parametrize(
    "connector_cls, process_cls",
    CONNECTOR_PROCESS_PARAMS,
    ids=CONNECTOR_PROCESS_IDS,
)
@pytest.mark.asyncio
async def test_benchmark_process_lifecycle(
    connector_cls: type[Connector],
    process_cls: type[Process],
    ray_ctx: None,  # ensures a Ray cluster is running for the Ray variant
) -> None:
    """Benchmark the full lifecycle (init, run, destroy) of a Plugboard Process."""
    process = _build_process(connector_cls, process_cls)
    # The async context manager presumably performs init on enter and
    # destroy on exit, so setup/teardown are included in the measurement.
    async with process:
        await process.run()


@pytest.mark.parametrize(
    "connector_cls, process_cls",
    CONNECTOR_PROCESS_PARAMS,
    ids=CONNECTOR_PROCESS_IDS,
)
def test_benchmark_process_run(
    benchmark: BenchmarkFixture,
    connector_cls: type[Connector],
    process_cls: type[Process],
    ray_ctx: None,  # ensures a Ray cluster is running for the Ray variant
) -> None:
    """Benchmark running of a Plugboard Process.

    Process construction and ``init()`` happen in ``setup`` so that only
    ``process.run()`` is measured.
    """

    def _setup() -> tuple[tuple[Process], dict]:
        async def _init() -> Process:
            process = _build_process(connector_cls, process_cls)
            await process.init()
            return process

        # Return (args, kwargs) as required by benchmark.pedantic.
        return (uvloop.run(_init()),), {}

    def _run(process: Process) -> None:
        # uvloop.run gives each round a fresh event loop for the async run.
        uvloop.run(process.run())

    benchmark.pedantic(_run, setup=_setup, rounds=5)
Loading
Loading