Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,8 @@ jobs:
python-version: "3.14"
- name: Install dependencies
run: uv sync --all-extras --dev --locked
# Samply cannot profile signed binaries, this is a temporary workaround
- run: brew install bash
- name: Run the benchmarks
uses: CodSpeedHQ/action@main
env:
Expand Down
12 changes: 12 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,18 @@
"src/pytest_codspeed/instruments/hooks/instrument-hooks/dist/core.c",
],
include_dirs=["src/pytest_codspeed/instruments/hooks/instrument-hooks/includes"],
# IMPORTANT: Keep in sync with instrument-hooks/.github/workflows/ci.yml
# (COMMON_CFLAGS). The Zig-generated core.c emits many warnings that
# upstream silences; in particular distros like Nix/Debian/Fedora inject
# -Werror=format-security, which would otherwise fail the build.
extra_compile_args=[
"-Wno-format",
"-Wno-format-security",
"-Wno-unused-but-set-variable",
"-Wno-unused-const-variable",
"-Wno-type-limits",
"-Wno-uninitialized",
],
optional=not IS_EXTENSION_REQUIRED,
)

Expand Down
9 changes: 3 additions & 6 deletions src/pytest_codspeed/instruments/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,7 @@

from pytest_codspeed import __semver_version__
from pytest_codspeed.instruments import Instrument
from pytest_codspeed.instruments.hooks import (
FEATURE_DISABLE_CALLGRIND_MARKERS,
InstrumentHooks,
)
from pytest_codspeed.instruments.hooks import InstrumentHooks
from pytest_codspeed.utils import SUPPORTS_PERF_TRAMPOLINE

if TYPE_CHECKING:
Expand Down Expand Up @@ -79,7 +76,7 @@ def __codspeed_root_frame__() -> T:
# Warmup CPython performance map cache
__codspeed_root_frame__()

self.instrument_hooks.set_feature(FEATURE_DISABLE_CALLGRIND_MARKERS, True)
self.instrument_hooks.disable_callgrind_markers()
self.instrument_hooks.start_benchmark()

# Manually call the library function to avoid an extra stack frame. Also
Expand Down Expand Up @@ -128,7 +125,7 @@ def __codspeed_root_frame__(*args, **kwargs) -> T:
# Compute the actual result of the function
args, kwargs = pedantic_options.setup_and_get_args_kwargs()

self.instrument_hooks.set_feature(FEATURE_DISABLE_CALLGRIND_MARKERS, True)
self.instrument_hooks.disable_callgrind_markers()
self.instrument_hooks.start_benchmark()

# Manually call the library function to avoid an extra stack frame. Also
Expand Down
33 changes: 29 additions & 4 deletions src/pytest_codspeed/instruments/hooks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,6 @@
if TYPE_CHECKING:
from typing import Any, Callable

# Feature flags for instrument hooks
FEATURE_DISABLE_CALLGRIND_MARKERS = 0


class InstrumentHooks:
"""Native library wrapper class providing benchmark measurement functionality."""
Expand Down Expand Up @@ -85,6 +82,30 @@ def set_executed_benchmark(self, uri: str, pid: int | None = None) -> None:
if ret != 0:
warnings.warn("Failed to set executed benchmark", RuntimeWarning)

@staticmethod
def current_timestamp() -> int:
    """Return a monotonic timestamp in nanoseconds from the native library."""
    # Lazy import keeps module import working even when the built extension
    # is unavailable; only callers of this helper need it.
    from . import dist_instrument_hooks as _hooks  # type: ignore

    timestamp: int = _hooks.instrument_hooks_current_timestamp()
    return timestamp

def add_marker(
    self, marker_type: int, timestamp: int, pid: int | None = None
) -> None:
    """Emit a single marker at the given timestamp.

    Args:
        marker_type: Native marker-type constant exposed by the extension.
        timestamp: Timestamp in the native library's clock domain.
        pid: Process id to tag the marker with; defaults to this process.
    """
    effective_pid = os.getpid() if pid is None else pid
    status = self._module.instrument_hooks_add_marker(
        self._instance, effective_pid, marker_type, timestamp
    )
    # Markers are best-effort: report a failure without raising.
    if status != 0:
        warnings.warn("Failed to add marker", RuntimeWarning)

def add_benchmark_timestamps(self, start: int, end: int) -> None:
    """Emit a BenchmarkStart/BenchmarkEnd marker pair around a captured window."""
    native = self._module
    # Start marker first, then end marker — order matters to consumers.
    for marker_type, ts in (
        (native.MARKER_TYPE_BENCHMARK_START, start),
        (native.MARKER_TYPE_BENCHMARK_END, end),
    ):
        self.add_marker(marker_type, ts)

def set_integration(self, name: str, version: str) -> None:
"""Set the integration name and version."""
ret = self._module.instrument_hooks_set_integration(
Expand All @@ -97,7 +118,7 @@ def is_instrumented(self) -> bool:
"""Check if simulation is active."""
return self._module.instrument_hooks_is_instrumented(self._instance)

def set_feature(self, feature: int, enabled: bool) -> None:
def _set_feature(self, feature: int, enabled: bool) -> None:
"""Set a feature flag in the instrument hooks library.

Args:
Expand All @@ -106,6 +127,10 @@ def set_feature(self, feature: int, enabled: bool) -> None:
"""
self._module.instrument_hooks_set_feature(feature, enabled)

def disable_callgrind_markers(self, disabled: bool = True) -> None:
    """Turn off the automatic callgrind markers around benchmark start/stop.

    Args:
        disabled: When True (default) the markers are suppressed.
    """
    feature_flag = self._module.FEATURE_DISABLE_CALLGRIND_MARKERS
    self._set_feature(feature_flag, disabled)

def set_environment(self, section_name: str, key: str, value: str) -> None:
"""Register a key-value pair under a named section for environment collection.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -296,6 +296,7 @@ PyMODINIT_FUNC PyInit_dist_instrument_hooks(void) {
PyModule_AddIntConstant(module, "MARKER_TYPE_SAMPLE_END", MARKER_TYPE_SAMPLE_END);
PyModule_AddIntConstant(module, "MARKER_TYPE_BENCHMARK_START", MARKER_TYPE_BENCHMARK_START);
PyModule_AddIntConstant(module, "MARKER_TYPE_BENCHMARK_END", MARKER_TYPE_BENCHMARK_END);
PyModule_AddIntConstant(module, "FEATURE_DISABLE_CALLGRIND_MARKERS", FEATURE_DISABLE_CALLGRIND_MARKERS);

#ifdef Py_GIL_DISABLED
PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED);
Expand Down
42 changes: 31 additions & 11 deletions src/pytest_codspeed/instruments/walltime.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ def get_instrument_config_str_and_warns(self) -> tuple[str, list[str]]:
)
return config_str, []

def measure(
def measure( # noqa: C901
self,
marker_options: BenchmarkMarkerOptions,
name: str,
Expand Down Expand Up @@ -232,21 +232,31 @@ def __codspeed_root_frame__() -> T:
# Benchmark
iter_range = range(iter_per_round)
run_start = perf_counter_ns()
if self.instrument_hooks:
self.instrument_hooks.start_benchmark()
hooks = self.instrument_hooks
if hooks:
hooks.start_benchmark()
for _ in range(rounds):
instrument_hooks_start = hooks.current_timestamp() if hooks else None
start = perf_counter_ns()

for _ in iter_range:
__codspeed_root_frame__()

end = perf_counter_ns()
if hooks and instrument_hooks_start is not None:
instrument_hooks_end = hooks.current_timestamp()
hooks.add_benchmark_timestamps(
instrument_hooks_start, instrument_hooks_end
)

times_per_round_ns.append(end - start)

if end - run_start > benchmark_config.max_time_ns:
# TODO: log something
break
if self.instrument_hooks:
self.instrument_hooks.stop_benchmark()
self.instrument_hooks.set_executed_benchmark(uri)
if hooks:
hooks.stop_benchmark()
hooks.set_executed_benchmark(uri)
benchmark_end = perf_counter_ns()
total_time = (benchmark_end - run_start) / 1e9

Expand Down Expand Up @@ -290,20 +300,30 @@ def __codspeed_root_frame__(*args, **kwargs) -> T:
# Benchmark
times_per_round_ns: list[float] = []
benchmark_start = perf_counter_ns()
if self.instrument_hooks:
self.instrument_hooks.start_benchmark()
hooks = self.instrument_hooks
if hooks:
hooks.start_benchmark()
for _ in range(pedantic_options.rounds):
args, kwargs = pedantic_options.setup_and_get_args_kwargs()
instrument_hooks_start = hooks.current_timestamp() if hooks else None
start = perf_counter_ns()

for _ in iter_range:
__codspeed_root_frame__(*args, **kwargs)

end = perf_counter_ns()
if hooks and instrument_hooks_start is not None:
instrument_hooks_end = hooks.current_timestamp()
hooks.add_benchmark_timestamps(
instrument_hooks_start, instrument_hooks_end
)

times_per_round_ns.append(end - start)
if pedantic_options.teardown is not None:
pedantic_options.teardown(*args, **kwargs)
if self.instrument_hooks:
self.instrument_hooks.stop_benchmark()
self.instrument_hooks.set_executed_benchmark(uri)
if hooks:
hooks.stop_benchmark()
hooks.set_executed_benchmark(uri)
benchmark_end = perf_counter_ns()
total_time = (benchmark_end - benchmark_start) / 1e9
stats = BenchmarkStats.from_list(
Expand Down
Loading