diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
new file mode 100644
index 000000000000..e8b1f9c6fa37
--- /dev/null
+++ b/.github/workflows/benchmark.yml
@@ -0,0 +1,121 @@
+name: CI
+on:
+ pull_request:
+ branches:
+ - master
+jobs:
+ build-binaries:
+ runs-on: [self-hosted, linux, x64]
+ env:
+ NIX_PATH: nixpkgs=channel:nixos-unstable
+ BASE_SHA: ${{ github.event.pull_request.base.sha }}
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ - name: Fetch base commit
+ run: |
+ echo "CHECKOUT_COMMIT=$(git rev-parse HEAD)" >> "$GITHUB_ENV"
+ git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }}
+ - name: Setup ccache
+ run: |
+ mkdir -p /data/ccache
+ export CCACHE_DIR=/data/ccache
+ export CCACHE_MAXSIZE=50G
+ ccache -M 50G
+ ccache -s
+ - name: Build both binaries
+ env:
+ CCACHE_DIR: /data/ccache
+ run: |
+ mkdir -p ${{ runner.temp }}/binaries/base
+ mkdir -p ${{ runner.temp }}/binaries/head
+ nix-shell --command "just build-assumeutxo-binaries-guix $BASE_SHA $CHECKOUT_COMMIT"
+ cp binaries/base/bitcoind ${{ runner.temp }}/binaries/base/bitcoind
+ cp binaries/head/bitcoind ${{ runner.temp }}/binaries/head/bitcoind
+ - name: Upload binaries
+ uses: actions/upload-artifact@v4
+ with:
+ name: bitcoind-binaries
+ path: ${{ runner.temp }}/binaries/
+ assumeutxo:
+ needs: build-binaries
+ strategy:
+ matrix:
+ include:
+ - network: signet
+ timeout: 20
+ utxo_path: /var/lib/bitcoin/utxo-signet-160000.dat
+ dbcache: 550
+ - network: mainnet-default
+ timeout: 600
+ utxo_path: /var/lib/bitcoin/utxo-840000.dat
+ dbcache: 550
+ - network: mainnet-large
+ timeout: 600
+ utxo_path: /var/lib/bitcoin/utxo-840000.dat
+ dbcache: 32000
+ runs-on: [self-hosted, linux, x64]
+ timeout-minutes: ${{ matrix.timeout }}
+ env:
+ NIX_PATH: nixpkgs=channel:nixos-unstable
+ UTXO_PATH: ${{ matrix.utxo_path }}
+ BASE_SHA: ${{ github.event.pull_request.base.sha }}
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ - name: Download binaries
+ uses: actions/download-artifact@v4
+ with:
+ name: bitcoind-binaries
+ path: ${{ runner.temp }}/binaries
+ - name: Set binary permissions
+ run: |
+ chmod +x ${{ runner.temp }}/binaries/base/bitcoind
+ chmod +x ${{ runner.temp }}/binaries/head/bitcoind
+ - name: Fetch base commit
+ run: |
+ echo "CHECKOUT_COMMIT=$(git rev-parse HEAD)" >> "$GITHUB_ENV"
+ git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }}
+ - name: Run AssumeUTXO ${{ matrix.network }}
+ env:
+ TMP_DATADIR: "${{ runner.temp }}/base_datadir"
+ BINARIES_DIR: "${{ runner.temp }}/binaries"
+ run: |
+ env
+
+ # Patch binary shared object dependencies
+ nix-shell --command "patch-binary $BINARIES_DIR/head/bitcoind"
+ nix-shell --command "patch-binary $BINARIES_DIR/base/bitcoind"
+
+ # Run test
+ mkdir -p "$TMP_DATADIR"
+ nix-shell --command "just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json ${{ matrix.dbcache }} ${{ runner.temp }}/pngs $BINARIES_DIR"
+ - uses: actions/upload-artifact@v4
+ with:
+ name: result-${{ matrix.network }}
+ path: "${{ runner.temp }}/results.json"
+ - uses: actions/upload-artifact@v4
+ with:
+ name: pngs-${{ matrix.network }}
+ path: "${{ runner.temp }}/pngs/*.png"
+ - uses: actions/upload-artifact@v4
+ with:
+ name: flamegraph-${{ matrix.network }}
+ path: "**/*-flamegraph.svg"
+ - name: Write GitHub and runner context files
+ env:
+ GITHUB_CONTEXT: ${{ toJSON(github) }}
+ RUNNER_CONTEXT: ${{ toJSON(runner) }}
+ run: |
+ mkdir contexts
+ echo "$GITHUB_CONTEXT" | nix-shell -p jq --command "jq 'del(.token)' > contexts/github.json"
+ echo "$RUNNER_CONTEXT" > contexts/runner.json
+ - name: Upload context metadata as artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: run-metadata-${{ matrix.network }}
+ path: ./contexts/
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index 0b4b8bae1e38..000000000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,306 +0,0 @@
-# Copyright (c) 2023-present The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-
-name: CI
-on:
- # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request.
- pull_request:
- # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push.
- push:
- branches:
- - '**'
- tags-ignore:
- - '**'
-
-concurrency:
- group: ${{ github.event_name != 'pull_request' && github.run_id || github.ref }}
- cancel-in-progress: true
-
-env:
- CI_FAILFAST_TEST_LEAVE_DANGLING: 1 # GHA does not care about dangling processes and setting this variable avoids killing the CI script itself on error
- MAKEJOBS: '-j10'
-
-jobs:
- test-each-commit:
- name: 'test each commit'
- runs-on: ubuntu-24.04
- if: github.event_name == 'pull_request' && github.event.pull_request.commits != 1
- timeout-minutes: 360 # Use maximum time, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes. Assuming a worst case time of 1 hour per commit, this leads to a --max-count=6 below.
- env:
- MAX_COUNT: 6
- steps:
- - name: Determine fetch depth
- run: echo "FETCH_DEPTH=$((${{ github.event.pull_request.commits }} + 2))" >> "$GITHUB_ENV"
- - uses: actions/checkout@v4
- with:
- ref: ${{ github.event.pull_request.head.sha }}
- fetch-depth: ${{ env.FETCH_DEPTH }}
- - name: Determine commit range
- run: |
- # Checkout HEAD~ and find the test base commit
- # Checkout HEAD~ because it would be wasteful to rerun tests on the PR
- # head commit that are already run by other jobs.
- git checkout HEAD~
- # Figure out test base commit by listing ancestors of HEAD, excluding
- # ancestors of the most recent merge commit, limiting the list to the
- # newest MAX_COUNT ancestors, ordering it from oldest to newest, and
- # taking the first one.
- #
- # If the branch contains up to MAX_COUNT ancestor commits after the
- # most recent merge commit, all of those commits will be tested. If it
- # contains more, only the most recent MAX_COUNT commits will be
- # tested.
- #
- # In the command below, the ^@ suffix is used to refer to all parents
- # of the merge commit as described in:
- # https://git-scm.com/docs/git-rev-parse#_other_rev_parent_shorthand_notations
- # and the ^ prefix is used to exclude these parents and all their
- # ancestors from the rev-list output as described in:
- # https://git-scm.com/docs/git-rev-list
- MERGE_BASE=$(git rev-list -n1 --merges HEAD)
- EXCLUDE_MERGE_BASE_ANCESTORS=
- # MERGE_BASE can be empty due to limited fetch-depth
- if test -n "$MERGE_BASE"; then
- EXCLUDE_MERGE_BASE_ANCESTORS=^${MERGE_BASE}^@
- fi
- echo "TEST_BASE=$(git rev-list -n$((${{ env.MAX_COUNT }} + 1)) --reverse HEAD $EXCLUDE_MERGE_BASE_ANCESTORS | head -1)" >> "$GITHUB_ENV"
- - run: |
- sudo apt-get update
- sudo apt-get install clang ccache build-essential cmake pkgconf python3-zmq libevent-dev libboost-dev libsqlite3-dev libdb++-dev systemtap-sdt-dev libzmq3-dev qtbase5-dev qttools5-dev qttools5-dev-tools qtwayland5 libqrencode-dev -y
- - name: Compile and run tests
- run: |
- # Run tests on commits after the last merge commit and before the PR head commit
- # Use clang++, because it is a bit faster and uses less memory than g++
- git rebase --exec "echo Running test-one-commit on \$( git log -1 ) && CC=clang CXX=clang++ cmake -B build -DWERROR=ON -DWITH_ZMQ=ON -DBUILD_GUI=ON -DBUILD_BENCH=ON -DBUILD_FUZZ_BINARY=ON -DWITH_BDB=ON -DWITH_USDT=ON -DCMAKE_CXX_FLAGS='-Wno-error=unused-member-function' && cmake --build build -j $(nproc) && ctest --output-on-failure --stop-on-failure --test-dir build -j $(nproc) && ./build/test/functional/test_runner.py -j $(( $(nproc) * 2 )) --combinedlogslen=99999999" ${{ env.TEST_BASE }}
-
- macos-native-arm64:
- name: ${{ matrix.job-name }}
- # Use any image to support the xcode-select below, but hardcode version to avoid silent upgrades (and breaks).
- # See: https://github.com/actions/runner-images#available-images.
- runs-on: macos-14
-
- # When a contributor maintains a fork of the repo, any pull request they make
- # to their own fork, or to the main repository, will trigger two CI runs:
- # one for the branch push and one for the pull request.
- # This can be avoided by setting SKIP_BRANCH_PUSH=true as a custom env variable
- # in Github repository settings.
- if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }}
-
- timeout-minutes: 120
-
- strategy:
- fail-fast: false
- matrix:
- job-type: [standard, fuzz]
- include:
- - job-type: standard
- file-env: './ci/test/00_setup_env_mac_native.sh'
- job-name: 'macOS 14 native, arm64, no depends, sqlite only, gui'
- - job-type: fuzz
- file-env: './ci/test/00_setup_env_mac_native_fuzz.sh'
- job-name: 'macOS 14 native, arm64, fuzz'
-
- env:
- DANGER_RUN_CI_ON_HOST: 1
- BASE_ROOT_DIR: ${{ github.workspace }}
-
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Clang version
- run: |
- # Use the earliest Xcode supported by the version of macOS denoted in
- # doc/release-notes-empty-template.md and providing at least the
- # minimum clang version denoted in doc/dependencies.md.
- # See: https://developer.apple.com/documentation/xcode-release-notes/xcode-15-release-notes
- sudo xcode-select --switch /Applications/Xcode_15.0.app
- clang --version
-
- - name: Install Homebrew packages
- env:
- HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
- run: |
- # A workaround for "The `brew link` step did not complete successfully" error.
- brew install --quiet python@3 || brew link --overwrite python@3
- brew install --quiet coreutils ninja pkgconf gnu-getopt ccache boost libevent zeromq qt@5 qrencode
-
- - name: Set Ccache directory
- run: echo "CCACHE_DIR=${RUNNER_TEMP}/ccache_dir" >> "$GITHUB_ENV"
-
- - name: Restore Ccache cache
- id: ccache-cache
- uses: actions/cache/restore@v4
- with:
- path: ${{ env.CCACHE_DIR }}
- key: ${{ github.job }}-${{ matrix.job-type }}-ccache-${{ github.run_id }}
- restore-keys: ${{ github.job }}-${{ matrix.job-type }}-ccache-
-
- - name: CI script
- run: ./ci/test_run_all.sh
- env:
- FILE_ENV: ${{ matrix.file-env }}
-
- - name: Save Ccache cache
- uses: actions/cache/save@v4
- if: github.event_name != 'pull_request' && steps.ccache-cache.outputs.cache-hit != 'true'
- with:
- path: ${{ env.CCACHE_DIR }}
- # https://github.com/actions/cache/blob/main/tips-and-workarounds.md#update-a-cache
- key: ${{ github.job }}-${{ matrix.job-type }}-ccache-${{ github.run_id }}
-
- win64-native:
- name: ${{ matrix.job-name }}
- # Use latest image, but hardcode version to avoid silent upgrades (and breaks).
- # See: https://github.com/actions/runner-images#available-images.
- runs-on: windows-2022
-
- if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }}
-
- env:
- PYTHONUTF8: 1
- TEST_RUNNER_TIMEOUT_FACTOR: 40
-
- strategy:
- fail-fast: false
- matrix:
- job-type: [standard, fuzz]
- include:
- - job-type: standard
- generate-options: '-DBUILD_GUI=ON -DWITH_BDB=ON -DWITH_ZMQ=ON -DBUILD_BENCH=ON -DWERROR=ON'
- job-name: 'Win64 native, VS 2022'
- - job-type: fuzz
- generate-options: '-DVCPKG_MANIFEST_NO_DEFAULT_FEATURES=ON -DVCPKG_MANIFEST_FEATURES="sqlite" -DBUILD_GUI=OFF -DBUILD_FOR_FUZZING=ON -DWERROR=ON'
- job-name: 'Win64 native fuzz, VS 2022'
-
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Configure Developer Command Prompt for Microsoft Visual C++
- # Using microsoft/setup-msbuild is not enough.
- uses: ilammy/msvc-dev-cmd@v1
- with:
- arch: x64
-
- - name: Get tool information
- run: |
- cmake -version | Tee-Object -FilePath "cmake_version"
- Write-Output "---"
- msbuild -version | Tee-Object -FilePath "msbuild_version"
- $env:VCToolsVersion | Tee-Object -FilePath "toolset_version"
- py -3 --version
- Write-Host "PowerShell version $($PSVersionTable.PSVersion.ToString())"
-
- - name: Using vcpkg with MSBuild
- run: |
- Set-Location "$env:VCPKG_INSTALLATION_ROOT"
- Add-Content -Path "triplets\x64-windows.cmake" -Value "set(VCPKG_BUILD_TYPE release)"
- Add-Content -Path "triplets\x64-windows-static.cmake" -Value "set(VCPKG_BUILD_TYPE release)"
-
- - name: vcpkg tools cache
- uses: actions/cache@v4
- with:
- path: C:/vcpkg/downloads/tools
- key: ${{ github.job }}-vcpkg-tools
-
- - name: Restore vcpkg binary cache
- uses: actions/cache/restore@v4
- id: vcpkg-binary-cache
- with:
- path: ~/AppData/Local/vcpkg/archives
- key: ${{ github.job }}-vcpkg-binary-${{ hashFiles('cmake_version', 'msbuild_version', 'toolset_version', 'vcpkg.json') }}
-
- - name: Generate build system
- run: |
- cmake -B build --preset vs2022-static -DCMAKE_TOOLCHAIN_FILE="$env:VCPKG_INSTALLATION_ROOT\scripts\buildsystems\vcpkg.cmake" ${{ matrix.generate-options }}
-
- - name: Save vcpkg binary cache
- uses: actions/cache/save@v4
- if: github.event_name != 'pull_request' && steps.vcpkg-binary-cache.outputs.cache-hit != 'true' && matrix.job-type == 'standard'
- with:
- path: ~/AppData/Local/vcpkg/archives
- key: ${{ github.job }}-vcpkg-binary-${{ hashFiles('cmake_version', 'msbuild_version', 'toolset_version', 'vcpkg.json') }}
-
- - name: Build
- working-directory: build
- run: |
- cmake --build . -j $env:NUMBER_OF_PROCESSORS --config Release
-
- - name: Run test suite
- if: matrix.job-type == 'standard'
- working-directory: build
- run: |
- ctest --output-on-failure --stop-on-failure -j $env:NUMBER_OF_PROCESSORS -C Release
-
- - name: Run functional tests
- if: matrix.job-type == 'standard'
- working-directory: build
- env:
- BITCOIND: '${{ github.workspace }}\build\src\Release\bitcoind.exe'
- BITCOINCLI: '${{ github.workspace }}\build\src\Release\bitcoin-cli.exe'
- BITCOINUTIL: '${{ github.workspace }}\build\src\Release\bitcoin-util.exe'
- BITCOINWALLET: '${{ github.workspace }}\build\src\Release\bitcoin-wallet.exe'
- TEST_RUNNER_EXTRA: ${{ github.event_name != 'pull_request' && '--extended' || '' }}
- shell: cmd
- run: py -3 test\functional\test_runner.py --jobs %NUMBER_OF_PROCESSORS% --ci --quiet --tmpdirprefix=%RUNNER_TEMP% --combinedlogslen=99999999 --timeout-factor=%TEST_RUNNER_TIMEOUT_FACTOR% %TEST_RUNNER_EXTRA%
-
- - name: Clone corpora
- if: matrix.job-type == 'fuzz'
- run: |
- git clone --depth=1 https://github.com/bitcoin-core/qa-assets "$env:RUNNER_TEMP\qa-assets"
- Set-Location "$env:RUNNER_TEMP\qa-assets"
- Write-Host "Using qa-assets repo from commit ..."
- git log -1
-
- - name: Run fuzz tests
- if: matrix.job-type == 'fuzz'
- working-directory: build
- env:
- BITCOINFUZZ: '${{ github.workspace }}\build\src\test\fuzz\Release\fuzz.exe'
- shell: cmd
- run: |
- py -3 test\fuzz\test_runner.py --par %NUMBER_OF_PROCESSORS% --loglevel DEBUG %RUNNER_TEMP%\qa-assets\fuzz_corpora
-
- asan-lsan-ubsan-integer-no-depends-usdt:
- name: 'ASan + LSan + UBSan + integer, no depends, USDT'
- runs-on: ubuntu-24.04 # has to match container in ci/test/00_setup_env_native_asan.sh for tracing tools
- if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }}
- timeout-minutes: 120
- env:
- FILE_ENV: "./ci/test/00_setup_env_native_asan.sh"
- DANGER_CI_ON_HOST_CACHE_FOLDERS: 1
- steps:
- - name: Checkout
- uses: actions/checkout@v4
-
- - name: Set Ccache directory
- run: echo "CCACHE_DIR=${RUNNER_TEMP}/ccache_dir" >> "$GITHUB_ENV"
-
- - name: Set base root directory
- run: echo "BASE_ROOT_DIR=${RUNNER_TEMP}" >> "$GITHUB_ENV"
-
- - name: Restore Ccache cache
- id: ccache-cache
- uses: actions/cache/restore@v4
- with:
- path: ${{ env.CCACHE_DIR }}
- key: ${{ github.job }}-ccache-${{ github.run_id }}
- restore-keys: ${{ github.job }}-ccache-
-
- - name: Enable bpfcc script
- # In the image build step, no external environment variables are available,
- # so any settings will need to be written to the settings env file:
- run: sed -i "s|\${INSTALL_BCC_TRACING_TOOLS}|true|g" ./ci/test/00_setup_env_native_asan.sh
-
- - name: CI script
- run: ./ci/test_run_all.sh
-
- - name: Save Ccache cache
- uses: actions/cache/save@v4
- if: github.event_name != 'pull_request' && steps.ccache-cache.outputs.cache-hit != 'true'
- with:
- path: ${{ env.CCACHE_DIR }}
- # https://github.com/actions/cache/blob/main/tips-and-workarounds.md#update-a-cache
- key: ${{ github.job }}-ccache-${{ github.run_id }}
diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml
new file mode 100644
index 000000000000..b2a66cfa0045
--- /dev/null
+++ b/.github/workflows/publish-results.yml
@@ -0,0 +1,319 @@
+name: Publish Results
+on:
+ workflow_run:
+ workflows: ["CI"]
+ types: [completed]
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ if: ${{ github.event.workflow_run.conclusion == 'success' }}
+ permissions:
+ actions: read
+ contents: write
+ checks: read
+ env:
+ NETWORKS: "mainnet-default,mainnet-large,signet"
+ outputs:
+ speedups: ${{ steps.organize.outputs.speedups }}
+ pr-number: ${{ steps.organize.outputs.pr-number }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: gh-pages
+ - name: Download artifacts
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ gh run download ${{ github.event.workflow_run.id }} --repo ${{ github.repository }}
+
+ - name: Extract artifacts
+ run: |
+ for network in ${NETWORKS//,/ }; do
+ if [ -d "result-${network}" ]; then
+ mkdir -p "${network}-results"
+ mv "result-${network}/results.json" "${network}-results/"
+ fi
+
+ if [ -d "flamegraph-${network}" ]; then
+ mkdir -p "${network}-flamegraph"
+ mv "flamegraph-${network}"/* "${network}-flamegraph/"
+ fi
+
+ if [ -d "run-metadata-${network}" ]; then
+ mkdir -p "${network}-metadata"
+ mv "run-metadata-${network}"/* "${network}-metadata/"
+ fi
+
+ if [ -d "pngs-${network}" ]; then
+ mkdir -p "${network}-plots"
+ mv "pngs-${network}"/* "${network}-plots/"
+ fi
+ done
+ - name: Organize results
+ id: organize
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const fs = require('fs');
+ const path = require('path');
+ const networks = process.env.NETWORKS.split(',');
+ let prNumber = 'main';
+ let runId;
+
+ // First, extract metadata and get PR number
+ for (const network of networks) {
+ if (fs.existsSync(`${network}-metadata/github.json`)) {
+ const metadata = JSON.parse(fs.readFileSync(`${network}-metadata/github.json`, 'utf8'));
+ prNumber = metadata.event.pull_request?.number || prNumber;
+ runId = metadata.run_id;
+ }
+ }
+
+ if (!runId) {
+ console.error('No valid metadata found for any network');
+ process.exit(1);
+ }
+
+ // Create directory structure
+ const resultDir = `results/pr-${prNumber}/${runId}`;
+ fs.mkdirSync(resultDir, { recursive: true });
+
+ // Now copy metadata files
+ for (const network of networks) {
+ if (fs.existsSync(`${network}-metadata/github.json`)) {
+ const metadataDir = `${resultDir}/${network}-metadata`;
+ fs.mkdirSync(metadataDir, { recursive: true });
+ fs.copyFileSync(`${network}-metadata/github.json`, `${metadataDir}/github.json`);
+ }
+ }
+
+ // Process each network's results
+ const combinedResults = {
+ results: [],
+ speedups: {}
+ };
+
+ for (const network of networks) {
+ if (fs.existsSync(`${network}-results`)) {
+ const networkResults = JSON.parse(fs.readFileSync(`${network}-results/results.json`, 'utf8'));
+ let baseMean, headMean;
+
+ // Add network name to each result and collect means
+ networkResults.results.forEach(result => {
+ result.network = network;
+ combinedResults.results.push(result);
+ if (result.command.includes('base')) {
+ baseMean = result.mean;
+ } else if (result.command.includes('head')) {
+ headMean = result.mean;
+ }
+ });
+
+ // Calculate speedup if we have both measurements
+ if (baseMean && headMean) {
+ const speedup = baseMean > 0 ? ((baseMean - headMean) / baseMean * 100).toFixed(1) : 'N/A';
+ combinedResults.speedups[network] = speedup;
+ }
+
+ // Move flamegraphs
+ if (fs.existsSync(`${network}-flamegraph`)) {
+ fs.readdirSync(`${network}-flamegraph`).forEach(file => {
+ const sourceFile = `${network}-flamegraph/${file}`;
+ const targetFile = `${resultDir}/${network}-${file}`;
+ fs.copyFileSync(sourceFile, targetFile);
+ });
+ }
+
+ // Move plots
+ if (fs.existsSync(`${network}-plots`)) {
+ const targetPlotsDir = `${resultDir}/${network}-plots`;
+ fs.mkdirSync(targetPlotsDir, { recursive: true });
+ fs.readdirSync(`${network}-plots`).forEach(plot => {
+ const sourcePlot = `${network}-plots/${plot}`;
+ const targetPlot = `${targetPlotsDir}/${plot}`;
+ fs.copyFileSync(sourcePlot, targetPlot);
+ });
+ }
+ }
+ }
+
+ // Write combined results
+ fs.writeFileSync(`${resultDir}/results.json`, JSON.stringify(combinedResults, null, 2));
+
+ // Function to generate content for a single column (Base or Head)
+ function generateColumnContent(results, network, commitType, resultDir) {
+ return `
+
+
${commitType}
+ ${results
+ .filter(result => result.network === network && result.command.includes(commitType.toLowerCase()))
+ .map(result => {
+ const commitShortId = result.parameters.commit.slice(0, 8);
+ const flameGraphFile = `${network}-${result.parameters.commit}-flamegraph.svg`;
+ const flameGraphPath = `${resultDir}/${network}-${result.parameters.commit}-flamegraph.svg`;
+
+ // Query PNG files dynamically
+ const plotDir = `${resultDir}/${network}-plots`;
+ const plots = fs.existsSync(plotDir)
+ ? fs.readdirSync(plotDir)
+ .filter(plot => plot.startsWith(`${result.parameters.commit}-`))
+ .map(
+ plot => `
+
+
+
+ `
+ )
+ .join('')
+ : '';
+
+ return `
+
+
+
+ | Command |
+ Mean (s) |
+ Std Dev |
+ User (s) |
+ System (s) |
+
+
+
+
+ |
+ ${result.command.replace(
+ /\((\w+)\)/,
+ (_, commit) => `(${commit.slice(0, 8)})`
+ )}
+ |
+ ${result.mean.toFixed(3)} |
+ ${result.stddev?.toFixed(3) || 'N/A'} |
+ ${result.user.toFixed(3)} |
+ ${result.system.toFixed(3)} |
+
+ ${fs.existsSync(flameGraphPath) ? `
+
+ |
+
+
+
+ |
+
+ ` : ''}
+
+
+ ${plots}
+ `;
+ })
+ .join('')}
+
`;
+ }
+
+ // Create index.html for this run
+ const indexHtml = `
+
+
+ Benchmark Results
+
+
+
+
+
Benchmark Results
+
+
PR #${prNumber} - Run ${runId}
+ ${networks
+ .map(network => `
+
+
+ ${network} Results
+ ${combinedResults.speedups[network] ?
+ `(${combinedResults.speedups[network]}% speedup)`
+ : ''}
+
+
+ ${generateColumnContent(combinedResults.results, network, 'Base', resultDir)}
+ ${generateColumnContent(combinedResults.results, network, 'Head', resultDir)}
+
+
`)
+ .join('')}
+
+
+
+ `;
+
+ fs.writeFileSync(`${resultDir}/index.html`, indexHtml);
+
+ // Update main index.html
+ const prs = fs.readdirSync('results')
+ .filter(dir => dir.startsWith('pr-'))
+ .map(dir => ({
+ pr: dir.replace('pr-', ''),
+ runs: fs.readdirSync(`results/${dir}`)
+ }));
+
+ const mainIndexHtml = `
+
+
+ Bitcoin Benchmark Results
+
+
+
+
+
Bitcoin Benchmark Results
+
+
Available Results
+
+ ${prs.map(({pr, runs}) => `
+ - PR #${pr}
+
+ ${runs.map(run => `
+ - Run ${run}
+ `).join('')}
+
+
+ `).join('')}
+
+
+
+
+ `;
+
+ fs.writeFileSync('index.html', mainIndexHtml);
+
+ // Set outputs for use in PR comment
+ const resultUrl = `https://${context.repo.owner}.github.io/${context.repo.name}/results/pr-${prNumber}/${runId}/index.html`;
+ const speedupString = Object.entries(combinedResults.speedups)
+ .map(([network, speedup]) => `${network}: ${speedup}%`)
+ .join(', ');
+
+ core.setOutput('result-url', resultUrl);
+ core.setOutput('speedups', speedupString);
+ core.setOutput('pr-number', prNumber);
+ return { url: resultUrl, speedups: speedupString };
+ - name: Upload Pages artifact
+ uses: actions/upload-pages-artifact@v3
+ with:
+ path: results
+ - name: Commit and push to gh-pages
+ run: |
+ git config --global user.name "github-actions[bot]"
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git add results/ index.html
+ git commit -m "Update benchmark results from run ${{ github.event.workflow_run.id }}"
+ git push origin gh-pages
+ comment-pr:
+ needs: build
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ actions: read
+ steps:
+ - name: Comment on PR
+ if: ${{ needs.build.outputs.pr-number != 'main' }}
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ gh pr comment ${{ needs.build.outputs.pr-number }} \
+ --repo ${{ github.repository }} \
+ --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed.
+ 🚀 Speedups: ${{ needs.build.outputs.speedups }}"
diff --git a/.gitignore b/.gitignore
index a419c9bde745..240c4a116082 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,4 @@ test/lint/test_runner/target/
/guix-build-*
/ci/scratch/
+utxo-signet-160000.dat
diff --git a/README.md b/README.md
index c5b6ce4588ef..309ea94a3e04 100644
--- a/README.md
+++ b/README.md
@@ -1,79 +1,125 @@
-Bitcoin Core integration/staging tree
-=====================================
+# benchcoin
-https://bitcoincore.org
+A Bitcoin Core benchmarking fork
-For an immediately usable, binary version of the Bitcoin Core software, see
-https://bitcoincore.org/en/download/.
+This repository is a fork of Bitcoin Core that performs automated assumeutxo-based IBD benchmarking.
+It allows you to measure and compare the performance impact of certain types of changes to Bitcoin Core's codebase on a longer-running IBD benchmark, in a (pretty) reproducible fashion.
-What is Bitcoin Core?
----------------------
+## Features
-Bitcoin Core connects to the Bitcoin peer-to-peer network to download and fully
-validate blocks and transactions. It also includes a wallet and graphical user
-interface, which can be optionally built.
+- Automated IBD benchmarking on pull requests
+- Multiple configurations:
+ - Signet (fast fail test)
+ - Mainnet with default cache
+ - Mainnet with large cache
+- Performance visualizations including:
+ - Flamegraphs for CPU profiling
+ - Time series plots of various metrics
+ - Compare `base` (bitcoin/bitcoin:master) and `head` (PR)
-Further information about Bitcoin Core is available in the [doc folder](/doc).
+## Example Flamegraph
-License
--------
+Below is an example flamegraph showing CPU utilization during IBD:
-Bitcoin Core is released under the terms of the MIT license. See [COPYING](COPYING) for more
-information or see https://opensource.org/licenses/MIT.
+
-Development Process
--------------------
+## How to use it
-The `master` branch is regularly built (see `doc/build-*.md` for instructions) and tested, but it is not guaranteed to be
-completely stable. [Tags](https://github.com/bitcoin/bitcoin/tags) are created
-regularly from release branches to indicate new official, stable release versions of Bitcoin Core.
+1. Open a Pull Request against **this repo**
+2. Wait for the bot to comment on your PR after it's finished.
-The https://github.com/bitcoin-core/gui repository is used exclusively for the
-development of the GUI. Its master branch is identical in all monotree
-repositories. Release branches and tags do not exist, so please do not fork
-that repository unless it is for development reasons.
+## How it works
-The contribution workflow is described in [CONTRIBUTING.md](CONTRIBUTING.md)
-and useful hints for developers can be found in [doc/developer-notes.md](doc/developer-notes.md).
+When you open a pull request against this repository:
-Testing
--------
+1. The CI workflow automatically builds both the base and PR versions of bitcoind
+2. Runs IBD benchmarks using assumeutxo snapshots
+3. Records performance metrics and creates various visualizations
+4. Posts results as a comment on your PR
-Testing and code review is the bottleneck for development; we get more pull
-requests than we can review and test on short notice. Please be patient and help out by testing
-other people's pull requests, and remember this is a security-critical project where any mistake might cost people
-lots of money.
+The benchmarks test three configurations:
+- Signet
+ - From snapshot @ height 160,000 to height 220,000
+- Mainnet-default: with default (450 MB) dbcache
+ - From snapshot @ height 840,000 to height 855,000
+- Mainnet-large: with 32000 MB dbcache
+ - From snapshot @ height 840,000 to height 855,000
-### Automated Testing
+## Benchmark Outputs
-Developers are strongly encouraged to write [unit tests](src/test/README.md) for new code, and to
-submit new unit tests for old code. Unit tests can be compiled and run
-(assuming they weren't disabled during the generation of the build system) with: `ctest`. Further details on running
-and extending unit tests can be found in [/src/test/README.md](/src/test/README.md).
+For each benchmark run, you'll get a github pages page with:
-There are also [regression and integration tests](/test), written
-in Python.
-These tests can be run (if the [test dependencies](/test) are installed) with: `build/test/functional/test_runner.py`
-(assuming `build` is your build directory).
+- Timing comparisons between base and PR versions
+- CPU flamegraphs showing where time is spent
+- Time series plots showing:
+ - Block height vs time
+ - Cache size vs block height
+ - Cache size vs time
+ - Transaction count vs block height
+ - Coins cache size vs time
+ - LevelDB metrics
+ - Memory pool metrics
-The CI (Continuous Integration) systems make sure that every pull request is built for Windows, Linux, and macOS,
-and that unit/sanity tests are run automatically.
+## Local Development
-### Manual Quality Assurance (QA) Testing
+To run benchmarks locally (WIP, and Linux-only due to [shell.nix](shell.nix) limitations):
-Changes should be tested by somebody other than the developer who wrote the
-code. This is especially important for large or high-risk changes. It is useful
-to add a test plan to the pull request description if testing the changes is
-not straightforward.
+1. Make sure you have [Nix package manager](https://nixos.org/download/) installed
-Translations
-------------
+2. Setup the Nix development environment:
+```bash
+nix-shell
+```
-Changes to translations as well as new translations can be submitted to
-[Bitcoin Core's Transifex page](https://www.transifex.com/bitcoin/bitcoin/).
+3. Run a local benchmark:
+```bash
+just run-signet
+```
-Translations are periodically pulled from Transifex and merged into the git repository. See the
-[translation process](doc/translation_process.md) for details on how this works.
+This will:
+- Create a temporary directory for testing
+- Build both base and PR versions
+- Download the required UTXO snapshot if needed
+- Run the benchmark
+- Generate performance visualizations
-**Important**: We do not accept translation changes as GitHub pull requests because the next
-pull from Transifex would automatically overwrite them again.
+## Technical Details
+
+The benchmarking system uses:
+- [Hyperfine](https://github.com/sharkdp/hyperfine) for benchmark timing
+- [Flamegraph](https://github.com/willcl-ark/flamegraph) for CPU profiling
+- [matplotlib](https://matplotlib.org/) for metric visualization
+- [GitHub Actions](https://github.com/features/actions) for CI automation
+
+The system leverages assumeutxo to speed up IBD (to a more interesting height) by loading a snapshot.
+
+We use a custom assumeutxo patch which introduces two command-line options for assumeutxo, specifically for
+benchmarking. These commands are:
+
+```
+-pausebackgroundsync - pauses background verification of historical blocks.
+-loadutxosnapshot= - load an assumeutxo snapshot on startup, instead of needing to go through the RPC command.
+                     The node will shut down immediately after the snapshot has been loaded.
+```
+
+### Runner & seed
+
+The CI runner is self-hosted on a Hetzner AX52 running at the bitcoin-dev-tools organisation level.
+It is running NixOS using configuration found in this repo: [nix-github-runner](https://github.com/bitcoin-dev-tools/nix-github-runner) for easier deployment and reproducibility.
+
+The runner host has 16 cores, with one used for system, one for `flamegraph` (i.e. `perf record`) and 14 dedicated to the Bitcoin Core node under test.
+
+The benchmarking node on the runner is served blocks over the (real) "internet" (it may be LAN as it's within a single Hetzner region) via a single peer to exercise full IBD codepaths. This naturally may introduce some variance, but it was deemed preferable to running another Bitcoin Core node on the same machine.
+
+This seed peer is another Hetzner VPS in the same region, and its configuration can be found here: [nix-seed-node](https://github.com/bitcoin-dev-tools/nix-seed-node)
+
+## Contributing
+
+1. Fork this repository (or bitcoin/bitcoin and add this as a remote)
+2. Make your changes to Bitcoin Core
+3. Open a pull request **against this repo. NOT bitcoin/bitcoin**
+4. Wait for benchmark results to be posted on your PR here
+
+## License
+
+This project is licensed under the same terms as Bitcoin Core - see the [COPYING](COPYING) file for details.
diff --git a/bench-ci/parse_and_plot.py b/bench-ci/parse_and_plot.py
new file mode 100755
index 000000000000..f2548a43cab6
--- /dev/null
+++ b/bench-ci/parse_and_plot.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3
+import sys
+import os
+import re
+import datetime
+import matplotlib.pyplot as plt
+
+
+def parse_updatetip_line(line):
+ match = re.match(
+ r'^([\d\-:TZ]+) UpdateTip: new best.+height=(\d+).+tx=(\d+).+cache=([\d.]+)MiB\((\d+)txo\).+mem=(\d+)MiB',
+ line
+ )
+ if not match:
+ return None
+ iso_str, height_str, tx_str, cache_size_mb_str, cache_coins_count_str, mem_str = match.groups()
+ return (
+ datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ"),
+ int(height_str),
+ int(tx_str),
+ float(cache_size_mb_str),
+ int(cache_coins_count_str),
+ int(mem_str),
+ )
+
+def parse_leveldb_compact_line(line):
+ match = re.match(r'^([\d\-:TZ]+) \[leveldb] Compacting.*files', line)
+ if not match:
+ return None
+ iso_str = match.groups()[0]
+ parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")
+ return parsed_datetime
+
+
+def parse_leveldb_generated_table_line(line):
+ match = re.match(r'^([\d\-:TZ]+) \[leveldb] Generated table.*: (\d+) keys, (\d+) bytes', line)
+ if not match:
+ return None
+ iso_str, keys_count_str, bytes_count_str = match.groups()
+ parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")
+ return parsed_datetime, int(keys_count_str), int(bytes_count_str)
+
+def parse_validation_txadd_line(line):
+ match = re.match(r'^([\d\-:TZ]+) \[validation] TransactionAddedToMempool: txid=.+wtxid=.+', line)
+ if not match:
+ return None
+ iso_str = match.groups()[0]
+ parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")
+ return parsed_datetime
+
+
+def parse_coindb_write_batch_line(line):
+ match = re.match(r'^([\d\-:TZ]+) \[coindb] Writing (partial|final) batch of ([\d.]+) MiB', line)
+ if not match:
+ return None
+ iso_str, is_partial_str, size_mb_str = match.groups()
+ parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")
+ return parsed_datetime, is_partial_str, float(size_mb_str)
+
+
+def parse_coindb_commit_line(line):
+ match = re.match(r'^([\d\-:TZ]+) \[coindb] Committed (\d+) changed transaction outputs', line)
+ if not match:
+ return None
+ iso_str, txout_count_str = match.groups()
+ parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")
+ return parsed_datetime, int(txout_count_str)
+
+def parse_log_file(log_file):
+ with open(log_file, 'r', encoding='utf-8') as f:
+ update_tip_data = []
+ leveldb_compact_data = []
+ leveldb_gen_table_data = []
+ validation_txadd_data = []
+ coindb_write_batch_data = []
+ coindb_commit_data = []
+
+ for line in f:
+ if result := parse_updatetip_line(line):
+ update_tip_data.append(result)
+ elif result := parse_leveldb_compact_line(line):
+ leveldb_compact_data.append(result)
+ elif result := parse_leveldb_generated_table_line(line):
+ leveldb_gen_table_data.append(result)
+ elif result := parse_validation_txadd_line(line):
+ validation_txadd_data.append(result)
+ elif result := parse_coindb_write_batch_line(line):
+ coindb_write_batch_data.append(result)
+ elif result := parse_coindb_commit_line(line):
+ coindb_commit_data.append(result)
+
+ if not update_tip_data:
+ print("No UpdateTip entries found.")
+ sys.exit(0)
+
+ assert all(update_tip_data[i][0] <= update_tip_data[i + 1][0] for i in
+ range(len(update_tip_data) - 1)), "UpdateTip entries are not sorted by time"
+
+ return update_tip_data, leveldb_compact_data, leveldb_gen_table_data, validation_txadd_data, coindb_write_batch_data, coindb_commit_data
+
+
+def generate_plot(x, y, x_label, y_label, title, output_file):
+ if not x or not y:
+ print(f"Skipping plot '{title}' as there is no data.")
+ return
+
+ plt.figure(figsize=(30, 10))
+ plt.plot(x, y)
+ plt.title(title, fontsize=20)
+ plt.xlabel(x_label, fontsize=16)
+ plt.ylabel(y_label, fontsize=16)
+ plt.grid(True)
+ plt.xticks(rotation=90, fontsize=12)
+ plt.yticks(fontsize=12)
+ plt.tight_layout()
+ plt.savefig(output_file)
+ plt.close()
+ print(f"Saved plot to {output_file}")
+
+
+if __name__ == "__main__":
+ if len(sys.argv) != 4:
+ print(f"Usage: {sys.argv[0]} <commit> <log_file> <png_dir>")
+ sys.exit(1)
+
+ commit = sys.argv[1]
+
+ log_file = sys.argv[2]
+ if not os.path.isfile(log_file):
+ print(f"File not found: {log_file}")
+ sys.exit(1)
+
+ png_dir = sys.argv[3]
+ os.makedirs(png_dir, exist_ok=True)
+
+ update_tip_data, leveldb_compact_data, leveldb_gen_table_data, validation_txadd_data, coindb_write_batch_data, coindb_commit_data = parse_log_file(log_file)
+ times, heights, tx_counts, cache_size, cache_count, mem_size = zip(*update_tip_data)
+ float_minutes = [(t - times[0]).total_seconds() / 60 for t in times]
+
+ generate_plot(float_minutes, heights, "Elapsed minutes", "Block Height", "Block Height vs Time", os.path.join(png_dir, f"{commit}-height_vs_time.png"))
+ generate_plot(heights, cache_size, "Block Height", "Cache Size (MiB)", "Cache Size vs Block Height", os.path.join(png_dir, f"{commit}-cache_vs_height.png"))
+ generate_plot(float_minutes, cache_size, "Elapsed minutes", "Cache Size (MiB)", "Cache Size vs Time", os.path.join(png_dir, f"{commit}-cache_vs_time.png"))
+ generate_plot(heights, tx_counts, "Block Height", "Transaction Count", "Transactions vs Block Height", os.path.join(png_dir, f"{commit}-tx_vs_height.png"))
+ generate_plot(heights, mem_size, "Block Height", "Total Memory (MiB)", "Memory vs Block Height", os.path.join(png_dir, f"{commit}-mem_vs_height.png"))
+ generate_plot(float_minutes, cache_count, "Elapsed minutes", "Coins Cache Size", "Coins Cache Size vs Time", os.path.join(png_dir, f"{commit}-coins_cache_vs_time.png"))
+
+ # LevelDB Compaction and Generated Tables
+ if leveldb_compact_data:
+ leveldb_compact_times = [(t - times[0]).total_seconds() / 60 for t in leveldb_compact_data]
+ leveldb_compact_y = [1 for _ in leveldb_compact_times] # dummy y axis to mark compactions
+ generate_plot(leveldb_compact_times, leveldb_compact_y, "Elapsed minutes", "LevelDB Compaction", "LevelDB Compaction Events vs Time", os.path.join(png_dir, f"{commit}-leveldb_compact_vs_time.png"))
+ if leveldb_gen_table_data:
+ leveldb_gen_table_times, leveldb_gen_table_keys, leveldb_gen_table_bytes = zip(*leveldb_gen_table_data)
+ leveldb_gen_table_float_minutes = [(t - times[0]).total_seconds() / 60 for t in leveldb_gen_table_times]
+ generate_plot(leveldb_gen_table_float_minutes, leveldb_gen_table_keys, "Elapsed minutes", "Number of keys", "LevelDB Keys Generated vs Time", os.path.join(png_dir, f"{commit}-leveldb_gen_keys_vs_time.png"))
+ generate_plot(leveldb_gen_table_float_minutes, leveldb_gen_table_bytes, "Elapsed minutes", "Number of bytes", "LevelDB Bytes Generated vs Time", os.path.join(png_dir, f"{commit}-leveldb_gen_bytes_vs_time.png"))
+
+ # validation mempool add transaction lines
+ if validation_txadd_data:
+ validation_txadd_times = [(t - times[0]).total_seconds() / 60 for t in validation_txadd_data]
+ validation_txadd_y = [1 for _ in validation_txadd_times] # dummy y axis to mark transaction additions
+ generate_plot(validation_txadd_times, validation_txadd_y, "Elapsed minutes", "Transaction Additions", "Transaction Additions to Mempool vs Time", os.path.join(png_dir, f"{commit}-validation_txadd_vs_time.png"))
+
+ # coindb write batch lines
+ if coindb_write_batch_data:
+ coindb_write_batch_times, is_partial_strs, sizes_mb = zip(*coindb_write_batch_data)
+ coindb_write_batch_float_minutes = [(t - times[0]).total_seconds() / 60 for t in coindb_write_batch_times]
+ generate_plot(coindb_write_batch_float_minutes, sizes_mb, "Elapsed minutes", "Batch Size MiB", "Coin Database Partial/Final Write Batch Size vs Time", os.path.join(png_dir, f"{commit}-coindb_write_batch_size_vs_time.png"))
+ if coindb_commit_data:
+ coindb_commit_times, txout_counts = zip(*coindb_commit_data)
+ coindb_commit_float_minutes = [(t - times[0]).total_seconds() / 60 for t in coindb_commit_times]
+ generate_plot(coindb_commit_float_minutes, txout_counts, "Elapsed minutes", "Transaction Output Count", "Coin Database Transaction Output Committed vs Time", os.path.join(png_dir, f"{commit}-coindb_commit_txout_vs_time.png"))
+
+ print("Plots saved!")
\ No newline at end of file
diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh
new file mode 100755
index 000000000000..64b2af6fdb5b
--- /dev/null
+++ b/bench-ci/run-assumeutxo-bench.sh
@@ -0,0 +1,154 @@
+#!/usr/bin/env bash
+
+set -euxo pipefail
+
+# Helper function to check and clean datadir
+clean_datadir() {
+ set -euxo pipefail
+
+ local TMP_DATADIR="$1"
+
+ # Create the directory if it doesn't exist
+ mkdir -p "${TMP_DATADIR}"
+
+ # If we're in CI, clean without confirmation
+ if [ -n "${CI:-}" ]; then
+ rm -Rf "${TMP_DATADIR:?}"/*
+ else
+ read -rp "Are you sure you want to delete everything in ${TMP_DATADIR}? [y/N] " response
+ if [[ "$response" =~ ^[Yy]$ ]]; then
+ rm -Rf "${TMP_DATADIR:?}"/*
+ else
+ echo "Aborting..."
+ exit 1
+ fi
+ fi
+}
+
+# Helper function to clear logs
+clean_logs() {
+ set -euxo pipefail
+
+ local TMP_DATADIR="$1"
+ local logfile="${TMP_DATADIR}/debug.log"
+
+ echo "Checking for ${logfile}"
+ if [ -e "${logfile}" ]; then
+ echo "Removing ${logfile}"
+ rm "${logfile}"
+ fi
+}
+
+# Execute CMD before each set of timing runs.
+setup_assumeutxo_snapshot_run() {
+ set -euxo pipefail
+
+ local TMP_DATADIR="$1"
+ local commit="$2"
+ clean_datadir "${TMP_DATADIR}"
+}
+
+# Execute CMD before each timing run.
+prepare_assumeutxo_snapshot_run() {
+ set -euxo pipefail
+
+ local TMP_DATADIR="$1"
+ local UTXO_PATH="$2"
+ local CONNECT_ADDRESS="$3"
+ local CHAIN="$4"
+ local DBCACHE="$5"
+ local commit="$6"
+ local BINARIES_DIR="$7"
+
+ # Run the actual preparation steps
+ clean_datadir "${TMP_DATADIR}"
+ # Use the pre-built binaries from BINARIES_DIR
+ "${BINARIES_DIR}/${commit}/bitcoind" --help
+ taskset -c 0-15 "${BINARIES_DIR}/${commit}/bitcoind" -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -stopatheight=1 -printtoconsole=1
+ taskset -c 0-15 "${BINARIES_DIR}/${commit}/bitcoind" -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -dbcache="${DBCACHE}" -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" -printtoconsole=1 || true
+ clean_logs "${TMP_DATADIR}"
+}
+
+# Executed after each timing run
+conclude_assumeutxo_snapshot_run() {
+ set -euxo pipefail
+
+ local commit="$1"
+ local TMP_DATADIR="$2"
+ local PNG_DIR="$3"
+
+ # Search in subdirs e.g. $datadir/signet
+ debug_log=$(find "${TMP_DATADIR}" -name debug.log -print -quit)
+ if [ -n "${debug_log}" ]; then
+ echo "Generating plots from ${debug_log}"
+ if [ -x "bench-ci/parse_and_plot.py" ]; then
+ bench-ci/parse_and_plot.py "${commit}" "${debug_log}" "${PNG_DIR}"
+ else
+ ls -al "bench-ci/"
+ echo "parse_and_plot.py not found or not executable, skipping plot generation"
+ fi
+ else
+ ls -al "${TMP_DATADIR}/"
+ echo "debug.log not found, skipping plot generation"
+ fi
+
+ # Move flamegraph if exists
+ if [ -e flamegraph.svg ]; then
+ mv flamegraph.svg "${commit}"-flamegraph.svg
+ fi
+}
+
+# Execute CMD after the completion of all benchmarking runs for each individual
+# command to be benchmarked.
+cleanup_assumeutxo_snapshot_run() {
+ set -euxo pipefail
+
+ local TMP_DATADIR="$1"
+
+ # Clean up the datadir
+ clean_datadir "${TMP_DATADIR}"
+}
+
+run_benchmark() {
+ local base_commit="$1"
+ local head_commit="$2"
+ local TMP_DATADIR="$3"
+ local UTXO_PATH="$4"
+ local results_file="$5"
+ local png_dir="$6"
+ local chain="$7"
+ local stop_at_height="$8"
+ local connect_address="$9"
+ local dbcache="${10}"
+ local BINARIES_DIR="${11}"
+
+ # Export functions so they can be used by hyperfine
+ export -f setup_assumeutxo_snapshot_run
+ export -f prepare_assumeutxo_snapshot_run
+ export -f conclude_assumeutxo_snapshot_run
+ export -f cleanup_assumeutxo_snapshot_run
+ export -f clean_datadir
+ export -f clean_logs
+
+ # Run hyperfine
+ hyperfine \
+ --shell=bash \
+ --setup "setup_assumeutxo_snapshot_run ${TMP_DATADIR} {commit}" \
+ --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain} ${dbcache} {commit} ${BINARIES_DIR}" \
+ --conclude "conclude_assumeutxo_snapshot_run {commit} ${TMP_DATADIR} ${png_dir}" \
+ --cleanup "cleanup_assumeutxo_snapshot_run ${TMP_DATADIR}" \
+ --runs 1 \
+ --export-json "${results_file}" \
+ --command-name "base (${base_commit})" \
+ --command-name "head (${head_commit})" \
+ "taskset -c 1 flamegraph --palette bitcoin --title 'bitcoind assumeutxo IBD@{commit}' -c 'record -F 101 --call-graph fp' -- taskset -c 2-15 ${BINARIES_DIR}/{commit}/bitcoind -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0 -debug=coindb -debug=leveldb -debug=bench -debug=validation" \
+ -L commit "head,base"
+}
+
+# Main execution
+if [ "$#" -ne 11 ]; then
+ echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir png_dir chain stop_at_height connect_address dbcache BINARIES_DIR"
+ exit 1
+fi
+
+run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}" "${11}"
diff --git a/contrib/guix/libexec/build.sh b/contrib/guix/libexec/build.sh
index 6c252e787022..b7ffcaba3952 100755
--- a/contrib/guix/libexec/build.sh
+++ b/contrib/guix/libexec/build.sh
@@ -171,6 +171,12 @@ make -C depends --jobs="$JOBS" HOST="$HOST" \
${SOURCES_PATH+SOURCES_PATH="$SOURCES_PATH"} \
${BASE_CACHE+BASE_CACHE="$BASE_CACHE"} \
${SDK_PATH+SDK_PATH="$SDK_PATH"} \
+ NO_QT=1 \
+ NO_QR=1 \
+ NO_ZMQ=1 \
+ NO_WALLET=1 \
+ NO_BDB=1 \
+ NO_USDT=1 \
x86_64_linux_CC=x86_64-linux-gnu-gcc \
x86_64_linux_CXX=x86_64-linux-gnu-g++ \
x86_64_linux_AR=x86_64-linux-gnu-gcc-ar \
@@ -208,6 +214,9 @@ mkdir -p "$OUTDIR"
# CONFIGFLAGS
CONFIGFLAGS="-DREDUCE_EXPORTS=ON -DBUILD_BENCH=OFF -DBUILD_GUI_TESTS=OFF -DBUILD_FUZZ_BINARY=OFF"
+# BENCHCOINFLAGS
+BENCHCOINFLAGS="-DBUILD_CLI=OFF -DBUILD_TESTS=OFF -DCMAKE_CXX_FLAGS=-fno-omit-frame-pointer"
+
# CFLAGS
HOST_CFLAGS="-O2 -g"
HOST_CFLAGS+=$(find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -ffile-prefix-map={}=/usr" \;)
@@ -242,17 +251,18 @@ mkdir -p "$DISTSRC"
cmake -S . -B build \
--toolchain "${BASEPREFIX}/${HOST}/toolchain.cmake" \
-DWITH_CCACHE=OFF \
- ${CONFIGFLAGS}
+ ${CONFIGFLAGS} \
+ ${BENCHCOINFLAGS}
# Build Bitcoin Core
cmake --build build -j "$JOBS" ${V:+--verbose}
# Check that symbol/security checks tools are sane.
- cmake --build build --target test-security-check ${V:+--verbose}
+ # cmake --build build --target test-security-check ${V:+--verbose}
# Perform basic security checks on a series of executables.
- cmake --build build -j 1 --target check-security ${V:+--verbose}
+ # cmake --build build -j 1 --target check-security ${V:+--verbose}
# Check that executables only contain allowed version symbols.
- cmake --build build -j 1 --target check-symbols ${V:+--verbose}
+ # cmake --build build -j 1 --target check-symbols ${V:+--verbose}
mkdir -p "$OUTDIR"
@@ -304,15 +314,15 @@ mkdir -p "$DISTSRC"
(
cd installed
- case "$HOST" in
- *darwin*) ;;
- *)
- # Split binaries from their debug symbols
- {
- find "${DISTNAME}/bin" -type f -executable -print0
- } | xargs -0 -P"$JOBS" -I{} "${DISTSRC}/build/split-debug.sh" {} {} {}.dbg
- ;;
- esac
+ # case "$HOST" in
+ # *darwin*) ;;
+ # *)
+ # # Split binaries from their debug symbols
+ # {
+ # find "${DISTNAME}/bin" -type f -executable -print0
+ # } | xargs -0 -P"$JOBS" -I{} "${DISTSRC}/build/split-debug.sh" {} {} {}.dbg
+ # ;;
+ # esac
case "$HOST" in
*mingw*)
diff --git a/contrib/guix/libexec/prelude.bash b/contrib/guix/libexec/prelude.bash
index 428fc41e7393..19b188236926 100644
--- a/contrib/guix/libexec/prelude.bash
+++ b/contrib/guix/libexec/prelude.bash
@@ -48,9 +48,13 @@ fi
################
# Execute "$@" in a pinned, possibly older version of Guix, for reproducibility
# across time.
+
+GUIX_PROFILE=/home/github-runner/.config/guix/current
+. "$GUIX_PROFILE/etc/profile"
+
time-machine() {
# shellcheck disable=SC2086
- guix time-machine --url=https://git.savannah.gnu.org/git/guix.git \
+ guix time-machine --url=https://github.com/fanquake/guix.git \
--commit=53396a22afc04536ddf75d8f82ad2eafa5082725 \
--cores="$JOBS" \
--keep-failed \
diff --git a/doc/flamegraph.svg b/doc/flamegraph.svg
new file mode 100644
index 000000000000..b0246d6ce9ce
--- /dev/null
+++ b/doc/flamegraph.svg
@@ -0,0 +1,491 @@
+
\ No newline at end of file
diff --git a/justfile b/justfile
new file mode 100644
index 000000000000..aeca9851c0a6
--- /dev/null
+++ b/justfile
@@ -0,0 +1,120 @@
+set shell := ["bash", "-uc"]
+
+os := os()
+
+default:
+ just --list
+
+# Build base and head binaries for CI
+[group('ci')]
+build-assumeutxo-binaries-guix base_commit head_commit:
+ #!/usr/bin/env bash
+ set -euxo pipefail
+
+ mkdir -p binaries/base
+ mkdir -p binaries/head
+
+ for build in "base:{{ base_commit }}" "head:{{ head_commit }}"; do
+ name="${build%%:*}"
+ commit="${build#*:}"
+ git checkout "$commit"
+ HOSTS=x86_64-linux-gnu SOURCES_PATH=/data/SOURCES_PATH BASE_CACHE=/data/BASE_CACHE taskset -c 0-15 contrib/guix/guix-build
+
+ # Truncate commit hash to 12 characters
+ short_commit=$(echo "$commit" | cut -c 1-12)
+
+ # Extract the Guix output
+ tar -xzf "guix-build-${short_commit}/output/x86_64-linux-gnu/bitcoin-${short_commit}-x86_64-linux-gnu.tar.gz"
+
+ # Copy the binary to our binaries directory
+ cp "bitcoin-${short_commit}/bin/bitcoind" "binaries/${name}/bitcoind"
+
+ # Cleanup extracted files
+ rm -rf "bitcoin-${short_commit}"
+ done
+
+# Run signet assumeutxo CI workflow
+[group('ci')]
+run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir:
+ ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 220000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }}
+
+# Run mainnet assumeutxo CI workflow for default cache
+[group('ci')]
+run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir:
+ ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 855000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }}
+
+# Run mainnet assumeutxo CI workflow for large cache
+[group('ci')]
+run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir:
+ ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 855000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }}
+
+# Run a signet benchmark locally
+[group('local')]
+run-signet:
+ #!/usr/bin/env bash
+ set -euo pipefail
+ set -x
+
+ # Get git HEAD and merge-base with master (as BASE)
+ HEAD=$(git rev-parse HEAD)
+ BASE=$(git merge-base HEAD master)
+ echo "Using BASE: $BASE"
+ echo "Using HEAD: $HEAD"
+
+ # Make a random temp dir and save it as TMPDIR
+ TMPDIR=$(mktemp -d)
+ echo "Using temporary directory: $TMPDIR"
+
+ # Create required directories
+ mkdir -p "$TMPDIR/datadir"
+ mkdir -p "$TMPDIR/png"
+ mkdir -p "$TMPDIR/binaries"
+
+ # Build binaries
+ just build-assumeutxo-binaries-guix "$BASE" "$HEAD"
+ cp -r binaries/head "$TMPDIR/binaries/"
+ cp -r binaries/base "$TMPDIR/binaries/"
+
+ # Fetch utxo-signet-160000.dat if not exists in $CWD
+ if [ ! -f "./utxo-signet-160000.dat" ]; then
+ echo "Downloading utxo-signet-160000.dat..."
+ if command -v curl &> /dev/null; then
+ curl -L -o "./utxo-signet-160000.dat" "https://tmp.256k1.dev/utxo-signet-160000.dat"
+ elif command -v wget &> /dev/null; then
+ wget -O "./utxo-signet-160000.dat" "https://tmp.256k1.dev/utxo-signet-160000.dat"
+ else
+ echo "Error: Neither curl nor wget is available. Please install one of them."
+ exit 1
+ fi
+ echo "Download complete."
+ else
+ echo "Using existing utxo-signet-160000.dat"
+ fi
+
+ # Run signet CI
+ CI=1 just run-assumeutxo-signet-ci \
+ "$BASE" \
+ "$HEAD" \
+ "$TMPDIR/datadir" \
+ "$PWD/utxo-signet-160000.dat" \
+ "$TMPDIR/result" \
+ 16000 \
+ "$TMPDIR/png" \
+ "$TMPDIR/binaries"
+
+ echo "Results saved in: $TMPDIR/result"
+ echo "PNG files saved in: $TMPDIR/png"
+
+# Cherry-pick commits from a bitcoin core PR onto this branch
+[group('git')]
+pick-pr pr_number:
+ #!/usr/bin/env bash
+ set -euxo pipefail
+
+ if ! git remote get-url upstream 2>/dev/null | grep -q "bitcoin/bitcoin"; then
+ echo "Error: 'upstream' remote not found or doesn't point to bitcoin/bitcoin"
+ echo "Please add it with: `git remote add upstream https://github.com/bitcoin/bitcoin.git`"
+ exit 1
+ fi
+
+ git fetch upstream pull/{{ pr_number }}/head:bench-{{ pr_number }} && git cherry-pick $(git rev-list bench-{{ pr_number }} --not upstream/master)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000000..26605fc84930
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,15 @@
+[project]
+name = "bitcoin-core-deps"
+version = "0.1.0"
+dependencies = [
+ "codespell==2.2.6",
+ "lief==0.13.2",
+ "mypy==1.4.1",
+ "pyzmq==25.1.0",
+ # Removing in favour of packaged nixpkgs bin which is not dynamically linked
+ # "ruff==0.5.5",
+ "vulture==2.6",
+ "pyperf==2.8.0",
+ "matplotlib==3.8.0",
+ "numpy==1.26.0"
+]
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 000000000000..c9b220b6fe46
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,28 @@
+# This file was autogenerated by uv via the following command:
+# uv pip compile pyproject.toml -o requirements.txt
+codespell==2.2.6
+ # via bitcoin-core-deps (pyproject.toml)
+lief==0.13.2
+ # via bitcoin-core-deps (pyproject.toml)
+matplotlib==3.8.0
+ # via bitcoin-core-deps (pyproject.toml)
+mypy==1.4.1
+ # via bitcoin-core-deps (pyproject.toml)
+mypy-extensions==1.0.0
+ # via mypy
+numpy==1.26.0
+ # via bitcoin-core-deps (pyproject.toml)
+psutil==6.1.0
+ # via pyperf
+pyperf==2.8.0
+ # via bitcoin-core-deps (pyproject.toml)
+pyzmq==25.1.0
+ # via bitcoin-core-deps (pyproject.toml)
+toml==0.10.2
+ # via vulture
+tomli==2.0.2
+ # via mypy
+typing-extensions==4.12.2
+ # via mypy
+vulture==2.6
+ # via bitcoin-core-deps (pyproject.toml)
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 000000000000..f0dfd29c0cbb
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,126 @@
+# Copyright 0xB10C, willcl-ark
+{ pkgs ? import
+ (fetchTarball "https://github.com/nixos/nixpkgs/archive/nixos-24.11.tar.gz")
+ { }, }:
+let
+ inherit (pkgs.lib) optionals strings;
+ inherit (pkgs) stdenv;
+
+ # Override the default cargo-flamegraph with a custom fork
+ cargo-flamegraph = pkgs.rustPlatform.buildRustPackage rec {
+ pname =
+ "flamegraph"; # Match the name in Cargo.toml, doesn't seem to work otherwise
+ version = "bitcoin-core";
+
+ src = pkgs.fetchFromGitHub {
+ owner = "willcl-ark";
+ repo = "flamegraph";
+ rev = "bitcoin-core";
+ sha256 = "sha256-tQbr3MYfAiOxeT12V9au5KQK5X5JeGuV6p8GR/Sgen4=";
+ };
+
+ doCheck = false;
+ cargoHash = "sha256-QWPqTyTFSZNJNayNqLmsQSu0rX26XBKfdLROZ9tRjrg=";
+
+ useFetchCargoVendor = true;
+
+ nativeBuildInputs =
+ pkgs.lib.optionals stdenv.hostPlatform.isLinux [ pkgs.makeWrapper ];
+ buildInputs = pkgs.lib.optionals stdenv.hostPlatform.isDarwin
+ [ pkgs.darwin.apple_sdk.frameworks.Security ];
+
+ postFixup = pkgs.lib.optionalString stdenv.hostPlatform.isLinux ''
+ wrapProgram $out/bin/cargo-flamegraph \
+ --set-default PERF ${pkgs.linuxPackages.perf}/bin/perf
+ wrapProgram $out/bin/flamegraph \
+ --set-default PERF ${pkgs.linuxPackages.perf}/bin/perf
+ '';
+ };
+
+ # Hyperfine
+ # Included here because we need master for the `--conclude` flag from pr 719
+ hyperfine = pkgs.rustPlatform.buildRustPackage rec {
+ pname = "hyperfine";
+ name = "hyperfine";
+ version = "e3e86174d9e11dd3a8951990f279c3b85f5fc0b9";
+
+ src = pkgs.fetchFromGitHub {
+ owner = "sharkdp";
+ repo = "hyperfine";
+ rev = version;
+ sha256 = "sha256-WCc7gURd8dFgUC8moxB7y16e1jNKtImwsfXnqU36IrE=";
+ };
+
+ nativeBuildInputs = with pkgs; [ sqlite ];
+
+ cargoHash = "sha256-E46//75Dgg+XClhD2iV86PYYwEE7bLeYMLK5UkyRpyg=";
+
+ meta = with pkgs.lib; {
+ description = "A command-line benchmarking tool.";
+ homepage = "https://github.com/sharkdp/hyperfine";
+ license = licenses.mit;
+ };
+ };
+
+in pkgs.mkShell {
+ nativeBuildInputs = with pkgs; [
+ autoconf
+ automake
+ boost
+ ccache
+ clang_18
+ cmake
+ libevent
+ libtool
+ pkg-config
+ sqlite
+ zeromq
+ ];
+ buildInputs = with pkgs; [
+ just
+ bash
+ git
+ shellcheck
+ python310
+ uv
+
+ # Benchmarking
+ cargo-flamegraph
+ flamegraph
+ hyperfine
+ jq
+ linuxKernel.packages.linux_6_6.perf
+ perf-tools
+ util-linux
+
+ # Binary patching
+ patchelf
+
+ # Guix
+ curl
+ getent
+ ];
+
+ shellHook = ''
+ echo "Bitcoin Core build nix-shell"
+ echo ""
+ echo "Setting up python venv"
+
+ # fixes libstdc++ issues and libgl.so issues
+ export LD_LIBRARY_PATH=${stdenv.cc.cc.lib}/lib/:$LD_LIBRARY_PATH
+
+ uv venv --python 3.10
+ source .venv/bin/activate
+ uv pip install -r pyproject.toml
+
+ patch-binary() {
+ if [ -z "$1" ]; then
+ echo "Usage: patch-binary <path-to-binary>"
+ return 1
+ fi
+ patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" "$1"
+ }
+ echo "Added patch-binary command"
+ echo " Usage: 'patch-binary <path-to-binary>'"
+ '';
+}
diff --git a/src/coins.h b/src/coins.h
index 61fb4af6420d..2e887140d012 100644
--- a/src/coins.h
+++ b/src/coins.h
@@ -239,6 +239,7 @@ class CCoinsViewCursor
virtual bool GetKey(COutPoint &key) const = 0;
virtual bool GetValue(Coin &coin) const = 0;
+ virtual bool SeekAndGetValue(const COutPoint& key, Coin &coin) const = 0;
virtual bool Valid() const = 0;
virtual void Next() = 0;
diff --git a/src/init.cpp b/src/init.cpp
index 1f597cb7cb29..704c05a9d120 100644
--- a/src/init.cpp
+++ b/src/init.cpp
@@ -36,6 +36,7 @@
#include
#include
#include
+#include
#include
#include
#include
@@ -55,6 +56,7 @@
#include
#include
#include
+#include
#include
#include
#include
@@ -140,6 +142,7 @@ using node::VerifyLoadedChainstate;
using util::Join;
using util::ReplaceAll;
using util::ToString;
+using node::SnapshotMetadata;
static constexpr bool DEFAULT_PROXYRANDOMIZE{true};
static constexpr bool DEFAULT_REST_ENABLE{false};
@@ -158,6 +161,44 @@ static constexpr bool DEFAULT_STOPAFTERBLOCKIMPORT{false};
static constexpr int MIN_CORE_FDS = MIN_LEVELDB_FDS + NUM_FDS_MESSAGE_CAPTURE;
static const char* DEFAULT_ASMAP_FILENAME="ip_asn.map";
+bool LoadUTXOSnapshot(NodeContext& node, const fs::path& snapshot_path) {
+ ChainstateManager& chainman = *node.chainman;
+
+ FILE* file{fsbridge::fopen(snapshot_path, "rb")};
+ AutoFile afile{file};
+ if (afile.IsNull()) {
+ LogPrintf("Error: Couldn't open UTXO snapshot file %s for reading\n", snapshot_path.utf8string());
+ return false;
+ }
+
+ SnapshotMetadata metadata{chainman.GetParams().MessageStart()};
+ try {
+ afile >> metadata;
+ } catch (const std::ios_base::failure& e) {
+ LogPrintf("Error: Unable to parse snapshot metadata: %s\n", e.what());
+ return false;
+ }
+
+ auto activation_result{chainman.ActivateSnapshot(afile, metadata, false)};
+ if (!activation_result) {
+ LogPrintf("Error: Unable to load UTXO snapshot: %s\n",
+ util::ErrorString(activation_result).original);
+ return false;
+ }
+
+ // Update services to reflect limited peer capabilities during sync
+ node.connman->RemoveLocalServices(NODE_NETWORK);
+ node.connman->AddLocalServices(NODE_NETWORK_LIMITED);
+
+ CBlockIndex& snapshot_index{*CHECK_NONFATAL(*activation_result)};
+ LogPrintf("Loaded UTXO snapshot: coins=%d, height=%d, hash=%s\n",
+ metadata.m_coins_count,
+ snapshot_index.nHeight,
+ snapshot_index.GetBlockHash().ToString());
+
+ return true;
+}
+
/**
* The PID file facilities.
*/
@@ -497,6 +538,12 @@ void SetupServerArgs(ArgsManager& argsman, bool can_listen_ipc)
argsman.AddArg("-minimumchainwork=", strprintf("Minimum work assumed to exist on a valid chain in hex (default: %s, testnet3: %s, testnet4: %s, signet: %s)", defaultChainParams->GetConsensus().nMinimumChainWork.GetHex(), testnetChainParams->GetConsensus().nMinimumChainWork.GetHex(), testnet4ChainParams->GetConsensus().nMinimumChainWork.GetHex(), signetChainParams->GetConsensus().nMinimumChainWork.GetHex()), ArgsManager::ALLOW_ANY | ArgsManager::DEBUG_ONLY, OptionsCategory::OPTIONS);
argsman.AddArg("-par=", strprintf("Set the number of script verification threads (0 = auto, up to %d, <0 = leave that many cores free, default: %d)",
MAX_SCRIPTCHECK_THREADS, DEFAULT_SCRIPTCHECK_THREADS), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
+ argsman.AddArg("-pausebackgroundsync", strprintf("When a UTXO snapshot is loaded, pause the verification of historical blocks in the background (default: %u)", DEFAULT_PAUSE_BACKGROUND_SYNC), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
+ argsman.AddArg("-loadutxosnapshot=<path>",
+ "Load UTXO set from snapshot file at startup. "
+ "This allows fast synchronization by loading a pre-built UTXO "
+ "snapshot while the full chain validation happens in background.",
+ ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
argsman.AddArg("-persistmempool", strprintf("Whether to save the mempool on shutdown and load on restart (default: %u)", DEFAULT_PERSIST_MEMPOOL), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
argsman.AddArg("-persistmempoolv1",
strprintf("Whether a mempool.dat file created by -persistmempool or the savemempool RPC will be written in the legacy format "
@@ -1268,6 +1315,113 @@ static ChainstateLoadResult InitAndLoadChainstate(
return {status, error};
};
+bool operator==(const Coin& a, const Coin& b)
+{
+ return a.fCoinBase == b.fCoinBase &&
+ a.nHeight == b.nHeight &&
+ a.out == b.out;
+}
+
+void BenchmarkLoadAllUTXOs(const CCoinsViewDB& coins_db)
+{
+ constexpr int max_utxo_count{200'000'000};
+ std::vector<std::pair<COutPoint, Coin>> utxos;
+
+ {
+ LOG_TIME_SECONDS("Loading all UTXOs from disk");
+ for (const auto cursor{coins_db.Cursor()}; cursor->Valid(); cursor->Next()) {
+ COutPoint key;
+ Coin coin;
+ if (cursor->GetKey(key) && cursor->GetValue(coin)) {
+ utxos.emplace_back(key, coin);
+ }
+ }
+ assert(utxos.size() < max_utxo_count);
+ assert(utxos.size() > max_utxo_count / 2);
+ }
+ {
+ LOG_TIME_SECONDS("Populating new cache with all UTXOs");
+ CCoinsView viewDummy;
+ CCoinsViewCache view(&viewDummy);
+ for (const auto& [outpoint, coin] : utxos) {
+ view.AddCoin(outpoint, Coin{coin}, /*possible_overwrite=*/false);
+ }
+ assert(view.GetCacheSize() == utxos.size());
+ }
+
+ // Raw gets vs cursor gets
+
+ {
+ LOG_TIME_SECONDS("Loading UTXOs one-by-one");
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(coins_db.GetCoin(outpoint) == coin);
+ }
+ }
+ {
+ LOG_TIME_SECONDS("Loading UTXOs using cursor");
+ const auto cursor = coins_db.Cursor();
+ Coin next;
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(cursor->SeekAndGetValue(outpoint, next));
+ assert(next == coin);
+ }
+ }
+
+ std::ranges::shuffle(utxos, FastRandomContext());
+ {
+ LOG_TIME_SECONDS("Loading shuffled UTXOs one-by-one");
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(coins_db.GetCoin(outpoint) == coin);
+ }
+ }
+ {
+ LOG_TIME_SECONDS("Loading shuffled UTXOs using cursor");
+ const auto cursor = coins_db.Cursor();
+ Coin next;
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(cursor->SeekAndGetValue(outpoint, next));
+ assert(next == coin);
+ }
+ }
+
+ std::ranges::sort(utxos, [](auto& a, auto& b) { return a.first < b.first; });
+ {
+ LOG_TIME_SECONDS("Loading ascending UTXOs one-by-one");
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(coins_db.GetCoin(outpoint) == coin);
+ }
+ }
+ {
+ LOG_TIME_SECONDS("Loading ascending UTXOs using cursor");
+ const auto cursor = coins_db.Cursor();
+ Coin next;
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(cursor->SeekAndGetValue(outpoint, next));
+ assert(next == coin);
+ }
+ }
+
+ std::ranges::reverse(utxos);
+ {
+ LOG_TIME_SECONDS("Loading descending UTXOs one-by-one");
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(coins_db.GetCoin(outpoint) == coin);
+ }
+ }
+ {
+ LOG_TIME_SECONDS("Loading descending UTXOs using cursor");
+ const auto cursor = coins_db.Cursor();
+ Coin next;
+ for (const auto& [outpoint, coin] : utxos) {
+ assert(cursor->SeekAndGetValue(outpoint, next));
+ assert(next == coin);
+ }
+ }
+
+ fflush(stdout);
+ throw "Done!";
+}
+
bool AppInitMain(NodeContext& node, interfaces::BlockAndHeaderTipInfo* tip_info)
{
const ArgsManager& args = *Assert(node.args);
@@ -1660,6 +1814,17 @@ bool AppInitMain(NodeContext& node, interfaces::BlockAndHeaderTipInfo* tip_info)
}
ChainstateManager& chainman = *Assert(node.chainman);
+ CCoinsViewDB& coins_db{WITH_LOCK(cs_main, return chainman.ActiveChainstate().CoinsDB())};
+ BenchmarkLoadAllUTXOs(coins_db);
+
+ if (args.IsArgSet("-loadutxosnapshot")) {
+ fs::path snapshot_path = fs::u8path(args.GetArg("-loadutxosnapshot", ""));
+ snapshot_path = AbsPathForConfigVal(args, snapshot_path);
+
+ if (!LoadUTXOSnapshot(node, snapshot_path)) {
+ LogPrintf("Failed to load UTXO snapshot from %s", snapshot_path.utf8string());
+ }
+ }
assert(!node.peerman);
node.peerman = PeerManager::make(*node.connman, *node.addrman,
@@ -1810,7 +1975,9 @@ bool AppInitMain(NodeContext& node, interfaces::BlockAndHeaderTipInfo* tip_info)
});
}
- if (ShutdownRequested(node)) {
+ // If -loadutxosnapshot is set, load the snapshot and then shut down, so that a
+ // subsequent run can benchmark only the sync to the chain tip.
+ if (ShutdownRequested(node) || args.IsArgSet("-loadutxosnapshot")) {
return false;
}
diff --git a/src/kernel/chainstatemanager_opts.h b/src/kernel/chainstatemanager_opts.h
index 1b605f3d55df..dee7292b6310 100644
--- a/src/kernel/chainstatemanager_opts.h
+++ b/src/kernel/chainstatemanager_opts.h
@@ -51,6 +51,8 @@ struct ChainstateManagerOpts {
int worker_threads_num{0};
size_t script_execution_cache_bytes{DEFAULT_SCRIPT_EXECUTION_CACHE_BYTES};
size_t signature_cache_bytes{DEFAULT_SIGNATURE_CACHE_BYTES};
+ //! Whether to defer syncing the background chainstate after an assumeutxo snapshot is loaded
+ bool pause_background_sync{false};
};
} // namespace kernel
diff --git a/src/node/chainstatemanager_args.cpp b/src/node/chainstatemanager_args.cpp
index 0ac96c55149a..cef66c730d25 100644
--- a/src/node/chainstatemanager_args.cpp
+++ b/src/node/chainstatemanager_args.cpp
@@ -72,6 +72,8 @@ util::Result ApplyArgsManOptions(const ArgsManager& args, ChainstateManage
opts.signature_cache_bytes = clamped_size_each;
}
+ opts.pause_background_sync = args.GetBoolArg("-pausebackgroundsync", DEFAULT_PAUSE_BACKGROUND_SYNC);
+
return {};
}
} // namespace node
diff --git a/src/node/chainstatemanager_args.h b/src/node/chainstatemanager_args.h
index af13aa8d3ce6..e20b3fc799d1 100644
--- a/src/node/chainstatemanager_args.h
+++ b/src/node/chainstatemanager_args.h
@@ -12,6 +12,8 @@ class ArgsManager;
/** -par default (number of script-checking threads, 0 = auto) */
static constexpr int DEFAULT_SCRIPTCHECK_THREADS{0};
+/** -pausebackgroundsync default */
+static const bool DEFAULT_PAUSE_BACKGROUND_SYNC{false};
namespace node {
[[nodiscard]] util::Result ApplyArgsManOptions(const ArgsManager& args, ChainstateManager::Options& opts);
diff --git a/src/txdb.cpp b/src/txdb.cpp
index 1622039d63b5..2f62d5f5c3de 100644
--- a/src/txdb.cpp
+++ b/src/txdb.cpp
@@ -168,6 +168,7 @@ class CCoinsViewDBCursor: public CCoinsViewCursor
bool GetKey(COutPoint &key) const override;
bool GetValue(Coin &coin) const override;
+ bool SeekAndGetValue(const COutPoint& key, Coin &coin) const override;
bool Valid() const override;
void Next() override;
@@ -213,6 +214,14 @@ bool CCoinsViewDBCursor::GetValue(Coin &coin) const
return pcursor->GetValue(coin);
}
+bool CCoinsViewDBCursor::SeekAndGetValue(const COutPoint& key, Coin &coin) const
+{
+ pcursor->Seek(CoinEntry(&key));
+ if (!pcursor->Valid()) return false;
+ if (CoinEntry e(&keyTmp.second); !pcursor->GetKey(e) || keyTmp.second != key) return false; // TODO should compare the serialized form
+ return pcursor->GetValue(coin);
+}
+
bool CCoinsViewDBCursor::Valid() const
{
return keyTmp.first == DB_COIN;
diff --git a/src/validation.cpp b/src/validation.cpp
index 64588e802d76..a842c2552197 100644
--- a/src/validation.cpp
+++ b/src/validation.cpp
@@ -2985,6 +2985,77 @@ void Chainstate::PruneAndFlush()
}
}
+/*
+ * Author: David Robert Nadeau
+ * Site: http://NadeauSoftware.com/
+ * License: Creative Commons Attribution 3.0 Unported License
+ * http://creativecommons.org/licenses/by/3.0/deed.en_US
+ */
+#if defined(_WIN32)
+#include <windows.h>
+#include <psapi.h>
+
+#elif defined(__unix__) || defined(__unix) || defined(unix) || (defined(__APPLE__) && defined(__MACH__))
+#include <unistd.h>
+#include <sys/resource.h>
+
+#if defined(__APPLE__) && defined(__MACH__)
+#include <mach/mach.h>
+
+#elif (defined(_AIX) || defined(__TOS__AIX__)) || (defined(__sun__) || defined(__sun) || defined(sun) && (defined(__SVR4) || defined(__svr4__)))
+#include <fcntl.h>
+#include <procfs.h>
+
+#elif defined(__linux__) || defined(__linux) || defined(linux) || defined(__gnu_linux__)
+#include <stdio.h>
+
+#endif
+
+#else
+#error "Cannot define getCurrentRSS( ) for an unknown OS."
+#endif
+
+/**
+ * Returns the current resident set size (physical memory use) measured
+ * in bytes, or zero if the value cannot be determined on this OS.
+ */
+size_t getCurrentRSS( )
+{
+#if defined(_WIN32)
+ /* Windows -------------------------------------------------- */
+ PROCESS_MEMORY_COUNTERS info;
+ GetProcessMemoryInfo( GetCurrentProcess( ), &info, sizeof(info) );
+ return (size_t)info.WorkingSetSize;
+
+#elif defined(__APPLE__) && defined(__MACH__)
+ /* OSX ------------------------------------------------------ */
+ struct mach_task_basic_info info;
+ mach_msg_type_number_t infoCount = MACH_TASK_BASIC_INFO_COUNT;
+ if ( task_info( mach_task_self( ), MACH_TASK_BASIC_INFO,
+ (task_info_t)&info, &infoCount ) != KERN_SUCCESS )
+ return (size_t)0L; /* Can't access? */
+ return (size_t)info.resident_size;
+
+#elif defined(__linux__) || defined(__linux) || defined(linux) || defined(__gnu_linux__)
+ /* Linux ---------------------------------------------------- */
+ long rss = 0L;
+ FILE* fp = NULL;
+ if ( (fp = fopen( "/proc/self/statm", "r" )) == NULL )
+ return (size_t)0L; /* Can't open? */
+ if ( fscanf( fp, "%*s%ld", &rss ) != 1 )
+ {
+ fclose( fp );
+ return (size_t)0L; /* Can't read? */
+ }
+ fclose( fp );
+ return (size_t)rss * (size_t)sysconf( _SC_PAGESIZE);
+
+#else
+ /* AIX, BSD, Solaris, and Unknown OS ------------------------ */
+ return (size_t)0L; /* Unsupported. */
+#endif
+}
+
static void UpdateTipLog(
const ChainstateManager& chainman,
const CCoinsViewCache& coins_tip,
@@ -2995,7 +3066,7 @@ static void UpdateTipLog(
{
AssertLockHeld(::cs_main);
- LogPrintf("%s%s: new best=%s height=%d version=0x%08x log2_work=%f tx=%lu date='%s' progress=%f cache=%.1fMiB(%utxo)%s\n",
+ LogInfo("%s%s: new best=%s height=%d version=0x%08x log2_work=%f tx=%lu date='%s' progress=%f cache=%.1fMiB(%utxo)%s mem=%dMiB",
prefix, func_name,
tip->GetBlockHash().ToString(), tip->nHeight, tip->nVersion,
log(tip->nChainWork.getdouble()) / log(2.0), tip->m_chain_tx_count,
@@ -3003,7 +3074,8 @@ static void UpdateTipLog(
chainman.GuessVerificationProgress(tip),
coins_tip.DynamicMemoryUsage() * (1.0 / (1 << 20)),
coins_tip.GetCacheSize(),
- !warning_messages.empty() ? strprintf(" warning='%s'", warning_messages) : "");
+ !warning_messages.empty() ? strprintf(" warning='%s'", warning_messages) : "",
+ getCurrentRSS() >> 20);
}
void Chainstate::UpdateTip(const CBlockIndex* pindexNew)
@@ -6464,6 +6536,12 @@ void ChainstateManager::RecalculateBestHeader()
}
}
+bool ChainstateManager::BackgroundSyncInProgress() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) {
+ if (!IsUsable(m_snapshot_chainstate.get())) return false;
+ if (!IsUsable(m_ibd_chainstate.get())) return false;
+ return !m_options.pause_background_sync;
+}
+
bool ChainstateManager::ValidatedSnapshotCleanup()
{
AssertLockHeld(::cs_main);
diff --git a/src/validation.h b/src/validation.h
index 9e4fdbe68092..0cb571910dd9 100644
--- a/src/validation.h
+++ b/src/validation.h
@@ -1113,9 +1113,7 @@ class ChainstateManager
CBlockIndex* ActiveTip() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) { return ActiveChain().Tip(); }
//! The state of a background sync (for net processing)
- bool BackgroundSyncInProgress() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) {
- return IsUsable(m_snapshot_chainstate.get()) && IsUsable(m_ibd_chainstate.get());
- }
+ bool BackgroundSyncInProgress() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex());
//! The tip of the background sync chain
const CBlockIndex* GetBackgroundSyncTip() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) {
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 000000000000..090e5f1cb4f1
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,251 @@
+version = 1
+requires-python = ">=3.10"
+
+[[package]]
+name = "bitcoin-core-deps"
+version = "0.1.0"
+source = { virtual = "." }
+dependencies = [
+ { name = "codespell" },
+ { name = "lief" },
+ { name = "mypy" },
+ { name = "pyperf" },
+ { name = "pyzmq" },
+ { name = "vulture" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "codespell", specifier = "==2.2.6" },
+ { name = "lief", specifier = "==0.13.2" },
+ { name = "mypy", specifier = "==1.4.1" },
+ { name = "pyperf" },
+ { name = "pyzmq", specifier = "==25.1.0" },
+ { name = "vulture", specifier = "==2.6" },
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 },
+ { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 },
+ { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 },
+ { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 },
+ { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 },
+ { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 },
+ { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 },
+ { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 },
+ { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 },
+ { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 },
+ { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 },
+ { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 },
+ { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 },
+ { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 },
+ { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 },
+ { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 },
+ { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 },
+ { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 },
+ { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 },
+ { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 },
+ { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 },
+ { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 },
+ { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 },
+ { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 },
+ { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 },
+ { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 },
+ { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 },
+ { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 },
+ { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 },
+ { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 },
+ { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 },
+ { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 },
+ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 },
+ { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 },
+ { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 },
+ { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 },
+ { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 },
+ { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 },
+ { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 },
+ { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 },
+ { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 },
+ { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 },
+ { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 },
+ { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 },
+ { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 },
+ { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 },
+]
+
+[[package]]
+name = "codespell"
+version = "2.2.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e1/97/df3e00b4d795c96233e35d269c211131c5572503d2270afb6fed7d859cc2/codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9", size = 300968 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/46/e0/5437cc96b74467c4df6e13b7128cc482c48bb43146fb4c11cf2bcd604e1f/codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07", size = 301382 },
+]
+
+[[package]]
+name = "lief"
+version = "0.13.2"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d7/e2/c4125c279eb2a23ecc86cdb188ed06e9d81a9c700e9412f9be866afc2c7d/lief-0.13.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:0390cfaaf0e9aed46bebf26f00f34852768f76bc7f90abf7ceb384566200e5f5", size = 3424746 },
+ { url = "https://files.pythonhosted.org/packages/5f/d6/72235d648c6630c37ef52b9f6f4e2f3337842bc4b08c75abcae3052b2c17/lief-0.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5581bf0072c1e7a9ea2fb2e2252b8582016e8b298804b5461e552b402c9cd4e9", size = 3249141 },
+ { url = "https://files.pythonhosted.org/packages/d7/cc/9895dff094cad3e88636195640b4b47caefe3d300d3f37b653bd109348df/lief-0.13.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:dbbf2fb3d7807e815f345c77e287da162e081100f059ec03005995befc295d7f", size = 3793938 },
+ { url = "https://files.pythonhosted.org/packages/0d/1b/f4bf63bfce187ae210980bdd1a20ea7d8e080381eef09e7d26c585eaa614/lief-0.13.2-cp310-cp310-manylinux_2_24_x86_64.whl", hash = "sha256:d344d37334c2b488dc02f04cb13c22cd61aa065eeb9bca7424588e0c8c23bdfb", size = 4045328 },
+ { url = "https://files.pythonhosted.org/packages/2c/2a/abac2e42c3cc56f2b5020e58b99f700c4d3236d49451607add0f628d737b/lief-0.13.2-cp310-cp310-win32.whl", hash = "sha256:bc041b28b94139843a33c014e355822a9276b35f3c5ae10d82da56bf572f8222", size = 2493454 },
+ { url = "https://files.pythonhosted.org/packages/ed/14/34a12787dc4328227e0e84a97db8142aa1e2b33e0aabc538e93abf7d6e5a/lief-0.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:01d4075bbc3541e9dd3ef008045fa1eb128294a0c5b0c1f69ce60d8948d248c7", size = 3089949 },
+ { url = "https://files.pythonhosted.org/packages/2e/95/9d7377095fb7cf195aca8f64d9696705c71884dcba16663472ce17139b9c/lief-0.13.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6570dacebe107ad60c2ba0968d1a865d316009d43cc85af3719d3eeb0911abf3", size = 3424752 },
+ { url = "https://files.pythonhosted.org/packages/00/2b/7ac8e15ca198a5c50397aec32102e81ef97fd573a4285ee889ec9084d110/lief-0.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7ce2e3f7c791efba327c2bb3499dbef81e682027109045a9bae696c62e2aeeb0", size = 3249263 },
+ { url = "https://files.pythonhosted.org/packages/d6/8d/b50cc4ad91278015e5ac18fc76f32098ed6887c371bef6f4997af4cb97c9/lief-0.13.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:11ab900e0644b6735ecdef2bbd04439b4866a527650fc054470c195d6cfe2917", size = 3792343 },
+ { url = "https://files.pythonhosted.org/packages/6b/bd/ea25e9c8ff0a55b5534e5881fa6e5eeca0ed3eeb7c772a276984b8c182d9/lief-0.13.2-cp311-cp311-manylinux_2_24_x86_64.whl", hash = "sha256:042ad2105a136b11a7494b9af8178468e8cb32b8fa2a0a55cb659a5605aeb069", size = 4045112 },
+ { url = "https://files.pythonhosted.org/packages/d9/06/ddacd724f65fa8e7eca438c335aa77878a260fbc714cdba252387c33a4cc/lief-0.13.2-cp311-cp311-win32.whl", hash = "sha256:1ce289b6ab3cf4be654270007e8a2c0d2e42116180418c29d3ce83762955de63", size = 2493336 },
+ { url = "https://files.pythonhosted.org/packages/82/95/1de9a497946fed9d15f847d8a4a0630dfda6d186c044f8731f53d0d3d758/lief-0.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:eccb248ffb598e410fd2ef7c1f171a3cde57a40c9bb8c4fa15d8e7b90eb4eb2d", size = 3090328 },
+]
+
+[[package]]
+name = "mypy"
+version = "1.4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "tomli", marker = "python_full_version < '3.11'" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/28/d8a8233ff167d06108e53b7aefb4a8d7350adbbf9d7abd980f17fdb7a3a6/mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b", size = 2855162 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fb/3b/1c7363863b56c059f60a1dfdca9ac774a22ba64b7a4da0ee58ee53e5243f/mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8", size = 10451043 },
+ { url = "https://files.pythonhosted.org/packages/a7/24/6f0df1874118839db1155fed62a4bd7e80c181367ff8ea07d40fbaffcfb4/mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878", size = 9542079 },
+ { url = "https://files.pythonhosted.org/packages/04/5c/deeac94fcccd11aa621e6b350df333e1b809b11443774ea67582cc0205da/mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd", size = 11974913 },
+ { url = "https://files.pythonhosted.org/packages/e5/2f/de3c455c54e8cf5e37ea38705c1920f2df470389f8fc051084d2dd8c9c59/mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc", size = 12044492 },
+ { url = "https://files.pythonhosted.org/packages/e7/d3/6f65357dcb68109946de70cd55bd2e60f10114f387471302f48d54ff5dae/mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1", size = 8831655 },
+ { url = "https://files.pythonhosted.org/packages/94/01/e34e37a044325af4d4af9825c15e8a0d26d89b5a9624b4d0908449d3411b/mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462", size = 10338636 },
+ { url = "https://files.pythonhosted.org/packages/92/58/ccc0b714ecbd1a64b34d8ce1c38763ff6431de1d82551904ecc3711fbe05/mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258", size = 9444172 },
+ { url = "https://files.pythonhosted.org/packages/73/72/dfc0b46e6905eafd598e7c48c0c4f2e232647e4e36547425c64e6c850495/mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2", size = 11855450 },
+ { url = "https://files.pythonhosted.org/packages/66/f4/60739a2d336f3adf5628e7c9b920d16e8af6dc078550d615e4ba2a1d7759/mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7", size = 11928679 },
+ { url = "https://files.pythonhosted.org/packages/8c/26/6ff2b55bf8b605a4cc898883654c2ca4dd4feedf0bb04ecaacf60d165cde/mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01", size = 8831134 },
+ { url = "https://files.pythonhosted.org/packages/3d/9a/e13addb8d652cb068f835ac2746d9d42f85b730092f581bb17e2059c28f1/mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4", size = 2451741 },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 },
+]
+
+[[package]]
+name = "psutil"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 },
+ { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 },
+ { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 },
+ { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 },
+ { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 },
+ { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 },
+ { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
+]
+
+[[package]]
+name = "pyperf"
+version = "2.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "psutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/02/2a/758b3c4cc9843bd385bc595b777345fbf4cd00733b7830cdff43e30002c0/pyperf-2.8.0.tar.gz", hash = "sha256:b30a20465819daf102b6543b512f6799a5a879ff2a123981e6cd732d0e6a7a79", size = 225186 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7f/f7/bb8965520a9b0a3d720b282e67b5cb7f3305b96e4bacaee2794550e67e94/pyperf-2.8.0-py3-none-any.whl", hash = "sha256:1a775b5a09882f18bf876430ef78e07646f773f50774546f5f6a8b34d60e3968", size = 142508 },
+]
+
+[[package]]
+name = "pyzmq"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "implementation_name == 'pypy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/64/9c/2b2614b0b86ff703b3a33ea5e044923bd7d100adc8c829d579a9b71ea9e7/pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957", size = 1224640 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/53/17/6a70f84b79e361af34f6c99064ecf9e87112c4c48b9c7ea78f8e680b57d8/pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d", size = 1826810 },
+ { url = "https://files.pythonhosted.org/packages/2f/53/fc7dbdd32e275aee0961e2a5bed1bb64223846f959fd6e0c9a39aab08eed/pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101", size = 1236489 },
+ { url = "https://files.pythonhosted.org/packages/04/0b/bff5b6c1680e248bad2df8248a060645709fe2aef9689e9f7c81c587bad4/pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc", size = 864304 },
+ { url = "https://files.pythonhosted.org/packages/5e/9e/32074bd8bcf2a5cf282d8817458fd5479c68b487b6c3a5d4627711ad38f5/pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f", size = 1116061 },
+ { url = "https://files.pythonhosted.org/packages/fa/fb/a114ba641eb873c165106d3c8ee75eb49d6ea3204168808708d866de360d/pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89", size = 1065090 },
+ { url = "https://files.pythonhosted.org/packages/ca/db/f9976803f1a660e753d0f2426065975bad5db8272fd5284efaf488dc0ce1/pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3", size = 1062464 },
+ { url = "https://files.pythonhosted.org/packages/94/3a/c3964c0a86c3535ae240799d3b7c8e13527e7a092080dda9012b1401fa86/pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9", size = 1391159 },
+ { url = "https://files.pythonhosted.org/packages/a1/87/92556ffa8fbe7dc497d847e39d5c46134f9ad047b23f5bcefc8fbd0c2c9c/pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80", size = 1721009 },
+ { url = "https://files.pythonhosted.org/packages/66/96/129706be681649f43bde93811416f566acfefcd3fb18156d5df349c360ab/pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c", size = 1611290 },
+ { url = "https://files.pythonhosted.org/packages/64/db/e19f69fe9b1a4e53f6382274f553358e2e7305d2a2b9d9db36087bf52d5e/pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1", size = 880070 },
+ { url = "https://files.pythonhosted.org/packages/32/e4/ce4f94009f84c2a688082c2674d490d2e20e0c9058087f5358a2bf29ddf1/pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba", size = 1137827 },
+ { url = "https://files.pythonhosted.org/packages/bb/80/ae792378f98d6d0e39c975c334603d3d2535f7897707fe91f31d37f94fdb/pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d", size = 1816147 },
+ { url = "https://files.pythonhosted.org/packages/5a/b6/3c2ddd09aa24352e4f6aade53e9b9a1816c0774c844f11b1a2f508ddc0be/pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a", size = 1230845 },
+ { url = "https://files.pythonhosted.org/packages/26/bb/80535157e8811095901f98688839092afb6dcaf2ff154aa8fa2e575f540d/pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883", size = 866042 },
+ { url = "https://files.pythonhosted.org/packages/7c/65/bccec1eae7c0e089d90648f350e6c2ff40ccb8c6d1b929548f4cd304b1f7/pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b", size = 1116285 },
+ { url = "https://files.pythonhosted.org/packages/b7/cb/2a36d3eed310efb342fbb7b4adf6b05f46401c4b937154bd1c9b703314e0/pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5", size = 1066280 },
+ { url = "https://files.pythonhosted.org/packages/66/f5/15db4c297957f049cd4dcd35eb7fbe9098a72489e0abdb289c529d7327cc/pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc", size = 1061673 },
+ { url = "https://files.pythonhosted.org/packages/fa/40/7729719e38324e5e9f2e77f6131fc253f063a3741eab170ef610196098e8/pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994", size = 1393337 },
+ { url = "https://files.pythonhosted.org/packages/fd/12/0324dcb2554cd3f2ebb851ddbfbac27c4bb384394ba4a8978dec093fe71d/pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c", size = 1723679 },
+ { url = "https://files.pythonhosted.org/packages/04/15/b8ab292f0b74e0440547185fb67167c87454a2b3be429d64de569f7142a2/pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a", size = 1612761 },
+ { url = "https://files.pythonhosted.org/packages/22/3e/3670e36c6f42e124492ddd2af550ca13bd4a9f1edd562e1ae7c35a1f230b/pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425", size = 878704 },
+ { url = "https://files.pythonhosted.org/packages/a0/db/4e586c563b48dec09b8f7c2728b905e29db61af89b5c58e4eba9ad36fdec/pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6", size = 1135692 },
+]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 },
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/35/b9/de2a5c0144d7d75a57ff355c0c24054f965b2dc3036456ae03a51ea6264b/tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed", size = 16096 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/db/ce8eda256fa131af12e0a76d481711abe4681b6923c27efb9a255c9e4594/tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", size = 13237 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
+]
+
+[[package]]
+name = "vulture"
+version = "2.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "toml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/18/e51a6e575047d19dbcd7394f05b2afa6191fe9ce30bd5bcfb3f850501e0c/vulture-2.6.tar.gz", hash = "sha256:2515fa848181001dc8a73aba6a01a1a17406f5d372f24ec7f7191866f9f4997e", size = 53777 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5e/9d/3c4df0c704ddb5ecf07fcd92cfe6d4a5dc000b7f5459afcb7e98a2ffea1e/vulture-2.6-py2.py3-none-any.whl", hash = "sha256:e792e903ccc063ec4873a8979dcf11b51ea3d65a2d3b31c113d47be48f0cdcae", size = 26494 },
+]