diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 2aea4e6705fc..42479d8a2824 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -16,6 +16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }} DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }} @@ -32,10 +33,8 @@ env: AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -60,6 +59,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml deleted file mode 100644 index 421261fb436f..000000000000 --- a/.github/workflows/create_release.yml +++ /dev/null @@ -1,353 +0,0 @@ -name: CreateRelease - -concurrency: - group: release - -env: - PYTHONUNBUFFERED: 1 - -'on': - workflow_dispatch: - inputs: - ref: - description: 'Git reference (branch or commit sha) from which to create the release' - required: true - type: string - type: - description: 'The type of release: "new" for a new release or "patch" for a patch release' - required: true - type: choice - options: - - patch - - new - only-repo: - description: 'Run only repos updates including docker (repo-recovery, tests)' - required: false - default: false - type: boolean - only-docker: - description: 'Run only docker builds (repo-recovery, tests)' - required: false - default: false - type: boolean - dry-run: - description: 'Dry run' - required: false - default: false - type: boolean - workflow_call: - inputs: - ref: - description: 'Git reference (branch or commit sha) from which to create the release' - required: true - type: string - type: - description: 'The type of release: "new" for a new release or "patch" for a patch release' - required: true - type: string - only-repo: - description: 'Run only repos updates including docker (repo-recovery, tests)' - required: false - default: false - type: boolean - only-docker: - description: 'Run only docker builds (repo-recovery, tests)' - required: false - default: false - type: boolean - dry-run: - description: 'Dry run' - required: false - default: false - type: boolean - secrets: - ROBOT_CLICKHOUSE_COMMIT_TOKEN: - -jobs: - CreateRelease: - env: - GH_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} - runs-on: [self-hosted, release-maker] - steps: - - name: Check out repository code - uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f - with: - token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}} - fetch-depth: 0 - - name: Debug Info - uses: ./.github/actions/debug - - name: Prepare Release Info - shell: bash - run: | - if [ ${{ inputs.only-repo }} == "true" ] || [ ${{ inputs.only-docker }} == "true" ]; then - git tag -l ${{ inputs.ref }} || { echo "With 
only-repo/docker option ref must be a valid release tag"; exit 1; } - fi - python3 ./tests/ci/create_release.py --prepare-release-info \ - --ref ${{ inputs.ref }} --release-type ${{ inputs.type }} \ - ${{ inputs.dry-run == true && '--dry-run' || '' }} \ - ${{ (inputs.only-repo == true || inputs.only-docker == true) && '--skip-tag-check' || '' }} - echo "::group::Release Info" - python3 -m json.tool /tmp/release_info.json - echo "::endgroup::" - release_tag=$(jq -r '.release_tag' /tmp/release_info.json) - commit_sha=$(jq -r '.commit_sha' /tmp/release_info.json) - is_latest=$(jq -r '.latest' /tmp/release_info.json) - echo "Release Tag: $release_tag" - echo "RELEASE_TAG=$release_tag" >> "$GITHUB_ENV" - echo "COMMIT_SHA=$commit_sha" >> "$GITHUB_ENV" - if [ "$is_latest" == "true" ]; then - echo "DOCKER_TAG_TYPE=release-latest" >> "$GITHUB_ENV" - echo "IS_LATEST=1" >> "$GITHUB_ENV" - else - echo "DOCKER_TAG_TYPE=release" >> "$GITHUB_ENV" - echo "IS_LATEST=0" >> "$GITHUB_ENV" - fi - - name: Download All Release Artifacts - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/create_release.py --download-packages ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Push Git Tag for the Release - if: ${{ ! inputs.only-repo && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/create_release.py --push-release-tag ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Push New Release Branch - if: ${{ inputs.type == 'new' && ! inputs.only-repo && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/create_release.py --push-new-release-branch ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Bump CH Version and Update Contributors' List - if: ${{ ! inputs.only-repo && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/create_release.py --create-bump-version-pr ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Bump Docker versions, Changelog, Security - if: ${{ inputs.type == 'patch' && ! inputs.only-repo && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/create_release.py --set-progress-started --progress "update changelog, docker version, security" - - # git checkout master # in case WF started from feature branch - echo "List versions" - ./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv - echo "Update docker version" - ./utils/list-versions/update-docker-version.sh - echo "Generate ChangeLog" - export CI=1 - docker pull clickhouse/style-test:latest - docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 -e CI=1 --network=host \ - --volume=".:/wd" --workdir="/wd" \ - clickhouse/style-test:latest \ - ./tests/ci/changelog.py -v --debug-helpers \ - --gh-user-or-token ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} \ - --jobs=5 \ - --output="./docs/changelogs/${{ env.RELEASE_TAG }}.md" ${{ env.RELEASE_TAG }} - git add ./docs/changelogs/${{ env.RELEASE_TAG }}.md - echo "Generate Security" - python3 ./utils/security-generator/generate_security.py > SECURITY.md - git diff HEAD - - name: Create ChangeLog PR - if: ${{ inputs.type == 'patch' && ! inputs.dry-run && ! inputs.only-repo && ! 
inputs.only-docker }} - uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6 - with: - author: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>" - token: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} - committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>" - commit-message: Update version_date.tsv and changelogs after ${{ env.RELEASE_TAG }} - branch: auto/${{ env.RELEASE_TAG }} - base: master - assignees: ${{ github.event.sender.login }} # assign the PR to the tag pusher - delete-branch: true - title: Update version_date.tsv and changelog after ${{ env.RELEASE_TAG }} - labels: do not test - body: | - Update version_date.tsv and changelogs after ${{ env.RELEASE_TAG }} - ### Changelog category (leave one): - - Not for changelog (changelog entry is not required) - - name: Complete previous steps and Restore git state - if: ${{ inputs.type == 'patch' && ! inputs.only-repo && ! inputs.only-docker }} - shell: bash - run: | - git reset --hard HEAD - git checkout "$GITHUB_REF_NAME" - python3 ./tests/ci/create_release.py --set-progress-completed - - name: Create GH Release - if: ${{ inputs.type == 'patch' && ! inputs.only-repo && ! inputs.only-docker}} - shell: bash - run: | - python3 ./tests/ci/create_release.py --create-gh-release ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Export TGZ Packages - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/artifactory.py --export-tgz ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Test TGZ Packages - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/artifactory.py --test-tgz ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Export RPM Packages - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/artifactory.py --export-rpm ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Test RPM Packages - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/artifactory.py --test-rpm ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Export Debian Packages - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/artifactory.py --export-debian ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Test Debian Packages - if: ${{ inputs.type == 'patch' && ! inputs.only-docker }} - shell: bash - run: | - python3 ./tests/ci/artifactory.py --test-debian ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Docker clickhouse/clickhouse-server building - if: ${{ inputs.type == 'patch' && inputs.dry-run != true }} - shell: bash - run: | - cd "./tests/ci" - python3 ./create_release.py --set-progress-started --progress "docker server release" - export DOCKER_IMAGE="clickhouse/clickhouse-server" - - # We must use docker file from the release commit - git checkout "${{ env.RELEASE_TAG }}" - python3 ./version_helper.py --export > /tmp/version.sh - .
/tmp/version.sh - - if [[ $CLICKHOUSE_VERSION_STRING =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - echo "ClickHouse version: $CLICKHOUSE_VERSION_STRING" - else - echo "Invalid version string: $CLICKHOUSE_VERSION_STRING" - exit 1 - fi - CLICKHOUSE_VERSION_MINOR=${CLICKHOUSE_VERSION_STRING%.*} - CLICKHOUSE_VERSION_MAJOR=${CLICKHOUSE_VERSION_MINOR%.*} - - # Define build configurations - configs=( - "ubuntu:../../docker/server/Dockerfile.ubuntu" - "alpine:../../docker/server/Dockerfile.alpine" - ) - - for config in "${configs[@]}"; do - # Split the config into variant and Dockerfile path - variant=${config%%:*} - dockerfile=${config##*:} - - VERSION_SUFFIX=$([ "$variant" = "ubuntu" ] && echo "" || echo "-$variant") - LABEL_VERSION="${CLICKHOUSE_VERSION_STRING}${VERSION_SUFFIX}" - TAGS=( - "--tag=${DOCKER_IMAGE}:${CLICKHOUSE_VERSION_STRING}${VERSION_SUFFIX}" - "--tag=${DOCKER_IMAGE}:${CLICKHOUSE_VERSION_MINOR}${VERSION_SUFFIX}" - "--tag=${DOCKER_IMAGE}:${CLICKHOUSE_VERSION_MAJOR}${VERSION_SUFFIX}" - ) - - if [ "$IS_LATEST" = "1" ]; then - TAGS+=("--tag=${DOCKER_IMAGE}:latest${VERSION_SUFFIX}") - fi - - echo "Following tags will be created: ${TAGS[*]}" - - # shellcheck disable=SC2086,SC2048 - docker buildx build \ - --platform=linux/amd64,linux/arm64 \ - --output=type=registry \ - --label=com.clickhouse.build.version="$LABEL_VERSION" \ - ${TAGS[*]} \ - --build-arg=VERSION="$CLICKHOUSE_VERSION_STRING" \ - --progress=plain \ - --file="$dockerfile" \ - ../../docker/server - done - - git checkout - - python3 ./create_release.py --set-progress-completed - - name: Docker clickhouse/clickhouse-keeper building - if: ${{ inputs.type == 'patch' && inputs.dry-run != true }} - shell: bash - run: | - cd "./tests/ci" - python3 ./create_release.py --set-progress-started --progress "docker keeper release" - - export DOCKER_IMAGE="clickhouse/clickhouse-keeper" - - # We must use docker file from the release commit - git checkout "${{ env.RELEASE_TAG }}" - python3 ./version_helper.py --export > /tmp/version.sh - . 
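
The server and keeper docker steps in this deleted workflow derive the shorter docker tags from the full four-component version string using nothing but bash parameter expansion. A minimal standalone sketch of that derivation (the version number here is hypothetical, for illustration only):

#!/usr/bin/env bash
# Hypothetical four-component version, as exported by version_helper.py --export
CLICKHOUSE_VERSION_STRING="24.8.14.10459"
# '%.*' strips the shortest trailing '.<component>', so each step drops one component
CLICKHOUSE_VERSION_MINOR=${CLICKHOUSE_VERSION_STRING%.*}   # -> 24.8.14
CLICKHOUSE_VERSION_MAJOR=${CLICKHOUSE_VERSION_MINOR%.*}    # -> 24.8
echo "$CLICKHOUSE_VERSION_STRING / $CLICKHOUSE_VERSION_MINOR / $CLICKHOUSE_VERSION_MAJOR"

Each of the three values then becomes its own --tag argument, so a single buildx push updates the exact, minor, and major tags at once.
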
/tmp/version.sh - - if [[ $CLICKHOUSE_VERSION_STRING =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - echo "ClickHouse version: $CLICKHOUSE_VERSION_STRING" - else - echo "Invalid version string: $CLICKHOUSE_VERSION_STRING" - exit 1 - fi - CLICKHOUSE_VERSION_MINOR=${CLICKHOUSE_VERSION_STRING%.*} - CLICKHOUSE_VERSION_MAJOR=${CLICKHOUSE_VERSION_MINOR%.*} - - # Define build configurations - configs=( - "ubuntu:../../docker/keeper/Dockerfile.ubuntu" - "alpine:../../docker/keeper/Dockerfile.alpine" - ) - - for config in "${configs[@]}"; do - # Split the config into variant and Dockerfile path - variant=${config%%:*} - dockerfile=${config##*:} - - VERSION_SUFFIX=$([ "$variant" = "ubuntu" ] && echo "" || echo "-$variant") - LABEL_VERSION="${CLICKHOUSE_VERSION_STRING}${VERSION_SUFFIX}" - TAGS=( - "--tag=${DOCKER_IMAGE}:${CLICKHOUSE_VERSION_STRING}${VERSION_SUFFIX}" - "--tag=${DOCKER_IMAGE}:${CLICKHOUSE_VERSION_MINOR}${VERSION_SUFFIX}" - "--tag=${DOCKER_IMAGE}:${CLICKHOUSE_VERSION_MAJOR}${VERSION_SUFFIX}" - ) - - if [ "$IS_LATEST" = "1" ]; then - TAGS+=("--tag=${DOCKER_IMAGE}:latest${VERSION_SUFFIX}") - fi - - echo "Following tags will be created: ${TAGS[*]}" - - # shellcheck disable=SC2086,SC2048 - docker buildx build \ - --platform=linux/amd64,linux/arm64 \ - --output=type=registry \ - --label=com.clickhouse.build.version="$LABEL_VERSION" \ - ${TAGS[*]} \ - --build-arg=VERSION="$CLICKHOUSE_VERSION_STRING" \ - --progress=plain \ - --file="$dockerfile" \ - ../../docker/keeper - done - - git checkout - - python3 ./create_release.py --set-progress-completed - # check out back if previous steps failed - - name: Checkout back - if: ${{ ! cancelled() }} - shell: bash - run: | - git checkout ${{ github.ref }} - - name: Update release info. Merge created PRs - shell: bash - run: | - python3 ./tests/ci/create_release.py --merge-prs ${{ inputs.dry-run == true && '--dry-run' || '' }} - - name: Set current Release progress to Completed with OK - shell: bash - run: | - # dummy stage to finalize release info with "progress: completed; status: OK" - python3 ./tests/ci/create_release.py --set-progress-started --progress "completed" - python3 ./tests/ci/create_release.py --set-progress-completed - - name: Post Slack Message - if: ${{ !cancelled() }} - shell: bash - run: | - python3 ./tests/ci/create_release.py --post-status ${{ inputs.dry-run == true && '--dry-run' || '' }} diff --git a/.github/workflows/hourly.yml b/.github/workflows/hourly.yml index 25e32f4ba0f6..717de78bfef1 100644 --- a/.github/workflows/hourly.yml +++ b/.github/workflows/hourly.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: @@ -36,6 +37,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} @@ -77,6 +79,7 @@ jobs: collect_flaky_tests: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64] needs: [config_workflow] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'Q29sbGVjdCBmbGFreSB0ZXN0cw==') }} name: "Collect flaky tests" outputs: data: ${{ steps.run.outputs.DATA }} @@ -123,6 +126,7 @@ jobs:
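
The "if:" conditions added throughout these workflows (the hourly.yml hunk above and the job definitions below) skip a job whose name already appears in the praktika cache: cache_success_base64 holds the base64-encoded names of jobs that succeeded in a previous run, so each condition compares against the current job's encoded name. A quick sketch of the correspondence, verifiable in any shell:

# The opaque literals in the conditions are just job names, base64-encoded:
echo -n 'Collect flaky tests' | base64                 # -> Q29sbGVjdCBmbGFreSB0ZXN0cw==
echo 'QXV0b2Fzc2lnbiBhcHByb3ZlcnM=' | base64 --decode  # -> Autoassign approvers

The remaining clauses — !cancelled() and the two contains(needs.*.outputs.pipeline_status, ...) checks — keep a job from starting when any dependency failed or never reported a status.
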
autoassign_approvers: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64] needs: [config_workflow] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QXV0b2Fzc2lnbiBhcHByb3ZlcnM=') }} name: "Autoassign approvers" outputs: data: ${{ steps.run.outputs.DATA }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 8a1ba5e73958..d4f8f73fe5bb 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -16,6 +16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }} @@ -30,10 +31,8 @@ env: AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -58,6 +57,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} @@ -2540,6 +2540,100 @@ jobs: python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "MasterCI" --ci |& tee ./ci/tmp/job.log fi + stateless_tests_arm_asan_azure_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_arm_asan] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbiwgYXp1cmUsIHBhcmFsbGVsKQ==') }} + name: "Stateless tests (arm_asan, azure, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (arm_asan, azure, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (arm_asan, azure, parallel)' --workflow "MasterCI" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (arm_asan, azure, parallel)' --workflow "MasterCI" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_arm_asan_azure_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_arm_asan] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbiwgYXp1cmUsIHNlcXVlbnRpYWwp') }} + name: "Stateless tests (arm_asan, azure, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (arm_asan, azure, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (arm_asan, azure, sequential)' --workflow "MasterCI" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (arm_asan, azure, sequential)' --workflow "MasterCI" --ci |& tee ./ci/tmp/job.log + fi + integration_tests_amd_asan_db_disk_old_analyzer_1_6: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_asan] @@ -3809,6 +3903,100 @@ jobs: python3 -m praktika run 'Stress test (amd_msan)' --workflow "MasterCI" --ci |& tee ./ci/tmp/job.log fi + stress_test_azure_amd_msan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_msan] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RyZXNzIHRlc3QgKGF6dXJlLCBhbWRfbXNhbik=') }} + name: "Stress test (azure, amd_msan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stress test (azure, amd_msan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > 
./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stress test (azure, amd_msan)' --workflow "MasterCI" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stress test (azure, amd_msan)' --workflow "MasterCI" --ci |& tee ./ci/tmp/job.log + fi + + stress_test_azure_amd_tsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_tsan] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RyZXNzIHRlc3QgKGF6dXJlLCBhbWRfdHNhbik=') }} + name: "Stress test (azure, amd_tsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stress test (azure, amd_tsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stress test (azure, amd_tsan)' --workflow "MasterCI" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stress test (azure, amd_tsan)' --workflow "MasterCI" --ci |& tee ./ci/tmp/job.log + fi + ast_fuzzer_amd_debug: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_debug] @@ -4422,7 +4610,7 @@ jobs: finish_workflow: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_debug, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_asan, build_arm_binary, build_amd_release, build_arm_release, unit_tests_asan, unit_tests_tsan, unit_tests_msan, unit_tests_ubsan, docker_server_image, docker_keeper_image, install_packages_amd_release, install_packages_arm_release, compatibility_check_amd_release, compatibility_check_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_asan_db_disk_distributed_plan_sequential, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential, stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel, stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential, stateless_tests_amd_debug_asyncinsert_s3_storage_parallel, stateless_tests_amd_debug_asyncinsert_s3_storage_sequential, stateless_tests_amd_debug_parallel, stateless_tests_amd_debug_sequential, stateless_tests_amd_tsan_parallel_1_2, stateless_tests_amd_tsan_parallel_2_2, stateless_tests_amd_tsan_sequential_1_2, stateless_tests_amd_tsan_sequential_2_2, stateless_tests_amd_msan_parallel_1_2, stateless_tests_amd_msan_parallel_2_2, stateless_tests_amd_msan_sequential_1_2, stateless_tests_amd_msan_sequential_2_2, stateless_tests_amd_ubsan_parallel, stateless_tests_amd_ubsan_sequential, stateless_tests_amd_debug_distributed_plan_s3_storage_parallel, stateless_tests_amd_debug_distributed_plan_s3_storage_sequential, stateless_tests_amd_tsan_s3_storage_parallel_1_2, stateless_tests_amd_tsan_s3_storage_parallel_2_2, stateless_tests_amd_tsan_s3_storage_sequential_1_2, stateless_tests_amd_tsan_s3_storage_sequential_2_2, stateless_tests_arm_binary_parallel, stateless_tests_arm_binary_sequential, integration_tests_amd_asan_db_disk_old_analyzer_1_6, integration_tests_amd_asan_db_disk_old_analyzer_2_6, integration_tests_amd_asan_db_disk_old_analyzer_3_6, integration_tests_amd_asan_db_disk_old_analyzer_4_6, integration_tests_amd_asan_db_disk_old_analyzer_5_6, integration_tests_amd_asan_db_disk_old_analyzer_6_6, integration_tests_amd_binary_1_5, integration_tests_amd_binary_2_5, integration_tests_amd_binary_3_5, integration_tests_amd_binary_4_5, integration_tests_amd_binary_5_5, integration_tests_arm_binary_distributed_plan_1_4, integration_tests_arm_binary_distributed_plan_2_4, integration_tests_arm_binary_distributed_plan_3_4, integration_tests_arm_binary_distributed_plan_4_4, integration_tests_amd_tsan_1_6, integration_tests_amd_tsan_2_6, integration_tests_amd_tsan_3_6, integration_tests_amd_tsan_4_6, integration_tests_amd_tsan_5_6, integration_tests_amd_tsan_6_6, 
stress_test_amd_debug, stress_test_amd_tsan, stress_test_arm_asan, stress_test_arm_asan_s3, stress_test_amd_ubsan, stress_test_amd_msan, ast_fuzzer_amd_debug, ast_fuzzer_arm_asan, ast_fuzzer_amd_tsan, ast_fuzzer_amd_msan, ast_fuzzer_amd_ubsan, buzzhouse_amd_debug, buzzhouse_arm_asan, buzzhouse_amd_tsan, buzzhouse_amd_msan, buzzhouse_amd_ubsan, clickbench_amd_release, clickbench_arm_release, sqltest] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_debug, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_asan, build_arm_binary, build_amd_release, build_arm_release, unit_tests_asan, unit_tests_tsan, unit_tests_msan, unit_tests_ubsan, docker_server_image, docker_keeper_image, install_packages_amd_release, install_packages_arm_release, compatibility_check_amd_release, compatibility_check_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_asan_db_disk_distributed_plan_sequential, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential, stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel, stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential, stateless_tests_amd_debug_asyncinsert_s3_storage_parallel, stateless_tests_amd_debug_asyncinsert_s3_storage_sequential, stateless_tests_amd_debug_parallel, stateless_tests_amd_debug_sequential, stateless_tests_amd_tsan_parallel_1_2, stateless_tests_amd_tsan_parallel_2_2, stateless_tests_amd_tsan_sequential_1_2, stateless_tests_amd_tsan_sequential_2_2, stateless_tests_amd_msan_parallel_1_2, stateless_tests_amd_msan_parallel_2_2, stateless_tests_amd_msan_sequential_1_2, stateless_tests_amd_msan_sequential_2_2, stateless_tests_amd_ubsan_parallel, stateless_tests_amd_ubsan_sequential, stateless_tests_amd_debug_distributed_plan_s3_storage_parallel, stateless_tests_amd_debug_distributed_plan_s3_storage_sequential, stateless_tests_amd_tsan_s3_storage_parallel_1_2, stateless_tests_amd_tsan_s3_storage_parallel_2_2, stateless_tests_amd_tsan_s3_storage_sequential_1_2, stateless_tests_amd_tsan_s3_storage_sequential_2_2, stateless_tests_arm_binary_parallel, stateless_tests_arm_binary_sequential, stateless_tests_arm_asan_azure_parallel, stateless_tests_arm_asan_azure_sequential, integration_tests_amd_asan_db_disk_old_analyzer_1_6, integration_tests_amd_asan_db_disk_old_analyzer_2_6, integration_tests_amd_asan_db_disk_old_analyzer_3_6, integration_tests_amd_asan_db_disk_old_analyzer_4_6, integration_tests_amd_asan_db_disk_old_analyzer_5_6, integration_tests_amd_asan_db_disk_old_analyzer_6_6, integration_tests_amd_binary_1_5, integration_tests_amd_binary_2_5, integration_tests_amd_binary_3_5, integration_tests_amd_binary_4_5, integration_tests_amd_binary_5_5, integration_tests_arm_binary_distributed_plan_1_4, integration_tests_arm_binary_distributed_plan_2_4, integration_tests_arm_binary_distributed_plan_3_4, integration_tests_arm_binary_distributed_plan_4_4, integration_tests_amd_tsan_1_6, integration_tests_amd_tsan_2_6, integration_tests_amd_tsan_3_6, integration_tests_amd_tsan_4_6, integration_tests_amd_tsan_5_6, integration_tests_amd_tsan_6_6, stress_test_amd_debug, stress_test_amd_tsan, stress_test_arm_asan, stress_test_arm_asan_s3, stress_test_amd_ubsan, stress_test_amd_msan, stress_test_azure_amd_msan, stress_test_azure_amd_tsan, 
ast_fuzzer_amd_debug, ast_fuzzer_arm_asan, ast_fuzzer_amd_tsan, ast_fuzzer_amd_msan, ast_fuzzer_amd_ubsan, buzzhouse_amd_debug, buzzhouse_arm_asan, buzzhouse_amd_tsan, buzzhouse_amd_msan, buzzhouse_amd_ubsan, clickbench_amd_release, clickbench_arm_release, sqltest] if: ${{ always() }} name: "Finish Workflow" outputs: @@ -4503,7 +4691,7 @@ jobs: commit: c5cae9b244e0839fb307a9fb67a40fe80d93810b arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }} RegressionTestsAarch64: needs: [config_workflow, build_arm_binary] @@ -4515,7 +4703,7 @@ jobs: commit: c5cae9b244e0839fb307a9fb67a40fe80d93810b arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }} SignRelease: @@ -4593,6 +4781,8 @@ jobs: - stateless_tests_amd_tsan_s3_storage_sequential_2_2 - stateless_tests_arm_binary_parallel - stateless_tests_arm_binary_sequential + - stateless_tests_arm_asan_azure_parallel + - stateless_tests_arm_asan_azure_sequential - integration_tests_amd_asan_db_disk_old_analyzer_1_6 - integration_tests_amd_asan_db_disk_old_analyzer_2_6 - integration_tests_amd_asan_db_disk_old_analyzer_3_6 @@ -4620,6 +4810,8 @@ jobs: - stress_test_arm_asan_s3 - stress_test_amd_ubsan - stress_test_amd_msan + - stress_test_azure_amd_msan + - stress_test_azure_amd_tsan - ast_fuzzer_amd_debug - ast_fuzzer_arm_asan - ast_fuzzer_amd_tsan diff --git a/.github/workflows/merge_queue.yml b/.github/workflows/merge_queue.yml index 790b868632fb..9e29fa300c07 100644 --- a/.github/workflows/merge_queue.yml +++ b/.github/workflows/merge_queue.yml @@ -8,6 +8,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }} @@ -22,7 +23,6 @@ env: AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} jobs: @@ -48,6 +48,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/nightly_coverage.yml b/.github/workflows/nightly_coverage.yml index 0130e949e5ad..f7e4f0d0ffe2 100644 --- a/.github/workflows/nightly_coverage.yml +++ b/.github/workflows/nightly_coverage.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: @@ -36,6 +37,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/nightly_fuzzers.yml b/.github/workflows/nightly_fuzzers.yml index 640032352e28..2200c3cf086a 100644 --- 
a/.github/workflows/nightly_fuzzers.yml +++ b/.github/workflows/nightly_fuzzers.yml @@ -11,6 +11,7 @@ concurrency: env: PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} CHECKOUT_REF: "" jobs: @@ -36,6 +37,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 9152dd13cff7..bf4ccbf5374c 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -16,6 +16,7 @@ on: env: # Force the stdout and stderr streams to be unbuffered PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }} DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }} @@ -32,10 +33,8 @@ env: AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }} AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/" ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }} - GH_TOKEN: ${{ github.token }} -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all + jobs: @@ -60,6 +59,7 @@ jobs: test_name: "Config Workflow" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} @@ -711,7 +711,7 @@ jobs: build_amd_release: runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }} name: "Build (amd_release)" outputs: @@ -758,7 +758,7 @@ jobs: build_arm_release: runs-on: [self-hosted, altinity-on-demand, altinity-builder] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest] if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }} name: "Build (arm_release)" outputs: @@ 
-3390,7 +3390,7 @@ jobs: docker_server_image: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_release, build_arm_release] if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VyIHNlcnZlciBpbWFnZQ==') }} name: "Docker server image" outputs: @@ -3437,7 +3437,7 @@ jobs: docker_keeper_image: runs-on: [self-hosted, altinity-on-demand, altinity-style-checker] - needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, build_amd_release, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_release, build_arm_release] if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VyIGtlZXBlciBpbWFnZQ==') }} name: "Docker keeper image" outputs: @@ -4505,7 +4505,7 @@ jobs: commit: c5cae9b244e0839fb307a9fb67a40fe80d93810b arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }} RegressionTestsAarch64: needs: [config_workflow, build_arm_binary] @@ -4517,7 +4517,7 @@ jobs: commit: c5cae9b244e0839fb307a9fb67a40fe80d93810b arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }} FinishCIReport: diff --git a/.github/workflows/pull_request_community.yml b/.github/workflows/pull_request_community.yml new file mode 100644 index 000000000000..8f3880c1c6da --- /dev/null +++ b/.github/workflows/pull_request_community.yml @@ -0,0 +1,4012 @@ +# generated by praktika + +name: Community PR + +on: + workflow_dispatch: + inputs: + no_cache: + description: Run without cache + required: false + type: boolean + default: false + pull_request: + branches: ['antalya', 'releases/*', 'antalya-*'] + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} + DISABLE_CI_MERGE_COMMIT: ${{ vars.DISABLE_CI_MERGE_COMMIT || '0' }} + DISABLE_CI_CACHE: ${{ github.event.inputs.no_cache || '0' }} + CHECKOUT_REF: ${{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }} + + + +jobs: + + config_workflow: + runs-on: [self-hosted, altinity-on-demand, 
altinity-style-checker-aarch64] + needs: [] + if: ${{ github.repository != github.event.pull_request.head.repo.full_name }} + name: "Config Workflow" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Config Workflow" + + - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} + env: + PR_NUMBER: ${{ github.event.pull_request.number || 0 }} + COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} + run: | + if [ "$PR_NUMBER" -eq 0 ]; then + PREFIX="REFs/$GITHUB_REF_NAME/$COMMIT_SHA" + else + PREFIX="PRs/$PR_NUMBER/$COMMIT_SHA" + fi + REPORT_LINK=https://s3.amazonaws.com/altinity-build-artifacts/$PREFIX/$GITHUB_RUN_ID/ci_run_report.html + echo "Workflow Run Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Config Workflow' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Config Workflow' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + fast_test: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RmFzdCB0ZXN0') }} + name: "Fast test" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Fast test" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Fast test' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Fast test' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + build_amd_debug: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }} + name: "Build (amd_debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_debug)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_debug)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_AMD_DEBUG + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_DEBUG + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_DEBUG + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_DEBUG + path: ci/tmp/*.deb + + build_amd_asan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }} + name: "Build (amd_asan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_asan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_ASAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_ASAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_ASAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_ASAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_ASAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_ASAN + path: ci/tmp/*.deb + + build_amd_tsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }} + name: "Build (amd_tsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_tsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_TSAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_TSAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_TSAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_TSAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_TSAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_TSAN + path: ci/tmp/*.deb + + build_amd_msan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }} + name: "Build (amd_msan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_msan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_MSAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_MSAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_MSAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_MSAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_MSAM + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_MSAM + path: ci/tmp/*.deb + + build_amd_ubsan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }} + name: "Build (amd_ubsan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_ubsan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact UNITTEST_AMD_UBSAN + uses: actions/upload-artifact@v4 + with: + name: UNITTEST_AMD_UBSAN + path: ci/tmp/build/src/unit_tests_dbms + + + - name: Upload artifact CH_AMD_UBSAN + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_UBSAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_AMD_UBSAN + uses: actions/upload-artifact@v4 + with: + name: DEB_AMD_UBSAN + path: ci/tmp/*.deb + + build_amd_binary: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }} + name: "Build (amd_binary)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (amd_binary)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (amd_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_AMD_BINARY + uses: actions/upload-artifact@v4 + with: + name: CH_AMD_BINARY + path: ci/tmp/build/programs/self-extracting/clickhouse + + build_arm_asan: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9hc2FuKQ==') }} + name: "Build (arm_asan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (arm_asan)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . ./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Build (arm_asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Build (arm_asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + - name: Upload artifact CH_ARM_ASAN + uses: actions/upload-artifact@v4 + with: + name: CH_ARM_ASAN + path: ci/tmp/build/programs/self-extracting/clickhouse + + + - name: Upload artifact DEB_ARM_ASAN + uses: actions/upload-artifact@v4 + with: + name: DEB_ARM_ASAN + path: ci/tmp/*.deb + + build_arm_binary: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }} + name: "Build (arm_binary)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Build (arm_binary)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Run + id: run + run: | + . 
+  build_arm_binary:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, fast_test]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }}
+    name: "Build (arm_binary)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Build (arm_binary)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Build (arm_binary)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+      - name: Upload artifact CH_ARM_BIN
+        uses: actions/upload-artifact@v4
+        with:
+          name: CH_ARM_BIN
+          path: ci/tmp/build/programs/self-extracting/clickhouse
+
+  build_arm_tsan:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, fast_test]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV90c2FuKQ==') }}
+    name: "Build (arm_tsan)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Build (arm_tsan)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Build (arm_tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Build (arm_tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+      - name: Upload artifact CH_ARM_TSAN
+        uses: actions/upload-artifact@v4
+        with:
+          name: CH_ARM_TSAN
+          path: ci/tmp/build/programs/self-extracting/clickhouse
+
+  build_amd_release:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }}
+    name: "Build (amd_release)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Build (amd_release)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Build (amd_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+      - name: Upload artifact CH_AMD_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: CH_AMD_RELEASE
+          path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+      - name: Upload artifact CH_AMD_RELEASE_STRIPPED
+        uses: actions/upload-artifact@v4
+        with:
+          name: CH_AMD_RELEASE_STRIPPED
+          path: ci/tmp/build/programs/self-extracting/clickhouse-stripped
+
+
+      - name: Upload artifact DEB_AMD_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: DEB_AMD_RELEASE
+          path: ci/tmp/*.deb
+
+
+      - name: Upload artifact RPM_AMD_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: RPM_AMD_RELEASE
+          path: ci/tmp/*.rpm
+
+
+      - name: Upload artifact TGZ_AMD_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: TGZ_AMD_RELEASE
+          path: ci/tmp/*64.tgz*
+
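Note on the Prepare env script step used by every job: the outer heredoc delimiter is quoted ('ENV_SETUP_SCRIPT_EOF'), so the shell expands nothing while writing praktika_setup_env.sh; the `${{ toJson(job) }}` and `${{ toJson(needs) }}` placeholders are substituted earlier, by the GitHub Actions runner, before bash ever sees the script. The file that lands on disk therefore contains literal nested `cat << 'EOF'` heredocs with the JSON already inlined, and the job/needs context is only materialized as ./ci/tmp/workflow_job.json and ./ci/tmp/workflow_status.json at the moment the Run step sources the file. Roughly, after runner substitution the generated script looks like this (JSON bodies abridged, shapes assumed):

  export PYTHONPATH=./ci:.:

  cat > ./ci/tmp/workflow_job.json << 'EOF'
  {"status": "...", "container": {}}
  EOF
  cat > ./ci/tmp/workflow_status.json << 'EOF'
  {"config_workflow": {"result": "success", "outputs": {}}}
  EOF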
+  build_arm_release:
+    runs-on: [self-hosted, altinity-on-demand, altinity-builder]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }}
+    name: "Build (arm_release)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Build (arm_release)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Build (arm_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+      - name: Upload artifact CH_ARM_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: CH_ARM_RELEASE
+          path: ci/tmp/build/programs/self-extracting/clickhouse
+
+
+      - name: Upload artifact CH_ARM_RELEASE_STRIPPED
+        uses: actions/upload-artifact@v4
+        with:
+          name: CH_ARM_RELEASE_STRIPPED
+          path: ci/tmp/build/programs/self-extracting/clickhouse-stripped
+
+
+      - name: Upload artifact DEB_ARM_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: DEB_ARM_RELEASE
+          path: ci/tmp/*.deb
+
+
+      - name: Upload artifact RPM_ARM_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: RPM_ARM_RELEASE
+          path: ci/tmp/*.rpm
+
+
+      - name: Upload artifact TGZ_ARM_RELEASE
+        uses: actions/upload-artifact@v4
+        with:
+          name: TGZ_ARM_RELEASE
+          path: ci/tmp/*64.tgz*
+
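Note: each job advertises two outputs, `data` and `pipeline_status`, which the praktika Run step is expected to publish; the `|| 'undefined'` fallback makes a job that died before reporting read as 'undefined', which downstream `if:` guards treat the same as a failure. (The `ci/tmp/*64.tgz*` upload glob above presumably matches the architecture-suffixed tarballs together with any checksum companions such as `.tgz.sha512`.) A step publishes such outputs through the standard $GITHUB_OUTPUT mechanism; a hypothetical sketch, not praktika's actual code:

  # inside a step's run: block (result.json is a hypothetical payload file)
  echo "pipeline_status=success" >> "$GITHUB_OUTPUT"
  echo "DATA=$(base64 -w0 < ./ci/tmp/result.json)" >> "$GITHUB_OUTPUT"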
+  quick_functional_tests:
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+    needs: [config_workflow, fast_test, build_amd_debug]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'UXVpY2sgZnVuY3Rpb25hbCB0ZXN0cw==') }}
+    name: "Quick functional tests"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Quick functional tests"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Quick functional tests' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Quick functional tests' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
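Note: the build jobs publish the self-extracting `clickhouse` binary as a named artifact (CH_AMD_DEBUG, CH_AMD_ASAN, CH_AMD_TSAN, ...), and each functional-test job downloads the matching one into `./ci/tmp` before its Run step. One wrinkle of this handoff: `actions/upload-artifact`/`download-artifact` v4 do not preserve file permissions, so the consumer (praktika here, presumably) has to restore the executable bit before launching the binary:

  chmod +x ./ci/tmp/clickhouse   # artifact transfer drops the executable bit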
+  stateless_tests_amd_asan_distributed_plan_parallel_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_asan]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgcGFyYWxsZWwsIDEvMik=') }}
+    name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_asan, distributed plan, parallel, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_ASAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_ASAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_asan_distributed_plan_parallel_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_asan]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGlzdHJpYnV0ZWQgcGxhbiwgcGFyYWxsZWwsIDIvMik=') }}
+    name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_asan, distributed plan, parallel, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_ASAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_ASAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_asan, distributed plan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
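Note: the `if:` guard on every job reads: run only when nothing upstream was cancelled, no dependency reported a `pipeline_status` of 'failure' or 'undefined', and the job's own name is absent from the praktika success cache. The dependency part of the check can be approximated locally against the `workflow_status.json` written by the Prepare env script, assuming the `toJson(needs)` shape of job-id mapped to result/outputs:

  jq -e '[.[] | .outputs.pipeline_status // "undefined"]
         | all(. != "failure" and . != "undefined")' ./ci/tmp/workflow_status.json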
+  stateless_tests_amd_asan_db_disk_distributed_plan_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYXNhbiwgZGIgZGlzaywgZGlzdHJpYnV0ZWQgcGxhbiwgc2VxdWVudGlhbCk=') }}
+    name: "Stateless tests (amd_asan, db disk, distributed plan, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_asan, db disk, distributed plan, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_ASAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_ASAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_asan, db disk, distributed plan, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_asan, db disk, distributed plan, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgcGFyYWxsZWwp') }}
+    name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_BINARY
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_BINARY
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgc2VxdWVudGlhbCk=') }}
+    name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_BINARY
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_BINARY
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }}
+    name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_BINARY
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_BINARY
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }}
+    name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_BINARY
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_BINARY
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_asyncinsert_s3_storage_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIEFzeW5jSW5zZXJ0LCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }}
+    name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_asyncinsert_s3_storage_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIEFzeW5jSW5zZXJ0LCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }}
+    name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, AsyncInsert, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIHBhcmFsbGVsKQ==') }}
+    name: "Stateless tests (amd_debug, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIHNlcXVlbnRpYWwp') }}
+    name: "Stateless tests (amd_debug, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_parallel_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgcGFyYWxsZWwsIDEvMik=') }}
+    name: "Stateless tests (amd_tsan, parallel, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, parallel, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
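Note: the `1/2` / `2/2` suffixes are test sharding: the same job definition runs twice, each invocation told (via its job name) which half of the test set it owns, so the two shards proceed on separate runners concurrently. A toy sketch of k-of-n splitting over a test list, an assumption about the general mechanism rather than praktika's actual code:

  # keep every n-th test starting at shard k (1-based); shards 1/2 and 2/2 are disjoint halves
  ls tests/queries/0_stateless/*.sql | awk -v n=2 -v k=1 'NR % n == k % n' > shard_1_of_2.txt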
+  stateless_tests_amd_tsan_parallel_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgcGFyYWxsZWwsIDIvMik=') }}
+    name: "Stateless tests (amd_tsan, parallel, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, parallel, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_sequential_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgc2VxdWVudGlhbCwgMS8yKQ==') }}
+    name: "Stateless tests (amd_tsan, sequential, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, sequential, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_sequential_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgc2VxdWVudGlhbCwgMi8yKQ==') }}
+    name: "Stateless tests (amd_tsan, sequential, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, sequential, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_parallel_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_msan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgcGFyYWxsZWwsIDEvMik=') }}
+    name: "Stateless tests (amd_msan, parallel, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, parallel, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_MSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_MSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_parallel_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_msan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgcGFyYWxsZWwsIDIvMik=') }}
+    name: "Stateless tests (amd_msan, parallel, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, parallel, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_MSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_MSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_sequential_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_msan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgc2VxdWVudGlhbCwgMS8yKQ==') }}
+    name: "Stateless tests (amd_msan, sequential, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, sequential, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_MSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_MSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_msan_sequential_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_msan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfbXNhbiwgc2VxdWVudGlhbCwgMi8yKQ==') }}
+    name: "Stateless tests (amd_msan, sequential, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_msan, sequential, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_MSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_MSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_msan, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_ubsan_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_ubsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdWJzYW4sIHBhcmFsbGVsKQ==') }}
+    name: "Stateless tests (amd_ubsan, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_ubsan, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_UBSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_UBSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_ubsan, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_ubsan, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_ubsan_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_ubsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdWJzYW4sIHNlcXVlbnRpYWwp') }}
+    name: "Stateless tests (amd_ubsan, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_ubsan, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_UBSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_UBSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_ubsan, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_ubsan, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_distributed_plan_s3_storage_parallel:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIGRpc3RyaWJ1dGVkIHBsYW4sIHMzIHN0b3JhZ2UsIHBhcmFsbGVsKQ==') }}
+    name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, distributed plan, s3 storage, parallel)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_debug_distributed_plan_s3_storage_sequential:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsIGRpc3RyaWJ1dGVkIHBsYW4sIHMzIHN0b3JhZ2UsIHNlcXVlbnRpYWwp') }}
+    name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_debug, distributed plan, s3 storage, sequential)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_DEBUG
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_DEBUG
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_debug, distributed plan, s3 storage, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_s3_storage_parallel_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgcGFyYWxsZWwsIDEvMik=') }}
+    name: "Stateless tests (amd_tsan, s3 storage, parallel, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, s3 storage, parallel, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_s3_storage_parallel_2_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgcGFyYWxsZWwsIDIvMik=') }}
+    name: "Stateless tests (amd_tsan, s3 storage, parallel, 2/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, s3 storage, parallel, 2/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, parallel, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  stateless_tests_amd_tsan_s3_storage_sequential_1_2:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgc2VxdWVudGlhbCwgMS8yKQ==') }}
+    name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Stateless tests (amd_tsan, s3 storage, sequential, 1/2)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 1/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 1/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_amd_tsan_s3_storage_sequential_2_2: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfdHNhbiwgczMgc3RvcmFnZSwgc2VxdWVudGlhbCwgMi8yKQ==') }} + name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (amd_tsan, s3 storage, sequential, 2/2)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 2/2)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (amd_tsan, s3 storage, sequential, 2/2)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_arm_binary_parallel: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test, build_arm_binary] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBwYXJhbGxlbCk=') }} + name: "Stateless tests (arm_binary, parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (arm_binary, parallel)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (arm_binary, parallel)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (arm_binary, parallel)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + stateless_tests_arm_binary_sequential: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBzZXF1ZW50aWFsKQ==') }} + name: "Stateless tests (arm_binary, sequential)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Stateless tests (arm_binary, sequential)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Stateless tests (arm_binary, sequential)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_db_disk_old_analyzer_1_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBkYiBkaXNrLCBvbGQgYW5hbHl6ZXIsIDEvNik=') }} + name: "Integration tests (amd_asan, db disk, old analyzer, 1/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, db disk, old analyzer, 1/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 1/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 1/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_db_disk_old_analyzer_2_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBkYiBkaXNrLCBvbGQgYW5hbHl6ZXIsIDIvNik=') }} + name: "Integration tests (amd_asan, db disk, old analyzer, 2/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, db disk, old analyzer, 2/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 2/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 2/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_db_disk_old_analyzer_3_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBkYiBkaXNrLCBvbGQgYW5hbHl6ZXIsIDMvNik=') }} + name: "Integration tests (amd_asan, db disk, old analyzer, 3/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, db disk, old analyzer, 3/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 3/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 3/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_db_disk_old_analyzer_4_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBkYiBkaXNrLCBvbGQgYW5hbHl6ZXIsIDQvNik=') }} + name: "Integration tests (amd_asan, db disk, old analyzer, 4/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, db disk, old analyzer, 4/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 4/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 4/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_db_disk_old_analyzer_5_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBkYiBkaXNrLCBvbGQgYW5hbHl6ZXIsIDUvNik=') }} + name: "Integration tests (amd_asan, db disk, old analyzer, 5/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, db disk, old analyzer, 5/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 5/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 5/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_asan_db_disk_old_analyzer_6_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9hc2FuLCBkYiBkaXNrLCBvbGQgYW5hbHl6ZXIsIDYvNik=') }} + name: "Integration tests (amd_asan, db disk, old analyzer, 6/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_asan, db disk, old analyzer, 6/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_ASAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_ASAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 6/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_asan, db disk, old analyzer, 6/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_1_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDEvNSk=') }} + name: "Integration tests (amd_binary, 1/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 1/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 1/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 1/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_2_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDIvNSk=') }} + name: "Integration tests (amd_binary, 2/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 2/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 2/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 2/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_3_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDMvNSk=') }} + name: "Integration tests (amd_binary, 3/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 3/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 3/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 3/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_4_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDQvNSk=') }} + name: "Integration tests (amd_binary, 4/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 4/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 4/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 4/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_binary_5_5: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_binary, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF9iaW5hcnksIDUvNSk=') }} + name: "Integration tests (amd_binary, 5/5)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_binary, 5/5)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_BINARY + uses: actions/download-artifact@v4 + with: + name: CH_AMD_BINARY + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_binary, 5/5)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_binary, 5/5)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_1_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDEvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 1/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 1/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 1/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 1/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_2_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDIvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 2/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 2/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 2/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 2/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_3_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDMvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 3/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 3/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 3/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 3/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_arm_binary_distributed_plan_4_4: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFybV9iaW5hcnksIGRpc3RyaWJ1dGVkIHBsYW4sIDQvNCk=') }} + name: "Integration tests (arm_binary, distributed plan, 4/4)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (arm_binary, distributed plan, 4/4)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_ARM_BIN + uses: actions/download-artifact@v4 + with: + name: CH_ARM_BIN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 4/4)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (arm_binary, distributed plan, 4/4)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_1_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAxLzYp') }} + name: "Integration tests (amd_tsan, 1/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 1/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 1/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 1/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_2_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAyLzYp') }} + name: "Integration tests (amd_tsan, 2/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 2/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 2/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 2/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_3_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCAzLzYp') }} + name: "Integration tests (amd_tsan, 3/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 3/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 3/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 3/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_4_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA0LzYp') }} + name: "Integration tests (amd_tsan, 4/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 4/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 4/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 4/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_5_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA1LzYp') }} + name: "Integration tests (amd_tsan, 5/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 5/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run 'Integration tests (amd_tsan, 5/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log + else + python3 -m praktika run 'Integration tests (amd_tsan, 5/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log + fi + + integration_tests_amd_tsan_6_6: + runs-on: [self-hosted, altinity-on-demand, altinity-func-tester] + needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_amd_tsan, build_arm_binary, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel] + if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW50ZWdyYXRpb24gdGVzdHMgKGFtZF90c2FuLCA2LzYp') }} + name: "Integration tests (amd_tsan, 6/6)" + outputs: + data: ${{ steps.run.outputs.DATA }} + pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ env.CHECKOUT_REF }} + + - name: Setup + uses: ./.github/actions/runner_setup + - name: Docker setup + uses: ./.github/actions/docker_setup + with: + test_name: "Integration tests (amd_tsan, 6/6)" + + - name: Prepare env script + run: | + rm -rf ./ci/tmp + mkdir -p ./ci/tmp + cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:.: + + cat > ./ci/tmp/workflow_job.json << 'EOF' + ${{ toJson(job) }} + EOF + cat > ./ci/tmp/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + - name: Download artifact CH_AMD_TSAN + uses: actions/download-artifact@v4 + with: + name: CH_AMD_TSAN + path: ./ci/tmp + + - name: Run + id: run + run: | + . 
./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Integration tests (amd_tsan, 6/6)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Integration tests (amd_tsan, 6/6)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  unit_tests_asan:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_asan]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'VW5pdCB0ZXN0cyAoYXNhbik=') }}
+    name: "Unit tests (asan)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Unit tests (asan)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact UNITTEST_AMD_ASAN
+        uses: actions/download-artifact@v4
+        with:
+          name: UNITTEST_AMD_ASAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Unit tests (asan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Unit tests (asan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  unit_tests_tsan:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_tsan]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'VW5pdCB0ZXN0cyAodHNhbik=') }}
+    name: "Unit tests (tsan)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Unit tests (tsan)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact UNITTEST_AMD_TSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: UNITTEST_AMD_TSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Unit tests (tsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Unit tests (tsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  unit_tests_msan:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_msan]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'VW5pdCB0ZXN0cyAobXNhbik=') }}
+    name: "Unit tests (msan)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Unit tests (msan)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact UNITTEST_AMD_MSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: UNITTEST_AMD_MSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Unit tests (msan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Unit tests (msan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  unit_tests_ubsan:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_ubsan]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'VW5pdCB0ZXN0cyAodWJzYW4p') }}
+    name: "Unit tests (ubsan)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Unit tests (ubsan)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact UNITTEST_AMD_UBSAN
+        uses: actions/download-artifact@v4
+        with:
+          name: UNITTEST_AMD_UBSAN
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Unit tests (ubsan)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Unit tests (ubsan)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  install_packages_amd_release:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, build_amd_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYW1kX3JlbGVhc2Up') }}
+    name: "Install packages (amd_release)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Install packages (amd_release)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_AMD_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_AMD_RELEASE
+          path: ./ci/tmp
+
+
+      - name: Download artifact DEB_AMD_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: DEB_AMD_RELEASE
+          path: ./ci/tmp
+
+
+      - name: Download artifact RPM_AMD_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: RPM_AMD_RELEASE
+          path: ./ci/tmp
+
+
+      - name: Download artifact TGZ_AMD_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: TGZ_AMD_RELEASE
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Install packages (amd_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Install packages (amd_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  install_packages_arm_release:
+    runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYXJtX3JlbGVhc2Up') }}
+    name: "Install packages (arm_release)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Install packages (arm_release)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact CH_ARM_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: CH_ARM_RELEASE
+          path: ./ci/tmp
+
+
+      - name: Download artifact DEB_ARM_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: DEB_ARM_RELEASE
+          path: ./ci/tmp
+
+
+      - name: Download artifact RPM_ARM_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: RPM_ARM_RELEASE
+          path: ./ci/tmp
+
+
+      - name: Download artifact TGZ_ARM_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: TGZ_ARM_RELEASE
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Install packages (arm_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Install packages (arm_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  compatibility_check_amd_release:
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, build_amd_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'Q29tcGF0aWJpbGl0eSBjaGVjayAoYW1kX3JlbGVhc2Up') }}
+    name: "Compatibility check (amd_release)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Compatibility check (amd_release)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact DEB_AMD_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: DEB_AMD_RELEASE
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Compatibility check (amd_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Compatibility check (amd_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
+
+  compatibility_check_arm_release:
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
+    needs: [config_workflow, fast_test, build_amd_debug, build_amd_asan, build_arm_binary, build_arm_release, stateless_tests_amd_asan_distributed_plan_parallel_1_2, stateless_tests_amd_asan_distributed_plan_parallel_2_2, stateless_tests_amd_debug_parallel, stateless_tests_arm_binary_parallel]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'Q29tcGF0aWJpbGl0eSBjaGVjayAoYXJtX3JlbGVhc2Up') }}
+    name: "Compatibility check (arm_release)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Compatibility check (arm_release)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Download artifact DEB_ARM_RELEASE
+        uses: actions/download-artifact@v4
+        with:
+          name: DEB_ARM_RELEASE
+          path: ./ci/tmp
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Compatibility check (arm_release)' --workflow "Community PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Compatibility check (arm_release)' --workflow "Community PR" --ci |& tee ./ci/tmp/job.log
+          fi
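For reference: the `cache_success_base64` tokens in the generated `if:` guards above are simply the base64-encoded job names, which avoids quoting problems with parentheses and spaces inside GitHub expressions. A minimal Python sketch (the helper name here is hypothetical; praktika provides an equivalent `Utils.to_base64`):

    import base64

    def job_name_token(job_name: str) -> str:
        # Compare base64-encoded job names instead of raw strings, since raw
        # names like "Unit tests (asan)" are awkward to escape in expressions.
        return base64.b64encode(job_name.encode("utf-8")).decode("utf-8")

    assert job_name_token("Unit tests (asan)") == "VW5pdCB0ZXN0cyAoYXNhbik="
    assert job_name_token("Install packages (amd_release)") == "SW5zdGFsbCBwYWNrYWdlcyAoYW1kX3JlbGVhc2Up"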
diff --git a/.github/workflows/regression-reusable-suite.yml b/.github/workflows/regression-reusable-suite.yml
index 45b2b09e06bb..dfcabb63da72 100644
--- a/.github/workflows/regression-reusable-suite.yml
+++ b/.github/workflows/regression-reusable-suite.yml
@@ -168,7 +168,7 @@ jobs:
           exit $EXITCODE
 
       - name: 📊 Set Commit Status
-        if: always() && inputs.set_commit_status
+        if: ${{ !cancelled() && inputs.set_commit_status }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           JOB_OUTCOME: ${{ steps.run_suite.outcome }}
@@ -176,16 +176,16 @@ jobs:
         run: python3 .github/set_builds_status.py
 
       - name: 📝 Create and upload logs
-        if: always()
+        if: ${{ !cancelled() }}
        run: .github/create_and_upload_logs.sh 1
 
       - name: 📤 Upload logs to results database
-        if: always()
+        if: ${{ !cancelled() }}
         timeout-minutes: 20
         run: .github/upload_results_to_database.sh 1
 
       - uses: actions/upload-artifact@v4
-        if: always()
+        if: ${{ !cancelled() }}
         with:
           name: ${{ format('{0}{1}-artifacts-{2}{3}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '', inputs.runner_arch, contains(inputs.extra_args, '--use-keeper') && '_keeper' || '_zookeeper') }}
           path: ${{ env.artifact_paths }}
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 8f1bc37ab701..fa80b5f85a72 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -16,6 +16,7 @@ on:
 env:
   # Force the stdout and stderr streams to be unbuffered
   PYTHONUNBUFFERED: 1
+  GH_TOKEN: ${{ github.token }}
   CHECKOUT_REF: ""
   DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
   CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
@@ -30,10 +31,8 @@ env:
   AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
   AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
   ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
-  GH_TOKEN: ${{ github.token }}
 
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all
+
 
 jobs:
 
@@ -58,6 +57,7 @@ jobs:
           test_name: "Config Workflow"
 
       - name: Note report location to summary
+        if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
         env:
           PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
          COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml
index f7ddfda058fc..ffb11ee24c89 100644
--- a/.github/workflows/release_builds.yml
+++ b/.github/workflows/release_builds.yml
@@ -7,6 +7,7 @@ on:
 
 env:
   PYTHONUNBUFFERED: 1
+  GH_TOKEN: ${{ github.token }}
   CHECKOUT_REF: ""
   DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
   CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
@@ -21,7 +22,6 @@ env:
   AZURE_CONTAINER_NAME: ${{ secrets.AZURE_CONTAINER_NAME }}
   AZURE_STORAGE_ACCOUNT_URL: "https://${{ secrets.AZURE_ACCOUNT_NAME }}.blob.core.windows.net/"
   ROBOT_TOKEN: ${{ secrets.ROBOT_TOKEN }}
-  GH_TOKEN: ${{ github.token }}
 
 
 jobs:
@@ -47,6 +47,7 @@ jobs:
           test_name: "Config Workflow"
 
       - name: Note report location to summary
+        if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
         env:
           PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
           COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
@@ -90,6 +91,7 @@ jobs:
   dockers_build_amd:
     runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
     needs: [config_workflow]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VycyBCdWlsZCAoYW1kKQ==') }}
     name: "Dockers Build (amd)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -138,6 +140,7 @@ jobs:
   dockers_build_arm:
     runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
     needs: [config_workflow]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VycyBCdWlsZCAoYXJtKQ==') }}
     name: "Dockers Build (arm)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -183,9 +186,59 @@ jobs:
             python3 -m praktika run 'Dockers Build (arm)' --workflow "Release Builds" --ci |& tee ./ci/tmp/job.log
           fi
 
+  dockers_build_multiplatform_manifest:
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VycyBCdWlsZCAobXVsdGlwbGF0Zm9ybSBtYW5pZmVzdCk=') }}
+    name: "Dockers Build (multiplatform manifest)"
+    outputs:
+      data: ${{ steps.run.outputs.DATA }}
+      pipeline_status: ${{ steps.run.outputs.pipeline_status || 'undefined' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ env.CHECKOUT_REF }}
+
+      - name: Setup
+        uses: ./.github/actions/runner_setup
+      - name: Docker setup
+        uses: ./.github/actions/docker_setup
+        with:
+          test_name: "Dockers Build (multiplatform manifest)"
+
+      - name: Prepare env script
+        run: |
+          rm -rf ./ci/tmp
+          mkdir -p ./ci/tmp
+          cat > ./ci/tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
+          export PYTHONPATH=./ci:.:
+          cat > ./ci/tmp/workflow_inputs.json << 'EOF'
+          ${{ toJson(github.event.inputs) }}
+          EOF
+          cat > ./ci/tmp/workflow_job.json << 'EOF'
+          ${{ toJson(job) }}
+          EOF
+          cat > ./ci/tmp/workflow_status.json << 'EOF'
+          ${{ toJson(needs) }}
+          EOF
+          ENV_SETUP_SCRIPT_EOF
+
+      - name: Run
+        id: run
+        run: |
+          . ./ci/tmp/praktika_setup_env.sh
+          set -o pipefail
+          if command -v ts &> /dev/null; then
+            python3 -m praktika run 'Dockers Build (multiplatform manifest)' --workflow "Release Builds" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee ./ci/tmp/job.log
+          else
+            python3 -m praktika run 'Dockers Build (multiplatform manifest)' --workflow "Release Builds" --ci |& tee ./ci/tmp/job.log
+          fi
+
   build_amd_debug:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }}
     name: "Build (amd_debug)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -233,7 +286,8 @@ jobs:
 
   build_amd_asan:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }}
     name: "Build (amd_asan)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -281,7 +335,8 @@ jobs:
 
   build_amd_tsan:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }}
     name: "Build (amd_tsan)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -329,7 +384,8 @@ jobs:
 
   build_amd_msan:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }}
     name: "Build (amd_msan)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -377,7 +433,8 @@ jobs:
 
   build_amd_ubsan:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }}
     name: "Build (amd_ubsan)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -425,7 +482,8 @@ jobs:
 
   build_amd_binary:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }}
     name: "Build (amd_binary)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -473,7 +531,8 @@ jobs:
 
   build_arm_asan:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9hc2FuKQ==') }}
     name: "Build (arm_asan)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -521,7 +580,8 @@ jobs:
 
   build_arm_binary:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9iaW5hcnkp') }}
     name: "Build (arm_binary)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -569,7 +629,8 @@ jobs:
 
   build_amd_release:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }}
     name: "Build (amd_release)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -617,7 +678,8 @@ jobs:
 
   build_arm_release:
     runs-on: [self-hosted, altinity-on-demand, altinity-builder]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }}
     name: "Build (arm_release)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -665,7 +727,8 @@ jobs:
 
   docker_server_image:
     runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release, build_arm_release]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_release, build_arm_release]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VyIHNlcnZlciBpbWFnZQ==') }}
     name: "Docker server image"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -713,7 +776,8 @@ jobs:
 
   docker_keeper_image:
     runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release, build_arm_release]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_release, build_arm_release]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'RG9ja2VyIGtlZXBlciBpbWFnZQ==') }}
     name: "Docker keeper image"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -761,7 +825,8 @@ jobs:
 
   install_packages_amd_release:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_release]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_release]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYW1kX3JlbGVhc2Up') }}
     name: "Install packages (amd_release)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -809,7 +874,8 @@ jobs:
 
   install_packages_arm_release:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_release]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_arm_release]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'SW5zdGFsbCBwYWNrYWdlcyAoYXJtX3JlbGVhc2Up') }}
     name: "Install packages (arm_release)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -857,7 +923,8 @@ jobs:
 
   stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgcGFyYWxsZWwp') }}
     name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, parallel)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -905,7 +972,8 @@ jobs:
 
   stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBvbGQgYW5hbHl6ZXIsIHMzIHN0b3JhZ2UsIERhdGFiYXNlUmVwbGljYXRlZCwgc2VxdWVudGlhbCk=') }}
     name: "Stateless tests (amd_binary, old analyzer, s3 storage, DatabaseReplicated, sequential)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -953,7 +1021,8 @@ jobs:
 
   stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBwYXJhbGxlbCk=') }}
     name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, parallel)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -1001,7 +1070,8 @@ jobs:
 
   stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfYmluYXJ5LCBQYXJhbGxlbFJlcGxpY2FzLCBzMyBzdG9yYWdlLCBzZXF1ZW50aWFsKQ==') }}
     name: "Stateless tests (amd_binary, ParallelReplicas, s3 storage, sequential)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -1049,7 +1119,8 @@ jobs:
 
   stateless_tests_arm_binary_parallel:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBwYXJhbGxlbCk=') }}
     name: "Stateless tests (arm_binary, parallel)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -1097,7 +1168,8 @@ jobs:
 
   stateless_tests_arm_binary_sequential:
     runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_arm_binary]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_arm_binary]
+    if: ${{ !cancelled() && !contains(needs.*.outputs.pipeline_status, 'failure') && !contains(needs.*.outputs.pipeline_status, 'undefined') && !contains(fromJson(needs.config_workflow.outputs.data).workflow_config.cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYmluYXJ5LCBzZXF1ZW50aWFsKQ==') }}
     name: "Stateless tests (arm_binary, sequential)"
     outputs:
       data: ${{ steps.run.outputs.DATA }}
@@ -1145,7 +1217,7 @@ jobs:
 
   finish_workflow:
     runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
-    needs: [config_workflow, dockers_build_amd, dockers_build_arm, build_amd_debug, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_asan, build_arm_binary, build_amd_release, build_arm_release, docker_server_image, docker_keeper_image, install_packages_amd_release, install_packages_arm_release, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential, stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel, stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential, stateless_tests_arm_binary_parallel, stateless_tests_arm_binary_sequential]
+    needs: [config_workflow, dockers_build_amd, dockers_build_arm, dockers_build_multiplatform_manifest, build_amd_debug, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_asan, build_arm_binary, build_amd_release, build_arm_release, docker_server_image, docker_keeper_image, install_packages_amd_release, install_packages_arm_release, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_parallel, stateless_tests_amd_binary_old_analyzer_s3_storage_databasereplicated_sequential, stateless_tests_amd_binary_parallelreplicas_s3_storage_parallel, stateless_tests_amd_binary_parallelreplicas_s3_storage_sequential, stateless_tests_arm_binary_parallel, stateless_tests_arm_binary_sequential]
     if: ${{ always() }}
     name: "Finish Workflow"
     outputs:
@@ -1243,6 +1315,7 @@ jobs:
       - config_workflow
       - dockers_build_amd
       - dockers_build_arm
+      - dockers_build_multiplatform_manifest
      - build_amd_debug
       - build_amd_asan
       - build_amd_tsan
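For reference: every generated job above gates on its dependencies' `pipeline_status` outputs and on the success cache. A rough Python model of that gate, for illustration only (not code from the repository):

    def should_run(needs_status: dict[str, str], cache_success: set[str], job_name: str) -> bool:
        # Skip if any dependency reported 'failure' or never reported a status
        # at all ('undefined', the fallback in the outputs block above).
        if any(s in ("failure", "undefined") for s in needs_status.values()):
            return False
        # Skip if the job is already recorded as successful in the cache.
        return job_name not in cache_success

    # Example: all deps green, no cached success -> the job runs.
    print(should_run({"build_amd_release": "success"}, set(), "Install packages (amd_release)"))  # True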
f"--label=com.altinity.build.githash={sha}", + f"--label=com.altinity.build.version={version}", ] if direct_urls: args.append(f"--build-arg=DIRECT_DOWNLOAD_URLS='{' '.join(direct_urls)}'") diff --git a/ci/jobs/fast_test.py b/ci/jobs/fast_test.py index a6b350bcd0e8..35a898e655bb 100644 --- a/ci/jobs/fast_test.py +++ b/ci/jobs/fast_test.py @@ -155,6 +155,16 @@ def main(): os.environ["SCCACHE_IDLE_TIMEOUT"] = "7200" os.environ["SCCACHE_BUCKET"] = Settings.S3_ARTIFACT_PATH os.environ["SCCACHE_S3_KEY_PREFIX"] = "ccache/sccache" + + if Info().is_community_pr: + print( + "NOTE: Community contribution - set sccache to run without AWS credentials" + ) + os.environ["SCCACHE_S3_NO_CREDENTIALS"] = "1" + # NOTE (strtgbb): sccache will throw an error if AWS credentials are present with SCCACHE_S3_NO_CREDENTIALS=1 + os.environ.pop("AWS_SECRET_ACCESS_KEY", None) + os.environ.pop("AWS_ACCESS_KEY_ID", None) + Shell.check("sccache --show-stats", verbose=True) Utils.add_to_PATH(f"{build_dir}/programs:{current_directory}/tests") diff --git a/ci/jobs/functional_tests.py b/ci/jobs/functional_tests.py index 00ab7baaccd4..e5cd4fe1ada6 100644 --- a/ci/jobs/functional_tests.py +++ b/ci/jobs/functional_tests.py @@ -234,7 +234,15 @@ def main(): # verbose=True, # ) # NOTE(strtgbb): We pass azure credentials through the docker command, not SSM. - pass + # NOTE(strtgbb): Azure credentials don't exist in community workflow + if info.is_community_pr: + print( + "NOTE: No azure credentials provided for community PR - disable azure storage" + ) + config_installs_args += " --no-azure" + + # NOTE(strtgbb): With the above, some tests are still trying to use azure, try this: + os.environ["USE_AZURE_STORAGE_FOR_MERGE_TREE"] = "0" else: print("Disable azure for a local run") config_installs_args += " --no-azure" diff --git a/ci/jobs/scripts/integration_tests_configs.py b/ci/jobs/scripts/integration_tests_configs.py index f7aaa4cfabde..e4015ab6bd68 100644 --- a/ci/jobs/scripts/integration_tests_configs.py +++ b/ci/jobs/scripts/integration_tests_configs.py @@ -928,7 +928,7 @@ def get_tests_execution_time(info: Info, job_options: str) -> dict[str, int]: if info.pr_number == 0: branch_filter = f"head_ref = '{info.git_branch}'" else: - branch_filter = f"base_ref = '{info.base_branch}'" + branch_filter = f"head_ref = '{info.base_branch}'" build = job_options.split(",", 1)[0] diff --git a/ci/praktika/info.py b/ci/praktika/info.py index a2e971ba0720..3926126e206c 100644 --- a/ci/praktika/info.py +++ b/ci/praktika/info.py @@ -102,6 +102,10 @@ def repo_owner(self): def fork_name(self): return self.env.FORK_NAME + @property + def is_community_pr(self): + return "community" in self.env.WORKFLOW_NAME.lower() + @property def commit_message(self): return self.env.COMMIT_MESSAGE diff --git a/ci/praktika/parser.py b/ci/praktika/parser.py index 5e7eef4a949c..efb19245709d 100644 --- a/ci/praktika/parser.py +++ b/ci/praktika/parser.py @@ -48,6 +48,7 @@ class JobAddonYaml: tags: List[str] jobs: List[JobYaml] additional_jobs: List[str] + if_condition: str job_to_config: Dict[str, JobYaml] artifact_to_config: Dict[str, ArtifactYaml] secret_names_gh: List[str] @@ -78,6 +79,7 @@ def __init__(self, config: Workflow.Config): tags=config.tags, jobs=[], additional_jobs=[], + if_condition=config.if_condition, secret_names_gh=[], variable_names_gh=[], job_to_config={}, @@ -238,7 +240,8 @@ def parse(self): assert ( False ), f"Artifact [{artifact_name}] has unsupported type [{artifact.type}]" - if artifact.type == Artifact.Type.GH: + # NOTE (strtgbb): Added a 
check that provided_by is not empty, which is the case for artifacts that are defined in defs.py but not used in a workflow + if artifact.type == Artifact.Type.GH and artifact.provided_by != "": self.workflow_yaml_config.job_to_config[ artifact.provided_by ].artifacts_gh_provides.append(artifact) diff --git a/ci/praktika/workflow.py b/ci/praktika/workflow.py index 193097a62b5b..7ecdde5404b5 100644 --- a/ci/praktika/workflow.py +++ b/ci/praktika/workflow.py @@ -26,6 +26,7 @@ class Config: event: str jobs: List[Job.Config] additional_jobs: List[str] = field(default_factory=list) + if_condition: str = "" branches: List[str] = field(default_factory=list) base_branches: List[str] = field(default_factory=list) tags: List[str] = field(default_factory=list) diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py index 64dc62445920..218057e4e195 100644 --- a/ci/praktika/yaml_additional_templates.py +++ b/ci/praktika/yaml_additional_templates.py @@ -8,7 +8,6 @@ class AltinityWorkflowTemplates: AZURE_CONTAINER_NAME: ${{{{ secrets.AZURE_CONTAINER_NAME }}}} AZURE_STORAGE_ACCOUNT_URL: "https://${{{{ secrets.AZURE_ACCOUNT_NAME }}}}.blob.core.windows.net/" ROBOT_TOKEN: ${{{{ secrets.ROBOT_TOKEN }}}} - GH_TOKEN: ${{{{ github.token }}}} """ # Additional pre steps for all jobs JOB_SETUP_STEPS = """ @@ -22,6 +21,7 @@ class AltinityWorkflowTemplates: # Additional pre steps for config workflow job ADDITIONAL_CI_CONFIG_STEPS = r""" - name: Note report location to summary + if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }} env: PR_NUMBER: ${{ github.event.pull_request.number || 0 }} COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} @@ -71,7 +71,7 @@ class AltinityWorkflowTemplates: commit: {REGRESSION_HASH} arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }} RegressionTestsAarch64: needs: [config_workflow, build_arm_binary] @@ -83,7 +83,7 @@ class AltinityWorkflowTemplates: commit: {REGRESSION_HASH} arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }} """, "SignRelease": r""" diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py index 3d941d2fd1b4..6f7f4f2e8061 100644 --- a/ci/praktika/yaml_generator.py +++ b/ci/praktika/yaml_generator.py @@ -1,4 +1,5 @@ import dataclasses +import os from typing import List from . 
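For reference: the sccache changes in build_clickhouse.py and fast_test.py above boil down to one environment contract for community PRs, which have read access to the public cache bucket but no AWS keys. A minimal sketch, assuming the same env contract (the helper name is hypothetical):

    import os

    def configure_sccache_for_community_pr() -> None:  # hypothetical helper
        # Anonymous, read-only S3 access for the compile cache.
        os.environ["SCCACHE_S3_NO_CREDENTIALS"] = "1"
        # sccache refuses to start if credentials coexist with
        # SCCACHE_S3_NO_CREDENTIALS=1, so stray keys must be removed outright.
        os.environ.pop("AWS_SECRET_ACCESS_KEY", None)
        os.environ.pop("AWS_ACCESS_KEY_ID", None)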
diff --git a/ci/praktika/yaml_additional_templates.py b/ci/praktika/yaml_additional_templates.py
index 64dc62445920..218057e4e195 100644
--- a/ci/praktika/yaml_additional_templates.py
+++ b/ci/praktika/yaml_additional_templates.py
@@ -8,7 +8,6 @@ class AltinityWorkflowTemplates:
   AZURE_CONTAINER_NAME: ${{{{ secrets.AZURE_CONTAINER_NAME }}}}
   AZURE_STORAGE_ACCOUNT_URL: "https://${{{{ secrets.AZURE_ACCOUNT_NAME }}}}.blob.core.windows.net/"
   ROBOT_TOKEN: ${{{{ secrets.ROBOT_TOKEN }}}}
-  GH_TOKEN: ${{{{ github.token }}}}
 """
     # Additional pre steps for all jobs
     JOB_SETUP_STEPS = """
@@ -22,6 +21,7 @@ class AltinityWorkflowTemplates:
     # Additional pre steps for config workflow job
     ADDITIONAL_CI_CONFIG_STEPS = r"""
       - name: Note report location to summary
+        if: ${{ !failure() && env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY }}
         env:
           PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
           COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
@@ -71,7 +71,7 @@ class AltinityWorkflowTemplates:
       commit: {REGRESSION_HASH}
       arch: release
       build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
-      timeout_minutes: 300
+      timeout_minutes: 210
       workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }}
   RegressionTestsAarch64:
     needs: [config_workflow, build_arm_binary]
@@ -83,7 +83,7 @@ class AltinityWorkflowTemplates:
       commit: {REGRESSION_HASH}
       arch: aarch64
       build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
-      timeout_minutes: 300
+      timeout_minutes: 210
       workflow_config: ${{ needs.config_workflow.outputs.data.workflow_config }}
 """,
         "SignRelease": r"""
diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py
index 3d941d2fd1b4..6f7f4f2e8061 100644
--- a/ci/praktika/yaml_generator.py
+++ b/ci/praktika/yaml_generator.py
@@ -1,4 +1,5 @@
 import dataclasses
+import os
 from typing import List
 
 from . import Artifact, Job, Workflow
@@ -54,16 +55,15 @@ class Templates:
 jobs:
 {JOBS}\
 """
-    TEMPLATE_GH_TOKEN_PERMISSIONS = """\
-# Allow updating GH commit statuses and PR comments to post an actual job reports link
-permissions: write-all\
-"""
+    TEMPLATE_GH_TOKEN_PERMISSIONS = ""
 
     TEMPLATE_ENV_CHECKOUT_REF_PR = """\
+  GH_TOKEN: ${{{{ github.token }}}}
   DISABLE_CI_MERGE_COMMIT: ${{{{ vars.DISABLE_CI_MERGE_COMMIT || '0' }}}}
   DISABLE_CI_CACHE: ${{{{ github.event.inputs.no_cache || '0' }}}}
   CHECKOUT_REF: ${{{{ vars.DISABLE_CI_MERGE_COMMIT == '1' && github.event.pull_request.head.sha || '' }}}}\
 """
 
     TEMPLATE_ENV_CHECKOUT_REF_DEFAULT = """\
+  GH_TOKEN: ${{{{ github.token }}}}
   CHECKOUT_REF: ""\
 """
 
     TEMPLATE_ENV_SECRET = """\
@@ -306,7 +306,8 @@ def generate(self):
             for artifact in job.artifacts_gh_provides:
                 uploads_github.append(
                     YamlGenerator.Templates.TEMPLATE_GH_UPLOAD.format(
-                        NAME=artifact.name, PATH=artifact.path
+                        NAME=artifact.name,
+                        PATH=os.path.relpath(artifact.path, os.getcwd()),
                     )
                 )
             downloads_github = []
@@ -322,14 +323,23 @@ def generate(self):
             )
 
             if_expression = ""
+            # NOTE (strtgbb): We still want the cache logic, we use it for skipping based on PR config
             if (
-                self.workflow_config.config.enable_cache
-                and job_name_normalized != config_job_name_normalized
+                # self.workflow_config.config.enable_cache
+                # and
+                job_name_normalized != config_job_name_normalized
             ):
                 if_expression = YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION.format(
                     WORKFLOW_CONFIG_JOB_NAME=config_job_name_normalized,
                     JOB_NAME_BASE64=Utils.to_base64(job_name),
                 )
+
+            elif self.workflow_config.if_condition:
+                # If this is the config workflow and the workflow template sets an if condition, use it
+                if_expression = (
+                    f"\n    if: ${{{{ {self.workflow_config.if_condition} }}}}"
+                )
+
             if job.run_unless_cancelled:
                 if_expression = (
                     YamlGenerator.Templates.TEMPLATE_IF_EXPRESSION_NOT_CANCELLED
@@ -470,7 +480,12 @@ def generate(self):
                     VAR_NAME=secret.name
                 )
             format_kwargs["ENV_SECRETS"] = GH_VAR_ENVS + SECRET_ENVS
-            format_kwargs["ENV_SECRETS"] += AltinityWorkflowTemplates.ADDITIONAL_GLOBAL_ENV
+
+            if self.parser.config.secrets:
+                # Only add global env if there are secrets in workflow config
+                format_kwargs[
+                    "ENV_SECRETS"
+                ] += AltinityWorkflowTemplates.ADDITIONAL_GLOBAL_ENV
 
             template_1 = base_template.strip().format(
                 NAME=self.workflow_config.name,
diff --git a/ci/settings/altinity_overrides.py b/ci/settings/altinity_overrides.py
index dc91e4ada82c..5c2c550d51d5 100644
--- a/ci/settings/altinity_overrides.py
+++ b/ci/settings/altinity_overrides.py
@@ -49,6 +49,9 @@ class RunnerLabels:
 
 DISABLED_WORKFLOWS = [
     "new_pull_request.py",
+    "nightly_statistics.py",
+    "nightly_jepsen.py",
+    "VectorSearchStress.py",
 ]
 
 DEFAULT_LOCAL_TEST_WORKFLOW = "pull_request.py"
diff --git a/ci/workflows/master.py b/ci/workflows/master.py
index 9296dce38c4e..0730b19f8557 100644
--- a/ci/workflows/master.py
+++ b/ci/workflows/master.py
@@ -14,7 +14,6 @@
         # *JobConfigs.tidy_build_arm_jobs,
         *JobConfigs.build_jobs,
         *JobConfigs.release_build_jobs,
-        # *[  # NOTE (strtgbb): we don't run special build jobs
         #     job.set_dependency(
         #         REGULAR_BUILD_NAMES
         #         + [JobConfigs.tidy_build_arm_jobs[0].name]  # NOTE (strtgbb): we don't run tidy build jobs
@@ -27,11 +26,11 @@
         *JobConfigs.install_check_master_jobs,
         *JobConfigs.compatibility_test_jobs,
         *JobConfigs.functional_tests_jobs,
-        # *JobConfigs.functional_tests_jobs_azure_master_only,  # NOTE (strtgbb): disabled due to ASAN build failure
+        *JobConfigs.functional_tests_jobs_azure_master_only,
         *JobConfigs.integration_test_jobs_required,
         *JobConfigs.integration_test_jobs_non_required,
         *JobConfigs.stress_test_jobs,
-        # *JobConfigs.stress_test_azure_jobs,  # NOTE (strtgbb): disabled due to ASAN build failure
+        *JobConfigs.stress_test_azure_jobs,
         *JobConfigs.ast_fuzzer_jobs,
         *JobConfigs.buzz_fuzzer_jobs,
         # *JobConfigs.performance_comparison_with_master_head_jobs,  # NOTE (strtgbb): fails due to GH secrets not being handled properly
@@ -42,7 +41,13 @@
         # *JobConfigs.sqlancer_master_jobs,
         JobConfigs.sqltest_master_job,
     ],
-    additional_jobs=["GrypeScan", "Regression", "SignRelease", "CIReport", "SourceUpload"],
+    additional_jobs=[
+        "GrypeScan",
+        "Regression",
+        "SignRelease",
+        "CIReport",
+        "SourceUpload",
+    ],
     artifacts=[
         *ArtifactConfigs.unittests_binaries,
         *ArtifactConfigs.clickhouse_binaries,
diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py
index 072545e8f26f..09abdf1074ab 100644
--- a/ci/workflows/pull_request.py
+++ b/ci/workflows/pull_request.py
@@ -47,10 +47,7 @@
             job.set_dependency(STYLE_AND_FAST_TESTS)
             for job in JobConfigs.extra_validation_build_jobs
         ],
-        *[
-            job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
-            for job in JobConfigs.release_build_jobs
-        ],
+        *JobConfigs.release_build_jobs,
         # *[
         #     job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
         #     for job in JobConfigs.special_build_jobs
@@ -80,12 +77,8 @@
             for job in JobConfigs.integration_test_jobs_non_required
         ],
         *JobConfigs.unittest_jobs,
-        JobConfigs.docker_server.set_dependency(
-            FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
-        ),
-        JobConfigs.docker_keeper.set_dependency(
-            FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
-        ),
+        JobConfigs.docker_server,
+        JobConfigs.docker_keeper,
         *[
             job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
             for job in JobConfigs.install_check_jobs
diff --git a/ci/workflows/pull_request_community.py b/ci/workflows/pull_request_community.py
new file mode 100644
index 000000000000..cd336279f1c6
--- /dev/null
+++ b/ci/workflows/pull_request_community.py
@@ -0,0 +1,137 @@
+import copy
+from praktika import Workflow, Artifact
+
+from ci.defs.defs import BASE_BRANCH, DOCKERS, ArtifactConfigs, JobNames
+from ci.defs.job_configs import JobConfigs
+from ci.jobs.scripts.workflow_hooks.filter_job import should_skip_job
+
+FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES = [
+    job.name
+    for job in JobConfigs.functional_tests_jobs
+    if any(
+        substr in job.name
+        for substr in (
+            "_debug, parallel",
+            "_binary, parallel",
+            "_asan, distributed plan, parallel",
+            # "_tsan, parallel",
+        )
+    )
+]
+
+PLAIN_FUNCTIONAL_TEST_JOB = [
+    j for j in JobConfigs.functional_tests_jobs if "amd_debug, parallel" in j.name
+][0]
+
+workflow = Workflow.Config(
+    name="Community PR",
+    event=Workflow.Event.PULL_REQUEST,
+    base_branches=[BASE_BRANCH, "releases/*", "antalya-*"],
+    if_condition="github.repository != github.event.pull_request.head.repo.full_name",
+    jobs=[
+        JobConfigs.fast_test,
+        *[job.set_dependency([JobNames.FAST_TEST]) for job in JobConfigs.build_jobs],
+        *[
+            job.set_dependency([JobNames.FAST_TEST])
+            for job in JobConfigs.extra_validation_build_jobs
+        ],
+        *[
+            job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+            for job in JobConfigs.release_build_jobs
+        ],
+        # *[
+        #     job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+        #     for job in JobConfigs.special_build_jobs
+        # ],
+        # TODO: stabilize new jobs and remove set_allow_merge_on_failure
+        JobConfigs.lightweight_functional_tests_job,
+        # JobConfigs.stateless_tests_targeted_pr_jobs[0].set_allow_merge_on_failure(),  # NOTE (strtgbb): Needs configuration
+        # JobConfigs.integration_test_targeted_pr_jobs[0].set_allow_merge_on_failure(),
+        # *JobConfigs.stateless_tests_flaky_pr_jobs,
+        # *JobConfigs.integration_test_asan_flaky_pr_jobs,
+        # JobConfigs.bugfix_validation_ft_pr_job,
+        # JobConfigs.bugfix_validation_it_job,
+        *[
+            j.set_dependency(
+                FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
+                if j.name not in FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES
+                else []
+            )
+            for j in JobConfigs.functional_tests_jobs
+        ],
+        *[
+            job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+            for job in JobConfigs.integration_test_jobs_required[:]
+        ],
+        *[
+            job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+            for job in JobConfigs.integration_test_jobs_non_required
+        ],
+        *JobConfigs.unittest_jobs,
+        *[
+            job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+            for job in JobConfigs.install_check_jobs
+        ],
+        *[
+            job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+            for job in JobConfigs.compatibility_test_jobs
+        ],
+        # *[
+        #     job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+        #     for job in JobConfigs.stress_test_jobs
+        # ],  # NOTE (strtgbb): Does not support github artifacts
+        # *[
+        #     job.set_dependency(FUNCTIONAL_TESTS_PARALLEL_BLOCKING_JOB_NAMES)
+        #     for job in JobConfigs.upgrade_test_jobs
+        # ],  # TODO: customize for our repo
+    ],
+    artifacts=[
+        *ArtifactConfigs.unittests_binaries,
+        *ArtifactConfigs.clickhouse_binaries,
+        *ArtifactConfigs.clickhouse_stripped_binaries,
+        *ArtifactConfigs.clickhouse_debians,
+        *ArtifactConfigs.clickhouse_rpms,
+        *ArtifactConfigs.clickhouse_tgzs,
+        ArtifactConfigs.fuzzers,
+        ArtifactConfigs.fuzzers_corpus,
+    ],
+    dockers=DOCKERS,
+    disable_dockers_build=True,
+    enable_dockers_manifest_merge=False,
+    secrets=[],
+    enable_job_filtering_by_changes=False,  # TODO: Change this back?
+    enable_cache=False,
+    enable_report=False,
+    enable_cidb=False,
+    enable_merge_ready_status=False,
+    enable_gh_summary_comment=False,
+    enable_commit_status_on_failure=False,
+    enable_open_issues_check=False,
+    enable_slack_feed=False,
+    pre_hooks=[
+        "python3 ./ci/jobs/scripts/workflow_hooks/store_data.py",
+        "python3 ./ci/jobs/scripts/workflow_hooks/version_log.py",
+        "python3 ./ci/jobs/scripts/workflow_hooks/parse_ci_tags.py",
+    ],
+    workflow_filter_hooks=[should_skip_job],
+    post_hooks=[],
+    job_aliases={
+        "integration": JobConfigs.integration_test_jobs_non_required[
+            0
+        ].name,  # plain integration test job, no old analyzer, no dist plan
+        "functional": PLAIN_FUNCTIONAL_TEST_JOB.name,
+    },
+)
+
+# NOTE (strtgbb): use deepcopy to avoid modifying workflows generated after this one
+for i, job in enumerate(workflow.jobs):
+    workflow.jobs[i] = copy.deepcopy(job)
+    workflow.jobs[i].enable_commit_status = False
+
+for i, artifact in enumerate(workflow.artifacts):
+    workflow.artifacts[i] = copy.deepcopy(artifact)
+    workflow.artifacts[i].type = Artifact.Type.GH
+
+WORKFLOWS = [
+    workflow,
+]
diff --git a/ci/workflows/release_builds.py b/ci/workflows/release_builds.py
index 92529427ddad..97a965d29a69 100644
--- a/ci/workflows/release_builds.py
+++ b/ci/workflows/release_builds.py
@@ -48,14 +48,17 @@
         *ArtifactConfigs.clickhouse_tgzs,
     ],
     dockers=DOCKERS,
+    enable_dockers_manifest_merge=True,
     secrets=SECRETS,
     enable_job_filtering_by_changes=False,
     enable_cache=False,
     enable_report=True,
     enable_cidb=True,
+    enable_merge_ready_status=False,  # NOTE (strtgbb): we don't use this, TODO, see if we can use it
     enable_gh_summary_comment=False,
     enable_commit_status_on_failure=True,
     enable_open_issues_check=False,
+    enable_slack_feed=False,
     pre_hooks=[
         "python3 ./ci/jobs/scripts/workflow_hooks/store_data.py",
         "python3 ./ci/jobs/scripts/workflow_hooks/version_log.py",
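For reference: pull_request_community.py above deep-copies jobs and artifacts because `Job.Config` and artifact objects are shared module-level singletons; mutating them in place would leak into every workflow generated after this one. A sketch of the pitfall with a stand-in class (names here are illustrative, not praktika's):

    import copy
    from dataclasses import dataclass

    @dataclass
    class JobCfg:  # stand-in for praktika's Job.Config
        name: str
        enable_commit_status: bool = True

    SHARED = [JobCfg("Fast test")]  # shared across workflow modules

    community_jobs = [copy.deepcopy(j) for j in SHARED]
    for j in community_jobs:
        j.enable_commit_status = False  # local change only

    assert SHARED[0].enable_commit_status is True  # shared config untouched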
diff --git a/docker/keeper/Dockerfile b/docker/keeper/Dockerfile
index 60e99c30581e..aae7fe7b524f 100644
--- a/docker/keeper/Dockerfile
+++ b/docker/keeper/Dockerfile
@@ -35,11 +35,8 @@ RUN arch=${TARGETARCH:-amd64} \
         arm64) ln -sf /lib/ld-2.35.so /lib/ld-linux-aarch64.so.1 ;; \
     esac
 
-# lts / testing / prestable / etc
-ARG REPO_CHANNEL="stable"
-ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="25.12.4.35"
-ARG PACKAGES="clickhouse-keeper"
+# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile
+
 ARG DIRECT_DOWNLOAD_URLS=""
 
 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -63,12 +60,7 @@ RUN arch=${TARGETARCH:-amd64} \
         && wget -c -q "$url" \
         ; done \
     else \
-        for package in ${PACKAGES}; do \
-            cd /tmp \
-            && echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
-            && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
-            && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz.sha512" \
-        ; done \
+        exit 1; \
     fi \
     && cat *.tgz.sha512 | sha512sum -c \
     && for file in *.tgz; do \
diff --git a/docker/server/Dockerfile.alpine b/docker/server/Dockerfile.alpine
index eea90aa82219..39fde82efda5 100644
--- a/docker/server/Dockerfile.alpine
+++ b/docker/server/Dockerfile.alpine
@@ -32,11 +32,10 @@ RUN arch=${TARGETARCH:-amd64} \
         arm64) ln -sf /lib/ld-2.35.so /lib/ld-linux-aarch64.so.1 ;; \
     esac
 
-# lts / testing / prestable / etc
-ARG REPO_CHANNEL="stable"
-ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="25.12.4.35"
-ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
+
+
+# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile
+
 ARG DIRECT_DOWNLOAD_URLS=""
 
 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -59,11 +58,7 @@ RUN arch=${TARGETARCH:-amd64} \
         && wget -c -q "$url" \
         ; done \
     else \
-        for package in ${PACKAGES}; do \
-            echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
-            && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
-            && wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz.sha512" \
-        ; done \
+        exit 1; \
     fi \
     && cat *.tgz.sha512 | sed 's:/output/:/tmp/:' | sha512sum -c \
     && for file in *.tgz; do \
diff --git a/docker/server/Dockerfile.ubuntu b/docker/server/Dockerfile.ubuntu
index 1a84551b8a79..6c76b8dee446 100644
--- a/docker/server/Dockerfile.ubuntu
+++ b/docker/server/Dockerfile.ubuntu
@@ -30,23 +30,12 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
     && busybox --install -s \
     && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*
 
-ARG REPO_CHANNEL="stable"
-ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
-ARG VERSION="25.12.4.35"
-ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
-
 #docker-official-library:off
 # The part between `docker-official-library` tags is related to our builds
 
-# set non-empty deb_location_url url to create a docker image
-# from debs created by CI build, for example:
-# docker build . --network host --build-arg version="21.4.1.6282" --build-arg deb_location_url="https://..." -t ...
-ARG deb_location_url=""
-ARG DIRECT_DOWNLOAD_URLS=""
+# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile
 
-# set non-empty single_binary_location_url to create docker image
-# from a single binary url (useful for non-standard builds - with sanitizers, for arm64).
-ARG single_binary_location_url=""
+ARG DIRECT_DOWNLOAD_URLS=""
 
 ARG TARGETARCH
 
@@ -62,64 +51,7 @@ RUN if [ -n "${DIRECT_DOWNLOAD_URLS}" ]; then \
     && rm -rf /tmp/* ; \
     fi
 
-# install from a web location with deb packages
-RUN arch="${TARGETARCH:-amd64}" \
-    && if [ -n "${deb_location_url}" ]; then \
-        echo "installing from custom url with deb packages: ${deb_location_url}" \
-        && rm -rf /tmp/clickhouse_debs \
-        && mkdir -p /tmp/clickhouse_debs \
-        && for package in ${PACKAGES}; do \
-            { wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_${arch}.deb" -P /tmp/clickhouse_debs || \
-                wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_all.deb" -P /tmp/clickhouse_debs ; } \
-            || exit 1 \
-        ; done \
-        && dpkg -i /tmp/clickhouse_debs/*.deb \
-        && rm -rf /tmp/* ; \
-    fi
-
-# install from a single binary
-RUN if [ -n "${single_binary_location_url}" ]; then \
-        echo "installing from single binary url: ${single_binary_location_url}" \
-        && rm -rf /tmp/clickhouse_binary \
-        && mkdir -p /tmp/clickhouse_binary \
-        && wget --progress=bar:force:noscroll "${single_binary_location_url}" -O /tmp/clickhouse_binary/clickhouse \
-        && chmod +x /tmp/clickhouse_binary/clickhouse \
-        && /tmp/clickhouse_binary/clickhouse install --user "clickhouse" --group "clickhouse" \
-        && rm -rf /tmp/* ; \
-    fi
-
-# The rest is the same in the official docker and in our build system
-#docker-official-library:on
-
-# A fallback to installation from ClickHouse repository
-# It works unless the clickhouse binary already exists
-RUN clickhouse local -q 'SELECT 1' >/dev/null 2>&1 && exit 0 || : \
-    ; apt-get update \
-    && apt-get install --yes --no-install-recommends \
-        dirmngr \
-        gnupg2 \
-    && mkdir -p /etc/apt/sources.list.d \
-    && GNUPGHOME=$(mktemp -d) \
-    && GNUPGHOME="$GNUPGHOME" gpg --batch --no-default-keyring \
-        --keyring /usr/share/keyrings/clickhouse-keyring.gpg \
-        --keyserver hkp://keyserver.ubuntu.com:80 \
-        --recv-keys 3a9ea1193a97b548be1457d48919f6bd2b48d754 \
-    && rm -rf "$GNUPGHOME" \
-    && chmod +r /usr/share/keyrings/clickhouse-keyring.gpg \
-    && echo "${REPOSITORY}" > /etc/apt/sources.list.d/clickhouse.list \
-    && echo "installing from repository: ${REPOSITORY}" \
-    && apt-get update \
-    && for package in ${PACKAGES}; do \
-        packages="${packages} ${package}=${VERSION}" \
-    ; done \
-    && apt-get install --yes --no-install-recommends ${packages} || exit 1 \
-    && rm -rf \
-        /var/lib/apt/lists/* \
-        /var/cache/debconf \
-        /tmp/* \
-    && apt-get autoremove --purge -yq dirmngr gnupg2 \
-    && chmod ugo+Xrw -R /etc/clickhouse-server /etc/clickhouse-client
-# The last chmod is here to make the next one is No-op in docker official library Dockerfile
+# NOTE (strtgbb): Removed install methods other than direct URL install to tidy the Dockerfile
 
 # post install
 # we need to allow "others" access to clickhouse folder, because docker container