From b053455bd9c23b3ece5ab62220f742ed18cae64f Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 5 May 2026 12:06:05 -0400 Subject: [PATCH 1/2] bump version, regression and sign fixes --- .../workflows/regression-reusable-suite.yml | 193 +++ .github/workflows/regression.yml | 1042 ++++++----------- .github/workflows/release_branches.yml | 27 +- cmake/autogenerated_versions.txt | 6 +- tests/ci/ci.py | 4 +- tests/ci/sign_release.py | 15 + 6 files changed, 566 insertions(+), 721 deletions(-) create mode 100644 .github/workflows/regression-reusable-suite.yml diff --git a/.github/workflows/regression-reusable-suite.yml b/.github/workflows/regression-reusable-suite.yml new file mode 100644 index 000000000000..5c0fab69c701 --- /dev/null +++ b/.github/workflows/regression-reusable-suite.yml @@ -0,0 +1,193 @@ +name: Regression suite +on: + workflow_call: + inputs: + ref: + description: "Commit SHA to checkout. Default: current (empty string)." + type: string + default: "" + workflow_config: + required: true + type: string + flags: + required: false + type: string + output_format: + required: true + type: string + extra_args: + required: false + type: string + suite_name: + required: true + type: string + suite_executable: + required: false + type: string + default: "regression.py" + timeout_minutes: + required: true + type: number + storage_path: + required: false + type: string + default: "" + regression_args: + required: false + type: string + default: "" + runner_type: + required: false + type: string + default: "" + runner_arch: + required: false + type: string + default: "x86" + job_name: + required: false + type: string + default: "" + part: + required: false + type: string + default: "" + build_sha: + required: false + type: string + default: "" + set_commit_status: + required: false + type: boolean + default: false +jobs: + suite: + name: ${{ format('{0}{1}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', 
inputs.part) || '') }} + runs-on: [ + "self-hosted", + "altinity-on-demand", + "${{ inputs.runner_type }}", + ] + timeout-minutes: ${{ inputs.timeout_minutes }} + env: + SUITE: ${{ inputs.suite_name }} + SUITE_EXECUTABLE: ${{ inputs.suite_executable }} + STORAGE: ${{ inputs.storage_path }} + PART: ${{ inputs.part }} + REPORT_JOB_NAME: ${{ format('{0}{1}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '') }} + # AWS credentials + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + # Docker credentials + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + # Database credentials + CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CHECKS_DATABASE_USER: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }} + CHECKS_DATABASE_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }} + # LocalStack token + LOCALSTACK_AUTH_TOKEN: ${{ secrets.LOCALSTACK_AUTH_TOKEN }} + # Python encoding + PYTHONIOENCODING: utf-8 + build_sha: ${{ inputs.build_sha }} + pr_number: ${{ github.event.number }} + artifacts: builds + # Args + args: --test-to-end + --no-colors + --local + --collect-service-logs + --output ${{ inputs.output_format }} + --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" user.name="${GITHUB_ACTOR}" version="${{ fromJson(inputs.workflow_config).JOB_KV_DATA.version.string }}" package="$clickhouse_path" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$REPORT_JOB_NAME" job.retry=$GITHUB_RUN_ATTEMPT job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" + --cicd + --log raw.log + ${{ inputs.flags != 'none' && inputs.flags || ''}} + ${{ inputs.extra_args }} + artifact_paths: | + ./report.html + ./*.log.txt + ./*.log + 
./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + + steps: + - name: โคต๏ธ Checkout + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.ref }} + + - name: โ™ป๏ธ Cache setup + uses: ./.github/actions/cache-setup + + - name: ๐Ÿ› ๏ธ Setup + run: .github/setup.sh + + - name: ๐Ÿ“ฆ Get deb url + env: + S3_BASE_URL: https://altinity-build-artifacts.s3.amazonaws.com/ + PR_NUMBER: ${{ github.event.pull_request.number || 0 }} + run: | + REPORTS_PATH=${{ runner.temp }}/reports_dir + mkdir -p $REPORTS_PATH + cat > $REPORTS_PATH/workflow_config.json << 'EOF' + ${{ toJson(fromJson(inputs.workflow_config).WORKFLOW_CONFIG) }} + EOF + + python3 .github/get-deb-url.py --github-env $GITHUB_ENV --workflow-config $REPORTS_PATH/workflow_config.json --s3-base-url $S3_BASE_URL --pr-number $PR_NUMBER --branch-name ${{ github.ref_name }} --commit-hash ${{ inputs.build_sha || github.sha }} --binary + + - name: ๐Ÿ”„ Process regression args + run: | + REGRESSION_ARGS='${{ inputs.regression_args }}' + # AWS replacements + REGRESSION_ARGS="${REGRESSION_ARGS//'{{AWS_BUCKET}}'/${{ secrets.REGRESSION_AWS_S3_BUCKET }}}" + REGRESSION_ARGS="${REGRESSION_ARGS//'{{AWS_REGION}}'/${{ secrets.REGRESSION_AWS_S3_REGION }}}" + REGRESSION_ARGS="${REGRESSION_ARGS//'{{AWS_KEY_ID}}'/${{ secrets.REGRESSION_AWS_S3_KEY_ID }}}" + REGRESSION_ARGS="${REGRESSION_ARGS//'{{AWS_ACCESS_KEY}}'/${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}}" + # GCS replacements + REGRESSION_ARGS="${REGRESSION_ARGS//'{{GCS_URI}}'/${{ secrets.REGRESSION_GCS_URI }}}" + REGRESSION_ARGS="${REGRESSION_ARGS//'{{GCS_KEY_ID}}'/${{ secrets.REGRESSION_GCS_KEY_ID }}}" + REGRESSION_ARGS="${REGRESSION_ARGS//'{{GCS_KEY_SECRET}}'/${{ secrets.REGRESSION_GCS_KEY_SECRET }}}" + # Azure replacements + REGRESSION_ARGS="${REGRESSION_ARGS//'{{AZURE_ACCOUNT_NAME}}'/${{ secrets.AZURE_ACCOUNT_NAME }}}" + 
REGRESSION_ARGS="${REGRESSION_ARGS//'{{AZURE_STORAGE_KEY}}'/${{ secrets.AZURE_STORAGE_KEY }}}" + REGRESSION_ARGS="${REGRESSION_ARGS//'{{AZURE_CONTAINER_NAME}}'/${{ secrets.AZURE_CONTAINER_NAME }}}" + echo "REGRESSION_ARGS=$REGRESSION_ARGS" >> $GITHUB_ENV + + - name: ๐Ÿงช Run ${{ env.SUITE }} suite + id: run_suite + run: python3 + -u ${{ env.SUITE }}/${{ env.SUITE_EXECUTABLE }} + --clickhouse ${{ env.clickhouse_path }} + ${{ env.REGRESSION_ARGS }} + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + + - name: ๐Ÿ“Š Set Commit Status + if: ${{ !cancelled() && inputs.set_commit_status }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + JOB_OUTCOME: ${{ steps.run_suite.outcome }} + SUITE_NAME: ${{ format('Regression {0} {1}{2}', inputs.runner_arch, inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '') }} + run: python3 .github/set_builds_status.py + + - name: ๐Ÿ“ Create and upload logs + if: ${{ !cancelled() }} + run: .github/create_and_upload_logs.sh 1 + + - name: ๐Ÿ“ค Upload logs to results database + if: ${{ !cancelled() }} + timeout-minutes: 20 + run: .github/upload_results_to_database.sh 1 + + - uses: actions/upload-artifact@v4 + if: ${{ !cancelled() }} + with: + name: ${{ format('{0}{1}-artifacts-{2}{3}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '', inputs.runner_arch, contains(inputs.extra_args, '--use-keeper') && '_keeper' || '_zookeeper') }} + path: ${{ env.artifact_paths }} + diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index bcd054d15dff..d30ee15f6520 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -3,7 +3,7 @@ name: Regression test workflow - Release workflow_call: inputs: runner_type: - description: the label of runner to use, can be a simple string or a comma-separated list + description: the (meta-)label of 
runner to use required: true type: string commit: @@ -35,6 +35,10 @@ name: Regression test workflow - Release additional_envs: description: additional ENV variables to setup the job type: string + workflow_config: + description: workflow config for the run + required: true + type: string secrets: secret_envs: description: if given, it's passed to the environments @@ -60,8 +64,8 @@ name: Regression test workflow - Release DOCKER_USERNAME: description: username of the docker user. required: true - DOCKER_PASSWORD: - description: password to the docker user. + DOCKER_TOKEN: + description: token of the docker user. required: true REGRESSION_AWS_S3_BUCKET: description: aws s3 bucket used for regression tests. @@ -92,7 +96,7 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_TOKEN }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} CHECKS_DATABASE_USER: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }} CHECKS_DATABASE_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }} @@ -104,7 +108,6 @@ env: --parallel 1 --log raw.log --with-analyzer - artifacts: builds artifact_paths: | ./report.html ./*.log.txt @@ -114,620 +117,293 @@ env: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - build_sha: ${{ inputs.build_sha }} - pr_number: ${{ github.event.number }} - event_name: ${{ github.event_name }} jobs: - runner_labels_setup: - name: Compute proper runner labels for the rest of the jobs - runs-on: ubuntu-latest - outputs: - runner_labels: ${{ steps.setVariables.outputs.runner_labels }} - steps: - - id: setVariables - name: Prepare runner_labels variables for the later steps - run: | - - # Prepend self-hosted - input="self-hosted, ${input}" - - # Remove all whitespace - input="$(echo ${input} | tr -d [:space:])" - # Make something like a JSON 
array from comma-separated list - input="[ '${input//\,/\'\, \'}' ]" - - echo "runner_labels=$input" >> ${GITHUB_OUTPUT} - env: - input: ${{ inputs.runner_type }} - Common: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'common') strategy: fail-fast: false matrix: - SUITE: [aes_encryption, atomic_insert, base_58, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, tiered_storage, version, window_functions] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=${{ matrix.SUITE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" 
repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} ${{ matrix.SUITE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + SUITE: [aes_encryption, atomic_insert, attach, base_58, clickhouse_keeper_failover,data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, jwt_authentication, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, selects, session_timezone, settings, version, window_functions] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ${{ matrix.SUITE }} + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: ${{ matrix.SUITE }} + secrets: inherit AggregateFunctions: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + 
contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'aggregate_functions') strategy: fail-fast: false matrix: PART: [1, 2, 3] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=aggregate_functions - PART=${{ matrix.PART }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --only "part ${{ matrix.PART }}/*" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ 
steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} ${{ env.SUITE }}-${{ matrix.PART }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: aggregate_functions + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: aggregate_functions + extra_args: --only "part ${{ matrix.PART }}/*" + secrets: inherit Alter: - strategy: - fail-fast: false - matrix: - ONLY: [replace, move] - include: - - ONLY: attach - PART: 1 - - ONLY: attach - PART: 2 - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=alter - STORAGE=/${{ matrix.ONLY }}_partition - PART='${{ matrix.PART }}' - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH 
}}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u alter/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --only "/alter/${{ matrix.ONLY }} partition/${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}*" - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.ONLY }}${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition ${{ matrix.PART }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: alter-${{ matrix.ONLY }}${{ matrix.PART && format('-{0}', matrix.PART) || '' }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'alter') + strategy: + fail-fast: false + matrix: + ONLY: [replace, move] + include: + - ONLY: 
attach + PART: 1 + - ONLY: attach + PART: 2 + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: alter + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.ONLY }}_partition + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: alter_${{ matrix.ONLY }} + extra_args: --only "/alter/${{ matrix.ONLY }} partition/${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}*" + secrets: inherit Benchmark: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'benchmark') strategy: fail-fast: false matrix: STORAGE: [minio, aws_s3, gcs] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: 
EXITCODE=0; - python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Benchmark ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ontime_benchmark + suite_executable: benchmark.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: 
${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.STORAGE }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: benchmark_${{ matrix.STORAGE }} + regression_args: --storage ${{ matrix.STORAGE }} --gcs-uri {{GCS_URI}} --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} + secrets: inherit ClickHouseKeeper: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'clickhouse_keeper') strategy: fail-fast: false matrix: PART: [1, 2] SSL: [ssl, no_ssl] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/${{ matrix.SSL }} - PART=${{ matrix.PART }} - SSL=${{ matrix.SSL == 'ssl' && '--ssl' || '' }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py ${{ env.SSL }} - --clickhouse-binary-path ${{ 
env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }}, ${{ matrix.SSL }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --only "part ${{ matrix.PART }}/*" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper ${{ matrix.SSL }} ${{ matrix.PART }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-${{ matrix.SSL }}-artifacts - path: ${{ env.artifact_paths }} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: clickhouse_keeper + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.SSL }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: clickhouse_keeper_${{ matrix.SSL }} + extra_args: ${{ matrix.SSL == 'ssl' && '--ssl' || '' }} --only "part ${{ matrix.PART }}/*" + secrets: inherit + Iceberg: + if: | + 
fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'iceberg') + strategy: + fail-fast: false + matrix: + PART: [1, 2] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: iceberg + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: iceberg + extra_args: --only ${{ matrix.PART == 1 && '"/iceberg/iceberg engine/rest catalog/*" "/iceberg/s3 table function/*" "/iceberg/icebergS3 table function/*" "/iceberg/iceberg cache/*"' || '"/iceberg/iceberg engine/glue catalog/*" "/iceberg/iceberg table engine/*" "/iceberg/export partition/*"' }} + secrets: inherit LDAP: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'ldap') strategy: fail-fast: false matrix: SUITE: [authentication, external_user_directory, role_mapping] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ 
env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} LDAP ${{ matrix.SUITE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ldap/${{ matrix.SUITE }} + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + 
set_commit_status: true + job_name: ldap_${{ matrix.SUITE }} + secrets: inherit Parquet: - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=parquet - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Parquet" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - 
name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'parquet') + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: parquet + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: parquet + secrets: inherit ParquetS3: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'parquet') strategy: fail-fast: false matrix: STORAGE: [minio, aws_s3] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=parquet - STORAGE=${{ matrix.STORAGE}} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - 
name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --storage ${{ matrix.STORAGE }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Parquet ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: parquet + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + 
timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: ${{ matrix.STORAGE }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: parquet_${{ matrix.STORAGE }} + regression_args: --storage ${{ matrix.STORAGE }} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --only "/parquet/${{ matrix.STORAGE }}/*" + secrets: inherit + RBAC: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'rbac') + strategy: + fail-fast: false + matrix: + PART: [1, 2, 3] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: rbac + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: rbac + extra_args: --only "/rbac/part ${{ matrix.PART }}/*" + secrets: inherit SSLServer: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'ssl_server') strategy: fail-fast: false matrix: PART: [1, 2, 3] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=ssl_server - PART=${{ matrix.PART 
}} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --only "part ${{ matrix.PART }}/*" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} ${{ env.SUITE }}-${{ matrix.PART }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ 
inputs.workflow_config }} + suite_name: ssl_server + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: ssl_server + extra_args: --only "part ${{ matrix.PART }}/*" + secrets: inherit S3: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 's3') strategy: fail-fast: false matrix: @@ -736,143 +412,97 @@ jobs: include: - STORAGE: minio PART: 3 - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=s3 - PART=${{ matrix.PART }} - STORAGE=/${{ matrix.STORAGE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - 
--gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }} - --azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }} - --azure-container ${{ secrets.AZURE_CONTAINER_NAME }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }}-${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - --only ":/try*" ":/part ${{ matrix.PART }}/*" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}-${{ matrix.PART }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: s3 + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + 
timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.STORAGE }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: s3_${{ matrix.STORAGE }} + regression_args: --storage ${{ matrix.STORAGE }} --gcs-uri {{GCS_URI}} --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --azure-account-name {{AZURE_ACCOUNT_NAME}} --azure-storage-key {{AZURE_STORAGE_KEY}} --azure-container {{AZURE_CONTAINER_NAME}} + extra_args: --only ":/try*" ":/part ${{ matrix.PART }}/*" + secrets: inherit + + S3Export: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 's3_export') + strategy: + fail-fast: false + matrix: + PART: [part, partition] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: s3 + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /minio + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: s3_export + regression_args: --storage minio + extra_args: --only ":/try*" "minio/export tests/export ${{ matrix.PART }}/*" + secrets: inherit + + Swarms: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'swarms') + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + 
suite_name: swarms + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: swarms + secrets: inherit TieredStorage: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'tiered_storage') strategy: fail-fast: false matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ 
secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Tiered Storage ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + STORAGE: [local, minio, s3amazon, s3gcs] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: tiered_storage + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.STORAGE }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: tiered_storage_${{ matrix.STORAGE }} + regression_args: --aws-s3-access-key {{AWS_ACCESS_KEY}} 
--aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-uri https://s3.{{AWS_REGION}}.amazonaws.com/{{AWS_BUCKET}}/data/ --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --gcs-uri {{GCS_URI}} + extra_args: ${{ matrix.STORAGE != 'local' && format('--with-{0}', matrix.STORAGE) || '' }} + secrets: inherit diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f9a2953e0d2a..a8ff62bd04f7 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1,5 +1,6 @@ # yamllint disable rule:comments-indentation name: ReleaseBranchCI +run-name: "${{ github.event.inputs.workflow_name || ' ' }}" env: # Force the stdout and stderr streams to be unbuffered @@ -20,19 +21,22 @@ on: # yamllint disable-line rule:truthy - reopened - opened branches: - # Anything/24.8 (e.g customizations/24.8.x) - - '**24.8*' + - 'customizations/*' + - 'releases/*' + - 'stable-*' release: types: - published - prereleased push: - branches: - - 'releases/24.8**' - - 'customizations/24.8**' tags: - '*' workflow_dispatch: + inputs: + workflow_name: + description: 'Name of the workflow' + required: false + type: string jobs: RunConfig: @@ -552,22 +556,22 @@ jobs: uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: altinity-on-demand, altinity-regression-tester - commit: b72ac10337ea9d7a0f764b86a40f4bb7dc0f81ff + runner_type: altinity-regression-tester + commit: release arch: release build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 RegressionTestsAarch64: needs: [RunConfig, BuilderDebAarch64] if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'regression') && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'aarch64')}} uses: ./.github/workflows/regression.yml secrets: inherit with: - runner_type: 
altinity-on-demand, altinity-regression-tester-aarch64 - commit: b72ac10337ea9d7a0f764b86a40f4bb7dc0f81ff + runner_type: altinity-regression-tester-aarch64 + commit: release arch: aarch64 build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }} - timeout_minutes: 300 + timeout_minutes: 210 SignRelease: needs: [RunConfig, BuilderDebRelease] if: ${{ !failure() && !cancelled() }} @@ -641,6 +645,7 @@ jobs: - RegressionTestsAarch64 - GrypeScan - SignRelease + - SignAarch64 runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64] steps: - name: Check out repository code diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt index d072f08317bc..2020313cb7fd 100644 --- a/cmake/autogenerated_versions.txt +++ b/cmake/autogenerated_versions.txt @@ -10,10 +10,10 @@ SET(VERSION_GITHASH c8a1e828dcf9832dc2d71adcbd50c698f93bb69b) #10000 for altinitystable candidates #20000 for altinityedge candidates -SET(VERSION_TWEAK 10545) +SET(VERSION_TWEAK 10546) SET(VERSION_FLAVOUR altinitytest) -SET(VERSION_DESCRIBE v24.8.14.10545.altinitytest) -SET(VERSION_STRING 24.8.14.10545.altinitytest) +SET(VERSION_DESCRIBE v24.8.14.10546.altinitytest) +SET(VERSION_STRING 24.8.14.10546.altinitytest) # end of autochange diff --git a/tests/ci/ci.py b/tests/ci/ci.py index 95edf2cd7501..f4c4cf7ee9b6 100644 --- a/tests/ci/ci.py +++ b/tests/ci/ci.py @@ -296,7 +296,9 @@ def _pre_action(s3, job_name, batch, indata, pr_info): # for release/master branches reports must be from the same branch report_prefix = "" - if pr_info.is_master or pr_info.is_release: + if pr_info.is_push_event and pr_info.ref.startswith("refs/tags/"): + report_prefix = Utils.normalize_string(pr_info.head_ref) + elif pr_info.is_master or pr_info.is_release: # do not set report prefix for scheduled or dispatched wf (in case it started from feature branch while # testing), otherwise reports won't be found if not (pr_info.is_scheduled or 
pr_info.is_dispatched): diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py index 8a5827097c8b..a3988aef3485 100644 --- a/tests/ci/sign_release.py +++ b/tests/ci/sign_release.py @@ -6,6 +6,8 @@ from s3_helper import S3Helper from pr_info import PRInfo from build_download_helper import download_builds_filter +from report import JobReport, TestResult, OK, SUCCESS +from stopwatch import Stopwatch import hashlib from pathlib import Path @@ -47,6 +49,7 @@ def sign_file(file_path): return out_file_path def main(): + stopwatch = Stopwatch() reports_path = Path(REPORT_PATH) if not os.path.exists(TEMP_PATH): @@ -65,6 +68,7 @@ def main(): # downloads `package_release` artifacts generated download_builds_filter(CHECK_NAME, reports_path, Path(TEMP_PATH)) + signed_count = 0 for f in os.listdir(TEMP_PATH): full_path = os.path.join(TEMP_PATH, f) if os.path.isdir(full_path): @@ -74,6 +78,17 @@ def main(): s3_path = s3_path_prefix / os.path.basename(signed_file_path) s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path)) print(f'Uploaded file {signed_file_path} to {s3_path}') + signed_count += 1 + + description = f"Signed and uploaded {signed_count} hashes" + JobReport( + description=description, + test_results=[TestResult(description, OK)], + status=SUCCESS, + start_time=stopwatch.start_time_str, + duration=stopwatch.duration_seconds, + additional_files=[], + ).dump() # Signed hashes are: # clickhouse-client_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg From cc46042ccea426904722a5a32102fa1385e6c665 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 5 May 2026 12:18:04 -0400 Subject: [PATCH 2/2] regression fix --- .../workflows/regression-reusable-suite.yml | 98 ++++----- .github/workflows/regression.yml | 188 ++---------------- 2 files changed, 64 insertions(+), 222 deletions(-) diff --git a/.github/workflows/regression-reusable-suite.yml b/.github/workflows/regression-reusable-suite.yml 
index 5c0fab69c701..677cd35a1aa2 100644 --- a/.github/workflows/regression-reusable-suite.yml +++ b/.github/workflows/regression-reusable-suite.yml @@ -6,13 +6,7 @@ on: description: "Commit SHA to checkout. Default: current (empty string)." type: string default: "" - workflow_config: - required: true - type: string - flags: - required: false - type: string - output_format: + arch: required: true type: string extra_args: @@ -40,10 +34,6 @@ on: required: false type: string default: "" - runner_arch: - required: false - type: string - default: "x86" job_name: required: false type: string @@ -56,10 +46,6 @@ on: required: false type: string default: "" - set_commit_status: - required: false - type: boolean - default: false jobs: suite: name: ${{ format('{0}{1}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '') }} @@ -81,29 +67,28 @@ jobs: AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} # Docker credentials DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_TOKEN }} # Database credentials CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} CHECKS_DATABASE_USER: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }} CHECKS_DATABASE_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }} - # LocalStack token + # LocalStack token LOCALSTACK_AUTH_TOKEN: ${{ secrets.LOCALSTACK_AUTH_TOKEN }} - # Python encoding + # Python + PYTHONUNBUFFERED: 1 PYTHONIOENCODING: utf-8 build_sha: ${{ inputs.build_sha }} pr_number: ${{ github.event.number }} + event_name: ${{ github.event_name }} artifacts: builds - # Args args: --test-to-end --no-colors --local --collect-service-logs - --output ${{ inputs.output_format }} - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" user.name="${GITHUB_ACTOR}" version="${{ fromJson(inputs.workflow_config).JOB_KV_DATA.version.string }}" package="$clickhouse_path" 
repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$REPORT_JOB_NAME" job.retry=$GITHUB_RUN_ATTEMPT job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --cicd + --output classic + --parallel 1 --log raw.log - ${{ inputs.flags != 'none' && inputs.flags || ''}} - ${{ inputs.extra_args }} + --with-analyzer artifact_paths: | ./report.html ./*.log.txt @@ -115,32 +100,35 @@ jobs: ./*/*/_instances/*.log steps: - - name: โคต๏ธ Checkout + - name: Checkout uses: actions/checkout@v4 with: repository: Altinity/clickhouse-regression ref: ${{ inputs.ref }} - - name: โ™ป๏ธ Cache setup - uses: ./.github/actions/cache-setup - - - name: ๐Ÿ› ๏ธ Setup - run: .github/setup.sh - - - name: ๐Ÿ“ฆ Get deb url - env: - S3_BASE_URL: https://altinity-build-artifacts.s3.amazonaws.com/ - PR_NUMBER: ${{ github.event.pull_request.number || 0 }} + - name: Set envs run: | + cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{ runner.temp }}/reports_dir - mkdir -p $REPORTS_PATH - cat > $REPORTS_PATH/workflow_config.json << 'EOF' - ${{ toJson(fromJson(inputs.workflow_config).WORKFLOW_CONFIG) }} EOF - python3 .github/get-deb-url.py --github-env $GITHUB_ENV --workflow-config $REPORTS_PATH/workflow_config.json --s3-base-url $S3_BASE_URL --pr-number $PR_NUMBER --branch-name ${{ github.ref_name }} --commit-hash ${{ inputs.build_sha || github.sha }} --binary + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} - - name: ๐Ÿ”„ Process regression args + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + + - name: Setup + run: .github/setup.sh + + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + + - name: Process regression args run: 
| REGRESSION_ARGS='${{ inputs.regression_args }}' # AWS replacements @@ -158,36 +146,38 @@ jobs: REGRESSION_ARGS="${REGRESSION_ARGS//'{{AZURE_CONTAINER_NAME}}'/${{ secrets.AZURE_CONTAINER_NAME }}}" echo "REGRESSION_ARGS=$REGRESSION_ARGS" >> $GITHUB_ENV - - name: ๐Ÿงช Run ${{ env.SUITE }} suite + - name: Run ${{ env.SUITE }} suite id: run_suite - run: python3 + run: EXITCODE=0; + python3 -u ${{ env.SUITE }}/${{ env.SUITE_EXECUTABLE }} - --clickhouse ${{ env.clickhouse_path }} + --clickhouse-binary-path ${{ env.clickhouse_path }} ${{ env.REGRESSION_ARGS }} + ${{ inputs.extra_args }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$REPORT_JOB_NAME" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE - - name: ๐Ÿ“Š Set Commit Status - if: ${{ !cancelled() && inputs.set_commit_status }} + - name: Set Commit Status + if: always() env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: ${{ format('Regression {0} {1}{2}', inputs.runner_arch, inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '') }} + SUITE_NAME: ${{ format('Regression {0} {1}{2}', inputs.arch, inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '') }} run: python3 .github/set_builds_status.py - - name: ๐Ÿ“ Create and upload logs - if: ${{ !cancelled() }} + - name: Create and upload logs + if: always() run: .github/create_and_upload_logs.sh 1 - - name: ๐Ÿ“ค Upload logs to results database - if: ${{ !cancelled() }} - timeout-minutes: 20 + - name: Upload logs to 
results database + if: always() + timeout-minutes: 30 run: .github/upload_results_to_database.sh 1 - uses: actions/upload-artifact@v4 - if: ${{ !cancelled() }} + if: always() with: - name: ${{ format('{0}{1}-artifacts-{2}{3}', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '', inputs.runner_arch, contains(inputs.extra_args, '--use-keeper') && '_keeper' || '_zookeeper') }} + name: ${{ format('{0}{1}-{2}-artifacts', inputs.job_name != '' && inputs.job_name || inputs.suite_name, inputs.part != '' && format('_{0}', inputs.part) || '', inputs.arch) }} path: ${{ env.artifact_paths }} - diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index d30ee15f6520..1f894a59427d 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -35,10 +35,6 @@ name: Regression test workflow - Release additional_envs: description: additional ENV variables to setup the job type: string - workflow_config: - description: workflow config for the run - required: true - type: string secrets: secret_envs: description: if given, it's passed to the environments @@ -89,64 +85,25 @@ name: Regression test workflow - Release description: gcs uri used for regression tests. 
required: true -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} - DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_TOKEN }} - CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} - CHECKS_DATABASE_USER: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }} - CHECKS_DATABASE_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }} - args: --test-to-end - --no-colors - --local - --collect-service-logs - --output new-fails - --parallel 1 - --log raw.log - --with-analyzer - artifact_paths: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - jobs: Common: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'common') strategy: fail-fast: false matrix: - SUITE: [aes_encryption, atomic_insert, attach, base_58, clickhouse_keeper_failover,data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, jwt_authentication, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, selects, session_timezone, settings, version, window_functions] + SUITE: [aes_encryption, atomic_insert, base_58, clickhouse_keeper_failover, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, jwt_authentication, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, selects, session_timezone, settings, swarms, version, window_functions] uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ 
inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: ${{ matrix.SUITE }} suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: ${{ matrix.SUITE }} secrets: inherit AggregateFunctions: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'aggregate_functions') strategy: fail-fast: false matrix: @@ -154,24 +111,18 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: aggregate_functions suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: aggregate_functions extra_args: --only "part ${{ matrix.PART }}/*" secrets: inherit + Alter: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'alter') strategy: fail-fast: false matrix: @@ -184,26 +135,19 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: alter suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: /${{ matrix.ONLY }}_partition part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - 
set_commit_status: true job_name: alter_${{ matrix.ONLY }} extra_args: --only "/alter/${{ matrix.ONLY }} partition/${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}*" secrets: inherit Benchmark: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'benchmark') strategy: fail-fast: false matrix: @@ -211,25 +155,18 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: ontime_benchmark suite_executable: benchmark.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: /${{ matrix.STORAGE }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: benchmark_${{ matrix.STORAGE }} regression_args: --storage ${{ matrix.STORAGE }} --gcs-uri {{GCS_URI}} --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} secrets: inherit ClickHouseKeeper: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'clickhouse_keeper') strategy: fail-fast: false matrix: @@ -238,26 +175,19 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: clickhouse_keeper suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: /${{ matrix.SSL }} part: ${{ matrix.PART }} build_sha: 
${{ inputs.build_sha }} - set_commit_status: true job_name: clickhouse_keeper_${{ matrix.SSL }} extra_args: ${{ matrix.SSL == 'ssl' && '--ssl' || '' }} --only "part ${{ matrix.PART }}/*" secrets: inherit Iceberg: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'iceberg') strategy: fail-fast: false matrix: @@ -265,24 +195,18 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: iceberg suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: iceberg - extra_args: --only ${{ matrix.PART == 1 && '"/iceberg/iceberg engine/rest catalog/*" "/iceberg/s3 table function/*" "/iceberg/icebergS3 table function/*" "/iceberg/iceberg cache/*"' || '"/iceberg/iceberg engine/glue catalog/*" "/iceberg/iceberg table engine/*" "/iceberg/export partition/*"' }} + extra_args: --only ${{ matrix.PART == 1 && '"/iceberg/iceberg engine/rest catalog/*" "/iceberg/s3 table function/*" "/iceberg/icebergS3 table function/*" "/iceberg/iceberg cache/*"' || '"/iceberg/iceberg engine/glue catalog/*" "/iceberg/iceberg table engine/*"' }} secrets: inherit + LDAP: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'ldap') strategy: fail-fast: false matrix: @@ -290,43 +214,29 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: ldap/${{ matrix.SUITE }} suite_executable: regression.py 
- output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: ldap_${{ matrix.SUITE }} secrets: inherit Parquet: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'parquet') uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: parquet suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: parquet secrets: inherit ParquetS3: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'parquet') strategy: fail-fast: false matrix: @@ -334,25 +244,18 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: parquet suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: ${{ matrix.STORAGE }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: parquet_${{ matrix.STORAGE }} - regression_args: --storage ${{ matrix.STORAGE }} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --only "/parquet/${{ matrix.STORAGE }}/*" + regression_args: --storage ${{ matrix.STORAGE 
}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} secrets: inherit RBAC: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'rbac') strategy: fail-fast: false matrix: @@ -360,24 +263,18 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: rbac suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: rbac extra_args: --only "/rbac/part ${{ matrix.PART }}/*" secrets: inherit + SSLServer: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'ssl_server') strategy: fail-fast: false matrix: @@ -385,25 +282,18 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: ssl_server suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: ssl_server extra_args: --only "part ${{ matrix.PART }}/*" secrets: inherit S3: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 's3') strategy: fail-fast: false matrix: @@ -415,27 +305,20 @@ 
jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: s3 suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: /${{ matrix.STORAGE }} part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: s3_${{ matrix.STORAGE }} regression_args: --storage ${{ matrix.STORAGE }} --gcs-uri {{GCS_URI}} --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --azure-account-name {{AZURE_ACCOUNT_NAME}} --azure-storage-key {{AZURE_STORAGE_KEY}} --azure-container {{AZURE_CONTAINER_NAME}} extra_args: --only ":/try*" ":/part ${{ matrix.PART }}/*" secrets: inherit S3Export: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 's3_export') strategy: fail-fast: false matrix: @@ -443,47 +326,20 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: s3 suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: /minio part: ${{ matrix.PART }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: s3_export regression_args: --storage minio extra_args: --only ":/try*" "minio/export tests/export ${{ matrix.PART }}/*" secrets: inherit - Swarms: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null 
|| - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'swarms') - uses: ./.github/workflows/regression-reusable-suite.yml - with: - ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} - suite_name: swarms - suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer - timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} - runner_type: ${{ inputs.runner_type }} - build_sha: ${{ inputs.build_sha }} - set_commit_status: true - job_name: swarms - secrets: inherit - TieredStorage: - if: | - fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || - contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'tiered_storage') strategy: fail-fast: false matrix: @@ -491,17 +347,13 @@ jobs: uses: ./.github/workflows/regression-reusable-suite.yml with: ref: ${{ inputs.commit }} - workflow_config: ${{ inputs.workflow_config }} + runner_arch: ${{ inputs.arch }} suite_name: tiered_storage suite_executable: regression.py - output_format: new-fails - flags: --with-analyzer timeout_minutes: ${{ inputs.timeout_minutes }} - runner_arch: ${{ inputs.arch }} runner_type: ${{ inputs.runner_type }} storage_path: /${{ matrix.STORAGE }} build_sha: ${{ inputs.build_sha }} - set_commit_status: true job_name: tiered_storage_${{ matrix.STORAGE }} regression_args: --aws-s3-access-key {{AWS_ACCESS_KEY}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-uri https://s3.{{AWS_REGION}}.amazonaws.com/{{AWS_BUCKET}}/data/ --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --gcs-uri {{GCS_URI}} extra_args: ${{ matrix.STORAGE != 'local' && format('--with-{0}', matrix.STORAGE) || '' }}