From 56f28c9c3df2e7d0bd8143f7d2c5b7be43a8553c Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Tue, 14 Apr 2026 14:58:20 -0400
Subject: [PATCH 1/8] fixes and backports

---
 .../ci_run_report.html.jinja            |  2 +-
 .github/workflows/release_branches.yml  | 16 ++--
 .github/workflows/reusable_sign.yml     |  4 +-
 tests/ci/build_download_helper.py       | 34 +++++++-
 tests/ci/ci.py                          |  4 +
 tests/ci/sign_release.py                | 85 +++++++++++++++----
 6 files changed, 119 insertions(+), 26 deletions(-)

diff --git a/.github/actions/create_workflow_report/ci_run_report.html.jinja b/.github/actions/create_workflow_report/ci_run_report.html.jinja
index a92c1aa34e3a..509f5d835cd9 100644
--- a/.github/actions/create_workflow_report/ci_run_report.html.jinja
+++ b/.github/actions/create_workflow_report/ci_run_report.html.jinja
@@ -266,4 +266,4 @@
     });
 
 
-
\ No newline at end of file
+
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index b7ee01c54740..b5bbbd5c0a61 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -1,5 +1,6 @@
 # yamllint disable rule:comments-indentation
 name: ReleaseBranchCI
+run-name: ${{ github.event.inputs.workflow_name || format('{0} ({1})', github.workflow, github.ref_name) }}
 
 env:
   # Force the stdout and stderr streams to be unbuffered
@@ -22,8 +23,6 @@ on:  # yamllint disable-line rule:truthy
     branches:
       - 'releases/*'
   push:
-    branches:
-      - 'releases/*'
     tags:
       - '*'
   workflow_dispatch:
@@ -443,22 +442,22 @@
     uses: ./.github/workflows/regression.yml
     secrets: inherit
     with:
-      runner_type: altinity-on-demand, altinity-regression-tester
-      commit: b8e6b17fdd6f6c0db1234c23eca5294c9400a196
+      runner_type: altinity-regression-tester
+      commit: b28bcd03f30440f25a17917e5005670c28e3a703
       arch: release
       build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
-      timeout_minutes: 300
+      timeout_minutes: 210
   RegressionTestsAarch64:
     needs: [RunConfig, BuilderDebAarch64]
     if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'regression') && !contains(fromJson(needs.RunConfig.outputs.data).ci_settings.exclude_keywords, 'aarch64')}}
     uses: ./.github/workflows/regression.yml
     secrets: inherit
     with:
-      runner_type: altinity-on-demand, altinity-regression-tester-aarch64
-      commit: b8e6b17fdd6f6c0db1234c23eca5294c9400a196
+      runner_type: altinity-regression-tester-aarch64
+      commit: b28bcd03f30440f25a17917e5005670c28e3a703
       arch: aarch64
       build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
-      timeout_minutes: 300
+      timeout_minutes: 210
   SignRelease:
     needs: [RunConfig, BuilderDebRelease]
     if: ${{ !failure() && !cancelled() }}
@@ -506,6 +505,7 @@
       - RegressionTestsAarch64
       - GrypeScan
       - SignRelease
+      - SignAarch64
     runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
     steps:
       - name: Check out repository code
diff --git a/.github/workflows/reusable_sign.yml b/.github/workflows/reusable_sign.yml
index 7bfed2758359..8df0d292a361 100644
--- a/.github/workflows/reusable_sign.yml
+++ b/.github/workflows/reusable_sign.yml
@@ -160,7 +160,9 @@
         uses: actions/upload-artifact@v4
         with:
           name: ${{inputs.test_name}} signed-hashes
-          path: ${{ env.TEMP_PATH }}/*.gpg
+          path: |
+            ${{ runner.temp }}/signed/*.gpg
+            ${{ runner.temp }}/signed/signing_pubkey.asc
       - name: Clean
         if: always()
         uses: ./.github/actions/clean
diff --git a/tests/ci/build_download_helper.py b/tests/ci/build_download_helper.py
index f9b4d0feab05..be052567bfb4 100644
--- a/tests/ci/build_download_helper.py
+++ b/tests/ci/build_download_helper.py
@@ -155,6 +155,16 @@ def read_build_urls(build_name: str, reports_path: Union[Path, str]) -> List[str
     if artifact_report.is_file():
         with open(artifact_report, "r", encoding="utf-8") as f:
             return json.load(f)["build_urls"]  # type: ignore
+    pr_info = None
+    try:
+        from pr_info import PRInfo  # pylint: disable=import-outside-toplevel
+
+        pr_info = PRInfo()
+    except Exception as ex:
+        logger.warning("Failed to init PRInfo while selecting build report: %s", ex)
+
+    reports_by_sha = []  # type: List[List[str]]
+    fallback_reports = []  # type: List[List[str]]
     for root, _, files in os.walk(reports_path):
         for file in files:
             if file.endswith(f"_{build_name}.json"):
@@ -163,7 +173,29 @@ def read_build_urls(build_name: str, reports_path: Union[Path, str]) -> List[str
                     os.path.join(root, file), "r", encoding="utf-8"
                 ) as file_handler:
                     build_report = json.load(file_handler)
-                    return build_report["build_urls"]  # type: ignore
+                    build_urls = build_report.get("build_urls", [])
+                    if not isinstance(build_urls, list):
+                        continue
+                    fallback_reports.append(build_urls)
+                    if pr_info and pr_info.sha and any(
+                        f"/{pr_info.sha}/" in str(url) for url in build_urls
+                    ):
+                        reports_by_sha.append(build_urls)
+
+    if reports_by_sha:
+        logger.info(
+            "Using build report matched by SHA [%s] for [%s]",
+            pr_info.sha if pr_info else "",
+            build_name,
+        )
+        return reports_by_sha[0]
+
+    if fallback_reports:
+        logger.warning(
+            "No SHA-matched build report found for [%s], fallback to first discovered",
+            build_name,
+        )
+        return fallback_reports[0]
     logger.info("A build report is not found for %s", build_name)
     return []
 
diff --git a/tests/ci/ci.py b/tests/ci/ci.py
index bd9c2d66769b..08c92c0f6b75 100755
--- a/tests/ci/ci.py
+++ b/tests/ci/ci.py
@@ -303,6 +303,10 @@ def _pre_action(s3, job_name, batch, indata, pr_info):
         # testing), otherwise reports won't be found
         if not (pr_info.is_scheduled or pr_info.is_dispatched):
             report_prefix = Utils.normalize_string(pr_info.head_ref)
+        elif isinstance(pr_info.ref, str) and pr_info.ref.startswith("refs/tags/"):
+            # For tag-triggered runs, use tag name as prefix to avoid downloading
+            # reports from other runs with the same digest.
+            report_prefix = Utils.normalize_string(pr_info.head_ref)
         elif pr_info.is_pr:
             report_prefix = str(pr_info.number)
         print(
diff --git a/tests/ci/sign_release.py b/tests/ci/sign_release.py
index 8a5827097c8b..4bea91432b26 100644
--- a/tests/ci/sign_release.py
+++ b/tests/ci/sign_release.py
@@ -2,10 +2,13 @@
 import sys
 import os
 import logging
+import subprocess
 from env_helper import TEMP_PATH, REPO_COPY, REPORT_PATH
 from s3_helper import S3Helper
 from pr_info import PRInfo
 from build_download_helper import download_builds_filter
+from report import FAIL, OK, FAILURE, SUCCESS, JobReport, TestResult
+from stopwatch import Stopwatch
 import hashlib
 from pathlib import Path
 
@@ -13,6 +16,7 @@
 GPG_BINARY_SIGNING_PASSPHRASE = os.getenv("GPG_BINARY_SIGNING_PASSPHRASE")
 
 CHECK_NAME = os.getenv("CHECK_NAME", "Sign release")
+SIGNING_PUBLIC_KEY_FILE = "signing_pubkey.asc"
 
 def hash_file(file_path):
     BLOCK_SIZE = 65536  # The size of each read from the file
@@ -32,22 +36,47 @@ def hash_file(file_path):
     return hash_file_path
 
 
-def sign_file(file_path):
+def import_private_signing_key():
     priv_key_file_path = 'priv.key'
-    with open(priv_key_file_path, 'x') as f:
+    with open(priv_key_file_path, 'w') as f:
         f.write(GPG_BINARY_SIGNING_KEY)
 
-    out_file_path = f'{file_path}.gpg'
+    try:
+        subprocess.run(
+            f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}',
+            shell=True,
+            check=True,
+        )
+    finally:
+        os.remove(priv_key_file_path)
 
-    os.system(f'echo {GPG_BINARY_SIGNING_PASSPHRASE} | gpg --batch --import {priv_key_file_path}')
-    os.system(f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}')
+
+def sign_file(file_path):
+    out_file_path = f'{file_path}.gpg'
+    subprocess.run(
+        f'gpg -o {out_file_path} --pinentry-mode=loopback --batch --yes --passphrase {GPG_BINARY_SIGNING_PASSPHRASE} --sign {file_path}',
+        shell=True,
+        check=True,
+    )
     print(f"Signed {file_path}")
-    os.remove(priv_key_file_path)
     return out_file_path
 
 
+
+def export_public_signing_key(out_file_path: Path):
+    subprocess.run(
+        f"gpg --armor --output {out_file_path} --export",
+        shell=True,
+        check=True,
+    )
+    print(f"Exported signing public key to {out_file_path}")
+
 def main():
+    stopwatch = Stopwatch()
     reports_path = Path(REPORT_PATH)
+    test_results = []
+    state = SUCCESS
+    description = "Signed artifact hashes successfully"
 
     if not os.path.exists(TEMP_PATH):
         os.makedirs(TEMP_PATH)
@@ -65,15 +94,29 @@ def main():
     # downloads `package_release` artifacts generated
     download_builds_filter(CHECK_NAME, reports_path, Path(TEMP_PATH))
 
-    for f in os.listdir(TEMP_PATH):
-        full_path = os.path.join(TEMP_PATH, f)
-        if os.path.isdir(full_path):
-            continue
-        hashed_file_path = hash_file(full_path)
-        signed_file_path = sign_file(hashed_file_path)
-        s3_path = s3_path_prefix / os.path.basename(signed_file_path)
-        s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path))
-        print(f'Uploaded file {signed_file_path} to {s3_path}')
+    try:
+        import_private_signing_key()
+        for f in os.listdir(TEMP_PATH):
+            full_path = os.path.join(TEMP_PATH, f)
+            if os.path.isdir(full_path):
+                continue
+            hashed_file_path = hash_file(full_path)
+            signed_file_path = sign_file(hashed_file_path)
+            s3_path = s3_path_prefix / os.path.basename(signed_file_path)
+            s3_helper.upload_build_file_to_s3(Path(signed_file_path), str(s3_path))
+            print(f'Uploaded file {signed_file_path} to {s3_path}')
+            test_results.append(TestResult(name=os.path.basename(full_path), status=OK))
+
+
public_key_path = Path(TEMP_PATH) / SIGNING_PUBLIC_KEY_FILE + export_public_signing_key(public_key_path) + s3_helper.upload_build_file_to_s3( + public_key_path, str(s3_path_prefix / SIGNING_PUBLIC_KEY_FILE) + ) + test_results.append(TestResult(name=SIGNING_PUBLIC_KEY_FILE, status=OK)) + except Exception as ex: + state = FAILURE + description = f"Failed to sign release artifacts: {ex}" + test_results.append(TestResult(name=CHECK_NAME, status=FAIL, raw_logs=str(ex))) # Signed hashes are: # clickhouse-client_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-keeper_22.3.15.2.altinitystable_x86_64.apk.sha512.gpg @@ -91,6 +134,18 @@ def main(): # clickhouse-keeper_22.3.15.2.altinitystable_amd64.deb.sha512.gpg clickhouse-server-22.3.15.2.altinitystable.x86_64.rpm.sha512.gpg # clickhouse-keeper-22.3.15.2.altinitystable-amd64.tgz.sha512.gpg clickhouse.sha512.gpg + JobReport( + description=description, + test_results=test_results, + status=state, + start_time=stopwatch.start_time_str, + duration=stopwatch.duration_seconds, + additional_files=[], + ).dump() + + if state == FAILURE: + sys.exit(1) + sys.exit(0) if __name__ == "__main__": From 1b21fbfa9fbe95884a53a32645914c03d2203ea9 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 Apr 2026 15:55:18 -0400 Subject: [PATCH 2/8] regression refactor --- .github/workflows/regression.yml | 939 ++++++++++++------------------- 1 file changed, 358 insertions(+), 581 deletions(-) diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index cc1c6e8d0eaf..ef1135d55897 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -3,7 +3,7 @@ name: Regression test workflow - Release workflow_call: inputs: runner_type: - description: the label of runner to use, can be a simple string or a comma-separated list + description: the (meta-)label of runner to use required: true type: string commit: @@ -35,6 +35,10 @@ name: Regression test workflow - Release additional_envs: description: additional ENV variables to setup the job type: string + workflow_config: + description: workflow config for the run + required: true + type: string secrets: secret_envs: description: if given, it's passed to the environments @@ -95,17 +99,15 @@ env: DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} CHECKS_DATABASE_USER: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }} - CHECKS_DATABASE_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }} - LOCALSTACK_AUTH_TOKEN: ${{ secrets.LOCALSTACK_AUTH_TOKEN }} + CHECKS_DATABASE_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }} args: --test-to-end --no-colors --local --collect-service-logs - --output classic + --output new-fails --parallel 1 --log raw.log --with-analyzer - artifacts: builds artifact_paths: | ./report.html ./*.log.txt @@ -115,618 +117,393 @@ env: ./*/_instances/*/logs/*.log ./*/*/_instances/*/logs/*.log ./*/*/_instances/*.log - build_sha: ${{ inputs.build_sha }} - pr_number: ${{ github.event.number }} - event_name: ${{ github.event_name }} jobs: - runner_labels_setup: - name: Compute proper runner labels for the rest of the jobs - runs-on: ubuntu-latest - outputs: - runner_labels: ${{ steps.setVariables.outputs.runner_labels }} - steps: - - id: setVariables - name: Prepare runner_labels variables for the later steps - run: | - - # Prepend self-hosted - input="self-hosted, ${input}" - - # Remove all whitespace - input="$(echo ${input} | tr -d [:space:])" - # Make something like a JSON array from comma-separated list - 
input="[ '${input//\,/\'\, \'}' ]" - - echo "runner_labels=$input" >> ${GITHUB_OUTPUT} - env: - input: ${{ inputs.runner_type }} - Common: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'common') + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, atomic_insert, attach, base_58, clickhouse_keeper_failover,data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, jwt_authentication, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, selects, session_timezone, settings, version, window_functions] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ${{ matrix.SUITE }} + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: ${{ matrix.SUITE }} + secrets: inherit + + AggregateFunctions: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'aggregate_functions') strategy: fail-fast: false matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, iceberg, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, swarms, tiered_storage, version, window_functions] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=${{ matrix.SUITE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 
.github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} ${{ matrix.SUITE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + PART: [1, 2, 3] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: aggregate_functions + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: aggregate_functions + extra_args: --only "part ${{ matrix.PART }}/*" + secrets: inherit Alter: - strategy: - fail-fast: false - matrix: - ONLY: [replace, attach, move] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=alter - STORAGE=/${{ matrix.ONLY }}_partition - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u alter/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --only "/alter/${{ matrix.ONLY }} partition/*" - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.ONLY }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: alter-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts - path: ${{ 
env.artifact_paths}} + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'alter') + strategy: + fail-fast: false + matrix: + ONLY: [replace, move] + include: + - ONLY: attach + PART: 1 + - ONLY: attach + PART: 2 + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: alter + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.ONLY }}_partition + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: alter_${{ matrix.ONLY }} + extra_args: --only "/alter/${{ matrix.ONLY }} partition/${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}*" + secrets: inherit Benchmark: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'benchmark') strategy: fail-fast: false matrix: STORAGE: [minio, aws_s3, gcs] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/benchmark.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression 
${{ inputs.arch }} Benchmark ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ontime_benchmark + suite_executable: benchmark.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.STORAGE }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: benchmark_${{ matrix.STORAGE }} + regression_args: --storage ${{ matrix.STORAGE }} --gcs-uri {{GCS_URI}} --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} + secrets: inherit - ClickHouseKeeperSSL: - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=clickhouse_keeper - STORAGE=/ssl - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --ssl - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper SSL" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts - path: ${{ env.artifact_paths }} + ClickHouseKeeper: + if: | + 
fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'clickhouse_keeper') + strategy: + fail-fast: false + matrix: + PART: [1, 2] + SSL: [ssl, no_ssl] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: clickhouse_keeper + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.SSL }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: clickhouse_keeper_${{ matrix.SSL }} + extra_args: ${{ matrix.SSL == 'ssl' && '--ssl' || '' }} --only "part ${{ matrix.PART }}/*" + secrets: inherit + Iceberg: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'iceberg') + strategy: + fail-fast: false + matrix: + PART: [1, 2] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: iceberg + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: iceberg + extra_args: --only ${{ matrix.PART == 1 && '"/iceberg/iceberg engine/rest catalog/*" "/iceberg/s3 table function/*" "/iceberg/icebergS3 table function/*" "/iceberg/iceberg cache/*"' || '"/iceberg/iceberg engine/glue catalog/*" "/iceberg/iceberg table engine/*"' }} + secrets: inherit LDAP: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'ldap') strategy: fail-fast: false matrix: SUITE: [authentication, external_user_directory, role_mapping] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=ldap/${{ matrix.SUITE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" 
commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} LDAP ${{ matrix.SUITE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ldap/${{ matrix.SUITE }} + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: ldap_${{ matrix.SUITE }} + secrets: inherit Parquet: - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=parquet - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Parquet" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: 
${{ env.SUITE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'parquet') + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: parquet + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: parquet + secrets: inherit ParquetS3: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'parquet') strategy: fail-fast: false matrix: STORAGE: [minio, aws_s3] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=parquet - STORAGE=${{ matrix.STORAGE}} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --storage ${{ matrix.STORAGE }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Parquet ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths }} + uses: 
./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: parquet + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: ${{ matrix.STORAGE }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: parquet_${{ matrix.STORAGE }} + regression_args: --storage ${{ matrix.STORAGE }} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --only "/parquet/${{ matrix.STORAGE }}/*" + secrets: inherit + + RBAC: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'rbac') + strategy: + fail-fast: false + matrix: + PART: [1, 2, 3] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: rbac + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: rbac + extra_args: --only "/rbac/part ${{ matrix.PART }}/*" + secrets: inherit + SSLServer: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'ssl_server') + strategy: + fail-fast: false + matrix: + PART: [1, 2, 3] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: ssl_server + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: ssl_server + extra_args: --only "part ${{ matrix.PART }}/*" + secrets: inherit S3: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 's3') strategy: fail-fast: false matrix: - STORAGE: [minio, aws_s3, gcs, azure] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=s3 - STORAGE=/${{ matrix.STORAGE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: 
Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --storage ${{ matrix.STORAGE }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} - --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }} - --azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }} - --azure-container ${{ secrets.AZURE_CONTAINER_NAME }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + STORAGE: [aws_s3, gcs, azure, minio] + PART: [1, 2] + include: + - STORAGE: minio + PART: 3 + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: s3 + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.STORAGE }} + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: s3_${{ matrix.STORAGE }} + regression_args: --storage ${{ matrix.STORAGE }} --gcs-uri {{GCS_URI}} --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --aws-s3-bucket {{AWS_BUCKET}} --aws-s3-region {{AWS_REGION}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-access-key {{AWS_ACCESS_KEY}} --azure-account-name {{AZURE_ACCOUNT_NAME}} --azure-storage-key {{AZURE_STORAGE_KEY}} --azure-container {{AZURE_CONTAINER_NAME}} + extra_args: --only ":/try*" ":/part ${{ matrix.PART }}/*" + secrets: inherit + + S3Export: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 's3_export') + strategy: + fail-fast: false + matrix: + PART: [part, partition] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: s3 + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + 
timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /minio + part: ${{ matrix.PART }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: s3_export + regression_args: --storage minio + extra_args: --only ":/try*" "minio/export tests/export ${{ matrix.PART }}/*" + secrets: inherit + + Swarms: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'swarms') + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: swarms + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: swarms + secrets: inherit TieredStorage: + if: | + fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs[0] == null || + contains(fromJson(inputs.workflow_config).JOB_KV_DATA.ci_regression_jobs, 'tiered_storage') strategy: fail-fast: false matrix: - STORAGE: [minio, s3amazon, s3gcs] - needs: [runner_labels_setup] - runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} - timeout-minutes: ${{ inputs.timeout_minutes }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v4 - with: - repository: Altinity/clickhouse-regression - ref: ${{ inputs.commit }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{ runner.temp }}/reports_dir - SUITE=tiered_storage - STORAGE=/${{ matrix.STORAGE }} - EOF - - name: Download json reports - uses: actions/download-artifact@v4 - with: - path: ${{ env.REPORTS_PATH }} - name: build_report_package_${{ inputs.arch }} - - name: Rename reports - run: | - mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - id: run_suite - run: EXITCODE=0; - python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_path }} - --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} - --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ - --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} - --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} - --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} - --with-${{ matrix.STORAGE }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" - ${{ env.args }} || EXITCODE=$?; - .github/add_link_to_logs.sh; - exit $EXITCODE - - name: Set Commit Status - if: always() - run: python3 .github/set_builds_status.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - 
JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Tiered Storage ${{ matrix.STORAGE }}" - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - name: Upload logs to regression results database - if: always() - timeout-minutes: 20 - run: .github/upload_results_to_database.sh 1 - - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts - path: ${{ env.artifact_paths}} + STORAGE: [local, minio, s3amazon, s3gcs] + uses: ./.github/workflows/regression-reusable-suite.yml + with: + ref: ${{ inputs.commit }} + workflow_config: ${{ inputs.workflow_config }} + suite_name: tiered_storage + suite_executable: regression.py + output_format: new-fails + flags: --with-analyzer + timeout_minutes: ${{ inputs.timeout_minutes }} + runner_arch: ${{ inputs.arch }} + runner_type: ${{ inputs.runner_type }} + storage_path: /${{ matrix.STORAGE }} + build_sha: ${{ inputs.build_sha }} + set_commit_status: true + job_name: tiered_storage_${{ matrix.STORAGE }} + regression_args: --aws-s3-access-key {{AWS_ACCESS_KEY}} --aws-s3-key-id {{AWS_KEY_ID}} --aws-s3-uri https://s3.{{AWS_REGION}}.amazonaws.com/{{AWS_BUCKET}}/data/ --gcs-key-id {{GCS_KEY_ID}} --gcs-key-secret {{GCS_KEY_SECRET}} --gcs-uri {{GCS_URI}} + extra_args: ${{ matrix.STORAGE != 'local' && format('--with-{0}', matrix.STORAGE) || '' }} + secrets: inherit From ec3666ec581a82e5ed0fcfb72edab610d5707776 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Tue, 14 Apr 2026 23:51:09 -0400 Subject: [PATCH 3/8] naming and regression fixes --- .../ci_run_report.html.jinja | 9 +- .../workflows/regression-reusable-suite.yml | 183 +++++++++++++++++ .github/workflows/regression.yml | 185 ++---------------- .github/workflows/release_branches.yml | 3 +- 4 files changed, 210 insertions(+), 170 deletions(-) create mode 100644 .github/workflows/regression-reusable-suite.yml diff --git a/.github/actions/create_workflow_report/ci_run_report.html.jinja b/.github/actions/create_workflow_report/ci_run_report.html.jinja index 509f5d835cd9..4c94465a16c6 100644 --- a/.github/actions/create_workflow_report/ci_run_report.html.jinja +++ b/.github/actions/create_workflow_report/ci_run_report.html.jinja @@ -5,6 +5,9 @@ + {%- if is_preview %} + + {%- endif %}