diff --git a/.github/workflows/docker_test_images.yml b/.github/workflows/docker_test_images.yml
index be54cdaf3ddf..5e1778838bd1 100644
--- a/.github/workflows/docker_test_images.yml
+++ b/.github/workflows/docker_test_images.yml
@@ -10,7 +10,7 @@ name: Build docker images
         description: set latest tag for resulting multiarch manifest
         required: false
         type: boolean
-        default: false
+        default: false
   secrets:
     secret_envs:
       description: if given, it's passed to the environments
@@ -26,10 +26,14 @@ env:
   AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
   AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
   AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+  DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+  DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+  ROBOT_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
 jobs:
   DockerBuildAarch64:
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none]
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker-aarch64]
     if: |
       !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_aarch64) != '[]'
     steps:
@@ -44,7 +48,7 @@ jobs:
             --image-tags '${{ toJson(fromJson(inputs.data).docker_data.images) }}' \
             --missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_aarch64) }}'
   DockerBuildAmd64:
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
     if: |
       !failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_amd64) != '[]'
     steps:
@@ -60,7 +64,7 @@ jobs:
             --missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_amd64) }}'
   DockerMultiArchManifest:
     needs: [DockerBuildAmd64, DockerBuildAarch64]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
+    runs-on: [self-hosted, altinity-on-demand, altinity-style-checker]
     if: |
       !failure() && !cancelled() && (toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]' || inputs.set_latest)
     steps:
diff --git a/.github/workflows/grype_scan.yml b/.github/workflows/grype_scan.yml
index e68c3e63e283..b6781c386f94 100644
--- a/.github/workflows/grype_scan.yml
+++ b/.github/workflows/grype_scan.yml
@@ -61,7 +61,7 @@ jobs:
           TAG_SUFFIX: ${{ inputs.tag-suffix }}
           SPECIFIED_VERSION: ${{ inputs.version }}
         run: |
-          python3 ./tests/ci/version_helper.py | tee /tmp/version_info
+          python3 ./tests/ci/version_helper.py | grep = | tee /tmp/version_info
           source /tmp/version_info
           if [ -z "$SPECIFIED_VERSION" ]; then
            VERSION=$CLICKHOUSE_VERSION_STRING
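The `grep =` filter above keeps only the `KEY=VALUE` lines that version_helper.py prints, so the following `source /tmp/version_info` evaluates nothing but plain shell assignments. A minimal sketch of the same pattern (the version value shown is illustrative, not taken from the script):

```bash
#!/usr/bin/env bash
set -euo pipefail

# Keep only assignment-style lines and save them for later sourcing.
python3 ./tests/ci/version_helper.py | grep = | tee /tmp/version_info

# /tmp/version_info now holds lines such as (illustrative):
#   CLICKHOUSE_VERSION_STRING=24.3.5.1
source /tmp/version_info
echo "Resolved version: ${CLICKHOUSE_VERSION_STRING}"
```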
diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml
index 3d5d06e5b296..bcd054d15dff 100644
--- a/.github/workflows/regression.yml
+++ b/.github/workflows/regression.yml
@@ -100,7 +100,7 @@ env:
       --no-colors
       --local
       --collect-service-logs
-      --output classic
+      --output new-fails
       --parallel 1
       --log raw.log
       --with-analyzer
@@ -145,7 +145,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions]
+        SUITE: [aes_encryption, atomic_insert, base_58, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, functions, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, tiered_storage, version, window_functions]
     needs: [runner_labels_setup]
     runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
     timeout-minutes: ${{ inputs.timeout_minutes }}
@@ -179,7 +179,7 @@ jobs:
           python3
             -u ${{ env.SUITE }}/regression.py
             --clickhouse-binary-path ${{ env.clickhouse_path }}
-            --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+            --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
             ${{ env.args }} || EXITCODE=$?;
           .github/add_link_to_logs.sh;
           exit $EXITCODE
@@ -203,11 +203,79 @@ jobs:
           name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts
           path: ${{ env.artifact_paths}}
 
+  AggregateFunctions:
+    strategy:
+      fail-fast: false
+      matrix:
+        PART: [1, 2, 3]
+    needs: [runner_labels_setup]
+    runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
+    timeout-minutes: ${{ inputs.timeout_minutes }}
+    steps:
+      - name: Checkout regression repo
+        uses: actions/checkout@v4
+        with:
+          repository: Altinity/clickhouse-regression
+          ref: ${{ inputs.commit }}
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          REPORTS_PATH=${{ runner.temp }}/reports_dir
+          SUITE=aggregate_functions
+          PART=${{ matrix.PART }}
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v4
+        with:
+          path: ${{ env.REPORTS_PATH }}
+          name: build_report_package_${{ inputs.arch }}
+      - name: Rename reports
+        run: |
+          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+      - name: Setup
+        run: .github/setup.sh
+      - name: Get deb url
+        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
+      - name: Run ${{ env.SUITE }} suite
+        id: run_suite
+        run: EXITCODE=0;
+          python3
+            -u ${{ env.SUITE }}/regression.py
+            --clickhouse-binary-path ${{ env.clickhouse_path }}
+            --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+            --only "part ${{ matrix.PART }}/*"
+            ${{ env.args }} || EXITCODE=$?;
+          .github/add_link_to_logs.sh;
+          exit $EXITCODE
+      - name: Set Commit Status
+        if: always()
+        run: python3 .github/set_builds_status.py
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+          SUITE_NAME: "Regression ${{ inputs.arch }} ${{ env.SUITE }}-${{ matrix.PART }}"
+      - name: Create and upload logs
+        if: always()
+        run: .github/create_and_upload_logs.sh 1
+      - name: Upload logs to regression results database
+        if: always()
+        timeout-minutes: 20
+        run: .github/upload_results_to_database.sh 1
+      - uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts
+          path: ${{ env.artifact_paths}}
+
   Alter:
     strategy:
       fail-fast: false
       matrix:
-        ONLY: [replace, attach, move]
+        ONLY: [replace, move]
+        include:
+          - ONLY: attach
+            PART: 1
+          - ONLY: attach
+            PART: 2
     needs: [runner_labels_setup]
     runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }}
     timeout-minutes: ${{ inputs.timeout_minutes }}
@@ -223,6 +291,7 @@ jobs:
           REPORTS_PATH=${{ runner.temp }}/reports_dir
           SUITE=alter
           STORAGE=/${{ matrix.ONLY }}_partition
+          PART='${{ matrix.PART }}'
           EOF
       - name: Download json reports
         uses: actions/download-artifact@v4
@@ -242,8 +311,8 @@ jobs:
           python3
             -u alter/regression.py
             --clickhouse-binary-path ${{ env.clickhouse_path }}
-            --only "/alter/${{ matrix.ONLY }} partition/*"
-            --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+            --only "/alter/${{ matrix.ONLY }} partition/${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}*"
+            --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.ONLY }}${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
             ${{ env.args }} || EXITCODE=$?;
           .github/add_link_to_logs.sh;
           exit $EXITCODE
@@ -253,7 +322,7 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           JOB_OUTCOME: ${{ steps.run_suite.outcome }}
-          SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition"
+          SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition ${{ matrix.PART }}"
       - name: Create and upload logs
         if: always()
         run: .github/create_and_upload_logs.sh 1
@@ -264,7 +333,7 @@ jobs:
       - uses: actions/upload-artifact@v4
         if: always()
         with:
-          name: alter-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts
+          name: alter-${{ matrix.ONLY }}${{ matrix.PART && format('-{0}', matrix.PART) || '' }}-${{ inputs.arch }}-artifacts
           path: ${{ env.artifact_paths}}
 
   Benchmark:
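In the Alter job above, `attach` is now split into two shards while `replace` and `move` stay unsharded; the `${{ matrix.PART && format('part {0}/', matrix.PART) || '' }}` expression only inserts a `part N/` path component for matrix entries that define `PART`. A rough illustration of the `--only` filters each matrix entry should end up with (the loop below is illustrative and not part of the workflow):

```bash
#!/usr/bin/env bash
# Print the --only filter expected for each Alter matrix entry.
for entry in "replace:" "move:" "attach:1" "attach:2"; do
    only="${entry%%:*}"   # matrix.ONLY
    part="${entry#*:}"    # matrix.PART, empty when the entry defines none
    suffix=""
    [ -n "$part" ] && suffix="part ${part}/"
    echo "--only \"/alter/${only} partition/${suffix}*\""
done
```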
repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE @@ -338,7 +408,12 @@ jobs: name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths }} - ClickHouseKeeperSSL: + ClickHouseKeeper: + strategy: + fail-fast: false + matrix: + PART: [1, 2] + SSL: [ssl, no_ssl] needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -353,7 +428,9 @@ jobs: cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{runner.temp}}/reports_dir SUITE=clickhouse_keeper - STORAGE=/ssl + STORAGE=/${{ matrix.SSL }} + PART=${{ matrix.PART }} + SSL=${{ matrix.SSL == 'ssl' && '--ssl' || '' }} EOF - name: Download json reports uses: actions/download-artifact@v4 @@ -371,10 +448,10 @@ jobs: id: run_suite run: EXITCODE=0; python3 - -u ${{ env.SUITE }}/regression.py - --ssl + -u ${{ env.SUITE }}/regression.py ${{ env.SSL }} --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }}, ${{ matrix.SSL }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --only "part ${{ matrix.PART }}/*" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE @@ -384,7 +461,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper SSL" + SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper ${{ matrix.SSL }} ${{ matrix.PART }}" - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -395,7 +472,7 @@ jobs: - uses: actions/upload-artifact@v4 if: always() with: - name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts + name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-${{ matrix.SSL }}-artifacts path: ${{ env.artifact_paths }} LDAP: @@ -436,7 +513,7 @@ jobs: python3 -u ${{ env.SUITE }}/regression.py --clickhouse-binary-path ${{ env.clickhouse_path }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" 
user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.SUITE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE @@ -562,7 +639,7 @@ jobs: --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE @@ -586,11 +663,79 @@ jobs: name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths }} + SSLServer: + strategy: + fail-fast: false + matrix: + PART: [1, 2, 3] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ssl_server + PART=${{ matrix.PART }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + id: run_suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.PART }})" 
job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --only "part ${{ matrix.PART }}/*" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Set Commit Status + if: always() + run: python3 .github/set_builds_status.py + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + JOB_OUTCOME: ${{ steps.run_suite.outcome }} + SUITE_NAME: "Regression ${{ inputs.arch }} ${{ env.SUITE }}-${{ matrix.PART }}" + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - name: Upload logs to regression results database + if: always() + timeout-minutes: 20 + run: .github/upload_results_to_database.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + S3: strategy: fail-fast: false matrix: - STORAGE: [minio, aws_s3, gcs, azure] + STORAGE: [aws_s3, gcs, azure, minio] + PART: [1, 2] + include: + - STORAGE: minio + PART: 3 needs: [runner_labels_setup] runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} timeout-minutes: ${{ inputs.timeout_minutes }} @@ -605,6 +750,7 @@ jobs: cat >> "$GITHUB_ENV" << 'EOF' REPORTS_PATH=${{ runner.temp }}/reports_dir SUITE=s3 + PART=${{ matrix.PART }} STORAGE=/${{ matrix.STORAGE }} EOF - name: Download json reports @@ -636,7 +782,8 @@ jobs: --azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }} --azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }} --azure-container ${{ secrets.AZURE_CONTAINER_NAME }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }}-${{ matrix.PART }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --only ":/try*" ":/part ${{ matrix.PART }}/*" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE @@ -646,7 +793,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} JOB_OUTCOME: ${{ steps.run_suite.outcome }} - SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}" + SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}-${{ matrix.PART }}" - name: Create and upload logs if: always() run: .github/create_and_upload_logs.sh 1 @@ -657,7 +804,7 @@ jobs: - uses: actions/upload-artifact@v4 if: always() with: - name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ matrix.PART }}-${{ inputs.arch }}-artifacts path: ${{ env.artifact_paths}} TieredStorage: @@ -706,7 +853,7 @@ jobs: --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} --with-${{ matrix.STORAGE }} - --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ 
env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name="$GITHUB_JOB (${{ matrix.STORAGE }})" job.retry=$GITHUB_RUN_ATTEMPT job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" ${{ env.args }} || EXITCODE=$?; .github/add_link_to_logs.sh; exit $EXITCODE diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 105efcbd0086..4e4d93f7cbef 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -492,7 +492,7 @@ jobs: secrets: inherit with: test_name: Integration tests (aarch64) - runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none + runner_type: altinity-on-demand, altinity-func-tester-aarch64 data: ${{ needs.RunConfig.outputs.data }} ############################################################################################# ####################################### AST FUZZERS ######################################### @@ -504,7 +504,7 @@ jobs: secrets: inherit with: test_name: AST fuzzer (asan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none + runner_type: altinity-on-demand, altinity-func-tester data: ${{ needs.RunConfig.outputs.data }} ASTFuzzerTsan: needs: [RunConfig, BuilderDebTsan] @@ -513,7 +513,7 @@ jobs: secrets: inherit with: test_name: AST fuzzer (tsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none + runner_type: altinity-on-demand, altinity-func-tester data: ${{ needs.RunConfig.outputs.data }} ASTFuzzerMsan: needs: [RunConfig, BuilderDebMsan] @@ -522,7 +522,7 @@ jobs: secrets: inherit with: test_name: AST fuzzer (msan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none + runner_type: altinity-on-demand, altinity-func-tester data: ${{ needs.RunConfig.outputs.data }} ASTFuzzerUBsan: needs: [RunConfig, BuilderDebUBsan] @@ -531,7 +531,7 @@ jobs: secrets: inherit with: test_name: AST fuzzer (ubsan) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none + runner_type: altinity-on-demand, altinity-func-tester data: ${{ needs.RunConfig.outputs.data }} ASTFuzzerDebug: needs: [RunConfig, BuilderDebDebug] @@ -540,7 +540,7 @@ jobs: secrets: inherit with: test_name: AST fuzzer (debug) - runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none + runner_type: altinity-on-demand, altinity-func-tester data: ${{ needs.RunConfig.outputs.data }} ############################################################################################# ##################################### REGRESSION TESTS ###################################### @@ -552,7 +552,7 @@ 
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 105efcbd0086..4e4d93f7cbef 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -492,7 +492,7 @@ jobs:
     secrets: inherit
     with:
       test_name: Integration tests (aarch64)
-      runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-snapshot-22.04-arm, altinity-startup-snapshot, altinity-setup-none
+      runner_type: altinity-on-demand, altinity-func-tester-aarch64
       data: ${{ needs.RunConfig.outputs.data }}
 #############################################################################################
 ####################################### AST FUZZERS #########################################
   ASTFuzzerASan:
     needs: [RunConfig, BuilderDebAsan]
@@ -504,7 +504,7 @@ jobs:
     secrets: inherit
     with:
       test_name: AST fuzzer (asan)
-      runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+      runner_type: altinity-on-demand, altinity-func-tester
       data: ${{ needs.RunConfig.outputs.data }}
   ASTFuzzerTsan:
     needs: [RunConfig, BuilderDebTsan]
@@ -513,7 +513,7 @@ jobs:
     secrets: inherit
     with:
       test_name: AST fuzzer (tsan)
-      runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+      runner_type: altinity-on-demand, altinity-func-tester
       data: ${{ needs.RunConfig.outputs.data }}
   ASTFuzzerMsan:
     needs: [RunConfig, BuilderDebMsan]
@@ -522,7 +522,7 @@ jobs:
     secrets: inherit
     with:
       test_name: AST fuzzer (msan)
-      runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+      runner_type: altinity-on-demand, altinity-func-tester
       data: ${{ needs.RunConfig.outputs.data }}
   ASTFuzzerUBsan:
     needs: [RunConfig, BuilderDebUBsan]
@@ -531,7 +531,7 @@ jobs:
     secrets: inherit
     with:
       test_name: AST fuzzer (ubsan)
-      runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+      runner_type: altinity-on-demand, altinity-func-tester
       data: ${{ needs.RunConfig.outputs.data }}
   ASTFuzzerDebug:
     needs: [RunConfig, BuilderDebDebug]
@@ -540,7 +540,7 @@ jobs:
     secrets: inherit
     with:
       test_name: AST fuzzer (debug)
-      runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none
+      runner_type: altinity-on-demand, altinity-func-tester
       data: ${{ needs.RunConfig.outputs.data }}
 #############################################################################################
 ##################################### REGRESSION TESTS ######################################
@@ -552,7 +552,7 @@ jobs:
     secrets: inherit
     with:
       runner_type: altinity-on-demand, altinity-regression-tester
-      commit: e3c00be97a045aa04e9d1a6ec50cc64f4c387b70
+      commit: fd33d9e73ebd14392d601d87902ddf0e7c90709c
       arch: release
       build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
       timeout_minutes: 300
@@ -563,7 +563,7 @@ jobs:
     secrets: inherit
     with:
       runner_type: altinity-on-demand, altinity-regression-tester-aarch64
-      commit: e3c00be97a045aa04e9d1a6ec50cc64f4c387b70
+      commit: fd33d9e73ebd14392d601d87902ddf0e7c90709c
       arch: aarch64
       build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
       timeout_minutes: 300
diff --git a/docker/test/fuzzer/run-fuzzer.sh b/docker/test/fuzzer/run-fuzzer.sh
index 2a5956340d69..f7ba3fa93ac1 100755
--- a/docker/test/fuzzer/run-fuzzer.sh
+++ b/docker/test/fuzzer/run-fuzzer.sh
@@ -24,7 +24,10 @@ BASE_REF=${BASE_REF:="master"}
 function git_clone_with_retry
 {
     for _ in 1 2 3 4; do
-        if git clone --depth 1 https://github.com/Altinity/ClickHouse.git -b "${BASE_REF}" -- "$1" 2>&1 | ts '%Y-%m-%d %H:%M:%S';then
+        # Strip refs/tags/ prefix if present, as --branch expects just the tag/branch name
+        local ref_name="${BASE_REF#refs/tags/}"
+
+        if git clone --depth 1 https://github.com/Altinity/ClickHouse.git --branch "${ref_name}" -- "$1" 2>&1 | ts '%Y-%m-%d %H:%M:%S';then
             return 0
         else
             sleep 0.5
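The `${BASE_REF#refs/tags/}` expansion used above strips a leading `refs/tags/` when it is present and leaves any other value (for example a plain branch name) untouched, so the same clone command works for tag and branch refs alike. A quick sketch of that behaviour (the tag value is made up for illustration):

```bash
#!/usr/bin/env bash

BASE_REF="refs/tags/v24.3.5.1-example"   # hypothetical tag ref
echo "${BASE_REF#refs/tags/}"            # -> v24.3.5.1-example

BASE_REF="master"                        # plain branch name
echo "${BASE_REF#refs/tags/}"            # -> master (unchanged)
```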
diff --git a/docker/test/stress/run.sh b/docker/test/stress/run.sh
index 48348aa131ca..cf87c5eca088 100644
--- a/docker/test/stress/run.sh
+++ b/docker/test/stress/run.sh
@@ -60,6 +60,10 @@ azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log &
 
 config_logs_export_cluster /etc/clickhouse-server/config.d/system_logs_export.yaml
 
+# NOTE(strtgbb): Trying to avoid errors that may be related to running out of resources
+export CLICKHOUSE_MAX_THREADS=8
+export CLICKHOUSE_MAX_CONCURRENT_QUERIES=4
+
 start_server
 
 setup_logs_replication
diff --git a/tests/broken_tests.json b/tests/broken_tests.json
index 6aec8d3336af..358e0967f14a 100644
--- a/tests/broken_tests.json
+++ b/tests/broken_tests.json
@@ -1,11 +1,29 @@
 {
     "02700_s3_part_INT_MAX": {
-        "reason": "Fails on asan"
+        "reason": "INVESTIGATE: Fails on asan"
     },
     "02815_no_throw_in_simple_queries": {
-        "reason": "Fails on asan,msan,tsan,debug,Aarch64"
+        "reason": "INVESTIGATE: Fails on asan,msan,tsan,debug,Aarch64"
     },
     "03206_no_exceptions_clickhouse_local": {
-        "reason": "Fails on asan,msan,tsan,debug,Aarch64"
+        "reason": "INVESTIGATE: Fails on asan,msan,tsan,debug,Aarch64"
+    },
+    "02479_race_condition_between_insert_and_droppin_mv": {
+        "reason": "INVESTIGATE: Unstable on ubsan"
+    },
+    "01825_new_type_json_ghdata": {
+        "reason": "INVESTIGATE: Sometime out of memory on msan"
+    },
+    "00159_parallel_formatting_http": {
+        "reason": "INVESTIGATE: Unstable on tsan"
+    },
+    "02941_variant_type_3": {
+        "reason": "INVESTIGATE: Memory limit exceeded on asan"
+    },
+    "03008_local_plain_rewritable": {
+        "reason": "INVESTIGATE: Memory limit exceeded on asan"
+    },
+    "02490_benchmark_max_consecutive_errors": {
+        "reason": "INVESTIGATE: Unstable on msan"
     }
 }
\ No newline at end of file
diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py
index f9bc53324714..db4e7cb51fe3 100644
--- a/tests/ci/ci_config.py
+++ b/tests/ci/ci_config.py
@@ -275,7 +275,8 @@ class CI:
             required_builds=[BuildNames.PACKAGE_ASAN]
         ),
         JobNames.STATEFUL_TEST_TSAN: CommonJobConfigs.STATEFUL_TEST.with_properties(
-            required_builds=[BuildNames.PACKAGE_TSAN]
+            required_builds=[BuildNames.PACKAGE_TSAN],
+            timeout=2 * 3600,
         ),
         JobNames.STATEFUL_TEST_MSAN: CommonJobConfigs.STATEFUL_TEST.with_properties(
             required_builds=[BuildNames.PACKAGE_MSAN]
@@ -397,18 +398,19 @@ class CI:
             required_builds=[BuildNames.PACKAGE_DEBUG], pr_only=True
         ),
         JobNames.INTEGRATION_TEST_ASAN: CommonJobConfigs.INTEGRATION_TEST.with_properties(
-            required_builds=[BuildNames.PACKAGE_ASAN], num_batches=4,
-            timeout=9000,  # the job timed out with default value (7200)
+            required_builds=[BuildNames.PACKAGE_ASAN],
+            num_batches=4,
+            timeout=3 * 3600,  # the job timed out with default value (7200)
         ),
         JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER: CommonJobConfigs.INTEGRATION_TEST.with_properties(
             required_builds=[BuildNames.PACKAGE_ASAN],
             num_batches=6,
-            timeout=12000,  # the job timed out with default value (7200)
+            timeout=4 * 3600,  # the job timed out with default value (7200)
         ),
         JobNames.INTEGRATION_TEST_TSAN: CommonJobConfigs.INTEGRATION_TEST.with_properties(
             required_builds=[BuildNames.PACKAGE_TSAN],
             num_batches=6,
-            timeout=12000,  # the job timed out with default value (7200)
+            timeout=4 * 3600,  # the job timed out with default value (7200)
         ),
         JobNames.INTEGRATION_TEST_ARM: CommonJobConfigs.INTEGRATION_TEST.with_properties(
             required_builds=[BuildNames.PACKAGE_AARCH64],
@@ -419,7 +421,7 @@ class CI:
         JobNames.INTEGRATION_TEST: CommonJobConfigs.INTEGRATION_TEST.with_properties(
             required_builds=[BuildNames.PACKAGE_RELEASE],
             num_batches=4,
-            #release_only=True,
+            # release_only=True,
             timeout=12000,  # the job timed out with default value (7200)
         ),
         JobNames.INTEGRATION_TEST_FLAKY: CommonJobConfigs.INTEGRATION_TEST.with_properties(
@@ -558,11 +560,11 @@ class CI:
         ),
         JobNames.SIGN_RELEASE: JobConfig(
             required_builds=[BuildNames.PACKAGE_RELEASE],
-            runner_type=Runners.STYLE_CHECKER
+            runner_type=Runners.STYLE_CHECKER,
         ),
         JobNames.SIGN_AARCH64: JobConfig(
             required_builds=[BuildNames.PACKAGE_AARCH64],
-            runner_type=Runners.STYLE_CHECKER_ARM
+            runner_type=Runners.STYLE_CHECKER_ARM,
         ),
     }
 
diff --git a/tests/ci/s3_helper.py b/tests/ci/s3_helper.py
index 5d9d0758e5c3..141cbbd8c58c 100644
--- a/tests/ci/s3_helper.py
+++ b/tests/ci/s3_helper.py
@@ -20,9 +20,7 @@
     S3_URL,
 )
 
-sensitive_var_pattern = re.compile(
-    r"\b[A-Z_]*(?