Skip to content

Commit 6cc62ff

Browse files
committed
Merge antalya-25.6.5
2 parents 70f47ed + abb3c0b commit 6cc62ff

File tree

333 files changed

+8399
-3298
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

333 files changed

+8399
-3298
lines changed

.github/actions/create_workflow_report/action.yml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,16 @@ runs:
2121
2222
- name: Create and upload workflow report
2323
env:
24+
PR_NUMBER: ${{ github.event.pull_request.number || 0 }}
2425
ACTIONS_RUN_URL: ${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}
26+
COMMIT_SHA: ${{ steps.set_version.outputs.commit_sha || github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
2527
FINAL: ${{ inputs.final }}
2628
shell: bash
2729
run: |
2830
pip install clickhouse-driver==0.2.8 numpy==1.26.4 pandas==2.0.3 jinja2==3.1.5
2931
3032
CMD="python3 .github/actions/create_workflow_report/create_workflow_report.py"
31-
ARGS="--actions-run-url $ACTIONS_RUN_URL --known-fails tests/broken_tests.json --cves"
33+
ARGS="--actions-run-url $ACTIONS_RUN_URL --known-fails tests/broken_tests.json --cves --pr-number $PR_NUMBER"
3234
3335
set +e -x
3436
if [[ "$FINAL" == "false" ]]; then

.github/actions/create_workflow_report/ci_run_report.html.jinja

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,7 @@
152152
</tr>
153153
<tr>
154154
<th class="hth no-sort">Build Report</th>
155-
<td><a href="https://s3.amazonaws.com/{{ s3_bucket }}/{{ pr_number }}/{{ commit_sha }}/builds/report.html">Build Report</a></td>
155+
<td>{% for job_name, link in build_report_links.items() %}<a href="{{ link }}">[{{ job_name }}]</a> {% endfor %}</td>
156156
</tr>
157157
<tr>
158158
<th class="hth no-sort">Date</th>

.github/actions/create_workflow_report/create_workflow_report.py

Lines changed: 114 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from datetime import datetime
88
from functools import lru_cache
99
from glob import glob
10+
import urllib.parse
1011

1112
import pandas as pd
1213
from jinja2 import Environment, FileSystemLoader
@@ -478,11 +479,15 @@ def format_test_name_for_linewrap(text: str) -> str:
478479

479480
def format_test_status(text: str) -> str:
480481
"""Format the test status for better readability."""
481-
color = (
482-
"red"
483-
if text.lower().startswith("fail")
484-
else "orange" if text.lower() in ("error", "broken", "pending") else "green"
485-
)
482+
if text.lower().startswith("fail"):
483+
color = "red"
484+
elif text.lower() == "skipped":
485+
color = "grey"
486+
elif text.lower() in ("success", "ok", "passed", "pass"):
487+
color = "green"
488+
else:
489+
color = "orange"
490+
486491
return f'<span style="font-weight: bold; color: {color}">{text}</span>'
487492

488493

@@ -510,6 +515,103 @@ def format_results_as_html_table(results) -> str:
510515
return html
511516

512517

518+
def backfill_skipped_statuses(
519+
job_statuses: pd.DataFrame, pr_number: int, branch: str, commit_sha: str
520+
):
521+
"""
522+
Fill in the job statuses for skipped jobs.
523+
"""
524+
525+
if pr_number == 0:
526+
ref_param = f"REF={branch}"
527+
workflow_name = "MasterCI"
528+
else:
529+
ref_param = f"PR={pr_number}"
530+
workflow_name = "PR"
531+
532+
status_file = f"result_{workflow_name.lower()}.json"
533+
s3_path = f"https://{S3_BUCKET}.s3.amazonaws.com/{ref_param.replace('=', 's/')}/{commit_sha}/{status_file}"
534+
response = requests.get(s3_path)
535+
536+
if response.status_code != 200:
537+
return job_statuses
538+
539+
status_data = response.json()
540+
skipped_jobs = []
541+
for job in status_data["results"]:
542+
if job["status"] == "skipped" and len(job["links"]) > 0:
543+
skipped_jobs.append(
544+
{
545+
"job_name": job["name"],
546+
"job_status": job["status"],
547+
"message": job["info"],
548+
"results_link": job["links"][0],
549+
}
550+
)
551+
552+
return pd.concat([job_statuses, pd.DataFrame(skipped_jobs)], ignore_index=True)
553+
554+
555+
def get_build_report_links(
556+
job_statuses: pd.DataFrame, pr_number: int, branch: str, commit_sha: str
557+
):
558+
"""
559+
Get the build report links for the given PR number, branch, and commit SHA.
560+
561+
First checks if a build job submitted a success or skipped status.
562+
If not available, it guesses the links.
563+
"""
564+
build_job_names = [
565+
"Build (amd_release)",
566+
"Build (arm_release)",
567+
"Docker server image",
568+
"Docker keeper image",
569+
]
570+
build_report_links = {}
571+
572+
for job in job_statuses.itertuples():
573+
if (
574+
job.job_name in build_job_names
575+
and job.job_status
576+
in (
577+
"success",
578+
"skipped",
579+
)
580+
and job.results_link
581+
):
582+
build_report_links[job.job_name] = job.results_link
583+
584+
if 0 < len(build_report_links) < len(build_job_names):
585+
# Only have some of the build jobs, guess the rest.
586+
# (It was straightforward to force the build jobs to always appear in the cache,
587+
# however doing the same for the docker image jobs is difficult.)
588+
ref_job, ref_link = list(build_report_links.items())[0]
589+
link_template = ref_link.replace(
590+
urllib.parse.quote(ref_job, safe=""), "{job_name}"
591+
)
592+
for job in build_job_names:
593+
if job not in build_report_links:
594+
build_report_links[job] = link_template.format(job_name=job)
595+
596+
if len(build_report_links) > 0:
597+
return build_report_links
598+
599+
# No cache or build result was found, guess the links
600+
if pr_number == 0:
601+
ref_param = f"REF={branch}"
602+
workflow_name = "MasterCI"
603+
else:
604+
ref_param = f"PR={pr_number}"
605+
workflow_name = "PR"
606+
607+
build_report_link_base = f"https://{S3_BUCKET}.s3.amazonaws.com/json.html?{ref_param}&sha={commit_sha}&name_0={urllib.parse.quote(workflow_name, safe='')}"
608+
build_report_links = {
609+
job_name: f"{build_report_link_base}&name_1={urllib.parse.quote(job_name, safe='')}"
610+
for job_name in build_job_names
611+
}
612+
return build_report_links
613+
614+
513615
def parse_args() -> argparse.Namespace:
514616
parser = argparse.ArgumentParser(description="Create a combined CI report.")
515617
parser.add_argument( # Need the full URL rather than just the ID to query the databases
@@ -626,6 +728,10 @@ def create_workflow_report(
626728
except Exception as e:
627729
pr_info_html = e
628730

731+
fail_results["job_statuses"] = backfill_skipped_statuses(
732+
fail_results["job_statuses"], pr_number, branch_name, commit_sha
733+
)
734+
629735
high_cve_count = 0
630736
if not cves_not_checked and len(fail_results["docker_images_cves"]) > 0:
631737
high_cve_count = (
@@ -666,6 +772,9 @@ def create_workflow_report(
666772
),
667773
"pr_new_fails": len(fail_results["pr_new_fails"]),
668774
},
775+
"build_report_links": get_build_report_links(
776+
fail_results["job_statuses"], pr_number, branch_name, commit_sha
777+
),
669778
"ci_jobs_status_html": format_results_as_html_table(
670779
fail_results["job_statuses"]
671780
),
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
#!/bin/bash
22
# This script is for generating preview reports when invoked as a post-hook from a praktika job
33
pip install clickhouse-driver==0.2.8 numpy==1.26.4 pandas==2.0.3 jinja2==3.1.5
4-
ARGS="--mark-preview --known-fails tests/broken_tests.json --cves --actions-run-url $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"
4+
ARGS="--mark-preview --known-fails tests/broken_tests.json --cves --actions-run-url $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID --pr-number $PR_NUMBER"
55
CMD="python3 .github/actions/create_workflow_report/create_workflow_report.py"
66
$CMD $ARGS
77

.github/workflows/backport_branches.yml

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1494,7 +1494,7 @@ jobs:
14941494
secrets: inherit
14951495
with:
14961496
docker_image: altinityinfra/clickhouse-server
1497-
# version: ${{ fromJson(needs.config_workflow.outputs.data).custom_data.version.string }}
1497+
version: ${{ fromJson(needs.config_workflow.outputs.data).custom_data.version.string }}
14981498
tag-suffix: ${{ matrix.suffix }}
14991499
GrypeScanKeeper:
15001500
needs: [config_workflow, docker_keeper_image]
@@ -1503,28 +1503,28 @@ jobs:
15031503
secrets: inherit
15041504
with:
15051505
docker_image: altinityinfra/clickhouse-keeper
1506-
# version: ${{ fromJson(needs.config_workflow.outputs.data).custom_data.version.string }}
1506+
version: ${{ fromJson(needs.config_workflow.outputs.data).custom_data.version.string }}
15071507

15081508
RegressionTestsRelease:
15091509
needs: [config_workflow, build_amd_release]
1510-
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') && !contains(fromJson(needs.config_workflow.outputs.data).pull_request.body, '[x] <!---ci_exclude_regression')}}
1510+
if: ${{ !failure() && !cancelled() && !contains(github.event.pull_request.body, '[x] <!---ci_exclude_regression')}}
15111511
uses: ./.github/workflows/regression.yml
15121512
secrets: inherit
15131513
with:
15141514
runner_type: altinity-on-demand, altinity-regression-tester
1515-
commit: 38b4f3c4cbcf7b38c97e16793c210a1496075af7
1515+
commit: aa4204a74b901a0f2ea7c9f1d631d98221554fb1
15161516
arch: release
15171517
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
15181518
timeout_minutes: 300
15191519
workflow_config: ${{ needs.config_workflow.outputs.data }}
15201520
RegressionTestsAarch64:
15211521
needs: [config_workflow, build_arm_release]
1522-
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') && !contains(fromJson(needs.config_workflow.outputs.data).pull_request.body, '[x] <!---ci_exclude_regression') && !contains(fromJson(needs.config_workflow.outputs.data).pull_request.body, '[x] <!---ci_exclude_aarch64')}}
1522+
if: ${{ !failure() && !cancelled() && !contains(github.event.pull_request.body, '[x] <!---ci_exclude_regression') && !contains(github.event.pull_request.body, '[x] <!---ci_exclude_aarch64')}}
15231523
uses: ./.github/workflows/regression.yml
15241524
secrets: inherit
15251525
with:
15261526
runner_type: altinity-on-demand, altinity-regression-tester-aarch64
1527-
commit: 38b4f3c4cbcf7b38c97e16793c210a1496075af7
1527+
commit: aa4204a74b901a0f2ea7c9f1d631d98221554fb1
15281528
arch: aarch64
15291529
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
15301530
timeout_minutes: 300

.github/workflows/grype_scan.yml

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,6 @@ jobs:
6969
VERSION=$SPECIFIED_VERSION
7070
fi
7171
echo "docker_image=${{ inputs.docker_image }}:$PR_NUMBER-$VERSION$TAG_SUFFIX" >> $GITHUB_OUTPUT
72-
echo "commit_sha=$CLICKHOUSE_VERSION_GITHASH" >> $GITHUB_OUTPUT
7372
7473
- name: Run Grype Scan
7574
run: |
@@ -85,7 +84,7 @@ jobs:
8584
id: upload_results
8685
env:
8786
S3_BUCKET: "altinity-build-artifacts"
88-
COMMIT_SHA: ${{ steps.set_version.outputs.commit_sha || github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
87+
COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
8988
PR_NUMBER: ${{ env.PR_NUMBER || github.event.pull_request.number || 0 }}
9089
DOCKER_IMAGE: ${{ steps.set_version.outputs.docker_image || inputs.docker_image }}
9190
run: |
@@ -132,15 +131,18 @@ jobs:
132131
with:
133132
github-token: ${{ secrets.GITHUB_TOKEN }}
134133
script: |
134+
const totalHighCritical = '${{ steps.create_summary.outputs.total_high_critical }}';
135+
const hasError = totalHighCritical === '';
136+
const hasVulnerabilities = parseInt(totalHighCritical) > 0;
135137
github.rest.repos.createCommitStatus({
136138
owner: context.repo.owner,
137139
repo: context.repo.repo,
138140
sha: '${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}',
139-
state: '${{ steps.create_summary.outputs.total_high_critical > 0 && 'failure' || 'success' }}',
141+
state: hasError ? 'error' : hasVulnerabilities ? 'failure' : 'success',
140142
target_url: '${{ steps.upload_results.outputs.https_s3_path }}/results.html',
141-
description: 'Grype Scan Completed with ${{ steps.create_summary.outputs.total_high_critical }} high/critical vulnerabilities',
143+
description: hasError ? 'An error occurred' : `Grype Scan Completed with ${totalHighCritical} high/critical vulnerabilities`,
142144
context: 'Grype Scan ${{ steps.set_version.outputs.docker_image || inputs.docker_image }}'
143-
})
145+
});
144146
145147
- name: Upload artifacts
146148
if: always()

0 commit comments

Comments (0)