diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml
index 17ac1c23fa7..2e62a77143d 100644
--- a/.github/workflows/benchmarks.yml
+++ b/.github/workflows/benchmarks.yml
@@ -6,29 +6,39 @@ on:
     branches:
       - main
       - main-version-*
-    paths-ignore:
-      - "src/HotChocolate/AspNetCore/benchmarks/k6/performance-data.json"
+  push:
+    branches:
+      - main
 
 concurrency:
-  group: benchmarks-${{ github.event.pull_request.number }}
+  group: benchmarks-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true
 
 jobs:
   hotchocolate-core:
     name: "HotChocolate Core Benchmarks"
-    if: github.event.pull_request.draft == false
+    if: github.event_name == 'push' || github.event.pull_request.draft == false
     runs-on: benchmarking
     permissions:
      contents: write
       pull-requests: write
     steps:
-      - name: Checkout PR code
+      - name: Checkout current repository
         uses: actions/checkout@v4
         with:
           fetch-depth: 0
           show-progress: false
 
+      - name: Checkout performance data repository
+        uses: actions/checkout@v4
+        with:
+          repository: ChilliCream/graphql-platform-performance-data
+          token: ${{ secrets.PERFORMANCE_DATA_TOKEN }}
+          path: performance-data-repo
+          fetch-depth: 0
+          show-progress: false
+
       - name: Restore dependencies
         run: dotnet restore src/HotChocolate/AspNetCore/benchmarks/k6/eShop.slnx
@@ -41,14 +51,14 @@ jobs:
           echo "APPHOST_PID=$APPHOST_PID" >> $GITHUB_ENV
 
           echo "Waiting for server to be ready..."
-          for i in {1..30}; do
+          for i in {1..15}; do
             if curl -s -o /dev/null -w "%{http_code}" http://localhost:5224/graphql -X POST \
               -H "Content-Type: application/json" \
              -d '{"query": "{ __typename }"}' | grep -q "200"; then
               echo "Server is ready!"
               break
             fi
-            echo "Waiting... ($i/30)"
+            echo "Waiting... ($i/15)"
             sleep 2
           done
@@ -56,7 +66,7 @@ jobs:
         working-directory: src/HotChocolate/AspNetCore/benchmarks/k6
         run: |
           chmod +x run-and-collect.sh
-          ./run-and-collect.sh performance-data-current.json
+          ./run-and-collect.sh hotchocolate-core-performance-data-current.json
 
       - name: Stop AppHost
         if: always()
@@ -66,46 +76,28 @@ jobs:
             wait $APPHOST_PID 2>/dev/null || true
           fi
 
-      - name: Commit and push performance data to current branch
-        working-directory: src/HotChocolate/AspNetCore/benchmarks/k6
-        run: |
-          # Copy the performance data to the tracked filename
-          cp performance-data-current.json performance-data.json
-
-          # Configure git
-          git config user.name "github-actions[bot]"
-          git config user.email "github-actions[bot]@users.noreply.github.com"
-
-          # Add and commit the performance data
-          git add performance-data.json
-
-          # Only commit if there are changes
-          if ! git diff --staged --quiet; then
-            git commit -m "Update performance data [skip ci]"
-            git push origin HEAD:${{ github.head_ref }}
-          else
-            echo "No changes to performance data"
-          fi
-
-      - name: Fetch baseline performance data from main
+      - name: Fetch baseline performance data from external repo
+        if: github.event_name == 'pull_request'
         run: |
-          git fetch origin main:main
-          if git show main:src/HotChocolate/AspNetCore/benchmarks/k6/performance-data.json > baseline-performance.json 2>/dev/null; then
-            echo "Baseline data fetched successfully"
+          if [ -f performance-data-repo/hotchocolate-core-performance-data.json ]; then
+            echo "Baseline data fetched successfully from performance data repository"
+            cp performance-data-repo/hotchocolate-core-performance-data.json baseline-performance.json
             cat baseline-performance.json
           else
-            echo "No baseline data found on main branch"
+            echo "No baseline data found in performance data repository"
             # Don't create the file - let the comparison script handle missing baseline
             rm -f baseline-performance.json
           fi
 
       - name: Compare performance and generate report
+        if: github.event_name == 'pull_request'
         working-directory: src/HotChocolate/AspNetCore/benchmarks/k6
         run: |
           chmod +x compare-performance.sh
-          ./compare-performance.sh performance-data-current.json ../../../../../baseline-performance.json performance-report.md
+          ./compare-performance.sh hotchocolate-core-performance-data-current.json ../../../../../baseline-performance.json performance-report.md
 
       - name: Comment PR with performance report
+        if: github.event_name == 'pull_request'
         uses: actions/github-script@v7
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
@@ -142,12 +134,13 @@ jobs:
         with:
           name: performance-data
           path: |
-            src/HotChocolate/AspNetCore/benchmarks/k6/performance-data-current.json
+            src/HotChocolate/AspNetCore/benchmarks/k6/hotchocolate-core-performance-data-current.json
             src/HotChocolate/AspNetCore/benchmarks/k6/performance-report.md
             /tmp/apphost.log
           retention-days: 30
 
       - name: Check for performance regression
+        if: github.event_name == 'pull_request'
         working-directory: src/HotChocolate/AspNetCore/benchmarks/k6
         run: |
           # Fail the build if there's a significant performance regression
@@ -156,3 +149,25 @@ jobs:
             # Uncomment the next line to fail the build on regression
             # exit 1
           fi
+
+      - name: Store performance data to external repository
+        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+        working-directory: performance-data-repo
+        run: |
+          # Copy the new performance data
+          cp ../src/HotChocolate/AspNetCore/benchmarks/k6/hotchocolate-core-performance-data-current.json hotchocolate-core-performance-data.json
+
+          # Configure git
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+
+          # Add and commit the performance data
+          git add hotchocolate-core-performance-data.json
+
+          # Only commit if there are changes
+          if ! git diff --staged --quiet; then
+            git commit -m "Update HotChocolate core performance data from ${{ github.sha }}"
+            git push
+          else
+            echo "No changes to performance data"
+          fi
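Note on the workflow change: pull requests now only read a baseline from the external ChilliCream/graphql-platform-performance-data repository, while pushes to main write fresh numbers back to it. The sketch below is a rough local reproduction of the baseline-fetch step, assuming read access to that repository; the HTTPS clone URL is inferred from the workflow's `repository:` value and the plain `git clone` (instead of actions/checkout) is an assumption for local use, not part of the workflow.

```bash
#!/usr/bin/env bash
# Sketch: reproduce "Fetch baseline performance data from external repo" locally.
# The clone URL is inferred from the workflow and not verified here.
set -euo pipefail

git clone --depth 1 \
  https://github.com/ChilliCream/graphql-platform-performance-data \
  performance-data-repo

if [ -f performance-data-repo/hotchocolate-core-performance-data.json ]; then
  echo "Baseline data fetched successfully from performance data repository"
  cp performance-data-repo/hotchocolate-core-performance-data.json baseline-performance.json
  cat baseline-performance.json
else
  echo "No baseline data found in performance data repository"
  # Don't create the file - the comparison script handles a missing baseline
  rm -f baseline-performance.json
fi
```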
diff --git a/src/HotChocolate/AspNetCore/benchmarks/k6/compare-performance.sh b/src/HotChocolate/AspNetCore/benchmarks/k6/compare-performance.sh
index 7c95dcf04f9..ca95dcb2408 100755
--- a/src/HotChocolate/AspNetCore/benchmarks/k6/compare-performance.sh
+++ b/src/HotChocolate/AspNetCore/benchmarks/k6/compare-performance.sh
@@ -94,16 +94,30 @@ get_emoji() {
 # Function to format change
 format_change() {
     local change=$1
-    local show_sign=${2:-true}
+    local metric_type=$2  # "latency" or "throughput"
+    local show_sign=${3:-true}
 
     if [ "$change" == "0" ] || [ -z "$change" ]; then
         echo "(no change)"
     elif [ "$show_sign" == "true" ]; then
-        if (( $(echo "$change > 0" | bc -l) )); then
-            echo "(${change}% worse)"
+        local is_positive=$(echo "$change > 0" | bc -l)
+        local abs_change=$(echo "$change" | tr -d '-')
+
+        # For latency: lower is better (negative change = better)
+        # For throughput: higher is better (positive change = better)
+        if [ "$metric_type" == "throughput" ]; then
+            if [ "$is_positive" == "1" ]; then
+                echo "(${change}% better)"
+            else
+                echo "(${abs_change}% worse)"
+            fi
         else
-            local abs_change=$(echo "$change" | tr -d '-')
-            echo "(${abs_change}% better)"
+            # Default to latency behavior (lower is better)
+            if [ "$is_positive" == "1" ]; then
+                echo "(${change}% worse)"
+            else
+                echo "(${abs_change}% better)"
+            fi
         fi
     else
         echo "(${change}%)"
@@ -238,15 +252,15 @@ if [ -n "$BASELINE_FILE" ]; then
 
 | Test | Min | Med | Max | Avg | P90 | P95 | P99 |
 |------|-----|-----|-----|-----|-----|-----|-----|
-| **Single Fetch** | $(get_emoji "$CHANGE_SF_MIN" "latency") $(format_change "$CHANGE_SF_MIN") | $(get_emoji "$CHANGE_SF_P50" "latency") $(format_change "$CHANGE_SF_P50") | $(get_emoji "$CHANGE_SF_MAX" "latency") $(format_change "$CHANGE_SF_MAX") | $(get_emoji "$CHANGE_SF_AVG" "latency") $(format_change "$CHANGE_SF_AVG") | $(get_emoji "$CHANGE_SF_P90" "latency") $(format_change "$CHANGE_SF_P90") | $(get_emoji "$CHANGE_SF_P95" "latency") $(format_change "$CHANGE_SF_P95") | $(get_emoji "$CHANGE_SF_P99" "latency") $(format_change "$CHANGE_SF_P99") |
-| **DataLoader** | $(get_emoji "$CHANGE_DL_MIN" "latency") $(format_change "$CHANGE_DL_MIN") | $(get_emoji "$CHANGE_DL_P50" "latency") $(format_change "$CHANGE_DL_P50") | $(get_emoji "$CHANGE_DL_MAX" "latency") $(format_change "$CHANGE_DL_MAX") | $(get_emoji "$CHANGE_DL_AVG" "latency") $(format_change "$CHANGE_DL_AVG") | $(get_emoji "$CHANGE_DL_P90" "latency") $(format_change "$CHANGE_DL_P90") | $(get_emoji "$CHANGE_DL_P95" "latency") $(format_change "$CHANGE_DL_P95") | $(get_emoji "$CHANGE_DL_P99" "latency") $(format_change "$CHANGE_DL_P99") |
+| **Single Fetch** | $(get_emoji "$CHANGE_SF_MIN" "latency") $(format_change "$CHANGE_SF_MIN" "latency") | $(get_emoji "$CHANGE_SF_P50" "latency") $(format_change "$CHANGE_SF_P50" "latency") | $(get_emoji "$CHANGE_SF_MAX" "latency") $(format_change "$CHANGE_SF_MAX" "latency") | $(get_emoji "$CHANGE_SF_AVG" "latency") $(format_change "$CHANGE_SF_AVG" "latency") | $(get_emoji "$CHANGE_SF_P90" "latency") $(format_change "$CHANGE_SF_P90" "latency") | $(get_emoji "$CHANGE_SF_P95" "latency") $(format_change "$CHANGE_SF_P95" "latency") | $(get_emoji "$CHANGE_SF_P99" "latency") $(format_change "$CHANGE_SF_P99" "latency") |
+| **DataLoader** | $(get_emoji "$CHANGE_DL_MIN" "latency") $(format_change "$CHANGE_DL_MIN" "latency") | $(get_emoji "$CHANGE_DL_P50" "latency") $(format_change "$CHANGE_DL_P50" "latency") | $(get_emoji "$CHANGE_DL_MAX" "latency") $(format_change "$CHANGE_DL_MAX" "latency") | $(get_emoji "$CHANGE_DL_AVG" "latency") $(format_change "$CHANGE_DL_AVG" "latency") | $(get_emoji "$CHANGE_DL_P90" "latency") $(format_change "$CHANGE_DL_P90" "latency") | $(get_emoji "$CHANGE_DL_P95" "latency") $(format_change "$CHANGE_DL_P95" "latency") | $(get_emoji "$CHANGE_DL_P99" "latency") $(format_change "$CHANGE_DL_P99" "latency") |
 
 ### ⚡ Throughput
 
 | Test | Metric | Current | Baseline | Change |
 |------|--------|---------|----------|--------|
-| **Single Fetch** | **Requests/sec** | ${CURRENT_SF_RPS} req/s | ${BASELINE_SF_RPS} req/s | $(get_emoji "$CHANGE_SF_RPS" "throughput") $(format_change "$CHANGE_SF_RPS") |
-| **DataLoader** | **Requests/sec** | ${CURRENT_DL_RPS} req/s | ${BASELINE_DL_RPS} req/s | $(get_emoji "$CHANGE_DL_RPS" "throughput") $(format_change "$CHANGE_DL_RPS") |
+| **Single Fetch** | **Requests/sec** | ${CURRENT_SF_RPS} req/s | ${BASELINE_SF_RPS} req/s | $(get_emoji "$CHANGE_SF_RPS" "throughput") $(format_change "$CHANGE_SF_RPS" "throughput") |
+| **DataLoader** | **Requests/sec** | ${CURRENT_DL_RPS} req/s | ${BASELINE_DL_RPS} req/s | $(get_emoji "$CHANGE_DL_RPS" "throughput") $(format_change "$CHANGE_DL_RPS" "throughput") |
 
 ### 🎯 Reliability
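Note on the script change: format_change now needs to know whether a percentage refers to latency (lower is better) or throughput (higher is better), and the report table rows pass that metric type explicitly. Below is a minimal standalone sketch of the same sign convention; the helper name describe_change and the use of string matching instead of bc are illustrative choices, not code from compare-performance.sh.

```bash
#!/usr/bin/env bash
# Sketch of the latency-vs-throughput sign convention (hypothetical helper).
describe_change() {
  local change=$1 metric_type=$2
  local abs_change=${change#-}   # strip a leading minus sign, if any
  local went_down=0

  if [ -z "$change" ] || [ "$change" == "0" ]; then
    echo "(no change)"
    return
  fi

  case "$change" in
    -*) went_down=1 ;;           # metric decreased vs. baseline
  esac

  if [ "$metric_type" == "throughput" ]; then
    # Higher throughput is better.
    [ "$went_down" == "0" ] && echo "(${change}% better)" || echo "(${abs_change}% worse)"
  else
    # Latency (default): lower is better.
    [ "$went_down" == "0" ] && echo "(${change}% worse)" || echo "(${abs_change}% better)"
  fi
}

describe_change "5.0" "latency"      # -> (5.0% worse)
describe_change "-5.0" "latency"     # -> (5.0% better)
describe_change "5.0" "throughput"   # -> (5.0% better)
describe_change "-5.0" "throughput"  # -> (5.0% worse)
```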
"$CHANGE_DL_P50" "latency") $(format_change "$CHANGE_DL_P50" "latency") | $(get_emoji "$CHANGE_DL_MAX" "latency") $(format_change "$CHANGE_DL_MAX" "latency") | $(get_emoji "$CHANGE_DL_AVG" "latency") $(format_change "$CHANGE_DL_AVG" "latency") | $(get_emoji "$CHANGE_DL_P90" "latency") $(format_change "$CHANGE_DL_P90" "latency") | $(get_emoji "$CHANGE_DL_P95" "latency") $(format_change "$CHANGE_DL_P95" "latency") | $(get_emoji "$CHANGE_DL_P99" "latency") $(format_change "$CHANGE_DL_P99" "latency") | ### ⚡ Throughput | Test | Metric | Current | Baseline | Change | |------|--------|---------|----------|--------| -| **Single Fetch** | **Requests/sec** | ${CURRENT_SF_RPS} req/s | ${BASELINE_SF_RPS} req/s | $(get_emoji "$CHANGE_SF_RPS" "throughput") $(format_change "$CHANGE_SF_RPS") | -| **DataLoader** | **Requests/sec** | ${CURRENT_DL_RPS} req/s | ${BASELINE_DL_RPS} req/s | $(get_emoji "$CHANGE_DL_RPS" "throughput") $(format_change "$CHANGE_DL_RPS") | +| **Single Fetch** | **Requests/sec** | ${CURRENT_SF_RPS} req/s | ${BASELINE_SF_RPS} req/s | $(get_emoji "$CHANGE_SF_RPS" "throughput") $(format_change "$CHANGE_SF_RPS" "throughput") | +| **DataLoader** | **Requests/sec** | ${CURRENT_DL_RPS} req/s | ${BASELINE_DL_RPS} req/s | $(get_emoji "$CHANGE_DL_RPS" "throughput") $(format_change "$CHANGE_DL_RPS" "throughput") | ### 🎯 Reliability diff --git a/src/HotChocolate/AspNetCore/benchmarks/k6/run-and-collect.sh b/src/HotChocolate/AspNetCore/benchmarks/k6/run-and-collect.sh index b82ed9ac37a..fac0f024607 100755 --- a/src/HotChocolate/AspNetCore/benchmarks/k6/run-and-collect.sh +++ b/src/HotChocolate/AspNetCore/benchmarks/k6/run-and-collect.sh @@ -105,11 +105,14 @@ SINGLE_P95=$(extract_metric /tmp/single-fetch-summary.json "http_req_duration{ph SINGLE_P99=$(extract_metric /tmp/single-fetch-summary.json "http_req_duration{phase:measurement}" "p(99)") [ "$SINGLE_P99" == "0" ] && SINGLE_P99=$(extract_metric /tmp/single-fetch-summary.json "http_req_duration" "p(99)") -SINGLE_RPS=$(extract_metric /tmp/single-fetch-summary.json "http_reqs" "rate") +SINGLE_RPS=$(extract_metric /tmp/single-fetch-summary.json "http_reqs{phase:measurement}" "rate") +[ "$SINGLE_RPS" == "0" ] && SINGLE_RPS=$(extract_metric /tmp/single-fetch-summary.json "http_reqs" "rate") + SINGLE_ERROR_RATE=$(extract_metric /tmp/single-fetch-summary.json "http_req_failed{phase:measurement}" "rate") [ "$SINGLE_ERROR_RATE" == "0" ] && SINGLE_ERROR_RATE=$(extract_metric /tmp/single-fetch-summary.json "http_req_failed" "rate") -SINGLE_ITERATIONS=$(extract_metric /tmp/single-fetch-summary.json "iterations" "count") +SINGLE_ITERATIONS=$(extract_metric /tmp/single-fetch-summary.json "iterations{phase:measurement}" "count") +[ "$SINGLE_ITERATIONS" == "0" ] && SINGLE_ITERATIONS=$(extract_metric /tmp/single-fetch-summary.json "iterations" "count") # Extract metrics from dataloader test DATALOADER_MIN=$(extract_metric /tmp/dataloader-summary.json "http_req_duration{phase:measurement}" "min") @@ -133,11 +136,14 @@ DATALOADER_P95=$(extract_metric /tmp/dataloader-summary.json "http_req_duration{ DATALOADER_P99=$(extract_metric /tmp/dataloader-summary.json "http_req_duration{phase:measurement}" "p(99)") [ "$DATALOADER_P99" == "0" ] && DATALOADER_P99=$(extract_metric /tmp/dataloader-summary.json "http_req_duration" "p(99)") -DATALOADER_RPS=$(extract_metric /tmp/dataloader-summary.json "http_reqs" "rate") +DATALOADER_RPS=$(extract_metric /tmp/dataloader-summary.json "http_reqs{phase:measurement}" "rate") +[ "$DATALOADER_RPS" == "0" ] && 
DATALOADER_RPS=$(extract_metric /tmp/dataloader-summary.json "http_reqs" "rate") + DATALOADER_ERROR_RATE=$(extract_metric /tmp/dataloader-summary.json "http_req_failed{phase:measurement}" "rate") [ "$DATALOADER_ERROR_RATE" == "0" ] && DATALOADER_ERROR_RATE=$(extract_metric /tmp/dataloader-summary.json "http_req_failed" "rate") -DATALOADER_ITERATIONS=$(extract_metric /tmp/dataloader-summary.json "iterations" "count") +DATALOADER_ITERATIONS=$(extract_metric /tmp/dataloader-summary.json "iterations{phase:measurement}" "count") +[ "$DATALOADER_ITERATIONS" == "0" ] && DATALOADER_ITERATIONS=$(extract_metric /tmp/dataloader-summary.json "iterations" "count") # Create JSON output cat > "$OUTPUT_FILE" <
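Note on the collection change: requests/sec and iteration counts are now read from the phase:measurement-tagged series first, with the untagged series as a fallback, so warmup traffic no longer inflates throughput. If this pattern grows further, the fallback could be folded into a single helper; the sketch below is one way to do that, assuming the summary files come from k6's --summary-export and that stats live under .metrics["<name>"]["<stat>"]. The helper name and the jq path are assumptions, not verified against extract_metric in the script.

```bash
#!/usr/bin/env bash
# Sketch: one helper for the "tagged metric first, untagged fallback" lookup.
# Hypothetical name; adjust the jq path if extract_metric reads a different layout.
extract_with_fallback() {
  local file=$1 metric=$2 stat=$3
  local value

  # Prefer the measurement-phase sub-metric so warmup traffic is excluded.
  value=$(jq -r --arg m "${metric}{phase:measurement}" --arg s "$stat" \
    '.metrics[$m][$s] // 0' "$file" 2>/dev/null)

  # Fall back to the untagged metric when the tagged one is missing or zero.
  if [ -z "$value" ] || [ "$value" == "null" ] || [ "$value" == "0" ]; then
    value=$(jq -r --arg m "$metric" --arg s "$stat" \
      '.metrics[$m][$s] // 0' "$file" 2>/dev/null)
  fi

  echo "${value:-0}"
}

# Example usage mirroring the lines above:
# SINGLE_RPS=$(extract_with_fallback /tmp/single-fetch-summary.json "http_reqs" "rate")
```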