diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6e2f4a85..38953967 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -9,7 +9,23 @@ on:
     types: [published]
 
 jobs:
+  check-skip:
+    runs-on: ubuntu-latest
+    outputs:
+      should-skip: ${{ github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}
+    steps:
+      - name: Check for PERF-TEST-ONLY label
+        id: check
+        run: |
+          if [[ "${{ github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}" == "true" ]]; then
+            echo "Label PERF-TEST-ONLY is present, skipping build"
+          else
+            echo "Proceeding with build"
+          fi
+
   check-access:
+    needs: check-skip
+    if: needs.check-skip.outputs.should-skip != 'true'
     runs-on: ubuntu-latest
     outputs:
       has-token-access: ${{ steps.check.outputs.has-token-access }}
@@ -634,10 +650,11 @@ jobs:
 
   install-deps:
     needs:
+      - check-skip
       - build-demoapp
       - merge-router
     runs-on: ubuntu-latest
-    if: ${{ !failure() && (needs.build-demoapp.result == 'skipped' || needs.build-demoapp.result == 'success') && (needs.merge-router.result == 'skipped' || needs.merge-router.result == 'success') }}
+    if: ${{ !failure() && needs.check-skip.outputs.should-skip != 'true' && (needs.build-demoapp.result == 'skipped' || needs.build-demoapp.result == 'success') && (needs.merge-router.result == 'skipped' || needs.merge-router.result == 'success') }}
     steps:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/configure-nodejs
@@ -659,7 +676,7 @@ jobs:
     outputs:
       stack-url-suffix: ${{ steps.getDeployUrl.outputs.stack-url-suffix }}
       url: https://lambdadispatch${{ steps.getDeployUrl.outputs.stack-url-suffix }}.ghpublic.pwrdrvr.com
-    if: ${{ !cancelled() && needs.install-deps.result != 'failed' }}
+    if: ${{ needs.install-deps.result == 'success' }}
     env:
       DEMO_APP_REGISTRY_IMAGE: public.ecr.aws/pwrdrvr/lambda-dispatch-demo-app${{ github.event_name == 'pull_request' && '-dev' || '' }}
       ROUTER_REGISTRY_IMAGE: public.ecr.aws/pwrdrvr/lambda-dispatch-router${{ github.event_name == 'pull_request' && '-dev' || '' }}
@@ -706,7 +723,7 @@ jobs:
   smoke-test:
     needs: [deploy]
     runs-on: ubuntu-latest
-    if: ${{ !cancelled() && needs.deploy.result != 'failed' }}
+    if: ${{ needs.deploy.result == 'success' }}
     steps:
       - name: Smoke Test
         run: |
@@ -723,10 +740,20 @@ jobs:
             curl ${BASE_URL}/ping
           done
 
+  # Run the performance test suite against the PR deployment
+  performance-test:
+    needs:
+      - smoke-test
+      - deploy
+    if: ${{ needs.smoke-test.result == 'success' && github.event_name == 'pull_request' }}
+    uses: ./.github/workflows/performance-test.yml
+    with:
+      pr_number: ${{ github.event.pull_request.number }}
+
   create-status-checks:
-    needs: [deploy, smoke-test]
+    needs: [deploy, smoke-test, performance-test]
     runs-on: ubuntu-latest
-    if: ${{ !cancelled() && needs.deploy.result != 'failed' && github.event_name == 'pull_request' }}
+    if: ${{ needs.deploy.result == 'success' && github.event_name == 'pull_request' }}
     steps:
       - name: Generate URLs
         id: generate-urls
diff --git a/.github/workflows/perf-test-only.yml b/.github/workflows/perf-test-only.yml
new file mode 100644
index 00000000..d8436a23
--- /dev/null
+++ b/.github/workflows/perf-test-only.yml
@@ -0,0 +1,27 @@
+name: Performance Test Only
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened, labeled]
+
+jobs:
+  check-label:
+    runs-on: ubuntu-latest
+    outputs:
+      should-run: ${{ contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}
+    steps:
+      - name: Check for PERF-TEST-ONLY label
+        id: check
+        run: |
+          if [[ "${{ contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}" == "true" ]]; then
+            echo "Label PERF-TEST-ONLY is present"
+          else
+            echo "Label PERF-TEST-ONLY is not present"
+          fi
+
+  perf-test:
+    needs: check-label
+    if: needs.check-label.outputs.should-run == 'true'
+    uses: ./.github/workflows/performance-test.yml
+    with:
+      pr_number: ${{ github.event.pull_request.number }}
diff --git a/.github/workflows/performance-test.yml b/.github/workflows/performance-test.yml
new file mode 100644
index 00000000..5e9f616b
--- /dev/null
+++ b/.github/workflows/performance-test.yml
@@ -0,0 +1,95 @@
+name: Performance Test
+
+on:
+  workflow_call:
+    inputs:
+      pr_number:
+        required: true
+        type: string
+        description: 'PR number, used to construct the test URL'
+
+jobs:
+  performance-test:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Install oha
+        run: |
+          wget https://github.com/hatoo/oha/releases/download/v0.5.8/oha-linux-amd64 -O oha
+          chmod +x oha
+          sudo mv oha /usr/local/bin/
+
+      - name: Get Test URL
+        id: get-url
+        run: |
+          if [ "${{ inputs.pr_number }}" != "" ]; then
+            echo "url=https://lambdadispatch-pr-${{ inputs.pr_number }}.ghpublic.pwrdrvr.com" >> $GITHUB_OUTPUT
+          else
+            echo "url=https://lambdadispatch.ghpublic.pwrdrvr.com" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Run Performance Tests
+        id: perf-test
+        run: |
+          # Create results directory
+          mkdir -p test-results
+
+          # Run performance tests and save results
+          echo "Running latency test (20 concurrent, 60s)..."
+          oha --no-tui -j -c 20 -z 60s ${{ steps.get-url.outputs.url }}/ping > test-results/latency.json
+          cat test-results/latency.json
+
+          echo "Running throughput test (100 concurrent, 60s)..."
+          oha --no-tui -j -c 100 -z 60s ${{ steps.get-url.outputs.url }}/ping > test-results/throughput.json
+          cat test-results/throughput.json
+
+          # Parse results and create markdown table
+          node -e '
+            const fs = require("fs");
+            const latencyResults = JSON.parse(fs.readFileSync("test-results/latency.json"));
+            const throughputResults = JSON.parse(fs.readFileSync("test-results/throughput.json"));
+
+            const formatNumber = (num) => Number(num).toLocaleString(undefined, { maximumFractionDigits: 2 });
+
+            const table = [
+              "### 🚀 Performance Test Results",
+              "",
+              "| Metric | Latency Test (20 concurrent) | Throughput Test (100 concurrent) |",
+              "|--------|----------------------------|--------------------------------|",
+              `| Duration | ${formatNumber(latencyResults.summary.total)} | ${formatNumber(throughputResults.summary.total)} |`,
+              `| Total Requests | ${formatNumber(latencyResults.summary.total * latencyResults.summary.requestsPerSec)} | ${formatNumber(throughputResults.summary.total * throughputResults.summary.requestsPerSec)} |`,
+              `| Total Success | ${formatNumber(latencyResults.statusCodeDistribution["200"])} | ${formatNumber(throughputResults.statusCodeDistribution["200"])} |`,
+              `| Requests/sec | ${formatNumber(latencyResults.summary.requestsPerSec)} | ${formatNumber(throughputResults.summary.requestsPerSec)} |`,
+              `| Mean Latency | ${formatNumber(latencyResults.summary.average * 1000)}ms | ${formatNumber(throughputResults.summary.average * 1000)}ms |`,
+              `| p95 Latency | ${formatNumber(latencyResults.latencyPercentiles.p95 * 1000)}ms | ${formatNumber(throughputResults.latencyPercentiles.p95 * 1000)}ms |`,
+              `| p99 Latency | ${formatNumber(latencyResults.latencyPercentiles.p99 * 1000)}ms | ${formatNumber(throughputResults.latencyPercentiles.p99 * 1000)}ms |`,
+              `| Max Latency | ${formatNumber(latencyResults.summary.slowest * 1000)}ms | ${formatNumber(throughputResults.summary.slowest * 1000)}ms |`,
+              "",
+              "_Note: Tests run against the /ping endpoint for 60 seconds each._",
+              "",
+              `*Last updated: ${new Date().toISOString()}*`,
+              "" // Add empty line at the end
+            ].join("\n");
+
+            fs.writeFileSync("test-results/table.md", table);
+          '
+
+          # Save table content to outputs
+          echo "performance_results<<EOF" >> $GITHUB_OUTPUT
+          cat test-results/table.md >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+
+      - name: Find Performance Results Comment
+        uses: peter-evans/find-comment@v3
+        id: find-comment
+        with:
+          issue-number: ${{ inputs.pr_number }}
+          comment-author: 'github-actions[bot]'
+          body-includes: '### 🚀 Performance Test Results'
+
+      - name: Post Performance Results Comment
+        uses: peter-evans/create-or-update-comment@v4
+        with:
+          comment-id: ${{ steps.find-comment.outputs.comment-id }}
+          edit-mode: replace
+          issue-number: ${{ inputs.pr_number }}
+          body: ${{ steps.perf-test.outputs.performance_results }}
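
The inline `node -e` report script above dereferences a specific set of fields from oha's JSON output (`summary.total`, `summary.requestsPerSec`, `summary.average`, `summary.slowest`, `latencyPercentiles.p95`/`p99`, and `statusCodeDistribution["200"]`). Below is a minimal sketch of a guard that could run right after the two `oha` invocations to fail fast if any of those fields is missing; the file name `check-oha-output.js` and the idea of a separate guard are assumptions, and the field list only mirrors what this workflow reads, not oha's full output schema.

```js
// check-oha-output.js — hypothetical guard for the fields the report script reads.
// Usage: node check-oha-output.js test-results/latency.json test-results/throughput.json
const fs = require("fs");

// Dotted paths that the workflow's table builder dereferences on each oha result file.
const requiredPaths = [
  "summary.total",            // test duration in seconds
  "summary.requestsPerSec",
  "summary.average",          // mean latency in seconds
  "summary.slowest",          // max latency in seconds
  "latencyPercentiles.p95",
  "latencyPercentiles.p99",
  "statusCodeDistribution.200",
];

// Resolve a dotted path against an object, returning undefined if any segment is missing.
const lookup = (obj, path) =>
  path.split(".").reduce((node, key) => (node == null ? undefined : node[key]), obj);

let failed = false;
for (const file of process.argv.slice(2)) {
  const results = JSON.parse(fs.readFileSync(file, "utf8"));
  for (const path of requiredPaths) {
    if (lookup(results, path) === undefined) {
      console.error(`${file}: missing expected field "${path}"`);
      failed = true;
    }
  }
}
process.exit(failed ? 1 : 0);
```

Exiting non-zero from such a check would fail the `Run Performance Tests` step before an incomplete results table is posted to the PR.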