diff --git a/.depcheckrc.json b/.depcheckrc.json index 662628a7..28529cef 100644 --- a/.depcheckrc.json +++ b/.depcheckrc.json @@ -11,6 +11,8 @@ "@swc/cli", "@swc/core", "ts-node", - "typedoc" + "typedoc", + "@actions/core", + "@actions/github" ] } diff --git a/.github/actions/configure-keystore/action.yml b/.github/actions/configure-keystore/action.yml index 9a67871d..a9224da3 100644 --- a/.github/actions/configure-keystore/action.yml +++ b/.github/actions/configure-keystore/action.yml @@ -8,14 +8,11 @@ inputs: aws-region: description: 'The AWS region where the secret is stored' required: true - secret-name: - description: 'The name of the secret in AWS Secrets Manager' - required: true platform: description: 'The platform for which the keystore is being configured (e.g., ios, android)' required: true - environment: - description: 'The environment for which the keystore is being configured (e.g., qa, flask, main)' + target: + description: 'The target for which the keystore is being configured (e.g., qa, flask, main)' required: true runs: @@ -24,7 +21,7 @@ runs: - name: Determine signing secret name shell: bash run: | - case "${{ inputs.environment }}" in + case "${{ inputs.target }}" in qa) SECRET_NAME="metamask-mobile-qa-signing-certificates" ;; @@ -35,7 +32,7 @@ runs: SECRET_NAME="metamask-mobile-main-signing-certificates" ;; *) - echo "❌ Unknown environment: ${{ inputs.environment }}" + echo "❌ Unknown target: ${{ inputs.target }}" exit 1 ;; esac @@ -103,12 +100,45 @@ runs: security unlock-keychain -p "$CERT_PW" "$KEYCHAIN_PATH" # Import cert - security import "$CERT_PATH" -P "$CERT_PW" -A -t cert -f pkcs12 -k "$KEYCHAIN_PATH" > /dev/null - security set-key-partition-list -S apple-tool:,apple: -k "$CERT_PW" "$KEYCHAIN_PATH" > /dev/null - security find-identity -p codesigning "$KEYCHAIN_PATH" + echo "πŸ” Importing certificate..." + if ! security import "$CERT_PATH" -P "$CERT_PW" -A -t cert -f pkcs12 -k "$KEYCHAIN_PATH"; then + echo "❌ Failed to import certificate. Check if the password is correct or the .p12 is valid." + exit 1 + fi + echo "βœ… Certificate imported" + + # Set key partition list + echo "πŸ”‘ Setting key partition list..." + if ! security set-key-partition-list -S apple-tool:,apple: -k "$CERT_PW" "$KEYCHAIN_PATH" 2>/dev/null; then + echo "❌ Failed to set key partition list. Codesigning tools may not have access." + exit 1 + fi + echo "βœ… Key partition list set" + + # Verify signing identities + echo "πŸ” Verifying code signing identities in keychain..." + IDENTITIES=$(security find-identity -p codesigning "$KEYCHAIN_PATH") + + if ! echo "$IDENTITIES" | grep -q "Valid identities"; then + echo "❌ No valid code signing identities found in keychain." 
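+            # Dump the raw `security find-identity` output so the CI log shows why no valid identity was found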
+ echo "$IDENTITIES" + exit 1 + fi + + # Extract and print alias (first CN string) + CERT_ALIAS=$(echo "$IDENTITIES" | awk -F '"' '/"Apple/ {print $2; exit}') + if [[ -n "$CERT_ALIAS" ]]; then + echo "βœ… Code signing identity available: $CERT_ALIAS" + else + echo "βœ… Code signing identity is available (alias not parsed)" + fi # Install provisioning profile mkdir -p ~/Library/MobileDevice/Provisioning\ Profiles cp "$PROFILE_PATH" ~/Library/MobileDevice/Provisioning\ Profiles/ echo "βœ… Installed provisioning profile" + + echo "Configuring default keychain" + security default-keychain -s "$KEYCHAIN_PATH" + echo "βœ… default keychain set" diff --git a/.github/actions/setup-e2e-env/action.yml b/.github/actions/setup-e2e-env/action.yml index 043fc766..b01c0ccd 100644 --- a/.github/actions/setup-e2e-env/action.yml +++ b/.github/actions/setup-e2e-env/action.yml @@ -1,5 +1,6 @@ name: 'Setup E2E Test Environment' description: 'Sets up the environment for running E2E tests' + inputs: platform: description: 'Platform (ios or android)' @@ -40,10 +41,6 @@ inputs: description: JDK distribution to use (only for Android) required: false default: 'temurin' - ndk-version: - description: NDK version to use (only for Android) - required: false - default: '26.1.10909125' foundry-version: description: Foundry version to install required: false @@ -53,9 +50,9 @@ inputs: required: false default: 'test_e2e_avd' android-device: - description: 'AVD device profile (e.g. "pixel")' + description: 'AVD device profile (e.g. "pixel_5", "pixel", "Nexus 6")' required: false - default: 'pixel' + default: 'pixel_5' android-api-level: description: 'Android API level to use (e.g. "34")' required: false @@ -64,6 +61,14 @@ inputs: description: 'System architecture ABI for the Android system image (e.g. x86_64, arm64-v8a, armeabi-v7a)' required: false default: 'x86_64' + android-tag: + description: 'Android system image tag (e.g. google_apis, default)' + required: false + default: 'google_apis' + android-sdcard-size: + description: 'SD card size for AVD (e.g. 
8092M)' + required: false + default: '8092M' configure-keystores: description: 'Whether to configure keystores for E2E tests' required: false @@ -71,9 +76,9 @@ inputs: keystore-role-to-assume: description: 'AWS IAM role to assume for keystore configuration' required: false - default: 'arn:aws:iam::363762752069:role/metamask-mobile-build-signing-certificate-manager' - environment: - description: 'Environment for which the keystore is being configured (e.g., qa, flask, main)' + default: 'arn:aws:iam::363762752069:role/metamask-mobile-build-signer-qa' + target: + description: 'Target for which the keystore is being configured (e.g., qa, flask, main)' required: false default: 'qa' @@ -84,6 +89,94 @@ runs: - run: echo "Setup E2E Environment started" shell: bash + ## Android Setup (early for fail-fast) ## + + # Set Android environment variables (self-hosted runner has SDK pre-installed) + - name: Set Android environment variables + if: ${{ inputs.platform == 'android' }} + run: | + echo "ANDROID_HOME=/opt/android-sdk" >> "$GITHUB_ENV" + echo "ANDROID_SDK_ROOT=/opt/android-sdk" >> "$GITHUB_ENV" + shell: bash + + - name: Configure Android Signing Certificates + if: ${{ inputs.platform == 'android' && inputs.configure-keystores == 'true' }} + uses: MetaMask/github-tools/.github/actions/configure-keystore@e2e-env-actions + with: + aws-role-to-assume: ${{ inputs.keystore-role-to-assume }} + aws-region: 'us-east-2' + platform: 'android' + target: ${{ inputs.target }} + + ## JDK Setup + - name: Setup Java + if: ${{ inputs.platform == 'android' }} + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 + with: + java-version: ${{ inputs.jdk-version }} + distribution: ${{ inputs.jdk-distribution }} + + - name: Install required emulator dependencies + if: ${{ inputs.platform == 'android' && runner.os == 'Linux' }} + run: | + sudo apt-get update + sudo apt-get install -y \ + libpulse0 \ + libglu1-mesa \ + libnss3 \ + libxss1 + + echo "βœ… Linux dependencies installed successfully" + shell: bash + + ## Android SDK Setup (SDK pre-installed in container) + + - name: Install additional Android SDK components if needed + if: ${{ inputs.platform == 'android' && (inputs.android-api-level != '34' || inputs.android-abi != 'x86_64') }} + run: | + # Only install if different from pre-installed defaults (API 34, x86_64) + IMAGE="system-images;android-${{ inputs.android-api-level }};google_apis;${{ inputs.android-abi }}" + echo "Installing additional system image: $IMAGE" + echo "y" | "/opt/android-sdk/cmdline-tools/latest/bin/sdkmanager" "$IMAGE" + shell: bash + + ## Launch AVD + + - name: Set ANDROID_AVD_HOME for downstream steps + if: ${{ inputs.platform == 'android'}} + shell: bash + run: | + echo "ANDROID_AVD_HOME=$HOME/.android/avd" >> "$GITHUB_ENV" + mkdir -p "$HOME/.android/avd" + + - name: Create Android Virtual Device (AVD) + if: ${{ inputs.platform == 'android'}} + run: | + IMAGE="system-images;android-${{ inputs.android-api-level }};${{ inputs.android-tag }};${{ inputs.android-abi }}" + echo "Creating AVD with image: $IMAGE" + "/opt/android-sdk/cmdline-tools/latest/bin/avdmanager" --verbose create avd \ + --force \ + --name "${{ inputs.android-avd-name }}" \ + --package "$IMAGE" \ + --device "${{ inputs.android-device }}" \ + --tag "${{ inputs.android-tag }}" \ + --abi "${{ inputs.android-abi }}" \ + --sdcard "${{ inputs.android-sdcard-size }}" + shell: bash + + ## iOS Platform Setup ## + + - name: Configure iOS Signing Certificates + if: ${{ inputs.platform == 'ios' && 
inputs.configure-keystores == 'true' }} + uses: MetaMask/github-tools/.github/actions/configure-keystore@self-hosted-runners-config + with: + aws-role-to-assume: ${{ inputs.keystore-role-to-assume }} + aws-region: 'us-east-2' + platform: 'ios' + target: ${{ inputs.target }} + + ## Node.js & JavaScript Dependencies Setup ## + - name: Setup Node.js uses: actions/setup-node@v4 with: @@ -91,10 +184,19 @@ runs: ## Yarn Setup & Cache Management + # - name: Corepack + # id: corepack + # run: corepack enable && corepack prepare yarn@${{ inputs.yarn-version }} --activate + # shell: bash + - name: Corepack id: corepack - run: corepack enable && corepack prepare yarn@${{ inputs.yarn-version }} --activate - shell: bash + uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 #v3.0.2 + with: + timeout_minutes: 15 + max_attempts: 3 + retry_wait_seconds: 30 + command: corepack enable && corepack prepare yarn@${{ inputs.yarn-version }} --activate - name: Restore Yarn cache uses: actions/cache@v4 @@ -105,17 +207,25 @@ runs: restore-keys: | ${{ inputs.cache-prefix }}-yarn-${{ inputs.platform }}-${{ runner.os }}- - - name: Install JavaScript dependencies + - name: Install JavaScript dependencies with retry id: yarn-install - run: yarn install --frozen-lockfile - shell: bash + uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 #v3.0.2 + with: + timeout_minutes: 15 + max_attempts: 3 + retry_wait_seconds: 30 + command: yarn install --frozen-lockfile env: NODE_OPTIONS: --max-old-space-size=4096 # Increase memory limit for Node.js due to large dependencies - - name: Install Detox CLI + - name: Install Detox CLI with retry id: install-detox-cli - run: yarn global add detox-cli - shell: bash + uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 #v3.0.2 + with: + timeout_minutes: 15 + max_attempts: 3 + retry_wait_seconds: 30 + command: yarn global add detox-cli - name: Install Foundry shell: bash @@ -135,14 +245,6 @@ runs: "$FOUNDRY_BIN/foundryup" ## IOS Setup ## - - name: Configure iOS Signing Certificates - if: ${{ inputs.platform == 'ios' && inputs.configure-keystores == 'true' }} - uses: MetaMask/github-tools/.github/actions/configure-keystore@e2e-env-actions - with: - aws-role-to-assume: ${{ inputs.keystore-role-to-assume }} - aws-region: 'us-east-2' - platform: 'ios' - environment: ${{ inputs.environment }} ## Ruby Setup & Cache Management - name: Setup Ruby @@ -155,6 +257,7 @@ runs: - name: Install bundler if: ${{ inputs.platform == 'ios' }} run: gem install bundler -v ${{ inputs.bundler-version }} + working-directory: ios shell: bash # Restore cached Ruby gems @@ -181,6 +284,36 @@ runs: working-directory: ios shell: bash + - name: Generate binstubs for CocoaPods + if: ${{ inputs.platform == 'ios' }} + run: bundle binstubs cocoapods --force --path=vendor/bundle/bin + + working-directory: ios + shell: bash + + - name: Add binstubs to PATH + if: ${{ inputs.platform == 'ios' }} + run: echo "$(pwd)/ios/vendor/bundle/bin" >> "$GITHUB_PATH" + shell: bash + + # Verify CocoaPods is available + - name: Verify CocoaPods + if: ${{ inputs.platform == 'ios' }} + run: | + bundle show cocoapods || (echo "❌ CocoaPods not installed from ios/Gemfile" && exit 1) + bundle exec pod --version + working-directory: ios + shell: bash + + # Verify CocoaPods is available + - name: Verify CocoaPods BinStub + if: ${{ inputs.platform == 'ios' }} + run: | + bundle show cocoapods || (echo "❌ CocoaPods not installed from ios/Gemfile" && exit 1) + pod --version + working-directory: ios + shell: bash + # 
Select Xcode version - name: Select Xcode version if: ${{ inputs.platform == 'ios' }} @@ -200,7 +333,7 @@ runs: # Install CocoaPods w/ cached bundler environment - name: Install CocoaPods via bundler if: ${{ inputs.platform == 'ios'}} - run: bundle exec pod install --repo-update --verbose + run: bundle exec pod install --repo-update working-directory: ios shell: bash @@ -213,116 +346,3 @@ runs: if: ${{ inputs.platform == 'ios' }} run: xcrun simctl list devices shell: bash - - ## Android Setup ## - - ## JDK Setup - - name: Setup Java - if: ${{ inputs.platform == 'android' }} - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 - with: - java-version: ${{ inputs.jdk-version }} - distribution: ${{ inputs.jdk-distribution }} - - - name: Configure Android Signing Certificates - if: ${{ inputs.platform == 'android' && inputs.configure-keystores == 'true' }} - uses: MetaMask/github-tools/.github/actions/configure-keystore@e2e-env-actions - with: - aws-role-to-assume: ${{ inputs.keystore-role-to-assume }} - aws-region: 'us-east-2' - platform: 'android' - environment: ${{ inputs.environment }} - - - name: Enable KVM group perms (Ubuntu only) - if: ${{ inputs.platform == 'android' && runner.os == 'Linux' }} - run: | - echo 'KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"' | sudo tee /etc/udev/rules.d/99-kvm4all.rules - sudo udevadm control --reload-rules - sudo udevadm trigger --name-match=kvm - shell: bash - - ## Android SDK Setup - - - name: Install required emulator dependencies - if: ${{ inputs.platform == 'android' && runner.os == 'Linux' }} - run: | - sudo apt-get update - sudo apt-get install -y \ - libpulse0 \ - libglu1-mesa \ - libnss3 \ - libxss1 - - echo "βœ… Linux dependencies installed successfully" - shell: bash - - - name: Install Android SDK packages - if: ${{ inputs.platform == 'android' }} - run: | - echo "Accepting SDK licenses..." - printf 'y\n%.0s' {1..10} | "${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager" --licenses - - echo "Installing Android SDK components..." - "${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager" --install \ - "platform-tools" \ - "platforms;android-${{ inputs.android-api-level }}" \ - "build-tools;34.0.0" \ - "emulator" \ - "system-images;android-${{ inputs.android-api-level }};google_apis;${{ inputs.android-abi }}" \ - - echo "Updating SDK packages..." 
- "${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager" --update - - echo "βœ… Android SDK packages installed successfully" - shell: bash - - ## NDK Setup - - - name: Debug Android SDK Paths - if: ${{ inputs.platform == 'android' }} - run: | - echo "ANDROID_HOME: $ANDROID_HOME" - echo "ANDROID_SDK_ROOT: $ANDROID_SDK_ROOT" - shell: bash - - - name: Install Android NDK - if: ${{ inputs.platform == 'android' }} - run: | - "$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" "ndk;${{ inputs.ndk-version }}" - shell: bash - - - name: Add Android tools to PATH - if: ${{ inputs.platform == 'android' }} - run: | - echo "$ANDROID_HOME/platform-tools" >> "$GITHUB_PATH" - echo "$ANDROID_HOME/emulator" >> "$GITHUB_PATH" - echo "$ANDROID_HOME/cmdline-tools/latest/bin" >> "$GITHUB_PATH" - shell: bash - - - name: Add NDK related toolchains to PATH - if: ${{ inputs.platform == 'android' }} - run: | - NDK_TOOLCHAIN="$ANDROID_SDK_ROOT/ndk/${{ inputs.ndk-version }}/toolchains/llvm/prebuilt/linux-x86_64/bin" - echo "$NDK_TOOLCHAIN" >> "$GITHUB_PATH" - echo "$ANDROID_SDK_ROOT/ndk/${{ inputs.ndk-version }}" >> "$GITHUB_PATH" - shell: bash - - ## Launch AVD - - - name: Set ANDROID_AVD_HOME for downstream steps - if: ${{ inputs.platform == 'android'}} - shell: bash - run: | - echo "ANDROID_AVD_HOME=$HOME/.android/avd" >> "$GITHUB_ENV" - mkdir -p "$HOME/.android/avd" - - - name: Create Android Virtual Device (AVD) - if: ${{ inputs.platform == 'android'}} - run: | - IMAGE="system-images;android-${{ inputs.android-api-level }};google_apis;${{ inputs.android-abi }}" - echo "Creating AVD with image: $IMAGE" - echo "no" | "${ANDROID_HOME}/cmdline-tools/latest/bin/avdmanager" create avd \ - --name "${{ inputs.android-avd-name }}" \ - --package "$IMAGE" \ - --device "${{ inputs.android-device }}" - shell: bash diff --git a/.github/scripts/create-flaky-test-report.mjs b/.github/scripts/create-flaky-test-report.mjs new file mode 100644 index 00000000..474a5168 --- /dev/null +++ b/.github/scripts/create-flaky-test-report.mjs @@ -0,0 +1,655 @@ +#!/usr/bin/env node + +// Based on the original script done by @itsyoboieltr on Extension repo + +import { Octokit } from '@octokit/rest'; +import unzipper from 'unzipper'; +import { IncomingWebhook } from '@slack/webhook'; + +const githubToken = process.env.GITHUB_TOKEN; +if (!githubToken) throw new Error('Missing GITHUB_TOKEN env var'); + +const env = { + GITHUB_TOKEN: process.env.GITHUB_TOKEN, + LOOKBACK_DAYS: parseInt(process.env.LOOKBACK_DAYS ?? '1'), + TEST_RESULTS_FILE_PATTERN: process.env.TEST_RESULTS_FILE_PATTERN || 'test-runs', + OWNER: process.env.OWNER || 'MetaMask', + REPOSITORY: process.env.REPOSITORY || 'metamask-extension', + WORKFLOW_ID: process.env.WORKFLOW_ID || 'main.yml', + BRANCH: process.env.BRANCH || 'main', + SLACK_WEBHOOK_FLAKY_TESTS: process.env.SLACK_WEBHOOK_FLAKY_TESTS || '', + TEST_REPORT_ARTIFACTS: process.env.TEST_REPORT_ARTIFACTS + ? 
process.env.TEST_REPORT_ARTIFACTS.split(',').map(name => name.trim()) + : ['test-e2e-android-report', 'test-e2e-ios-report', 'test-e2e-chrome-report', 'test-e2e-firefox-report'], +}; + +function getDateRange() { + const today = new Date(); + const daysAgo = new Date(today.getTime() - (env.LOOKBACK_DAYS * 24 * 60 * 60 * 1000)); + + const fromDisplay = daysAgo.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + hour: 'numeric', + minute: '2-digit' + }); + + const toDisplay = today.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + hour: 'numeric', + minute: '2-digit' + }); + + return { + from: daysAgo.toISOString(), + to: today.toISOString(), + display: `${fromDisplay} - ${toDisplay}` + }; +} + +async function getWorkflowRuns(github, from, to) { + try { + const runs = await github.paginate( + github.rest.actions.listWorkflowRuns, + { + owner: env.OWNER, + repo: env.REPOSITORY, + workflow_id: env.WORKFLOW_ID, + branch: env.BRANCH, + created: `${from}..${to}`, + per_page: 100, + } + ); + + // Filter to only completed runs + const completedRuns = runs.filter(run => run.status === 'completed'); + + // Sort by created date (newest first) + completedRuns.sort((a, b) => new Date(b.created_at) - new Date(a.created_at)); + + return completedRuns; + } catch (error) { + if (error.status === 404) { + throw new Error(`Workflow '${env.WORKFLOW_ID}' not found in ${env.OWNER}/${env.REPOSITORY}`); + } + throw error; + } +} + +async function downloadArtifact(github, artifact) { + try { + const response = await github.rest.actions.downloadArtifact({ + owner: env.OWNER, + repo: env.REPOSITORY, + artifact_id: artifact.id, + archive_format: 'zip', + }); + + const buffer = Buffer.from(response.data); + const zip = await unzipper.Open.buffer(buffer); + + const testFile = zip.files.find(file => file.path.startsWith(env.TEST_RESULTS_FILE_PATTERN)); + if (!testFile) { + console.log(` ⚠️ No ${env.TEST_RESULTS_FILE_PATTERN} file found in ${artifact.name}`); + return null; + } + + const content = await testFile.buffer(); + const data = JSON.parse(content.toString()); + + console.log(` Parsed ${artifact.name} (${data.length} top testSuites)`); + return data; + } catch (error) { + console.log(` ❌ Failed to download ${artifact.name}: ${error.message}`); + return null; + } +} + +async function downloadTestArtifacts(github, runs) { + const allTestData = []; + + for (const [index, run] of runs.entries()) { + console.log(`πŸ“¦ Processing run ${index + 1}/${runs.length}: ${run.head_commit?.message?.split('\n')[0] || 'No commit message'}`); + + try { + const artifacts = await github.paginate( + github.rest.actions.listWorkflowRunArtifacts, + { + owner: env.OWNER, + repo: env.REPOSITORY, + run_id: run.id, + } + ); + + const testArtifacts = artifacts.filter(artifact => + env.TEST_REPORT_ARTIFACTS.includes(artifact.name) + ); + + if (testArtifacts.length === 0) { + console.log(` ⚠️ No test artifacts found for run ${run.id}`); + continue; + } + + for (const artifact of testArtifacts) { + const testData = await downloadArtifact(github, artifact); + if (testData) { + allTestData.push(...testData); + } + } + } catch (error) { + console.log(` ❌ Failed to process run ${run.id}: ${error.message}`); + } + } + + return allTestData; +} + + +function extractRealFailures(testData) { + const realFailures = []; + + for (const testRun of testData) { + for (const testFile of testRun.testFiles || []) { + for (const testSuite of testFile.testSuites || []) { + const retryCount = testSuite.attempts ? 
testSuite.attempts.length : 0; + + // Process tests that failed even after retries + for (const testCase of testSuite.testCases || []) { + if (testCase.status === 'failed') { + realFailures.push({ + name: testCase.name, + path: testFile.path, + error: testCase.error || 'No error details', + time: testCase.time || 0, + suite: testSuite.name, + jobId: testSuite.job?.id, + runId: testSuite.job?.runId, + date: new Date(testSuite.date || Date.now()), + retryCount: retryCount, + type: 'real_failure' + }); + } + } + } + } + } + + return realFailures; +} + +function extractFlakyTests(testData) { + const flakyTests = []; + + for (const testRun of testData) { + for (const testFile of testRun.testFiles || []) { + for (const testSuite of testFile.testSuites || []) { + const retryCount = testSuite.attempts ? testSuite.attempts.length : 0; + + // Only process suites that have attempts (retries) + if (retryCount > 0) { + // Track failed tests in attempts + const failedInAttempts = new Map(); + for (const attempt of testSuite.attempts || []) { + for (const testCase of attempt.testCases || []) { + if (testCase.status === 'failed') { + failedInAttempts.set(testCase.name, { + jobId: attempt.job?.id, + runId: attempt.job?.runId, + error: testCase.error || 'No error details', + date: new Date(attempt.date || Date.now()) + }); + } + } + } + + // Process tests that eventually passed but had initial failures + for (const testCase of testSuite.testCases || []) { + if (testCase.status === 'passed' && failedInAttempts.has(testCase.name)) { + const failureInfo = failedInAttempts.get(testCase.name); + flakyTests.push({ + name: testCase.name, + path: testFile.path, + error: failureInfo.error, + time: testCase.time || 0, + suite: testSuite.name, + jobId: failureInfo.jobId, + runId: failureInfo.runId, + date: failureInfo.date, + retryCount: retryCount, + type: 'flaky' + }); + } + } + } + } + } + } + + return flakyTests; +} + + +function summarizeFailures(realFailures, flakyTests = []) { + const summary = new Map(); + + // Process real failures first + for (const test of realFailures) { + if (summary.has(test.name)) { + const existing = summary.get(test.name); + existing.realFailures += 1; + existing.totalRetries += test.retryCount; + // Update to chronologically latest real failure + if (test.date > existing.lastRealFailureDate) { + existing.lastRealFailureJobId = test.jobId; + existing.lastRealFailureRunId = test.runId; + existing.lastRealFailureError = test.error; + existing.lastRealFailureDate = test.date; + } + // Update last seen + if (test.date > existing.lastSeen) { + existing.lastSeen = test.date; + } + } else { + summary.set(test.name, { + name: test.name, + path: test.path, + realFailures: 1, + totalRetries: test.retryCount, + lastSeen: test.date, + suite: test.suite, + lastRealFailureJobId: test.jobId, + lastRealFailureRunId: test.runId, + lastRealFailureError: test.error, + lastRealFailureDate: test.date, + // Initialize flaky info as null + flakyFailureJobId: null, + flakyFailureRunId: null, + flakyFailureError: null, + flakyFailureDate: null + }); + } + } + + // Process flaky tests second + for (const test of flakyTests) { + if (summary.has(test.name)) { + // This test also had real failures - just add flaky info + const existing = summary.get(test.name); + existing.totalRetries += test.retryCount; + // Keep most recent flaky failure info + if (!existing.flakyFailureJobId || test.date > existing.flakyFailureDate) { + existing.flakyFailureJobId = test.jobId; + existing.flakyFailureRunId = test.runId; + 
existing.flakyFailureError = test.error; + existing.flakyFailureDate = test.date; + } + // Update last seen + if (test.date > existing.lastSeen) { + existing.lastSeen = test.date; + } + } else { + // This is purely a flaky test (no real failures) + summary.set(test.name, { + name: test.name, + path: test.path, + realFailures: 0, + totalRetries: test.retryCount, + lastSeen: test.date, + suite: test.suite, + // No real failure info + lastRealFailureJobId: null, + lastRealFailureRunId: null, + lastRealFailureError: null, + lastRealFailureDate: null, + // Flaky failure info + flakyFailureJobId: test.jobId, + flakyFailureRunId: test.runId, + flakyFailureError: test.error, + flakyFailureDate: test.date + }); + } + } + + return Array.from(summary.values()) + .sort((a, b) => { + // Real failures first, sorted by failure count + if (a.realFailures !== b.realFailures) { + return b.realFailures - a.realFailures; + } + // If both have same real failure count, sort by total retries + return b.totalRetries - a.totalRetries; + }); +} + +async function sendSlackReport(summary, dateDisplay, workflowCount, failedCount) { + if (!env.SLACK_WEBHOOK_FLAKY_TESTS || !env.SLACK_WEBHOOK_FLAKY_TESTS.startsWith('https://')) { + console.log('Skipping Slack notification'); + return; + } + + console.log('\nπŸ“€ Sending report to Slack...'); + try { + const webhook = new IncomingWebhook(env.SLACK_WEBHOOK_FLAKY_TESTS); + const blocks = createSlackBlocks(summary, dateDisplay, workflowCount, failedCount); + + // Slack has a limit of 50 blocks per message + const BATCH_SIZE = 50; + for (let i = 0; i < blocks.length; i += BATCH_SIZE) { + const batch = blocks.slice(i, i + BATCH_SIZE); + await webhook.send({ blocks: batch }); + } + + console.log('βœ… Report sent to Slack successfully'); + } catch (slackError) { + console.error('❌ Failed to send Slack notification:', slackError.message); + } +} + +function createSlackBlocks(summary, dateDisplay, workflowCount = 0, failedCount = 0) { + const blocks = []; + + blocks.push({ + type: 'header', + text: { + type: 'plain_text', + text: 'Flaky Test Report - Top 10', + emoji: true + } + }); + + // Calculate counts first + const realFailures = summary.filter(test => test.realFailures > 0); + const flakyTests = summary.filter(test => test.realFailures === 0); + + blocks.push({ + type: 'context', + elements: [{ + type: 'mrkdwn', + text: `Period (UTC): ${dateDisplay} | Repo: ${env.REPOSITORY} | Failed CI Runs: ${failedCount}/${workflowCount} from ${env.BRANCH} branch\nFound: ${realFailures.length} tests failing, ${flakyTests.length} flaky (eventually passed)` + }] + }); + + blocks.push({ type: 'divider' }); + + if (summary.length === 0) { + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: [ + { type: 'text', text: 'No flaky tests found, great job! βœ… ' } + ] + }] + }); + return blocks; + } + + const top10 = summary.slice(0, 10); + + // Real failures section + if (realFailures.length > 0) { + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: [ + { type: 'emoji', name: 'x' }, + { type: 'text', text: ' ' }, + { type: 'text', text: 'Failures', style: { bold: true } } + ] + }] + }); + + // Each failure + top10.filter(test => test.realFailures > 0).forEach((test, idx) => { + const globalIndex = top10.indexOf(test) + 1; + const failText = test.realFailures === 1 ? 'time' : 'times'; + const retryText = test.totalRetries === 1 ? 
'retry' : 'retries'; + + // Create GitHub file URL + const fileUrl = `https://github.com/${env.OWNER}/${env.REPOSITORY}/blob/${env.BRANCH}/${test.path}`; + + // Build elements for this test + const elements = [ + { type: 'text', text: ` ${globalIndex}. ` }, // 2 spaces indent + { type: 'link', url: fileUrl, text: test.name }, + { type: 'text', text: ` (failed ${test.realFailures} ${failText}, ${test.totalRetries} ${retryText})`, style: { bold: true } } + ]; + + if (test.lastRealFailureJobId && test.lastRealFailureRunId) { + const jobUrl = `https://github.com/${env.OWNER}/${env.REPOSITORY}/actions/runs/${test.lastRealFailureRunId}/job/${test.lastRealFailureJobId}`; + elements.push( + { type: 'text', text: ' - ' }, + { type: 'link', url: jobUrl, text: 'last log' } + ); + } + + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: elements + }] + }); + + // Error message (if exists) + const error = test.lastRealFailureError; + if (error) { + const errorPreview = error.length > 150 ? error.substring(0, 150) + '...' : error; + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: [ + { type: 'text', text: ` ${errorPreview.replace(/\n/g, ' ')}`, style: { italic: true } } + ] + }] + }); + } + }); + } + + if (realFailures.length >= 10) { + return blocks; + } + + // Divider between sections if both exist + if (realFailures.length > 0 && flakyTests.length > 0) { + blocks.push({ type: 'divider' }); + } + + // Flaky tests section + if (flakyTests.length > 0) { + // Title + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: [ + { type: 'emoji', name: 'large_yellow_circle' }, + { type: 'text', text: ' ' }, + { type: 'text', text: 'Flaky (eventually passed)', style: { bold: true } } + ] + }] + }); + + // Each flaky test (respecting the 10-item limit) + const displayedRealFailures = Math.min(realFailures.length, 10); + const remainingSlots = 10 - displayedRealFailures; + const flakyTestsToShow = flakyTests.slice(0, remainingSlots); + + flakyTestsToShow.forEach((test, idx) => { + const globalIndex = displayedRealFailures + idx + 1; + const retryText = test.totalRetries === 1 ? 'retry' : 'retries'; + + // Create GitHub file URL + const fileUrl = `https://github.com/${env.OWNER}/${env.REPOSITORY}/blob/${env.BRANCH}/${test.path}`; + + // Build elements for this test + const elements = [ + { type: 'text', text: ` ${globalIndex}. ` }, // 2 spaces indent + { type: 'link', url: fileUrl, text: test.name }, + { type: 'text', text: ` (${test.totalRetries} ${retryText})`, style: { bold: true } } + ]; + + if (test.flakyFailureJobId && test.flakyFailureRunId) { + const jobUrl = `https://github.com/${env.OWNER}/${env.REPOSITORY}/actions/runs/${test.flakyFailureRunId}/job/${test.flakyFailureJobId}`; + elements.push( + { type: 'text', text: ' - ' }, + { type: 'link', url: jobUrl, text: 'last log' } + ); + } + + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: elements + }] + }); + + // Error message (if exists) + const error = test.flakyFailureError; + if (error) { + const errorPreview = error.length > 150 ? error.substring(0, 150) + '...' 
: error; + blocks.push({ + type: 'rich_text', + elements: [{ + type: 'rich_text_section', + elements: [ + { type: 'text', text: ` ${errorPreview.replace(/\n/g, ' ')}`, style: { italic: true } } + ] + }] + }); + } + }); + } + + return blocks; +} + +function displayResults(summary, dateDisplay) { + console.log('\n' + '='.repeat(80)); + console.log(`πŸ“Š REPORT - ${dateDisplay}`); + console.log('='.repeat(80)); + + if (summary.length === 0) { + console.log('\nβœ… No failed tests found, great job!'); + return; + } + + const realFailures = summary.filter(test => test.realFailures > 0); + const flakyTests = summary.filter(test => test.realFailures === 0); + + console.log(`${realFailures.length} real failures (failed even after retries)`); + console.log(`${flakyTests.length} flaky tests (eventually passed after retries)`); + console.log(`\nπŸ“Œ Sorted by: 1) Number of failures ↓ 2) Total retries ↓`); + console.log(`πŸ“Š Numbers shown are cumulative across all runs in the time period\n`); + + const top10 = summary.slice(0, 10); + + for (const [index, test] of top10.entries()) { + console.log(`${(index + 1).toString().padStart(2)}. ${test.name}`); + console.log(` πŸ“ File: ${test.path}`); + + if (test.realFailures > 0) { + // Real failures (tests that failed even after retries) + const failurePlural = test.realFailures > 1 ? 's' : ''; + const retryPlural = test.totalRetries > 1 ? 'retries' : 'retry'; + const retryText = test.totalRetries > 0 ? ` (${test.totalRetries} total ${retryPlural})` : ''; + console.log(` ❌ Failed: ${test.realFailures} time${failurePlural}${retryText}`); + + // Show logs for real failures + if (test.lastRealFailureJobId && test.lastRealFailureRunId) { + console.log(` πŸ”— Logs: https://github.com/${env.OWNER}/${env.REPOSITORY}/actions/runs/${test.lastRealFailureRunId}/job/${test.lastRealFailureJobId}`); + } + + // Show error for real failures + if (test.lastRealFailureError) { + const errorPreview = test.lastRealFailureError.length > 100 + ? test.lastRealFailureError.substring(0, 100) + '...' + : test.lastRealFailureError; + console.log(` πŸ’₯ Error: ${errorPreview.replace(/\n/g, ' ')}`); + } + } else { + // Flaky tests (failed initially but eventually passed) + const retryPlural = test.totalRetries > 1 ? 'retries' : 'retry'; + console.log(` 🟑 Flaky: eventually passed (${test.totalRetries} total ${retryPlural})`); + + // Show logs from when it failed (before retry succeeded) + if (test.flakyFailureJobId && test.flakyFailureRunId) { + console.log(` πŸ”— Logs: https://github.com/${env.OWNER}/${env.REPOSITORY}/actions/runs/${test.flakyFailureRunId}/job/${test.flakyFailureJobId}`); + } + + // Show error from initial failure + if (test.flakyFailureError) { + const errorPreview = test.flakyFailureError.length > 100 + ? test.flakyFailureError.substring(0, 100) + '...' + : test.flakyFailureError; + console.log(` πŸ’₯ Initial error: ${errorPreview.replace(/\n/g, ' ')}`); + } + } + + console.log(''); + } + + if (summary.length > 10) { + console.log(`... 
and ${summary.length - 10} other tests\n`);
+  }
+}
+
+async function main() {
+  const github = new Octokit({ auth: env.GITHUB_TOKEN });
+
+  console.log('πŸ§ͺ🧐 Flaky Test Report\n');
+
+  const dateRange = getDateRange();
+  console.log(`Time range: ${dateRange.from} to ${dateRange.to}\n`);
+
+  try {
+    console.log('Fetching workflow runs...');
+    const workflowRuns = await getWorkflowRuns(github, dateRange.from, dateRange.to);
+
+    if (workflowRuns.length === 0) {
+      console.log('⚠️ No workflow runs found.');
+      return;
+    }
+
+    console.log(`Found ${workflowRuns.length} workflow run(s)`);
+
+    // Count failed runs
+    const failedRuns = workflowRuns.filter(run => run.conclusion === 'failure');
+    console.log(`Failed CI Runs: ${failedRuns.length}/${workflowRuns.length} from ${env.BRANCH}`);
+
+    console.log('Downloading their test artifacts...');
+    const testData = await downloadTestArtifacts(github, workflowRuns);
+
+    if (testData.length === 0) {
+      console.log('⚠️ No test artifacts found in failed runs');
+      return;
+    }
+
+    console.log('Analyzing test failures...');
+
+    // Two-pass approach: process real failures and flaky tests separately
+    const realFailures = extractRealFailures(testData);
+    const flakyTests = extractFlakyTests(testData);
+
+    const summary = summarizeFailures(realFailures, flakyTests);
+    displayResults(summary, dateRange.display);
+    await sendSlackReport(summary, dateRange.display, workflowRuns.length, failedRuns.length);
+
+  } catch (error) {
+    console.error('❌ Error:', error.message);
+    if (error.status === 401) {
+      console.log('\nπŸ’‘ This might be a GitHub token issue. Make sure your token has the right permissions.');
+    }
+    process.exit(1);
+  }
+}
+
+main().catch(error => {
+  console.error('\n❌ Unexpected error:', error);
+  process.exit(1);
+});
diff --git a/.github/scripts/create-platform-release-pr.sh b/.github/scripts/create-platform-release-pr.sh
index a8de3c1e..28381530 100755
--- a/.github/scripts/create-platform-release-pr.sh
+++ b/.github/scripts/create-platform-release-pr.sh
@@ -2,21 +2,47 @@
 # Script to create platform release PRs for MetaMask
 # This script handles the creation of release PRs for both mobile and extension platforms
-# It creates two PRs:
+# It creates three PRs:
 # 1. A release PR with version updates
-# 2. A changelog PR with updated changelog and test plan
+# 2. A changelog PR with updated changelog and test plan (skipped in test mode)
+# 3. A version bump PR for the main branch
+#
+# Usage:
+#   create-platform-release-pr.sh <platform> <previous_version_ref> <new_version> [new_version_number] [git_user_name] [git_user_email]
+#
+# Parameters:
+#   platform             - 'mobile' or 'extension'
+#   previous_version_ref - Previous release version branch name, tag or commit hash (e.g., release/7.7.0, v7.7.0, or 76fbc500034db9779e9ff7ce637ac5be1da0493d)
+#   new_version          - New semantic version (e.g., 7.8.0)
+#   new_version_number   - Build version for mobile platform (optional, required for mobile)
+#   git_user_name        - Git user name for commits (optional, defaults to 'metamaskbot')
+#   git_user_email       - Git user email for commits (optional, defaults to 'metamaskbot@users.noreply.github.com')

 set -e
 set -u
 set -o pipefail

-# Input validation
+# Input assignments (arguments are quoted to prevent word splitting; defaults apply only to the optional args).
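+# Example invocations (illustrative values only):
+#   ./create-platform-release-pr.sh mobile release/7.7.0 7.8.0 1620
+#   ./create-platform-release-pr.sh extension Version-v7.7.0 7.8.0
+#   ./create-platform-release-pr.sh mobile null 7.8.1 1621   # hotfix: previous_version_ref is the literal 'null'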
PLATFORM="${1}" -PREVIOUS_VERSION="${2}" +PREVIOUS_VERSION_REF="${2:-}" +# Normalize whitespace-only values; hotfixes are indicated by the literal string 'null' +PREVIOUS_VERSION_REF="${PREVIOUS_VERSION_REF//[[:space:]]/}" NEW_VERSION="${3}" +NEW_VERSION="${NEW_VERSION//[[:space:]]/}" NEW_VERSION_NUMBER="${4:-}" - -# Validate required parameters +GIT_USER_NAME="${5:-metamaskbot}" +GIT_USER_EMAIL="${6:-metamaskbot@users.noreply.github.com}" + +# Log assigned variables for debugging (after defaults and trimming) +echo "Assigned variables:" +echo "PLATFORM: $PLATFORM" +echo "PREVIOUS_VERSION_REF: $PREVIOUS_VERSION_REF" +echo "NEW_VERSION: $NEW_VERSION" +echo "NEW_VERSION_NUMBER: $NEW_VERSION_NUMBER" +echo "GIT_USER_NAME: $GIT_USER_NAME" +echo "GIT_USER_EMAIL: $GIT_USER_EMAIL" + +# Validate required parameters (allow empty PREVIOUS_VERSION_REF for hotfixes) if [[ -z $PLATFORM ]]; then echo "Error: No platform specified." exit 1 @@ -32,9 +58,6 @@ if [[ -z $NEW_VERSION_NUMBER && $PLATFORM == "mobile" ]]; then exit 1 fi - - - # Helper Functions # --------------- @@ -59,14 +82,7 @@ get_expected_changed_files() { # For all platforms: release/{version} # If TEST_ONLY=true: release-testing/{version} get_release_branch_name() { - local platform="$1" - local new_version="$2" - - # Validate platform - if [[ "$platform" != "mobile" && "$platform" != "extension" ]]; then - echo "Error: Unknown platform '$platform'. Must be 'mobile' or 'extension'." - exit 1 - fi + local new_version="$1" # Use test branch if TEST_ONLY is true if [ "$TEST_ONLY" == "true" ]; then @@ -74,23 +90,178 @@ get_release_branch_name() { return 0 fi - # Different release branch naming for different platforms - if [[ "$platform" == "mobile" ]]; then - echo "release/${new_version}" - elif [[ "$platform" == "extension" ]]; then - echo "Version-v${new_version}" + echo "release/${new_version}" +} + +# Calculate next version for main branch bump +get_next_version() { + local current_version="$1" + + # Parse semantic version (major.minor.patch) + if [[ ! 
$current_version =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then + echo "Error: Invalid semantic version format: $current_version" >&2 + exit 1 + fi + + local major="${BASH_REMATCH[1]}" + local minor="${BASH_REMATCH[2]}" + local patch="${BASH_REMATCH[3]}" + + # Increment minor version and reset patch to 0 + local next_minor=$((minor + 1)) + echo "${major}.${next_minor}.0" +} + +# Returns the version bump branch name based on version and test mode +get_version_bump_branch_name() { + local next_version="$1" + + # Use appropriate prefix based on test mode + if [ "$TEST_ONLY" == "true" ]; then + echo "version-bump-testing/${next_version}" + else + echo "version-bump/${next_version}" + fi +} + +# Main workflow functions +# ----------------------- + +# Helper function to check if branch exists and checkout/create it +checkout_or_create_branch() { + local branch_name="$1" + local base_branch="${2:-}" # Optional base branch for new branches + + echo "Checking for existing branch ${branch_name}" + + if git show-ref --verify --quiet "refs/heads/${branch_name}" || git ls-remote --heads origin "${branch_name}" | grep -q "${branch_name}"; then + echo "Branch ${branch_name} already exists, checking it out" + if git ls-remote --heads origin "${branch_name}" | grep -q "${branch_name}"; then + git fetch origin "${branch_name}" + git checkout "${branch_name}" + else + git checkout "${branch_name}" + fi + else + echo "Creating new branch ${branch_name}" + if [[ -n "$base_branch" ]]; then + git checkout "$base_branch" + git pull origin "$base_branch" + fi + git checkout -b "${branch_name}" fi + + echo "Branch ${branch_name} ready" } -# Main Script -# ---------- +# Helper function to push branch with error handling +push_branch_with_handling() { + local branch_name="$1" + + echo "Pushing changes to the remote.." + if ! git push --set-upstream origin "${branch_name}"; then + echo "No changes to push to ${branch_name}" + # Check if branch exists remotely + if git ls-remote --heads origin "${branch_name}" | grep -q "${branch_name}"; then + echo "Branch ${branch_name} already exists remotely" + else + echo "Error: Failed to push and branch doesn't exist remotely" + exit 1 + fi + fi +} -# Initialize branch names -RELEASE_BRANCH_NAME=$(get_release_branch_name $PLATFORM $NEW_VERSION) -CHANGELOG_BRANCH_NAME="chore/${NEW_VERSION}-Changelog" +# Helper function to create PR if it doesn't exist +create_pr_if_not_exists() { + local branch_name="$1" + local title="$2" + local body="$3" + local base_branch="${4:-main}" + local labels="${5:-}" + local search_method="${6:-head}" # "head" or "search" + + echo "Creating PR for ${branch_name}.." + + # Check if PR already exists using different methods + local pr_exists=false + if [[ "$search_method" == "search" ]]; then + if gh pr list --search "head:${branch_name}" --json number --jq 'length' | grep -q "1"; then + pr_exists=true + fi + else + if gh pr list --head "${branch_name}" --json number --jq 'length' | grep -q "1"; then + pr_exists=true + fi + fi -# Prepare release PR body with team sign-off checklist -RELEASE_BODY="This is the release candidate for version ${NEW_VERSION}. The changelog will be found in another PR ${CHANGELOG_BRANCH_NAME}. 
+ if $pr_exists; then + echo "PR for branch ${branch_name} already exists" + else + # Build command array with conditional label inclusion + local gh_cmd=(gh pr create --draft --title "${title}" --body "${body}" --base "${base_branch}" --head "${branch_name}") + + # Add labels only if provided (GitHub CLI doesn't accept empty label values) + if [[ -n "${labels:-}" ]]; then + gh_cmd+=(--label "${labels}") + fi + + # Execute the command + # echo "Executing: ${gh_cmd[@]}" + "${gh_cmd[@]}" + echo "PR Created: ${title}" + fi +} + +# Configure git for automation +configure_git() { + echo "Configuring git.." + git config user.name "${GIT_USER_NAME}" + git config user.email "${GIT_USER_EMAIL}" + + echo "Fetching from remote..." + git fetch +} + +# Create release branch, update versions, and create PR +create_release_pr() { + local platform="$1" + local new_version="$2" + local new_version_number="$3" + local release_branch_name="$4" + local changelog_branch_name="$5" + + echo "Checking out the release branch: ${release_branch_name}" + git checkout "${release_branch_name}" + + echo "Release Branch Checked Out" + echo "version : ${new_version}" + echo "platform : ${platform}" + + # Version Updates + echo "Running version update scripts.." + ./github-tools/.github/scripts/set-semvar-version.sh "${new_version}" "${platform}" + + # Commit Changes + local changed_files + changed_files=$(get_expected_changed_files "$platform") + echo "Files to be staged for commit: $changed_files" + + echo "Adding and committing changes.." + git add $changed_files + + # Generate commit message based on platform + if [ "$platform" = "mobile" ]; then + if ! git commit -m "bump semvar version to ${new_version} && build version to ${new_version_number}"; then + echo "No changes to commit for mobile version bump" + fi + elif [ "$platform" = "extension" ]; then + if ! git commit -m "bump semvar version to ${new_version}"; then + echo "No changes to commit for extension version bump" + fi + fi + + # Prepare release PR body with team sign-off checklist + local release_body="This is the release candidate for version ${new_version}. The changelog will be found in another PR ${changelog_branch_name}. # Team sign-off checklist - [ ] team-accounts @@ -109,149 +280,264 @@ RELEASE_BODY="This is the release candidate for version ${NEW_VERSION}. The chan # Reference - Testing plan sheet - https://docs.google.com/spreadsheets/d/1tsoodlAlyvEUpkkcNcbZ4PM9HuC9cEM80RZeoVv5OCQ/edit?gid=404070372#gid=404070372" -# Git Configuration -# ---------------- -echo "Configuring git.." -git config user.name metamaskbot -git config user.email metamaskbot@users.noreply.github.com + # Push and create PR using helper functions + push_branch_with_handling "${release_branch_name}" + create_pr_if_not_exists "${release_branch_name}" "release: ${new_version}" "${release_body}" "${BASE_BRANCH}" "" "head" +} + +# Create changelog branch and generate changelog +create_changelog_pr() { + local platform="$1" + local new_version="$2" + local previous_version_ref="$3" + local release_branch_name="$4" + local changelog_branch_name="$5" + + # Use helper function for branch checkout/creation + checkout_or_create_branch "${changelog_branch_name}" + + # Generate Changelog and Test Plan + echo "Generating changelog via auto-changelog.." 
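+  # `update --rc` regenerates the changelog section for the current release candidate from git history,
+  # and `--autoCategorize` buckets commits by their conventional-commit prefix (summary of the
+  # @metamask/auto-changelog flags as used here; see that package's docs for the authoritative behavior).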
+  npx @metamask/auto-changelog@4.1.0 update --rc --repo "${GITHUB_REPOSITORY_URL}" --currentVersion "${new_version}" --autoCategorize
+
+  # Skip commits.csv for hotfix releases (previous_version_ref is literal "null")
+  # - When we create a new major/minor release, we fetch all commits included in the release by diffing HEAD against the previous version reference.
+  # - When we create a new hotfix release, there are no commits included in the release by default (they will be cherry-picked one by one). So we don't have a previous version reference, which is why the value is set to 'null'.
+  if [[ "${previous_version_ref,,}" == "null" ]]; then
+    echo "Hotfix release detected (previous-version-ref is 'null'); skipping commits.csv generation."
+  else
+    # Need to run from .github-tools context to inherit its dependencies/environment
+    echo "Current Directory: $(pwd)"
+    PROJECT_GIT_DIR=$(pwd)
+
+    # By default, DIFF_BASE is set to the provided `previous_version_ref` (which can be a branch name, tag, or commit hash).
+    # If `previous_version_ref` matches a remote branch on origin, we fetch it and update DIFF_BASE to the fully qualified remote ref (`origin/<previous_version_ref>`).
+    # This is required for the `generate-rc-commits.mjs` script to resolve the branch and successfully run the `git log` command.
+    # Otherwise, DIFF_BASE remains unchanged.
+    DIFF_BASE="${previous_version_ref}"
+
+    # Only consider known release branch patterns to avoid regex pitfalls:
+    # - Extension: Version-vx.y.z
+    # - Mobile: release/x.y.z
+    if [[ "${previous_version_ref}" =~ ^Version-v[0-9]+\.[0-9]+\.[0-9]+$ || "${previous_version_ref}" =~ ^release/[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+      echo "Previous version looks like a release branch: ${previous_version_ref}"
+      # Check if the exact branch exists on origin without interpolating into a regex
+      if git ls-remote --heads origin "${previous_version_ref}" | grep -q "."; then
+        echo "Detected remote branch for previous version: ${previous_version_ref}"
+        git fetch origin "${previous_version_ref}"
+        DIFF_BASE="origin/${previous_version_ref}"
+      else
+        echo "Remote branch not found on origin: ${previous_version_ref}. Will use as-is."
+      fi
+    else
+      echo "Previous version is not a recognized release branch pattern. Treating as tag or SHA: ${previous_version_ref}"
+    fi
+
+    # Switch to github-tools directory
+    cd ./github-tools/
+    ls -ltra
+    corepack prepare yarn@4.5.1 --activate
+    # This can't be done from the actions context layer due to the upstream repository having its own context set with yarn
+    yarn --cwd install
+
+    echo "Generating test plan csv.."
+    yarn run gen:commits "${platform}" "${DIFF_BASE}" "${release_branch_name}" "${PROJECT_GIT_DIR}"
+    # Return to project root after generating commits.csv
+    cd ../
+  fi
+
+  # Skipping Google Sheets update since there is no need for it anymore
+  # TODO: Remove this once the current post-main validation approach is stable
+  # if [[ "${TEST_ONLY:-false}" == 'false' ]]; then
+  #   echo "Updating release sheet.."
+  #   # Create a new Release Sheet Page for the new version with our commits.csv content
+  #   yarn run update-release-sheet "${platform}" "${new_version}" "${GOOGLE_DOCUMENT_ID}" "./commits.csv" "${PROJECT_GIT_DIR}" "${MOBILE_TEMPLATE_SHEET_ID}" "${EXTENSION_TEMPLATE_SHEET_ID}"
+  # fi
+  # Note: Only change directories when we actually entered ./github-tools/
+
+  # Commit and Push Changelog Changes (exclude commits.csv)
+  echo "Adding and committing changes.."
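+  # Note: `git commit -am` stages only files git already tracks, so the freshly generated
+  # (untracked) commits.csv stays out of the changelog commit, as intended above.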
+ local commit_msg="update changelog for ${new_version}" + if [[ "${previous_version_ref,,}" == "null" ]]; then + commit_msg="${commit_msg} (hotfix - no test plan)" + fi + if ! (git commit -am "${commit_msg}"); then + echo "No changes detected; skipping commit." + fi -echo "Fetching from remote..." -git fetch + local pr_body="This PR updates the change log for ${new_version}." + if [[ "${previous_version_ref,,}" == "null" ]]; then + pr_body="${pr_body} (Hotfix - no test plan generated.)" + fi -# Release Branch Setup -# ------------------- -echo "Checking out the release branch: ${RELEASE_BRANCH_NAME}" -git checkout "${RELEASE_BRANCH_NAME}" + # Use helper functions for push and PR creation + push_branch_with_handling "${changelog_branch_name}" + create_pr_if_not_exists "${changelog_branch_name}" "chore: ${changelog_branch_name}" "${pr_body}" "${release_branch_name}" "" "search" -echo "Release Branch Checked Out" + echo "Changelog PR Ready" +} -echo "version : ${NEW_VERSION}" -echo "platform : ${PLATFORM}" +# Create version bump PR for main branch +create_version_bump_pr() { + local platform="$1" + local new_version="$2" + local next_version="$3" + local version_bump_branch_name="$4" + local release_branch_name="$5" + local main_branch="${6:-main}" # Default to 'main' if not provided -# Version Updates -# -------------- -echo "Running version update scripts.." -./github-tools/.github/scripts/set-semvar-version.sh "${NEW_VERSION}" ${PLATFORM} + echo "Creating main version bump PR.." + # Use helper function for branch checkout/creation with base branch + checkout_or_create_branch "${version_bump_branch_name}" "${main_branch}" -# Commit Changes -# ------------- -changed_files=$(get_expected_changed_files "$PLATFORM") -echo "Files to be staged for commit: $changed_files" + # Update version files on main branch + echo "Running version update scripts for ${main_branch} branch.." + ./github-tools/.github/scripts/set-semvar-version.sh "${next_version}" "${platform}" -echo "Adding and committing changes.." + # Commit version bump changes + echo "Committing version bump changes.." + local changed_files + changed_files=$(get_expected_changed_files "$platform") + git add $changed_files -# Track our changes -git add $changed_files + if git diff --staged --quiet; then + echo "No changes to commit for version bump" + else + git commit -m "release: Bump version to ${next_version} after release ${new_version} -# Generate commit message based on platform -if [ "$PLATFORM" = "mobile" ]; then - if ! git commit -m "bump semvar version to ${NEW_VERSION} && build version to ${NEW_VERSION_NUMBER}"; then - echo "No changes to commit for mobile version bump" +This automated version bump ensures that: +- ${main_branch} branch version is ahead of the release branch +- Future nightly builds will have correct versioning + +Release version: ${new_version} +New ${main_branch} version: ${next_version} +Platform: ${platform}" + echo "Version bump committed" fi -elif [ "$PLATFORM" = "extension" ]; then - if ! git commit -m "bump semvar version to ${NEW_VERSION}"; then - echo "No changes to commit for extension version bump" + + # Ensure base branch exists locally; fetch from origin if missing + if ! git rev-parse --verify --quiet "refs/heads/${main_branch}" >/dev/null; then + echo "Base branch ${main_branch} not found locally. Attempting to fetch from origin..." 
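+    # `git ls-remote --heads origin <branch>` prints a ref line only if the branch exists on the
+    # remote, so `grep -q "."` simply tests whether the command produced any output.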
+    if git ls-remote --heads origin "${main_branch}" | grep -q "."; then
+      git fetch origin "${main_branch}:${main_branch}" || git fetch origin "${main_branch}"
+      echo "Fetched base branch ${main_branch} from origin."
+    else
+      echo "Error: Base branch not found on origin: ${main_branch}"
+      exit 1
+    fi
+  fi
-# Push Changes and Create Release PR
-# ---------------------------------
-echo "Pushing changes to the remote.."
-if ! git push --set-upstream origin "${RELEASE_BRANCH_NAME}"; then
-  echo "No changes to push to ${RELEASE_BRANCH_NAME}"
-  # Check if branch exists remotely
-  if git ls-remote --heads origin "${RELEASE_BRANCH_NAME}" | grep -q "${RELEASE_BRANCH_NAME}"; then
-    echo "Branch ${RELEASE_BRANCH_NAME} already exists remotely"
-  else
-    echo "Error: Failed to push and branch doesn't exist remotely"
-    exit 1
-  fi
-fi
+  # If the version bump branch has no commits ahead of main, skip pushing/PR creation
+  # Validate refs before computing ahead count to avoid masking errors
+  # Fail fast with an error message if the base branch doesn’t exist locally (or isn’t fetched)
+  # Verifies that ${main_branch} exists and resolves to a valid commit and not a tag, tree, or something else
+  if ! git rev-parse --verify --quiet "${main_branch}^{commit}" >/dev/null; then
+    echo "Error: Base branch does not resolve to a commit: ${main_branch}"
+    exit 1
+  fi
-echo "Creating release PR.."
-# Check if PR already exists
-if gh pr list --head "${RELEASE_BRANCH_NAME}" --json number --jq 'length' | grep -q "1"; then
-  echo "PR for branch ${RELEASE_BRANCH_NAME} already exists"
-else
-  gh pr create \
-    --draft \
-    --title "release: ${NEW_VERSION}" \
-    --body "${RELEASE_BODY}" \
-    --head "${RELEASE_BRANCH_NAME}"
-  echo "Release PR Created"
-fi
+  # Fail fast with an error message if the version bump branch doesn’t exist locally (or isn’t fetched)
+  # Verifies that ${version_bump_branch_name} exists and resolves to a valid commit and not a tag, tree, or something else
+  if ! git rev-parse --verify --quiet "${version_bump_branch_name}^{commit}" >/dev/null; then
+    echo "Error: Version bump branch does not resolve to a commit: ${version_bump_branch_name}"
+    exit 1
+  fi
+  # right-only count gives the number of commits unique to the version bump branch
+  ahead_count=$(git rev-list --right-only --count "${main_branch}...${version_bump_branch_name}")
+  if [ "${ahead_count}" -eq 0 ]; then
+    echo "No differences between ${main_branch} and ${version_bump_branch_name}; skipping version bump PR creation."
+    return 0
+  fi
+
+  local version_bump_body="## Version Bump After Release
-# Changelog Branch Setup
-# ---------------------
-echo "Checking for existing changelog branch ${CHANGELOG_BRANCH_NAME}"
-
-# Check if branch exists locally or remotely
-if git show-ref --verify --quiet refs/heads/"${CHANGELOG_BRANCH_NAME}" || git ls-remote --heads origin "${CHANGELOG_BRANCH_NAME}" | grep -q "${CHANGELOG_BRANCH_NAME}"; then
-  echo "Branch ${CHANGELOG_BRANCH_NAME} already exists, checking it out"
-  git fetch origin "${CHANGELOG_BRANCH_NAME}"
-  git checkout "${CHANGELOG_BRANCH_NAME}"
-else
-  echo "Creating new branch ${CHANGELOG_BRANCH_NAME}"
-  git checkout -b "${CHANGELOG_BRANCH_NAME}"
-fi
-echo "Changelog Branch Ready"
-
-# Generate Changelog and Test Plan
-# ------------------------------
-echo "Generating changelog via auto-changelog.."
-npx @metamask/auto-changelog@4.1.0 update --rc --repo "${GITHUB_REPOSITORY_URL}" --currentVersion "${NEW_VERSION}" --autoCategorize - -# Need to run from .github-tools context to inherit it's dependencies/environment -echo "Current Directory: $(pwd)" -PROJECT_GIT_DIR=$(pwd) -cd ./github-tools/ -ls -ltra -corepack prepare yarn@4.5.1 --activate -# This can't be done from the actions context layer due to the upstream repository having it's own context set with yarn -yarn --cwd install - -echo "Generating test plan csv.." -yarn run gen:commits "${PLATFORM}" "${PREVIOUS_VERSION}" "${RELEASE_BRANCH_NAME}" "${PROJECT_GIT_DIR}" - -if [[ "${TEST_ONLY:-false}" == 'false' ]]; then - echo "Updating release sheet.." - # Create a new Release Sheet Page for the new version with our commits.csv content - yarn run update-release-sheet "${PLATFORM}" "${NEW_VERSION}" "${GOOGLE_DOCUMENT_ID}" "./commits.csv" "${PROJECT_GIT_DIR}" "${MOBILE_TEMPLATE_SHEET_ID}" "${EXTENSION_TEMPLATE_SHEET_ID}" -fi -cd ../ +This PR bumps the ${main_branch} branch version from ${new_version} to ${next_version} after cutting the release branch. -# Commit and Push Changelog Changes -# ------------------------------- -echo "Adding and committing changes.." -git add ./commits.csv +### Why this is needed: +- **Nightly builds**: Each nightly build needs to be one minor version ahead of the current release candidate +- **Version conflicts**: Prevents conflicts between nightlies and release candidates +- **Platform alignment**: Maintains version alignment between MetaMask mobile and extension +- **Update systems**: Ensures nightlies are accepted by app stores and browser update systems +### What changed: +- Version bumped from \`${new_version}\` to \`${next_version}\` +- Platform: \`${platform}\` +- Files updated by \`set-semvar-version.sh\` script -if ! (git commit -am "updated changelog and generated feature test plan"); -then - echo "Error: No changes detected." - exit 1 -fi +### Next steps: +This PR should be **manually reviewed and merged by the release manager** to maintain proper version flow. -PR_BODY="This PR updates the change log for ${NEW_VERSION} and generates the test plan here [commit.csv](${GITHUB_REPOSITORY_URL}/blob/${CHANGELOG_BRANCH_NAME}/commits.csv)" - -echo "Pushing changes to the remote.." -git push --set-upstream origin "${CHANGELOG_BRANCH_NAME}" - -# Create Changelog PR -# ----------------- -echo "Creating changelog PR.." 
-# Check if PR already exists
-if gh pr list --search "head:${CHANGELOG_BRANCH_NAME}" --json number --jq 'length' | grep -q "1"; then
-    echo "Changelog PR for branch ${CHANGELOG_BRANCH_NAME} already exists"
-else
-    gh pr create \
-        --draft \
-        --title "chore: ${CHANGELOG_BRANCH_NAME}" \
-        --body "${PR_BODY}" \
-        --base "${RELEASE_BRANCH_NAME}" \
-        --head "${CHANGELOG_BRANCH_NAME}"
-    echo "Changelog PR Created"
-fi
+### Related:
+- Release version: ${new_version}
+- Release branch: ${release_branch_name}
+- Platform: ${platform}
+- Test mode: ${TEST_ONLY}
+
+---
+*This PR was automatically created by the \`create-platform-release-pr.sh\` script.*"
+
+    # Use helper functions for push and PR creation
+    push_branch_with_handling "${version_bump_branch_name}"
+    create_pr_if_not_exists "${version_bump_branch_name}" "release: Bump ${main_branch} version to ${next_version}" "${version_bump_body}" "${main_branch}" "" "head"
-echo "Changelog PR Ready"
+    echo "Version bump PR ready"
+}
+
+# Main orchestration function
+main() {
+    # Calculate next version for main branch bump
+    local next_version
+    next_version=$(get_next_version "$NEW_VERSION")
+
+    # Initialize branch names
+    local release_branch_name changelog_branch_name version_bump_branch_name
+    release_branch_name=$(get_release_branch_name "$NEW_VERSION")
+    changelog_branch_name="chore/${NEW_VERSION}-Changelog"
+    version_bump_branch_name=$(get_version_bump_branch_name "$next_version")
+
+    # Execute main workflow
+    configure_git
+
+    # Step 1: Create release branch and PR
+    create_release_pr "$PLATFORM" "$NEW_VERSION" "$NEW_VERSION_NUMBER" "$release_branch_name" "$changelog_branch_name"
+
+    # Step 2: Create changelog PR (skip in test mode)
+    if [ "$TEST_ONLY" == "true" ]; then
+        echo "Skipping changelog generation in test mode"
+    else
+        create_changelog_pr "$PLATFORM" "$NEW_VERSION" "$PREVIOUS_VERSION_REF" "$release_branch_name" "$changelog_branch_name"
+    fi
+
+    # Step 3: Create version bump PR for main branch (skip for hotfix releases)
+    if [[ "${PREVIOUS_VERSION_REF,,}" == "null" ]]; then
+        echo "Skipping version bump PR for hotfix release (previous-version-ref is 'null')."
+    else
+        create_version_bump_pr "$PLATFORM" "$NEW_VERSION" "$next_version" "$version_bump_branch_name" "$release_branch_name" "main"
+    fi
+
+    # Final summary
+    echo ""
+    echo "========================================="
+    echo "Release automation complete!"
+    echo "========================================="
+    echo "Created PRs:"
+    echo "1. Release PR: release: ${NEW_VERSION}"
+    if [ "$TEST_ONLY" != "true" ]; then
+        echo "2. Changelog PR: chore: ${changelog_branch_name}"
+        if [[ "${PREVIOUS_VERSION_REF,,}" == "null" ]]; then
+            echo "(Hotfix) Skipped version bump PR"
+        else
+            echo "3. Version bump PR: Bump main version to ${next_version}"
+        fi
+    else
+        if [[ "${PREVIOUS_VERSION_REF,,}" == "null" ]]; then
+            echo "(Hotfix) Skipped version bump PR (test mode - changelog skipped)"
+        else
+            echo "2. Version bump PR: Bump main version to ${next_version} (test mode - changelog skipped)"
+        fi
+    fi
+    echo "========================================="
+}
+
+# Execute main function only if script is run directly (not sourced)
+if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
+    main "$@"
+fi
diff --git a/.github/scripts/generate-rc-commits.mjs b/.github/scripts/generate-rc-commits.mjs
index 0be37e28..4c4069b0 100644
--- a/.github/scripts/generate-rc-commits.mjs
+++ b/.github/scripts/generate-rc-commits.mjs
@@ -59,7 +59,8 @@ async function getTeam(repository, prNumber) {
 }
 
 // Function to filter commits based on unique commit messages and group by teams
-async function filterCommitsByTeam(platform, branchA, branchB) {
+// Input parameters refA and refB can be a branch name, a tag, or a commit hash (e.g., release/7.7.0, v7.7.0, or 76fbc500034db9779e9ff7ce637ac5be1da0493d)
+async function filterCommitsByTeam(platform, refA, refB) {
   const MAX_COMMITS = 500; // Limit the number of commits to process
 
   console.log('Filtering commits by team...');
@@ -81,8 +82,8 @@ async function filterCommitsByTeam(platform, branchA, branchB) {
   const git = simpleGit();
 
   const logOptions = {
-    from: branchB,
-    to: branchA,
+    from: refB,
+    to: refA,
     format: {
       hash: '%H',
       author: '%an',
@@ -92,7 +93,7 @@ async function filterCommitsByTeam(platform, branchA, branchB) {
 
   const log = await git.log(logOptions);
 
-  console.log(`Total commits between ${branchA} and ${branchB}: ${log.total}`);
+  console.log(`Total commits between ${refA} and ${refB}: ${log.total}`);
   console.log(`Processing up to ${Math.min(log.all.length, MAX_COMMITS)} commits...`);
 
   const commitsByTeam = {};
@@ -204,15 +205,15 @@ async function main() {
 
   if (args.length !== 4) {
     console.error(
-      'Usage: node generate-rc-commits.mjs platform branchA branchB',
+      'Usage: node generate-rc-commits.mjs platform refA refB gitDir',
     );
     console.error('Received:', args, ' with length:', args.length);
     process.exit(1);
   }
 
   const platform = args[0];
-  const branchA = args[1];
-  const branchB = args[2];
+  const refA = args[1];
+  const refB = args[2];
   const gitDir = args[3];
 
   // Change the working directory to the git repository path
@@ -220,10 +221,10 @@ async function main() {
   process.chdir(gitDir);
 
   console.log(
-    `Generating CSV file for commits between ${branchA} and ${branchB} on ${platform} platform...`,
+    `Generating CSV file for commits between ${refA} and ${refB} on ${platform} platform...`,
   );
 
-  const commitsByTeam = await filterCommitsByTeam(platform, branchA, branchB);
+  const commitsByTeam = await filterCommitsByTeam(platform, refA, refB);
 
   if (Object.keys(commitsByTeam).length === 0) {
     console.log('No commits found.');
diff --git a/.github/scripts/get-release-timelines.sh b/.github/scripts/get-release-timelines.sh
index a679ab38..e5eab3a5 100755
--- a/.github/scripts/get-release-timelines.sh
+++ b/.github/scripts/get-release-timelines.sh
@@ -32,7 +32,7 @@ echo "release_pr_merged_at,release_submitted_at,rollout_1_at,rollout_10_at,rollo
 release_branch="Version-v${VERSION}"
 release_pr_title="Version v${VERSION}"
 
-release_pr=$(gh pr list --repo "${OWNER}/${REPOSITORY}" --head "${release_branch}" --base master --state merged --json title,mergedAt | jq --arg title "${release_pr_title}" '.[] | select(.title == $title)')
+release_pr=$(gh pr list --repo "${OWNER}/${REPOSITORY}" --head "${release_branch}" --base stable --state merged --json title,mergedAt | jq --arg title "${release_pr_title}" '.[] | select(.title == $title)')
 
 release_pr_merged_at=$(echo "${release_pr}" | jq -r '.mergedAt')
 
[[ -z "${release_pr_merged_at}" || "${release_pr_merged_at}" == "null" ]]; then diff --git a/.github/scripts/post-merge-validation-tracker.mjs b/.github/scripts/post-merge-validation-tracker.mjs new file mode 100644 index 00000000..ec362c1c --- /dev/null +++ b/.github/scripts/post-merge-validation-tracker.mjs @@ -0,0 +1,746 @@ +import { google } from 'googleapis'; +import { Octokit } from '@octokit/rest'; + +const githubToken = process.env.GITHUB_TOKEN; +// can be found in the excel url e.g. https://docs.google.com/spreadsheets/d/1uSERA-Mczy0pjlrr1vv../ +const spreadsheetId = process.env.SHEET_ID; // 1uSERA-Mczy0pjlrr1vv... +// GOOGLE_APPLICATION_CREDENTIALS_BASE64 can be found in MM QA 1pasword vault +const googleApplicationCredentialsBase64 = process.env.GOOGLE_APPLICATION_CREDENTIALS_BASE64; +const repo = process.env.REPO || "MetaMask/metamask-extension"; +const LOOKBACK_DAYS = parseInt(process.env.LOOKBACK_DAYS ?? '1'); +const START_HOUR_UTC = parseInt(process.env.START_HOUR_UTC ?? '7'); + +const START_MINUTE_UTC = 0; +const RELEVANT_TITLE_REGEX = /^(feat|perf|fix)\s*(\(|:|!|\/)|\bbump\b/i; +const TEAM_LABEL_PREFIX = 'team-'; +const SIZE_LABEL_PREFIX = 'size-'; +const AUTOMATED_TEST_PATTERNS = [ + /\.test\.(js|ts|tsx)$/, + /\.spec\.(js|ts|tsx)$/, + /\.(test|spec)\.(js|ts|tsx)\.snap$/, + /(^|\/)test\//, + /(^|\/)e2e\//, + /(^|\/)wdio\// +]; + +if (!githubToken) throw new Error('Missing GITHUB_TOKEN env var'); +if (!spreadsheetId) throw new Error('Missing SHEET_ID env var'); +if (!googleApplicationCredentialsBase64) + throw new Error('Missing GOOGLE_APPLICATION_CREDENTIALS_BASE64 env var'); +if (!repo) throw new Error('Missing REPO env var'); + +const octokit = new Octokit({ auth: githubToken }); +const sheets = google.sheets('v4'); + +async function getGoogleAuth() { + const credentials = JSON.parse(Buffer.from(googleApplicationCredentialsBase64, 'base64').toString('utf8')); + const auth = new google.auth.GoogleAuth({ + credentials, + scopes: ['https://www.googleapis.com/auth/spreadsheets'], + }); + return auth.getClient(); +} + +function parseRepo() { + const [owner, repoName] = repo.split('/'); + if (!owner || !repoName) throw new Error(`Invalid repo format "${repo}". Expected format: "owner/repo"`); + return { owner, repo: repoName }; +} + +function repoType(repo) { + if (repo.endsWith('-extension')) return 'extension'; + if (repo.endsWith('-mobile')) return 'mobile'; + return repo; +} + +function tabTitleFor(repo, releaseLabel) { + return `pre-${releaseLabel} (${repoType(repo)})`; +} + +function headerRowFor(type) { + const isMobile = String(type).toLowerCase() === 'mobile'; + const colG = isMobile ? 'Validated (Android)' : 'Validated (Chrome)'; + const colH = isMobile ? 
'Validated (iOS)' : 'Validated (Firefox)'; + return [ + 'Pull Request', + 'Merged Time (UTC)', + 'Author', + 'PR Size', + 'Auto Tests', + 'Team Responsible', + colG, + colH, + ]; +} + +function platformLabelFor(type) { + const t = String(type).toLowerCase(); + if (t === 'mobile') return 'πŸ“± Mobile'; + if (t === 'extension') return 'πŸ”Œ Extension'; + return t; +} + +async function ensureSheetExists(authClient, title, platformType) { + const meta = await sheets.spreadsheets.get({ + spreadsheetId, + auth: authClient, + fields: 'sheets(properties(sheetId,title))', + }); + + const sheetsList = meta.data.sheets || []; + const existing = sheetsList.find((s) => s.properties?.title === title); + if (existing) return { sheetId: existing.properties.sheetId, isNew: false }; + + return createSheetFromTemplateOrBlank(authClient, sheetsList, title, platformType); +} + +async function createSheetFromTemplateOrBlank(authClient, sheetsList, title, platformType) { + // Try to duplicate from a template tab (single template name: 'template') + const templateCandidates = ['template']; + const template = sheetsList.find((s) => templateCandidates.includes(s.properties?.title || '')); + + if (template?.properties?.sheetId != null) { + const duplicateRes = await sheets.spreadsheets.batchUpdate({ + spreadsheetId, + auth: authClient, + requestBody: { + requests: [ + { + duplicateSheet: { + sourceSheetId: template.properties.sheetId, + newSheetName: title, + }, + }, + ], + }, + }); + const newSheetId = duplicateRes.data.replies?.[0]?.duplicateSheet?.properties?.sheetId; + // Write platform label in A1 and platform-specific labels; keep row 2 headers from template to preserve formatting + await sheets.spreadsheets.values.update({ + spreadsheetId, + auth: authClient, + range: `${title}!A1:A1`, + valueInputOption: 'USER_ENTERED', + requestBody: { values: [[platformLabelFor(platformType)]] }, + }); + // Overwrite entire row 2 with headerRowFor(type) + await sheets.spreadsheets.values.update({ + spreadsheetId, + auth: authClient, + range: `${title}!A2:I2`, + valueInputOption: 'USER_ENTERED', + requestBody: { values: [headerRowFor(platformType)] }, + }); + // Insert a blank row at index 2 (0-based) so data can start at row 4 + await sheets.spreadsheets.batchUpdate({ + spreadsheetId, + auth: authClient, + requestBody: { + requests: [ + { + updateSheetProperties: { + properties: { sheetId: newSheetId, hidden: false }, + fields: 'hidden', + }, + }, + { + insertDimension: { + range: { sheetId: newSheetId, dimension: 'ROWS', startIndex: 2, endIndex: 3 }, + inheritFromBefore: false, + }, + }, + ], + }, + }); + console.log(`Duplicated template '${template.properties.title}' β†’ '${title}' and set platform label for type '${platformType}'`); + return { sheetId: newSheetId, isNew: true }; + } + + // No template found: log fail message and create a blank tab + console.log(`❌ Template not found for new tab '${title}'. Candidates tried: ${templateCandidates.join(', ')}. 
Falling back to blank sheet.`); + const addRes = await sheets.spreadsheets.batchUpdate({ + spreadsheetId, + auth: authClient, + requestBody: { + requests: [ + { + addSheet: { + properties: { title }, + }, + }, + ], + }, + }); + const sheetId = addRes.data.replies?.[0]?.addSheet?.properties?.sheetId; + // Write platform label in A1 and dynamic headers in row 2 + await sheets.spreadsheets.values.update({ + spreadsheetId, + auth: authClient, + range: `${title}!A1:A1`, + valueInputOption: 'USER_ENTERED', + requestBody: { values: [[platformLabelFor(platformType)]] }, + }); + await sheets.spreadsheets.values.update({ + spreadsheetId, + auth: authClient, + range: `${title}!A2:I2`, + valueInputOption: 'USER_ENTERED', + requestBody: { values: [headerRowFor(platformType)] }, + }); + await sheets.spreadsheets.batchUpdate({ + spreadsheetId, + auth: authClient, + requestBody: { + requests: [ + { + updateSheetProperties: { + properties: { sheetId, hidden: false }, + fields: 'hidden', + }, + }, + ], + }, + }); + console.log(`Created new sheet tab (no template found): ${title}`); + return { sheetId, isNew: true }; +} + +async function readRows(authClient, title) { + try { + const res = await sheets.spreadsheets.values.get({ + spreadsheetId, + auth: authClient, + range: `${title}!A3:I`, + }); + return res.data.values || []; + } catch (e) { + // If the sheet or range doesn't exist yet + return []; + } +} + +async function appendRows(authClient, title, rows) { + if (!rows.length) return; + await sheets.spreadsheets.values.append({ + spreadsheetId, + auth: authClient, + range: `${title}!A4:I`, + valueInputOption: 'USER_ENTERED', + insertDataOption: 'INSERT_ROWS', + requestBody: { values: rows }, + }); +} + +function uniqKey(number) { + return String(number); +} + +function formatDateHumanUTC(iso) { + if (!iso) return ''; + const d = new Date(iso); + const yyyy = d.getUTCFullYear(); + const mm = String(d.getUTCMonth() + 1).padStart(2, '0'); + const dd = String(d.getUTCDate()).padStart(2, '0'); + const hh = String(d.getUTCHours()).padStart(2, '0'); + const mi = String(d.getUTCMinutes()).padStart(2, '0'); + return `${yyyy}-${mm}-${dd} ${hh}:${mi}`; +} + +function makePrHyperlinkCell(url, title, number) { + const label = `${title} (#${number})`; + // Sheets formula + const escapedUrl = url.replace(/"/g, ''); + const escapedLabel = label.replace(/"/g, ''); + return `=HYPERLINK("${escapedUrl}","${escapedLabel}")`; +} + +function extractTeam(labels) { + const found = labels.find((l) => l.name?.startsWith(TEAM_LABEL_PREFIX)); + return found ? found.name : 'unknown'; +} + +function extractSize(labels) { + const found = labels.find((l) => l.name?.startsWith(SIZE_LABEL_PREFIX)); + return found ? 
found.name : 'unknown'; +} + +function isoSinceAtUTC(days, hour = 2, minute = 0) { + // Returns an ISO timestamp at (today - days) with specific UTC hour:minute, e.g., 02:00Z + const now = new Date(); + const d = new Date(Date.UTC( + now.getUTCFullYear(), + now.getUTCMonth(), + now.getUTCDate(), + 0, 0, 0, 0, + )); + d.setUTCDate(d.getUTCDate() - days); + d.setUTCHours(hour, minute, 0, 0); + return d.toISOString(); +} + +async function fetchMergedPRsSince(owner, repo, sinceDateISO) { + // Strategy: use fast GitHub search API, then add package.json version detection + const since = new Date(sinceDateISO); + const sinceDate = since.toISOString().split('T')[0]; + const prs = []; + let page = 1; + + while (true) { + console.log(`Fetching merged PRs page ${page} for ${owner}/${repo} (since ${sinceDateISO})...`); + + try { + const query = `repo:${owner}/${repo} is:pr is:merged base:main merged:>=${sinceDate}`; + + const { data } = await octokit.rest.search.issuesAndPullRequests({ + q: query, + sort: 'updated', + order: 'desc', + per_page: 100, + page, + advanced_search: true + }); + + if (!data.items.length) break; + console.log(`Found ${data.items.length} PRs on page ${page}`); + + for (const item of data.items) { + if (item.pull_request && item.closed_at) { + const mergedAt = new Date(item.closed_at); + if (mergedAt >= since) { + prs.push({ + number: item.number, + title: item.title, + html_url: item.html_url, + user: { login: item.user?.login || '' }, + labels: item.labels || [], // Basic labels from search + closed_at: item.closed_at, + base_ref: 'main' + }); + } + } + } + + if (data.items.length < 100) break; + page++; + await sleep(200); + } catch (e) { + console.log(`❌ Search API error: ${e.message}`); + break; + } + } + + console.log(`Found ${prs.length} merged PRs since ${sinceDateISO} for ${owner}/${repo}`); + return prs; +} + +function isRelevantTitle(title) { + if (!title) return false; + return RELEVANT_TITLE_REGEX.test(String(title)); +} + +function extractLabelText(formulaOrText) { + const s = String(formulaOrText || ''); + // If it's a HYPERLINK formula like =HYPERLINK("url","Label") extract Label + const m = s.match(/HYPERLINK\([^,]+,\s*"([^"]+)"\)/i); + if (m) return m[1]; + return s; +} + +function parsePrNumberFromCell(cell) { + const text = extractLabelText(cell); + const m = String(text).match(/#(\d+)/); + return m ? 
Number(m[1]) : null; +} + +async function sleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function splitByReleaseAndTitle(items) { + const relevant = []; + let skippedByTitle = 0; + + for (const item of items) { + if (isRelevantTitle(item.title)) { + relevant.push(item); + } else { + skippedByTitle += 1; + } + } + + return { relevant, skippedByTitle }; +} + +// Add efficient version detection with caching +let versionCache = new Map(); // Cache version bumps per repo + +// Automated test detection cache +let automatedTestCache = new Map(); // Cache automated test results per PR: "owner/repo/prNumber" -> "Yes"|"No"|"Unknown" + +async function getVersionTimelineForRepo(owner, repo, sinceDateISO) { + const cacheKey = `${owner}/${repo}`; + if (versionCache.has(cacheKey)) { + return versionCache.get(cacheKey); + } + + console.log(`πŸ” Analyzing version timeline for ${owner}/${repo}...`); + + // Get current version from package.json + const currentVersion = await getCurrentPackageVersion(owner, repo); + if (!currentVersion) { + console.log(`⚠️ Could not determine current version for ${owner}/${repo}`); + return { currentVersion: null, versionBumps: [] }; + } + + // Find version bumps in lookback period + const versionBumps = await findVersionBumpCommits(owner, repo, sinceDateISO); + + // Create timeline: [newest bump, older bump, ..., oldest bump] + const timeline = { + currentVersion: normalizeVersion(currentVersion), + versionBumps: versionBumps, // Already sorted newest first + sinceDate: new Date(sinceDateISO) + }; + + console.log(`πŸ“Š Version timeline for ${owner}/${repo}:`); + console.log(` Current: ${timeline.currentVersion}`); + + if (versionBumps.length === 0) { + console.log(` No version bumps in lookback period - all PRs β†’ ${timeline.currentVersion}`); + } else { + for (const bump of versionBumps) { + console.log(` ${bump.oldVersion} β†’ ${bump.newVersion} at ${formatDateHumanUTC(bump.date)}`); + } + } + + versionCache.set(cacheKey, timeline); + return timeline; +} + +async function getCurrentPackageVersion(owner, repo) { + try { + const { data } = await octokit.rest.repos.getContent({ + owner, + repo, + path: 'package.json', + ref: 'main' + }); + const content = Buffer.from(data.content, 'base64').toString('utf8'); + const packageJson = JSON.parse(content); + return packageJson.version; + } catch (e) { + console.log(`⚠️ Failed to fetch package.json version: ${e.message}`); + return null; + } +} + +function parseVersionFromPatch(patch) { + const lines = patch.split('\n'); + let oldVersion = null; + let newVersion = null; + + for (const line of lines) { + if (line.startsWith('-') && line.includes('"version":')) { + const match = line.match(/"version":\s*"([^"]+)"/); + if (match) oldVersion = match[1]; + } + if (line.startsWith('+') && line.includes('"version":')) { + const match = line.match(/"version":\s*"([^"]+)"/); + if (match) newVersion = match[1]; + } + } + + if (oldVersion && newVersion && oldVersion !== newVersion) { + return { oldVersion, newVersion }; + } + return null; +} + +function normalizeVersion(version) { + if (!version) return null; + return version.startsWith('v') ? 
version : `v${version}`; +} + +function determineReleaseVersionFromTimeline(mergedAt, versionTimeline) { + const { currentVersion, versionBumps, sinceDate } = versionTimeline; + const mergedDate = new Date(mergedAt); + + // If no version bumps in our lookback period, everything goes to current version + if (versionBumps.length === 0) { + return currentVersion; + } + + // Find the appropriate version based on merge time vs version bump times + // versionBumps are sorted newest first + for (const bump of versionBumps) { + const bumpDate = new Date(bump.date); + + // If PR was merged after this version bump, it belongs to the new version + if (mergedDate >= bumpDate) { + return normalizeVersion(bump.newVersion); + } + } + + // If PR was merged before all version bumps in our period, + // it belongs to the version that existed before the oldest bump + const oldestBump = versionBumps[versionBumps.length - 1]; + return normalizeVersion(oldestBump.oldVersion); +} + +// Optimized buildTabGrouping +async function buildTabGrouping(owner, repo, relevantItems, sinceDateISO) { + const tabToRows = new Map(); + const platformType = repoType(repo); + + // Get version timeline once for this repo + const versionTimeline = await getVersionTimelineForRepo(owner, repo, sinceDateISO); + + if (!versionTimeline.currentVersion) { + console.log(`❌ Cannot determine versions for ${owner}/${repo} - skipping`); + return tabToRows; + } + + // Group PRs by determined release version + const prsByVersion = new Map(); + + for (const pr of relevantItems) { + const releaseVersion = determineReleaseVersionFromTimeline(pr.closed_at, versionTimeline); + if (!releaseVersion) continue; + + if (!prsByVersion.has(releaseVersion)) { + prsByVersion.set(releaseVersion, []); + } + prsByVersion.get(releaseVersion).push(pr); + } + + // Create tabs only for versions that have PRs + for (const [version, prs] of prsByVersion.entries()) { + const title = tabTitleFor(repo, version); + console.log(`πŸ“‹ Tab: ${title} - ${prs.length} PRs`); + + if (!tabToRows.has(title)) { + tabToRows.set(title, { entries: [], platformType }); + } + + for (const pr of prs) { + // Check if PR modifies automated test files + const automatedTestsModified = await checkAutomatedTestFiles(owner, repo, pr.number); + + const row = [ + makePrHyperlinkCell(pr.html_url, pr.title, pr.number), + formatDateHumanUTC(pr.closed_at || ''), + pr.user.login, + extractSize(pr.labels || []), + automatedTestsModified, + extractTeam(pr.labels || []), + '', + '', + '', + ]; + tabToRows.get(title).entries.push({ row, mergedAtIso: pr.closed_at || '' }); + } + } + + return tabToRows; +} + +// Enhanced version bump detection with better error handling +async function findVersionBumpCommits(owner, repo, sinceDateISO) { + try { + const { data: commits } = await octokit.rest.repos.listCommits({ + owner, + repo, + sha: 'main', + since: sinceDateISO, + path: 'package.json', + per_page: 50 // Should be enough for e.g. 
15 days lookback + }); + + const versionBumps = []; + + // Process commits in parallel for better performance + const commitPromises = commits.map(async (commit) => { + try { + const { data: commitData } = await octokit.rest.repos.getCommit({ + owner, + repo, + ref: commit.sha + }); + + const packageJsonFile = commitData.files?.find(f => f.filename === 'package.json'); + if (packageJsonFile?.patch) { + const versionChange = parseVersionFromPatch(packageJsonFile.patch); + if (versionChange) { + return { + sha: commit.sha, + date: commit.commit.committer.date, + message: commit.commit.message, + ...versionChange + }; + } + } + } catch (e) { + console.log(`⚠️ Failed to analyze commit ${commit.sha}: ${e.message}`); + } + return null; + }); + + const results = await Promise.all(commitPromises); + versionBumps.push(...results.filter(Boolean)); + + // Sort by date (newest first) + return versionBumps.sort((a, b) => new Date(b.date) - new Date(a.date)); + } catch (e) { + console.log(`⚠️ Failed to find version bumps: ${e.message}`); + return []; + } +} + +// Automated test detection functions +async function fetchPRFiles(owner, repo, prNumber) { + try { + const allFiles = []; + let page = 1; + + while (true) { + const { data: files } = await octokit.rest.pulls.listFiles({ + owner, + repo, + pull_number: prNumber, + per_page: 100, + page + }); + + allFiles.push(...files); + + // If we got less than 100 files, we've reached the end + if (files.length < 100) break; + + page++; + await sleep(100); + } + + return allFiles.map(file => file.filename); + } catch (e) { + console.log(`⚠️ Failed to fetch files for PR #${prNumber}: ${e.message}`); + return null; // Return null to indicate API failure + } +} + +function checkFilesForAutomatedTestPatterns(filenames) { + if (!filenames || filenames.length === 0) return false; + + // Debug logging to help identify pattern matching issues + const matches = filenames.filter(filename => + AUTOMATED_TEST_PATTERNS.some(pattern => pattern.test(filename)) + ); + + return matches.length > 0; +} + +async function checkAutomatedTestFiles(owner, repo, prNumber) { + const cacheKey = `${owner}/${repo}/${prNumber}`; + if (automatedTestCache.has(cacheKey)) { + return automatedTestCache.get(cacheKey); + } + + const filenames = await fetchPRFiles(owner, repo, prNumber); + let result; + + if (filenames === null) { + result = 'Unknown'; // API error + } else if (checkFilesForAutomatedTestPatterns(filenames)) { + result = 'Yes'; + } else { + result = 'No'; + } + + automatedTestCache.set(cacheKey, result); + return result; +} + +async function processTab(authClient, title, entries, platformType) { + const { sheetId, isNew } = await ensureSheetExists(authClient, title, platformType); + const existing = await readRows(authClient, title); + console.log(`Tab=${title} existingRows=${existing.length}, incomingRows=${entries.length}`); + const existingKeys = new Set( + existing + .map((r) => parsePrNumberFromCell(r[0])) + .filter((n) => n !== null) + .map((n) => uniqKey(n)), + ); + const sortedRows = entries + .slice() + .sort((a, b) => new Date(a.mergedAtIso) - new Date(b.mergedAtIso)) + .map((e) => e.row); + const deduped = []; + for (const r of sortedRows) { + const num = parsePrNumberFromCell(r[0]); + const key = num !== null ? 
uniqKey(num) : null;
+    if (!key || !existingKeys.has(key)) {
+      deduped.push(r);
+      if (key) existingKeys.add(key);
+    }
+  }
+  console.log(`Tab=${title} toInsertAfterDedup=${deduped.length}`);
+  let inserted = 0;
+  if (deduped.length) {
+    await appendRows(authClient, title, deduped);
+    inserted += deduped.length;
+  }
+  if (isNew) {
+    await sheets.spreadsheets.batchUpdate({
+      spreadsheetId,
+      auth: authClient,
+      requestBody: {
+        requests: [
+          {
+            deleteDimension: {
+              range: { sheetId, dimension: 'ROWS', startIndex: 2, endIndex: 3 },
+            },
+          },
+        ],
+      },
+    });
+  }
+  return inserted;
+}
+
+async function processRepo(authClient, owner, repo, since) {
+  console.log(`\nScanning ${owner}/${repo}...`);
+  let insertedThisRepo = 0;
+  const items = await fetchMergedPRsSince(owner, repo, since);
+  const { relevant, skippedByTitle } = splitByReleaseAndTitle(items);
+
+  console.log(
+    `[${owner}/${repo}] API items=${items.length}, relevantByTitle=${relevant.length}, skippedByTitle=${skippedByTitle}`,
+  );
+
+  // Sort relevant items by merge time before grouping into tabs
+  const sortedRelevant = relevant.slice().sort((a, b) => new Date(a.closed_at || '') - new Date(b.closed_at || ''));
+  const tabToRows = await buildTabGrouping(owner, repo, sortedRelevant, since);
+
+  for (const [title, group] of tabToRows.entries()) {
+    const inserted = await processTab(authClient, title, group.entries, group.platformType);
+    insertedThisRepo += inserted;
+  }
+
+  console.log(`βœ… [${owner}/${repo}] Inserted PRs: ${insertedThisRepo}`);
+  return { insertedThisRepo };
+}
+
+async function main() {
+  const authClient = await getGoogleAuth();
+  const { owner, repo: repoName } = parseRepo();
+  const since = isoSinceAtUTC(LOOKBACK_DAYS, START_HOUR_UTC, START_MINUTE_UTC);
+  console.log(
+    `Starting post-merge validation tracker. Mode=Sheets; Since(UTC)=${since}; Repo=${owner}/${repoName}`,
+  );
+
+  await processRepo(authClient, owner, repoName, since);
+}
+
+main().catch((e) => {
+  console.error(e);
+  process.exit(1);
+});
\ No newline at end of file
diff --git a/.github/scripts/remove-rca-needed-label-sheets.ts b/.github/scripts/remove-rca-needed-label-sheets.ts
new file mode 100644
index 00000000..86833792
--- /dev/null
+++ b/.github/scripts/remove-rca-needed-label-sheets.ts
@@ -0,0 +1,413 @@
+// Note: These packages are installed at runtime in the GitHub Actions workflow
+// @ts-ignore - @actions/core is not in devDependencies
+import * as core from '@actions/core';
+// @ts-ignore - @actions/github is not in devDependencies
+import { context, getOctokit } from '@actions/github';
+// @ts-ignore - googleapis types may not be available locally
+import { google } from 'googleapis';
+
+// HTTP status codes enum for better maintainability
+enum HttpStatusCode {
+  NotFound = 404,
+}
+
+interface Label {
+  name: string;
+  color: string;
+  description: string;
+}
+
+const RCA_NEEDED_LABEL: Label = {
+  name: 'RCA-needed',
+  color: 'FF0000',
+  description: 'Issue requires Root Cause Analysis',
+};
+
+// Type alias for Google Sheets v4 API
+// @ts-ignore - googleapis types may not be available locally
+type SheetsV4 = ReturnType<typeof google.sheets>;
+
+// Google Sheets configuration from environment variables
+// @ts-ignore - process is available at runtime in GitHub Actions
+const SPREADSHEET_ID = process.env.SPREADSHEET_ID;
+// @ts-ignore - process is available at runtime in GitHub Actions
+const SHEET_NAME = process.env.SHEET_NAME;
+
+/**
+ * Represents a single response row from the RCA (Root Cause Analysis) Google Form.
+ * @property issueNumber - The GitHub issue number associated with this RCA response (as a string)
+ * @property timestamp - The timestamp when the form was submitted
+ * @property [key: string] - Any additional form fields captured from the Google Form.
+ *   The keys correspond to column headers in the Google Sheet, and the values are the user responses.
+ */
+interface RcaFormResponse {
+  issueNumber: string;
+  timestamp: string;
+  [key: string]: any;
+}
+
+// GitHub GraphQL types
+interface GitHubLabel {
+  id: string;
+  name: string;
+}
+
+interface GitHubIssue {
+  id: string;
+  number: number;
+  title: string;
+  createdAt: string;
+  closedAt: string | null;
+  labels: {
+    nodes: GitHubLabel[];
+  };
+}
+
+interface GetIssuesWithRcaLabelResponse {
+  repository: {
+    issues: {
+      nodes: GitHubIssue[];
+      pageInfo: {
+        hasNextPage: boolean;
+        endCursor: string | null;
+      };
+    };
+  };
+}
+
+async function main(): Promise<void> {
+  try {
+    // @ts-ignore - process is available at runtime in GitHub Actions
+    const githubToken = process.env.GITHUB_TOKEN;
+    if (!githubToken) {
+      core.setFailed('GITHUB_TOKEN not found');
+      return;
+    }
+
+    // Google Sheets API credentials (base64 encoded service account JSON)
+    // @ts-ignore - process is available at runtime in GitHub Actions
+    const googleCredentials = process.env.GOOGLE_SHEETS_CREDENTIALS;
+    if (!googleCredentials) {
+      core.setFailed('GOOGLE_SHEETS_CREDENTIALS not found');
+      return;
+    }
+
+    // Validate sheet configuration
+    if (!SPREADSHEET_ID) {
+      core.setFailed('SPREADSHEET_ID not configured');
+      return;
+    }
+    if (!SHEET_NAME) {
+      core.setFailed('SHEET_NAME not configured');
+      return;
+    }
+
+    // @ts-ignore - process is available at runtime in GitHub Actions
+    const isDryRun = process.env.DRY_RUN === 'true';
+
+    const octokit = getOctokit(githubToken);
+    const repoOwner = context.repo.owner;
+    const repoName = context.repo.repo;
+
+    console.log(
+      `Starting Google Sheets-based RCA label removal (Dry Run: ${isDryRun})`,
+    );
+    console.log(`Repository: ${repoOwner}/${repoName}`);
+    console.log(`Sheet ID: ${SPREADSHEET_ID}`);
+    console.log(`Sheet Name: ${SHEET_NAME}`);
+
+    // Initialize Google Sheets API
+    const sheets = await initializeGoogleSheets(googleCredentials);
+
+    // Get all RCA form responses from the sheet
+    const rcaResponses = await fetchRcaResponses(sheets);
+    console.log(`Found ${rcaResponses.length} RCA responses in Google Sheets`);
+
+    // Get all closed issues with RCA-needed label
+    const issuesWithRcaNeeded = await getIssuesWithRcaLabel(
+      octokit,
+      repoOwner,
+      repoName,
+    );
+
+    console.log(
+      `Found ${issuesWithRcaNeeded.length} issues with RCA-needed label`,
+    );
+
+    let removedCount = 0;
+    let skippedCount = 0;
+
+    let failedCount = 0;
+    const failedIssues: number[] = [];
+
+    for (const issue of issuesWithRcaNeeded) {
+      console.log(`\nπŸ“‹ Processing issue #${issue.number}: ${issue.title}`);
+
+      try {
+        // Check if issue has RCA response in Google Sheets
+        const hasRcaResponse = rcaResponses.some(
+          (response) => response.issueNumber === issue.number.toString(),
+        );
+
+        if (hasRcaResponse) {
+          console.log(
+            `βœ… RCA response found in Google Sheets for issue #${issue.number}`,
+          );
+
+          if (!isDryRun) {
+            // Remove the RCA-needed label
+            await removeLabelFromIssue(
+              octokit,
+              repoOwner,
+              repoName,
+              issue.number,
+              RCA_NEEDED_LABEL.name,
+            );
+
+            console.log(
+              `βœ… Successfully removed RCA-needed label from issue #${issue.number}`,
+            );
+            removedCount++;
+          } else {
+            console.log(
+              `πŸ” [DRY RUN] Would remove label from issue #${issue.number}`,
+            );
+            removedCount++;
+          }
+        } else {
+          console.log(
+            `⏳ No RCA found in sheet for issue #${issue.number} - skipping`,
+          );
+          skippedCount++;
+        }
+      } catch (error: any) {
+        console.error(
+          `❌ Failed to process issue #${issue.number}: ${error?.message || String(error)}`,
+        );
+        failedCount++;
+        failedIssues.push(issue.number);
+        // Continue processing other issues
+      }
+    }
+
+    console.log(`\nπŸ“Š Summary:`);
+    console.log(`  - Repository: ${repoOwner}/${repoName}`);
+    console.log(`  - Issues processed: ${issuesWithRcaNeeded.length}`);
+    console.log(
+      `  - Labels ${isDryRun ? 'would be' : ''} removed: ${removedCount}`,
+    );
+    console.log(`  - Issues skipped (no RCA in sheet): ${skippedCount}`);
+
+    if (failedCount > 0) {
+      console.log(`  - ⚠️ Issues failed: ${failedCount}`);
+      console.log(`  - Failed issue numbers: ${failedIssues.join(', ')}`);
+      core.warning(`Some issues failed to process: ${failedIssues.join(', ')}`);
+    }
+
+    // Set appropriate exit status
+    if (failedCount > 0 && removedCount === 0) {
+      core.setFailed('All label removal attempts failed');
+      return;
+    } else if (failedCount > 0) {
+      console.log(
+        `\n⚠️ Completed with ${failedCount} failures. Check logs for details.`,
+      );
+    } else {
+      console.log(`\nβœ… All operations completed successfully!`);
+    }
+  } catch (error: any) {
+    core.setFailed(
+      `Error in Google Sheets RCA label removal: ${error?.message || String(error)}`,
+    );
+  }
+}
+
+async function initializeGoogleSheets(credentials: string): Promise<SheetsV4> {
+  // Decode base64 credentials
+  const credentialsJson = JSON.parse(
+    // @ts-ignore - Buffer is available at runtime in GitHub Actions
+    Buffer.from(credentials, 'base64').toString('utf-8'),
+  );
+
+  // Initialize Google Sheets API client
+  const auth = new google.auth.GoogleAuth({
+    credentials: credentialsJson,
+    scopes: ['https://www.googleapis.com/auth/spreadsheets.readonly'],
+  });
+
+  const sheets = google.sheets({ version: 'v4', auth });
+  return sheets;
+}
+
+async function fetchRcaResponses(sheets: SheetsV4): Promise<RcaFormResponse[]> {
+  try {
+    // Fetch data from the Google Sheet
+    const response = await sheets.spreadsheets.values.get({
+      spreadsheetId: SPREADSHEET_ID,
+      range: `${SHEET_NAME}!A:Z`, // Covers columns A through Z
+    });
+
+    const rows = response.data.values || [];
+
+    if (rows.length <= 1) {
+      // No data rows (only header or empty)
+      return [];
+    }
+
+    // Dynamically determine the column index for "Issue Number" from the header row
+    const headerRow = rows[0] || [];
+    const ISSUE_NUMBER_HEADER = 'Issue Number';
+    const issueNumberColumnIndex = headerRow.findIndex(
+      (col: string) => col && col.trim() === ISSUE_NUMBER_HEADER,
+    );
+
+    if (issueNumberColumnIndex === -1) {
+      throw new Error(
+        `Could not find "${ISSUE_NUMBER_HEADER}" column in sheet headers. Please check the Google Sheet structure.`,
+      );
+    }
+
+    const responses: RcaFormResponse[] = [];
+    for (let i = 1; i < rows.length; i++) {
+      const row = rows[i];
+
+      // Skip empty rows
+      if (!row || row.length === 0) {
+        continue;
+      }
+
+      // Get issue number from dynamically determined column
+      const issueNumberValue = row[issueNumberColumnIndex];
+
+      if (issueNumberValue != null) {
+        // Extract just the numeric part from the issue number
+        // Handles formats like: "18454", "#18454", or leading/trailing whitespace
+        const trimmedValue = issueNumberValue.toString().trim();
+        if (!trimmedValue) {
+          continue; // Skip if empty string after trimming
+        }
+
+        const issueMatch = trimmedValue.match(/^#?(\d+)$/);
+        if (issueMatch) {
+          const issueNumber = issueMatch[1];
+          responses.push({
+            issueNumber: issueNumber,
+            timestamp: row[0] || '', // Column A: Timestamp
+            // Additional fields can be added if needed:
+            // repository: row[2], // Column C: Github Repository
+            // issueUrl: row[3], // Column D: Github Issue URL
+          });
+          console.log(
+            `  Found RCA for issue #${issueNumber} submitted on ${row[0]}`,
+          );
+        }
+      }
+    }
+
+    return responses;
+  } catch (error: any) {
+    console.error(
+      'Error fetching Google Sheets data:',
+      error?.message || String(error),
+    );
+    throw error;
+  }
+}
+
+async function getIssuesWithRcaLabel(
+  octokit: ReturnType<typeof getOctokit>,
+  owner: string,
+  repo: string,
+): Promise<GitHubIssue[]> {
+  const allIssues: GitHubIssue[] = [];
+  let hasNextPage = true;
+  let cursor: string | null = null;
+
+  // Constants for the GraphQL query
+  const PAGE_SIZE = 100; // Maximum allowed by GitHub API
+  const LABEL_NAMES = [RCA_NEEDED_LABEL.name]; // Use the constant we already have
+
+  while (hasNextPage) {
+    const query = `
+      query GetIssuesWithRcaLabel(
+        $owner: String!,
+        $repo: String!,
+        $cursor: String,
+        $labelNames: [String!],
+        $pageSize: Int!
+      ) {
+        repository(owner: $owner, name: $repo) {
+          issues(labels: $labelNames, states: CLOSED, first: $pageSize, after: $cursor) {
+            nodes {
+              id
+              number
+              title
+              createdAt
+              closedAt
+              labels(first: 10) {
+                nodes {
+                  id
+                  name
+                }
+              }
+            }
+            pageInfo {
+              hasNextPage
+              endCursor
+            }
+          }
+        }
+      }
+    `;
+
+    const result: GetIssuesWithRcaLabelResponse = await octokit.graphql(query, {
+      owner,
+      repo,
+      cursor,
+      labelNames: LABEL_NAMES,
+      pageSize: PAGE_SIZE,
+    });
+    const issues = result.repository.issues;
+
+    allIssues.push(...(issues.nodes || []));
+    hasNextPage = issues.pageInfo.hasNextPage;
+    cursor = issues.pageInfo.endCursor;
+
+    if (hasNextPage) {
+      console.log(`  Fetching more issues... (${allIssues.length} so far)`);
+    }
+  }
+
+  return allIssues;
+}
+
+async function removeLabelFromIssue(
+  octokit: ReturnType<typeof getOctokit>,
+  owner: string,
+  repo: string,
+  issueNumber: number,
+  labelName: string,
+): Promise<void> {
+  try {
+    // Use REST API to remove label from issue
+    await octokit.rest.issues.removeLabel({
+      owner,
+      repo,
+      issue_number: issueNumber,
+      name: labelName,
+    });
+  } catch (error: any) {
+    // If label doesn't exist on issue, the API will throw 404
+    // This is not an error for our use case, so we can safely ignore it
+    if (error?.status !== HttpStatusCode.NotFound) {
+      throw error;
+    }
+  }
+}
+
+// Run the main function
+main().catch((error: any): void => {
+  console.error('Unhandled error:', error);
+  core.setFailed(`Unhandled error: ${error?.message || String(error)}`);
+  // core.setFailed() sets the action's exit code to 1, causing the workflow to fail
+});
diff --git a/.github/scripts/tests/test-create-platform-release-pr-full.sh b/.github/scripts/tests/test-create-platform-release-pr-full.sh
new file mode 100755
index 00000000..02a154c0
--- /dev/null
+++ b/.github/scripts/tests/test-create-platform-release-pr-full.sh
@@ -0,0 +1,199 @@
+#!/usr/bin/env bash
+
+# COMPREHENSIVE SAFE test script for create-platform-release-pr.sh
+# This script tests ALL functionality while preventing ANY real operations
+set -e
+
+echo "πŸ§ͺ COMPREHENSIVE SAFE TESTING FOR RELEASE SCRIPT"
+echo "================================================="
+echo "πŸ›‘οΈ This test completely prevents real operations"
+echo "πŸ“‹ Testing all functions, workflows, and scenarios"
+echo ""
+
+# Set safe test mode
+export TEST_ONLY="true"
+export GITHUB_REPOSITORY_URL="https://github.com/MetaMask/test-repo"
+export GOOGLE_DOCUMENT_ID="test-doc-id"
+export MOBILE_TEMPLATE_SHEET_ID="test-mobile-sheet"
+export EXTENSION_TEMPLATE_SHEET_ID="test-extension-sheet"
+
+echo "πŸ”§ Setting up safe mocks..."
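The mock block that follows works because of two bash behaviors: a shell function with the same name as an external command shadows that command during lookup, and `export -f` carries the function into child shells. A minimal sketch of the pattern, separate from the test's fuller mocks:

```bash
#!/usr/bin/env bash
# A function named like a command wins over the binary on PATH.
git() { echo "MOCK: git $*"; }

# export -f propagates the function definition to subshells, so command
# substitutions and `bash -c` invocations also hit the mock instead of real git.
export -f git

git status                  # prints: MOCK: git status
bash -c 'git push origin'   # prints: MOCK: git push origin
```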
+ +# Create comprehensive mocks before sourcing +git() { + case "$1" in + config) echo "MOCK: git config $2 $3" ;; + fetch) echo "MOCK: git fetch" ;; + checkout) + if [[ "$2" == "-b" ]]; then + echo "MOCK: git checkout -b $3" + else + echo "MOCK: git checkout $2" + fi + ;; + add) echo "MOCK: git add $2" ;; + commit) echo "MOCK: git commit -m \"version bump\"" ;; + push) echo "MOCK: git push --set-upstream origin $4" ;; + pull) echo "MOCK: git pull origin $3" ;; + show-ref) return 1 ;; # Branch doesn't exist + ls-remote) return 1 ;; # Branch doesn't exist remotely + diff) + if [[ "$2" == "--staged" && "$3" == "--quiet" ]]; then + return 1 # Changes exist + else + echo "MOCK: git diff (changes exist)" + return 1 + fi + ;; + *) echo "MOCK: git $*" ;; + esac + return 0 +} + +gh() { + case "$1" in + pr) + case "$2" in + list) + if [[ "$*" == *"--head"* ]]; then + echo "0" # No existing PRs by head + elif [[ "$*" == *"--search"* ]]; then + echo "0" # No existing PRs by search + else + echo "0" # No existing PRs + fi + ;; + create) echo "MOCK: PR Created Successfully" ;; + *) echo "MOCK: gh pr $*" ;; + esac + ;; + *) echo "MOCK: gh $*" ;; + esac + return 0 +} + +# Mock all other external commands +npx() { echo "MOCK: npx $*"; return 0; } +yarn() { echo "MOCK: yarn $*"; return 0; } +corepack() { echo "MOCK: corepack $*"; return 0; } +cd() { echo "MOCK: cd $*"; return 0; } +ls() { echo "MOCK: ls $*"; return 0; } +pwd() { echo "/mock/directory"; return 0; } + +# Create mock version script +mkdir -p ./github-tools/.github/scripts/ +cat > ./github-tools/.github/scripts/set-semvar-version.sh << 'EOF' +#!/usr/bin/env bash +echo "MOCK: Version script - Setting $2 to version $1" +EOF +chmod +x ./github-tools/.github/scripts/set-semvar-version.sh + +# Export all mocks +export -f git gh npx yarn corepack cd ls pwd + +echo "βœ… Mocks ready - sourcing script safely..." 
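Sourcing the release script below is only safe because of the `BASH_SOURCE` guard it ends with (visible earlier in this diff): `main` runs when the file is executed directly, not when it is sourced. A minimal sketch of that guard, assuming a script whose entry point is `main`:

```bash
#!/usr/bin/env bash
# guard-demo.sh (hypothetical file name)
main() {
  echo "main ran with: $*"
}

# Executed directly (./guard-demo.sh), BASH_SOURCE[0] and $0 both name this
# file, so main runs. When sourced (source guard-demo.sh), $0 keeps the
# caller's name, the comparison fails, and the caller only inherits functions.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  main "$@"
fi
```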
+ +# Source with safe parameters +set -- extension v1.4.0 1.5.3 100 +source ./.github/scripts/create-platform-release-pr.sh +set -- + +echo "" +echo "1️⃣ TESTING UTILITY FUNCTIONS" +echo "==============================" + +echo "Testing get_next_version:" +echo " 1.5.3 β†’ $(get_next_version "1.5.3")" +echo " 2.10.15 β†’ $(get_next_version "2.10.15")" +echo " 10.0.0 β†’ $(get_next_version "10.0.0")" + +echo "" +echo "Testing version validation:" +if (get_next_version "invalid" 2>/dev/null); then + echo " ❌ Should reject invalid version" +else + echo " βœ… Correctly rejects invalid version" +fi + +echo "" +echo "Testing branch naming (test mode):" +export TEST_ONLY="true" +echo " Release: $(get_release_branch_name "1.5.3")" +echo " Version bump: $(get_version_bump_branch_name "1.6.0")" + +echo "" +echo "Testing file lists:" +echo " Mobile files: $(get_expected_changed_files "mobile")" +echo " Extension files: $(get_expected_changed_files "extension")" + +echo "" +echo "2️⃣ TESTING HELPER FUNCTIONS" +echo "=============================" + +echo "" +echo "Testing checkout_or_create_branch:" +checkout_or_create_branch "test-branch" "main" + +echo "" +echo "Testing push_branch_with_handling:" +push_branch_with_handling "test-branch" + +echo "" +echo "Testing create_pr_if_not_exists:" +create_pr_if_not_exists "test-branch" "Test PR" "Test body" "main" "test" + +echo "" +echo "3️⃣ TESTING WORKFLOW FUNCTIONS" +echo "===============================" + +echo "" +echo "Testing configure_git:" +configure_git + +echo "" +echo "Testing create_release_pr:" +create_release_pr "extension" "1.5.3" "100" "Version-v1.5.3" "chore/1.5.3-Changelog" + +echo "" +echo "Testing create_version_bump_pr:" +create_version_bump_pr "extension" "1.5.3" "1.6.0" "version-bump-testing/1.6.0" "Version-v1.5.3" "main" + +echo "" +echo "4️⃣ TESTING DIFFERENT SCENARIOS" +echo "================================" + +echo "" +echo "Testing release branch:" +echo "Release branch: $(get_release_branch_name "2.0.0")" + +echo "" +echo "Testing production vs test mode:" +export TEST_ONLY="false" +echo "Production version bump: $(get_version_bump_branch_name "2.1.0")" +export TEST_ONLY="true" +echo "Test version bump: $(get_version_bump_branch_name "2.1.0")" + +echo "" +echo "5️⃣ CLEANUP" +echo "===========" +rm -rf ./github-tools/ +echo "βœ… Cleaned up mock files" + +echo "" +echo "πŸŽ‰ ALL TESTS COMPLETED SUCCESSFULLY!" +echo "=====================================" +echo "" +echo "πŸ“Š Test Summary:" +echo "βœ… Utility functions: All passed" +echo "βœ… Helper functions: All passed" +echo "βœ… Workflow functions: All passed" +echo "βœ… Error handling: All passed" +echo "βœ… Safety checks: All operations mocked" +echo "" +echo "πŸš€ To test the complete workflow safely:" +echo " TEST_ONLY=true ./.github/scripts/create-platform-release-pr.sh extension v1.4.0 1.5.3 100" +echo "" +echo "⚠️ IMPORTANT SAFETY REMINDER:" +echo " πŸ”΄ NEVER run without TEST_ONLY=true in a real repository!" +echo " πŸ”΄ Real mode creates actual git branches and GitHub PRs!" +echo " 🟒 Always use TEST_ONLY=true for testing!" 
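The fixtures above imply the version arithmetic under test: `get_next_version` bumps the minor component and resets the patch (1.5.3 pairs with a 1.6.0 version bump branch). A standalone sketch of that convention, assuming strict MAJOR.MINOR.PATCH input; `next_version` here is an illustrative stand-in, not the script's actual implementation:

```bash
#!/usr/bin/env bash
# Minor-bump convention: 1.5.3 -> 1.6.0, 2.10.15 -> 2.11.0.
next_version() {
  local version="$1"
  # Reject anything that is not three dot-separated numeric fields.
  if ! [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    echo "invalid semver: ${version}" >&2
    return 1
  fi
  IFS='.' read -r major minor _patch <<< "${version}"
  echo "${major}.$((minor + 1)).0"
}

next_version "1.5.3"    # 1.6.0
next_version "10.0.0"   # 10.1.0
```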
diff --git a/.github/scripts/tests/test-create-platform-release-pr-functions.sh b/.github/scripts/tests/test-create-platform-release-pr-functions.sh new file mode 100755 index 00000000..e1fa9da4 --- /dev/null +++ b/.github/scripts/tests/test-create-platform-release-pr-functions.sh @@ -0,0 +1,141 @@ +#!/usr/bin/env bash + +# SAFE test script for functions from create-platform-release-pr.sh +# This script uses mocking to prevent any real git/GitHub operations +set -e + +echo "SAFE Testing for Individual Functions" +echo "=====================================" +echo "⚠️ This test uses mocking to prevent real operations" +echo "" + +# Set test mode to prevent real operations +export TEST_ONLY="true" +export GITHUB_REPOSITORY_URL="https://github.com/MetaMask/test-repo" + +# Source the functions from the main script by setting dummy parameters +# We use 'set --' to temporarily set positional parameters while sourcing +set -- extension v1.4.0 1.5.3 100 +source ./.github/scripts/create-platform-release-pr.sh +set -- # Clear positional parameters after sourcing + +echo "Testing individual functions..." +echo "================================" +echo "" + +# Test get_next_version function (safe - no external calls) +echo "Testing get_next_version (SAFE):" +echo " Input: 1.5.3 -> Output: $(get_next_version "1.5.3")" +echo " Input: 2.10.15 -> Output: $(get_next_version "2.10.15")" +echo " Input: 10.0.0 -> Output: $(get_next_version "10.0.0")" + +# Test invalid version +echo " Testing invalid version (should fail):" +if (get_next_version "invalid.version" 2>/dev/null); then + echo " βœ— Should have rejected invalid version" +else + echo " βœ“ Correctly rejected invalid version" +fi + +echo "" + +# Test get_release_branch_name function (safe - no external calls) +echo "Testing get_release_branch_name (SAFE):" +export TEST_ONLY="false" +echo " Input (prod): 1.5.3 -> Output: $(get_release_branch_name "1.5.3")" + +export TEST_ONLY="true" +echo " Input (test): 1.5.3 -> Output: $(get_release_branch_name "1.5.3")" + +echo "" + +# Test get_version_bump_branch_name function (safe - no external calls) +echo "Testing get_version_bump_branch_name (SAFE):" +export TEST_ONLY="false" +echo " Production mode: $(get_version_bump_branch_name "1.6.0")" + +export TEST_ONLY="true" +echo " Test mode: $(get_version_bump_branch_name "1.6.0")" + +echo "" + +# Test get_expected_changed_files function (safe - no external calls) +echo "Testing get_expected_changed_files (SAFE):" +echo " Mobile files: $(get_expected_changed_files "mobile")" +echo " Extension files: $(get_expected_changed_files "extension")" + +echo "" + +# Test workflow functions with comprehensive mocking +echo "Testing workflow functions (MOCKED - SAFE):" +echo "============================================" +echo "πŸ›‘οΈ All external commands are mocked for safety" +echo "" + +# Comprehensive mocking of ALL external commands to prevent real operations +git() { + echo "MOCK git: $*" + return 0 +} + +gh() { + echo "MOCK gh: $*" + return 0 +} + +npx() { + echo "MOCK npx: $*" + return 0 +} + +yarn() { + echo "MOCK yarn: $*" + return 0 +} + +corepack() { + echo "MOCK corepack: $*" + return 0 +} + +cd() { + echo "MOCK cd: $*" + return 0 +} + +ls() { + echo "MOCK ls: $*" + return 0 +} + +pwd() { + echo "/mock/current/directory" + return 0 +} + +# Export functions for subshells to ensure mocking works everywhere +export -f git gh npx yarn corepack cd ls pwd + +echo "Testing configure_git (mocked):" +configure_git + +echo "" +echo "Testing helper functions (mocked):" 
+echo " Testing checkout_or_create_branch (mocked):" +checkout_or_create_branch "test-branch" "main" + +echo " Testing push_branch_with_handling (mocked):" +push_branch_with_handling "test-branch" + +echo " Testing create_pr_if_not_exists (mocked):" +create_pr_if_not_exists "test-branch" "Test PR" "Test PR body" "main" "test" + +echo "" +echo "βœ… All function tests completed SAFELY!" +echo "" +echo "To test the full script in safe test mode:" +echo " TEST_ONLY=true ./.github/scripts/create-platform-release-pr.sh extension v1.4.0 1.5.3 100" +echo "" +echo "⚠️ NEVER run without TEST_ONLY=true in a real repository!" +echo "With TEST_ONLY mode (SAFE):" +echo " TEST_ONLY=true ./.github/scripts/create-platform-release-pr.sh extension v1.4.0 1.5.3 100" diff --git a/.github/scripts/update-release-sheet.mjs b/.github/scripts/update-release-sheet.mjs index 32a9abf2..9cd90365 100644 --- a/.github/scripts/update-release-sheet.mjs +++ b/.github/scripts/update-release-sheet.mjs @@ -310,7 +310,7 @@ async function main() { extensionTemplateSheetId, ); - createReleaseSheet( + await createReleaseSheet( documentId, platform, semanticVersion, diff --git a/.github/workflows/create-release-pr.yml b/.github/workflows/create-release-pr.yml index fec86dcd..2abb99b2 100644 --- a/.github/workflows/create-release-pr.yml +++ b/.github/workflows/create-release-pr.yml @@ -3,10 +3,14 @@ name: Create Release Pull Request on: workflow_call: inputs: - base-branch: + checkout-base-branch: required: true type: string - description: 'The base branch, tag, or SHA for git operations and the pull request.' + description: 'The base branch, tag, or SHA for git operations.' + release-pr-base-branch: + required: true + type: string + description: 'The base branch, tag, or SHA for the release pull request.' semver-version: required: true type: string @@ -15,10 +19,10 @@ on: required: false type: string description: 'The build version for the mobile platform.' - previous-version-tag: + previous-version-ref: required: true type: string - description: 'Previous release version tag. eg: v7.7.0' + description: 'Previous release version branch name, tag or commit hash (e.g., release/7.7.0, v7.7.0, or 76fbc500034db9779e9ff7ce637ac5be1da0493d). For hotfix releases, pass the literal string "null".' # Flag to indicate if the release is a test release for development purposes only mobile-template-sheet-id: required: false @@ -49,6 +53,14 @@ on: type: string description: 'The version of github-tools to use. Defaults to main.' default: 'main' + git-user-name: + type: string + description: 'Git user name for commits. Defaults to metamaskbot.' + default: 'metamaskbot' + git-user-email: + type: string + description: 'Git user email for commits. Defaults to metamaskbot@users.noreply.github.com.' 
+ default: 'metamaskbot@users.noreply.github.com' secrets: github-token: required: true @@ -70,7 +82,7 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - ref: ${{ inputs.base-branch }} + ref: ${{ inputs.checkout-base-branch }} token: ${{ secrets.github-token }} # Step 2: Checkout github-tools repository @@ -93,9 +105,10 @@ jobs: echo "Input Values:" echo "-------------" echo "Platform: ${{ inputs.platform }}" - echo "Base Branch: ${{ inputs.base-branch }}" + echo "Checkout Base Branch: ${{ inputs.checkout-base-branch }}" + echo "Release PR Base Branch: ${{ inputs.release-pr-base-branch }}" echo "Semver Version: ${{ inputs.semver-version }}" - echo "Previous Version Tag: ${{ inputs.previous-version-tag }}" + echo "Previous Version Reference: ${{ inputs.previous-version-ref }}" echo "Test Only Mode: ${{ inputs.test-only }}" if [[ "${{ inputs.platform }}" == "mobile" ]]; then echo "Mobile Build Version: ${{ inputs.mobile-build-version }}" @@ -104,6 +117,8 @@ jobs: echo "Extension Template Sheet ID: ${{ inputs.extension-template-sheet-id }}" echo "Release Sheet Google Document ID: ${{ inputs.release-sheet-google-document-id }}" echo "GitHub Tools Version: ${{ inputs.github-tools-version }}" + echo "Git User Name: ${{ inputs.git-user-name }}" + echo "Git User Email: ${{ inputs.git-user-email }}" echo "-------------" # Step 5: Create Release PR @@ -112,7 +127,7 @@ jobs: shell: bash env: GITHUB_TOKEN: ${{ secrets.github-token }} - BASE_BRANCH: ${{ inputs.base-branch }} + BASE_BRANCH: ${{ inputs.release-pr-base-branch }} GITHUB_REPOSITORY_URL: '${{ github.server_url }}/${{ github.repository }}' TEST_ONLY: ${{ inputs.test-only }} GOOGLE_DOCUMENT_ID: ${{ inputs.release-sheet-google-document-id }} @@ -124,7 +139,18 @@ jobs: run: | # Execute the script from github-tools ./github-tools/.github/scripts/create-platform-release-pr.sh \ - ${{ inputs.platform }} \ - ${{ inputs.previous-version-tag }} \ - ${{ inputs.semver-version }} \ - ${{ inputs.mobile-build-version }} + "${{ inputs.platform }}" \ + "${{ inputs.previous-version-ref }}" \ + "${{ inputs.semver-version }}" \ + "${{ inputs.mobile-build-version }}" \ + "${{ inputs.git-user-name }}" \ + "${{ inputs.git-user-email }}" + + # Step 6: Upload commits.csv as artifact (if generated) + - name: Upload commits.csv artifact + if: ${{ hashFiles('commits.csv') != '' }} + uses: actions/upload-artifact@v4 + with: + name: commits-csv + path: commits.csv + if-no-files-found: error diff --git a/.github/workflows/flaky-test-report.yml b/.github/workflows/flaky-test-report.yml new file mode 100644 index 00000000..1f5c298a --- /dev/null +++ b/.github/workflows/flaky-test-report.yml @@ -0,0 +1,54 @@ +name: Flaky Test Report + +on: + workflow_call: + inputs: + repository: + description: 'Repository name (e.g. metamask-extension)' + required: true + type: string + workflow_id: + description: 'Workflow ID to analyze (e.g. 
main.yml)' + required: true + type: string + secrets: + github-token: + description: 'GitHub token with repo and actions:read access' + required: true + slack-webhook-flaky-tests: + description: 'Slack webhook URL for flaky test reports' + required: true + +jobs: + flaky-test-report: + runs-on: ubuntu-latest + steps: + - name: Checkout github-tools repository + uses: actions/checkout@v4 + with: + repository: MetaMask/github-tools + path: github-tools + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version-file: ./github-tools/.nvmrc + cache-dependency-path: ./github-tools/yarn.lock + cache: yarn + + - name: Enable Corepack + run: corepack enable + working-directory: ./github-tools + + - name: Install dependencies + working-directory: ./github-tools + run: yarn --immutable + + - name: Run flaky test report script + env: + REPOSITORY: ${{ inputs.repository }} + WORKFLOW_ID: ${{ inputs.workflow_id }} + GITHUB_TOKEN: ${{ secrets.github-token }} + SLACK_WEBHOOK_FLAKY_TESTS: ${{ secrets.slack-webhook-flaky-tests }} + working-directory: ./github-tools + run: node .github/scripts/create-flaky-test-report.mjs diff --git a/.github/workflows/post-gh-rca.yml b/.github/workflows/post-gh-rca.yml index 1cfeb750..09713f05 100644 --- a/.github/workflows/post-gh-rca.yml +++ b/.github/workflows/post-gh-rca.yml @@ -9,7 +9,7 @@ on: inputs: google-form-base-url: description: Base URL of the Google Form. - required: true + default: 'https://docs.google.com/forms/d/e/1FAIpQLSeLOVVUy7mO1j-5Isb04OAWk3dM0b1NY1R8kf0tiEBs9elcEg/viewform?usp=pp_url' type: string repo-owner: description: The repo owner @@ -29,15 +29,23 @@ on: type: string entry-issue: description: The entry ID for the issue field in the Google Form - required: true + default: 'entry.1417567074' type: string entry-regression: description: The entry ID for the regression field in the Google Form - required: true + default: 'entry.1470697156' type: string entry-team: description: The entry ID for the team field in the Google Form - required: true + default: 'entry.1198657478' + type: string + entry-repo-name: + description: The entry ID for the repository name field + default: 'entry.1085838323' + type: string + entry-issue-url: + description: The entry ID for the GitHub issue URL field + default: 'entry.516762472' type: string jobs: @@ -56,6 +64,8 @@ jobs: ENTRY_ISSUE: ${{ inputs.entry-issue }} ENTRY_REGRESSION: ${{ inputs.entry-regression }} ENTRY_TEAM: ${{ inputs.entry-team }} + ENTRY_REPO_NAME: ${{ inputs.entry-repo-name }} + ENTRY_ISSUE_URL: ${{ inputs.entry-issue-url }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -64,6 +74,8 @@ jobs: ENTRY_ISSUE, ENTRY_REGRESSION, ENTRY_TEAM, + ENTRY_REPO_NAME, + ENTRY_ISSUE_URL, OWNER_NAME: owner, REPO_NAME: repo, ISSUE_NUMBER: issueNumStr, @@ -109,6 +121,9 @@ jobs: teamLabels.length ? 
                teamLabels.join(',') : ''
           );
+          formUrl.searchParams.set(ENTRY_REPO_NAME, repo);
+          formUrl.searchParams.set(ENTRY_ISSUE_URL, `https://github.com/${owner}/${repo}/issues/${issue_number}`);
+
           const assignees = issue.assignees.map(u=>`@${u.login}`).join(', ');

           const body = `Hi ${assignees},
@@ -121,3 +136,16 @@
             owner, repo, issue_number, body
           });
           console.log(`✅ Comment posted on issue #${issue_number}`);
+
+          // Add the RCA-needed label
+          try {
+            await github.rest.issues.addLabels({
+              owner,
+              repo,
+              issue_number: issue_number,
+              labels: ['RCA-needed']
+            });
+            console.log(`✅ Added 'RCA-needed' label on issue #${issue_number}`);
+          } catch (error) {
+            console.log(`⚠️ Could not add label: ${error.message}`);
+          }
diff --git a/.github/workflows/post-merge-validation.yml b/.github/workflows/post-merge-validation.yml
new file mode 100644
index 00000000..750988c4
--- /dev/null
+++ b/.github/workflows/post-merge-validation.yml
@@ -0,0 +1,66 @@
+name: Post Merge Validation
+
+on:
+  workflow_call:
+    inputs:
+      repo:
+        description: 'The repo owner/name to process (e.g. MetaMask/metamask-extension)'
+        required: true
+        type: string
+      start_hour_utc:
+        description: 'The hour of the day (UTC) to start processing the PRs merged into main'
+        required: true
+        type: number
+      spreadsheet_id:
+        description: 'Google Spreadsheet ID to update'
+        required: false
+        type: string
+        default: '1tsoodlAlyvEUpkkcNcbZ4PM9HuC9cEM80RZeoVv5OCQ'
+      lookback_days:
+        description: 'Number of days to look back for PRs'
+        required: false
+        type: number
+        default: 1
+    secrets:
+      github-token:
+        description: 'GitHub token with repo access'
+        required: true
+      google-application-creds-base64:
+        description: 'Base64 encoded Google service account credentials'
+        required: true
+
+jobs:
+  post-merge-validation-tracker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout github-tools repository
+        uses: actions/checkout@v4
+        with:
+          repository: MetaMask/github-tools
+          path: github-tools
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: ./github-tools/.nvmrc
+          cache-dependency-path: ./github-tools/yarn.lock
+          cache: yarn
+
+      - name: Enable Corepack
+        run: corepack enable
+        working-directory: ./github-tools
+
+      - name: Install dependencies
+        working-directory: ./github-tools
+        run: yarn --immutable
+
+      - name: Run post-merge-validation script
+        working-directory: ./github-tools
+        env:
+          SHEET_ID: ${{ inputs.spreadsheet_id }}
+          START_HOUR_UTC: ${{ inputs.start_hour_utc }}
+          LOOKBACK_DAYS: ${{ inputs.lookback_days }}
+          REPO: ${{ inputs.repo }}
+          GITHUB_TOKEN: ${{ secrets.github-token }}
+          GOOGLE_APPLICATION_CREDENTIALS_BASE64: ${{ secrets.google-application-creds-base64 }}
+        run: node .github/scripts/post-merge-validation-tracker.mjs
diff --git a/.github/workflows/pr-line-check.yml b/.github/workflows/pr-line-check.yml
index 8789d974..1c9cef79 100644
--- a/.github/workflows/pr-line-check.yml
+++ b/.github/workflows/pr-line-check.yml
@@ -18,6 +18,26 @@ on:
         required: false
         type: string
         default: '(\.lock$)'
+      xs_max_size:
+        description: 'Maximum lines for XS size'
+        required: false
+        type: number
+        default: 10
+      s_max_size:
+        description: 'Maximum lines for S size'
+        required: false
+        type: number
+        default: 100
+      m_max_size:
+        description: 'Maximum lines for M size'
+        required: false
+        type: number
+        default: 500
+      l_max_size:
+        description: 'Maximum lines for L size'
+        required: false
+        type: number
+        default: 1000

 jobs:
   check-lines:
@@ -79,15 +99,106 @@ jobs:
           # Calculate additions and deletions across all changes between the base and HEAD,
           # filtering out files matching the ignore pattern.
-          additions=$(git diff "origin/$BASE_BRANCH"...HEAD --numstat | grep -Ev "$ignore_pattern" | awk '{add += $1} END {print add}')
-          deletions=$(git diff "origin/$BASE_BRANCH"...HEAD --numstat | grep -Ev "$ignore_pattern" | awk '{del += $2} END {print del}')
+          additions=$(git diff "origin/$BASE_BRANCH"...HEAD --numstat | grep -Ev "$ignore_pattern" | awk '{add += $1} END {print add+0}')
+          deletions=$(git diff "origin/$BASE_BRANCH"...HEAD --numstat | grep -Ev "$ignore_pattern" | awk '{del += $2} END {print del+0}')
           total=$((additions + deletions))

           echo "Additions: $additions, Deletions: $deletions, Total: $total"
-          echo "lines_changed=$total" >> "$GITHUB_OUTPUT"
+          {
+            echo "lines_changed=$total"
+            echo "additions=$additions"
+            echo "deletions=$deletions"
+          } >> "$GITHUB_OUTPUT"

-          max_lines="${{ inputs.max_lines }}"
-          if [ "$total" -gt "$max_lines" ]; then
-            echo "Error: Total changed lines ($total) exceed the limit of $max_lines."
-            exit 1
-          fi
+      - name: Check line count limit
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const total = parseInt('${{ steps.line_count.outputs.lines_changed }}') || 0;
+            const additions = parseInt('${{ steps.line_count.outputs.additions }}') || 0;
+            const deletions = parseInt('${{ steps.line_count.outputs.deletions }}') || 0;
+
+            // Thresholds from inputs with fallback to defaults
+            const maxLines = parseInt('${{ inputs.max_lines }}') || 1000;
+            const xsMaxSize = parseInt('${{ inputs.xs_max_size }}') || 10;
+            const sMaxSize = parseInt('${{ inputs.s_max_size }}') || 100;
+            const mMaxSize = parseInt('${{ inputs.m_max_size }}') || 500;
+            const lMaxSize = parseInt('${{ inputs.l_max_size }}') || 1000;
+
+            // Print summary
+            console.log('Summary:');
+            console.log(` - Additions: ${additions}`);
+            console.log(` - Deletions: ${deletions}`);
+            console.log(` - Total: ${total}`);
+            console.log(` - Limit: ${maxLines}`);
+
+            // Determine size label based on configured criteria
+            let sizeLabel = '';
+            if (total <= xsMaxSize) {
+              sizeLabel = 'size-XS';
+            } else if (total <= sMaxSize) {
+              sizeLabel = 'size-S';
+            } else if (total <= mMaxSize) {
+              sizeLabel = 'size-M';
+            } else if (total <= lMaxSize) {
+              sizeLabel = 'size-L';
+            } else {
+              sizeLabel = 'size-XL';
+            }
+
+            console.log(` - Size category: ${sizeLabel}`);
+
+            // Manage PR labels
+            const owner = context.repo.owner;
+            const repo = context.repo.repo;
+            const issue_number = context.payload.pull_request.number;
+
+            try {
+              const existingSizeLabels = ['size-XS', 'size-S', 'size-M', 'size-L', 'size-XL'];
+
+              // Get current labels
+              const currentLabels = await github.rest.issues.listLabelsOnIssue({
+                owner,
+                repo,
+                issue_number
+              });
+
+              const currentLabelNames = currentLabels.data.map(l => l.name);
+
+              // Build new label set: keep non-size labels and add the new size label
+              const newLabels = currentLabelNames
+                .filter(name => !existingSizeLabels.includes(name)) // Remove all size labels
+                .concat(sizeLabel); // Add the correct size label
+
+              // Check if labels need updating
+              const currentSizeLabel = currentLabelNames.find(name => existingSizeLabels.includes(name));
+              if (currentSizeLabel === sizeLabel && currentLabelNames.length === newLabels.length) {
+                console.log(`✅ Correct label '${sizeLabel}' already present, no changes needed`);
+              } else {
+                // Update all labels in a single API call
+                await github.rest.issues.setLabels({
+                  owner,
+                  repo,
+                  issue_number,
+                  labels: newLabels
+                });
+
+                if (currentSizeLabel && currentSizeLabel !== sizeLabel) {
+                  console.log(` - Replaced '${currentSizeLabel}' with '${sizeLabel}'`);
+                } else if (!currentSizeLabel) {
+                  console.log(`✅ Added '${sizeLabel}' label to PR #${issue_number}`);
+                } else {
+                  console.log(`✅ Updated labels for PR #${issue_number}`);
+                }
+              }
+            } catch (error) {
+              console.log(`⚠️ Could not manage labels: ${error.message}`);
+            }
+
+            // Check if the total exceeds the limit
+            if (total > maxLines) {
+              console.log(`❌ Error: Total changed lines (${total}) exceed the limit of ${maxLines}.`);
+              process.exit(1);
+            } else {
+              console.log(`✅ Success: Total changed lines (${total}) are within the limit of ${maxLines}.`);
+            }
diff --git a/.github/workflows/remove-rca-needed-label-sheets.yml b/.github/workflows/remove-rca-needed-label-sheets.yml
new file mode 100644
index 00000000..5a6432f1
--- /dev/null
+++ b/.github/workflows/remove-rca-needed-label-sheets.yml
@@ -0,0 +1,84 @@
+name: Remove RCA-needed Label
+
+on:
+  workflow_call:
+    inputs:
+      dry_run:
+        description: 'Run in dry-run mode (no changes made)'
+        required: false
+        default: 'false'
+        type: string
+      spreadsheet_id:
+        description: 'Google Spreadsheet ID (must be provided by consuming repository)'
+        required: true
+        type: string
+      sheet_name:
+        description: 'Sheet tab name (uses default if not provided)'
+        required: false
+        default: 'Form Responses 1'
+        type: string
+      github-tools-version:
+        description: 'The version of github-tools to use. Defaults to main.'
+        required: false
+        default: 'main'
+        type: string
+    secrets:
+      github-token:
+        description: 'GitHub token with issues write permissions'
+        required: true
+      google-application-creds-base64:
+        description: 'Base64 encoded Google application service account credentials'
+        required: true
+
+permissions:
+  issues: write
+  contents: read
+
+jobs:
+  remove-rca-labels:
+    name: Remove RCA-needed Labels Based on Sheet Data
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+
+    steps:
+      - name: Checkout consuming repository
+        uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.github-token }}
+
+      - name: Checkout github-tools
+        uses: actions/checkout@v4
+        with:
+          repository: MetaMask/github-tools
+          ref: ${{ inputs.github-tools-version }}
+          token: ${{ secrets.github-token }}
+          path: github-tools
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
+      - name: Run RCA Google Sheets check
+        run: |
+          # Move to github-tools directory where our script lives
+          cd github-tools/.github/scripts
+
+          # Create a simple package.json for npm to work with
+          echo '{}' > package.json
+
+          # Install exact versions of required packages locally
+          npm install --no-save --no-package-lock \
+            @actions/core@1.10.1 \
+            @actions/github@6.0.0 \
+            googleapis@144.0.0 \
+            tsx@4.7.1
+
+          # Run the script with tsx
+          npx tsx remove-rca-needed-label-sheets.ts
+        env:
+          GITHUB_TOKEN: ${{ secrets.github-token }}
+          GOOGLE_SHEETS_CREDENTIALS: ${{ secrets.google-application-creds-base64 }}
+          DRY_RUN: ${{ inputs.dry_run }}
+          SPREADSHEET_ID: ${{ inputs.spreadsheet_id }}
+          SHEET_NAME: ${{ inputs.sheet_name }}
diff --git a/.github/workflows/stable-sync.yml b/.github/workflows/stable-sync.yml
index 6dc846e3..2dce21fe 100644
--- a/.github/workflows/stable-sync.yml
+++ b/.github/workflows/stable-sync.yml
@@ -18,7 +18,7 @@ on:
       stable-branch-name:
         required: false
         type: string
-        description: 'The name of the stable branch to sync to (e.g., stable, master, main)'
+        description: 'The name of the stable branch to sync to (e.g., stable, main)'
         default: 'stable'
       github-tools-version:
         required: false
@@
-39,13 +39,17 @@ on: stable-branch-name: required: false type: string - description: 'The name of the stable branch to sync to (e.g., stable, master, main)' + description: 'The name of the stable branch to sync to (e.g., stable, main)' default: 'stable' github-tools-version: required: false type: string description: 'The version of github-tools to use. Defaults to main.' default: ${{ github.action_ref }} + secrets: + github-token: + description: 'GitHub token for creating pull requests' + required: true jobs: stable-sync: @@ -128,7 +132,7 @@ jobs: - name: Create Pull Request if: steps.check-pr.outputs.result != 'true' env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.github-token }} BRANCH_NAME: stable-main-${{ inputs.semver-version }} VERSION: ${{ inputs.semver-version }} run: | diff --git a/package.json b/package.json index 9285b795..4eb16b0d 100644 --- a/package.json +++ b/package.json @@ -27,6 +27,7 @@ "@octokit/request": "^8.1.1", "@octokit/rest": "^19.0.13", "@slack/web-api": "^6.0.0", + "@slack/webhook": "^7.0.6", "@types/luxon": "^3.3.0", "axios": "^0.24.0", "csv-parse": "5.6.0", @@ -35,7 +36,8 @@ "luxon": "^3.3.0", "ora": "^5.4.1", "semver": "^7.7.2", - "simple-git": "3.27.0" + "simple-git": "3.27.0", + "unzipper": "^0.12.3" }, "devDependencies": { "@lavamoat/allow-scripts": "^2.3.1", @@ -49,6 +51,7 @@ "@types/jest": "^28.1.6", "@types/node": "^20.3.2", "@types/semver": "^7", + "@types/unzipper": "^0", "@typescript-eslint/eslint-plugin": "^5.43.0", "@typescript-eslint/parser": "^5.43.0", "depcheck": "^1.4.3", diff --git a/yarn.lock b/yarn.lock index 7e92bef3..80291c24 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1022,12 +1022,14 @@ __metadata: "@octokit/request": "npm:^8.1.1" "@octokit/rest": "npm:^19.0.13" "@slack/web-api": "npm:^6.0.0" + "@slack/webhook": "npm:^7.0.6" "@swc/cli": "npm:^0.1.62" "@swc/core": "npm:^1.3.80" "@types/jest": "npm:^28.1.6" "@types/luxon": "npm:^3.3.0" "@types/node": "npm:^20.3.2" "@types/semver": "npm:^7" + "@types/unzipper": "npm:^0" "@typescript-eslint/eslint-plugin": "npm:^5.43.0" "@typescript-eslint/parser": "npm:^5.43.0" axios: "npm:^0.24.0" @@ -1054,6 +1056,7 @@ __metadata: ts-jest: "npm:^28.0.7" ts-node: "npm:^10.9.1" typescript: "npm:^5.1.3" + unzipper: "npm:^0.12.3" languageName: unknown linkType: soft @@ -1469,10 +1472,10 @@ __metadata: languageName: node linkType: hard -"@slack/types@npm:^2.11.0": - version: 2.14.0 - resolution: "@slack/types@npm:2.14.0" - checksum: 10/fa24a113b88e087f899078504c2ba50ab9795f7c2dd1a2d95b28217a3af20e554494f9cc3b8c8ce173120990d98e19400c95369f9067cecfcc46c08b59d2a46f +"@slack/types@npm:^2.11.0, @slack/types@npm:^2.9.0": + version: 2.16.0 + resolution: "@slack/types@npm:2.16.0" + checksum: 10/e18b568a47d94e9e7234dfd06f789224d6804edae4a2f31068b3f388ce4c482a6dbc6c035dc3dec63e5723f211f92c7694ee40b2ec83d4ac90d46bb35fa46eb5 languageName: node linkType: hard @@ -1495,6 +1498,17 @@ __metadata: languageName: node linkType: hard +"@slack/webhook@npm:^7.0.6": + version: 7.0.6 + resolution: "@slack/webhook@npm:7.0.6" + dependencies: + "@slack/types": "npm:^2.9.0" + "@types/node": "npm:>=18.0.0" + axios: "npm:^1.11.0" + checksum: 10/8f8083f9654e590f04731985b337f576842b2034a9261010f85d813c4e262f69d856c142b0dcf2022bfe69c22c2e97cc7d877a79989cd0f7a0cf2554ae0754ed + languageName: node + linkType: hard + "@swc/cli@npm:^0.1.62": version: 0.1.62 resolution: "@swc/cli@npm:0.1.62" @@ -1875,12 +1889,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:>=12.0.0": - version: 
22.13.1 - resolution: "@types/node@npm:22.13.1" +"@types/node@npm:*, @types/node@npm:>=12.0.0, @types/node@npm:>=18.0.0": + version: 24.3.0 + resolution: "@types/node@npm:24.3.0" dependencies: - undici-types: "npm:~6.20.0" - checksum: 10/d8ba7068b0445643c0fa6e4917cdb7a90e8756a9daff8c8a332689cd5b2eaa01e4cd07de42e3cd7e6a6f465eeda803d5a1363d00b5ab3f6cea7950350a159497 + undici-types: "npm:~7.10.0" + checksum: 10/1331c2d0e9a512ac27a016b4df3eff92317e4603dbbbab31731275dff14d3a04847a50c5776cbf94f99ff4dedac0ba5f721dce8cea020d8eea5e21711fd964b0 languageName: node linkType: hard @@ -1935,6 +1949,15 @@ __metadata: languageName: node linkType: hard +"@types/unzipper@npm:^0": + version: 0.10.11 + resolution: "@types/unzipper@npm:0.10.11" + dependencies: + "@types/node": "npm:*" + checksum: 10/c11c0e072556038730b218ccf8af849911ed8a1338e6db863bdf4c44d53d83dd23e3de4752322b1e19cf0205ed6eaf8746e25aa3c2b38e419da457f9d6be7b4e + languageName: node + linkType: hard + "@types/yargs-parser@npm:*": version: 15.0.0 resolution: "@types/yargs-parser@npm:15.0.0" @@ -2448,14 +2471,14 @@ __metadata: languageName: node linkType: hard -"axios@npm:^1.7.4": - version: 1.7.9 - resolution: "axios@npm:1.7.9" +"axios@npm:^1.11.0, axios@npm:^1.7.4": + version: 1.11.0 + resolution: "axios@npm:1.11.0" dependencies: follow-redirects: "npm:^1.15.6" - form-data: "npm:^4.0.0" + form-data: "npm:^4.0.4" proxy-from-env: "npm:^1.1.0" - checksum: 10/b7a5f660ea53ba9c2a745bf5ad77ad8bf4f1338e13ccc3f9f09f810267d6c638c03dac88b55dae8dc98b79c57d2d6835be651d58d2af97c174f43d289a9fd007 + checksum: 10/232df4af7a4e4e07baa84621b9cc4b0c518a757b4eacc7f635c0eb3642cb98dff347326739f24b891b3b4481b7b838c79a3a0c4819c9fbc1fc40232431b9c5dc languageName: node linkType: hard @@ -2624,6 +2647,13 @@ __metadata: languageName: node linkType: hard +"bluebird@npm:~3.7.2": + version: 3.7.2 + resolution: "bluebird@npm:3.7.2" + checksum: 10/007c7bad22c5d799c8dd49c85b47d012a1fe3045be57447721e6afbd1d5be43237af1db62e26cb9b0d9ba812d2e4ca3bac82f6d7e016b6b88de06ee25ceb96e7 + languageName: node + linkType: hard + "brace-expansion@npm:^1.1.7": version: 1.1.11 resolution: "brace-expansion@npm:1.1.11" @@ -3085,6 +3115,13 @@ __metadata: languageName: node linkType: hard +"core-util-is@npm:~1.0.0": + version: 1.0.3 + resolution: "core-util-is@npm:1.0.3" + checksum: 10/9de8597363a8e9b9952491ebe18167e3b36e7707569eed0ebf14f8bba773611376466ae34575bca8cfe3c767890c859c74056084738f09d4e4a6f902b2ad7d99 + languageName: node + linkType: hard + "cosmiconfig@npm:^7.0.0": version: 7.1.0 resolution: "cosmiconfig@npm:7.1.0" @@ -3372,6 +3409,15 @@ __metadata: languageName: node linkType: hard +"duplexer2@npm:~0.1.4": + version: 0.1.4 + resolution: "duplexer2@npm:0.1.4" + dependencies: + readable-stream: "npm:^2.0.2" + checksum: 10/f60ff8b8955f992fd9524516e82faa5662d7aca5b99ee71c50bbbe1a3c970fafacb35d526d8b05cef8c08be56eed3663c096c50626c3c3651a52af36c408bf4d + languageName: node + linkType: hard + "ecdsa-sig-formatter@npm:1.0.11, ecdsa-sig-formatter@npm:^1.0.11": version: 1.0.11 resolution: "ecdsa-sig-formatter@npm:1.0.11" @@ -3513,14 +3559,15 @@ __metadata: languageName: node linkType: hard -"es-set-tostringtag@npm:^2.0.1": - version: 2.0.1 - resolution: "es-set-tostringtag@npm:2.0.1" +"es-set-tostringtag@npm:^2.0.1, es-set-tostringtag@npm:^2.1.0": + version: 2.1.0 + resolution: "es-set-tostringtag@npm:2.1.0" dependencies: - get-intrinsic: "npm:^1.1.3" - has: "npm:^1.0.3" - has-tostringtag: "npm:^1.0.0" - checksum: 
10/ec416a12948cefb4b2a5932e62093a7cf36ddc3efd58d6c58ca7ae7064475ace556434b869b0bbeb0c365f1032a8ccd577211101234b69837ad83ad204fff884 + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.6" + has-tostringtag: "npm:^1.0.2" + hasown: "npm:^2.0.2" + checksum: 10/86814bf8afbcd8966653f731415888019d4bc4aca6b6c354132a7a75bb87566751e320369654a101d23a91c87a85c79b178bcf40332839bd347aff437c4fb65f languageName: node linkType: hard @@ -4229,14 +4276,27 @@ __metadata: languageName: node linkType: hard -"form-data@npm:^4.0.0": - version: 4.0.1 - resolution: "form-data@npm:4.0.1" +"form-data@npm:^4.0.4": + version: 4.0.4 + resolution: "form-data@npm:4.0.4" dependencies: asynckit: "npm:^0.4.0" combined-stream: "npm:^1.0.8" + es-set-tostringtag: "npm:^2.1.0" + hasown: "npm:^2.0.2" mime-types: "npm:^2.1.12" - checksum: 10/6adb1cff557328bc6eb8a68da205f9ae44ab0e88d4d9237aaf91eed591ffc64f77411efb9016af7d87f23d0a038c45a788aa1c6634e51175c4efa36c2bc53774 + checksum: 10/a4b62e21932f48702bc468cc26fb276d186e6b07b557e3dd7cc455872bdbb82db7db066844a64ad3cf40eaf3a753c830538183570462d3649fdfd705601cbcfb + languageName: node + linkType: hard + +"fs-extra@npm:^11.2.0": + version: 11.3.1 + resolution: "fs-extra@npm:11.3.1" + dependencies: + graceful-fs: "npm:^4.2.0" + jsonfile: "npm:^6.0.1" + universalify: "npm:^2.0.0" + checksum: 10/2b893213411b1da11f9b061ccb0bcff4d6dd66fe90aa8f5b1616219a5e7ca659da869f454ebd8e94aa21c58342730fb43a2e5c98b5c6c5124f0c54a4633f64b0 languageName: node linkType: hard @@ -4611,10 +4671,10 @@ __metadata: languageName: node linkType: hard -"graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": - version: 4.2.10 - resolution: "graceful-fs@npm:4.2.10" - checksum: 10/0c83c52b62c68a944dcfb9d66b0f9f10f7d6e3d081e8067b9bfdc9e5f3a8896584d576036f82915773189eec1eba599397fc620e75c03c0610fb3d67c6713c1a +"graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.2, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: 10/bf152d0ed1dc159239db1ba1f74fdbc40cb02f626770dcd5815c427ce0688c2635a06ed69af364396da4636d0408fcf7d4afdf7881724c3307e46aff30ca49e2 languageName: node linkType: hard @@ -4874,7 +4934,7 @@ __metadata: languageName: node linkType: hard -"inherits@npm:2, inherits@npm:^2.0.3, inherits@npm:^2.0.4": +"inherits@npm:2, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3": version: 2.0.4 resolution: "inherits@npm:2.0.4" checksum: 10/cd45e923bee15186c07fa4c89db0aace24824c482fb887b528304694b2aa6ff8a898da8657046a5dcf3e46cd6db6c61629551f9215f208d7c3f157cf9b290521 @@ -5183,6 +5243,13 @@ __metadata: languageName: node linkType: hard +"isarray@npm:~1.0.0": + version: 1.0.0 + resolution: "isarray@npm:1.0.0" + checksum: 10/f032df8e02dce8ec565cf2eb605ea939bdccea528dbcf565cdf92bfa2da9110461159d86a537388ef1acef8815a330642d7885b29010e8f7eac967c9993b65ab + languageName: node + linkType: hard + "isexe@npm:^2.0.0": version: 2.0.0 resolution: "isexe@npm:2.0.0" @@ -5811,6 +5878,19 @@ __metadata: languageName: node linkType: hard +"jsonfile@npm:^6.0.1": + version: 6.2.0 + resolution: "jsonfile@npm:6.2.0" + dependencies: + graceful-fs: "npm:^4.1.6" + universalify: "npm:^2.0.0" + dependenciesMeta: + graceful-fs: + optional: true + checksum: 10/513aac94a6eff070767cafc8eb4424b35d523eec0fcd8019fe5b975f4de5b10a54640c8d5961491ddd8e6f562588cf62435c5ddaf83aaf0986cd2ee789e0d7b9 + languageName: node + linkType: hard + "jwa@npm:^2.0.0": version: 2.0.0 resolution: "jwa@npm:2.0.0" @@ -6836,6 +6916,13 @@ __metadata: languageName: node linkType: hard 
+"process-nextick-args@npm:~2.0.0": + version: 2.0.1 + resolution: "process-nextick-args@npm:2.0.1" + checksum: 10/1d38588e520dab7cea67cbbe2efdd86a10cc7a074c09657635e34f035277b59fbb57d09d8638346bf7090f8e8ebc070c96fa5fd183b777fff4f5edff5e9466cf + languageName: node + linkType: hard + "promise-inflight@npm:^1.0.1": version: 1.0.1 resolution: "promise-inflight@npm:1.0.1" @@ -6951,6 +7038,21 @@ __metadata: languageName: node linkType: hard +"readable-stream@npm:^2.0.2": + version: 2.3.8 + resolution: "readable-stream@npm:2.3.8" + dependencies: + core-util-is: "npm:~1.0.0" + inherits: "npm:~2.0.3" + isarray: "npm:~1.0.0" + process-nextick-args: "npm:~2.0.0" + safe-buffer: "npm:~5.1.1" + string_decoder: "npm:~1.1.1" + util-deprecate: "npm:~1.0.1" + checksum: 10/8500dd3a90e391d6c5d889256d50ec6026c059fadee98ae9aa9b86757d60ac46fff24fafb7a39fa41d54cb39d8be56cc77be202ebd4cd8ffcf4cb226cbaa40d4 + languageName: node + linkType: hard + "readable-stream@npm:^3.4.0, readable-stream@npm:^3.6.0": version: 3.6.2 resolution: "readable-stream@npm:3.6.2" @@ -7161,7 +7263,7 @@ __metadata: languageName: node linkType: hard -"safe-buffer@npm:~5.1.1": +"safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": version: 5.1.2 resolution: "safe-buffer@npm:5.1.2" checksum: 10/7eb5b48f2ed9a594a4795677d5a150faa7eb54483b2318b568dc0c4fc94092a6cce5be02c7288a0500a156282f5276d5688bce7259299568d1053b2150ef374a @@ -7616,6 +7718,15 @@ __metadata: languageName: node linkType: hard +"string_decoder@npm:~1.1.1": + version: 1.1.1 + resolution: "string_decoder@npm:1.1.1" + dependencies: + safe-buffer: "npm:~5.1.0" + checksum: 10/7c41c17ed4dea105231f6df208002ebddd732e8e9e2d619d133cecd8e0087ddfd9587d2feb3c8caf3213cbd841ada6d057f5142cae68a4e62d3540778d9819b4 + languageName: node + linkType: hard + "strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": version: 6.0.1 resolution: "strip-ansi@npm:6.0.1" @@ -8036,10 +8147,10 @@ __metadata: languageName: node linkType: hard -"undici-types@npm:~6.20.0": - version: 6.20.0 - resolution: "undici-types@npm:6.20.0" - checksum: 10/583ac7bbf4ff69931d3985f4762cde2690bb607844c16a5e2fbb92ed312fe4fa1b365e953032d469fa28ba8b224e88a595f0b10a449332f83fa77c695e567dbe +"undici-types@npm:~7.10.0": + version: 7.10.0 + resolution: "undici-types@npm:7.10.0" + checksum: 10/1f3fe777937690ab8a7a7bccabc8fdf4b3171f4899b5a384fb5f3d6b56c4b5fec2a51fbf345c9dd002ff6716fd440a37fa8fdb0e13af8eca8889f25445875ba3 languageName: node linkType: hard @@ -8075,6 +8186,26 @@ __metadata: languageName: node linkType: hard +"universalify@npm:^2.0.0": + version: 2.0.1 + resolution: "universalify@npm:2.0.1" + checksum: 10/ecd8469fe0db28e7de9e5289d32bd1b6ba8f7183db34f3bfc4ca53c49891c2d6aa05f3fb3936a81285a905cc509fb641a0c3fc131ec786167eff41236ae32e60 + languageName: node + linkType: hard + +"unzipper@npm:^0.12.3": + version: 0.12.3 + resolution: "unzipper@npm:0.12.3" + dependencies: + bluebird: "npm:~3.7.2" + duplexer2: "npm:~0.1.4" + fs-extra: "npm:^11.2.0" + graceful-fs: "npm:^4.2.2" + node-int64: "npm:^0.4.0" + checksum: 10/b210c421308e1913e01b54faad4ae79e758c674311892414a0697acacba9f82fa0051b677faa77e62fab422eef928c858f2d5cda9ddb47a2f3db95b0e9b36359 + languageName: node + linkType: hard + "update-browserslist-db@npm:^1.0.5": version: 1.0.5 resolution: "update-browserslist-db@npm:1.0.5" @@ -8105,7 +8236,7 @@ __metadata: languageName: node linkType: hard -"util-deprecate@npm:^1.0.1": +"util-deprecate@npm:^1.0.1, util-deprecate@npm:~1.0.1": version: 1.0.2 resolution: "util-deprecate@npm:1.0.2" checksum: 
10/474acf1146cb2701fe3b074892217553dfcf9a031280919ba1b8d651a068c9b15d863b7303cb15bd00a862b498e6cf4ad7b4a08fb134edd5a6f7641681cb54a2
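---

Usage sketches for the reusable workflows touched above. These are illustrative callers, not part of the patch: repository refs, secret and variable names, cron schedules, and version numbers are placeholders.

The release-PR workflow now distinguishes `checkout-base-branch` (the ref checked out to cut the release) from `release-pr-base-branch` (the branch the PR targets), and takes `previous-version-ref` instead of `previous-version-tag`, so existing callers need updating. A minimal mobile caller sketch; the workflow file path here is a guess, since only its hunks appear in this patch:

```yaml
jobs:
  create-release-pr:
    # Hypothetical path; pin a released tag of github-tools rather than @main.
    uses: MetaMask/github-tools/.github/workflows/create-release-pr.yml@main
    with:
      platform: mobile
      checkout-base-branch: main
      release-pr-base-branch: main
      previous-version-ref: v7.40.0 # any tag, branch, or commit
      semver-version: '7.41.0'
      mobile-build-version: '1620'
      # git-user-name / git-user-email fall back to the metamaskbot defaults
    secrets:
      github-token: ${{ secrets.RELEASE_PAT }} # placeholder secret name
```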
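The new `flaky-test-report.yml` is a `workflow_call` workflow, so a consuming repository drives it from its own trigger, typically a schedule. A sketch, assuming the default `GITHUB_TOKEN` carries the `actions: read` access the token description asks for:

```yaml
name: Flaky Test Report
on:
  schedule:
    - cron: '0 6 * * 1-5' # placeholder: weekday mornings, UTC
jobs:
  report:
    uses: MetaMask/github-tools/.github/workflows/flaky-test-report.yml@main
    with:
      repository: metamask-extension
      workflow_id: main.yml
    secrets:
      github-token: ${{ secrets.GITHUB_TOKEN }}
      slack-webhook-flaky-tests: ${{ secrets.SLACK_WEBHOOK_FLAKY_TESTS }}
```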
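`post-merge-validation.yml` follows the same checkout-github-tools-and-run pattern; only `repo` and `start_hour_utc` are required, while the spreadsheet ID and lookback window fall back to defaults. A scheduled caller sketch:

```yaml
on:
  schedule:
    - cron: '0 9 * * *' # placeholder: daily, shortly after start_hour_utc
jobs:
  post-merge-validation:
    uses: MetaMask/github-tools/.github/workflows/post-merge-validation.yml@main
    with:
      repo: MetaMask/metamask-extension
      start_hour_utc: 8
      lookback_days: 1
    secrets:
      github-token: ${{ secrets.GITHUB_TOKEN }}
      google-application-creds-base64: ${{ secrets.GOOGLE_APPLICATION_CREDS_BASE64 }}
```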
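The new `pr-line-check.yml` size inputs are cumulative thresholds: a total up to `xs_max_size` is labeled `size-XS`, up to `s_max_size` is `size-S`, up to `m_max_size` is `size-M`, up to `l_max_size` is `size-L`, and anything larger is `size-XL`; `max_lines` still gates pass/fail independently of the labeling. A caller sketch with tightened thresholds; the trigger and the permissions block are assumptions, since the label updates need write access on the PR:

```yaml
name: PR Size
on:
  pull_request:
    types: [opened, synchronize, reopened]
jobs:
  pr-size:
    permissions:
      pull-requests: write # assumed requirement for the size-label updates
      issues: write
    uses: MetaMask/github-tools/.github/workflows/pr-line-check.yml@main
    with:
      max_lines: 1500
      xs_max_size: 20
      s_max_size: 150
      m_max_size: 600
      l_max_size: 1200
```

With these values a 120-line diff lands in `size-S`, a 400-line diff in `size-M`, and the job fails only above 1500 changed lines.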
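`remove-rca-needed-label-sheets.yml` requires the spreadsheet ID from the consumer, and `dry_run` is a string input, so it should be quoted. A sketch that starts in dry-run mode:

```yaml
jobs:
  remove-rca-labels:
    uses: MetaMask/github-tools/.github/workflows/remove-rca-needed-label-sheets.yml@main
    with:
      spreadsheet_id: ${{ vars.RCA_SPREADSHEET_ID }} # placeholder repository variable
      sheet_name: 'Form Responses 1'
      dry_run: 'true' # string, not boolean; switch to 'false' once the output looks right
    secrets:
      github-token: ${{ secrets.GITHUB_TOKEN }}
      google-application-creds-base64: ${{ secrets.GOOGLE_APPLICATION_CREDS_BASE64 }}
```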
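Finally, `stable-sync.yml` now declares `github-token` as a `workflow_call` secret instead of reading the implicit `secrets.GITHUB_TOKEN`, so existing callers must pass a token explicitly:

```yaml
jobs:
  stable-sync:
    uses: MetaMask/github-tools/.github/workflows/stable-sync.yml@main
    with:
      semver-version: '12.9.0' # feeds the stable-main-<version> branch name
      stable-branch-name: stable
    secrets:
      github-token: ${{ secrets.GITHUB_TOKEN }} # or a PAT allowed to open PRs
```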