diff --git a/.github/actions/enumerate-tests/action.yml b/.github/actions/enumerate-tests/action.yml
index f13fb81b093..7fa9d0c891e 100644
--- a/.github/actions/enumerate-tests/action.yml
+++ b/.github/actions/enumerate-tests/action.yml
@@ -6,6 +6,11 @@ inputs:
type: string
default: ''
description: 'Additional MSBuild arguments passed to the test matrix generation step (e.g., /p:IncludeTemplateTests=true /p:OnlyDeploymentTests=true)'
+ artifactSuffix:
+ required: false
+ type: string
+ default: ''
+ description: 'Suffix to append to artifact names to avoid collisions when the action is used multiple times in the same workflow run'
# Output format: JSON with structure {"include": [{...}, ...]}
# Each entry contains:
@@ -75,7 +80,7 @@ runs:
if: always()
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
- name: logs-enumerate-tests-${{ runner.os }}
+ name: logs-enumerate-tests${{ inputs.artifactSuffix && format('-{0}', inputs.artifactSuffix) || '' }}-${{ runner.os }}
path: |
artifacts/log/**/*.binlog
artifacts/**/*tests-partitions.json
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e3a6d9c5ae4..fb45051b90d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -66,12 +66,34 @@ jobs:
with:
versionOverrideArg: ${{ needs.prepare_for_ci.outputs.VERSION_SUFFIX_OVERRIDE }}
+ deployment_tests:
+ uses: ./.github/workflows/deployment-e2e-tests.yml
+ name: Deployment E2E Tests
+ needs: [prepare_for_ci]
+ permissions:
+ id-token: write
+ contents: read
+ secrets: inherit
+ # Run on non-fork PRs and push events (main/release branches), skip docs-only changes
+ if: >-
+ ${{
+ github.repository_owner == 'microsoft' &&
+ needs.prepare_for_ci.outputs.skip_workflow != 'true' &&
+ (github.event_name == 'push' ||
+ github.event.pull_request.head.repo.full_name == github.repository)
+ }}
+ with:
+ pr_number: ${{ github.event.number && format('{0}', github.event.number) || '' }}
+ versionOverrideArg: ${{ needs.prepare_for_ci.outputs.VERSION_SUFFIX_OVERRIDE }}
+
# This job is used for branch protection. It fails if any of the dependent jobs failed
results:
if: ${{ always() && github.repository_owner == 'microsoft' }}
runs-on: ubuntu-latest
name: Final Results
- needs: [prepare_for_ci, tests]
+    # deployment_tests is listed in needs so its result is available to the steps below,
+    # but it is excluded from the failure check: deployment failures are informational
+ needs: [prepare_for_ci, tests, deployment_tests]
steps:
- name: Fail if any of the dependent jobs failed
@@ -83,13 +105,27 @@ jobs:
# For others 'skipped' can be when a transitive dependency fails and the dependent job gets
# 'skipped'. For example, one of setup_* jobs failing and the Integration test jobs getting
# 'skipped'
+ #
+ # Note: deployment_tests is excluded from the failure check below because deployment
+ # failures can be caused by transient Azure infrastructure issues outside the team's
+ # control. Deployment results are reported in a separate informational step.
if: >-
${{ always() &&
needs.prepare_for_ci.outputs.skip_workflow != 'true' &&
needs.tests.outputs.skip_workflow != 'true' &&
- (contains(needs.*.result, 'failure') ||
- contains(needs.*.result, 'cancelled') ||
- contains(needs.*.result, 'skipped')) }}
+ (needs.prepare_for_ci.result == 'failure' ||
+ needs.tests.result == 'failure' ||
+ needs.tests.result == 'cancelled' ||
+ needs.tests.result == 'skipped') }}
run: |
echo "One or more dependent jobs failed."
exit 1
+
+ - name: Report deployment test results
+ if: ${{ always() && needs.deployment_tests.result != 'skipped' }}
+ run: |
+ if [[ "${{ needs.deployment_tests.result }}" == "failure" ]]; then
+ echo "::warning::Deployment E2E tests failed. This is informational only and does not block the PR. Check deployment test artifacts for details."
+ else
+ echo "Deployment E2E tests passed."
+ fi
diff --git a/.github/workflows/cli-e2e-recording-comment.yml b/.github/workflows/cli-e2e-recording-comment.yml
index 06ce0a21275..0127b3db1c3 100644
--- a/.github/workflows/cli-e2e-recording-comment.yml
+++ b/.github/workflows/cli-e2e-recording-comment.yml
@@ -1,4 +1,4 @@
-name: Add CLI E2E Recording Comment
+name: Add E2E Recording Comments
on:
# Trigger when the CI workflow completes (success, failure, or cancelled)
@@ -278,3 +278,348 @@ jobs:
else
echo "No recordings found in $RECORDINGS_DIR"
fi
+
+ # Post deployment E2E test recordings as a separate PR comment
+ add-deployment-recording-comment:
+ # Only run on the microsoft org and for pull requests
+ if: >-
+ ${{ github.repository_owner == 'microsoft' &&
+ (github.event.workflow_run.event == 'pull_request' || github.event_name == 'workflow_dispatch') }}
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ actions: read
+ steps:
+ - name: Get workflow run info
+ id: run-info
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ let runId, prNumber, headSha;
+
+ if (context.eventName === 'workflow_dispatch') {
+ runId = context.payload.inputs.run_id;
+ const run = await github.rest.actions.getWorkflowRun({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: runId
+ });
+ headSha = run.data.head_sha;
+
+ const prs = await github.rest.pulls.list({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ state: 'open',
+ head: `${context.repo.owner}:${run.data.head_branch}`
+ });
+ prNumber = prs.data.length > 0 ? prs.data[0].number : null;
+ } else {
+ runId = context.payload.workflow_run.id;
+ headSha = context.payload.workflow_run.head_sha;
+
+ const prs = context.payload.workflow_run.pull_requests;
+ prNumber = prs && prs.length > 0 ? prs[0].number : null;
+ }
+
+ if (!prNumber) {
+ console.log('No PR found for this workflow run, skipping deployment recording comment');
+ core.setOutput('skip', 'true');
+ return;
+ }
+
+ core.setOutput('skip', 'false');
+ core.setOutput('run_id', runId);
+ core.setOutput('pr_number', prNumber);
+ core.setOutput('head_sha', headSha);
+ console.log(`Run ID: ${runId}, PR: ${prNumber}, SHA: ${headSha}`);
+
+ - name: Get deployment test job results and download recording artifacts
+ if: steps.run-info.outputs.skip != 'true'
+ id: get-results
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ const fs = require('fs');
+ const path = require('path');
+
+            const runId = Number(${{ toJSON(steps.run-info.outputs.run_id) }});
+
+ // Get all jobs for the workflow run to determine per-test results
+ const jobs = await github.paginate(
+ github.rest.actions.listJobsForWorkflowRun,
+ {
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: runId,
+ per_page: 100
+ }
+ );
+
+ // Filter for deployment test matrix jobs (format: "Deploy (TestClassName)")
+ const deployJobs = jobs.filter(job => job.name.match(/Deploy \(.+\)/));
+
+ if (deployJobs.length === 0) {
+ console.log('No deployment test jobs found in this run');
+ core.setOutput('has_results', 'false');
+ return;
+ }
+
+ const passedTests = [];
+ const failedTests = [];
+ const cancelledTests = [];
+
+ for (const job of deployJobs) {
+ const match = job.name.match(/Deploy \((.+)\)/);
+ const testName = match ? match[1] : job.name;
+
+ console.log(`Job "${job.name}" - conclusion: ${job.conclusion}`);
+
+ if (job.conclusion === 'success') {
+ passedTests.push(testName);
+ } else if (job.conclusion === 'failure') {
+ failedTests.push(testName);
+ } else if (job.conclusion === 'cancelled') {
+ cancelledTests.push(testName);
+ }
+ }
+
+ core.setOutput('has_results', 'true');
+ core.setOutput('passed_tests', JSON.stringify(passedTests));
+ core.setOutput('failed_tests', JSON.stringify(failedTests));
+ core.setOutput('cancelled_tests', JSON.stringify(cancelledTests));
+
+ // Download deployment recording artifacts
+ const allArtifacts = await github.paginate(
+ github.rest.actions.listWorkflowRunArtifacts,
+ {
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: runId,
+ per_page: 100
+ }
+ );
+
+ const recordingArtifacts = allArtifacts.filter(a =>
+ a.name.startsWith('deployment-test-recordings-')
+ );
+
+ console.log(`Found ${recordingArtifacts.length} deployment recording artifacts`);
+
+ const recordingsDir = 'recordings';
+ fs.mkdirSync(recordingsDir, { recursive: true });
+
+ for (const artifact of recordingArtifacts) {
+ console.log(`Downloading ${artifact.name}...`);
+
+ const download = await github.rest.actions.downloadArtifact({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ artifact_id: artifact.id,
+ archive_format: 'zip'
+ });
+
+ const artifactPath = path.join(recordingsDir, `${artifact.name}.zip`);
+ fs.writeFileSync(artifactPath, Buffer.from(download.data));
+ }
+
+ - name: Extract deployment recordings from artifacts
+ if: steps.run-info.outputs.skip != 'true' && steps.get-results.outputs.has_results == 'true'
+ shell: bash
+ run: |
+ mkdir -p cast_files
+
+ for zipfile in recordings/*.zip; do
+ if [ -f "$zipfile" ]; then
+ ARTIFACT_NAME=$(basename "$zipfile" .zip)
+ SHORTNAME=${ARTIFACT_NAME#deployment-test-recordings-}
+ EXTRACT_DIR="recordings/extracted_${ARTIFACT_NAME}"
+ unzip -o "$zipfile" -d "$EXTRACT_DIR" || true
+
+ # Rename .cast files to use the shortname
+ CAST_INDEX=0
+ while IFS= read -r -d '' castfile; do
+ if [ $CAST_INDEX -eq 0 ]; then
+ cp "$castfile" "cast_files/${SHORTNAME}.cast"
+ else
+ cp "$castfile" "cast_files/${SHORTNAME}-${CAST_INDEX}.cast"
+ fi
+ CAST_INDEX=$((CAST_INDEX + 1))
+ done < <(find "$EXTRACT_DIR" -name "*.cast" -print0)
+ fi
+ done
+
+ echo "Found deployment recordings:"
+ ls -la cast_files/ || echo "No .cast files found"
+
+ - name: Upload deployment recordings and post comment
+ if: steps.run-info.outputs.skip != 'true' && steps.get-results.outputs.has_results == 'true'
+ env:
+ GH_TOKEN: ${{ github.token }}
+ GITHUB_REPOSITORY: ${{ github.repository }}
+ GITHUB_REPOSITORY_OWNER: ${{ github.repository_owner }}
+ GITHUB_EVENT_REPO_NAME: ${{ github.event.repository.name }}
+ PASSED_TESTS: ${{ steps.get-results.outputs.passed_tests }}
+ FAILED_TESTS: ${{ steps.get-results.outputs.failed_tests }}
+ CANCELLED_TESTS: ${{ steps.get-results.outputs.cancelled_tests }}
+ shell: bash
+ run: |
+ PR_NUMBER="${{ steps.run-info.outputs.pr_number }}"
+ RUN_ID="${{ steps.run-info.outputs.run_id }}"
+ HEAD_SHA="${{ steps.run-info.outputs.head_sha }}"
+ SHORT_SHA="${HEAD_SHA:0:7}"
+ RUN_URL="https://github.com/${GITHUB_REPOSITORY}/actions/runs/${RUN_ID}"
+
+ # Parse test results
+ PASSED_COUNT=$(echo "$PASSED_TESTS" | jq 'length')
+ FAILED_COUNT=$(echo "$FAILED_TESTS" | jq 'length')
+ CANCELLED_COUNT=$(echo "$CANCELLED_TESTS" | jq 'length')
+
+ # Determine overall status
+ if [ "$FAILED_COUNT" -gt 0 ]; then
+ EMOJI="❌"
+ STATUS="failed"
+ elif [ "$CANCELLED_COUNT" -gt 0 ] && [ "$PASSED_COUNT" -eq 0 ]; then
+ EMOJI="⚠️"
+ STATUS="cancelled"
+ elif [ "$PASSED_COUNT" -gt 0 ]; then
+ EMOJI="✅"
+ STATUS="passed"
+ else
+ EMOJI="❓"
+ STATUS="unknown"
+ fi
+
+ # Upload recordings to asciinema
+ RECORDINGS_DIR="cast_files"
+ declare -A RECORDING_URLS
+
+ if [ -d "$RECORDINGS_DIR" ] && compgen -G "$RECORDINGS_DIR"/*.cast > /dev/null; then
+ pip install --quiet asciinema
+
+ MAX_UPLOAD_RETRIES=5
+ RETRY_BASE_DELAY_SECONDS=30
+
+ for castfile in "$RECORDINGS_DIR"/*.cast; do
+ if [ -f "$castfile" ]; then
+ filename=$(basename "$castfile" .cast)
+ echo "Uploading $castfile..."
+
+ ASCIINEMA_URL=""
+ for attempt in $(seq 1 "$MAX_UPLOAD_RETRIES"); do
+ UPLOAD_OUTPUT=$(asciinema upload "$castfile" 2>&1) || true
+ ASCIINEMA_URL=$(echo "$UPLOAD_OUTPUT" | grep -oP 'https://asciinema\.org/a/[a-zA-Z0-9_-]+' | head -1) || true
+ if [ -n "$ASCIINEMA_URL" ]; then
+ break
+ fi
+ if [ "$attempt" -lt "$MAX_UPLOAD_RETRIES" ]; then
+ DELAY=$((attempt * RETRY_BASE_DELAY_SECONDS))
+ echo "Upload attempt $attempt failed, retrying in ${DELAY}s..."
+ sleep "$DELAY"
+ fi
+ done
+
+ if [ -n "$ASCIINEMA_URL" ]; then
+ RECORDING_URLS["$filename"]="$ASCIINEMA_URL"
+ echo "Uploaded: $ASCIINEMA_URL"
+ else
+ RECORDING_URLS["$filename"]="FAILED"
+ echo "Failed to upload $castfile after $MAX_UPLOAD_RETRIES attempts"
+ fi
+ fi
+ done
+ fi
+
+ # Build the comment
+      COMMENT_MARKER="<!-- deployment-e2e-test-recordings -->"
+
+ COMMENT_BODY="${COMMENT_MARKER}
+ ${EMOJI} **Deployment E2E Tests ${STATUS}** — ${PASSED_COUNT} passed, ${FAILED_COUNT} failed, ${CANCELLED_COUNT} cancelled (commit \`${SHORT_SHA}\`)
+
+      <details>
+      <summary>View test results and recordings</summary>
+
+ [View workflow run](${RUN_URL})
+
+ | Test | Result | Recording |
+ |------|--------|-----------|"
+
+ # Add passed tests
+ while IFS= read -r test; do
+ [ -z "$test" ] && continue
+ RECORDING_LINK=""
+ if [ -n "${RECORDING_URLS[$test]+x}" ]; then
+ if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
+ RECORDING_LINK="❌ Upload failed"
+ else
+ RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
+ fi
+ fi
+ COMMENT_BODY="${COMMENT_BODY}
+ | ${test} | ✅ Passed | ${RECORDING_LINK} |"
+ done < <(echo "$PASSED_TESTS" | jq -r '.[]')
+
+ # Add failed tests
+ while IFS= read -r test; do
+ [ -z "$test" ] && continue
+ RECORDING_LINK=""
+ if [ -n "${RECORDING_URLS[$test]+x}" ]; then
+ if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
+ RECORDING_LINK="❌ Upload failed"
+ else
+ RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
+ fi
+ fi
+ COMMENT_BODY="${COMMENT_BODY}
+ | ${test} | ❌ Failed | ${RECORDING_LINK} |"
+ done < <(echo "$FAILED_TESTS" | jq -r '.[]')
+
+ # Add cancelled tests
+ while IFS= read -r test; do
+ [ -z "$test" ] && continue
+ RECORDING_LINK=""
+ if [ -n "${RECORDING_URLS[$test]+x}" ]; then
+ if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
+ RECORDING_LINK="❌ Upload failed"
+ else
+ RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
+ fi
+ fi
+ COMMENT_BODY="${COMMENT_BODY}
+ | ${test} | ⚠️ Cancelled | ${RECORDING_LINK} |"
+ done < <(echo "$CANCELLED_TESTS" | jq -r '.[]')
+
+ COMMENT_BODY="${COMMENT_BODY}
+      </details>
+ ---
+ 📹 Recordings uploaded automatically from [CI run #${RUN_ID}](${RUN_URL})
+
+ "
+
+ # Delete any existing deployment recording comments, then post the new one
+ EXISTING_COMMENT_IDS=$(gh api graphql -f query='
+ query($owner: String!, $repo: String!, $pr: Int!) {
+ repository(owner: $owner, name: $repo) {
+ pullRequest(number: $pr) {
+ comments(first: 100) {
+ nodes {
+ databaseId
+ author { login }
+ body
+ }
+ }
+ }
+ }
+ }' -f owner="$GITHUB_REPOSITORY_OWNER" -f repo="$GITHUB_EVENT_REPO_NAME" -F pr="$PR_NUMBER" \
+ --jq '.data.repository.pullRequest.comments.nodes[] | select(.author.login == "github-actions" and (.body | contains("'"${COMMENT_MARKER}"'"))) | .databaseId') || true
+
+ for COMMENT_ID in $EXISTING_COMMENT_IDS; do
+ echo "Deleting old comment $COMMENT_ID"
+ gh api \
+ --method DELETE \
+ -H "Accept: application/vnd.github+json" \
+ "/repos/${GITHUB_REPOSITORY}/issues/comments/${COMMENT_ID}" || true
+ done
+
+ echo "Creating new deployment recording comment on PR #${PR_NUMBER}"
+ gh pr comment "${PR_NUMBER}" --repo "$GITHUB_REPOSITORY" --body "$COMMENT_BODY"
+ echo "Posted deployment recording comment to PR #${PR_NUMBER}"
diff --git a/.github/workflows/deployment-e2e-tests.yml b/.github/workflows/deployment-e2e-tests.yml
new file mode 100644
index 00000000000..533e4d9e722
--- /dev/null
+++ b/.github/workflows/deployment-e2e-tests.yml
@@ -0,0 +1,264 @@
+# Reusable workflow for deployment E2E tests
+#
+# Called from:
+# - ci.yml: Runs on every non-fork PR
+# - deployment-tests.yml: Nightly schedule and manual triggers
+#
+# Security:
+# - Uses OIDC (Workload Identity Federation) for Azure authentication
+# - No stored Azure secrets - credentials flow from the deployment-testing environment
+#
+name: Deployment E2E Tests (Reusable)
+
+on:
+ workflow_call:
+ inputs:
+ pr_number:
+ description: 'PR number (empty for non-PR runs)'
+ required: false
+ type: string
+ default: ''
+ versionOverrideArg:
+ description: 'Version suffix override for the build (e.g., /p:VersionSuffix=pr.123.gabcdef01)'
+ required: false
+ type: string
+ default: ''
+ outputs:
+ skip_workflow:
+ description: 'Whether the workflow was skipped (no deployment tests found)'
+ value: ${{ jobs.enumerate.outputs.skip_workflow }}
+
+jobs:
+ # Enumerate deployment test classes to build the matrix
+ enumerate:
+ name: Enumerate Tests
+ runs-on: ubuntu-latest
+ if: ${{ github.repository_owner == 'microsoft' }}
+ permissions:
+ contents: read
+ outputs:
+ matrix: ${{ steps.enumerate.outputs.all_tests }}
+ skip_workflow: ${{ steps.check_matrix.outputs.skip_workflow }}
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - uses: ./.github/actions/enumerate-tests
+ id: enumerate
+ with:
+ buildArgs: '/p:OnlyDeploymentTests=true'
+ artifactSuffix: 'deployment'
+
+ - name: Display test matrix
+ run: |
+ echo "Deployment test matrix:"
+ echo '${{ steps.enumerate.outputs.all_tests }}' | jq .
+
+ - name: Check if matrix is empty
+ id: check_matrix
+ run: |
+ MATRIX='${{ steps.enumerate.outputs.all_tests }}'
+ if [ "$MATRIX" = '{"include":[]}' ] || [ -z "$MATRIX" ]; then
+ echo "skip_workflow=true" >> $GITHUB_OUTPUT
+ echo "No deployment tests found, skipping workflow"
+ else
+ echo "skip_workflow=false" >> $GITHUB_OUTPUT
+ fi
+
+ # Build solution and CLI once, share via artifacts
+ build:
+ name: Build
+ needs: [enumerate]
+ if: ${{ needs.enumerate.outputs.skip_workflow != 'true' }}
+ runs-on: 8-core-ubuntu-latest
+ permissions:
+ contents: read
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Setup .NET
+ uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4.3.1
+ with:
+ global-json-file: global.json
+
+ - name: Restore solution
+ run: ./restore.sh
+
+ - name: Build solution and pack CLI
+ run: |
+ # Build the full solution and pack CLI for local testing
+ ./build.sh --build --pack -c Release ${{ inputs.versionOverrideArg }}
+ env:
+ # Skip native build to save time - we'll use the non-native CLI
+ SkipNativeBuild: true
+
+ - name: Prepare CLI artifacts
+ run: |
+ # Create a clean artifact directory with CLI and packages
+ ARTIFACT_DIR="${{ github.workspace }}/cli-artifacts"
+ mkdir -p "$ARTIFACT_DIR/bin"
+ mkdir -p "$ARTIFACT_DIR/packages"
+
+ # Copy CLI binary and dependencies
+ cp -r "${{ github.workspace }}/artifacts/bin/Aspire.Cli/Release/net10.0/"* "$ARTIFACT_DIR/bin/"
+
+ # Copy NuGet packages
+ PACKAGES_DIR="${{ github.workspace }}/artifacts/packages/Release/Shipping"
+ if [ -d "$PACKAGES_DIR" ]; then
+ find "$PACKAGES_DIR" -name "*.nupkg" -exec cp {} "$ARTIFACT_DIR/packages/" \;
+ fi
+
+ echo "CLI artifacts prepared:"
+ ls -la "$ARTIFACT_DIR/bin/"
+ echo "Package count: $(find "$ARTIFACT_DIR/packages" -name "*.nupkg" | wc -l)"
+
+ - name: Upload CLI artifacts
+ uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ with:
+ name: aspire-cli-artifacts
+ path: ${{ github.workspace }}/cli-artifacts/
+ retention-days: 1
+
+ # Run each deployment test class in parallel
+ deploy-test:
+ name: Deploy (${{ matrix.shortname }})
+ needs: [enumerate, build]
+ if: ${{ needs.enumerate.outputs.skip_workflow != 'true' }}
+ runs-on: 8-core-ubuntu-latest
+ environment: deployment-testing
+ permissions:
+ id-token: write # For OIDC Azure login
+ contents: read
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.enumerate.outputs.matrix) }}
+ env:
+ ASPIRE_DEPLOYMENT_TEST_SUBSCRIPTION: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
+ ASPIRE_DEPLOYMENT_TEST_RG_PREFIX: ${{ vars.ASPIRE_DEPLOYMENT_TEST_RG_PREFIX || 'aspire-e2e' }}
+
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Setup .NET
+ uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4.3.1
+ with:
+ global-json-file: global.json
+
+ - name: Restore and build test project
+ run: |
+ ./restore.sh
+ ./build.sh -restore -ci -build -projects ${{ github.workspace }}/tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj -c Release
+ env:
+ SkipNativeBuild: true
+
+ - name: Download CLI artifacts
+ uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
+ with:
+ name: aspire-cli-artifacts
+ path: ${{ github.workspace }}/cli-artifacts
+
+ - name: Install Aspire CLI from artifacts
+ run: |
+ ASPIRE_HOME="$HOME/.aspire"
+ mkdir -p "$ASPIRE_HOME/bin"
+
+ # Copy CLI binary and dependencies
+ cp -r "${{ github.workspace }}/cli-artifacts/bin/"* "$ASPIRE_HOME/bin/"
+ chmod +x "$ASPIRE_HOME/bin/aspire"
+
+ # Add to PATH for this job
+ echo "$ASPIRE_HOME/bin" >> $GITHUB_PATH
+
+ # Set up NuGet hive for local packages
+ HIVE_DIR="$ASPIRE_HOME/hives/local/packages"
+ mkdir -p "$HIVE_DIR"
+ cp "${{ github.workspace }}/cli-artifacts/packages/"*.nupkg "$HIVE_DIR/" 2>/dev/null || true
+
+ # Configure CLI to use local channel
+ "$ASPIRE_HOME/bin/aspire" config set channel local --global || true
+
+ echo "✅ Aspire CLI installed:"
+ "$ASPIRE_HOME/bin/aspire" --version
+
+ - name: Azure Login (OIDC)
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ env:
+ AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
+ AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
+ AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
+ with:
+ script: |
+ const token = await core.getIDToken('api://AzureADTokenExchange');
+ core.setSecret(token);
+
+ // Login directly - token never leaves this step
+ await exec.exec('az', [
+ 'login', '--service-principal',
+ '--username', process.env.AZURE_CLIENT_ID,
+ '--tenant', process.env.AZURE_TENANT_ID,
+ '--federated-token', token,
+ '--allow-no-subscriptions'
+ ]);
+
+ await exec.exec('az', [
+ 'account', 'set',
+ '--subscription', process.env.AZURE_SUBSCRIPTION_ID
+ ]);
+
+ - name: Verify Azure authentication
+ run: |
+ echo "Verifying Azure authentication..."
+ az account show --query "{subscriptionId:id, tenantId:tenantId, user:user.name}" -o table
+ echo "✅ Azure authentication successful"
+
+ - name: Verify Docker is running
+ run: |
+ echo "Verifying Docker daemon..."
+ docker version
+ docker info | head -20
+ echo "✅ Docker is available"
+
+ - name: Run deployment test (${{ matrix.shortname }})
+ id: run_tests
+ env:
+ GITHUB_PR_NUMBER: ${{ inputs.pr_number }}
+ GITHUB_PR_HEAD_SHA: ${{ github.sha }}
+ AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
+ AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
+ AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
+ Azure__SubscriptionId: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
+ Azure__Location: westus3
+ GH_TOKEN: ${{ github.token }}
+ run: |
+ ./dotnet.sh test tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj \
+ -c Release \
+ --logger "trx;LogFileName=${{ matrix.shortname }}.trx" \
+ --results-directory ${{ github.workspace }}/testresults \
+ -- \
+ --filter-not-trait "quarantined=true" \
+ ${{ matrix.extraTestArgs }} \
+ || echo "test_failed=true" >> $GITHUB_OUTPUT
+
+ - name: Upload test results
+ if: always()
+ uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ with:
+ name: deployment-test-results-${{ matrix.shortname }}
+ path: |
+ ${{ github.workspace }}/testresults/
+ retention-days: 30
+
+ - name: Upload recordings
+ if: always()
+ uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
+ with:
+ name: deployment-test-recordings-${{ matrix.shortname }}
+ path: |
+ ${{ github.workspace }}/testresults/recordings/
+ retention-days: 30
+ if-no-files-found: ignore
+
+ - name: Check for test failures
+ if: steps.run_tests.outputs.test_failed == 'true'
+ run: |
+ echo "::error::Deployment test ${{ matrix.shortname }} failed. Check the test results artifact for details."
+ exit 1
diff --git a/.github/workflows/deployment-test-command.yml b/.github/workflows/deployment-test-command.yml
deleted file mode 100644
index 0a7203e455d..00000000000
--- a/.github/workflows/deployment-test-command.yml
+++ /dev/null
@@ -1,102 +0,0 @@
-# Trigger deployment tests from PR comments
-#
-# Usage: Comment `/deployment-test` on a PR
-#
-# This workflow validates the commenter is an org member and triggers
-# the deployment-tests.yml workflow with the PR context.
-#
-name: Deployment Test Command
-
-on:
- issue_comment:
- types: [created]
-
-permissions:
- contents: read
- pull-requests: write
- actions: write # To trigger workflows
-
-jobs:
- deployment-test:
- # Only run when the comment is exactly /deployment-test on a PR
- if: >-
- ${{
- github.event.comment.body == '/deployment-test' &&
- github.event.issue.pull_request &&
- github.repository_owner == 'microsoft'
- }}
- runs-on: ubuntu-latest
-
- steps:
- - name: Check org membership
- id: check_membership
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- with:
- script: |
- const commenter = context.payload.comment.user.login;
-
- try {
- // Check if user is a member of the dotnet org
- const { status } = await github.rest.orgs.checkMembershipForUser({
- org: 'dotnet',
- username: commenter
- });
-
- if (status === 204 || status === 302) {
- core.info(`✅ ${commenter} is a member of dotnet org`);
- core.setOutput('is_member', 'true');
- return;
- }
- } catch (error) {
- if (error.status === 404) {
- core.warning(`❌ ${commenter} is not a member of dotnet org`);
- core.setOutput('is_member', 'false');
-
- // Post a comment explaining the restriction
- await github.rest.issues.createComment({
- owner: context.repo.owner,
- repo: context.repo.repo,
- issue_number: context.issue.number,
- body: `@${commenter} The \`/deployment-test\` command is restricted to dotnet org members for security reasons (it deploys to real Azure infrastructure).`
- });
- return;
- }
- throw error;
- }
-
- - name: Get PR details
- if: steps.check_membership.outputs.is_member == 'true'
- id: pr
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- with:
- script: |
- const { data: pr } = await github.rest.pulls.get({
- owner: context.repo.owner,
- repo: context.repo.repo,
- pull_number: context.issue.number
- });
-
- core.setOutput('number', pr.number);
- core.setOutput('head_sha', pr.head.sha);
- core.setOutput('head_ref', pr.head.ref);
-
- - name: Trigger deployment tests
- if: steps.check_membership.outputs.is_member == 'true'
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- with:
- script: |
- // Dispatch from the PR's head ref to test the PR's code changes.
- // Security: Org membership check is the security boundary - only trusted
- // dotnet org members can trigger this workflow.
- // Note: The triggered workflow posts its own "starting" comment with the run URL.
- await github.rest.actions.createWorkflowDispatch({
- owner: context.repo.owner,
- repo: context.repo.repo,
- workflow_id: 'deployment-tests.yml',
- ref: '${{ steps.pr.outputs.head_ref }}',
- inputs: {
- pr_number: '${{ steps.pr.outputs.number }}'
- }
- });
-
- core.info('✅ Triggered deployment-tests.yml workflow');
diff --git a/.github/workflows/deployment-tests.yml b/.github/workflows/deployment-tests.yml
index 0e51d0c4abd..c79bac16242 100644
--- a/.github/workflows/deployment-tests.yml
+++ b/.github/workflows/deployment-tests.yml
@@ -1,14 +1,15 @@
# End-to-end deployment tests that deploy Aspire applications to real Azure infrastructure
#
# Triggers:
-# - workflow_dispatch: Manual trigger with scenario selection
+# - workflow_dispatch: Manual trigger
# - schedule: Nightly at 03:00 UTC
-# - /deployment-test command on PRs (via deployment-test-command.yml)
+#
+# This workflow calls the reusable deployment-e2e-tests.yml workflow.
+# For PR-triggered deployment tests, see ci.yml which also calls the reusable workflow.
#
# Security:
# - Uses OIDC (Workload Identity Federation) for Azure authentication
# - No stored Azure secrets
-# - Only dotnet org members can trigger via PR command
#
name: Deployment E2E Tests
@@ -31,252 +32,18 @@ concurrency:
cancel-in-progress: true
jobs:
- # Post "starting" comment to PR when triggered via /deployment-test command
- notify-start:
- name: Notify PR
- runs-on: ubuntu-latest
- if: ${{ github.repository_owner == 'microsoft' && inputs.pr_number != '' }}
- permissions:
- pull-requests: write
- steps:
- - name: Post starting comment
- env:
- GH_TOKEN: ${{ github.token }}
- run: |
- PR_NUMBER="${{ inputs.pr_number }}"
- RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-
- gh pr comment "${PR_NUMBER}" --repo "${{ github.repository }}" --body \
- "🚀 **Deployment tests starting** on PR #${PR_NUMBER}...
-
- This will deploy to real Azure infrastructure. Results will be posted here when complete.
-
- [View workflow run](${RUN_URL})"
-
- # Enumerate test classes to build the matrix
- enumerate:
- name: Enumerate Tests
- runs-on: ubuntu-latest
+ # Call the reusable deployment E2E test workflow
+ deployment_tests:
+ uses: ./.github/workflows/deployment-e2e-tests.yml
if: ${{ github.repository_owner == 'microsoft' }}
- permissions:
- contents: read
- outputs:
- matrix: ${{ steps.enumerate.outputs.all_tests }}
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - uses: ./.github/actions/enumerate-tests
- id: enumerate
- with:
- buildArgs: '/p:OnlyDeploymentTests=true'
-
- - name: Display test matrix
- run: |
- echo "Deployment test matrix:"
- echo '${{ steps.enumerate.outputs.all_tests }}' | jq .
-
- # Build solution and CLI once, share via artifacts
- build:
- name: Build
- runs-on: 8-core-ubuntu-latest
- if: ${{ github.repository_owner == 'microsoft' }}
- permissions:
- contents: read
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - name: Setup .NET
- uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4.3.1
- with:
- global-json-file: global.json
-
- - name: Restore solution
- run: ./restore.sh
-
- - name: Build solution and pack CLI
- run: |
- # Build the full solution and pack CLI for local testing
- ./build.sh --build --pack -c Release
- env:
- # Skip native build to save time - we'll use the non-native CLI
- SkipNativeBuild: true
-
- - name: Prepare CLI artifacts
- run: |
- # Create a clean artifact directory with CLI and packages
- ARTIFACT_DIR="${{ github.workspace }}/cli-artifacts"
- mkdir -p "$ARTIFACT_DIR/bin"
- mkdir -p "$ARTIFACT_DIR/packages"
-
- # Copy CLI binary and dependencies
- cp -r "${{ github.workspace }}/artifacts/bin/Aspire.Cli/Release/net10.0/"* "$ARTIFACT_DIR/bin/"
-
- # Copy NuGet packages
- PACKAGES_DIR="${{ github.workspace }}/artifacts/packages/Release/Shipping"
- if [ -d "$PACKAGES_DIR" ]; then
- find "$PACKAGES_DIR" -name "*.nupkg" -exec cp {} "$ARTIFACT_DIR/packages/" \;
- fi
-
- echo "CLI artifacts prepared:"
- ls -la "$ARTIFACT_DIR/bin/"
- echo "Package count: $(find "$ARTIFACT_DIR/packages" -name "*.nupkg" | wc -l)"
-
- - name: Upload CLI artifacts
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
- with:
- name: aspire-cli-artifacts
- path: ${{ github.workspace }}/cli-artifacts/
- retention-days: 1
-
- # Run each test class in parallel
- deploy-test:
- name: Deploy (${{ matrix.shortname }})
- needs: [enumerate, build]
- if: ${{ needs.enumerate.outputs.matrix != '{"include":[]}' && needs.enumerate.outputs.matrix != '' }}
- runs-on: 8-core-ubuntu-latest
- environment: deployment-testing
- permissions:
- id-token: write # For OIDC Azure login
- contents: read
- strategy:
- fail-fast: false
- matrix: ${{ fromJson(needs.enumerate.outputs.matrix) }}
- env:
- ASPIRE_DEPLOYMENT_TEST_SUBSCRIPTION: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
- ASPIRE_DEPLOYMENT_TEST_RG_PREFIX: ${{ vars.ASPIRE_DEPLOYMENT_TEST_RG_PREFIX || 'aspire-e2e' }}
-
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
- - name: Setup .NET
- uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4.3.1
- with:
- global-json-file: global.json
-
- - name: Restore and build test project
- run: |
- ./restore.sh
- ./build.sh -restore -ci -build -projects ${{ github.workspace }}/tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj -c Release
- env:
- SkipNativeBuild: true
-
- - name: Download CLI artifacts
- uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9
- with:
- name: aspire-cli-artifacts
- path: ${{ github.workspace }}/cli-artifacts
-
- - name: Install Aspire CLI from artifacts
- run: |
- ASPIRE_HOME="$HOME/.aspire"
- mkdir -p "$ASPIRE_HOME/bin"
-
- # Copy CLI binary and dependencies
- cp -r "${{ github.workspace }}/cli-artifacts/bin/"* "$ASPIRE_HOME/bin/"
- chmod +x "$ASPIRE_HOME/bin/aspire"
-
- # Add to PATH for this job
- echo "$ASPIRE_HOME/bin" >> $GITHUB_PATH
-
- # Set up NuGet hive for local packages
- HIVE_DIR="$ASPIRE_HOME/hives/local/packages"
- mkdir -p "$HIVE_DIR"
- cp "${{ github.workspace }}/cli-artifacts/packages/"*.nupkg "$HIVE_DIR/" 2>/dev/null || true
-
- # Configure CLI to use local channel
- "$ASPIRE_HOME/bin/aspire" config set channel local --global || true
-
- echo "✅ Aspire CLI installed:"
- "$ASPIRE_HOME/bin/aspire" --version
-
- - name: Azure Login (OIDC)
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- env:
- AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
- AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
- AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
- with:
- script: |
- const token = await core.getIDToken('api://AzureADTokenExchange');
- core.setSecret(token);
-
- // Login directly - token never leaves this step
- await exec.exec('az', [
- 'login', '--service-principal',
- '--username', process.env.AZURE_CLIENT_ID,
- '--tenant', process.env.AZURE_TENANT_ID,
- '--federated-token', token,
- '--allow-no-subscriptions'
- ]);
-
- await exec.exec('az', [
- 'account', 'set',
- '--subscription', process.env.AZURE_SUBSCRIPTION_ID
- ]);
-
- - name: Verify Azure authentication
- run: |
- echo "Verifying Azure authentication..."
- az account show --query "{subscriptionId:id, tenantId:tenantId, user:user.name}" -o table
- echo "✅ Azure authentication successful"
-
- - name: Verify Docker is running
- run: |
- echo "Verifying Docker daemon..."
- docker version
- docker info | head -20
- echo "✅ Docker is available"
-
- - name: Run deployment test (${{ matrix.shortname }})
- id: run_tests
- env:
- GITHUB_PR_NUMBER: ${{ inputs.pr_number || '' }}
- GITHUB_PR_HEAD_SHA: ${{ github.sha }}
- AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
- AZURE_TENANT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_TENANT_ID }}
- AZURE_CLIENT_ID: ${{ secrets.AZURE_DEPLOYMENT_TEST_CLIENT_ID }}
- Azure__SubscriptionId: ${{ secrets.AZURE_DEPLOYMENT_TEST_SUBSCRIPTION_ID }}
- Azure__Location: westus3
- GH_TOKEN: ${{ github.token }}
- run: |
- ./dotnet.sh test tests/Aspire.Deployment.EndToEnd.Tests/Aspire.Deployment.EndToEnd.Tests.csproj \
- -c Release \
- --logger "trx;LogFileName=${{ matrix.shortname }}.trx" \
- --results-directory ${{ github.workspace }}/testresults \
- -- \
- --filter-not-trait "quarantined=true" \
- ${{ matrix.extraTestArgs }} \
- || echo "test_failed=true" >> $GITHUB_OUTPUT
-
- - name: Upload test results
- if: always()
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
- with:
- name: deployment-test-results-${{ matrix.shortname }}
- path: |
- ${{ github.workspace }}/testresults/
- retention-days: 30
-
- - name: Upload recordings
- if: always()
- uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
- with:
- name: deployment-test-recordings-${{ matrix.shortname }}
- path: |
- ${{ github.workspace }}/testresults/recordings/
- retention-days: 30
- if-no-files-found: ignore
-
- - name: Check for test failures
- if: steps.run_tests.outputs.test_failed == 'true'
- run: |
- echo "::error::Deployment test ${{ matrix.shortname }} failed. Check the test results artifact for details."
- exit 1
+ secrets: inherit
+ with:
+ pr_number: ${{ inputs.pr_number || '' }}
# Create GitHub issue on nightly failure
create_issue_on_failure:
name: Create Issue on Failure
- needs: [deploy-test]
+ needs: [deployment_tests]
runs-on: ubuntu-latest
if: ${{ failure() && github.event_name == 'schedule' }}
permissions:
@@ -343,307 +110,3 @@ jobs:
});
console.log(`Created issue: ${issue.data.html_url}`);
}
-
- # Post completion comment back to PR when triggered via /deployment-test command
- post_pr_comment:
- name: Post PR Comment
- needs: [deploy-test]
- runs-on: ubuntu-latest
- if: ${{ always() && inputs.pr_number != '' }}
- permissions:
- pull-requests: write
- actions: read
- steps:
- - name: Get job results and download recording artifacts
- id: get_results
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- with:
- script: |
- const fs = require('fs');
- const path = require('path');
-
- // Get all jobs for this workflow run to determine per-test results
- const jobs = await github.paginate(
- github.rest.actions.listJobsForWorkflowRun,
- {
- owner: context.repo.owner,
- repo: context.repo.repo,
- run_id: context.runId,
- per_page: 100
- }
- );
-
- console.log(`Total jobs found: ${jobs.length}`);
-
- // Filter for deploy-test matrix jobs (format: "Deploy (TestClassName)")
- const deployJobs = jobs.filter(job => job.name.startsWith('Deploy ('));
-
- const passedTests = [];
- const failedTests = [];
- const cancelledTests = [];
-
- for (const job of deployJobs) {
- // Extract test name from job name "Deploy (TestClassName)"
- const match = job.name.match(/^Deploy \((.+)\)$/);
- const testName = match ? match[1] : job.name;
-
- console.log(`Job "${job.name}" - conclusion: ${job.conclusion}, status: ${job.status}`);
-
- if (job.conclusion === 'success') {
- passedTests.push(testName);
- } else if (job.conclusion === 'failure') {
- failedTests.push(testName);
- } else if (job.conclusion === 'cancelled') {
- cancelledTests.push(testName);
- }
- }
-
- console.log(`Passed: ${passedTests.length}, Failed: ${failedTests.length}, Cancelled: ${cancelledTests.length}`);
-
- // Output results for later steps
- core.setOutput('passed_tests', JSON.stringify(passedTests));
- core.setOutput('failed_tests', JSON.stringify(failedTests));
- core.setOutput('cancelled_tests', JSON.stringify(cancelledTests));
- core.setOutput('total_tests', passedTests.length + failedTests.length + cancelledTests.length);
-
- // List all artifacts for the current workflow run
- const allArtifacts = await github.paginate(
- github.rest.actions.listWorkflowRunArtifacts,
- {
- owner: context.repo.owner,
- repo: context.repo.repo,
- run_id: context.runId,
- per_page: 100
- }
- );
-
- console.log(`Total artifacts found: ${allArtifacts.length}`);
-
- // Filter for deployment test recording artifacts
- const recordingArtifacts = allArtifacts.filter(a =>
- a.name.startsWith('deployment-test-recordings-')
- );
-
- console.log(`Found ${recordingArtifacts.length} recording artifacts`);
-
- // Create recordings directory
- const recordingsDir = 'recordings';
- fs.mkdirSync(recordingsDir, { recursive: true });
-
- // Download each artifact
- for (const artifact of recordingArtifacts) {
- console.log(`Downloading ${artifact.name}...`);
-
- const download = await github.rest.actions.downloadArtifact({
- owner: context.repo.owner,
- repo: context.repo.repo,
- artifact_id: artifact.id,
- archive_format: 'zip'
- });
-
- const artifactPath = path.join(recordingsDir, `${artifact.name}.zip`);
- fs.writeFileSync(artifactPath, Buffer.from(download.data));
- console.log(`Saved to ${artifactPath}`);
- }
-
- core.setOutput('artifact_count', recordingArtifacts.length);
-
- - name: Extract recordings from artifacts
- shell: bash
- run: |
- mkdir -p cast_files
-
- for zipfile in recordings/*.zip; do
- if [ -f "$zipfile" ]; then
- echo "Extracting $zipfile..."
- # Artifact zip name: deployment-test-recordings-{shortname}.zip
- ARTIFACT_NAME=$(basename "$zipfile" .zip)
- SHORTNAME=${ARTIFACT_NAME#deployment-test-recordings-}
- EXTRACT_DIR="recordings/extracted_${ARTIFACT_NAME}"
- unzip -o "$zipfile" -d "$EXTRACT_DIR" || true
-
- # Rename .cast files to use the shortname (matching the job/test name)
- CAST_INDEX=0
- while IFS= read -r -d '' castfile; do
- if [ $CAST_INDEX -eq 0 ]; then
- cp "$castfile" "cast_files/${SHORTNAME}.cast"
- else
- cp "$castfile" "cast_files/${SHORTNAME}-${CAST_INDEX}.cast"
- fi
- CAST_INDEX=$((CAST_INDEX + 1))
- done < <(find "$EXTRACT_DIR" -name "*.cast" -print0)
- fi
- done
-
- echo "Found recordings:"
- ls -la cast_files/ || echo "No .cast files found"
-
- - name: Upload recordings to asciinema and post comment
- env:
- GH_TOKEN: ${{ github.token }}
- PASSED_TESTS: ${{ steps.get_results.outputs.passed_tests }}
- FAILED_TESTS: ${{ steps.get_results.outputs.failed_tests }}
- CANCELLED_TESTS: ${{ steps.get_results.outputs.cancelled_tests }}
- TOTAL_TESTS: ${{ steps.get_results.outputs.total_tests }}
- shell: bash
- run: |
- PR_NUMBER="${{ inputs.pr_number }}"
- RUN_ID="${{ github.run_id }}"
- RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${RUN_ID}"
- TEST_RESULT="${{ needs.deploy-test.result }}"
-
- # Parse the test results from JSON
- PASSED_COUNT=$(echo "$PASSED_TESTS" | jq 'length')
- FAILED_COUNT=$(echo "$FAILED_TESTS" | jq 'length')
- CANCELLED_COUNT=$(echo "$CANCELLED_TESTS" | jq 'length')
-
- # Determine overall status
- if [ "$FAILED_COUNT" -gt 0 ]; then
- EMOJI="❌"
- STATUS="failed"
- elif [ "$CANCELLED_COUNT" -gt 0 ] && [ "$PASSED_COUNT" -eq 0 ]; then
- EMOJI="⚠️"
- STATUS="cancelled"
- elif [ "$PASSED_COUNT" -gt 0 ]; then
- EMOJI="✅"
- STATUS="passed"
- else
- EMOJI="❓"
- STATUS="unknown"
- fi
-
- # Upload recordings first so we can include links in the unified table
- RECORDINGS_DIR="cast_files"
- declare -A RECORDING_URLS
-
- if [ -d "$RECORDINGS_DIR" ] && compgen -G "$RECORDINGS_DIR"/*.cast > /dev/null; then
- pip install --quiet asciinema
-
- # Retry configuration for asciinema uploads
- MAX_UPLOAD_RETRIES=5
- RETRY_BASE_DELAY_SECONDS=30
-
- UPLOAD_COUNT=0
- for castfile in "$RECORDINGS_DIR"/*.cast; do
- if [ -f "$castfile" ]; then
- filename=$(basename "$castfile" .cast)
- echo "Uploading $castfile..."
-
- # Upload to asciinema with retry logic for transient failures
- ASCIINEMA_URL=""
- for attempt in $(seq 1 "$MAX_UPLOAD_RETRIES"); do
- UPLOAD_OUTPUT=$(asciinema upload "$castfile" 2>&1) || true
- ASCIINEMA_URL=$(echo "$UPLOAD_OUTPUT" | grep -oP 'https://asciinema\.org/a/[a-zA-Z0-9_-]+' | head -1) || true
- if [ -n "$ASCIINEMA_URL" ]; then
- break
- fi
- if [ "$attempt" -lt "$MAX_UPLOAD_RETRIES" ]; then
- DELAY=$((attempt * RETRY_BASE_DELAY_SECONDS))
- echo "Upload attempt $attempt failed, retrying in ${DELAY}s..."
- sleep "$DELAY"
- fi
- done
-
- if [ -n "$ASCIINEMA_URL" ]; then
- RECORDING_URLS["$filename"]="$ASCIINEMA_URL"
- echo "Uploaded: $ASCIINEMA_URL"
- UPLOAD_COUNT=$((UPLOAD_COUNT + 1))
- else
- RECORDING_URLS["$filename"]="FAILED"
- echo "Failed to upload $castfile after $MAX_UPLOAD_RETRIES attempts"
- fi
- fi
- done
- echo "Uploaded $UPLOAD_COUNT recordings"
- else
- echo "No recordings found in $RECORDINGS_DIR"
- fi
-
- # Build the comment with summary outside collapsible and details inside
- COMMENT_MARKER=""
-
- COMMENT_BODY="${COMMENT_MARKER}
- ${EMOJI} **Deployment E2E Tests ${STATUS}** — ${PASSED_COUNT} passed, ${FAILED_COUNT} failed, ${CANCELLED_COUNT} cancelled
-
-
- View test results and recordings
-
- [View workflow run](${RUN_URL})
-
- | Test | Result | Recording |
- |------|--------|-----------|"
-
- # Add passed tests
- while IFS= read -r test; do
- RECORDING_LINK=""
- if [ -n "${RECORDING_URLS[$test]+x}" ]; then
- if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
- RECORDING_LINK="❌ Upload failed"
- else
- RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
- fi
- fi
- COMMENT_BODY="${COMMENT_BODY}
- | ${test} | ✅ Passed | ${RECORDING_LINK} |"
- done < <(echo "$PASSED_TESTS" | jq -r '.[]')
-
- # Add failed tests
- while IFS= read -r test; do
- RECORDING_LINK=""
- if [ -n "${RECORDING_URLS[$test]+x}" ]; then
- if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
- RECORDING_LINK="❌ Upload failed"
- else
- RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
- fi
- fi
- COMMENT_BODY="${COMMENT_BODY}
- | ${test} | ❌ Failed | ${RECORDING_LINK} |"
- done < <(echo "$FAILED_TESTS" | jq -r '.[]')
-
- # Add cancelled tests
- while IFS= read -r test; do
- RECORDING_LINK=""
- if [ -n "${RECORDING_URLS[$test]+x}" ]; then
- if [ "${RECORDING_URLS[$test]}" = "FAILED" ]; then
- RECORDING_LINK="❌ Upload failed"
- else
- RECORDING_LINK="[▶️ View Recording](${RECORDING_URLS[$test]})"
- fi
- fi
- COMMENT_BODY="${COMMENT_BODY}
- | ${test} | ⚠️ Cancelled | ${RECORDING_LINK} |"
- done < <(echo "$CANCELLED_TESTS" | jq -r '.[]')
-
- COMMENT_BODY="${COMMENT_BODY}
-
- "
-
- # Delete any existing deployment test comments, then post the new one
- EXISTING_COMMENT_IDS=$(gh api graphql -f query='
- query($owner: String!, $repo: String!, $pr: Int!) {
- repository(owner: $owner, name: $repo) {
- pullRequest(number: $pr) {
- comments(first: 100) {
- nodes {
- databaseId
- author { login }
- body
- }
- }
- }
- }
- }' -f owner="${{ github.repository_owner }}" -f repo="${{ github.event.repository.name }}" -F pr="$PR_NUMBER" \
- --jq '.data.repository.pullRequest.comments.nodes[] | select(.author.login == "github-actions" and (.body | contains("'"${COMMENT_MARKER}"'"))) | .databaseId') || true
-
- for COMMENT_ID in $EXISTING_COMMENT_IDS; do
- echo "Deleting old comment $COMMENT_ID"
- gh api \
- --method DELETE \
- -H "Accept: application/vnd.github+json" \
- "/repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" || true
- done
-
- echo "Creating new comment on PR #${PR_NUMBER}"
- gh pr comment "${PR_NUMBER}" --repo "${{ github.repository }}" --body "$COMMENT_BODY"
- echo "Posted comment to PR #${PR_NUMBER}"
diff --git a/tests/Aspire.Deployment.EndToEnd.Tests/AcaCompactNamingUpgradeDeploymentTests.cs b/tests/Aspire.Deployment.EndToEnd.Tests/AcaCompactNamingUpgradeDeploymentTests.cs
index b992d67c05e..2fc01a3f444 100644
--- a/tests/Aspire.Deployment.EndToEnd.Tests/AcaCompactNamingUpgradeDeploymentTests.cs
+++ b/tests/Aspire.Deployment.EndToEnd.Tests/AcaCompactNamingUpgradeDeploymentTests.cs
@@ -106,11 +106,29 @@ private async Task UpgradeFromGaToDevDoesNotDuplicateStorageAccountsCore(Cancell
await auto.DeclineAgentInitPromptAsync(counter);
// Step 6: Add ACA package using GA CLI (uses GA NuGet packages)
+ // The GA CLI may show a version picker (with "based on NuGet.config" text)
+ // or may auto-select the version. Handle both paths.
output.WriteLine("Step 6: Adding Azure Container Apps package (GA)...");
await auto.TypeAsync("aspire add Aspire.Hosting.Azure.AppContainers");
await auto.EnterAsync();
- await auto.WaitUntilTextAsync("(based on NuGet.config)", timeout: TimeSpan.FromSeconds(60));
- await auto.EnterAsync();
+
+ // Wait for either the version picker prompt or the command to complete
+ var showedVersionPicker = false;
+ await auto.WaitUntilAsync(s =>
+ {
+ if (s.ContainsText("(based on NuGet.config)"))
+ {
+ showedVersionPicker = true;
+ return true;
+ }
+ return s.ContainsText("was added to");
+ }, timeout: TimeSpan.FromSeconds(120), description: "aspire add version picker or completion");
+
+ // If the version picker appeared, press Enter to accept the default
+ if (showedVersionPicker)
+ {
+ await auto.EnterAsync();
+ }
await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromSeconds(180));
// Step 7: Modify apphost.cs with a short env name (fits within 24 chars with default naming)
diff --git a/tests/Aspire.Deployment.EndToEnd.Tests/AcaManagedRedisDeploymentTests.cs b/tests/Aspire.Deployment.EndToEnd.Tests/AcaManagedRedisDeploymentTests.cs
index 1caee7f4023..6eaf8fb1624 100644
--- a/tests/Aspire.Deployment.EndToEnd.Tests/AcaManagedRedisDeploymentTests.cs
+++ b/tests/Aspire.Deployment.EndToEnd.Tests/AcaManagedRedisDeploymentTests.cs
@@ -67,258 +67,188 @@ private async Task DeployStarterWithManagedRedisToAzureContainerAppsCore(Cancell
using var terminal = DeploymentE2ETestHelpers.CreateTestTerminal();
var pendingRun = terminal.RunAsync(cancellationToken);
- // Pattern searchers for aspire new interactive prompts
- var waitingForTemplateSelectionPrompt = new CellPatternSearcher()
- .FindPattern("> Starter App");
-
- var waitingForProjectNamePrompt = new CellPatternSearcher()
- .Find($"Enter the project name ({workspace.WorkspaceRoot.Name}): ");
-
- var waitingForOutputPathPrompt = new CellPatternSearcher()
- .Find("Enter the output path:");
-
- var waitingForUrlsPrompt = new CellPatternSearcher()
- .Find("Use *.dev.localhost URLs");
-
- var waitingForRedisPrompt = new CellPatternSearcher()
- .Find("Use Redis Cache");
-
- // Pattern searchers for aspire add prompts
- var waitingForAddVersionSelectionPrompt = new CellPatternSearcher()
- .Find("(based on NuGet.config)");
-
- var waitingForIntegrationSelectionPrompt = new CellPatternSearcher()
- .Find("Select an integration to add:");
-
- // Pattern searchers for deployment outcome
- var waitingForPipelineSucceeded = new CellPatternSearcher()
- .Find("PIPELINE SUCCEEDED");
-
- var waitingForPipelineFailed = new CellPatternSearcher()
- .Find("PIPELINE FAILED");
-
var counter = new SequenceCounter();
- var sequenceBuilder = new Hex1bTerminalInputSequenceBuilder();
+ var auto = new Hex1bTerminalAutomator(terminal, defaultTimeout: TimeSpan.FromSeconds(500));
// Step 1: Prepare environment
output.WriteLine("Step 1: Preparing environment...");
- sequenceBuilder.PrepareEnvironment(workspace, counter);
+ await auto.PrepareEnvironmentAsync(workspace, counter);
// Step 1b: Register Microsoft.Cache provider (required for Azure Managed Redis zone support)
output.WriteLine("Step 1b: Registering Microsoft.Cache resource provider...");
- sequenceBuilder
- .Type("az provider register --namespace Microsoft.Cache --wait")
- .Enter()
- .WaitForSuccessPrompt(counter, TimeSpan.FromMinutes(5));
+ await auto.TypeAsync("az provider register --namespace Microsoft.Cache --wait");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromMinutes(5));
// Step 2: Set up CLI environment (in CI)
if (DeploymentE2ETestHelpers.IsRunningInCI)
{
output.WriteLine("Step 2: Using pre-installed Aspire CLI from local build...");
- sequenceBuilder.SourceAspireCliEnvironment(counter);
+ await auto.SourceAspireCliEnvironmentAsync(counter);
}
// Step 3: Create starter project (React) with Redis enabled
output.WriteLine("Step 3: Creating React starter project with Redis...");
- sequenceBuilder.Type("aspire new")
- .Enter()
- .WaitUntil(s => waitingForTemplateSelectionPrompt.Search(s).Count > 0, TimeSpan.FromSeconds(60))
- .Key(Hex1b.Input.Hex1bKey.DownArrow) // Move to second template (Starter App ASP.NET Core/React)
- .Enter()
- .WaitUntil(s => waitingForProjectNamePrompt.Search(s).Count > 0, TimeSpan.FromSeconds(30))
- .Type(projectName)
- .Enter()
- .WaitUntil(s => waitingForOutputPathPrompt.Search(s).Count > 0, TimeSpan.FromSeconds(10))
- .Enter() // Accept default output path
- .WaitUntil(s => waitingForUrlsPrompt.Search(s).Count > 0, TimeSpan.FromSeconds(10))
- .Enter() // Select "No" for localhost URLs (default)
- .WaitUntil(s => waitingForRedisPrompt.Search(s).Count > 0, TimeSpan.FromSeconds(10))
- .Enter() // Select "Yes" for Redis Cache (first/default option)
- .WaitForSuccessPrompt(counter, TimeSpan.FromMinutes(5));
+ await auto.AspireNewAsync(projectName, counter, template: AspireTemplate.JsReact, useRedisCache: true);
// Step 4: Navigate to project directory
output.WriteLine("Step 4: Navigating to project directory...");
- sequenceBuilder
- .Type($"cd {projectName}")
- .Enter()
- .WaitForSuccessPrompt(counter);
+ await auto.TypeAsync($"cd {projectName}");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter);
// Step 5: Add Aspire.Hosting.Azure.AppContainers package
output.WriteLine("Step 5: Adding Azure Container Apps hosting package...");
- sequenceBuilder.Type("aspire add Aspire.Hosting.Azure.AppContainers")
- .Enter();
+ await auto.TypeAsync("aspire add Aspire.Hosting.Azure.AppContainers");
+ await auto.EnterAsync();
if (DeploymentE2ETestHelpers.IsRunningInCI)
{
- sequenceBuilder
- .WaitUntil(s => waitingForAddVersionSelectionPrompt.Search(s).Count > 0, TimeSpan.FromSeconds(60))
- .Enter(); // select first version (PR build)
+ await auto.WaitUntilTextAsync("(based on NuGet.config)", timeout: TimeSpan.FromSeconds(60));
+ await auto.EnterAsync(); // select first version (PR build)
}
- sequenceBuilder.WaitForSuccessPrompt(counter, TimeSpan.FromSeconds(180));
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromSeconds(180));
// Step 6: Add Aspire.Hosting.Azure.Redis package
output.WriteLine("Step 6: Adding Azure Redis hosting package...");
- sequenceBuilder.Type("aspire add Aspire.Hosting.Azure.Redis")
- .Enter();
+ await auto.TypeAsync("aspire add Aspire.Hosting.Azure.Redis");
+ await auto.EnterAsync();
if (DeploymentE2ETestHelpers.IsRunningInCI)
{
- sequenceBuilder
- .WaitUntil(s => waitingForAddVersionSelectionPrompt.Search(s).Count > 0, TimeSpan.FromSeconds(60))
- .Enter(); // select first version (PR build)
+ await auto.WaitUntilTextAsync("(based on NuGet.config)", timeout: TimeSpan.FromSeconds(60));
+ await auto.EnterAsync(); // select first version (PR build)
}
- sequenceBuilder.WaitForSuccessPrompt(counter, TimeSpan.FromSeconds(180));
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromSeconds(180));
// Step 7: Add Aspire.Microsoft.Azure.StackExchangeRedis to Server project for WithAzureAuthentication
// Use --prerelease because this package may only be available as a prerelease version
output.WriteLine("Step 7: Adding Azure StackExchange Redis client package to Server project...");
- sequenceBuilder
- .Type($"dotnet add {projectName}.Server/{projectName}.Server.csproj package Aspire.Microsoft.Azure.StackExchangeRedis --prerelease")
- .Enter()
- .WaitForSuccessPrompt(counter, TimeSpan.FromSeconds(120));
+ await auto.TypeAsync($"dotnet add {projectName}.Server/{projectName}.Server.csproj package Aspire.Microsoft.Azure.StackExchangeRedis --prerelease");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromSeconds(120));
// Step 8: Modify AppHost.cs - Replace AddRedis with AddAzureManagedRedis and add ACA environment
- sequenceBuilder.ExecuteCallback(() =>
- {
- var projectDir = Path.Combine(workspace.WorkspaceRoot.FullName, projectName);
- var appHostDir = Path.Combine(projectDir, $"{projectName}.AppHost");
- var appHostFilePath = Path.Combine(appHostDir, "AppHost.cs");
-
- output.WriteLine($"Modifying AppHost.cs at: {appHostFilePath}");
+ var projectDir = Path.Combine(workspace.WorkspaceRoot.FullName, projectName);
+ var appHostDir = Path.Combine(projectDir, $"{projectName}.AppHost");
+ var appHostFilePath = Path.Combine(appHostDir, "AppHost.cs");
- var content = File.ReadAllText(appHostFilePath);
+ output.WriteLine($"Modifying AppHost.cs at: {appHostFilePath}");
- // Replace AddRedis("cache") with AddAzureManagedRedis("cache")
- content = content.Replace(
- "builder.AddRedis(\"cache\")",
- "builder.AddAzureManagedRedis(\"cache\")");
+ var appHostContent = File.ReadAllText(appHostFilePath);
- // Insert the Azure Container App Environment before builder.Build().Run();
- var buildRunPattern = "builder.Build().Run();";
- var replacement = """
-// Add Azure Container App Environment for deployment
-builder.AddAzureContainerAppEnvironment("infra");
+ // Replace AddRedis("cache") with AddAzureManagedRedis("cache")
+ appHostContent = appHostContent.Replace(
+ "builder.AddRedis(\"cache\")",
+ "builder.AddAzureManagedRedis(\"cache\")");
-builder.Build().Run();
-""";
+ // Insert the Azure Container App Environment before builder.Build().Run();
+ appHostContent = appHostContent.Replace(
+ "builder.Build().Run();",
+ """
+ // Add Azure Container App Environment for deployment
+ builder.AddAzureContainerAppEnvironment("infra");
- content = content.Replace(buildRunPattern, replacement);
- File.WriteAllText(appHostFilePath, content);
+ builder.Build().Run();
+ """);
+ File.WriteAllText(appHostFilePath, appHostContent);
- output.WriteLine($"Modified AppHost.cs: replaced AddRedis with AddAzureManagedRedis, added ACA environment");
- output.WriteLine($"New content:\n{content}");
- });
+ output.WriteLine($"Modified AppHost.cs: replaced AddRedis with AddAzureManagedRedis, added ACA environment");
+ output.WriteLine($"New content:\n{appHostContent}");
// Step 9: Modify Server Program.cs - Add WithAzureAuthentication for Azure Managed Redis
- sequenceBuilder.ExecuteCallback(() =>
- {
- var projectDir = Path.Combine(workspace.WorkspaceRoot.FullName, projectName);
- var serverDir = Path.Combine(projectDir, $"{projectName}.Server");
- var programFilePath = Path.Combine(serverDir, "Program.cs");
+ var serverDir = Path.Combine(projectDir, $"{projectName}.Server");
+ var programFilePath = Path.Combine(serverDir, "Program.cs");
- output.WriteLine($"Modifying Server Program.cs at: {programFilePath}");
+ output.WriteLine($"Modifying Server Program.cs at: {programFilePath}");
- var content = File.ReadAllText(programFilePath);
+ var programContent = File.ReadAllText(programFilePath);
- // The React template uses AddRedisClientBuilder("cache").WithOutputCache()
- // Add .WithAzureAuthentication() to the chain
- content = content.Replace(
- ".WithOutputCache();",
- """
-.WithOutputCache()
- .WithAzureAuthentication();
-""");
+ // The React template uses AddRedisClientBuilder("cache").WithOutputCache()
+ // Add .WithAzureAuthentication() to the chain
+ programContent = programContent.Replace(
+ ".WithOutputCache();",
+ """
+ .WithOutputCache()
+ .WithAzureAuthentication();
+ """);
- File.WriteAllText(programFilePath, content);
+ File.WriteAllText(programFilePath, programContent);
- output.WriteLine($"Modified Server Program.cs: added WithAzureAuthentication to Redis client builder");
- output.WriteLine($"New content:\n{content}");
- });
+ output.WriteLine($"Modified Server Program.cs: added WithAzureAuthentication to Redis client builder");
+ output.WriteLine($"New content:\n{programContent}");
// Step 10: Navigate to AppHost project directory
output.WriteLine("Step 10: Navigating to AppHost directory...");
- sequenceBuilder
- .Type($"cd {projectName}.AppHost")
- .Enter()
- .WaitForSuccessPrompt(counter);
+ await auto.TypeAsync($"cd {projectName}.AppHost");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter);
// Step 11: Set environment variables for deployment
// Use eastus for Azure Managed Redis availability zone support
- sequenceBuilder.Type($"unset ASPIRE_PLAYGROUND && export AZURE__LOCATION=eastus && export AZURE__RESOURCEGROUP={resourceGroupName}")
- .Enter()
- .WaitForSuccessPrompt(counter);
+ await auto.TypeAsync($"unset ASPIRE_PLAYGROUND && export AZURE__LOCATION=eastus && export AZURE__RESOURCEGROUP={resourceGroupName}");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter);
// Step 12: Deploy to Azure Container Apps
// Azure Managed Redis provisioning typically takes ~5 minutes
output.WriteLine("Step 12: Starting Azure Container Apps deployment...");
- sequenceBuilder
- .Type("aspire deploy --clear-cache")
- .Enter()
- // Wait for pipeline to complete - detect both success and failure to fail fast
- .WaitUntil(s =>
- waitingForPipelineSucceeded.Search(s).Count > 0 ||
- waitingForPipelineFailed.Search(s).Count > 0,
- TimeSpan.FromMinutes(30))
- .ExecuteCallback(() =>
- {
- // This callback runs after the pipeline completes - we'll verify success in the prompt check
- output.WriteLine("Pipeline completed, checking result...");
- })
- .WaitForSuccessPrompt(counter, TimeSpan.FromMinutes(2));
+ await auto.TypeAsync("aspire deploy --clear-cache");
+ await auto.EnterAsync();
+ // Wait for pipeline to complete successfully
+ await auto.WaitUntilTextAsync("PIPELINE SUCCEEDED", timeout: TimeSpan.FromMinutes(30));
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromMinutes(2));
// Step 13: Verify deployed endpoints with retry
// Retry each endpoint for up to 3 minutes (18 attempts * 10 seconds)
output.WriteLine("Step 13: Verifying deployed endpoints...");
- sequenceBuilder
- .Type($"RG_NAME=\"{resourceGroupName}\" && " +
- "echo \"Resource group: $RG_NAME\" && " +
- "if ! az group show -n \"$RG_NAME\" &>/dev/null; then echo \"❌ Resource group not found\"; exit 1; fi && " +
- "urls=$(az containerapp list -g \"$RG_NAME\" --query \"[].properties.configuration.ingress.fqdn\" -o tsv 2>/dev/null | grep -v '\\.internal\\.') && " +
- "if [ -z \"$urls\" ]; then echo \"❌ No external container app endpoints found\"; exit 1; fi && " +
- "failed=0 && " +
- "for url in $urls; do " +
- "echo \"Checking https://$url...\"; " +
- "success=0; " +
- "for i in $(seq 1 18); do " +
- "STATUS=$(curl -s -o /dev/null -w \"%{http_code}\" \"https://$url\" --max-time 10 2>/dev/null); " +
- "if [ \"$STATUS\" = \"200\" ] || [ \"$STATUS\" = \"302\" ]; then echo \" ✅ $STATUS (attempt $i)\"; success=1; break; fi; " +
- "echo \" Attempt $i: $STATUS, retrying in 10s...\"; sleep 10; " +
- "done; " +
- "if [ \"$success\" -eq 0 ]; then echo \" ❌ Failed after 18 attempts\"; failed=1; fi; " +
- "done && " +
- "if [ \"$failed\" -ne 0 ]; then echo \"❌ One or more endpoint checks failed\"; exit 1; fi")
- .Enter()
- .WaitForSuccessPrompt(counter, TimeSpan.FromMinutes(5));
+ await auto.TypeAsync(
+ $"RG_NAME=\"{resourceGroupName}\" && " +
+ "echo \"Resource group: $RG_NAME\" && " +
+ "if ! az group show -n \"$RG_NAME\" &>/dev/null; then echo \"❌ Resource group not found\"; exit 1; fi && " +
+ "urls=$(az containerapp list -g \"$RG_NAME\" --query \"[].properties.configuration.ingress.fqdn\" -o tsv 2>/dev/null | grep -v '\\.internal\\.') && " +
+ "if [ -z \"$urls\" ]; then echo \"❌ No external container app endpoints found\"; exit 1; fi && " +
+ "failed=0 && " +
+ "for url in $urls; do " +
+ "echo \"Checking https://$url...\"; " +
+ "success=0; " +
+ "for i in $(seq 1 18); do " +
+ "STATUS=$(curl -s -o /dev/null -w \"%{http_code}\" \"https://$url\" --max-time 10 2>/dev/null); " +
+ "if [ \"$STATUS\" = \"200\" ] || [ \"$STATUS\" = \"302\" ]; then echo \" ✅ $STATUS (attempt $i)\"; success=1; break; fi; " +
+ "echo \" Attempt $i: $STATUS, retrying in 10s...\"; sleep 10; " +
+ "done; " +
+ "if [ \"$success\" -eq 0 ]; then echo \" ❌ Failed after 18 attempts\"; failed=1; fi; " +
+ "done && " +
+ "if [ \"$failed\" -ne 0 ]; then echo \"❌ One or more endpoint checks failed\"; exit 1; fi");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromMinutes(5));
// Step 14: Verify /api/weatherforecast returns valid JSON (exercises Redis output cache)
output.WriteLine("Step 14: Verifying /api/weatherforecast returns valid JSON...");
- sequenceBuilder
- .Type($"RG_NAME=\"{resourceGroupName}\" && " +
- // Get the server container app FQDN
- "SERVER_FQDN=$(az containerapp list -g \"$RG_NAME\" --query \"[?contains(name,'server')].properties.configuration.ingress.fqdn\" -o tsv 2>/dev/null | head -1) && " +
- "if [ -z \"$SERVER_FQDN\" ]; then echo \"❌ Server container app not found\"; exit 1; fi && " +
- "echo \"Server FQDN: $SERVER_FQDN\" && " +
- // Retry fetching /api/weatherforecast and validate JSON
- "success=0 && " +
- "for i in $(seq 1 18); do " +
- "RESPONSE=$(curl -s \"https://$SERVER_FQDN/api/weatherforecast\" --max-time 10 2>/dev/null) && " +
- "echo \"$RESPONSE\" | python3 -m json.tool > /dev/null 2>&1 && " +
- "echo \" ✅ Valid JSON response (attempt $i)\" && echo \"$RESPONSE\" | head -c 200 && echo && success=1 && break; " +
- "echo \" Attempt $i: not valid JSON yet, retrying in 10s...\"; sleep 10; " +
- "done && " +
- "if [ \"$success\" -eq 0 ]; then echo \"❌ /api/weatherforecast did not return valid JSON after 18 attempts\"; exit 1; fi")
- .Enter()
- .WaitForSuccessPrompt(counter, TimeSpan.FromMinutes(5));
+ await auto.TypeAsync(
+ $"RG_NAME=\"{resourceGroupName}\" && " +
+ // Get the server container app FQDN
+ "SERVER_FQDN=$(az containerapp list -g \"$RG_NAME\" --query \"[?contains(name,'server')].properties.configuration.ingress.fqdn\" -o tsv 2>/dev/null | head -1) && " +
+ "if [ -z \"$SERVER_FQDN\" ]; then echo \"❌ Server container app not found\"; exit 1; fi && " +
+ "echo \"Server FQDN: $SERVER_FQDN\" && " +
+ // Retry fetching /api/weatherforecast and validate JSON
+ "success=0 && " +
+ "for i in $(seq 1 18); do " +
+ "RESPONSE=$(curl -s \"https://$SERVER_FQDN/api/weatherforecast\" --max-time 10 2>/dev/null) && " +
+ "echo \"$RESPONSE\" | python3 -m json.tool > /dev/null 2>&1 && " +
+ "echo \" ✅ Valid JSON response (attempt $i)\" && echo \"$RESPONSE\" | head -c 200 && echo && success=1 && break; " +
+ "echo \" Attempt $i: not valid JSON yet, retrying in 10s...\"; sleep 10; " +
+ "done && " +
+ "if [ \"$success\" -eq 0 ]; then echo \"❌ /api/weatherforecast did not return valid JSON after 18 attempts\"; exit 1; fi");
+ await auto.EnterAsync();
+ await auto.WaitForSuccessPromptAsync(counter, TimeSpan.FromMinutes(5));
// Step 15: Exit terminal
- sequenceBuilder
- .Type("exit")
- .Enter();
+ await auto.TypeAsync("exit");
+ await auto.EnterAsync();
- var sequence = sequenceBuilder.Build();
- await sequence.ApplyAsync(terminal, cancellationToken);
await pendingRun;
var duration = DateTime.UtcNow - startTime;
diff --git a/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2EAutomatorHelpers.cs b/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2EAutomatorHelpers.cs
index 85fa9c741f4..b21aa6f0ff6 100644
--- a/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2EAutomatorHelpers.cs
+++ b/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2EAutomatorHelpers.cs
@@ -8,7 +8,6 @@ namespace Aspire.Deployment.EndToEnd.Tests.Helpers;
///
/// Extension methods for providing deployment E2E test patterns.
-/// These parallel the -based methods in .
///
internal static class DeploymentE2EAutomatorHelpers
{
diff --git a/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2ETestHelpers.cs b/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2ETestHelpers.cs
index bf5bf70a780..1cf09025a73 100644
--- a/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2ETestHelpers.cs
+++ b/tests/Aspire.Deployment.EndToEnd.Tests/Helpers/DeploymentE2ETestHelpers.cs
@@ -2,15 +2,12 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System.Runtime.CompilerServices;
-using Aspire.Cli.Tests.Utils;
using Hex1b;
-using Hex1b.Automation;
namespace Aspire.Deployment.EndToEnd.Tests.Helpers;
///
/// Helper methods for creating and managing Hex1b terminal sessions for deployment testing.
-/// Extends the patterns from CLI E2E tests with deployment-specific functionality.
///
internal static class DeploymentE2ETestHelpers
{
@@ -106,72 +103,4 @@ internal static string GetTestResultsRecordingPath(string testName)
{
return Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
}
-
- ///
- /// Prepares the terminal environment with a custom prompt for command tracking.
- ///
- internal static Hex1bTerminalInputSequenceBuilder PrepareEnvironment(
- this Hex1bTerminalInputSequenceBuilder builder,
- TemporaryWorkspace workspace,
- SequenceCounter counter)
- {
- var waitingForInputPattern = new CellPatternSearcher()
- .Find("b").RightUntil("$").Right(' ').Right(' ');
-
- builder.WaitUntil(s => waitingForInputPattern.Search(s).Count > 0, TimeSpan.FromSeconds(10))
- .Wait(500);
-
- // Bash prompt setup with command tracking
- const string promptSetup = "CMDCOUNT=0; PROMPT_COMMAND='s=$?;((CMDCOUNT++));PS1=\"[$CMDCOUNT $([ $s -eq 0 ] && echo OK || echo ERR:$s)] \\$ \"'";
- builder.Type(promptSetup).Enter();
-
- return builder.WaitForSuccessPrompt(counter)
- .Type($"cd {workspace.WorkspaceRoot.FullName}").Enter()
- .WaitForSuccessPrompt(counter);
- }
-
- ///
- /// Installs the Aspire CLI from PR build artifacts.
- ///
- internal static Hex1bTerminalInputSequenceBuilder InstallAspireCliFromPullRequest(
- this Hex1bTerminalInputSequenceBuilder builder,
- int prNumber,
- SequenceCounter counter)
- {
- var command = $"curl -fsSL https://raw.githubusercontent.com/microsoft/aspire/main/eng/scripts/get-aspire-cli-pr.sh | bash -s -- {prNumber}";
-
- return builder
- .Type(command)
- .Enter()
- .WaitForSuccessPromptFailFast(counter, TimeSpan.FromSeconds(300));
- }
-
- ///
- /// Installs the latest GA (release quality) Aspire CLI.
- ///
- internal static Hex1bTerminalInputSequenceBuilder InstallAspireCliRelease(
- this Hex1bTerminalInputSequenceBuilder builder,
- SequenceCounter counter)
- {
- var command = "curl -fsSL https://aka.ms/aspire/get/install.sh | bash -s -- --quality release";
-
- return builder
- .Type(command)
- .Enter()
- .WaitForSuccessPromptFailFast(counter, TimeSpan.FromSeconds(300));
- }
-
- ///
- /// Configures the PATH and environment variables for the Aspire CLI.
- ///
- internal static Hex1bTerminalInputSequenceBuilder SourceAspireCliEnvironment(
- this Hex1bTerminalInputSequenceBuilder builder,
- SequenceCounter counter)
- {
- return builder
- .Type("export PATH=~/.aspire/bin:$PATH ASPIRE_PLAYGROUND=true DOTNET_CLI_TELEMETRY_OPTOUT=true DOTNET_SKIP_FIRST_TIME_EXPERIENCE=true DOTNET_GENERATE_ASPNET_CERTIFICATE=false")
- .Enter()
- .WaitForSuccessPrompt(counter);
- }
-
}
diff --git a/tests/Aspire.Deployment.EndToEnd.Tests/TypeScriptExpressDeploymentTests.cs b/tests/Aspire.Deployment.EndToEnd.Tests/TypeScriptExpressDeploymentTests.cs
index 1c9cf2a26ba..df47e4099a7 100644
--- a/tests/Aspire.Deployment.EndToEnd.Tests/TypeScriptExpressDeploymentTests.cs
+++ b/tests/Aspire.Deployment.EndToEnd.Tests/TypeScriptExpressDeploymentTests.cs
@@ -17,6 +17,7 @@ public sealed class TypeScriptExpressDeploymentTests(ITestOutputHelper output)
private static readonly TimeSpan s_testTimeout = TimeSpan.FromMinutes(40);
[Fact]
+ [ActiveIssue("https://github.com/microsoft/aspire/issues/15222")]
public async Task DeployTypeScriptExpressTemplateToAzureContainerApps()
{
using var cts = new CancellationTokenSource(s_testTimeout);
diff --git a/tests/Shared/Hex1bAutomatorTestHelpers.cs b/tests/Shared/Hex1bAutomatorTestHelpers.cs
index ffc9fa49d2a..76752137b48 100644
--- a/tests/Shared/Hex1bAutomatorTestHelpers.cs
+++ b/tests/Shared/Hex1bAutomatorTestHelpers.cs
@@ -135,26 +135,24 @@ await auto.WaitUntilAsync(s =>
await auto.WaitAsync(500);
- // Type 'n' + Enter unconditionally:
- // - Agent init: declines the prompt, CLI exits, success prompt appears
- // - No agent init: 'n' runs at bash (command not found), produces error prompt
- await auto.TypeAsync("n");
- await auto.EnterAsync();
-
- // Wait for the aspire command's success prompt
- await auto.WaitUntilAsync(s =>
+ if (agentInitFound)
{
- var successSearcher = new CellPatternSearcher()
- .FindPattern(counter.Value.ToString())
- .RightText(" OK] $ ");
- return successSearcher.Search(s).Count > 0;
- }, timeout: effectiveTimeout, description: $"success prompt [{counter.Value} OK] $ after agent init");
+ // Decline the agent init prompt, CLI exits, success prompt appears
+ await auto.TypeAsync("n");
+ await auto.EnterAsync();
- // Increment counter correctly for both cases
- if (!agentInitFound)
- {
- counter.Increment();
+ // Wait for the aspire command's success prompt after agent init completes
+ await auto.WaitUntilAsync(s =>
+ {
+ var successSearcher = new CellPatternSearcher()
+ .FindPattern(counter.Value.ToString())
+ .RightText(" OK] $ ");
+ return successSearcher.Search(s).Count > 0;
+ }, timeout: effectiveTimeout, description: $"success prompt [{counter.Value} OK] $ after agent init");
}
+
+ // The success prompt from the aspire command (init/new) has been detected.
+ // Increment once for that command.
counter.Increment();
}