---
# E2E test workflow: builds the app stack, boots it via docker compose,
# runs Cypress specs in parallel (one xvfb display per spec chunk via
# cypress-split), aggregates mochawesome JSON reports into a step summary,
# and uploads failure artifacts from a dependent job.
name: test:e2e

on: push

jobs:
  cypress-e2e-tests:
    name: Run E2E Tests
    runs-on: ubuntu-latest
    # Job outputs are evaluated after all steps, so report-results is valid here.
    outputs:
      tests-failed: ${{ steps.cypress-tests.outcome == 'failure' || steps.report-results.outputs.test_failed == 'true' }}
      tests-outcome: ${{ steps.cypress-tests.outcome }}
      test_failed: ${{ steps.report-results.outputs.test_failed }}
    steps:
      - name: Checkout code
        uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0

      - name: Set up Node.js
        uses: actions/setup-node@89d709d423dc495668cd762a18dd4a070611be3f # v5.0.0
        with:
          node-version-file: ./.tool-versions
          cache: 'npm'
          cache-dependency-path: |
            app/package-lock.json
            lib/package-lock.json
            cypress/package-lock.json

      - name: Build Library
        run: |
          npm ci
          npm run build
        working-directory: ./lib

      - name: Build Frontend
        run: |
          cp .env.dist .env
          sed -i '/VITE_DIRECTUS_ADMIN_ROLE=/c\VITE_DIRECTUS_ADMIN_ROLE=8141dee8-8e10-48d0-baf1-680aea271298' .env
          npm ci
          npm run build
        working-directory: ./app

      - name: Clean Database State
        run: |
          # Remove any existing database data to ensure fresh state
          sudo rm -rf ./data/database
          mkdir -p ./data/uploads
          sudo chmod 777 -R ./data

      - name: Build and start all Containers
        run: docker compose up -d

      - name: Wait for Directus to be Ready
        run: |
          echo "Waiting for Directus API to be ready..."
          timeout 120 bash -c 'until curl -f http://localhost:8055/server/health; do echo "Waiting for Directus..."; sleep 5; done'
          echo "Directus is ready!"

      - name: Seed Backend
        run: |
          mkdir -p ./data/uploads
          sudo chmod 777 -R ./data
          cd backend && ./seed.sh
        working-directory: ./

      - name: Wait for Application to be Ready
        run: |
          echo "Waiting for application to be ready..."
          timeout 300 bash -c 'until curl -f http://localhost:8080/login; do sleep 5; done'
          echo "Application is ready!"

      - name: Health Check
        run: |
          echo "Frontend health check:"
          curl -f http://localhost:8080/login || exit 1
          echo "Backend health check:"
          curl -f http://localhost:8055/server/health || exit 1

      - name: Install Cypress Dependencies
        run: npm ci
        working-directory: ./cypress

      - name: Setup Display Environment for Parallel Tests
        run: |
          echo "Setting up display environment for parallel Cypress execution..."
          # Kill any existing Xvfb processes to ensure clean state
          sudo pkill Xvfb || true
          # Remove any existing lock files
          sudo rm -f /tmp/.X*-lock || true
          # Ensure xvfb is available
          which xvfb-run || (sudo apt-get update && sudo apt-get install -y xvfb)
          echo "Display environment setup complete"

      - name: Run E2E Tests
        id: cypress-tests
        run: |
          # One chunk per spec file; each chunk gets its own X display so
          # parallel xvfb-run invocations do not collide.
          SPEC_COUNT=$(find e2e -name "*.cy.ts" | wc -l)
          echo "Running $SPEC_COUNT test chunks in parallel with display isolation"

          # Array to store background process PIDs
          declare -a pids=()

          # Launch parallel processes with isolated displays (:100, :101, ...)
          for i in $(seq 0 $((SPEC_COUNT-1))); do
            echo "Starting Cypress chunk $((i + 1))/$SPEC_COUNT on display :$((100 + i))"
            (
              SPLIT="$SPEC_COUNT" SPLIT_INDEX="$i" SPLIT_SUMMARY=false \
                xvfb-run --server-num="$((100 + i))" \
                --server-args="-screen 0 1280x720x24 -ac +extension GLX +render -noreset" \
                npx cypress run --e2e --browser chromium
            ) &
            pids+=($!)
          done

          # Wait for all background processes and collect exit codes
          exit_code=0
          for pid in "${pids[@]}"; do
            if ! wait "$pid"; then
              echo "Process $pid failed"
              exit_code=1
            fi
          done

          echo "All parallel test processes completed"

          # Exit with failure if any test failed
          if [ $exit_code -ne 0 ]; then
            echo "❌ Some tests failed"
            exit 1
          else
            echo "✅ All tests passed"
          fi
        working-directory: ./cypress
        env:
          # Disable individual cypress-split summaries to avoid conflicts.
          # Quoted: env values are strings; a bare boolean would be retyped.
          SPLIT_SUMMARY: "false"

      - name: Merge Test Reports
        if: always()
        run: |
          if [ -d "reports/json" ] && [ "$(ls -A reports/json)" ]; then
            npm run report:merge
          else
            echo "No test reports to merge"
          fi
        working-directory: ./cypress

      - name: Generate HTML Report
        if: always()
        run: |
          if [ -f "reports/json/merged-report.json" ]; then
            npm run report:generate
          else
            echo "No merged report to generate HTML from"
          fi
        working-directory: ./cypress

      - name: Create Test Summary
        if: always()
        run: |
          echo "# Cypress E2E Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Debug: show what report files exist.
          # Fix: the listing commands are grouped and redirected into the
          # summary; previously they went to stdout, leaving the fence empty.
          echo "## Debug Information" >> $GITHUB_STEP_SUMMARY
          echo "**Working Directory:** $(pwd)" >> $GITHUB_STEP_SUMMARY
          echo "**Reports Directory Structure:**" >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          {
            ls -la reports/ 2>/dev/null || echo "No reports directory found"
            echo ""
            ls -la reports/json/ 2>/dev/null || echo "No reports/json directory found"
            echo ""
            echo "=== JSON files in reports/json/ ==="
            find reports/json/ -name "*.json" 2>/dev/null || echo "No JSON files found in reports/json/"
            echo ""
            echo "=== Count of JSON files ==="
            find reports/json/ -name "*.json" 2>/dev/null | wc -l || echo "0"
          } >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Count total specs (this block was accidentally duplicated before;
          # emitted exactly once now)
          TOTAL_SPECS=$(find e2e -name "*.cy.ts" | wc -l)
          echo "**Total Specs:** $TOTAL_SPECS" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Check actual test results from JSON reports.
          # head -1 keeps only the top-level stats match — grep -o may match
          # multiple times per file, which would break the arithmetic below.
          TOTAL_FAILURES=0
          if [ -d "reports/json" ] && [ "$(ls -A reports/json 2>/dev/null)" ]; then
            for json_file in reports/json/*.json; do
              if [ -f "$json_file" ]; then
                failures=$(grep -o '"failures":[0-9]*' "$json_file" | cut -d':' -f2 | head -1 || echo "0")
                TOTAL_FAILURES=$((TOTAL_FAILURES + failures))
              fi
            done
          fi

          # Display status based on actual test results
          if [ "$TOTAL_FAILURES" -eq 0 ]; then
            echo "## ✅ All Tests Passed" >> $GITHUB_STEP_SUMMARY
          else
            echo "## ❌ Tests Failed ($TOTAL_FAILURES total failures)" >> $GITHUB_STEP_SUMMARY
          fi
          echo "" >> $GITHUB_STEP_SUMMARY

          # Always show spec details table
          echo "## 📊 Test Details" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Spec File | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-----------|--------|" >> $GITHUB_STEP_SUMMARY

          # List all spec files and their actual status from JSON reports
          if [ -d "reports/json" ] && [ "$(ls -A reports/json 2>/dev/null)" ]; then
            # Temporary file maps "spec_name:status" for the table below
            temp_results=$(mktemp)

            for json_file in reports/json/*.json; do
              if [ -f "$json_file" ]; then
                # Try multiple patterns to extract spec file name from JSON
                # Pattern 1: "file" field
                spec_file=$(grep -o '"file":"[^"]*"' "$json_file" | cut -d'"' -f4 | head -1)
                # Pattern 2: "spec" field if "file" not found
                if [ -z "$spec_file" ]; then
                  spec_file=$(grep -o '"spec":"[^"]*"' "$json_file" | cut -d'"' -f4 | head -1)
                fi
                # Pattern 3: fall back to the report file name (mochawesome pattern)
                if [ -z "$spec_file" ]; then
                  spec_file=$(basename "$json_file" .json | sed 's/mochawesome_[0-9]*_//')
                fi

                if [ -n "$spec_file" ]; then
                  spec_name=$(basename "$spec_file")
                  # Check if this spec has any failures
                  failures=$(grep -o '"failures":[0-9]*' "$json_file" | cut -d':' -f2 | head -1 || echo "0")
                  passes=$(grep -o '"passes":[0-9]*' "$json_file" | cut -d':' -f2 | head -1 || echo "0")
                  if [ "$failures" -gt 0 ]; then
                    echo "$spec_name:❌ Failed ($failures failures)" >> "$temp_results"
                  elif [ "$passes" -gt 0 ]; then
                    echo "$spec_name:✅ Passed ($passes tests)" >> "$temp_results"
                  else
                    echo "$spec_name:⚠️ No tests found" >> "$temp_results"
                  fi
                fi
              fi
            done

            # List all spec files with their actual status
            find e2e -name "*.cy.ts" | sort | while read spec; do
              spec_name=$(basename "$spec")
              result=$(grep "^$spec_name:" "$temp_results" | cut -d':' -f2- || echo "⚠️ No report found")
              echo "| $spec_name | $result |" >> $GITHUB_STEP_SUMMARY
            done

            # Clean up temporary file
            rm -f "$temp_results"
          else
            # Fallback if no JSON reports available
            find e2e -name "*.cy.ts" | sort | while read spec; do
              spec_name=$(basename "$spec")
              echo "| $spec_name | ⚠️ No reports available |" >> $GITHUB_STEP_SUMMARY
            done
          fi

          # Add test counts from JSON reports if available
          if [ -d "reports/json" ] && [ "$(ls -A reports/json 2>/dev/null)" ]; then
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "## Test Counts" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY

            TOTAL_TESTS=0
            PASSED_TESTS=0
            FAILED_TESTS=0

            # Extract test counts using basic tools (avoiding jq dependency)
            for json_file in reports/json/*.json; do
              if [ -f "$json_file" ]; then
                TESTS=$(grep -o '"tests":[0-9]*' "$json_file" | cut -d':' -f2 | head -1 || echo "0")
                PASSES=$(grep -o '"passes":[0-9]*' "$json_file" | cut -d':' -f2 | head -1 || echo "0")
                FAILURES=$(grep -o '"failures":[0-9]*' "$json_file" | cut -d':' -f2 | head -1 || echo "0")
                TOTAL_TESTS=$((TOTAL_TESTS + TESTS))
                PASSED_TESTS=$((PASSED_TESTS + PASSES))
                FAILED_TESTS=$((FAILED_TESTS + FAILURES))
              fi
            done

            echo "- **Total Tests:** $TOTAL_TESTS" >> $GITHUB_STEP_SUMMARY
            echo "- **Passed:** $PASSED_TESTS ✅" >> $GITHUB_STEP_SUMMARY
            echo "- **Failed:** $FAILED_TESTS ❌" >> $GITHUB_STEP_SUMMARY
          fi

          # Add links to artifacts based on actual test results
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## Reports" >> $GITHUB_STEP_SUMMARY
          if [ "$TOTAL_FAILURES" -gt 0 ]; then
            echo "- HTML Report: Will be available in artifacts" >> $GITHUB_STEP_SUMMARY
            echo "- Screenshots: Will be available in artifacts" >> $GITHUB_STEP_SUMMARY
            echo "- JSON Reports: Available for detailed analysis" >> $GITHUB_STEP_SUMMARY
          else
            echo "- ✅ All tests passed - no failure artifacts needed" >> $GITHUB_STEP_SUMMARY
            echo "- JSON Reports: Available for test verification" >> $GITHUB_STEP_SUMMARY
          fi
        working-directory: ./cypress

      - name: Debug Before Upload
        if: always()
        run: |
          echo "=== Current Working Directory ==="
          pwd
          echo ""
          echo "=== Cypress Directory Structure ==="
          ls -la cypress/ || echo "No cypress directory"
          echo ""
          echo "=== Cypress Reports Directory ==="
          ls -la cypress/reports/ || echo "No cypress/reports directory"
          echo ""
          echo "=== Find all files in cypress/reports ==="
          find cypress/reports/ -type f 2>/dev/null || echo "No files in cypress/reports"
          echo ""
          echo "=== JSON files specifically ==="
          find cypress/reports/ -name "*.json" 2>/dev/null || echo "No JSON files found"

      - name: Upload Test Artifacts (Temporary)
        # Fix: the original condition also checked
        # steps.report-results.outputs.test_failed, but report-results runs
        # AFTER this step, so that clause was always empty — removed.
        if: always() && steps.cypress-tests.outcome == 'failure'
        uses: actions/upload-artifact@2848b2cda0e5190984587ec6bb1f36730ca78d50 # v4.6.2
        with:
          name: e2e-test-reports-temp-${{ github.run_id }}-${{ github.run_attempt }}
          path: |
            cypress/reports/
          retention-days: 1
          if-no-files-found: warn

      - name: Report Test Results
        id: report-results
        if: always()
        run: |
          if [ "${{ steps.cypress-tests.outcome }}" = "failure" ]; then
            echo "❌ Tests failed - artifacts will be uploaded by dependent job"
            echo "test_failed=true" >> $GITHUB_OUTPUT
            exit 1
          else
            echo "✅ All tests passed successfully"
            echo "test_failed=false" >> $GITHUB_OUTPUT
          fi

  upload-artifacts:
    name: Upload Test Artifacts
    runs-on: ubuntu-latest
    needs: cypress-e2e-tests
    # Only run if the test job failed (not cancelled or skipped)
    if: always() && (needs.cypress-e2e-tests.result == 'failure' || needs.cypress-e2e-tests.outputs.test_failed == 'true')
    steps:
      - name: Download Test Reports
        uses: actions/download-artifact@4a24838f3d5601fd639834081e118c2995d51e1c # v5.0.0
        with:
          name: e2e-test-reports-temp-${{ github.run_id }}-${{ github.run_attempt }}
          path: ./cypress-reports

      - name: Debug Downloaded Artifacts
        run: |
          echo "=== Downloaded Artifact Structure ==="
          pwd
          ls -la .
          echo ""
          echo "=== cypress-reports directory ==="
          ls -la cypress-reports/ || echo "cypress-reports directory not found"
          echo ""
          echo "=== Find all files ==="
          find cypress-reports/ -type f 2>/dev/null || echo "No files found in cypress-reports"
          echo ""
          echo "=== Find JSON files specifically ==="
          find cypress-reports/ -name "*.json" 2>/dev/null || echo "No JSON files found"

      - name: Upload Test Artifacts
        uses: actions/upload-artifact@2848b2cda0e5190984587ec6bb1f36730ca78d50 # v4.6.2
        with:
          name: e2e-test-failures-${{ github.run_id }}-${{ github.run_attempt }}
          path: |
            cypress-reports/reports/
          retention-days: 30
          if-no-files-found: warn

      - name: Create Failure Summary
        run: |
          echo "# 🚨 E2E Test Failure Report" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "The E2E tests failed. Artifacts have been uploaded for analysis." >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## 📦 Available Artifacts" >> $GITHUB_STEP_SUMMARY
          echo "- **Screenshots**: Visual evidence of failed tests" >> $GITHUB_STEP_SUMMARY
          echo "- **HTML Report**: Detailed test results with interactive features" >> $GITHUB_STEP_SUMMARY
          echo "- **JSON Reports**: Raw test data for further analysis" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## 🔍 Next Steps" >> $GITHUB_STEP_SUMMARY
          echo "1. Download the artifacts from this workflow run" >> $GITHUB_STEP_SUMMARY
          echo "2. Open the HTML report to see detailed test results" >> $GITHUB_STEP_SUMMARY
          echo "3. Review screenshots to understand visual failures" >> $GITHUB_STEP_SUMMARY
          echo "4. Check the test logs in the main job for additional context" >> $GITHUB_STEP_SUMMARY

      - name: Cleanup Temporary Artifacts
        if: always()
        continue-on-error: true
        run: |
          # Note: GitHub Actions will automatically clean up temporary artifacts
          # This step is mainly for documentation purposes
          echo "Temporary artifacts will be automatically cleaned up by GitHub Actions"