-
Notifications
You must be signed in to change notification settings - Fork 420
196 lines (168 loc) · 7.49 KB
/
test-automation-v2.yml
File metadata and controls
196 lines (168 loc) · 7.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
---
# Reusable workflow: runs the Playwright e2e suite against a deployed
# "Code Modernization" instance. Tests are retried up to three times
# (with 30s / 60s back-off) before the job is marked failed.
name: Test Automation Code Modernization - v2

permissions:
  contents: read
  actions: read

on:
  workflow_call:
    inputs:
      TEST_URL:
        required: true
        type: string
        description: "Web URL for code modernization"
      TEST_SUITE:
        required: false
        type: string
        default: "GoldenPath-Testing"
        description: "Test suite to run: 'Smoke-Testing', 'GoldenPath-Testing' "
    # Declare the secrets this reusable workflow consumes. Previously they
    # were referenced without being declared, which only works when the
    # caller uses `secrets: inherit`; declaring them keeps `inherit` callers
    # working and also allows explicit per-secret passing.
    secrets:
      AZURE_CLIENT_ID:
        required: true
      AZURE_CLIENT_SECRET:
        required: true
      AZURE_TENANT_ID:
        required: true
      AZURE_SUBSCRIPTION_ID:
        required: true
    outputs:
      TEST_SUCCESS:
        description: "Whether tests passed"
        value: ${{ jobs.test.outputs.TEST_SUCCESS }}
      TEST_REPORT_URL:
        description: "URL to test report artifact"
        value: ${{ jobs.test.outputs.TEST_REPORT_URL }}

# Workflow-level env is exported to every `run:` shell, so scripts below
# reference $url / $test_suite instead of interpolating `${{ ... }}` into
# the script text (avoids template/script injection via the TEST_URL input).
env:
  url: ${{ inputs.TEST_URL }}
  accelerator_name: "Code Modernization"
  test_suite: ${{ inputs.TEST_SUITE }}

jobs:
  test:
    runs-on: ubuntu-latest
    outputs:
      # Overall success: any one of the three attempts passed.
      TEST_SUCCESS: ${{ steps.test1.outcome == 'success' || steps.test2.outcome == 'success' || steps.test3.outcome == 'success' }}
      TEST_REPORT_URL: ${{ steps.upload_report.outputs.artifact-url }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.13'

      - name: Login to Azure
        # Secrets are passed through step env rather than interpolated into
        # the command line, so they are never substituted into the script text.
        env:
          AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
          AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
          AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
          AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
        run: |
          az login --service-principal -u "$AZURE_CLIENT_ID" -p "$AZURE_CLIENT_SECRET" --tenant "$AZURE_TENANT_ID"
          az account set --subscription "$AZURE_SUBSCRIPTION_ID"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r tests/e2e-test/requirements.txt

      - name: Ensure browsers are installed
        run: python -m playwright install --with-deps chromium

      - name: Validate URL
        run: |
          if [ -z "$url" ]; then
            echo "ERROR: No URL provided for testing"
            exit 1
          fi
          echo "Testing URL: $url"
          echo "Test Suite: $test_suite"

      - name: Wait for Application to be Ready
        # Poll the target URL every 30s, up to 10 attempts (~5 minutes).
        run: |
          echo "Waiting for application to be ready at $url "
          max_attempts=10
          attempt=1
          while [ "$attempt" -le "$max_attempts" ]; do
            echo "Attempt $attempt: Checking if application is ready..."
            if curl -f -s "$url" > /dev/null; then
              echo "Application is ready!"
              break
            fi
            if [ "$attempt" -eq "$max_attempts" ]; then
              echo "Application is not ready after $max_attempts attempts"
              exit 1
            fi
            echo "Application not ready, waiting 30 seconds..."
            sleep 30
            attempt=$((attempt + 1))
          done

      # Attempt 1 — continue-on-error so the retry steps can react to the outcome.
      - name: Run tests(1)
        id: test1
        run: |
          if [ "$test_suite" == "GoldenPath-Testing" ]; then
            xvfb-run pytest -m gp --html=report/report.html --self-contained-html
          else
            xvfb-run pytest --html=report/report.html --self-contained-html
          fi
        working-directory: tests/e2e-test
        continue-on-error: true

      - name: Sleep for 30 seconds
        if: ${{ steps.test1.outcome == 'failure' }}
        run: sleep 30s
        shell: bash

      # Attempt 2 — only runs if attempt 1 failed.
      - name: Run tests(2)
        id: test2
        if: ${{ steps.test1.outcome == 'failure' }}
        run: |
          if [ "$test_suite" == "GoldenPath-Testing" ]; then
            xvfb-run pytest -m gp --html=report/report.html --self-contained-html
          else
            xvfb-run pytest --html=report/report.html --self-contained-html
          fi
        working-directory: tests/e2e-test
        continue-on-error: true

      - name: Sleep for 60 seconds
        if: ${{ steps.test2.outcome == 'failure' }}
        run: sleep 60s
        shell: bash

      # Attempt 3 — final retry; no continue-on-error, so a failure here
      # fails the job (report upload/summary below still run via their `if`s).
      - name: Run tests(3)
        id: test3
        if: ${{ steps.test2.outcome == 'failure' }}
        run: |
          if [ "$test_suite" == "GoldenPath-Testing" ]; then
            xvfb-run pytest -m gp --html=report/report.html --self-contained-html
          else
            xvfb-run pytest --html=report/report.html --self-contained-html
          fi
        working-directory: tests/e2e-test

      - name: Upload test report
        id: upload_report
        uses: actions/upload-artifact@v7
        if: ${{ !cancelled() }}
        with:
          name: test-report
          path: |
            tests/e2e-test/report/*
            tests/e2e-test/screenshots/*

      - name: Generate E2E Test Summary
        if: always()
        run: |
          # Determine test suite type for title
          if [ "$test_suite" == "GoldenPath-Testing" ]; then
            echo "## 🧪 E2E Test Job Summary : Golden Path Testing" >> "$GITHUB_STEP_SUMMARY"
          else
            echo "## 🧪 E2E Test Job Summary : Smoke Testing" >> "$GITHUB_STEP_SUMMARY"
          fi
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "| Field | Value |" >> "$GITHUB_STEP_SUMMARY"
          echo "|-------|--------|" >> "$GITHUB_STEP_SUMMARY"
          # Determine overall test result (step outcomes are trusted values,
          # so the `${{ }}` expression here is safe to interpolate).
          OVERALL_SUCCESS="${{ steps.test1.outcome == 'success' || steps.test2.outcome == 'success' || steps.test3.outcome == 'success' }}"
          if [[ "$OVERALL_SUCCESS" == "true" ]]; then
            echo "| **Job Status** | ✅ Success |" >> "$GITHUB_STEP_SUMMARY"
          else
            echo "| **Job Status** | ❌ Failed |" >> "$GITHUB_STEP_SUMMARY"
          fi
          echo "| **Target URL** | [$url]($url) |" >> "$GITHUB_STEP_SUMMARY"
          echo "| **Test Suite** | \`$test_suite\` |" >> "$GITHUB_STEP_SUMMARY"
          echo "| **Test Report** | [Download Artifact](${{ steps.upload_report.outputs.artifact-url }}) |" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "### 📋 Test Execution Details" >> "$GITHUB_STEP_SUMMARY"
          echo "| Attempt | Status | Notes |" >> "$GITHUB_STEP_SUMMARY"
          echo "|---------|--------|-------|" >> "$GITHUB_STEP_SUMMARY"
          echo "| **Test Run 1** | ${{ steps.test1.outcome == 'success' && '✅ Passed' || '❌ Failed' }} | Initial test execution |" >> "$GITHUB_STEP_SUMMARY"
          if [[ "${{ steps.test1.outcome }}" == "failure" ]]; then
            echo "| **Test Run 2** | ${{ steps.test2.outcome == 'success' && '✅ Passed' || steps.test2.outcome == 'failure' && '❌ Failed' || '⏸️ Skipped' }} | Retry after 30s delay |" >> "$GITHUB_STEP_SUMMARY"
          fi
          if [[ "${{ steps.test2.outcome }}" == "failure" ]]; then
            echo "| **Test Run 3** | ${{ steps.test3.outcome == 'success' && '✅ Passed' || steps.test3.outcome == 'failure' && '❌ Failed' || '⏸️ Skipped' }} | Final retry after 60s delay |" >> "$GITHUB_STEP_SUMMARY"
          fi
          echo "" >> "$GITHUB_STEP_SUMMARY"
          if [[ "$OVERALL_SUCCESS" == "true" ]]; then
            echo "### ✅ Test Results" >> "$GITHUB_STEP_SUMMARY"
            echo "- End-to-end tests completed successfully" >> "$GITHUB_STEP_SUMMARY"
            echo "- Application is functioning as expected" >> "$GITHUB_STEP_SUMMARY"
          else
            echo "### ❌ Test Results" >> "$GITHUB_STEP_SUMMARY"
            echo "- All test attempts failed" >> "$GITHUB_STEP_SUMMARY"
            echo "- Check the e2e-test/test job for detailed error information" >> "$GITHUB_STEP_SUMMARY"
          fi