# tests_and_reports.yml
name: "Test and Deploy Reports"
on:
  # Run on PRs; by default, this runs when the PR activity type is `opened`, `synchronize`, or `reopened`
# (https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)
pull_request: # empty object will use defaults
# Run on updates to `main` branch
push:
branches:
- "main"
permissions:
contents: read
# Allow us to write comments on PRs
pull-requests: write
env:
# Preview URLs
VERCEL_META_PREVIEW_URL: meta-site-${{ github.event.number }}-ldaf.vercel.app
VERCEL_STORYBOOK_PREVIEW_URL: storybook-${{ github.event.number }}-ldaf.vercel.app
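  # `github.event.number` is the PR number, so each PR gets a stable preview alias;
  # it is empty on pushes to `main`, where these preview URLs are not used.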
# Names for report paths on Vercel Meta-Site
UNIT_PATH: unit-test-coverage
E2E_PATH: e2e-test-report
BUNDLE_VISUALIZER_PATH: bundle-visualizer
LIGHTHOUSE_PATH: lighthouse
  # Evaluate this once; used to determine whether to post PR comments, deploy to Vercel production, etc.
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
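  # Note that env values are strings, so conditions below wrap this in fromJSON() to get a real boolean.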
jobs:
lint-and-unit-tests:
name: "ESLint, TypeScript Check, and Unit Tests"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: ".nvmrc"
cache: npm
- name: Install
run: npm ci
- name: Prettier & ESLint
run: npm run lint
- name: svelte-check (diagnostic checks for unused CSS, Svelte A11y hints, and JS/TS errors)
run: npm run check
- name: Unit tests
run: npm run test:unit:coverage
- name: Report code coverage to PR in comment
if: ${{ !fromJSON(env.IS_MAIN) }}
        # https://github.com/marketplace/actions/code-coverage-report
# Use a hash instead of version to support delete-old-comments field:
        # https://github.com/romeovs/lcov-reporter-action/issues/47#issuecomment-1362606540
uses: romeovs/lcov-reporter-action@4cf015aa4afa87b78238301f1e3dc140ea0e1ec6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
delete-old-comments: true
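      # Artifacts are keyed by the triggering commit (`github.event.after`) so the deploy job
      # below can download the matching reports; one day of retention is enough for that.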
- name: Create artifact with report
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.UNIT_PATH }}
path: coverage
retention-days: 1
end-to-end-tests:
name: "End-to-End Tests"
runs-on: ubuntu-latest
    # Set a generously long timeout to give the site time to build.
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: ".nvmrc"
cache: npm
- name: Install
run: npm ci
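      # `--with-deps` also installs the system packages the Playwright browsers need on the runner.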
- name: Install Playwright browsers
run: npx playwright install --with-deps
- name: Build site and run end-to-end tests
run: npm run test:e2e
- name: Create artifact with report
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.E2E_PATH }}
path: e2e-tests/report
retention-days: 1
bundle-visualizer:
name: "Bundle Visualizer"
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: ".nvmrc"
cache: npm
- name: Install
run: npm ci
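      # The build step is expected to write its static report to ./bundle-visualizer,
      # which is the path uploaded as an artifact below.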
- name: Build bundle visualizer
run: npm run bundle-visualizer
- name: Create artifact with bundle visualizer
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.BUNDLE_VISUALIZER_PATH }}
path: bundle-visualizer
retention-days: 1
lighthouse:
name: "Lighthouse"
runs-on: "ubuntu-latest"
timeout-minutes: 60
steps:
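      # Lighthouse audits the live Vercel preview for this PR, which is deployed outside this
      # workflow (e.g. by Vercel's Git integration), so first wait for that deployment to be ready.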
- name: Capture Vercel preview URL
        uses: patrickedqvist/wait-for-vercel-preview@v1.3.1
id: vercel_preview_url
with:
token: ${{ secrets.GITHUB_TOKEN }}
# Check once a minute
check_interval: 60
# Timeout after 45 minutes
max_timeout: 2700
- name: Checkout
uses: actions/checkout@v4
- name: Setup Chrome
uses: browser-actions/setup-chrome@latest
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: ".nvmrc"
cache: npm
- name: Install
run: npm i -g lighthouse lighthouse-batch
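      # .lighthouse/paths.txt lists the site paths to audit; each is prefixed with the preview URL
      # below. The `|| [ -n "$path" ]` guard keeps the loop from dropping a final line that lacks a
      # trailing newline.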
- name: Construct URLs to test
run: |
while IFS="" read -r path || [ -n "$path" ]
do
echo "${{ steps.vercel_preview_url.outputs.url }}${path}" >> .lighthouse/urls.txt
done < .lighthouse/paths.txt
- name: Generate Lighthouse reports
# Run twice, once for mobile and once for desktop.
# Ignore the PWA category by only selecting the other four.
        # Without the increased CPU throttling multiplier, the generated reports warned that the
        # GitHub Actions runner couldn't keep up. Raising the multiplier from the default of 4 to 8
        # seems to fix the issue.
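        # The x-vercel-skip-toolbar header asks Vercel not to inject its preview toolbar into the
        # audited pages.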
run: |
lighthouse-batch --use-global --html --out lighthouse-reports/mobile --file .lighthouse/urls.txt --params "--only-categories=\"performance,accessibility,best-practices,seo\" --extra-headers \"{\\\"x-vercel-skip-toolbar\\\":\\\"1\\\"}\" --throttling.cpuSlowdownMultiplier=8"
lighthouse-batch --use-global --html --out lighthouse-reports/desktop --file .lighthouse/urls.txt --params "--only-categories=\"performance,accessibility,best-practices,seo\" --extra-headers \"{\\\"x-vercel-skip-toolbar\\\":\\\"1\\\"}\" --throttling.cpuSlowdownMultiplier=8 --preset=desktop"
- name: Copy landing page for reports summary
run: cp .lighthouse/index.html lighthouse-reports/index.html
- name: Create artifact with Lighthouse reports
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.LIGHTHOUSE_PATH }}
path: lighthouse-reports
retention-days: 1
deploy-reports-to-vercel-meta-site:
# Guide: https://vercel.com/guides/how-can-i-use-github-actions-with-vercel
name: "Deploy Reports to Vercel Meta-Site"
# Wait for tests to complete before deploying.
needs: [lint-and-unit-tests, end-to-end-tests, bundle-visualizer, lighthouse]
    # We still want to deploy the reports regardless of whether any tests failed.
if: ${{ always() }}
runs-on: ubuntu-latest
env:
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_META_SITE_PROJECT_ID }}
VERCEL_TOKEN: ${{secrets.VERCEL_META_SITE_DEPLOY_TOKEN }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: ".nvmrc"
cache: npm
- name: Install Vercel CLI
run: npm i -g vercel@latest
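      # `vercel pull` downloads the meta-site project settings and environment into .meta/.vercel
      # so the prebuilt build and deploy below target the linked project.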
- name: Setup Vercel to build and deploy
run: vercel pull --yes --cwd .meta --environment=${{ fromJSON(env.IS_MAIN) && 'production' || 'preview' }} --token=${{env.VERCEL_TOKEN}}
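      # Pull each report artifact into .meta/ so it is served as a static path on the meta-site.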
- name: Download unit test report artifact
uses: actions/download-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.UNIT_PATH }}
path: .meta/${{ env.UNIT_PATH }}
- name: Download end-to-end test report artifact
uses: actions/download-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.E2E_PATH }}
path: .meta/${{ env.E2E_PATH }}
- name: Download bundle visualizer artifact
uses: actions/download-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.BUNDLE_VISUALIZER_PATH }}
path: .meta/${{ env.BUNDLE_VISUALIZER_PATH }}
- name: Download Lighthouse report artifact
uses: actions/download-artifact@v3
with:
name: ${{ github.event.after }}-${{ env.LIGHTHOUSE_PATH }}
path: .meta/${{ env.LIGHTHOUSE_PATH }}
- name: Build for Vercel
run: vercel build --cwd .meta --token=${{env.VERCEL_TOKEN}} ${{ fromJSON(env.IS_MAIN) && '--prod'}}
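      # Preview deployments get a generated URL, so alias it to the stable per-PR hostname that
      # the PR comment below links to.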
- name: Deploy to Vercel preview
if: ${{ !fromJSON(env.IS_MAIN) }}
# Guide: https://vercel.com/guides/how-to-alias-a-preview-deployment-using-the-cli
run: |
url="$(vercel deploy --cwd .meta --prebuilt --token=${{env.VERCEL_TOKEN}})"
vercel alias --cwd .meta --scope ldaf --token=${{env.VERCEL_TOKEN}} set "$url" ${{env.VERCEL_META_PREVIEW_URL}}
- name: Deploy to Vercel production
if: ${{ fromJSON(env.IS_MAIN) }}
run: vercel deploy --cwd .meta --prebuilt --prod --token=${{env.VERCEL_TOKEN}}
- name: Report deployment success message to PR in comment
if: ${{ !fromJSON(env.IS_MAIN) }}
        # https://github.com/marketplace/actions/comment-progress
        uses: hasura/comment-progress@v2.2.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
repository: ${{ github.repository }}
number: ${{ github.event.number }}
id: vercel-deploy
recreate: true
message: "Reports for ${{ github.event.after }} have been [deployed to Vercel](https://${{env.VERCEL_META_PREVIEW_URL}}):\n\n* [Unit Test Coverage Report](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.UNIT_PATH }})\n* [End-to-End Test Report](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.E2E_PATH }})\n* [Bundle Visualizer](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.BUNDLE_VISUALIZER_PATH }})\n* [Lighthouse Reports](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.LIGHTHOUSE_PATH }})\n* [Storybook](https://${{ env.VERCEL_STORYBOOK_PREVIEW_URL }})"
# Handle Storybook separately since it takes much longer to build and deploy.
storybook:
name: "Build and Deploy Storybook"
runs-on: "ubuntu-latest"
timeout-minutes: 60
env:
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_STORYBOOK_PROJECT_ID }}
VERCEL_TOKEN: ${{secrets.VERCEL_STORYBOOK_DEPLOY_TOKEN }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: ".nvmrc"
cache: npm
- name: Install
run: npm ci
- name: Build Storybook
run: npm run build-storybook
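      # Storybook is deployed as its own Vercel project (see VERCEL_STORYBOOK_PROJECT_ID above),
      # separate from the meta-site that hosts the reports.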
- name: Install Vercel CLI
run: npm i -g vercel@latest
- name: Setup Vercel to build and deploy
run: vercel pull --yes --cwd storybook-static --environment=${{ fromJSON(env.IS_MAIN) && 'production' || 'preview' }} --token=${{env.VERCEL_TOKEN}}
- name: Build for Vercel
run: vercel build --cwd storybook-static --token=${{env.VERCEL_TOKEN}} ${{ fromJSON(env.IS_MAIN) && '--prod'}}
- name: Deploy to Vercel preview
if: ${{ !fromJSON(env.IS_MAIN) }}
run: |
url="$(vercel deploy --cwd storybook-static --prebuilt --token=${{env.VERCEL_TOKEN}})"
vercel alias --cwd storybook-static --scope ldaf --token=${{env.VERCEL_TOKEN}} set "$url" ${{env.VERCEL_STORYBOOK_PREVIEW_URL}}
- name: Deploy to Vercel production
if: ${{ fromJSON(env.IS_MAIN) }}
run: vercel deploy --cwd storybook-static --prebuilt --prod --token=${{env.VERCEL_TOKEN}}