Allow for logging in with Contentful OAuth #1629

name: "Test and Deploy Reports"
on:
  # Run on PRs; by default, runs when a PR activity type is `opened`, `synchronize`, or `reopened`
  # (https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)
  pull_request: # empty object will use defaults
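  # If the defaults ever need narrowing, the activity types could be listed explicitly, e.g.:
  # pull_request:
  #   types: [opened, synchronize, reopened]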
  # Run on updates to `main` branch
  push:
    branches:
      - "main"
permissions:
  contents: read
  # Allow us to write comments on PRs
  pull-requests: write
env:
  # Preview URLs
  VERCEL_META_PREVIEW_URL: meta-site-${{ github.event.number }}-ldaf.vercel.app
  VERCEL_STORYBOOK_PREVIEW_URL: storybook-${{ github.event.number }}-ldaf.vercel.app
  # Names for report paths on Vercel Meta-Site
  UNIT_PATH: unit-test-coverage
  E2E_PATH: e2e-test-report
  BUNDLE_VISUALIZER_PATH: bundle-visualizer
  LIGHTHOUSE_PATH: lighthouse
  # Just run this check once; used to determine whether to post PR comments, deploy to Vercel production, etc.
  IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
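  # NOTE: env values are always strings, so the steps below wrap this in fromJSON() to get a real boolean.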
jobs:
  lint-and-unit-tests:
    name: "ESLint, TypeScript Check, and Unit Tests"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version-file: ".nvmrc"
          cache: npm
      - name: Install
        run: npm ci
      - name: Prettier & ESLint
        run: npm run lint
      - name: svelte-check (diagnostic checks for unused CSS, Svelte A11y hints, and JS/TS errors)
        run: npm run check
      - name: Unit tests
        run: npm run test:unit:coverage
      - name: Report code coverage to PR in comment
        if: ${{ !fromJSON(env.IS_MAIN) }}
        # https://github.com/marketplace/actions/code-coverage-report
        # Use a commit hash instead of a version tag to support the delete-old-comments field:
        # https://github.com/romeovs/lcov-reporter-action/issues/47#issuecomment-1362606540
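        # The action is assumed to read the LCOV report from its default location
        # (./coverage/lcov.info), which `npm run test:unit:coverage` is expected to produce.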
        uses: romeovs/lcov-reporter-action@4cf015aa4afa87b78238301f1e3dc140ea0e1ec6
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          delete-old-comments: true
      - name: Create artifact with report
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.UNIT_PATH }}
          path: coverage
          retention-days: 1
  end-to-end-tests:
    name: "End-to-End Tests"
    runs-on: ubuntu-latest
    # Set a generous timeout to allow the site to be built
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version-file: ".nvmrc"
          cache: npm
      - name: Install
        run: npm ci
      - name: Install Playwright browsers
        run: npx playwright install --with-deps
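      # `npm run test:e2e` is assumed to build and serve the site itself (e.g. via Playwright's
      # webServer option) before running the suite, hence the generous job timeout above.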
      - name: Build site and run end-to-end tests
        run: npm run test:e2e
      - name: Create artifact with report
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.E2E_PATH }}
          path: e2e-tests/report
          retention-days: 1
  bundle-visualizer:
    name: "Bundle Visualizer"
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version-file: ".nvmrc"
          cache: npm
      - name: Install
        run: npm ci
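      # `npm run bundle-visualizer` is assumed to write a static report into ./bundle-visualizer,
      # the directory uploaded as an artifact below.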
      - name: Build bundle visualizer
        run: npm run bundle-visualizer
      - name: Create artifact with bundle visualizer
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.BUNDLE_VISUALIZER_PATH }}
          path: bundle-visualizer
          retention-days: 1
  lighthouse:
    name: "Lighthouse"
    runs-on: "ubuntu-latest"
    timeout-minutes: 60
    steps:
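      # Wait for the site preview deployment for this commit (handled by the Vercel GitHub
      # integration) to finish, then expose its URL as `steps.vercel_preview_url.outputs.url`
      # for the steps below.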
      - name: Capture Vercel preview URL
        uses: patrickedqvist/[email protected]
        id: vercel_preview_url
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          # Check once a minute
          check_interval: 60
          # Timeout after 45 minutes
          max_timeout: 2700
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Chrome
        uses: browser-actions/setup-chrome@latest
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version-file: ".nvmrc"
          cache: npm
      - name: Install
        run: npm i -g lighthouse lighthouse-batch
      - name: Construct URLs to test
        run: |
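          # Each line of .lighthouse/paths.txt is a site path (e.g. "/about" — illustrative);
          # prefixing it with the preview URL builds the full URL list that lighthouse-batch reads.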
          while IFS="" read -r path || [ -n "$path" ]
          do
            echo "${{ steps.vercel_preview_url.outputs.url }}${path}" >> .lighthouse/urls.txt
          done < .lighthouse/paths.txt
      - name: Generate Lighthouse reports
        # Run twice, once for mobile and once for desktop.
        # Ignore the PWA category by only selecting the other four.
        # When running without the increased throttling multiplier, we were getting warnings on the
        # reports that the GitHub Actions runner couldn't keep up. Increasing the multiplier to 8
        # from the default of 4 seems to fix the issue.
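        # Reports are written to lighthouse-reports/mobile and lighthouse-reports/desktop (one HTML
        # report per URL), matching the --out values passed to lighthouse-batch below.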
        run: |
          lighthouse-batch --use-global --html --out lighthouse-reports/mobile --file .lighthouse/urls.txt --params "--only-categories=\"performance,accessibility,best-practices,seo\" --extra-headers \"{\\\"x-vercel-skip-toolbar\\\":\\\"1\\\"}\" --throttling.cpuSlowdownMultiplier=8"
          lighthouse-batch --use-global --html --out lighthouse-reports/desktop --file .lighthouse/urls.txt --params "--only-categories=\"performance,accessibility,best-practices,seo\" --extra-headers \"{\\\"x-vercel-skip-toolbar\\\":\\\"1\\\"}\" --throttling.cpuSlowdownMultiplier=8 --preset=desktop"
      - name: Copy landing page for reports summary
        run: cp .lighthouse/index.html lighthouse-reports/index.html
      - name: Create artifact with Lighthouse reports
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.LIGHTHOUSE_PATH }}
          path: lighthouse-reports
          retention-days: 1
  deploy-reports-to-vercel-meta-site:
    # Guide: https://vercel.com/guides/how-can-i-use-github-actions-with-vercel
    name: "Deploy Reports to Vercel Meta-Site"
    # Wait for tests to complete before deploying.
    needs: [lint-and-unit-tests, end-to-end-tests, bundle-visualizer, lighthouse]
    # We still want to upload reports regardless of whether tests failed.
    if: ${{ always() }}
    runs-on: ubuntu-latest
    env:
      VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
      VERCEL_PROJECT_ID: ${{ secrets.VERCEL_META_SITE_PROJECT_ID }}
      VERCEL_TOKEN: ${{ secrets.VERCEL_META_SITE_DEPLOY_TOKEN }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version-file: ".nvmrc"
          cache: npm
      - name: Install Vercel CLI
        run: npm i -g vercel@latest
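      # `vercel pull` downloads the Meta-Site project settings and environment variables into
      # .meta/.vercel so the later `vercel build` and `vercel deploy --prebuilt` steps can use them.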
      - name: Setup Vercel to build and deploy
        run: vercel pull --yes --cwd .meta --environment=${{ fromJSON(env.IS_MAIN) && 'production' || 'preview' }} --token=${{ env.VERCEL_TOKEN }}
      - name: Download unit test report artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.UNIT_PATH }}
          path: .meta/${{ env.UNIT_PATH }}
      - name: Download end-to-end test report artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.E2E_PATH }}
          path: .meta/${{ env.E2E_PATH }}
      - name: Download bundle visualizer artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.BUNDLE_VISUALIZER_PATH }}
          path: .meta/${{ env.BUNDLE_VISUALIZER_PATH }}
      - name: Download Lighthouse report artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ github.event.after }}-${{ env.LIGHTHOUSE_PATH }}
          path: .meta/${{ env.LIGHTHOUSE_PATH }}
      - name: Build for Vercel
        # Fall back to an empty string so the literal "false" is not appended to the command.
        run: vercel build --cwd .meta --token=${{ env.VERCEL_TOKEN }} ${{ fromJSON(env.IS_MAIN) && '--prod' || '' }}
      - name: Deploy to Vercel preview
        if: ${{ !fromJSON(env.IS_MAIN) }}
        # Guide: https://vercel.com/guides/how-to-alias-a-preview-deployment-using-the-cli
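        # `vercel deploy` prints the unique deployment URL; aliasing it to the stable per-PR
        # preview hostname keeps the report links in the PR comment below working across pushes.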
        run: |
          url="$(vercel deploy --cwd .meta --prebuilt --token=${{ env.VERCEL_TOKEN }})"
          vercel alias --cwd .meta --scope ldaf --token=${{ env.VERCEL_TOKEN }} set "$url" ${{ env.VERCEL_META_PREVIEW_URL }}
      - name: Deploy to Vercel production
        if: ${{ fromJSON(env.IS_MAIN) }}
        run: vercel deploy --cwd .meta --prebuilt --prod --token=${{ env.VERCEL_TOKEN }}
      - name: Report deployment success message to PR in comment
        if: ${{ !fromJSON(env.IS_MAIN) }}
        # https://github.com/marketplace/actions/comment-progress
        uses: hasura/[email protected]
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          repository: ${{ github.repository }}
          number: ${{ github.event.number }}
          id: vercel-deploy
          recreate: true
          message: "Reports for ${{ github.event.after }} have been [deployed to Vercel](https://${{ env.VERCEL_META_PREVIEW_URL }}):\n\n* [Unit Test Coverage Report](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.UNIT_PATH }})\n* [End-to-End Test Report](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.E2E_PATH }})\n* [Bundle Visualizer](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.BUNDLE_VISUALIZER_PATH }})\n* [Lighthouse Reports](https://${{ env.VERCEL_META_PREVIEW_URL }}/${{ env.LIGHTHOUSE_PATH }})\n* [Storybook](https://${{ env.VERCEL_STORYBOOK_PREVIEW_URL }})"
  # Handle Storybook separately since it takes much longer to build and deploy.
  storybook:
    name: "Build and Deploy Storybook"
    runs-on: "ubuntu-latest"
    timeout-minutes: 60
    env:
      VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
      VERCEL_PROJECT_ID: ${{ secrets.VERCEL_STORYBOOK_PROJECT_ID }}
      VERCEL_TOKEN: ${{ secrets.VERCEL_STORYBOOK_DEPLOY_TOKEN }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version-file: ".nvmrc"
          cache: npm
      - name: Install
        run: npm ci
      - name: Build Storybook
        run: npm run build-storybook
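      # The Vercel CLI steps below treat ./storybook-static (assumed to be Storybook's default
      # static build output directory) as the project root via --cwd.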
      - name: Install Vercel CLI
        run: npm i -g vercel@latest
      - name: Setup Vercel to build and deploy
        run: vercel pull --yes --cwd storybook-static --environment=${{ fromJSON(env.IS_MAIN) && 'production' || 'preview' }} --token=${{ env.VERCEL_TOKEN }}
      - name: Build for Vercel
        # Fall back to an empty string so the literal "false" is not appended to the command.
        run: vercel build --cwd storybook-static --token=${{ env.VERCEL_TOKEN }} ${{ fromJSON(env.IS_MAIN) && '--prod' || '' }}
      - name: Deploy to Vercel preview
        if: ${{ !fromJSON(env.IS_MAIN) }}
        run: |
          url="$(vercel deploy --cwd storybook-static --prebuilt --token=${{ env.VERCEL_TOKEN }})"
          vercel alias --cwd storybook-static --scope ldaf --token=${{ env.VERCEL_TOKEN }} set "$url" ${{ env.VERCEL_STORYBOOK_PREVIEW_URL }}
      - name: Deploy to Vercel production
        if: ${{ fromJSON(env.IS_MAIN) }}
        run: vercel deploy --cwd storybook-static --prebuilt --prod --token=${{ env.VERCEL_TOKEN }}