Commit
refactor(CI): Convert daily turbopack (areweturboyet) integration tests into reusable workflows
bgw committed Feb 20, 2025
1 parent c939331 commit 0551ad6
Showing 3 changed files with 237 additions and 197 deletions.
8 changes: 7 additions & 1 deletion .github/workflows/build_reusable.yml
@@ -226,7 +226,13 @@ jobs:

       - run: turbo run get-test-timings -- --build ${{ github.sha }}

-      - run: /bin/bash -c "${{ inputs.afterBuild }}"
+      - run: ${{ inputs.afterBuild }}
+        # defaults.run.shell sets stronger options (`-leo pipefail`).
+        # Set this back to GitHub Actions' weaker defaults:
+        # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
+        #
+        # We must use a login shell: fnm installation may modify the `.profile`.
+        shell: bash -le {0}
         timeout-minutes: ${{ inputs.timeout_minutes }}

       - name: Upload artifact
Expand Down
207 changes: 207 additions & 0 deletions .github/workflows/integration_tests_reusable.yml
@@ -0,0 +1,207 @@
name: Integration Tests Reusable

on:
  workflow_call:
    inputs:
      name:
        description: A unique identifier used for uploaded assets
        type: string
      test_type:
        description: '"development" or "production"'
        required: true
        type: string
      env_json:
        description: Additional environment variables to pass to all tests
        type: string
        default: '{}'
      e2e_groups:
        description: >
          Size of the matrix used for running e2e tests (controls parallelism)
        type: number
        default: 6
      integration_groups:
        description: >
          Size of the matrix used for running legacy integration tests
          (controls parallelism)
        type: number
        default: 6
      e2e_timeout_minutes:
        type: number
        default: 30
      integration_timeout_minutes:
        type: number
        default: 30
      datadog_args:
        description: >
          Unescaped arguments to pass to the datadog CLI's `junit upload`
          subcommand
        type: string
      diff_base:
        description: >
          The base of the test results to compare against. If not specified,
          this will try to compare with the latest main branch's test results.
        type: string
        default: 'none' # the literal string "none"
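# Example caller (a sketch only; the calling job name and the env_json
# contents below are illustrative assumptions, not part of this commit):
#
#   jobs:
#     test-turbopack-dev:
#       uses: ./.github/workflows/integration_tests_reusable.yml
#       secrets: inherit
#       with:
#         name: turbopack-dev
#         test_type: development
#         env_json: '{"SOME_TEST_FLAG":"1"}'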

jobs:
  # First, build Next.js to execute across tests.
  build-next:
    name: build-next
    uses: ./.github/workflows/build_reusable.yml
    with:
      skipNativeBuild: yes
      stepName: build-next
    secrets: inherit

  build-native:
    name: build-native
    uses: ./.github/workflows/build_reusable.yml
    with:
      skipInstallBuild: yes
      stepName: build-native
    secrets: inherit

  generate-matrices:
    runs-on: [self-hosted, linux, x64, metal]
    steps:
      - id: out
        run: |
          printf 'e2e=[%s]\n' \
            "$(seq -s, 1 ${{ inputs.e2e_groups }})" | \
            tee -a "$GITHUB_OUTPUT"
          printf 'integration=[%s]\n' \
            "$(seq -s, 1 ${{ inputs.integration_groups }})" | \
            tee -a "$GITHUB_OUTPUT"
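          # e.g. with the default `e2e_groups: 6`, the first command appends
          # `e2e=[1,2,3,4,5,6]` to "$GITHUB_OUTPUT"; `fromJSON` in the test
          # jobs below turns that into the matrix values 1..6.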
    outputs:
      e2e: ${{ steps.out.outputs.e2e }}
      integration: ${{ steps.out.outputs.integration }}

  # Actual test scheduling. These jobs mimic the normal test jobs.
  # Refer to build_and_test.yml for more details.
  #
  # We run tests in two parts. Legacy integration tests are run separately:
  # https://github.com/vercel/next.js/blob/canary/contributing/core/testing.md#test-types-in-nextjs
  test-e2e:
    # Name must match `integrationTestJobs` in
    # `./.github/actions/next-integration-stat`
    name: Next.js integration test (E2E and ${{ inputs.test_type }})
    needs: [build-next, build-native, generate-matrices]
    strategy:
      fail-fast: false
      matrix:
        group: ${{ fromJSON(needs.generate-matrices.outputs.e2e) }}
    uses: ./.github/workflows/build_reusable.yml
    with:
      afterBuild: |
        # running e2e and ${{ inputs.test_type }} tests with `node run-tests.js`
        export __INTERNAL_CUSTOM_TURBOPACK_BINDINGS=\
        '${{ github.workspace }}/packages/next-swc/native/next-swc.linux-x64-gnu.node'
        export NEXT_TEST_CONTINUE_ON_ERROR=TRUE
        export NEXT_E2E_TEST_TIMEOUT=240000
        # expand environment variables: https://stackoverflow.com/a/69502302
        eval "export $( \
          printf "%s\n" "$ENV_JSON" | \
          jq -r 'to_entries | map("\(.key)=\(.value)") | @sh' \
        ) __LAST_ENV_VAR=''" # prevent an empty argument list to `export`
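        # e.g. an env_json of '{"SOME_VAR":"1"}' (illustrative name) makes the
        # line above evaluate to: export 'SOME_VAR=1' __LAST_ENV_VAR=''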
        export NEXT_TEST_MODE=${{
          inputs.test_type == 'development' && 'dev' || 'start'
        }}
        node run-tests.js \
          -g ${{ matrix.group }}/${{ inputs.e2e_groups }} \
          -c $TEST_CONCURRENCY \
          --type ${{ inputs.test_type }}
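        # e.g. for matrix group 3 with the default e2e_groups of 6 and
        # test_type 'development', this resolves to:
        #   node run-tests.js -g 3/6 -c $TEST_CONCURRENCY --type development
        # (with NEXT_TEST_MODE=dev exported above)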
      stepName: test-${{ inputs.name }}-${{ matrix.group }}
      timeout_minutes: ${{ inputs.e2e_timeout_minutes }}
    secrets: inherit

  test-integration:
    # Name must match `integrationTestJobs` in
    # `./.github/actions/next-integration-stat`
    name: Next.js integration test (Integration)
    needs: [build-next, build-native, generate-matrices]
    strategy:
      fail-fast: false
      matrix:
        group: ${{ fromJSON(needs.generate-matrices.outputs.integration) }}
    uses: ./.github/workflows/build_reusable.yml
    with:
      nodeVersion: 18.18.2
      afterBuild: |
        # running legacy integration tests with `node run-tests.js`
        export __INTERNAL_CUSTOM_TURBOPACK_BINDINGS=\
        '${{ github.workspace }}/packages/next-swc/native/next-swc.linux-x64-gnu.node'
        export NEXT_TEST_CONTINUE_ON_ERROR=TRUE
        export NEXT_E2E_TEST_TIMEOUT=240000
        eval "export $( \
          printf "%s\n" "$ENV_JSON" | \
          jq -r 'to_entries | map("\(.key)=\(.value)") | @sh' \
        ) __LAST_ENV_VAR=''"
        # HACK: Despite the name, these environment variables are just used to
        # gate tests, so they're applicable to both turbopack and rspack tests.
        export ${{
          inputs.test_type == 'development' &&
            'TURBOPACK_DEV=1' ||
            'TURBOPACK_BUILD=1'
        }}
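        # e.g. for test_type 'development' this evaluates to
        # `export TURBOPACK_DEV=1`; for 'production' it evaluates to
        # `export TURBOPACK_BUILD=1`.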
        node run-tests.js \
          -g ${{ matrix.group }}/${{ inputs.integration_groups }} \
          -c $TEST_CONCURRENCY \
          --type integration
      stepName: test-${{ inputs.name }}-integration-${{ matrix.group }}
      timeout_minutes: ${{ inputs.integration_timeout_minutes }}
    secrets: inherit

  # Collect integration test results from the test-e2e and test-integration
  # jobs and store them as a GitHub artifact for the next step to consume.
  collect_nextjs_development_integration_stat:
    needs: [test-e2e, test-integration]
    name: Next.js integration test development status report
    runs-on: [self-hosted, linux, x64, metal]
    if: always()
    permissions:
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Collect integration test stat
        uses: ./.github/actions/next-integration-stat
        with:
          diff_base: ${{ inputs.diff_base }}

      - name: Store artifacts
        uses: actions/upload-artifact@v4
        with:
          name: test-results-${{ inputs.name }}
          path: |
            nextjs-test-results.json
            failed-test-path-list.json
            passed-test-path-list.json

  upload_test_report:
    needs: [test-e2e, test-integration]
    name: Upload test report to datadog
    runs-on: [self-hosted, linux, x64, metal]
    if: ${{ !cancelled() }}
    steps:
      - name: Download test report artifacts
        id: download-test-reports
        uses: actions/download-artifact@v4
        with:
          pattern: test-reports-*
          path: test/reports
          merge-multiple: true

      - name: Upload to datadog
        env:
          DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }}
          DD_ENV: 'ci'
        run: |
          npx @datadog/datadog-ci junit upload ${{ inputs.datadog_args }} ./test/reports
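          # `datadog_args` typically carries flags such as `--service <name>`
          # and `--tags <key:value>` (an assumption; the actual flags are
          # supplied by the calling workflow).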
