\n`
+ comment += `\n`
comment += `${result.title}${increaseDecreaseNote}
\n\n
\n\n`
comment += resultContent
comment += ' \n'
diff --git a/.github/actions/next-stats-action/src/index.js b/.github/actions/next-stats-action/src/index.js
index 402c7cfadfcb..a859aaac0cc7 100644
--- a/.github/actions/next-stats-action/src/index.js
+++ b/.github/actions/next-stats-action/src/index.js
@@ -1,5 +1,6 @@
const path = require('path')
-const fs = require('fs-extra')
+const fs = require('fs/promises')
+const { existsSync } = require('fs')
const exec = require('./util/exec')
const logger = require('./util/logger')
const runConfigs = require('./run')
@@ -21,7 +22,7 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
;(async () => {
try {
- if (await fs.pathExists(path.join(__dirname, '../SKIP_NEXT_STATS.txt'))) {
+ if (existsSync(path.join(__dirname, '../SKIP_NEXT_STATS.txt'))) {
console.log(
'SKIP_NEXT_STATS.txt file present, exiting stats generation..'
)
@@ -100,7 +101,7 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
for (const dir of repoDirs) {
logger(`Running initial build for ${dir}`)
if (!actionInfo.skipClone) {
- const usePnpm = await fs.pathExists(path.join(dir, 'pnpm-lock.yaml'))
+ const usePnpm = existsSync(path.join(dir, 'pnpm-lock.yaml'))
if (!statsConfig.skipInitialInstall) {
await exec.spawnPromise(
@@ -121,15 +122,13 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
}
await fs
- .copy(
+ .cp(
path.join(__dirname, '../native'),
- path.join(dir, 'packages/next-swc/native')
+ path.join(dir, 'packages/next-swc/native'),
+ { recursive: true, force: true }
)
.catch(console.error)
- console.log(await exec(`ls ${path.join(__dirname, '../native')}`))
- console.log(await exec(`cd ${dir} && ls ${dir}/packages/next-swc/native`))
-
logger(`Linking packages in ${dir}`)
const isMainRepo = dir === mainRepoDir
const pkgPaths = await linkPackages({
diff --git a/.github/actions/next-stats-action/src/prepare/repo-setup.js b/.github/actions/next-stats-action/src/prepare/repo-setup.js
index 74fd6ea1fc92..df62cc572340 100644
--- a/.github/actions/next-stats-action/src/prepare/repo-setup.js
+++ b/.github/actions/next-stats-action/src/prepare/repo-setup.js
@@ -1,16 +1,14 @@
const path = require('path')
-const fse = require('fs-extra')
-const fs = require('fs')
-const fsp = require('fs/promises')
+const fs = require('fs/promises')
+const { existsSync } = require('fs')
const exec = require('../util/exec')
-const { remove } = require('fs-extra')
const logger = require('../util/logger')
const execa = require('execa')
module.exports = (actionInfo) => {
return {
async cloneRepo(repoPath = '', dest = '', branch = '', depth = '20') {
- await remove(dest)
+ await fs.rm(dest, { recursive: true, force: true })
await exec(
`git clone ${actionInfo.gitRoot}${repoPath} --single-branch --branch ${branch} --depth=${depth} ${dest}`
)
@@ -72,7 +70,7 @@ module.exports = (actionInfo) => {
let pkgs
try {
- pkgs = await fsp.readdir(path.join(repoDir, 'packages'))
+ pkgs = await fs.readdir(path.join(repoDir, 'packages'))
} catch (err) {
if (err.code === 'ENOENT') {
require('console').log('no packages to link')
@@ -87,8 +85,8 @@ module.exports = (actionInfo) => {
const packedPkgPath = path.join(pkgPath, `${pkg}-packed.tgz`)
const pkgDataPath = path.join(pkgPath, 'package.json')
- if (fs.existsSync(pkgDataPath)) {
- const pkgData = JSON.parse(await fsp.readFile(pkgDataPath))
+ if (existsSync(pkgDataPath)) {
+ const pkgData = JSON.parse(await fs.readFile(pkgDataPath))
const { name } = pkgData
pkgDatas.set(name, {
@@ -122,7 +120,7 @@ module.exports = (actionInfo) => {
pkgData.files.push('native')
try {
- const swcBinariesDirContents = await fsp.readdir(
+ const swcBinariesDirContents = await fs.readdir(
path.join(pkgPath, 'native')
)
require('console').log(
@@ -155,7 +153,7 @@ module.exports = (actionInfo) => {
}
}
- await fsp.writeFile(
+ await fs.writeFile(
pkgDataPath,
JSON.stringify(pkgData, null, 2),
'utf8'
@@ -186,9 +184,9 @@ module.exports = (actionInfo) => {
'disabled-native-gitignore'
)
- await fsp.rename(nativeGitignorePath, renamedGitignorePath)
+ await fs.rename(nativeGitignorePath, renamedGitignorePath)
cleanup = async () => {
- await fsp.rename(renamedGitignorePath, nativeGitignorePath)
+ await fs.rename(renamedGitignorePath, nativeGitignorePath)
}
}
@@ -201,7 +199,7 @@ module.exports = (actionInfo) => {
})
return Promise.all([
- fsp.rename(path.resolve(pkgPath, stdout.trim()), packedPkgPath),
+ fs.rename(path.resolve(pkgPath, stdout.trim()), packedPkgPath),
cleanup?.(),
])
}
diff --git a/.github/actions/next-stats-action/src/run/collect-diffs.js b/.github/actions/next-stats-action/src/run/collect-diffs.js
index 3440d8066be4..c850fb843ab9 100644
--- a/.github/actions/next-stats-action/src/run/collect-diffs.js
+++ b/.github/actions/next-stats-action/src/run/collect-diffs.js
@@ -1,5 +1,6 @@
const path = require('path')
-const fs = require('fs-extra')
+const fs = require('fs/promises')
+const { existsSync } = require('fs')
const exec = require('../util/exec')
const glob = require('../util/glob')
const logger = require('../util/logger')
@@ -12,15 +13,17 @@ module.exports = async function collectDiffs(
if (initial) {
logger('Setting up directory for diffing')
// set-up diffing directory
- await fs.remove(diffingDir)
- await fs.mkdirp(diffingDir)
+ await fs.rm(diffingDir, { recursive: true, force: true })
+ await fs.mkdir(diffingDir, { recursive: true })
await exec(`cd ${diffingDir} && git init`)
} else {
// remove any previous files in case they won't be overwritten
const toRemove = await glob('!(.git)', { cwd: diffingDir, dot: true })
await Promise.all(
- toRemove.map((file) => fs.remove(path.join(diffingDir, file)))
+ toRemove.map((file) =>
+ fs.rm(path.join(diffingDir, file), { recursive: true, force: true })
+ )
)
}
const diffs = {}
@@ -40,7 +43,7 @@ module.exports = async function collectDiffs(
const absPath = path.join(statsAppDir, file)
const diffDest = path.join(diffingDir, file)
- await fs.copy(absPath, diffDest)
+ await fs.cp(absPath, diffDest, { recursive: true, force: true })
}
if (curFiles.length > 0) {
@@ -75,7 +78,7 @@ module.exports = async function collectDiffs(
for (const line of renamedFiles) {
const [, prev, cur] = line.split('\t')
- await fs.move(path.join(diffingDir, cur), path.join(diffingDir, prev))
+ await fs.rename(path.join(diffingDir, cur), path.join(diffingDir, prev))
diffs._renames.push({
prev,
cur,
@@ -91,7 +94,7 @@ module.exports = async function collectDiffs(
for (const file of changedFiles) {
const fileKey = path.basename(file)
- const hasFile = await fs.exists(path.join(diffingDir, file))
+ const hasFile = existsSync(path.join(diffingDir, file))
if (!hasFile) {
diffs[fileKey] = 'deleted'
@@ -103,7 +106,7 @@ module.exports = async function collectDiffs(
`cd ${diffingDir} && git diff --minimal HEAD ${file}`
)
stdout = (stdout.split(file).pop() || '').trim()
- if (stdout.length > 0) {
+ if (stdout.length > 0 && !isLikelyHashOrIDChange(stdout)) {
diffs[fileKey] = stdout
}
} catch (err) {
@@ -114,3 +117,48 @@ module.exports = async function collectDiffs(
}
return diffs
}
+
+function isLikelyHashOrIDChange(diff) {
+ const lines = diff.split('\n')
+ let additions = []
+ let deletions = []
+
+ // Separate additions and deletions
+ for (const line of lines) {
+ if (line.startsWith('+')) {
+ additions.push(line.substring(1).split(/\b/))
+ } else if (line.startsWith('-')) {
+ deletions.push(line.substring(1).split(/\b/))
+ }
+ }
+
+ // If the number of additions and deletions is different, it's not a hash or ID change
+ if (additions.length !== deletions.length) {
+ return false
+ }
+
+ // Compare each addition with each deletion
+ for (let i = 0; i < additions.length; i++) {
+ const additionTokens = additions[i]
+ const deletionTokens = deletions[i]
+
+ // Identify differing tokens
+ const differingTokens = additionTokens.filter(
+ (token, index) => token !== deletionTokens[index]
+ )
+
+ // Analyze differing tokens
+ for (const token of differingTokens) {
+ const isLikelyHash = /^[a-f0-9]+$/.test(token)
+ const isLikelyID = /^[0-9]+$/.test(token)
+      // this is most likely noise because some paths include the repo name, which can be main or diff
+ const isLikelyNoise = ['main', 'diff'].includes(token)
+
+ if (!isLikelyHash && !isLikelyID && !isLikelyNoise) {
+ return false
+ }
+ }
+ }
+
+ return true
+}
diff --git a/.github/actions/next-stats-action/src/run/collect-stats.js b/.github/actions/next-stats-action/src/run/collect-stats.js
index f1ef1eb98b1d..8c54fca65e42 100644
--- a/.github/actions/next-stats-action/src/run/collect-stats.js
+++ b/.github/actions/next-stats-action/src/run/collect-stats.js
@@ -1,5 +1,5 @@
const path = require('path')
-const fs = require('fs-extra')
+const fs = require('fs/promises')
const getPort = require('get-port')
const fetch = require('node-fetch')
const glob = require('../util/glob')
@@ -84,7 +84,7 @@ module.exports = async function collectStats(
if (hasPagesToFetch) {
const fetchedPagesDir = path.join(curDir, 'fetched-pages')
- await fs.mkdirp(fetchedPagesDir)
+ await fs.mkdir(fetchedPagesDir, { recursive: true })
for (let url of runConfig.pagesToFetch) {
url = url.replace('$PORT', port)
diff --git a/.github/actions/next-stats-action/src/run/get-dir-size.js b/.github/actions/next-stats-action/src/run/get-dir-size.js
index aa16e519382e..c8e42d463111 100644
--- a/.github/actions/next-stats-action/src/run/get-dir-size.js
+++ b/.github/actions/next-stats-action/src/run/get-dir-size.js
@@ -1,5 +1,5 @@
const path = require('path')
-const fs = require('fs-extra')
+const fs = require('fs/promises')
// getDirSize recursively gets size of all files in a directory
async function getDirSize(dir, ctx = { size: 0 }) {
diff --git a/.github/actions/next-stats-action/src/run/index.js b/.github/actions/next-stats-action/src/run/index.js
index afc647d399be..66fea2ab2d2b 100644
--- a/.github/actions/next-stats-action/src/run/index.js
+++ b/.github/actions/next-stats-action/src/run/index.js
@@ -1,5 +1,5 @@
const path = require('path')
-const fs = require('fs-extra')
+const fs = require('fs/promises')
const glob = require('../util/glob')
const exec = require('../util/exec')
const logger = require('../util/logger')
@@ -36,8 +36,8 @@ async function runConfigs(
const curStatsAppPath = path.join(diffRepoDir, relativeStatsAppDir)
// clean statsAppDir
- await fs.remove(statsAppDir)
- await fs.copy(curStatsAppPath, statsAppDir)
+ await fs.rm(statsAppDir, { recursive: true, force: true })
+ await fs.cp(curStatsAppPath, statsAppDir, { recursive: true })
logger(`Copying ${curStatsAppPath} ${statsAppDir}`)
@@ -70,7 +70,7 @@ async function runConfigs(
? result.replace(/(\.|-)[0-9a-f]{16}(\.|-)/g, '$1HASH$2')
: rename.dest
if (result === dest) continue
- await fs.move(
+ await fs.rename(
path.join(statsAppDir, result),
path.join(statsAppDir, dest)
)
@@ -172,7 +172,10 @@ async function runConfigs(
}
async function linkPkgs(pkgDir = '', pkgPaths) {
- await fs.remove(path.join(pkgDir, 'node_modules'))
+ await fs.rm(path.join(pkgDir, 'node_modules'), {
+ recursive: true,
+ force: true,
+ })
const pkgJsonPath = path.join(pkgDir, 'package.json')
const pkgData = require(pkgJsonPath)
diff --git a/.github/pnpm-lock.yaml b/.github/pnpm-lock.yaml
index 1781d02be48a..76a07e96e5db 100644
--- a/.github/pnpm-lock.yaml
+++ b/.github/pnpm-lock.yaml
@@ -33,9 +33,6 @@ importers:
execa:
specifier: 2.0.3
version: 2.0.3
- fs-extra:
- specifier: ^8.1.0
- version: 8.1.0
get-port:
specifier: ^5.0.0
version: 5.1.1
@@ -480,15 +477,6 @@ packages:
mime-types: 2.1.35
dev: false
- /fs-extra@8.1.0:
- resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==}
- engines: {node: '>=6 <7 || >=8'}
- dependencies:
- graceful-fs: 4.2.11
- jsonfile: 4.0.0
- universalify: 0.1.2
- dev: false
-
/fs.realpath@1.0.0:
resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
dev: false
@@ -522,10 +510,6 @@ packages:
path-is-absolute: 1.0.1
dev: false
- /graceful-fs@4.2.11:
- resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
- dev: false
-
/gray-matter@4.0.2:
resolution: {integrity: sha512-7hB/+LxrOjq/dd8APlK0r24uL/67w7SkYnfwhNFwg/VDIGWGmduTDYf3WNstLW2fbbmRwrDGCVSJ2isuf2+4Hw==}
engines: {node: '>=6.0'}
@@ -693,12 +677,6 @@ packages:
esprima: 4.0.1
dev: false
- /jsonfile@4.0.0:
- resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==}
- optionalDependencies:
- graceful-fs: 4.2.11
- dev: false
-
/kind-of@6.0.3:
resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==}
engines: {node: '>=0.10.0'}
@@ -1135,11 +1113,6 @@ packages:
resolution: {integrity: sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==}
dev: false
- /universalify@0.1.2:
- resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==}
- engines: {node: '>= 4.0.0'}
- dev: false
-
/uuid@8.3.2:
resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
hasBin: true
diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml
index 93648a7ae05c..0547c39f95d7 100644
--- a/.github/workflows/build_and_deploy.yml
+++ b/.github/workflows/build_and_deploy.yml
@@ -463,54 +463,6 @@ jobs:
VERCEL_API_TOKEN: ${{ secrets.VERCEL_API_TOKEN }}
DEPLOY_ENVIRONMENT: production
- testDeployE2E:
- name: E2E (deploy)
- runs-on: ubuntu-latest
- needs: [publishRelease]
- env:
- NEXT_TELEMETRY_DISABLED: 1
- VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }}
- VERCEL_TEST_TEAM: vtest314-next-e2e-tests
- DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }}
- steps:
- - uses: actions/cache@v3
- timeout-minutes: 5
- id: restore-build
- with:
- path: ./*
- key: ${{ github.sha }}-${{ github.run_number }}
-
- - run: npm i -g vercel@latest
-
- - uses: actions/download-artifact@v3
- with:
- name: next-swc-binaries
- path: packages/next-swc/native
-
- - run: RESET_VC_PROJECT=true node scripts/reset-vercel-project.mjs
- name: Reset test project
-
- - run: docker run --rm -v $(pwd):/work mcr.microsoft.com/playwright:v1.35.1-jammy /bin/bash -c "cd /work && NODE_VERSION=${{ env.NODE_LTS_VERSION }} ./scripts/setup-node.sh && corepack enable > /dev/null && DATADOG_TRACE_NEXTJS_TEST=TRUE DATADOG_API_KEY=${DATADOG_API_KEY} DD_ENV=ci VERCEL_TEST_TOKEN=${{ secrets.VERCEL_TEST_TOKEN }} VERCEL_TEST_TEAM=vtest314-next-e2e-tests NEXT_TEST_JOB=1 NEXT_TEST_MODE=deploy TEST_TIMINGS_TOKEN=${{ secrets.TEST_TIMINGS_TOKEN }} NEXT_TEST_CONTINUE_ON_ERROR=1 xvfb-run node run-tests.js --type e2e >> /proc/1/fd/1"
- name: Run test/e2e (deploy)
-
- - name: Upload test trace
- if: always()
- uses: actions/upload-artifact@v3
- with:
- name: test-trace
- if-no-files-found: ignore
- retention-days: 2
- path: |
- test/traces
-
- - name: Upload test trace to datadog
- continue-on-error: true
- run: |
- ls -al ./test
- npm install -g junit-report-merger@6.0.2 @datadog/datadog-ci@2.14.0 @aws-sdk/property-provider@3
- jrm ./nextjs-test-result-junit.xml "test/test-junit-report/**/*.xml"
- DD_ENV=ci datadog-ci junit upload --tags test.type:nextjs_deploy_e2e --service nextjs ./nextjs-test-result-junit.xml
-
releaseStats:
name: Release Stats
runs-on:
diff --git a/.github/workflows/test_e2e_deploy.yml b/.github/workflows/test_e2e_deploy.yml
new file mode 100644
index 000000000000..0d86aa03f2f6
--- /dev/null
+++ b/.github/workflows/test_e2e_deploy.yml
@@ -0,0 +1,80 @@
+name: test-e2e-deploy
+
+on:
+ schedule:
+ # run every day at midnight
+ - cron: '0 0 * * *'
+ # allow triggering manually as well
+ workflow_dispatch:
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ env:
+ VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }}
+ VERCEL_TEST_TEAM: vtest314-next-e2e-tests
+ DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }}
+ NAPI_CLI_VERSION: 2.16.2
+ TURBO_VERSION: 1.10.9
+ NODE_MAINTENANCE_VERSION: 16
+ NODE_LTS_VERSION: 18
+ CARGO_PROFILE_RELEASE_LTO: 'true'
+ TURBO_TEAM: 'vercel'
+ TURBO_REMOTE_ONLY: 'true'
+ TEST_TIMINGS_TOKEN: ${{ secrets.TEST_TIMINGS_TOKEN }}
+ NEXT_TELEMETRY_DISABLED: 1
+ # we build a dev binary for use in CI so skip downloading
+ # canary next-swc binaries in the monorepo
+ NEXT_SKIP_NATIVE_POSTINSTALL: 1
+
+ strategy:
+ fail-fast: false
+ matrix:
+ group: [1, 2]
+
+ steps:
+ - name: Setup node
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ env.NODE_LTS_VERSION }}
+ check-latest: true
+ - run: corepack enable
+
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 25
+
+ - run: pnpm install
+
+ - run: pnpm run build
+
+ - run: npm i -g vercel@latest
+
+ - uses: actions/download-artifact@v3
+ with:
+ name: next-swc-binaries
+ path: packages/next-swc/native
+
+ - run: RESET_VC_PROJECT=true node scripts/reset-vercel-project.mjs
+ name: Reset test project
+
+ - run: docker run --rm -v $(pwd):/work mcr.microsoft.com/playwright:v1.35.1-jammy /bin/bash -c "cd /work && NODE_VERSION=${{ env.NODE_LTS_VERSION }} ./scripts/setup-node.sh && corepack enable > /dev/null && DATADOG_TRACE_NEXTJS_TEST=TRUE DATADOG_API_KEY=${DATADOG_API_KEY} DD_ENV=ci VERCEL_TEST_TOKEN=${{ secrets.VERCEL_TEST_TOKEN }} VERCEL_TEST_TEAM=vtest314-next-e2e-tests NEXT_TEST_JOB=1 NEXT_TEST_MODE=deploy TEST_TIMINGS_TOKEN=${{ secrets.TEST_TIMINGS_TOKEN }} NEXT_TEST_CONTINUE_ON_ERROR=1 xvfb-run node run-tests.js --type e2e --timings -g ${{ matrix.group }}/2 -c 2 >> /proc/1/fd/1"
+ name: Run test/e2e (deploy)
+
+ - name: Upload test trace
+ if: always()
+ uses: actions/upload-artifact@v3
+ with:
+ name: test-trace
+ if-no-files-found: ignore
+ retention-days: 2
+ path: |
+ test/traces
+
+ - name: Upload test trace to datadog
+ continue-on-error: true
+ run: |
+ ls -al ./test
+ npm install -g junit-report-merger@6.0.2 @datadog/datadog-ci@2.14.0 @aws-sdk/property-provider@3
+ jrm ./nextjs-test-result-junit.xml "test/test-junit-report/**/*.xml"
+ DD_ENV=ci datadog-ci junit upload --tags test.type:nextjs_deploy_e2e --service nextjs ./nextjs-test-result-junit.xml
diff --git a/.github/workflows/trigger_release.yml b/.github/workflows/trigger_release.yml
index b79df5057f19..6faec5b1c37e 100644
--- a/.github/workflows/trigger_release.yml
+++ b/.github/workflows/trigger_release.yml
@@ -1,4 +1,8 @@
on:
+ schedule:
+ # run every day at 23:15
+ - cron: '15 23 * * *'
+
workflow_dispatch:
inputs:
releaseType:
@@ -38,7 +42,7 @@ jobs:
# canary next-swc binaries in the monorepo
NEXT_SKIP_NATIVE_POSTINSTALL: 1
- environment: release-${{ github.event.inputs.releaseType }}
+ environment: release-${{ github.event.inputs.releaseType || 'canary' }}
steps:
- name: Setup node
uses: actions/setup-node@v3
@@ -73,6 +77,6 @@ jobs:
- run: pnpm run build
- - run: node ./scripts/start-release.js --release-type ${{ github.event.inputs.releaseType }} --semver-type ${{ github.event.inputs.semverType }}
+ - run: node ./scripts/start-release.js --release-type ${{ github.event.inputs.releaseType || 'canary' }} --semver-type ${{ github.event.inputs.semverType }}
env:
RELEASE_BOT_GITHUB_TOKEN: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }}
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 4c5c09be61c4..21603e2d9723 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -111,76 +111,76 @@ stages:
env:
NEXT_TEST_MODE: 'dev'
- # - job: test_e2e_dev
- # pool:
- # vmImage: 'windows-2019'
- # steps:
- # - task: NodeTool@0
- # inputs:
- # versionSpec: $(node_16_version)
- # displayName: 'Install Node.js'
-
- # - bash: |
- # node scripts/run-for-change.js --not --type docs --exec echo "##vso[task.setvariable variable=isDocsOnly]No"
- # displayName: 'Check Docs Only Change'
-
- # - script: corepack enable
- # condition: eq(variables['isDocsOnly'], 'No')
- # displayName: 'Enable Corepack'
-
- # - script: pnpm config set store-dir $(PNPM_CACHE_FOLDER)
- # condition: eq(variables['isDocsOnly'], 'No')
-
- # - script: pnpm store path
- # condition: eq(variables['isDocsOnly'], 'No')
-
- # - script: pnpm install && pnpm run build
- # condition: eq(variables['isDocsOnly'], 'No')
- # displayName: 'Install and build'
-
- # - script: npx playwright@1.35.1 install chromium
- # condition: eq(variables['isDocsOnly'], 'No')
-
- # - script: |
- # node run-tests.js -c 1 --debug test/e2e/app-dir/app/index.test.ts
- # condition: eq(variables['isDocsOnly'], 'No')
- # displayName: 'Run tests (E2E Development)'
- # env:
- # NEXT_TEST_MODE: 'dev'
-
- # - job: test_e2e_prod
- # pool:
- # vmImage: 'windows-2019'
- # steps:
- # - task: NodeTool@0
- # inputs:
- # versionSpec: $(node_16_version)
- # displayName: 'Install Node.js'
-
- # - bash: |
- # node scripts/run-for-change.js --not --type docs --exec echo "##vso[task.setvariable variable=isDocsOnly]No"
- # displayName: 'Check Docs Only Change'
-
- # - script: corepack enable
- # condition: eq(variables['isDocsOnly'], 'No')
- # displayName: 'Enable Corepack'
-
- # - script: pnpm config set store-dir $(PNPM_CACHE_FOLDER)
- # condition: eq(variables['isDocsOnly'], 'No')
-
- # - script: pnpm store path
- # condition: eq(variables['isDocsOnly'], 'No')
-
- # - script: pnpm install && pnpm run build
- # condition: eq(variables['isDocsOnly'], 'No')
- # displayName: 'Install and build'
-
- # - script: npx playwright@1.35.1 install chromium
- # condition: eq(variables['isDocsOnly'], 'No')
-
- # - script: |
- # node run-tests.js -c 1 --debug test/e2e/app-dir/app/index.test.ts
- # condition: eq(variables['isDocsOnly'], 'No')
- # displayName: 'Run tests (E2E Production)'
- # env:
- # NEXT_TEST_MODE: 'start'
+ - job: test_e2e_dev
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: NodeTool@0
+ inputs:
+ versionSpec: $(node_16_version)
+ displayName: 'Install Node.js'
+
+ - bash: |
+ node scripts/run-for-change.js --not --type docs --exec echo "##vso[task.setvariable variable=isDocsOnly]No"
+ displayName: 'Check Docs Only Change'
+
+ - script: corepack enable
+ condition: eq(variables['isDocsOnly'], 'No')
+ displayName: 'Enable Corepack'
+
+ - script: pnpm config set store-dir $(PNPM_CACHE_FOLDER)
+ condition: eq(variables['isDocsOnly'], 'No')
+
+ - script: pnpm store path
+ condition: eq(variables['isDocsOnly'], 'No')
+
+ - script: pnpm install && pnpm run build
+ condition: eq(variables['isDocsOnly'], 'No')
+ displayName: 'Install and build'
+
+ - script: npx playwright@1.35.1 install chromium
+ condition: eq(variables['isDocsOnly'], 'No')
+
+ - script: |
+ node run-tests.js -c 1 --debug test/e2e/app-dir/app/index.test.ts test/e2e/app-dir/app-edge/app-edge.test.ts
+ condition: eq(variables['isDocsOnly'], 'No')
+ displayName: 'Run tests (E2E Development)'
+ env:
+ NEXT_TEST_MODE: 'dev'
+
+ - job: test_e2e_prod
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: NodeTool@0
+ inputs:
+ versionSpec: $(node_16_version)
+ displayName: 'Install Node.js'
+
+ - bash: |
+ node scripts/run-for-change.js --not --type docs --exec echo "##vso[task.setvariable variable=isDocsOnly]No"
+ displayName: 'Check Docs Only Change'
+
+ - script: corepack enable
+ condition: eq(variables['isDocsOnly'], 'No')
+ displayName: 'Enable Corepack'
+
+ - script: pnpm config set store-dir $(PNPM_CACHE_FOLDER)
+ condition: eq(variables['isDocsOnly'], 'No')
+
+ - script: pnpm store path
+ condition: eq(variables['isDocsOnly'], 'No')
+
+ - script: pnpm install && pnpm run build
+ condition: eq(variables['isDocsOnly'], 'No')
+ displayName: 'Install and build'
+
+ - script: npx playwright@1.35.1 install chromium
+ condition: eq(variables['isDocsOnly'], 'No')
+
+ - script: |
+ node run-tests.js -c 1 --debug test/e2e/app-dir/app/index.test.ts test/e2e/app-dir/app-edge/app-edge.test.ts
+ condition: eq(variables['isDocsOnly'], 'No')
+ displayName: 'Run tests (E2E Production)'
+ env:
+ NEXT_TEST_MODE: 'start'
diff --git a/contributing/repository/linting.md b/contributing/repository/linting.md
index 513797ce7e28..30535fdd6d0c 100644
--- a/contributing/repository/linting.md
+++ b/contributing/repository/linting.md
@@ -14,7 +14,7 @@ If you get errors, you can run the ESLint and Prettier auto-fix using:
pnpm lint-fix
```
-Not all rules can be auto-fixed, those require manual changes.
+Not all rules can be auto-fixed, some require manual changes.
If you get a warning by alex, follow the instructions to correct the language.
diff --git a/docs/02-app/02-api-reference/05-next-config-js/optimizePackageImports.mdx b/docs/02-app/02-api-reference/05-next-config-js/optimizePackageImports.mdx
index 2d1a5c1f538c..7f33802a0c77 100644
--- a/docs/02-app/02-api-reference/05-next-config-js/optimizePackageImports.mdx
+++ b/docs/02-app/02-api-reference/05-next-config-js/optimizePackageImports.mdx
@@ -1,6 +1,6 @@
---
title: optimizePackageImports
-description:
+description: API Reference for the optimizePackageImports Next.js Config Option
---
{/* The content of this doc is shared between the app and pages router. You can use the `Content` component to add content that is specific to the Pages Router. Any shared content should not be wrapped in a component. */}
diff --git a/docs/03-pages/01-building-your-application/01-routing/02-dynamic-routes.mdx b/docs/03-pages/01-building-your-application/01-routing/02-dynamic-routes.mdx
index 8dd5ac3471b2..2216b0bc6912 100644
--- a/docs/03-pages/01-building-your-application/01-routing/02-dynamic-routes.mdx
+++ b/docs/03-pages/01-building-your-application/01-routing/02-dynamic-routes.mdx
@@ -13,7 +13,7 @@ When you don't know the exact segment names ahead of time and want to create rou
## Convention
-A Dynamic Segment can be created by wrapping a folder's name in square brackets: `[folderName]`. For example, `[id]` or `[slug]`.
+A Dynamic Segment can be created by wrapping a file or folder name in square brackets: `[segmentName]`. For example, `[id]` or `[slug]`.
Dynamic Segments can be accessed from [`useRouter`](/docs/pages/api-reference/functions/use-router).
@@ -38,7 +38,7 @@ export default function Page() {
## Catch-all Segments
-Dynamic Segments can be extended to **catch-all** subsequent segments by adding an ellipsis inside the brackets `[...folderName]`.
+Dynamic Segments can be extended to **catch-all** subsequent segments by adding an ellipsis inside the brackets `[...segmentName]`.
For example, `pages/shop/[...slug].js` will match `/shop/clothes`, but also `/shop/clothes/tops`, `/shop/clothes/tops/t-shirts`, and so on.
@@ -50,7 +50,7 @@ For example, `pages/shop/[...slug].js` will match `/shop/clothes`, but also `/sh
## Optional Catch-all Segments
-Catch-all Segments can be made **optional** by including the parameter in double square brackets: `[[...folderName]]`.
+Catch-all Segments can be made **optional** by including the parameter in double square brackets: `[[...segmentName]]`.
For example, `pages/shop/[[...slug]].js` will **also** match `/shop`, in addition to `/shop/clothes`, `/shop/clothes/tops`, `/shop/clothes/tops/t-shirts`.
diff --git a/docs/03-pages/01-building-your-application/01-routing/07-api-routes.mdx b/docs/03-pages/01-building-your-application/01-routing/07-api-routes.mdx
index 5804e6d3cdfd..5c2b728845b1 100644
--- a/docs/03-pages/01-building-your-application/01-routing/07-api-routes.mdx
+++ b/docs/03-pages/01-building-your-application/01-routing/07-api-routes.mdx
@@ -217,7 +217,10 @@ The following example sends a JSON response with the status code `200` (`OK`) an
```ts filename="pages/api/hello.ts" switcher
import type { NextApiRequest, NextApiResponse } from 'next'
-export default function handler(req: NextApiRequest, res: NextApiResponse) {
+export default async function handler(
+ req: NextApiRequest,
+ res: NextApiResponse
+) {
try {
const result = await someAsyncOperation()
res.status(200).json({ result })
@@ -228,7 +231,7 @@ export default function handler(req: NextApiRequest, res: NextApiResponse) {
```
```js filename="pages/api/hello.js" switcher
-export default function handler(req, res) {
+export default async function handler(req, res) {
try {
const result = await someAsyncOperation()
res.status(200).json({ result })
@@ -247,7 +250,10 @@ The following example sends a HTTP response with the status code `200` (`OK`) an
```ts filename="pages/api/hello.ts" switcher
import type { NextApiRequest, NextApiResponse } from 'next'
-export default function handler(req: NextApiRequest, res: NextApiResponse) {
+export default async function handler(
+ req: NextApiRequest,
+ res: NextApiResponse
+) {
try {
const result = await someAsyncOperation()
res.status(200).send({ result })
@@ -258,7 +264,7 @@ export default function handler(req: NextApiRequest, res: NextApiResponse) {
```
```js filename="pages/api/hello.js" switcher
-export default function handler(req, res) {
+export default async function handler(req, res) {
try {
const result = await someAsyncOperation()
res.status(200).send({ result })
@@ -277,7 +283,10 @@ The following example redirects the client to the `/` path if the form is succes
```ts filename="pages/api/hello.ts" switcher
import type { NextApiRequest, NextApiResponse } from 'next'
-export default function handler(req: NextApiRequest, res: NextApiResponse) {
+export default async function handler(
+ req: NextApiRequest,
+ res: NextApiResponse
+) {
const { name, message } = req.body
try {
@@ -290,7 +299,7 @@ export default function handler(req: NextApiRequest, res: NextApiResponse) {
```
```js filename="pages/api/hello.js" switcher
-export default function handler(req, res) {
+export default async function handler(req, res) {
const { name, message } = req.body
try {
diff --git a/docs/03-pages/01-building-your-application/03-data-fetching/02-get-static-paths.mdx b/docs/03-pages/01-building-your-application/03-data-fetching/02-get-static-paths.mdx
index 4d00624c3f98..77e74f5ead92 100644
--- a/docs/03-pages/01-building-your-application/03-data-fetching/02-get-static-paths.mdx
+++ b/docs/03-pages/01-building-your-application/03-data-fetching/02-get-static-paths.mdx
@@ -1,6 +1,6 @@
---
title: getStaticPaths
-description: Fetch data and generate static pages with `getStaticProps`. Learn more about this API for data fetching in Next.js.
+description: Fetch data and generate static pages with `getStaticPaths`. Learn more about this API for data fetching in Next.js.
---
If a page has [Dynamic Routes](/docs/pages/building-your-application/routing/dynamic-routes) and uses `getStaticProps`, it needs to define a list of paths to be statically generated.
diff --git a/docs/05-community/01-contribution-guide.mdx b/docs/05-community/01-contribution-guide.mdx
index 9e51bb61be50..199a84797057 100644
--- a/docs/05-community/01-contribution-guide.mdx
+++ b/docs/05-community/01-contribution-guide.mdx
@@ -111,7 +111,7 @@ The following fields are **required**:
```yaml filename="required-fields.mdx"
---
-tile: Page Title
+title: Page Title
description: Page Description
---
```
diff --git a/errors/large-page-data.mdx b/errors/large-page-data.mdx
index aec886bdfe16..a3c91e075859 100644
--- a/errors/large-page-data.mdx
+++ b/errors/large-page-data.mdx
@@ -13,7 +13,7 @@ Reduce the amount of data returned from `getStaticProps`, `getServerSideProps`,
To inspect the props passed to your page, you can inspect the below element's content in your browser devtools:
```bash filename="Terminal"
-document.getElementById("__NEXT_DATA__").text
+JSON.parse(document.getElementById("__NEXT_DATA__").textContent)
```
## Useful Links
diff --git a/examples/with-fauna/README.md b/examples/with-fauna/README.md
index 0bb3a8b157e4..651367bfe666 100644
--- a/examples/with-fauna/README.md
+++ b/examples/with-fauna/README.md
@@ -1,6 +1,6 @@
-# Fauna GraphQL Guestbook Starter
+# Fauna Guestbook Starter
-This Guestbook Single-Page Application (SPA) example shows you how to use [Fauna's GraphQL endpoint](https://docs.fauna.com/fauna/current/api/graphql/) in your Next.js project.
+This Guestbook Application example shows you how to use [Fauna](https://docs.fauna.com/) in your Next.js project.
## Deploy your own
@@ -8,10 +8,6 @@ Deploy the example using [Vercel](https://vercel.com?utm_source=github&utm_mediu
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https://github.com/vercel/next.js/tree/canary/examples/with-fauna&project-name=fauna-nextjs-guestbook&repository-name=fauna-nextjs-guestbook&demo-title=Next.js%20Fauna%20Guestbook%20App&demo-description=A%20simple%20guestbook%20application%20built%20with%20Next.js%20and%20Fauna&integration-ids=oac_Erlbqm8Teb1y4WhioE3r2utY)
-## Why Fauna
-
-By importing a `.gql` or `.graphql` schema into Fauna ([see our sample schema file](./schema.gql)), Fauna will generate required Indexes and GraphQL resolvers for you -- hands free 👐 ([some limitations exist](https://docs.fauna.com/fauna/current/api/graphql/#limitations)).
-
## How to use
Execute [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app) with [npm](https://docs.npmjs.com/cli/init), [Yarn](https://yarnpkg.com/lang/en/docs/cli/create/), or [pnpm](https://pnpm.io) to bootstrap the example:
@@ -26,17 +22,12 @@ pnpm create next-app --example with-fauna with-fauna-app
You can start with this template [using `create-next-app`](#using-create-next-app) or by [downloading the repository manually](#download-manually).
-To use a live Fauna database, create a database at [dashboard.fauna.com](https://dashboard.fauna.com/) and generate an admin token by going to the **Security** tab on the left and then click **New Key**. Give the new key a name and select the 'Admin' Role. Copy the token since the setup script will ask for it. Do not use it in the frontend, it has superpowers which you don't want to give to your users.
-
-### Setting Up Your Schema
-
-The Next.js and Fauna example includes a setup script (`npm run setup`). After providing your admin token, the script will:
+### Setting Up Your Fauna Database
-- **Import your GraphQL schema:** Fauna automatically sets up collections and indexes to support your queries. You can view these in your [project dashboard](https://dashboard.fauna.com/) under **GraphQL**.
-- **Create an index and function:** The script will create a GraphQL resolver that uses [User-defined functions](https://docs.fauna.com/fauna/current/api/graphql/functions?lang=javascript) based on a sorting index.
-- **Create a scoped token:** This token is for use on the client side. The admin key can be used on the server side.
+Head over to [Fauna Dashboard](https://dashboard.fauna.com/) and create a new database. You can name it whatever you want, but for this example, we'll use `nextjs-guestbook`. Next, create a new collection called `Entry` in your new database.
+Finally, create a new database access key that you will use to connect to your database.
-After the script completes, a `.env.local` [file](https://nextjs.org/docs/basic-features/environment-variables) will be created for you with the newly generated client token assigned to an Environment Variable.
+Watch [this video](https://www.youtube.com/watch?v=8YJcG2fUPyE&t=43s&ab_channel=FaunaInc.) to learn how to connect to your database.
### Run locally
diff --git a/examples/with-fauna/actions/entry.ts b/examples/with-fauna/actions/entry.ts
new file mode 100644
index 000000000000..3f69f7284de0
--- /dev/null
+++ b/examples/with-fauna/actions/entry.ts
@@ -0,0 +1,22 @@
+'use server'
+
+import { revalidatePath } from 'next/cache'
+import { createEntry } from '@/lib/fauna'
+
+export async function createEntryAction(prevState: any, formData: FormData) {
+ const name = formData.get('name') as string
+ const message = formData.get('message') as string
+ try {
+ await createEntry(name, message)
+ revalidatePath('/')
+ return {
+ successMessage: 'Thank you for signing the guest book',
+ errorMessage: null,
+ }
+ } catch (error) {
+ return {
+ successMessage: null,
+ errorMessage: 'Something went wrong. Please try again',
+ }
+ }
+}
diff --git a/examples/with-fauna/app/globals.css b/examples/with-fauna/app/globals.css
new file mode 100644
index 000000000000..b5c61c956711
--- /dev/null
+++ b/examples/with-fauna/app/globals.css
@@ -0,0 +1,3 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
diff --git a/examples/with-fauna/app/guestbook-page.tsx b/examples/with-fauna/app/guestbook-page.tsx
new file mode 100644
index 000000000000..553a61489a49
--- /dev/null
+++ b/examples/with-fauna/app/guestbook-page.tsx
@@ -0,0 +1,52 @@
+import cn from 'classnames'
+import formatDate from 'date-fns/format'
+import EntryForm from '@/components/EntryForm'
+import { EntryType } from './page'
+
+const EntryItem = ({ entry }: { entry: EntryType }) => (
+
+
{entry.message}
+
+
{entry.name}
+
/
+
+ {formatDate(
+ new Date(entry.createdAt.isoString),
+ "d MMM yyyy 'at' h:mm bb"
+ )}
+
+
+
+)
+
+export default async function GuestbookPage({
+ entries,
+}: {
+ entries: EntryType[]
+}) {
+ return (
+
+
+
+ Sign the Guestbook
+
+
+ Share a message for a future visitor.
+
+
+
+
+
+ {entries?.map((entry) => (
+
+ ))}
+
+
+ )
+}
diff --git a/examples/with-fauna/app/layout.tsx b/examples/with-fauna/app/layout.tsx
new file mode 100644
index 000000000000..afbaec717032
--- /dev/null
+++ b/examples/with-fauna/app/layout.tsx
@@ -0,0 +1,21 @@
+import './globals.css'
+
+export const metadata: {
+ title: string
+ description: string
+} = {
+ title: 'Next.js + Fauna example',
+ description: 'Generated by Next.js',
+}
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode
+}) {
+ return (
+
+ {children}
+
+ )
+}
diff --git a/examples/with-fauna/app/page.tsx b/examples/with-fauna/app/page.tsx
new file mode 100644
index 000000000000..8d7292c15598
--- /dev/null
+++ b/examples/with-fauna/app/page.tsx
@@ -0,0 +1,16 @@
+import { getAllEntries } from '@/lib/fauna'
+import GuestbookPage from './guestbook-page'
+
+export type EntryType = {
+ id: string
+ name: string
+ message: string
+ createdAt: {
+ isoString: string
+ }
+}
+
+export default async function Page() {
+ const entries = (await getAllEntries()) as EntryType[]
+ return
+}
diff --git a/examples/with-fauna/components/EntryForm.tsx b/examples/with-fauna/components/EntryForm.tsx
new file mode 100644
index 000000000000..71d867590596
--- /dev/null
+++ b/examples/with-fauna/components/EntryForm.tsx
@@ -0,0 +1,65 @@
+'use client'
+
+import cn from 'classnames'
+import { createEntryAction } from '@/actions/entry'
+// @ts-ignore
+import { experimental_useFormState as useFormState } from 'react-dom'
+import { experimental_useFormStatus as useFormStatus } from 'react-dom'
+import LoadingSpinner from '@/components/LoadingSpinner'
+import SuccessMessage from '@/components/SuccessMessage'
+import ErrorMessage from '@/components/ErrorMessage'
+
+const inputClasses = cn(
+ 'block py-2 bg-white dark:bg-gray-800',
+ 'rounded-md border-gray-300 focus:ring-blue-500',
+ 'focus:border-blue-500 text-gray-900 dark:text-gray-100'
+)
+
+const initialState = {
+ successMessage: null,
+ errorMessage: null,
+}
+
+export default function EntryForm() {
+ const [state, formAction] = useFormState(createEntryAction, initialState)
+ const { pending } = useFormStatus()
+
+ return (
+ <>
+
+ {state?.successMessage ? (
+ {state.successMessage}
+ ) : null}
+ {state?.errorMessage ? (
+ {state.errorMessage}
+ ) : null}
+ >
+ )
+}
diff --git a/examples/with-fauna/components/ErrorMessage.js b/examples/with-fauna/components/ErrorMessage.tsx
similarity index 85%
rename from examples/with-fauna/components/ErrorMessage.js
rename to examples/with-fauna/components/ErrorMessage.tsx
index 4367acf1e085..e782d4cf2883 100644
--- a/examples/with-fauna/components/ErrorMessage.js
+++ b/examples/with-fauna/components/ErrorMessage.tsx
@@ -1,4 +1,8 @@
-export default function ErrorMessage({ children }) {
+export default function ErrorMessage({
+ children,
+}: {
+ children: React.ReactNode
+}) {
return (