diff --git a/.eslintrc.js b/.eslintrc.js index 7c939124ee267e..787da541c3aed0 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -35,6 +35,9 @@ Module._findPath = (request, paths, isMain) => { module.exports = { root: true, + env: { + es2022: true, + }, extends: ['eslint:recommended', 'plugin:jsdoc/recommended'], plugins: ['jsdoc', 'markdown', 'node-core'], parser: '@babel/eslint-parser', @@ -316,23 +319,11 @@ module.exports = { 'node-core/no-duplicate-requires': 'error', }, globals: { - AbortController: 'readable', - AbortSignal: 'readable', - Atomics: 'readable', - BigInt: 'readable', - BigInt64Array: 'readable', - BigUint64Array: 'readable', - Event: 'readable', - EventTarget: 'readable', - MessageChannel: 'readable', - MessageEvent: 'readable', - MessagePort: 'readable', - TextEncoder: 'readable', - TextDecoder: 'readable', - queueMicrotask: 'readable', - globalThis: 'readable', - btoa: 'readable', - atob: 'readable', - performance: 'readable', + Crypto: 'readable', + CryptoKey: 'readable', + fetch: 'readable', + FormData: 'readable', + Response: 'readable', + SubtleCrypto: 'readable', }, }; diff --git a/.github/ISSUE_TEMPLATE/1-bug-report.yml b/.github/ISSUE_TEMPLATE/1-bug-report.yml index 07a8f075ee55c0..3edac8d807d905 100644 --- a/.github/ISSUE_TEMPLATE/1-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/1-bug-report.yml @@ -1,4 +1,4 @@ -name: "\U0001F41B Bug report" +name: 🐛 Bug report description: Create a report to help us improve body: - type: markdown diff --git a/.github/ISSUE_TEMPLATE/2-feature-request.yml b/.github/ISSUE_TEMPLATE/2-feature-request.yml index eca3e6fa48e6fc..26a77a3617cbeb 100644 --- a/.github/ISSUE_TEMPLATE/2-feature-request.yml +++ b/.github/ISSUE_TEMPLATE/2-feature-request.yml @@ -1,6 +1,6 @@ -name: "\U0001F680 Feature request" +name: 🚀 Feature request description: Suggest an idea for this project -labels: ["feature request"] +labels: [feature request] body: - type: markdown attributes: @@ -10,7 +10,7 @@ body: Please fill in as much of the following form as you're able. For more information on how the project manages feature - requests, see [Feature request management](https://github.com/nodejs/node/blob/HEAD/doc/guides/feature-request-management.md). + requests, see [Feature request management](https://github.com/nodejs/node/blob/HEAD/doc/contributing/feature-request-management.md). - type: textarea attributes: label: What is the problem this feature will solve? 
diff --git a/.github/ISSUE_TEMPLATE/3-api-ref-docs-problem.yml b/.github/ISSUE_TEMPLATE/3-api-ref-docs-problem.yml index 9b3bdc35197f24..753e2d9e58a9d5 100644 --- a/.github/ISSUE_TEMPLATE/3-api-ref-docs-problem.yml +++ b/.github/ISSUE_TEMPLATE/3-api-ref-docs-problem.yml @@ -1,6 +1,6 @@ -name: "\U0001F4D7 Open an issue regarding the Node.js API reference docs" +name: 📗 Open an issue regarding the Node.js API reference docs description: Let us know about any problematic API reference documents -labels: ["doc"] +labels: [doc] body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/4-report-a-flaky-test.yml b/.github/ISSUE_TEMPLATE/4-report-a-flaky-test.yml index c557accfb15b92..dd6fa5091e3f15 100644 --- a/.github/ISSUE_TEMPLATE/4-report-a-flaky-test.yml +++ b/.github/ISSUE_TEMPLATE/4-report-a-flaky-test.yml @@ -1,6 +1,6 @@ name: Report a flaky test description: Report a flaky test in our CI -labels: ["flaky-test"] +labels: [flaky-test] body: - type: markdown attributes: diff --git a/.github/label-pr-config.yml b/.github/label-pr-config.yml index 93aeaab0257369..a8d22980f6db18 100644 --- a/.github/label-pr-config.yml +++ b/.github/label-pr-config.yml @@ -85,7 +85,7 @@ subSystemLabels: /^deps\/nghttp2\//: http2 /^deps\/ngtcp2\//: quic, dont-land-on-v14.x, dont-land-on-v12.x /^deps\/nghttp3\//: quic, dont-land-on-v14.x, dont-land-on-v12.x - /^deps\/([^/]+)/: $1 + /^deps\/([^/]+)/: dependencies, $1 ## JS subsystems # Oddities first diff --git a/.github/workflows/authors.yml b/.github/workflows/authors.yml index 0a14c94f1d7e74..75fec53a549438 100644 --- a/.github/workflows/authors.yml +++ b/.github/workflows/authors.yml @@ -1,8 +1,8 @@ -name: "authors update" +name: Authors update on: schedule: # Run once a week at 00:05 AM UTC on Sunday. - - cron: '5 0 * * 0' + - cron: 5 0 * * 0 workflow_dispatch: @@ -15,7 +15,7 @@ jobs: with: fetch-depth: '0' # This is required to actually get all the authors persist-credentials: false - - run: "tools/update-authors.js" # Run the AUTHORS tool + - run: tools/update-authors.js # Run the AUTHORS tool - uses: gr2m/create-or-update-pull-request-action@v1 # Create a PR or update the Action's existing PR env: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} @@ -25,7 +25,7 @@ jobs: Here are some new additions to the AUTHORS file. This is an automatically generated PR by the `authors.yml` GitHub Action, which runs `tools/update-authors.js`. - branch: "actions/authors-update" # Custom branch *just* for this Action. - commit-message: "meta: update AUTHORS" + branch: actions/authors-update # Custom branch *just* for this Action. + commit-message: 'meta: update AUTHORS' labels: meta - title: "meta: update AUTHORS" + title: 'meta: update AUTHORS' diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml index 632fdc0a970ceb..2416561ab7ebf3 100644 --- a/.github/workflows/auto-start-ci.yml +++ b/.github/workflows/auto-start-ci.yml @@ -6,13 +6,15 @@ on: # optimistic, it can take longer to run. 
# To understand why `schedule` is used instead of other events, refer to # ./doc/contributing/commit-queue.md - - cron: "*/5 * * * *" + - cron: '*/5 * * * *' + +concurrency: ${{ github.workflow }} env: NODE_VERSION: lts/* jobs: - get_prs_for_ci: + get-prs-for-ci: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest outputs: @@ -29,20 +31,20 @@ jobs: --limit 100 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - startCI: - needs: get_prs_for_ci - if: needs.get_prs_for_ci.outputs.numbers != '' + start-ci: + needs: get-prs-for-ci + if: needs.get-prs-for-ci.outputs.numbers != '' runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: persist-credentials: false - # Install dependencies - name: Install Node.js uses: actions/setup-node@v2 with: node-version: ${{ env.NODE_VERSION }} + - name: Install node-core-utils run: npm install -g node-core-utils @@ -55,6 +57,6 @@ jobs: ncu-config set repo "$(echo ${{ github.repository }} | cut -d/ -f2)" - name: Start the CI - run: ./tools/actions/start-ci.sh ${{ needs.get_prs_for_ci.outputs.numbers }} + run: ./tools/actions/start-ci.sh ${{ needs.get-prs-for-ci.outputs.numbers }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index ebf6458889410f..38a8922c093a99 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -4,10 +4,10 @@ on: pull_request: types: [opened, synchronize, reopened, ready_for_review] paths-ignore: - - '.mailmap' + - .mailmap - '**.md' - - 'AUTHORS' - - 'doc/**' + - AUTHORS + - doc/** - .github/** - '!.github/workflows/build-tarball.yml' push: @@ -17,21 +17,24 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: - - '.mailmap' + - .mailmap - '**.md' - - 'AUTHORS' - - 'doc/**' + - AUTHORS + - doc/** - .github/** - '!.github/workflows/build-tarball.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: + PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare jobs: build-tarball: if: github.event.pull_request.draft == false - env: - PYTHON_VERSION: '3.10' runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -57,8 +60,6 @@ jobs: name: tarballs path: tarballs test-tarball-linux: - env: - PYTHON_VERSION: '3.10' needs: build-tarball runs-on: ubuntu-latest steps: diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 3a886988d42b38..2312d10d33faee 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -1,9 +1,9 @@ -name: build-windows +name: Build Windows on: pull_request: paths-ignore: - - "README.md" + - README.md - .github/** - '!.github/workflows/build-windows.yml' types: [opened, synchronize, reopened, ready_for_review] @@ -15,10 +15,14 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: - - "README.md" + - README.md - .github/** - '!.github/workflows/build-windows.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare diff --git a/.github/workflows/close-stale-feature-requests.yml b/.github/workflows/close-stale-feature-requests.yml new file mode 100644 index 00000000000000..0b482ed8ae065f --- /dev/null +++ b/.github/workflows/close-stale-feature-requests.yml @@ -0,0 +1,51 @@ +name: Close stale feature requests +on: + workflow_dispatch: + inputs: + daysBeforeStale: + description: Idle number of days before marking feature requests stale + 
required: true + default: 906 + type: number + +# yamllint disable rule:empty-lines +env: + CLOSE_MESSAGE: > + There has been no activity on this feature request + and it is being closed. If you feel closing this issue is not the + right thing to do, please leave a comment. + + + For more information on how the project manages + feature requests, please consult the + [feature request management document](https://github.com/nodejs/node/blob/HEAD/doc/contributing/feature-request-management.md). + + WARN_MESSAGE: > + There has been no activity on this feature request for + 5 months and it is unlikely to be implemented. + It will be closed 6 months after the last non-automated comment. + + + For more information on how the project manages + feature requests, please consult the + [feature request management document](https://github.com/nodejs/node/blob/HEAD/doc/contributing/feature-request-management.md). +# yamllint enable + +jobs: + stale: + if: github.repository == 'nodejs/node' + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v4 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + days-before-stale: ${{ github.event.inputs.daysBeforeStale }} + days-before-close: 30 + stale-issue-label: stale + close-issue-message: ${{ env.CLOSE_MESSAGE }} + stale-issue-message: ${{ env.WARN_MESSAGE }} + only-labels: feature request + exempt-pr-labels: never-stale + # max requests it will send per run to the GitHub API before it deliberately exits to avoid hitting API rate limits + operations-per-run: 500 + remove-stale-when-updated: true diff --git a/.github/workflows/close-stalled.yml b/.github/workflows/close-stalled.yml index 9a3dad5621b971..347b22bd70caf3 100644 --- a/.github/workflows/close-stalled.yml +++ b/.github/workflows/close-stalled.yml @@ -1,7 +1,7 @@ name: Close stalled issues and PRs on: schedule: - - cron: "0 0 * * *" + - cron: 0 0 * * * env: CLOSE_MESSAGE: > diff --git a/.github/workflows/comment-labeled.yml b/.github/workflows/comment-labeled.yml index 2b70802d9adbf4..5c529000a63fb2 100644 --- a/.github/workflows/comment-labeled.yml +++ b/.github/workflows/comment-labeled.yml @@ -1,4 +1,4 @@ -name: Comment on issues and PRs when labelled +name: Comment on issues and PRs when labeled on: issues: types: [labeled] @@ -12,7 +12,7 @@ env: FAST_TRACK_MESSAGE: Fast-track has been requested by @${{ github.actor }}. Please 👍 to approve. 
jobs: - staleComment: + stale-comment: if: github.repository == 'nodejs/node' && github.event.label.name == 'stalled' runs-on: ubuntu-latest steps: @@ -22,7 +22,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: gh issue comment "$NUMBER" --repo ${{ github.repository }} --body "$STALE_MESSAGE" - fastTrack: + fast-track: if: github.repository == 'nodejs/node' && github.event_name == 'pull_request_target' && github.event.label.name == 'fast-track' runs-on: ubuntu-latest steps: diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index 524df2224074fc..d0754d56035e54 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -1,4 +1,4 @@ -name: "First commit message adheres to guidelines at https://goo.gl/p2fr5Q" +name: First commit message adheres to guidelines on: [pull_request] diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml index 509be21815bb06..bbaf67a0e7e779 100644 --- a/.github/workflows/commit-queue.yml +++ b/.github/workflows/commit-queue.yml @@ -11,7 +11,9 @@ on: # be read-only, and the Action won't have access to any other repository # secrets, which it needs to access Jenkins API. schedule: - - cron: "*/5 * * * *" + - cron: '*/5 * * * *' + +concurrency: ${{ github.workflow }} env: NODE_VERSION: lts/* diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index e6200f67e8b74a..65004c243f3dd5 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -1,27 +1,29 @@ -name: coverage-linux +name: Coverage Linux on: pull_request: types: [opened, synchronize, reopened, ready_for_review] paths-ignore: - '**.md' - - 'benchmark/**' - - 'deps/**' - - 'doc/**' + - benchmark/** + - deps/* + - doc/** - .github/** - '!.github/workflows/coverage-linux.yml' push: - branches: - - master - - main + branches: [master, main] paths-ignore: - '**.md' - - 'benchmark/**' - - 'deps/**' - - 'doc/**' + - benchmark/** + - deps/** + - doc/** - .github/** - '!.github/workflows/coverage-linux.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index b89ef0d4eb642a..550b4424e50407 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -1,29 +1,31 @@ -name: coverage-windows +name: Coverage Windows on: pull_request: types: [opened, synchronize, reopened, ready_for_review] paths-ignore: - '**.md' - - 'benchmark/**' - - 'deps/**' - - 'doc/**' - - 'tools/**' + - benchmark/** + - deps/** + - doc/** + - tools/** - .github/** - '!.github/workflows/coverage-windows.yml' push: - branches: - - master - - main + branches: [master, main] paths-ignore: - '**.md' - - 'benchmark/**' - - 'deps/**' - - 'doc/**' - - 'tools/**' + - benchmark/** + - deps/** + - doc/** + - tools/** - .github/** - '!.github/workflows/coverage-windows.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 2f2560dd751c1d..79294ca966ddef 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -3,7 +3,7 @@ name: Node.js daily job on: workflow_dispatch: schedule: - - cron: "0 0 * * *" + - cron: 0 0 * * * env: NODE_VERSION: 
lts/* diff --git a/.github/workflows/misc.yml b/.github/workflows/doc.yml similarity index 84% rename from .github/workflows/misc.yml rename to .github/workflows/doc.yml index 7cfe2fdaa6dbd7..9bb872f35384d4 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/doc.yml @@ -1,4 +1,4 @@ -name: misc +name: Test and upload documentation to artifacts on: pull_request: @@ -10,6 +10,10 @@ on: - v[0-9]+.x-staging - v[0-9]+.x +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: NODE_VERSION: lts/* diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index 643e3ea4a6b4b1..21f0f0297d5f11 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -3,7 +3,7 @@ name: Find inactive collaborators on: schedule: # Run every Monday at 4:05 AM UTC. - - cron: '5 4 * * 1' + - cron: 5 4 * * 1 workflow_dispatch: @@ -37,6 +37,6 @@ jobs: author: Node.js GitHub Bot branch: actions/inactive-collaborators body: This PR was generated by tools/find-inactive-collaborators.yml. - commit-message: "meta: move one or more collaborators to emeritus" + commit-message: 'meta: move one or more collaborators to emeritus' labels: meta - title: "meta: move one or more collaborators to emeritus" + title: 'meta: move one or more collaborators to emeritus' diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index 5d834c8cb25d70..9276de8b6a5c3a 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -3,7 +3,7 @@ name: Find inactive TSC members on: schedule: # Run every Tuesday 12:05 AM UTC. - - cron: '5 0 * * 2' + - cron: 5 0 * * 2 workflow_dispatch: @@ -51,6 +51,6 @@ jobs: @nodejs/tsc ${{ env.INACTIVE_TSC_HANDLES }} ${{ env.DETAILS_FOR_COMMIT_BODY }} - commit-message: "meta: move one or more TSC members to emeritus" + commit-message: 'meta: move one or more TSC members to emeritus' labels: meta - title: "meta: move one or more TSC members to emeritus" + title: 'meta: move one or more TSC members to emeritus' diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index 0f83e16194b37f..6ccb3b2f743ed6 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -1,9 +1,9 @@ -name: license update +name: License update on: schedule: # 00:00:00 every Monday # https://crontab.guru/#0_0_*_*_1 - - cron: "0 0 * * 1" + - cron: 0 0 * * 1 workflow_dispatch: jobs: @@ -14,14 +14,14 @@ jobs: - uses: actions/checkout@v2 with: persist-credentials: false - - run: "./tools/license-builder.sh" # Run the license builder tool + - run: ./tools/license-builder.sh # Run the license builder tool - uses: gr2m/create-or-update-pull-request-action@v1.x # Create a PR or update the Action's existing PR env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: author: Node.js GitHub Bot branch: actions/license-builder - title: "doc: run license-builder" + title: 'doc: run license-builder' body: > License is likely out of date. 
This is an automatically generated PR by the `license-builder.yml` GitHub Action, which runs `license-builder.sh` diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 166846ae3f8705..b8bc5ecb92efbd 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -1,4 +1,4 @@ -name: linters +name: Linters on: pull_request: @@ -10,6 +10,10 @@ on: - v[0-9]+.x-staging - v[0-9]+.x +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' NODE_VERSION: lts/* @@ -121,7 +125,7 @@ jobs: lint-sh: if: github.event.pull_request.draft == false - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: @@ -138,7 +142,7 @@ jobs: persist-credentials: false - uses: mszostok/codeowners-validator@v0.6.0 with: - checks: "files,duppatterns" + checks: files,duppatterns lint-pr-url: if: ${{ github.event.pull_request }} runs-on: ubuntu-latest diff --git a/.github/workflows/notify-force-push.yml b/.github/workflows/notify-force-push.yml index 9c2fc3a6a4e64d..dac2d50ae266f7 100644 --- a/.github/workflows/notify-force-push.yml +++ b/.github/workflows/notify-force-push.yml @@ -8,7 +8,7 @@ name: Notify on Force Push jobs: slackNotification: name: Slack Notification - if: ${{ github.event.forced && github.repository == 'nodejs/node' }} + if: github.repository == 'nodejs/node' && github.event.forced runs-on: ubuntu-latest steps: - name: Slack Notification @@ -16,7 +16,7 @@ jobs: env: SLACK_COLOR: '#DE512A' SLACK_ICON: https://github.com/nodejs.png?size=48 - SLACK_TITLE: '${{ github.actor }} force-pushed to ${{ github.ref }}' + SLACK_TITLE: ${{ github.actor }} force-pushed to ${{ github.ref }} SLACK_MESSAGE: | A commit was force-pushed to by diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index 29956196d0216f..5f2f1c93c3d521 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -1,13 +1,13 @@ -name: test-asan +name: Test ASan on: pull_request: types: [opened, synchronize, reopened, ready_for_review] paths-ignore: - - '.mailmap' + - .mailmap - '**.md' - - 'AUTHORS' - - 'doc/**' + - AUTHORS + - doc/** - .github/** - '!.github/workflows/test-asan.yml' push: @@ -18,13 +18,17 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: - - '.mailmap' + - .mailmap - '**.md' - - 'AUTHORS' - - 'doc/**' + - AUTHORS + - doc/** - .github/** - '!.github/workflows/test-asan.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 94f4ab9086be6d..9b504980301dbc 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -1,4 +1,4 @@ -name: test-internet +name: Test internet on: workflow_dispatch: @@ -7,8 +7,7 @@ on: pull_request: types: [opened, synchronize, reopened, ready_for_review] - paths: - - test/internet/** + paths: [test/internet/**] push: branches: - master @@ -16,8 +15,11 @@ on: - canary - v[0-9]+.x-staging - v[0-9]+.x - paths: - - test/internet/** + paths: [test/internet/**] + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true env: PYTHON_VERSION: '3.10' diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index ba2244aa116bc3..22b5e9a35d3dd9 100644 --- a/.github/workflows/test-linux.yml 
+++ b/.github/workflows/test-linux.yml @@ -1,9 +1,9 @@ -name: test-linux +name: Test Linux on: pull_request: paths-ignore: - - "README.md" + - README.md - .github/** - '!.github/workflows/test-linux.yml' types: [opened, synchronize, reopened, ready_for_review] @@ -15,10 +15,14 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: - - "README.md" + - README.md - .github/** - '!.github/workflows/test-linux.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index bdea886801a784..40885bd9e65c5a 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -1,13 +1,13 @@ -name: test-macOS +name: Test macOS on: pull_request: types: [opened, synchronize, reopened, ready_for_review] paths-ignore: - - '.mailmap' + - .mailmap - '**.md' - - 'AUTHORS' - - 'doc/**' + - AUTHORS + - doc/** - .github/** - '!.github/workflows/test-macos.yml' push: @@ -18,13 +18,17 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: - - '.mailmap' + - .mailmap - '**.md' - - 'AUTHORS' - - 'doc/**' + - AUTHORS + - doc/** - .github/** - '!.github/workflows/test-macos.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: PYTHON_VERSION: '3.10' FLAKY_TESTS: dontcare diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 5750c41b0fc775..a9ae1372e42bd9 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -1,13 +1,13 @@ -name: "tools update" +name: Tools update on: schedule: - # Run once a week at 00:05 AM UTC on Saturday. - - cron: '5 0 * * 6' + # Run once a week at 00:05 AM UTC on Sunday. + - cron: 5 0 * * 0 workflow_dispatch: jobs: - tools_update: + tools-update: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest strategy: @@ -23,7 +23,11 @@ jobs: echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV ./update-eslint.sh fi - - id: "lint-md-dependencies" + - id: corepack + run: | + make corepack-update + echo "NEW_VERSION=$(node deps/corepack/dist/corepack.js --version)" >> $GITHUB_ENV + - id: lint-md-dependencies run: | cd tools/lint-md npm ci @@ -63,8 +67,8 @@ jobs: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} with: author: Node.js GitHub Bot - body: "This is an automated update of ${{ matrix.id }} to ${{ env.NEW_VERSION }}." - branch: "actions/tools-update-${{ matrix.id }}" # Custom branch *just* for this Action. - commit-message: "tools: update ${{ matrix.id }} to ${{ env.NEW_VERSION }}" + body: This is an automated update of ${{ matrix.id }} to ${{ env.NEW_VERSION }}. + branch: actions/tools-update-${{ matrix.id }} # Custom branch *just* for this Action. 
+ commit-message: 'tools: update ${{ matrix.id }} to ${{ env.NEW_VERSION }}' labels: tools - title: "tools: update ${{ matrix.id }} to ${{ env.NEW_VERSION }}" + title: 'tools: update ${{ matrix.id }} to ${{ env.NEW_VERSION }}' diff --git a/.mailmap b/.mailmap index 7e7c447be3b0b2..ea89c8e96c1076 100644 --- a/.mailmap +++ b/.mailmap @@ -35,6 +35,7 @@ Anna Henningsen Anna Henningsen Anna Magdalena Kedzierska Antoine Amara +apeltop Aria Stewart Arlo Breault Arnaud Lefebvre @@ -77,6 +78,7 @@ Brent Pendergraft Brett Kiefer Brian White Brian White +Brian White Caleb Boyd Calvin Metcalf Calvin Metcalf @@ -219,6 +221,7 @@ James Hartig James Ide James M Snell James Nimlos +James Sumners Jan Krems Jem Bezooyen Jem Bezooyen @@ -365,6 +368,7 @@ Nigel Kibodeaux Nikola Glavina Nikolai Vavilov Nils Kuhnhenn +Nitzan Uziely Nitzan Uziely Noah Rose Ledesma Oliver Chang @@ -494,6 +498,7 @@ Timur Shemsedinov Ting Shao TJ Holowaychuk TJ Holowaychuk +Tobias Nießen Tobias Nießen Toby Farley Toby Stableford diff --git a/.yamllint.yaml b/.yamllint.yaml index 97ec306ea3fa0c..c8e63ec722006e 100644 --- a/.yamllint.yaml +++ b/.yamllint.yaml @@ -1,9 +1,40 @@ extends: default rules: + braces: + min-spaces-inside: 0 + max-spaces-inside: 1 + min-spaces-inside-empty: 0 + max-spaces-inside-empty: 0 + + brackets: + min-spaces-inside: 0 + max-spaces-inside: 1 + min-spaces-inside-empty: 0 + max-spaces-inside-empty: 0 + + comments-indentation: + level: error + + document-end: + present: false + document-start: + level: error present: false + + empty-lines: + max: 1 + + indentation: + spaces: 2 + line-length: disable + + quoted-strings: + quote-type: single + required: only-when-needed + truthy: allowed-values: ['true', 'false', 'on', 'off'] diff --git a/AUTHORS b/AUTHORS index 409fbbb6f585c4..f213b402b5d222 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1408,7 +1408,7 @@ Howard Hellyer Amelia Clarke James Ide Ben Schmidt -James Sumners +James Sumners Bradley Curran chiaki-yokoo Benjamin Fleischer @@ -3413,5 +3413,36 @@ Ateş Göral Tony Gorez ofirbarak Bar Admoni +ofir +Xuguang Mei +Elad Nava +Balakrishna Avulapati +Aaron Xie +小菜 +Raymond Zhao +Austin Cheney +Michael Scovetta +Valters Jansons +Marcos Bjoerkelund +T•Ø•R•Ü•S +Sean Quinlan <1011062+sbquinlan@users.noreply.github.com> +Derek Wolpert <48101033+derekwolpert@users.noreply.github.com> +wbt +Alexandru Comanescu +madflow +Austin Kelleher +apeltop +Livia Medeiros <74449973+LiviaMedeiros@users.noreply.github.com> +Nikolaos Papaspyrou +Matt Probert <1196252+mattpr@users.noreply.github.com> +Roch Devost +Kohei Ueno +bradh352 +Mikael Finstad +Damjan Cvetko +Randall Leeds +Khoo Hao Yit <40757009+KhooHaoYit@users.noreply.github.com> +Aroyan <43630681+aroyan@users.noreply.github.com> +theanarkh <2923878201@qq.com> # Generated by tools/update-authors.js diff --git a/BUILDING.md b/BUILDING.md index 54cdb1b0075913..d6ca30104b77f4 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -273,6 +273,10 @@ $ ./configure $ make -j4 ``` +We can speed up the builds by using [Ninja](https://ninja-build.org/). For more +information, see +[Building Node.js with Ninja](doc/contributing/building-node-with-ninja.md). + The `-j4` option will cause `make` to run 4 simultaneous compilation jobs which may reduce build time. For more information, see the [GNU Make Documentation](https://www.gnu.org/software/make/manual/html_node/Parallel.html). diff --git a/CHANGELOG.md b/CHANGELOG.md index c514c985d05595..93a23c4da2e236 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,8 @@ release. -16.14.2
+<b><a href="doc/changelogs/CHANGELOG_V16.md#16.15.0">16.15.0</a></b><br/>
+<a href="doc/changelogs/CHANGELOG_V16.md#16.14.2">16.14.2</a><br/>
<a href="doc/changelogs/CHANGELOG_V16.md#16.14.1">16.14.1</a><br/>
<a href="doc/changelogs/CHANGELOG_V16.md#16.14.0">16.14.0</a><br/>
16.13.2
diff --git a/LICENSE b/LICENSE index 55babf58f0bee2..6730deeb0cdf52 100644 --- a/LICENSE +++ b/LICENSE @@ -634,6 +634,31 @@ The externally maintained libraries used by Node.js are: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ +- undici, located at deps/undici, is licensed as follows: + """ + MIT License + + Copyright (c) Matteo Collina and Undici contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + """ + - OpenSSL, located at deps/openssl, is licensed as follows: """ Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. diff --git a/Makefile b/Makefile index 0e5b6d5d1cdf07..3c48653d05c0c6 100644 --- a/Makefile +++ b/Makefile @@ -1119,13 +1119,15 @@ pkg: $(PKG) .PHONY: corepack-update corepack-update: - rm -rf /tmp/node-corepack-clone - git clone 'https://github.com/nodejs/corepack.git' /tmp/node-corepack-clone - cd /tmp/node-corepack-clone && yarn pack - rm -rf deps/corepack && mkdir -p deps/corepack - cd deps/corepack && tar xf /tmp/node-corepack-clone/package.tgz --strip-components=1 + mkdir -p /tmp/node-corepack + curl -qLo /tmp/node-corepack/package.tgz "$$(npm view corepack dist.tarball)" + + rm -rf deps/corepack && mkdir deps/corepack + cd deps/corepack && tar xf /tmp/node-corepack/package.tgz --strip-components=1 chmod +x deps/corepack/shims/* + node deps/corepack/dist/corepack.js --version + .PHONY: pkg-upload # Note: this is strictly for release builds on release machines only. pkg-upload: pkg diff --git a/README.md b/README.md index 35e9c6e1e1bb6b..e305d58c4ee2eb 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,4 @@ - - -
+# Node.js Node.js is an open-source, cross-platform, JavaScript runtime environment. @@ -19,7 +9,7 @@ The Node.js project uses an [open governance model](./GOVERNANCE.md). The **This project has a [Code of Conduct][].** -# Table of contents +## Table of contents * [Support](#support) * [Release types](#release-types) @@ -120,7 +110,7 @@ For Current and LTS, the GPG detached signature of `SHASUMS256.txt` is in import the keys: ```console -$ gpg --keyserver pool.sks-keyservers.net --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D +$ gpg --keyserver hkps://keys.openpgp.org --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D ``` See the bottom of this README for a full script to import active release keys. @@ -294,6 +284,8 @@ For information about the governance of the Node.js project, see **Bradley Farias** <> * [bnb](https://github.com/bnb) - **Tierney Cyren** <> (they/he) +* [bnoordhuis](https://github.com/bnoordhuis) - + **Ben Noordhuis** <> * [boneskull](https://github.com/boneskull) - **Christopher Hiller** <> (he/him) * [BridgeAR](https://github.com/BridgeAR) - @@ -312,8 +304,6 @@ For information about the governance of the Node.js project, see **Daniel Bevenius** <> (he/him) * [danielleadams](https://github.com/danielleadams) - **Danielle Adams** <> (she/her) -* [DerekNonGeneric](https://github.com/DerekNonGeneric) - - **Derek Lewis** <> (he/him) * [devnexen](https://github.com/devnexen) - **David Carlier** <> * [devsnek](https://github.com/devsnek) - @@ -354,12 +344,14 @@ For information about the governance of the Node.js project, see **Fedor Indutny** <> * [JacksonTian](https://github.com/JacksonTian) - **Jackson Tian** <> +* [JakobJingleheimer](https://github.com/JakobJingleheimer) - + **Jacob Smith** <> (he/him) * [jasnell](https://github.com/jasnell) - **James M Snell** <> (he/him) * [jkrems](https://github.com/jkrems) - **Jan Krems** <> (he/him) -* [joaocgreis](https://github.com/joaocgreis) - - **João Reis** <> +* [joesepi](https://github.com/joesepi) - + **Joe Sepi** <> (he/him) * [joyeecheung](https://github.com/joyeecheung) - **Joyee Cheung** <> (she/her) * [juanarbol](https://github.com/juanarbol) - @@ -378,6 +370,8 @@ For information about the governance of the Node.js project, see **Denys Otrishko** <> (he/him) * [Lxxyx](https://github.com/Lxxyx) - **Zijian Liu** <> (he/him) +* [marsonya](https://github.com/marsonya) - + **Akhil Marsonya** <> (he/him) * [mcollina](https://github.com/mcollina) - **Matteo Collina** <> (he/him) * [Mesteery](https://github.com/Mesteery) - @@ -424,6 +418,8 @@ For information about the governance of the Node.js project, see **Santiago Gimeno** <> * [shisama](https://github.com/shisama) - **Masashi Hirano** <> (he/him) +* [ShogunPanda](https://github.com/ShogunPanda) - + **Paolo Insogna** <> (he/him) * [srl295](https://github.com/srl295) - **Steven R Loomis** <> * [starkwang](https://github.com/starkwang) - @@ -474,8 +470,6 @@ For information about the governance of the Node.js project, see **Alexey Orlenko** <> (he/him) * [bmeurer](https://github.com/bmeurer) - **Benedikt Meurer** <> -* [bnoordhuis](https://github.com/bnoordhuis) - - **Ben Noordhuis** <> * [brendanashworth](https://github.com/brendanashworth) - **Brendan Ashworth** <> * [calvinmetcalf](https://github.com/calvinmetcalf) - @@ -526,6 +520,8 @@ For information about the governance of the Node.js project, see **John-David Dalton** <> * [jhamhader](https://github.com/jhamhader) - **Yuval Brik** <> +* [joaocgreis](https://github.com/joaocgreis) - + **João Reis** <> * 
[joshgav](https://github.com/joshgav) - **Josh Gavant** <> * [julianduque](https://github.com/julianduque) - @@ -648,6 +644,8 @@ maintaining the Node.js project. **Frank Qiu** <> (he/him) * [marsonya](https://github.com/marsonya) - **Akhil Marsonya** <> (he/him) +* [meixg](https://github.com/meixg) - + **Xuguang Mei** <> (he/him) * [Mesteery](https://github.com/Mesteery) - **Mestery** <> (he/him) * [PoojaDurgad](https://github.com/PoojaDurgad) - @@ -690,18 +688,18 @@ To import the full set of trusted release keys (including subkeys possibly used to sign releases): ```bash -gpg --keyserver pool.sks-keyservers.net --recv-keys 4ED778F539E3634C779C87C6D7062848A1AB005C -gpg --keyserver pool.sks-keyservers.net --recv-keys 141F07595B7B3FFE74309A937405533BE57C7D57 -gpg --keyserver pool.sks-keyservers.net --recv-keys 94AE36675C464D64BAFA68DD7434390BDBE9B9C5 -gpg --keyserver pool.sks-keyservers.net --recv-keys 74F12602B6F1C4E913FAA37AD3A89613643B6201 -gpg --keyserver pool.sks-keyservers.net --recv-keys 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1 -gpg --keyserver pool.sks-keyservers.net --recv-keys 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 -gpg --keyserver pool.sks-keyservers.net --recv-keys C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 -gpg --keyserver pool.sks-keyservers.net --recv-keys C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C -gpg --keyserver pool.sks-keyservers.net --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D -gpg --keyserver pool.sks-keyservers.net --recv-keys A48C2BEE680E841632CD4E44F07496B3EB3C1762 -gpg --keyserver pool.sks-keyservers.net --recv-keys 108F52B48DB57BB0CC439B2997B01419BD92F80A -gpg --keyserver pool.sks-keyservers.net --recv-keys B9E2F5981AA6E0CD28160D9FF13993A75599653C +gpg --keyserver hkps://keys.openpgp.org --recv-keys 4ED778F539E3634C779C87C6D7062848A1AB005C +gpg --keyserver hkps://keys.openpgp.org --recv-keys 141F07595B7B3FFE74309A937405533BE57C7D57 +gpg --keyserver hkps://keys.openpgp.org --recv-keys 94AE36675C464D64BAFA68DD7434390BDBE9B9C5 +gpg --keyserver hkps://keys.openpgp.org --recv-keys 74F12602B6F1C4E913FAA37AD3A89613643B6201 +gpg --keyserver hkps://keys.openpgp.org --recv-keys 71DCFD284A79C3B38668286BC97EC7A07EDE3FC1 +gpg --keyserver hkps://keys.openpgp.org --recv-keys 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 +gpg --keyserver hkps://keys.openpgp.org --recv-keys C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 +gpg --keyserver hkps://keys.openpgp.org --recv-keys C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C +gpg --keyserver hkps://keys.openpgp.org --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D +gpg --keyserver hkps://keys.openpgp.org --recv-keys A48C2BEE680E841632CD4E44F07496B3EB3C1762 +gpg --keyserver hkps://keys.openpgp.org --recv-keys 108F52B48DB57BB0CC439B2997B01419BD92F80A +gpg --keyserver hkps://keys.openpgp.org --recv-keys B9E2F5981AA6E0CD28160D9FF13993A75599653C ``` See the section above on [Verifying binaries](#verifying-binaries) for how to diff --git a/SECURITY.md b/SECURITY.md index 8e5e3c4fe80815..b22301a1f1d556 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -22,14 +22,7 @@ the HackerOne platform. See for further details. ## Reporting a bug in a third party module Security bugs in third party modules should be reported to their respective -maintainers and should also be coordinated through the Node.js Ecosystem -Security Team via [HackerOne](https://hackerone.com/nodejs-ecosystem). - -Details regarding this process can be found in the -[Security Working Group repository](https://github.com/nodejs/security-wg/blob/HEAD/processes/third_party_vuln_process.md). 
- -Thank you for improving the security of Node.js and its ecosystem. Your efforts -and responsible disclosure are greatly appreciated and will be acknowledged. +maintainers. ## Disclosure policy diff --git a/benchmark/buffers/buffer-slice.js b/benchmark/buffers/buffer-slice.js index a64cbd2ab2db7c..dcb18754e8420d 100644 --- a/benchmark/buffers/buffer-slice.js +++ b/benchmark/buffers/buffer-slice.js @@ -3,7 +3,7 @@ const common = require('../common.js'); const SlowBuffer = require('buffer').SlowBuffer; const bench = common.createBenchmark(main, { - type: ['fast', 'slow'], + type: ['fast', 'slow', 'subarray'], n: [1e6] }); @@ -11,10 +11,14 @@ const buf = Buffer.allocUnsafe(1024); const slowBuf = new SlowBuffer(1024); function main({ n, type }) { - const b = type === 'fast' ? buf : slowBuf; + const b = type === 'slow' ? slowBuf : buf; + const fn = type === 'subarray' ? + () => b.subarray(10, 256) : + () => b.slice(10, 256); + bench.start(); for (let i = 0; i < n; i++) { - b.slice(10, 256); + fn(); } bench.end(n); } diff --git a/benchmark/crypto/webcrypto-digest.js b/benchmark/crypto/webcrypto-digest.js index 2d95f868f66f7c..4acd82878dacf2 100644 --- a/benchmark/crypto/webcrypto-digest.js +++ b/benchmark/crypto/webcrypto-digest.js @@ -3,11 +3,9 @@ const common = require('../common.js'); const { createHash, - webcrypto: { - subtle, - getRandomValues - } + webcrypto, } = require('crypto'); +const { subtle } = webcrypto; const bench = common.createBenchmark(main, { sync: ['createHash', 'subtle'], @@ -50,7 +48,7 @@ function measureSubtle(n, data, method) { } function main({ n, sync, data, method }) { - data = getRandomValues(Buffer.alloc(data)); + data = webcrypto.getRandomValues(Buffer.alloc(data)); switch (sync) { case 'createHash': return measureLegacy(n, data, method); case 'subtle': return measureSubtle(n, data, method); diff --git a/common.gypi b/common.gypi index be30169cf58d97..bdc2c105abeddc 100644 --- a/common.gypi +++ b/common.gypi @@ -70,6 +70,9 @@ # See https://github.com/v8/v8/wiki/Untrusted-code-mitigations 'v8_untrusted_code_mitigations': 0, + # Disable v8 hugepage by default. + 'v8_enable_hugepage%': 0, + # This is more of a V8 dev setting # https://github.com/nodejs/node/pull/22920/files#r222779926 'v8_enable_fast_mksnapshot': 0, @@ -109,7 +112,7 @@ 'v8_base': '<(PRODUCT_DIR)/libv8_snapshot.a', }], # V8 pointer compression only supports 64bit architectures. 
- ['target_arch in "arm ia32 mips mipsel ppc x32"', { + ['target_arch in "arm ia32 mips mipsel ppc"', { 'v8_enable_pointer_compression': 0, 'v8_enable_31bit_smis_on_64bit_arch': 0, }], @@ -417,10 +420,6 @@ 'cflags': [ '-m32' ], 'ldflags': [ '-m32' ], }], - [ 'target_arch=="x32"', { - 'cflags': [ '-mx32' ], - 'ldflags': [ '-mx32' ], - }], [ 'target_arch=="x64"', { 'cflags': [ '-m64' ], 'ldflags': [ '-m64' ], @@ -622,7 +621,7 @@ '-q64', ], # for addons due to v8config.h include of "zos-base.h": - 'include_dirs': ['$(ZOSLIB_INCLUDES)'], + 'include_dirs': ['<(zoslib_include_dir)'], }], ], } diff --git a/configure.py b/configure.py index 47137d8912dd14..95b31769cb5756 100755 --- a/configure.py +++ b/configure.py @@ -47,7 +47,7 @@ valid_os = ('win', 'mac', 'solaris', 'freebsd', 'openbsd', 'linux', 'android', 'aix', 'cloudabi') valid_arch = ('arm', 'arm64', 'ia32', 'mips', 'mipsel', 'mips64el', 'ppc', - 'ppc64', 'x32','x64', 'x86', 'x86_64', 's390x', 'riscv64') + 'ppc64', 'x64', 'x86', 'x86_64', 's390x', 'riscv64', 'loong64') valid_arm_float_abi = ('soft', 'softfp', 'hard') valid_arm_fpu = ('vfp', 'vfpv3', 'vfpv3-d16', 'neon') valid_mips_arch = ('loongson', 'r1', 'r2', 'r6', 'rx') @@ -779,6 +779,13 @@ default=True, help='compile V8 with auxiliar functions for native debuggers') +parser.add_argument('--v8-enable-hugepage', + action='store_true', + dest='v8_enable_hugepage', + default=None, + help='Enable V8 transparent hugepage support. This feature is only '+ + 'available on Linux platform.') + parser.add_argument('--node-builtin-modules-path', action='store', dest='node_builtin_modules_path', @@ -1155,7 +1162,8 @@ def configure_zos(o): o['variables']['node_static_zoslib'] = b(True) if options.static_zoslib_gyp: # Apply to all Node.js components for now - o['include_dirs'] += [os.path.dirname(options.static_zoslib_gyp) + '/include'] + o['variables']['zoslib_include_dir'] = os.path.dirname(options.static_zoslib_gyp) + '/include' + o['include_dirs'] += [o['variables']['zoslib_include_dir']] else: raise Exception('--static-zoslib-gyp= is required.') @@ -1436,7 +1444,9 @@ def configure_v8(o): raise Exception('--enable-d8 is incompatible with --without-bundled-v8.') if options.static_zoslib_gyp: o['variables']['static_zoslib_gyp'] = options.static_zoslib_gyp - + if flavor != 'linux' and options.v8_enable_hugepage: + raise Exception('--v8-enable-hugepage is supported only on linux.') + o['variables']['v8_enable_hugepage'] = 1 if options.v8_enable_hugepage else 0 def configure_openssl(o): variables = o['variables'] diff --git a/deps/cares/src/lib/ares_expand_name.c b/deps/cares/src/lib/ares_expand_name.c index fcd88a2a42eb42..6c7a35a715bf47 100644 --- a/deps/cares/src/lib/ares_expand_name.c +++ b/deps/cares/src/lib/ares_expand_name.c @@ -64,6 +64,8 @@ static int ares__isprint(int ch) * - underscores which are used in SRV records. * - Forward slashes such as are used for classless in-addr.arpa * delegation (CNAMEs) + * - Asterisks may be used for wildcard domains in CNAMEs as seen in the + * real world. * While RFC 2181 section 11 does state not to do validation, * that applies to servers, not clients. Vulnerabilities have been * reported when this validation is not performed. Security is more @@ -71,7 +73,7 @@ static int ares__isprint(int ch) * anyhow). 
*/ static int is_hostnamech(int ch) { - /* [A-Za-z0-9-._/] + /* [A-Za-z0-9-*._/] * Don't use isalnum() as it is locale-specific */ if (ch >= 'A' && ch <= 'Z') @@ -80,7 +82,7 @@ static int is_hostnamech(int ch) return 1; if (ch >= '0' && ch <= '9') return 1; - if (ch == '-' || ch == '.' || ch == '_' || ch == '/') + if (ch == '-' || ch == '.' || ch == '_' || ch == '/' || ch == '*') return 1; return 0; diff --git a/deps/nghttp2/lib/Makefile.in b/deps/nghttp2/lib/Makefile.in index 78e76e55d9683d..5653774d5d6a1c 100644 --- a/deps/nghttp2/lib/Makefile.in +++ b/deps/nghttp2/lib/Makefile.in @@ -362,6 +362,8 @@ LIBMRUBY_LIBS = @LIBMRUBY_LIBS@ LIBNGHTTP3_CFLAGS = @LIBNGHTTP3_CFLAGS@ LIBNGHTTP3_LIBS = @LIBNGHTTP3_LIBS@ LIBNGTCP2_CFLAGS = @LIBNGTCP2_CFLAGS@ +LIBNGTCP2_CRYPTO_BORINGSSL_CFLAGS = @LIBNGTCP2_CRYPTO_BORINGSSL_CFLAGS@ +LIBNGTCP2_CRYPTO_BORINGSSL_LIBS = @LIBNGTCP2_CRYPTO_BORINGSSL_LIBS@ LIBNGTCP2_CRYPTO_OPENSSL_CFLAGS = @LIBNGTCP2_CRYPTO_OPENSSL_CFLAGS@ LIBNGTCP2_CRYPTO_OPENSSL_LIBS = @LIBNGTCP2_CRYPTO_OPENSSL_LIBS@ LIBNGTCP2_LIBS = @LIBNGTCP2_LIBS@ @@ -408,6 +410,7 @@ PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@ PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_PLATFORM = @PYTHON_PLATFORM@ +PYTHON_PLATFORM_SITE_PKG = @PYTHON_PLATFORM_SITE_PKG@ PYTHON_PREFIX = @PYTHON_PREFIX@ PYTHON_SITE_PKG = @PYTHON_SITE_PKG@ PYTHON_VERSION = @PYTHON_VERSION@ diff --git a/deps/nghttp2/lib/includes/Makefile.in b/deps/nghttp2/lib/includes/Makefile.in index 834eff24ab9c67..327e523197e4ca 100644 --- a/deps/nghttp2/lib/includes/Makefile.in +++ b/deps/nghttp2/lib/includes/Makefile.in @@ -271,6 +271,8 @@ LIBMRUBY_LIBS = @LIBMRUBY_LIBS@ LIBNGHTTP3_CFLAGS = @LIBNGHTTP3_CFLAGS@ LIBNGHTTP3_LIBS = @LIBNGHTTP3_LIBS@ LIBNGTCP2_CFLAGS = @LIBNGTCP2_CFLAGS@ +LIBNGTCP2_CRYPTO_BORINGSSL_CFLAGS = @LIBNGTCP2_CRYPTO_BORINGSSL_CFLAGS@ +LIBNGTCP2_CRYPTO_BORINGSSL_LIBS = @LIBNGTCP2_CRYPTO_BORINGSSL_LIBS@ LIBNGTCP2_CRYPTO_OPENSSL_CFLAGS = @LIBNGTCP2_CRYPTO_OPENSSL_CFLAGS@ LIBNGTCP2_CRYPTO_OPENSSL_LIBS = @LIBNGTCP2_CRYPTO_OPENSSL_LIBS@ LIBNGTCP2_LIBS = @LIBNGTCP2_LIBS@ @@ -317,6 +319,7 @@ PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@ PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_PLATFORM = @PYTHON_PLATFORM@ +PYTHON_PLATFORM_SITE_PKG = @PYTHON_PLATFORM_SITE_PKG@ PYTHON_PREFIX = @PYTHON_PREFIX@ PYTHON_SITE_PKG = @PYTHON_SITE_PKG@ PYTHON_VERSION = @PYTHON_VERSION@ diff --git a/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h b/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h index 5f2454cfd7cbbb..c6082518c7b5fa 100644 --- a/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h +++ b/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h @@ -29,7 +29,7 @@ * @macro * Version number of the nghttp2 library release */ -#define NGHTTP2_VERSION "1.45.1" +#define NGHTTP2_VERSION "1.47.0" /** * @macro @@ -37,6 +37,6 @@ * release. This is a 24 bit number with 8 bits for major number, 8 bits * for minor and 8 bits for patch. Version 1.2.3 becomes 0x010203. */ -#define NGHTTP2_VERSION_NUM 0x012d01 +#define NGHTTP2_VERSION_NUM 0x012f00 #endif /* NGHTTP2VER_H */ diff --git a/deps/nghttp2/lib/nghttp2_buf.h b/deps/nghttp2/lib/nghttp2_buf.h index 06cce67a11bdea..45f62f16e271dc 100644 --- a/deps/nghttp2/lib/nghttp2_buf.h +++ b/deps/nghttp2/lib/nghttp2_buf.h @@ -99,7 +99,7 @@ void nghttp2_buf_free(nghttp2_buf *buf, nghttp2_mem *mem); * |new_cap|. If extensions took place, buffer pointers in |buf| will * change. 
* - * This function returns 0 if it succeeds, or one of the followings + * This function returns 0 if it succeeds, or one of the following * negative error codes: * * NGHTTP2_ERR_NOMEM diff --git a/deps/nghttp2/lib/nghttp2_frame.c b/deps/nghttp2/lib/nghttp2_frame.c index 382a26c818dd7b..3648b2389d7cbf 100644 --- a/deps/nghttp2/lib/nghttp2_frame.c +++ b/deps/nghttp2/lib/nghttp2_frame.c @@ -654,8 +654,6 @@ int nghttp2_frame_unpack_goaway_payload2(nghttp2_goaway *frame, var_gift_payloadlen = 0; } - payloadlen -= var_gift_payloadlen; - if (!var_gift_payloadlen) { var_gift_payload = NULL; } else { diff --git a/deps/nghttp2/lib/nghttp2_frame.h b/deps/nghttp2/lib/nghttp2_frame.h index 4b9222ac6dd685..3859926ebbcbac 100644 --- a/deps/nghttp2/lib/nghttp2_frame.h +++ b/deps/nghttp2/lib/nghttp2_frame.h @@ -46,7 +46,7 @@ #define NGHTTP2_MAX_FRAME_SIZE_MIN (1 << 14) #define NGHTTP2_MAX_PAYLOADLEN 16384 -/* The one frame buffer length for tranmission. We may use several of +/* The one frame buffer length for transmission. We may use several of them to support CONTINUATION. To account for Pad Length field, we allocate extra 1 byte, which saves extra large memcopying. */ #define NGHTTP2_FRAMEBUF_CHUNKLEN \ diff --git a/deps/nghttp2/lib/nghttp2_hd.c b/deps/nghttp2/lib/nghttp2_hd.c index 5e869315259921..30ee9b88920c0a 100644 --- a/deps/nghttp2/lib/nghttp2_hd.c +++ b/deps/nghttp2/lib/nghttp2_hd.c @@ -1263,6 +1263,8 @@ int nghttp2_hd_inflate_change_table_size( return NGHTTP2_ERR_INVALID_STATE; } + inflater->settings_hd_table_bufsize_max = settings_max_dynamic_table_size; + /* It seems that encoder is not required to send dynamic table size update if the table size is not changed after applying SETTINGS_HEADER_TABLE_SIZE. RFC 7541 is ambiguous here, but this @@ -1275,13 +1277,12 @@ int nghttp2_hd_inflate_change_table_size( /* Remember minimum value, and validate that encoder sends the value less than or equal to this. */ inflater->min_hd_table_bufsize_max = settings_max_dynamic_table_size; - } - inflater->settings_hd_table_bufsize_max = settings_max_dynamic_table_size; + inflater->ctx.hd_table_bufsize_max = settings_max_dynamic_table_size; - inflater->ctx.hd_table_bufsize_max = settings_max_dynamic_table_size; + hd_context_shrink_table_size(&inflater->ctx, NULL); + } - hd_context_shrink_table_size(&inflater->ctx, NULL); return 0; } diff --git a/deps/nghttp2/lib/nghttp2_map.c b/deps/nghttp2/lib/nghttp2_map.c index 5aab90b4daf473..e5db168ca2bc3b 100644 --- a/deps/nghttp2/lib/nghttp2_map.c +++ b/deps/nghttp2/lib/nghttp2_map.c @@ -189,6 +189,7 @@ static int map_resize(nghttp2_map *map, uint32_t new_tablelen, nghttp2_map_bucket *new_table; nghttp2_map_bucket *bkt; int rv; + (void)rv; new_table = nghttp2_mem_calloc(map->mem, new_tablelen, sizeof(nghttp2_map_bucket)); diff --git a/deps/nghttp2/lib/nghttp2_net.h b/deps/nghttp2/lib/nghttp2_net.h index 95ffee74a14fc9..582099b93dc0b8 100644 --- a/deps/nghttp2/lib/nghttp2_net.h +++ b/deps/nghttp2/lib/nghttp2_net.h @@ -42,7 +42,7 @@ #if defined(WIN32) /* Windows requires ws2_32 library for ntonl family functions. We define inline functions for those function so that we don't have - dependeny on that lib. */ + dependency on that lib. 
*/ # ifdef _MSC_VER # define STIN static __inline diff --git a/deps/nghttp2/lib/nghttp2_outbound_item.h b/deps/nghttp2/lib/nghttp2_outbound_item.h index b5f503a312dd8c..bd4611b551bbbd 100644 --- a/deps/nghttp2/lib/nghttp2_outbound_item.h +++ b/deps/nghttp2/lib/nghttp2_outbound_item.h @@ -111,7 +111,7 @@ struct nghttp2_outbound_item { to this structure to avoid frequent memory allocation. */ nghttp2_ext_frame_payload ext_frame_payload; nghttp2_aux_data aux_data; - /* The priority used in priority comparion. Smaller is served + /* The priority used in priority comparison. Smaller is served earlier. For PING, SETTINGS and non-DATA frames (excluding response HEADERS frame) have dedicated cycle value defined above. For DATA frame, cycle is computed by taking into account of diff --git a/deps/nghttp2/lib/nghttp2_pq.h b/deps/nghttp2/lib/nghttp2_pq.h index 2d7b702ac18ad0..7b7b7392f8479c 100644 --- a/deps/nghttp2/lib/nghttp2_pq.h +++ b/deps/nghttp2/lib/nghttp2_pq.h @@ -114,7 +114,7 @@ typedef int (*nghttp2_pq_item_cb)(nghttp2_pq_entry *item, void *arg); void nghttp2_pq_update(nghttp2_pq *pq, nghttp2_pq_item_cb fun, void *arg); /* - * Applys |fun| to each item in |pq|. The |arg| is passed as arg + * Applies |fun| to each item in |pq|. The |arg| is passed as arg * parameter to callback function. This function must not change the * ordering key. If the return value from callback is nonzero, this * function returns 1 immediately without iterating remaining items. diff --git a/deps/nghttp2/lib/nghttp2_session.c b/deps/nghttp2/lib/nghttp2_session.c index 36f1179f72a225..380a47c1b1e82b 100644 --- a/deps/nghttp2/lib/nghttp2_session.c +++ b/deps/nghttp2/lib/nghttp2_session.c @@ -5341,7 +5341,7 @@ static ssize_t inbound_frame_compute_pad(nghttp2_inbound_frame *iframe) { /* * This function returns the effective payload length in the data of - * length |readlen| when the remaning payload is |payloadleft|. The + * length |readlen| when the remaining payload is |payloadleft|. The * |payloadleft| does not include |readlen|. If padding was started * strictly before this data chunk, this function returns -1. */ diff --git a/deps/nghttp2/lib/nghttp2_session.h b/deps/nghttp2/lib/nghttp2_session.h index 07bfbb6c90c8df..907b1704bc8412 100644 --- a/deps/nghttp2/lib/nghttp2_session.h +++ b/deps/nghttp2/lib/nghttp2_session.h @@ -408,7 +408,7 @@ int nghttp2_session_add_rst_stream(nghttp2_session *session, int32_t stream_id, uint32_t error_code); /* - * Adds PING frame. This is a convenient functin built on top of + * Adds PING frame. This is a convenient function built on top of * nghttp2_session_add_frame() to add PING easily. * * If the |opaque_data| is not NULL, it must point to 8 bytes memory diff --git a/deps/nghttp2/lib/nghttp2_stream.c b/deps/nghttp2/lib/nghttp2_stream.c index 96e1d9fe0f9b7e..f4c80a24b5e704 100644 --- a/deps/nghttp2/lib/nghttp2_stream.c +++ b/deps/nghttp2/lib/nghttp2_stream.c @@ -33,7 +33,7 @@ #include "nghttp2_frame.h" /* Maximum distance between any two stream's cycle in the same - prirority queue. Imagine stream A's cycle is A, and stream B's + priority queue. Imagine stream A's cycle is A, and stream B's cycle is B, and A < B. The cycle is unsigned 32 bit integer, it may get overflow. 
Because of how we calculate the next cycle value, if B - A is less than or equals to diff --git a/deps/nghttp2/lib/nghttp2_submit.c b/deps/nghttp2/lib/nghttp2_submit.c index 744a49cf6098ec..92fb03e8ca0f09 100644 --- a/deps/nghttp2/lib/nghttp2_submit.c +++ b/deps/nghttp2/lib/nghttp2_submit.c @@ -492,8 +492,6 @@ int nghttp2_session_set_local_window_size(nghttp2_session *session, return nghttp2_session_update_recv_stream_window_size(session, stream, 0, 1); } - - return 0; } int nghttp2_submit_altsvc(nghttp2_session *session, uint8_t flags, diff --git a/deps/npm/docs/content/commands/npm-access.md b/deps/npm/docs/content/commands/npm-access.md index 1f661c911f47dc..162e94f1fec029 100644 --- a/deps/npm/docs/content/commands/npm-access.md +++ b/deps/npm/docs/content/commands/npm-access.md @@ -6,21 +6,27 @@ description: Set access level on published packages ### Synopsis + + + + ```bash npm access public [] npm access restricted [] - npm access grant [] npm access revoke [] - npm access 2fa-required [] npm access 2fa-not-required [] - npm access ls-packages [||] npm access ls-collaborators [ []] npm access edit [] ``` + + + + + ### Description Used to set access controls on private packages. diff --git a/deps/npm/docs/content/commands/npm-adduser.md b/deps/npm/docs/content/commands/npm-adduser.md index 21a31ca940e524..06eeb379c4dd88 100644 --- a/deps/npm/docs/content/commands/npm-adduser.md +++ b/deps/npm/docs/content/commands/npm-adduser.md @@ -6,12 +6,21 @@ description: Add a registry user account ### Synopsis + + + + ```bash -npm adduser [--registry=url] [--scope=@orgname] [--auth-type=legacy] +npm adduser aliases: login, add-user ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-audit.md b/deps/npm/docs/content/commands/npm-audit.md index 58c614d793db29..24b700ff5abd85 100644 --- a/deps/npm/docs/content/commands/npm-audit.md +++ b/deps/npm/docs/content/commands/npm-audit.md @@ -6,13 +6,19 @@ description: Run a security audit ### Synopsis -```bash -npm audit [--json] [--production] [--audit-level=(low|moderate|high|critical)] -npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)] + + + -common options: [--production] [--only=(dev|prod)] +```bash +npm audit [fix] ``` + + + + + ### Description The audit command submits a description of the dependencies configured in @@ -240,6 +246,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. * Allow clobbering existing values in `npm pkg` +* Allow unpublishing of entire packages (not just a single version). If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! @@ -300,6 +307,36 @@ variable will be set to `'production'` for all lifecycle scripts. +#### `foreground-scripts` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. + + + + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. 
+ +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + + + + #### `workspace` * Default: diff --git a/deps/npm/docs/content/commands/npm-bin.md b/deps/npm/docs/content/commands/npm-bin.md index 2d7c1d5b8149ee..94b72cfd5c81ce 100644 --- a/deps/npm/docs/content/commands/npm-bin.md +++ b/deps/npm/docs/content/commands/npm-bin.md @@ -6,10 +6,19 @@ description: Display npm bin folder ### Synopsis + + + + ```bash -npm bin [-g|--global] +npm bin ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-bugs.md b/deps/npm/docs/content/commands/npm-bugs.md index f92241a14b95c0..aeddeb848e81b8 100644 --- a/deps/npm/docs/content/commands/npm-bugs.md +++ b/deps/npm/docs/content/commands/npm-bugs.md @@ -6,12 +6,21 @@ description: Report bugs for a package in a web browser ### Synopsis + + + + ```bash -npm bugs [ [ ...]] +npm bugs [] -aliases: issues +alias: issues ``` + + + + + ### Description This command tries to guess at the likely location of a package's bug diff --git a/deps/npm/docs/content/commands/npm-cache.md b/deps/npm/docs/content/commands/npm-cache.md index 6497a3988c9387..091e26e8a71828 100644 --- a/deps/npm/docs/content/commands/npm-cache.md +++ b/deps/npm/docs/content/commands/npm-cache.md @@ -6,18 +6,26 @@ description: Manipulates packages cache ### Synopsis -```bash -npm cache add ... -npm cache add ... -npm cache add ... -npm cache add @... - -npm cache clean -aliases: npm cache clear, npm cache rm + + + +```bash +npm cache add +npm cache add +npm cache add +npm cache add +npm cache add @ +npm cache clean [] +npm cache ls [@] npm cache verify ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index 1ce50c66d5fafa..97d1aa5523dd72 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ b/deps/npm/docs/content/commands/npm-ci.md @@ -6,10 +6,21 @@ description: Install a project with a clean slate ### Synopsis + + + + ```bash npm ci + +aliases: clean-install, ic, install-clean, isntall-clean ``` + + + + + ### Description This command is similar to [`npm install`](/commands/npm-install), except @@ -83,6 +94,21 @@ submitted. +#### `foreground-scripts` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. + + + + #### `ignore-scripts` * Default: false diff --git a/deps/npm/docs/content/commands/npm-completion.md b/deps/npm/docs/content/commands/npm-completion.md index 9dbd960913f270..d73a98f2e50f78 100644 --- a/deps/npm/docs/content/commands/npm-completion.md +++ b/deps/npm/docs/content/commands/npm-completion.md @@ -6,10 +6,19 @@ description: Tab Completion for npm ### Synopsis + + + + ```bash -source <(npm completion) +npm completion ``` + + + + + Note: This command is unaware of workspaces. 
### Description diff --git a/deps/npm/docs/content/commands/npm-config.md b/deps/npm/docs/content/commands/npm-config.md index 2d77f045cbc472..a66a198ce42d17 100644 --- a/deps/npm/docs/content/commands/npm-config.md +++ b/deps/npm/docs/content/commands/npm-config.md @@ -6,18 +6,25 @@ description: Manage the npm configuration files ### Synopsis + + + + ```bash npm config set = [= ...] npm config get [ [ ...]] npm config delete [ ...] npm config list [--json] npm config edit -npm set = [= ...] -npm get [ [ ...]] alias: c ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-dedupe.md b/deps/npm/docs/content/commands/npm-dedupe.md index 53d2e64272a67b..b5a64831c0bbaa 100644 --- a/deps/npm/docs/content/commands/npm-dedupe.md +++ b/deps/npm/docs/content/commands/npm-dedupe.md @@ -6,13 +6,21 @@ description: Reduce duplication in the package tree ### Synopsis + + + + ```bash npm dedupe -npm ddp -aliases: ddp +alias: ddp ``` + + + + + ### Description Searches the local package tree and attempts to simplify the overall @@ -145,6 +153,24 @@ When package package-locks are disabled, automatic pruning of extraneous modules will also be disabled. To remove extraneous modules with package-locks disabled use `npm prune`. +This configuration does not affect `npm ci`. + + + + +#### `save` + +* Default: `true` unless when using `npm update` or `npm dedupe` where it + defaults to `false` +* Type: Boolean + +Save installed packages to a `package.json` file as dependencies. + +When used with the `npm rm` command, removes the dependency from +`package.json`. + +Will also prevent writing to `package-lock.json` if set to `false`. + diff --git a/deps/npm/docs/content/commands/npm-deprecate.md b/deps/npm/docs/content/commands/npm-deprecate.md index 438a54ec6e4f36..4345120d3744b3 100644 --- a/deps/npm/docs/content/commands/npm-deprecate.md +++ b/deps/npm/docs/content/commands/npm-deprecate.md @@ -6,10 +6,19 @@ description: Deprecate a version of a package ### Synopsis + + + + ```bash -npm deprecate [@] +npm deprecate [@] ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-diff.md b/deps/npm/docs/content/commands/npm-diff.md index 8d05df779f3ca5..c4c9eafdb3524d 100644 --- a/deps/npm/docs/content/commands/npm-diff.md +++ b/deps/npm/docs/content/commands/npm-diff.md @@ -6,14 +6,19 @@ description: The registry diff command ### Synopsis + + + + ```bash npm diff [...] -npm diff --diff= [...] -npm diff --diff= [--diff=] [...] -npm diff --diff= [--diff=] [...] -npm diff [--diff-ignore-all-space] [--diff-name-only] [...] 
``` + + + + + ### Description Similar to its `git diff` counterpart, this command will print diff patches diff --git a/deps/npm/docs/content/commands/npm-dist-tag.md b/deps/npm/docs/content/commands/npm-dist-tag.md index a4e0243aac87b3..a0f306cd4970d5 100644 --- a/deps/npm/docs/content/commands/npm-dist-tag.md +++ b/deps/npm/docs/content/commands/npm-dist-tag.md @@ -6,14 +6,23 @@ description: Modify package distribution tags ### Synopsis + + + + ```bash npm dist-tag add @ [] npm dist-tag rm npm dist-tag ls [] -aliases: dist-tags +alias: dist-tags ``` + + + + + ### Description Add, remove, and enumerate distribution tags on a package: diff --git a/deps/npm/docs/content/commands/npm-docs.md b/deps/npm/docs/content/commands/npm-docs.md index 970d17aa829c6e..8d5a278286a88b 100644 --- a/deps/npm/docs/content/commands/npm-docs.md +++ b/deps/npm/docs/content/commands/npm-docs.md @@ -6,12 +6,21 @@ description: Open documentation for a package in a web browser ### Synopsis + + + + ```bash npm docs [ [ ...]] -aliases: home +alias: home ``` + + + + + ### Description This command tries to guess at the likely location of a package's diff --git a/deps/npm/docs/content/commands/npm-doctor.md b/deps/npm/docs/content/commands/npm-doctor.md index 0cce60c7b7b157..7fb63bab16e835 100644 --- a/deps/npm/docs/content/commands/npm-doctor.md +++ b/deps/npm/docs/content/commands/npm-doctor.md @@ -6,10 +6,19 @@ description: Check your npm environment ### Synopsis + + + + ```bash npm doctor ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-edit.md b/deps/npm/docs/content/commands/npm-edit.md index 5ae7f2481ae456..39fc49592c571c 100644 --- a/deps/npm/docs/content/commands/npm-edit.md +++ b/deps/npm/docs/content/commands/npm-edit.md @@ -6,10 +6,19 @@ description: Edit an installed package ### Synopsis + + + + ```bash -npm edit +npm edit [/...] ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-exec.md b/deps/npm/docs/content/commands/npm-exec.md index d154f5780b9c9c..3645e336b9da97 100644 --- a/deps/npm/docs/content/commands/npm-exec.md +++ b/deps/npm/docs/content/commands/npm-exec.md @@ -6,26 +6,23 @@ description: Run a command from a local or remote npm package ### Synopsis + + + + ```bash npm exec -- [@] [args...] npm exec --package=[@] -- [args...] npm exec -c ' [args...]' npm exec --package=foo -c ' [args...]' -npm exec [--ws] [-w [@] [args...] -npx -p [@] [args...] 
-npx -c ' [args...]' -npx -p [@] -c ' [args...]' -Run without --call or positional args to open interactive subshell +alias: x +``` -alias: npm x, npx + + -common options: ---package= (may be specified multiple times) --p is a shorthand for --package only when using npx executable --c --call= (may not be mixed with positional arguments) -``` + ### Description diff --git a/deps/npm/docs/content/commands/npm-explain.md b/deps/npm/docs/content/commands/npm-explain.md index 5f05cac0f906b0..765221056585d9 100644 --- a/deps/npm/docs/content/commands/npm-explain.md +++ b/deps/npm/docs/content/commands/npm-explain.md @@ -6,12 +6,21 @@ description: Explain installed packages ### Synopsis + + + + ```bash npm explain alias: why ``` + + + + + ### Description This command will print the chain of dependencies causing a given package diff --git a/deps/npm/docs/content/commands/npm-explore.md b/deps/npm/docs/content/commands/npm-explore.md index 3979da9573db00..90753c7e09199f 100644 --- a/deps/npm/docs/content/commands/npm-explore.md +++ b/deps/npm/docs/content/commands/npm-explore.md @@ -6,10 +6,19 @@ description: Browse an installed package ### Synopsis + + + + ```bash npm explore [ -- ] ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-find-dupes.md b/deps/npm/docs/content/commands/npm-find-dupes.md index f7dc84f9c5306d..f7366fa6375d1e 100644 --- a/deps/npm/docs/content/commands/npm-find-dupes.md +++ b/deps/npm/docs/content/commands/npm-find-dupes.md @@ -6,10 +6,19 @@ description: Find duplication in the package tree ### Synopsis + + + + ```bash npm find-dupes ``` + + + + + ### Description Runs `npm dedupe` in `--dry-run` mode, making npm only output the @@ -82,6 +91,8 @@ When package package-locks are disabled, automatic pruning of extraneous modules will also be disabled. To remove extraneous modules with package-locks disabled use `npm prune`. +This configuration does not affect `npm ci`. + diff --git a/deps/npm/docs/content/commands/npm-fund.md b/deps/npm/docs/content/commands/npm-fund.md index 606b0a188c5549..5b96e91ab8ccb9 100644 --- a/deps/npm/docs/content/commands/npm-fund.md +++ b/deps/npm/docs/content/commands/npm-fund.md @@ -6,11 +6,19 @@ description: Retrieve funding information ### Synopsis + + + + ```bash -npm fund [] -npm fund [-w ] +npm fund [[<@scope>/]] ``` + + + + + ### Description This command retrieves information on how to fund the dependencies of a diff --git a/deps/npm/docs/content/commands/npm-help-search.md b/deps/npm/docs/content/commands/npm-help-search.md index 78553a14ecb01d..152f9f6bec16f1 100644 --- a/deps/npm/docs/content/commands/npm-help-search.md +++ b/deps/npm/docs/content/commands/npm-help-search.md @@ -6,10 +6,19 @@ description: Search npm help documentation ### Synopsis + + + + ```bash npm help-search ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-help.md b/deps/npm/docs/content/commands/npm-help.md index a8002eef17156c..83c595db696b9c 100644 --- a/deps/npm/docs/content/commands/npm-help.md +++ b/deps/npm/docs/content/commands/npm-help.md @@ -6,10 +6,21 @@ description: Get help on npm ### Synopsis + + + + ```bash npm help [] + +alias: hlep ``` + + + + + Note: This command is unaware of workspaces. 
### Description diff --git a/deps/npm/docs/content/commands/npm-hook.md b/deps/npm/docs/content/commands/npm-hook.md index c91bce3075e7b2..4a9805d02f9d43 100644 --- a/deps/npm/docs/content/commands/npm-hook.md +++ b/deps/npm/docs/content/commands/npm-hook.md @@ -6,13 +6,22 @@ description: Manage registry hooks ### Synopsis + + + + ```bash +npm hook add [--type=] npm hook ls [pkg] -npm hook add -npm hook update [secret] npm hook rm +npm hook update ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-init.md b/deps/npm/docs/content/commands/npm-init.md index a608061a96d8dc..71109cd360511d 100644 --- a/deps/npm/docs/content/commands/npm-init.md +++ b/deps/npm/docs/content/commands/npm-init.md @@ -6,13 +6,23 @@ description: Create a package.json file ### Synopsis + + + + ```bash -npm init [--yes|-y|--scope] -npm init <@scope> (same as `npm exec <@scope>/create`) -npm init [<@scope>/] (same as `npm exec [<@scope>/]create-`) -npm init [-w ] [args...] +npm init [--force|-f|--yes|-y|--scope] +npm init <@scope> (same as `npx <@scope>/create`) +npm init [<@scope>/] (same as `npx [<@scope>/]create-`) + +aliases: create, innit ``` + + + + + ### Description `npm init ` can be used to set up a new or existing npm @@ -38,6 +48,15 @@ strictly additive, so it will keep any fields and values that were already set. You can also use `-y`/`--yes` to skip the questionnaire altogether. If you pass `--scope`, it will create a scoped package. +*Note:* if a user already has the `create-` package +globally installed, that will be what `npm init` uses. If you want npm +to use the latest version, or another specific version you must specify +it: + +* `npm init foo@latest` # fetches and runs the latest `create-foo` from + the registry +* `npm init foo@1.2.3` # runs `create-foo@1.2.3` specifically + #### Forwarding additional options Any additional options will be passed directly to the command, so `npm init @@ -180,6 +199,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. * Allow clobbering existing values in `npm pkg` +* Allow unpublishing of entire packages (not just a single version). If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! diff --git a/deps/npm/docs/content/commands/npm-install-ci-test.md b/deps/npm/docs/content/commands/npm-install-ci-test.md index 5c37ed8f56128b..0d9470acf58b3d 100644 --- a/deps/npm/docs/content/commands/npm-install-ci-test.md +++ b/deps/npm/docs/content/commands/npm-install-ci-test.md @@ -6,12 +6,21 @@ description: Install a project with a clean slate and run tests ### Synopsis + + + + ```bash npm install-ci-test -alias: npm cit +alias: cit ``` + + + + + ### Description This command runs `npm ci` followed immediately by `npm test`. @@ -34,6 +43,21 @@ submitted. +#### `foreground-scripts` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. 
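A similar sketch for the `foreground-scripts` option described just above, assuming an install driven from a Node script; with the flag set, the build scripts of installed packages share stdio with the parent process instead of being captured (again, illustrative only, not part of the patch):

```js
// Sketch: run `npm ci --foreground-scripts` so preinstall/install/postinstall
// scripts of installed packages write directly to this terminal -- slower and
// noisier, but useful for debugging. Assumes npm is on the PATH.
const { spawnSync } = require('child_process');

const { status } = spawnSync('npm', ['ci', '--foreground-scripts'], {
  stdio: 'inherit', // share stdin/stdout/stderr with the npm child process
});

process.exitCode = status ?? 1;
```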
+ + + + #### `ignore-scripts` * Default: false diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md index c464e5bd0b8c64..5ac31cbf08e25c 100644 --- a/deps/npm/docs/content/commands/npm-install-test.md +++ b/deps/npm/docs/content/commands/npm-install-test.md @@ -6,20 +6,30 @@ description: Install package(s) and run tests ### Synopsis + + + + ```bash -npm install-test (with no args, in package dir) -npm install-test [<@scope>/] -npm install-test [<@scope>/]@ -npm install-test [<@scope>/]@ -npm install-test [<@scope>/]@ +npm install-test [<@scope>/] +npm install-test [<@scope>/]@ +npm install-test [<@scope>/]@ +npm install-test [<@scope>/]@ +npm install-test @npm: +npm install-test npm install-test npm install-test -npm install-test +npm install-test +npm install-test / -alias: npm it -common options: [--save|--save-dev|--save-optional] [--save-exact] [--dry-run] +alias: it ``` + + + + + ### Description This command runs an `npm install` followed immediately by an `npm test`. It @@ -32,13 +42,16 @@ takes exactly the same arguments as `npm install`. #### `save` -* Default: true +* Default: `true` unless when using `npm update` or `npm dedupe` where it + defaults to `false` * Type: Boolean -Save installed packages to a package.json file as dependencies. +Save installed packages to a `package.json` file as dependencies. When used with the `npm rm` command, removes the dependency from -package.json. +`package.json`. + +Will also prevent writing to `package-lock.json` if set to `false`. @@ -99,6 +112,27 @@ will be preferred. +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. + + + + #### `strict-peer-deps` * Default: false @@ -133,26 +167,22 @@ When package package-locks are disabled, automatic pruning of extraneous modules will also be disabled. To remove extraneous modules with package-locks disabled use `npm prune`. +This configuration does not affect `npm ci`. + -#### `omit` - -* Default: 'dev' if the `NODE_ENV` environment variable is set to - 'production', otherwise empty. -* Type: "dev", "optional", or "peer" (can be set multiple times) +#### `foreground-scripts` -Dependency types to omit from the installation tree on disk. - -Note that these dependencies _are_ still resolved and added to the -`package-lock.json` or `npm-shrinkwrap.json` file. They are just not -physically installed on disk. +* Default: false +* Type: Boolean -If a package type appears in both the `--include` and `--omit` lists, then -it will be included. +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. -If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment -variable will be set to `'production'` for all lifecycle scripts. 
+Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 83b9af1e4d07f9..2ffd3e3e9fff14 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ b/deps/npm/docs/content/commands/npm-install.md @@ -6,23 +6,30 @@ description: Install a package ### Synopsis + + + + ```bash -npm install (with no args, in package dir) -npm install [<@scope>/] -npm install [<@scope>/]@ -npm install [<@scope>/]@ -npm install [<@scope>/]@ +npm install [<@scope>/] +npm install [<@scope>/]@ +npm install [<@scope>/]@ +npm install [<@scope>/]@ npm install @npm: -npm install :/ -npm install +npm install npm install npm install -npm install +npm install +npm install / -aliases: npm i, npm add -common options: [-P|--save-prod|-D|--save-dev|-O|--save-optional|--save-peer] [-E|--save-exact] [-B|--save-bundle] [--no-save] [--dry-run] +aliases: i, in, ins, inst, insta, instal, isnt, isnta, isntal, isntall, add ``` + + + + + ### Description This command installs a package and any packages that it depends on. If the @@ -78,11 +85,20 @@ into a tarball (b). * `npm install `: - Install the package in the directory as a symlink in the current - project. Its dependencies will be installed before it's linked. If - `` sits inside the root of your project, its dependencies may + If `` sits inside the root of your project, its dependencies will be installed and may be hoisted to the top-level `node_modules` as they would for other - types of dependencies. + types of dependencies. If `` sits outside the root of your project, + *npm will not install the package dependencies* in the directory ``, + but it will create a symlink to ``. + + > NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use [`npm pack`](/commands/npm-pack) while in the `` directory, and then install the resulting tarball instead of the `` using `npm install ` + + Example: + + ```bash + npm install ../../other-package + npm install ./sub-package + ``` * `npm install `: @@ -416,13 +432,16 @@ These are some of the most common options related to installation. #### `save` -* Default: true +* Default: `true` unless when using `npm update` or `npm dedupe` where it + defaults to `false` * Type: Boolean -Save installed packages to a package.json file as dependencies. +Save installed packages to a `package.json` file as dependencies. When used with the `npm rm` command, removes the dependency from -package.json. +`package.json`. + +Will also prevent writing to `package-lock.json` if set to `false`. @@ -483,6 +502,27 @@ will be preferred. +#### `omit` + +* Default: 'dev' if the `NODE_ENV` environment variable is set to + 'production', otherwise empty. +* Type: "dev", "optional", or "peer" (can be set multiple times) + +Dependency types to omit from the installation tree on disk. + +Note that these dependencies _are_ still resolved and added to the +`package-lock.json` or `npm-shrinkwrap.json` file. They are just not +physically installed on disk. + +If a package type appears in both the `--include` and `--omit` lists, then +it will be included. + +If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment +variable will be set to `'production'` for all lifecycle scripts. 
+ + + + #### `strict-peer-deps` * Default: false @@ -517,26 +557,22 @@ When package package-locks are disabled, automatic pruning of extraneous modules will also be disabled. To remove extraneous modules with package-locks disabled use `npm prune`. +This configuration does not affect `npm ci`. + -#### `omit` - -* Default: 'dev' if the `NODE_ENV` environment variable is set to - 'production', otherwise empty. -* Type: "dev", "optional", or "peer" (can be set multiple times) - -Dependency types to omit from the installation tree on disk. +#### `foreground-scripts` -Note that these dependencies _are_ still resolved and added to the -`package-lock.json` or `npm-shrinkwrap.json` file. They are just not -physically installed on disk. +* Default: false +* Type: Boolean -If a package type appears in both the `--include` and `--omit` lists, then -it will be included. +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. -If the resulting omit list includes `'dev'`, then the `NODE_ENV` environment -variable will be set to `'production'` for all lifecycle scripts. +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. diff --git a/deps/npm/docs/content/commands/npm-link.md b/deps/npm/docs/content/commands/npm-link.md index d4ef41ae964628..892b55496c9b6f 100644 --- a/deps/npm/docs/content/commands/npm-link.md +++ b/deps/npm/docs/content/commands/npm-link.md @@ -6,13 +6,22 @@ description: Symlink a package folder ### Synopsis + + + + ```bash npm link (in package dir) npm link [<@scope>/][@] -alias: npm ln +alias: ln ``` + + + + + ### Description This is handy for installing your own stuff, so that you can work on it and @@ -116,13 +125,16 @@ workspace(s). #### `save` -* Default: true +* Default: `true` unless when using `npm update` or `npm dedupe` where it + defaults to `false` * Type: Boolean -Save installed packages to a package.json file as dependencies. +Save installed packages to a `package.json` file as dependencies. When used with the `npm rm` command, removes the dependency from -package.json. +`package.json`. + +Will also prevent writing to `package-lock.json` if set to `false`. @@ -217,6 +229,8 @@ When package package-locks are disabled, automatic pruning of extraneous modules will also be disabled. To remove extraneous modules with package-locks disabled use `npm prune`. +This configuration does not affect `npm ci`. + diff --git a/deps/npm/docs/content/commands/npm-logout.md b/deps/npm/docs/content/commands/npm-logout.md index cb7c8496fb4791..f0dd5cb856eaee 100644 --- a/deps/npm/docs/content/commands/npm-logout.md +++ b/deps/npm/docs/content/commands/npm-logout.md @@ -6,10 +6,19 @@ description: Log out of the registry ### Synopsis + + + + ```bash -npm logout [--registry=] [--scope=<@scope>] +npm logout ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index 3b33f0a3605e08..0f06e131f414b2 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -6,12 +6,21 @@ description: List installed packages ### Synopsis + + + + ```bash npm ls [[<@scope>/] ...] 
-aliases: list, la, ll +alias: list ``` + + + + + ### Description This command will print to stdout all the versions of packages that are diff --git a/deps/npm/docs/content/commands/npm-org.md b/deps/npm/docs/content/commands/npm-org.md index 2f08f611529925..975581c860df64 100644 --- a/deps/npm/docs/content/commands/npm-org.md +++ b/deps/npm/docs/content/commands/npm-org.md @@ -6,12 +6,23 @@ description: Manage orgs ### Synopsis + + + + ```bash -npm org set [developer | admin | owner] -npm org rm -npm org ls [] +npm org set orgname username [developer | admin | owner] +npm org rm orgname username +npm org ls orgname [] + +alias: ogr ``` + + + + + Note: This command is unaware of workspaces. ### Example diff --git a/deps/npm/docs/content/commands/npm-outdated.md b/deps/npm/docs/content/commands/npm-outdated.md index 1b58a6afda64bb..6fa026550e7477 100644 --- a/deps/npm/docs/content/commands/npm-outdated.md +++ b/deps/npm/docs/content/commands/npm-outdated.md @@ -6,10 +6,19 @@ description: Check for outdated packages ### Synopsis + + + + ```bash npm outdated [[<@scope>/] ...] ``` + + + + + ### Description This command will check the registry to see if any (or, specific) installed diff --git a/deps/npm/docs/content/commands/npm-owner.md b/deps/npm/docs/content/commands/npm-owner.md index 74e7f84af6c804..0779984e19a9db 100644 --- a/deps/npm/docs/content/commands/npm-owner.md +++ b/deps/npm/docs/content/commands/npm-owner.md @@ -6,14 +6,23 @@ description: Manage package owners ### Synopsis + + + + ```bash npm owner add [<@scope>/] npm owner rm [<@scope>/] npm owner ls [<@scope>/] -aliases: author +alias: author ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-pack.md b/deps/npm/docs/content/commands/npm-pack.md index 53945986837b94..c834f643ac0bb1 100644 --- a/deps/npm/docs/content/commands/npm-pack.md +++ b/deps/npm/docs/content/commands/npm-pack.md @@ -6,10 +6,19 @@ description: Create a tarball from a package ### Synopsis + + + + ```bash -npm pack [[<@scope>/]...] [--dry-run] [--json] +npm pack [[<@scope>/]...] ``` + + + + + ### Configuration diff --git a/deps/npm/docs/content/commands/npm-ping.md b/deps/npm/docs/content/commands/npm-ping.md index 6f1c4582f058f0..161d7292f8c977 100644 --- a/deps/npm/docs/content/commands/npm-ping.md +++ b/deps/npm/docs/content/commands/npm-ping.md @@ -6,10 +6,19 @@ description: Ping npm registry ### Synopsis + + + + ```bash -npm ping [--registry ] +npm ping ``` + + + + + Note: This command is unaware of workspaces. ### Description @@ -18,11 +27,14 @@ Ping the configured or given npm registry and verify authentication. If it works it will output something like: ```bash -Ping success: {*Details about registry*} +npm notice PING https://registry.npmjs.org/ +npm notice PONG 255ms ``` -otherwise you will get: +otherwise you will get an error: ```bash -Ping error: {*Detail about error} +npm notice PING http://foo.com/ +npm ERR! code E404 +npm ERR! 404 Not Found - GET http://www.foo.com/-/ping?write=true ``` ### Configuration diff --git a/deps/npm/docs/content/commands/npm-pkg.md b/deps/npm/docs/content/commands/npm-pkg.md index beee9c1c4e78a9..576e1335efbba9 100644 --- a/deps/npm/docs/content/commands/npm-pkg.md +++ b/deps/npm/docs/content/commands/npm-pkg.md @@ -6,12 +6,23 @@ description: Manages your package.json ### Synopsis + + + + ```bash -npm pkg get [ [. ...]] -npm pkg set = [.= ...] -npm pkg delete [. ...] +npm pkg set = [= ...] +npm pkg get [ [ ...]] +npm pkg delete [ ...] 
+npm pkg set [[].= ...] +npm pkg set [[].= ...] ``` + + + + + ### Description A command that automates the management of `package.json` files. @@ -188,6 +199,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. * Allow clobbering existing values in `npm pkg` +* Allow unpublishing of entire packages (not just a single version). If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! diff --git a/deps/npm/docs/content/commands/npm-prefix.md b/deps/npm/docs/content/commands/npm-prefix.md index 276a9e9e699100..39328bcc88a143 100644 --- a/deps/npm/docs/content/commands/npm-prefix.md +++ b/deps/npm/docs/content/commands/npm-prefix.md @@ -6,10 +6,19 @@ description: Display prefix ### Synopsis + + + + ```bash npm prefix [-g] ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-profile.md b/deps/npm/docs/content/commands/npm-profile.md index cecc48518dbdb7..af1f9d8aa10633 100644 --- a/deps/npm/docs/content/commands/npm-profile.md +++ b/deps/npm/docs/content/commands/npm-profile.md @@ -6,14 +6,22 @@ description: Change settings on your registry profile ### Synopsis + + + + ```bash -npm profile get [--json|--parseable] [] -npm profile set [--json|--parseable] -npm profile set password -npm profile enable-2fa [auth-and-writes|auth-only] +npm profile enable-2fa [auth-only|auth-and-writes] npm profile disable-2fa +npm profile get [] +npm profile set ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-prune.md b/deps/npm/docs/content/commands/npm-prune.md index 658ab2610e0ed9..a10a353801b7cd 100644 --- a/deps/npm/docs/content/commands/npm-prune.md +++ b/deps/npm/docs/content/commands/npm-prune.md @@ -6,10 +6,19 @@ description: Remove extraneous packages ### Synopsis + + + + ```bash -npm prune [[<@scope>/]...] [--production] [--dry-run] [--json] +npm prune [[<@scope>/]...] ``` + + + + + ### Description This command removes "extraneous" packages. If a package name is provided, @@ -90,6 +99,36 @@ Not supported by all npm commands. +#### `foreground-scripts` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. + + + + +#### `ignore-scripts` + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +Note that commands explicitly intended to run a particular script, such as +`npm start`, `npm stop`, `npm restart`, `npm test`, and `npm run-script` +will still run their intended script if `ignore-scripts` is set, but they +will *not* run any pre- or post-scripts. + + + + #### `workspace` * Default: diff --git a/deps/npm/docs/content/commands/npm-publish.md b/deps/npm/docs/content/commands/npm-publish.md index 6958b1066de7fd..ce6e1c1012c8e2 100644 --- a/deps/npm/docs/content/commands/npm-publish.md +++ b/deps/npm/docs/content/commands/npm-publish.md @@ -6,13 +6,19 @@ description: Publish a package ### Synopsis -```bash -npm publish [|] [--tag ] [--access ] [--otp otpcode] [--dry-run] + + + -Publishes '.' 
if no argument supplied -Sets tag 'latest' if no --tag specified +```bash +npm publish [] ``` + + + + + ### Description Publishes a package to the registry so that it can be installed by name. diff --git a/deps/npm/docs/content/commands/npm-rebuild.md b/deps/npm/docs/content/commands/npm-rebuild.md index 75e71c60e6810a..d63e00b79d3867 100644 --- a/deps/npm/docs/content/commands/npm-rebuild.md +++ b/deps/npm/docs/content/commands/npm-rebuild.md @@ -6,12 +6,21 @@ description: Rebuild a package ### Synopsis + + + + ```bash npm rebuild [[<@scope>/][@] ...] alias: rb ``` + + + + + ### Description This command runs the `npm build` command on the matched folders. This is @@ -61,6 +70,21 @@ systems. +#### `foreground-scripts` + +* Default: false +* Type: Boolean + +Run all build scripts (ie, `preinstall`, `install`, and `postinstall`) +scripts for installed packages in the foreground process, sharing standard +input, output, and error with the main npm process. + +Note that this will generally make installs run slower, and be much noisier, +but can be useful for debugging. + + + + #### `ignore-scripts` * Default: false diff --git a/deps/npm/docs/content/commands/npm-repo.md b/deps/npm/docs/content/commands/npm-repo.md index cd47fde47127ee..e14f07012a2484 100644 --- a/deps/npm/docs/content/commands/npm-repo.md +++ b/deps/npm/docs/content/commands/npm-repo.md @@ -6,10 +6,19 @@ description: Open package repository page in the browser ### Synopsis + + + + ```bash npm repo [ [ ...]] ``` + + + + + ### Description This command tries to guess at the likely location of a package's diff --git a/deps/npm/docs/content/commands/npm-restart.md b/deps/npm/docs/content/commands/npm-restart.md index 80f8ab77ef0183..f01cd014e74357 100644 --- a/deps/npm/docs/content/commands/npm-restart.md +++ b/deps/npm/docs/content/commands/npm-restart.md @@ -6,10 +6,19 @@ description: Restart a package ### Synopsis + + + + ```bash npm restart [-- ] ``` + + + + + ### Description This restarts a project. It is equivalent to running `npm run-script diff --git a/deps/npm/docs/content/commands/npm-root.md b/deps/npm/docs/content/commands/npm-root.md index 98d1108d33f758..40b58e4b33d0b2 100644 --- a/deps/npm/docs/content/commands/npm-root.md +++ b/deps/npm/docs/content/commands/npm-root.md @@ -6,10 +6,19 @@ description: Display npm root ### Synopsis + + + + ```bash -npm root [-g] +npm root ``` + + + + + ### Description Print the effective `node_modules` folder to standard out. diff --git a/deps/npm/docs/content/commands/npm-run-script.md b/deps/npm/docs/content/commands/npm-run-script.md index 6dd602d03e00ad..79b7c9a25780e6 100644 --- a/deps/npm/docs/content/commands/npm-run-script.md +++ b/deps/npm/docs/content/commands/npm-run-script.md @@ -6,14 +6,21 @@ description: Run arbitrary package scripts ### Synopsis + + + + ```bash -npm run-script [--if-present] [--silent] [-- ] -npm run-script [--workspace=] -npm run-script [--workspaces] +npm run-script [-- ] aliases: run, rum, urn ``` + + + + + ### Description This runs an arbitrary command from a package's `"scripts"` object. If no diff --git a/deps/npm/docs/content/commands/npm-search.md b/deps/npm/docs/content/commands/npm-search.md index 252822e7198443..db6a12bafabf13 100644 --- a/deps/npm/docs/content/commands/npm-search.md +++ b/deps/npm/docs/content/commands/npm-search.md @@ -6,12 +6,21 @@ description: Search for packages ### Synopsis + + + + ```bash -npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms ...] +npm search [search terms ...] 
aliases: s, se, find ``` + + + + + Note: This command is unaware of workspaces. ### Description diff --git a/deps/npm/docs/content/commands/npm-set-script.md b/deps/npm/docs/content/commands/npm-set-script.md index 869ceede045ae3..2d8e87db852196 100644 --- a/deps/npm/docs/content/commands/npm-set-script.md +++ b/deps/npm/docs/content/commands/npm-set-script.md @@ -7,10 +7,19 @@ description: Set tasks in the scripts section of package.json ### Synopsis An npm command that lets you create a task in the `scripts` section of the `package.json`. + + + + ```bash npm set-script [ + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/cross-origin.html b/test/fixtures/wpt/webmessaging/broadcastchannel/cross-origin.html new file mode 100644 index 00000000000000..ee4b2f21c8e3fe --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/cross-origin.html @@ -0,0 +1,38 @@ + + + + + + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/cross-partition.https.tentative.html b/test/fixtures/wpt/webmessaging/broadcastchannel/cross-partition.https.tentative.html new file mode 100644 index 00000000000000..163e6c00a93a95 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/cross-partition.https.tentative.html @@ -0,0 +1,72 @@ + + + + + + + + + + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/detached-iframe.html b/test/fixtures/wpt/webmessaging/broadcastchannel/detached-iframe.html new file mode 100644 index 00000000000000..b9b06c3a46463b --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/detached-iframe.html @@ -0,0 +1,174 @@ + + + + + + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/interface.any.js b/test/fixtures/wpt/webmessaging/broadcastchannel/interface.any.js new file mode 100644 index 00000000000000..35e09d34b418d3 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/interface.any.js @@ -0,0 +1,65 @@ +test(() => assert_throws_js(TypeError, () => new BroadcastChannel()), + 'Should throw if no name is provided'); + +test(() => { + let c = new BroadcastChannel(null); + assert_equals(c.name, 'null'); + }, 'Null name should not throw'); + +test(() => { + let c = new BroadcastChannel(undefined); + assert_equals(c.name, 'undefined'); + }, 'Undefined name should not throw'); + +test(() => { + let c = new BroadcastChannel('fooBar'); + assert_equals(c.name, 'fooBar'); + }, 'Non-empty name should not throw'); + +test(() => { + let c = new BroadcastChannel(123); + assert_equals(c.name, '123'); + }, 'Non-string name should not throw'); + +test(() => { + let c = new BroadcastChannel(''); + assert_throws_js(TypeError, () => c.postMessage()); + }, 'postMessage without parameters should throw'); + +test(() => { + let c = new BroadcastChannel(''); + c.postMessage(null); + }, 'postMessage with null should not throw'); + +test(() => { + let c = new BroadcastChannel(''); + c.close(); + }, 'close should not throw'); + +test(() => { + let c = new BroadcastChannel(''); + c.close(); + c.close(); + }, 'close should not throw when called multiple times'); + +test(() => { + let c = new BroadcastChannel(''); + c.close(); + assert_throws_dom('InvalidStateError', () => c.postMessage('')); + }, 'postMessage after close should throw'); + +test(() => { + let c = new BroadcastChannel(''); + assert_not_equals(c.onmessage, undefined); + }, 'BroadcastChannel should have an onmessage event'); + +test(() => { + let c = new BroadcastChannel(''); + assert_throws_dom('DataCloneError', () => c.postMessage(Symbol())); + 
}, 'postMessage should throw with uncloneable data'); + +test(() => { + let c = new BroadcastChannel(''); + c.close(); + assert_throws_dom('InvalidStateError', () => c.postMessage(Symbol())); + }, 'postMessage should throw InvalidStateError after close, even with uncloneable data'); diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/opaque-origin.html b/test/fixtures/wpt/webmessaging/broadcastchannel/opaque-origin.html new file mode 100644 index 00000000000000..c10e0cb22256a0 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/opaque-origin.html @@ -0,0 +1,35 @@ + + + + + + + + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/ordering.html b/test/fixtures/wpt/webmessaging/broadcastchannel/ordering.html new file mode 100644 index 00000000000000..2d521b9e0ccf94 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/ordering.html @@ -0,0 +1,116 @@ + + + + + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/origin.window.js b/test/fixtures/wpt/webmessaging/broadcastchannel/origin.window.js new file mode 100644 index 00000000000000..7e9d602af194b7 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/origin.window.js @@ -0,0 +1,10 @@ +async_test(t => { + const crossOriginURL = new URL("resources/origin.html", self.location.href).href.replace("://", "://天気の良い日."), + frame = document.createElement("iframe"); + frame.src = crossOriginURL; + document.body.appendChild(frame); + t.add_cleanup(() => frame.remove()); + self.onmessage = t.step_func_done(e => { + assert_equals(e.data, self.origin.replace("://", "://xn--n8j6ds53lwwkrqhv28a.")); + }); +}, "Serialization of BroadcastChannel origin"); diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/resources/cross-origin.html b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/cross-origin.html new file mode 100644 index 00000000000000..5078b6fc8e46f5 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/cross-origin.html @@ -0,0 +1,15 @@ + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/resources/ordering.html b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/ordering.html new file mode 100644 index 00000000000000..b7f12d865ad2bf --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/ordering.html @@ -0,0 +1,78 @@ + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/resources/origin.html b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/origin.html new file mode 100644 index 00000000000000..f57d582bbb878c --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/origin.html @@ -0,0 +1,8 @@ + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/resources/sandboxed.html b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/sandboxed.html new file mode 100644 index 00000000000000..e32962cdfd4456 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/sandboxed.html @@ -0,0 +1,10 @@ + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/resources/service-worker.js b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/service-worker.js new file mode 100644 index 00000000000000..a3d17b9c650adb --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/service-worker.js @@ -0,0 +1,15 @@ +let promise_func = null; +let promise = new Promise(resolve => promise_func = resolve); + +const SERVICE_WORKER_TEST_CHANNEL_NAME = 'service worker'; +const bc3 = new 
BroadcastChannel(SERVICE_WORKER_TEST_CHANNEL_NAME); +bc3.onmessage = e => { + bc3.postMessage('done'); + promise_func(); +}; +bc3.postMessage('from worker'); + +// Ensure that the worker stays alive for the duration of the test +self.addEventListener('install', evt => { + evt.waitUntil(promise); +}); diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/resources/worker.js b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/worker.js new file mode 100644 index 00000000000000..df23072bc99f95 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/resources/worker.js @@ -0,0 +1,37 @@ +var c; + +function handler(e, reply) { + if (e.data.ping) { + c.postMessage(e.data.ping); + return; + } + if (e.data.blob) { + (() => { + c.postMessage({blob: new Blob(e.data.blob)}); + })(); + // TODO(https://github.com/web-platform-tests/wpt/issues/7899): Change to + // some sort of cross-browser GC trigger. + if (self.gc) self.gc(); + } + c = new BroadcastChannel(e.data.channel); + let messages = []; + c.onmessage = e => { + if (e.data === 'ready') { + // Ignore any 'ready' messages from the other thread since there could + // be some race conditions between this BroadcastChannel instance + // being created / ready to receive messages and the message being sent. + return; + } + messages.push(e.data); + if (e.data == 'done') + reply(messages); + }; + c.postMessage('from worker'); +} + +onmessage = e => handler(e, postMessage); + +onconnect = e => { + let port = e.ports[0]; + port.onmessage = e => handler(e, msg => port.postMessage(msg)); +}; diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/sandbox.html b/test/fixtures/wpt/webmessaging/broadcastchannel/sandbox.html new file mode 100644 index 00000000000000..aedf3c0d6fe64d --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/sandbox.html @@ -0,0 +1,16 @@ + + +Creating BroadcastChannel in an opaque origin + + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/service-worker.https.html b/test/fixtures/wpt/webmessaging/broadcastchannel/service-worker.https.html new file mode 100644 index 00000000000000..d605434ae1ccb0 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/service-worker.https.html @@ -0,0 +1,47 @@ + + + + + diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html b/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html new file mode 100644 index 00000000000000..8b55492f3cffb3 --- /dev/null +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html @@ -0,0 +1,375 @@ + + + + + diff --git a/test/js-native-api/test_properties/test.js b/test/js-native-api/test_properties/test.js index 704002dfc74f3a..7311c615cb815f 100644 --- a/test/js-native-api/test_properties/test.js +++ b/test/js-native-api/test_properties/test.js @@ -35,12 +35,11 @@ assert.ok(!propertyNames.includes('readwriteAccessor2')); assert.ok(!propertyNames.includes('readonlyAccessor1')); assert.ok(!propertyNames.includes('readonlyAccessor2')); -// Validate property created with symbol -const start = 'Symbol('.length; -const end = start + 'NameKeySymbol'.length; -const symbolDescription = - String(Object.getOwnPropertySymbols(test_object)[0]).slice(start, end); -assert.strictEqual(symbolDescription, 'NameKeySymbol'); +// Validate properties created with symbol +const propertySymbols = Object.getOwnPropertySymbols(test_object); +assert.strictEqual(propertySymbols[0].toString(), 'Symbol(NameKeySymbol)'); +assert.strictEqual(propertySymbols[1].toString(), 'Symbol()'); 
+assert.strictEqual(propertySymbols[2], Symbol.for('NameKeySymbolFor')); // The napi_writable attribute should be ignored for accessors. const readwriteAccessor1Descriptor = diff --git a/test/js-native-api/test_properties/test_properties.c b/test/js-native-api/test_properties/test_properties.c index c778601aa7489b..2c1a513449d214 100644 --- a/test/js-native-api/test_properties/test_properties.c +++ b/test/js-native-api/test_properties/test_properties.c @@ -1,3 +1,4 @@ +#define NAPI_EXPERIMENTAL #include #include "../common.h" @@ -77,6 +78,16 @@ napi_value Init(napi_env env, napi_value exports) { NODE_API_CALL(env, napi_create_symbol(env, symbol_description, &name_symbol)); + napi_value name_symbol_descriptionless; + NODE_API_CALL(env, + napi_create_symbol(env, NULL, &name_symbol_descriptionless)); + + napi_value name_symbol_for; + NODE_API_CALL(env, node_api_symbol_for(env, + "NameKeySymbolFor", + NAPI_AUTO_LENGTH, + &name_symbol_for)); + napi_property_descriptor properties[] = { { "echo", 0, Echo, 0, 0, 0, napi_enumerable, 0 }, { "readwriteValue", 0, 0, 0, 0, number, napi_enumerable | napi_writable, 0 }, @@ -84,6 +95,8 @@ napi_value Init(napi_env env, napi_value exports) { { "hiddenValue", 0, 0, 0, 0, number, napi_default, 0}, { NULL, name_value, 0, 0, 0, number, napi_enumerable, 0}, { NULL, name_symbol, 0, 0, 0, number, napi_enumerable, 0}, + { NULL, name_symbol_descriptionless, 0, 0, 0, number, napi_enumerable, 0}, + { NULL, name_symbol_for, 0, 0, 0, number, napi_enumerable, 0}, { "readwriteAccessor1", 0, 0, GetValue, SetValue, 0, napi_default, 0}, { "readwriteAccessor2", 0, 0, GetValue, SetValue, 0, napi_writable, 0}, { "readonlyAccessor1", 0, 0, GetValue, NULL, 0, napi_default, 0}, diff --git a/test/js-native-api/test_reference/test.js b/test/js-native-api/test_reference/test.js index 0dc9e553300060..6f128b788706cd 100644 --- a/test/js-native-api/test_reference/test.js +++ b/test/js-native-api/test_reference/test.js @@ -21,6 +21,25 @@ async function runTests() { })(); test_reference.deleteReference(); + (() => { + const symbol = test_reference.createSymbolFor('testSymFor'); + test_reference.createReference(symbol, 0); + assert.strictEqual(test_reference.referenceValue, symbol); + assert.strictEqual(test_reference.referenceValue, Symbol.for('testSymFor')); + })(); + test_reference.deleteReference(); + + (() => { + const symbol = test_reference.createSymbolForEmptyString(); + test_reference.createReference(symbol, 0); + assert.strictEqual(test_reference.referenceValue, symbol); + assert.strictEqual(test_reference.referenceValue, Symbol.for('')); + })(); + test_reference.deleteReference(); + + assert.throws(() => test_reference.createSymbolForIncorrectLength(), + /Invalid argument/); + (() => { const value = test_reference.createExternal(); assert.strictEqual(test_reference.finalizeCount, 0); diff --git a/test/js-native-api/test_reference/test_reference.c b/test/js-native-api/test_reference/test_reference.c index 7b770cb8766775..4a224efbd8cf6d 100644 --- a/test/js-native-api/test_reference/test_reference.c +++ b/test/js-native-api/test_reference/test_reference.c @@ -1,3 +1,4 @@ +#define NAPI_EXPERIMENTAL #include #include #include @@ -49,6 +50,41 @@ static napi_value CreateSymbol(napi_env env, napi_callback_info info) { return result_symbol; } +static napi_value CreateSymbolFor(napi_env env, napi_callback_info info) { + + size_t argc = 1; + napi_value args[1]; + + char description[256]; + size_t description_length; + + NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, 
NULL,NULL)); + NODE_API_ASSERT(env, argc == 1, "Expect one argument only (symbol description)"); + + NODE_API_CALL(env, napi_get_value_string_utf8(env, args[0], description, sizeof(description), &description_length)); + NODE_API_ASSERT(env, description_length <= 255, "Cannot accommodate descriptions longer than 255 bytes"); + + napi_value result_symbol; + + NODE_API_CALL(env, node_api_symbol_for(env, + description, + description_length, + &result_symbol)); + return result_symbol; +} + +static napi_value CreateSymbolForEmptyString(napi_env env, napi_callback_info info) { + napi_value result_symbol; + NODE_API_CALL(env, node_api_symbol_for(env, NULL, 0, &result_symbol)); + return result_symbol; +} + +static napi_value CreateSymbolForIncorrectLength(napi_env env, napi_callback_info info) { + napi_value result_symbol; + NODE_API_CALL(env, node_api_symbol_for(env, NULL, 5, &result_symbol)); + return result_symbol; +} + static napi_value CreateExternalWithFinalize(napi_env env, napi_callback_info info) { napi_value result; @@ -190,6 +226,9 @@ napi_value Init(napi_env env, napi_value exports) { DECLARE_NODE_API_PROPERTY("checkExternal", CheckExternal), DECLARE_NODE_API_PROPERTY("createReference", CreateReference), DECLARE_NODE_API_PROPERTY("createSymbol", CreateSymbol), + DECLARE_NODE_API_PROPERTY("createSymbolFor", CreateSymbolFor), + DECLARE_NODE_API_PROPERTY("createSymbolForEmptyString", CreateSymbolForEmptyString), + DECLARE_NODE_API_PROPERTY("createSymbolForIncorrectLength", CreateSymbolForIncorrectLength), DECLARE_NODE_API_PROPERTY("deleteReference", DeleteReference), DECLARE_NODE_API_PROPERTY("incrementRefcount", IncrementRefcount), DECLARE_NODE_API_PROPERTY("decrementRefcount", DecrementRefcount), diff --git a/test/node-api/test_general/test.js b/test/node-api/test_general/test.js index dd409f010a3ada..77550c02927fb4 100644 --- a/test/node-api/test_general/test.js +++ b/test/node-api/test_general/test.js @@ -1,15 +1,46 @@ 'use strict'; const common = require('../../common'); +const tmpdir = require('../../common/tmpdir'); +const child_process = require('child_process'); +const fs = require('fs'); +const path = require('path'); +const url = require('url'); const filename = require.resolve(`./build/${common.buildType}/test_general`); const test_general = require(filename); const assert = require('assert'); -// TODO(gabrielschulhof): This test may need updating if/when the filename -// becomes a full-fledged URL. -assert.strictEqual(test_general.filename, `file://${filename}`); +tmpdir.refresh(); -const [ major, minor, patch, release ] = test_general.testGetNodeVersion(); -assert.strictEqual(process.version.split('-')[0], - `v${major}.${minor}.${patch}`); -assert.strictEqual(release, process.release.name); +{ + // TODO(gabrielschulhof): This test may need updating if/when the filename + // becomes a full-fledged URL. + assert.strictEqual(test_general.filename, url.pathToFileURL(filename).href); +} + +{ + const urlTestDir = path.join(tmpdir.path, 'foo%#bar'); + const urlTestFile = path.join(urlTestDir, path.basename(filename)); + fs.mkdirSync(urlTestDir, { recursive: true }); + fs.copyFileSync(filename, urlTestFile); + // Use a child process as indirection so that the native module is not loaded + // into this process and can be removed here. 
+ const reportedFilename = child_process.spawnSync( + process.execPath, + ['-p', `require(${JSON.stringify(urlTestFile)}).filename`], + { encoding: 'utf8' }).stdout.trim(); + assert.doesNotMatch(reportedFilename, /foo%#bar/); + assert.strictEqual(reportedFilename, url.pathToFileURL(urlTestFile).href); + fs.rmSync(urlTestDir, { + force: true, + recursive: true, + maxRetries: 256 + }); +} + +{ + const [ major, minor, patch, release ] = test_general.testGetNodeVersion(); + assert.strictEqual(process.version.split('-')[0], + `v${major}.${minor}.${patch}`); + assert.strictEqual(release, process.release.name); +} diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index a3140505ab67ae..403752346856cd 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -32,8 +32,6 @@ test-crypto-keygen: SKIP [$system==freebsd] # https://github.com/nodejs/node/issues/31727 test-fs-stat-bigint: PASS,FLAKY -# https://github.com/nodejs/node/issues/28803 -test-stdout-close-catch: PASS,FLAKY # https://github.com/nodejs/node/issues/31280 test-worker-message-port-message-before-close: PASS,FLAKY @@ -67,5 +65,5 @@ test-http-client-parse-error: PASS, FLAKY test-http-multi-line-headers: PASS, FLAKY test-http-server-unconsume: PASS, FLAKY test-http-upgrade-advertise: PASS, FLAKY -test-http-client-mindhsize: PASS, FLAKY -test-http-tls-write-error: PASS, FLAKY +test-tls-client-mindhsize: PASS, FLAKY +test-tls-write-error: PASS, FLAKY diff --git a/test/parallel/test-asyncresource-bind.js b/test/parallel/test-asyncresource-bind.js index a9f613d9302edf..29de9bbb0f10bb 100644 --- a/test/parallel/test-asyncresource-bind.js +++ b/test/parallel/test-asyncresource-bind.js @@ -41,7 +41,7 @@ const fn3 = asyncResource.bind(common.mustCall(function() { fn3(); const fn4 = asyncResource.bind(common.mustCall(function() { - assert.strictEqual(this, asyncResource); + assert.strictEqual(this, undefined); })); fn4(); @@ -49,3 +49,8 @@ const fn5 = asyncResource.bind(common.mustCall(function() { assert.strictEqual(this, false); }), false); fn5(); + +const fn6 = asyncResource.bind(common.mustCall(function() { + assert.strictEqual(this, 'test'); +})); +fn6.call('test'); diff --git a/test/parallel/test-blob-createobjecturl.js b/test/parallel/test-blob-createobjecturl.js index a8fd377dd3ef70..70c64b138db1ac 100644 --- a/test/parallel/test-blob-createobjecturl.js +++ b/test/parallel/test-blob-createobjecturl.js @@ -29,6 +29,10 @@ const assert = require('assert'); Buffer.from(await otherBlob.arrayBuffer()).toString(), 'hello'); URL.revokeObjectURL(id); + + // should do nothing + URL.revokeObjectURL(id); + assert.strictEqual(resolveObjectURL(id), undefined); // Leaving a Blob registered should not cause an assert diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index 596ad2c2119681..de9414abb2d648 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -1,3 +1,4 @@ +// Flags: --expose-internals 'use strict'; // This list must be computed before we require any modules to @@ -8,22 +9,24 @@ const common = require('../common'); const assert = require('assert'); const expectedModules = new Set([ - 'Internal Binding errors', 'Internal Binding async_wrap', + 'Internal Binding block_list', 'Internal Binding buffer', 'Internal Binding config', 'Internal Binding constants', 'Internal Binding contextify', 'Internal Binding credentials', - 'Internal Binding fs', + 'Internal Binding errors', 'Internal Binding fs_dir', 
'Internal Binding fs_event_wrap', + 'Internal Binding fs', 'Internal Binding heap_utils', 'Internal Binding messaging', 'Internal Binding module_wrap', 'Internal Binding native_module', 'Internal Binding options', 'Internal Binding performance', + 'Internal Binding pipe_wrap', 'Internal Binding process_methods', 'Internal Binding report', 'Internal Binding serdes', @@ -31,6 +34,7 @@ const expectedModules = new Set([ 'Internal Binding string_decoder', 'Internal Binding symbols', 'Internal Binding task_queue', + 'Internal Binding tcp_wrap', 'Internal Binding timers', 'Internal Binding trace_events', 'Internal Binding types', @@ -45,53 +49,58 @@ const expectedModules = new Set([ 'NativeModule internal/abort_controller', 'NativeModule internal/assert', 'NativeModule internal/async_hooks', + 'NativeModule internal/blocklist', 'NativeModule internal/bootstrap/pre_execution', 'NativeModule internal/buffer', 'NativeModule internal/console/constructor', 'NativeModule internal/console/global', 'NativeModule internal/constants', + 'NativeModule internal/dtrace', 'NativeModule internal/encoding', 'NativeModule internal/errors', 'NativeModule internal/event_target', 'NativeModule internal/fixed_queue', 'NativeModule internal/fs/dir', - 'NativeModule internal/fs/utils', 'NativeModule internal/fs/promises', 'NativeModule internal/fs/read_file_context', 'NativeModule internal/fs/rimraf', + 'NativeModule internal/fs/utils', 'NativeModule internal/fs/watchers', 'NativeModule internal/heap_utils', 'NativeModule internal/histogram', 'NativeModule internal/idna', 'NativeModule internal/linkedlist', - 'NativeModule internal/modules/run_main', - 'NativeModule internal/modules/package_json_reader', 'NativeModule internal/modules/cjs/helpers', 'NativeModule internal/modules/cjs/loader', 'NativeModule internal/modules/esm/assert', 'NativeModule internal/modules/esm/create_dynamic_module', + 'NativeModule internal/modules/esm/fetch_module', + 'NativeModule internal/modules/esm/formats', 'NativeModule internal/modules/esm/get_format', 'NativeModule internal/modules/esm/get_source', - 'NativeModule internal/modules/esm/loader', + 'NativeModule internal/modules/esm/handle_process_exit', + 'NativeModule internal/modules/esm/initialize_import_meta', 'NativeModule internal/modules/esm/load', + 'NativeModule internal/modules/esm/loader', 'NativeModule internal/modules/esm/module_job', 'NativeModule internal/modules/esm/module_map', 'NativeModule internal/modules/esm/resolve', - 'NativeModule internal/modules/esm/initialize_import_meta', 'NativeModule internal/modules/esm/translators', - 'NativeModule internal/modules/esm/handle_process_exit', - 'NativeModule internal/process/esm_loader', + 'NativeModule internal/modules/package_json_reader', + 'NativeModule internal/modules/run_main', + 'NativeModule internal/net', 'NativeModule internal/options', 'NativeModule internal/perf/event_loop_delay', 'NativeModule internal/perf/event_loop_utilization', 'NativeModule internal/perf/nodetiming', 'NativeModule internal/perf/observe', - 'NativeModule internal/perf/performance', 'NativeModule internal/perf/performance_entry', + 'NativeModule internal/perf/performance', 'NativeModule internal/perf/timerify', 'NativeModule internal/perf/usertiming', 'NativeModule internal/perf/utils', 'NativeModule internal/priority_queue', + 'NativeModule internal/process/esm_loader', 'NativeModule internal/process/execution', 'NativeModule internal/process/per_thread', 'NativeModule internal/process/promises', @@ -101,6 +110,7 @@ const expectedModules 
= new Set([ 'NativeModule internal/process/warning', 'NativeModule internal/promise_hooks', 'NativeModule internal/querystring', + 'NativeModule internal/socketaddress', 'NativeModule internal/source_map/source_map_cache', 'NativeModule internal/stream_base_commons', 'NativeModule internal/streams/add-abort-signal', @@ -133,6 +143,7 @@ const expectedModules = new Set([ 'Internal Binding blob', 'NativeModule internal/blob', 'NativeModule async_hooks', + 'NativeModule net', 'NativeModule path', 'NativeModule perf_hooks', 'NativeModule querystring', @@ -189,6 +200,11 @@ if (process.env.NODE_V8_COVERAGE) { expectedModules.add('Internal Binding profiler'); } +const { internalBinding } = require('internal/test/binding'); +if (internalBinding('config').hasDtrace) { + expectedModules.add('Internal Binding dtrace'); +} + const difference = (setA, setB) => { return new Set([...setA].filter((x) => !setB.has(x))); }; diff --git a/test/parallel/test-cluster-net-listen-backlog.js b/test/parallel/test-cluster-net-listen-backlog.js new file mode 100644 index 00000000000000..090552fd1e1eeb --- /dev/null +++ b/test/parallel/test-cluster-net-listen-backlog.js @@ -0,0 +1,45 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +// Monkey-patch `net.Server.listen` +const net = require('net'); +const cluster = require('cluster'); + +// Force round-robin scheduling policy +// as Windows defaults to SCHED_NONE +// https://nodejs.org/docs/latest/api/cluster.html#clusterschedulingpolicy +cluster.schedulingPolicy = cluster.SCHED_RR; + +// Ensures that the `backlog` is used to create a `net.Server`. +const kExpectedBacklog = 127; +if (cluster.isMaster) { + const listen = net.Server.prototype.listen; + + net.Server.prototype.listen = common.mustCall( + function(...args) { + const options = args[0]; + if (typeof options === 'object') { + assert(options.backlog, kExpectedBacklog); + } else { + assert(args[1], kExpectedBacklog); + } + return listen.call(this, ...args); + } + ); + + const worker = cluster.fork(); + worker.on('message', () => { + worker.disconnect(); + }); +} else { + const server = net.createServer(); + + server.listen({ + host: common.localhostIPv4, + port: 0, + backlog: kExpectedBacklog, + }, common.mustCall(() => { + process.send(true); + })); +} diff --git a/test/parallel/test-common-expect-warning.js b/test/parallel/test-common-expect-warning.js index be3e385b11e0f4..dff32037fbb5b1 100644 --- a/test/parallel/test-common-expect-warning.js +++ b/test/parallel/test-common-expect-warning.js @@ -36,9 +36,11 @@ if (process.argv[2] !== 'child') { child.stderr.on('data', (data) => { stderr += data; }); + child.stderr.on('end', common.mustCall(() => { + assert.match(stderr, /Unexpected extra warning received/); + })); child.on('exit', common.mustCall((status) => { assert.notStrictEqual(status, 0); - assert.match(stderr, /Unexpected extra warning received/); })); } } else { diff --git a/test/parallel/test-crypto-authenticated.js b/test/parallel/test-crypto-authenticated.js index 21c5af6cfe3e5e..3749895769ffc9 100644 --- a/test/parallel/test-crypto-authenticated.js +++ b/test/parallel/test-crypto-authenticated.js @@ -44,7 +44,7 @@ const errMessages = { state: / state/, FIPS: /not supported in FIPS mode/, length: /Invalid initialization vector/, - authTagLength: /Invalid authentication tag/ + authTagLength: /Invalid authentication tag length/ }; const ciphers = crypto.getCiphers(); @@ -687,3 +687,17 @@ for (const test of TEST_CASES) { }); } } + +{ + const key = 
Buffer.alloc(32); + const iv = Buffer.alloc(12); + + for (const authTagLength of [0, 17]) { + assert.throws(() => { + crypto.createCipheriv('chacha20-poly1305', key, iv, { authTagLength }); + }, { + code: 'ERR_CRYPTO_INVALID_AUTH_TAG', + message: errMessages.authTagLength + }); + } +} diff --git a/test/parallel/test-crypto-engine.js b/test/parallel/test-crypto-engine.js deleted file mode 100644 index ca2e4793683788..00000000000000 --- a/test/parallel/test-crypto-engine.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict'; -const common = require('../common'); -if (!common.hasCrypto) common.skip('missing crypto'); - -// This tests crypto.setEngine(). - -const assert = require('assert'); -const crypto = require('crypto'); -const fs = require('fs'); -const path = require('path'); - -assert.throws(() => crypto.setEngine(true), /ERR_INVALID_ARG_TYPE/); -assert.throws(() => crypto.setEngine('/path/to/engine', 'notANumber'), - /ERR_INVALID_ARG_TYPE/); - -{ - const invalidEngineName = 'xxx'; - assert.throws(() => crypto.setEngine(invalidEngineName), - /ERR_CRYPTO_ENGINE_UNKNOWN/); - assert.throws(() => crypto.setEngine(invalidEngineName, - crypto.constants.ENGINE_METHOD_RSA), - /ERR_CRYPTO_ENGINE_UNKNOWN/); -} - -crypto.setEngine('dynamic'); -crypto.setEngine('dynamic'); - -crypto.setEngine('dynamic', crypto.constants.ENGINE_METHOD_RSA); -crypto.setEngine('dynamic', crypto.constants.ENGINE_METHOD_RSA); - -{ - const engineName = 'test_crypto_engine'; - let engineLib; - if (common.isOSX) - engineLib = `lib${engineName}.dylib`; - else if (common.isLinux && process.arch === 'x64') - engineLib = `lib${engineName}.so`; - - if (engineLib !== undefined) { - const execDir = path.dirname(process.execPath); - const enginePath = path.join(execDir, engineLib); - const engineId = path.parse(engineLib).name; - - fs.accessSync(enginePath); - - crypto.setEngine(enginePath); - // OpenSSL 3.0.1 and 1.1.1m now throw errors if an engine is loaded again - // with a duplicate absolute path. - // TODO(richardlau): figure out why this fails on macOS but not Linux. - // crypto.setEngine(enginePath); - - // crypto.setEngine(enginePath, crypto.constants.ENGINE_METHOD_RSA); - // crypto.setEngine(enginePath, crypto.constants.ENGINE_METHOD_RSA); - - process.env.OPENSSL_ENGINES = execDir; - - crypto.setEngine(engineId); - crypto.setEngine(engineId); - - crypto.setEngine(engineId, crypto.constants.ENGINE_METHOD_RSA); - crypto.setEngine(engineId, crypto.constants.ENGINE_METHOD_RSA); - } -} diff --git a/test/parallel/test-crypto-key-objects.js b/test/parallel/test-crypto-key-objects.js index 13441ac6f96db2..40a982ea7b6cd6 100644 --- a/test/parallel/test-crypto-key-objects.js +++ b/test/parallel/test-crypto-key-objects.js @@ -21,6 +21,7 @@ const { privateDecrypt, privateEncrypt, getCurves, + generateKeySync, generateKeyPairSync, webcrypto, } = require('crypto'); @@ -844,3 +845,51 @@ const privateDsa = fixtures.readKey('dsa_private_encrypted_1025.pem', assert(!isKeyObject(cryptoKey)); }); } + +{ + const first = Buffer.from('Hello'); + const second = Buffer.from('World'); + const keyObject = createSecretKey(first); + assert(createSecretKey(first).equals(createSecretKey(first))); + assert(!createSecretKey(first).equals(createSecretKey(second))); + + assert.throws(() => keyObject.equals(0), { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE', + message: 'The "otherKeyObject" argument must be an instance of KeyObject. 
Received type number (0)' + }); + + assert(keyObject.equals(keyObject)); + assert(!keyObject.equals(createPublicKey(publicPem))); + assert(!keyObject.equals(createPrivateKey(privatePem))); +} + +{ + const first = generateKeyPairSync('ed25519'); + const second = generateKeyPairSync('ed25519'); + const secret = generateKeySync('aes', { length: 128 }); + + assert(first.publicKey.equals(first.publicKey)); + assert(first.publicKey.equals(createPublicKey( + first.publicKey.export({ format: 'pem', type: 'spki' })))); + assert(!first.publicKey.equals(second.publicKey)); + assert(!first.publicKey.equals(second.privateKey)); + assert(!first.publicKey.equals(secret)); + + assert(first.privateKey.equals(first.privateKey)); + assert(first.privateKey.equals(createPrivateKey( + first.privateKey.export({ format: 'pem', type: 'pkcs8' })))); + assert(!first.privateKey.equals(second.privateKey)); + assert(!first.privateKey.equals(second.publicKey)); + assert(!first.privateKey.equals(secret)); +} + +{ + const first = generateKeyPairSync('ed25519'); + const second = generateKeyPairSync('ed448'); + + assert(!first.publicKey.equals(second.publicKey)); + assert(!first.publicKey.equals(second.privateKey)); + assert(!first.privateKey.equals(second.privateKey)); + assert(!first.privateKey.equals(second.publicKey)); +} diff --git a/test/parallel/test-crypto-keygen.js b/test/parallel/test-crypto-keygen.js index f92a1091fc9411..0b18360a17dc25 100644 --- a/test/parallel/test-crypto-keygen.js +++ b/test/parallel/test-crypto-keygen.js @@ -1544,44 +1544,65 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); } } -if (!common.hasOpenSSL3) { - // Passing an empty passphrase string should not cause OpenSSL's default - // passphrase prompt in the terminal. - // See https://github.com/nodejs/node/issues/35898. - - for (const type of ['pkcs1', 'pkcs8']) { - generateKeyPair('rsa', { - modulusLength: 1024, - privateKeyEncoding: { - type, - format: 'pem', - cipher: 'aes-256-cbc', - passphrase: '' - } - }, common.mustSucceed((publicKey, privateKey) => { - assert.strictEqual(publicKey.type, 'public'); +// Passing an empty passphrase string should not cause OpenSSL's default +// passphrase prompt in the terminal. +// See https://github.com/nodejs/node/issues/35898. - for (const passphrase of ['', Buffer.alloc(0)]) { - const privateKeyObject = createPrivateKey({ - passphrase, - key: privateKey - }); - assert.strictEqual(privateKeyObject.asymmetricKeyType, 'rsa'); - } +for (const type of ['pkcs1', 'pkcs8']) { + generateKeyPair('rsa', { + modulusLength: 1024, + privateKeyEncoding: { + type, + format: 'pem', + cipher: 'aes-256-cbc', + passphrase: '' + } + }, common.mustSucceed((publicKey, privateKey) => { + assert.strictEqual(publicKey.type, 'public'); - // Encrypting with an empty passphrase is not the same as not encrypting - // the key, and not specifying a passphrase should fail when decoding it. - assert.throws(() => { - return testSignVerify(publicKey, privateKey); - }, { - name: 'TypeError', - code: 'ERR_MISSING_PASSPHRASE', - message: 'Passphrase required for encrypted key' + for (const passphrase of ['', Buffer.alloc(0)]) { + const privateKeyObject = createPrivateKey({ + passphrase, + key: privateKey }); - })); - } + assert.strictEqual(privateKeyObject.asymmetricKeyType, 'rsa'); + } + + // Encrypting with an empty passphrase is not the same as not encrypting + // the key, and not specifying a passphrase should fail when decoding it. 
+ assert.throws(() => { + return testSignVerify(publicKey, privateKey); + }, common.hasOpenSSL3 ? { + name: 'Error', + code: 'ERR_OSSL_CRYPTO_INTERRUPTED_OR_CANCELLED', + message: 'error:07880109:common libcrypto routines::interrupted or cancelled' + } : { + name: 'TypeError', + code: 'ERR_MISSING_PASSPHRASE', + message: 'Passphrase required for encrypted key' + }); + })); } +// Passing an empty passphrase string should not throw ERR_OSSL_CRYPTO_MALLOC_FAILURE even on OpenSSL 3. +// Regression test for https://github.com/nodejs/node/issues/41428. +generateKeyPair('rsa', { + modulusLength: 4096, + publicKeyEncoding: { + type: 'spki', + format: 'pem' + }, + privateKeyEncoding: { + type: 'pkcs8', + format: 'pem', + cipher: 'aes-256-cbc', + passphrase: '' + } +}, common.mustSucceed((publicKey, privateKey) => { + assert.strictEqual(typeof publicKey, 'string'); + assert.strictEqual(typeof privateKey, 'string'); +})); + { // Proprietary Web Cryptography API ECDH/ECDSA namedCurve parameters // should not be recognized in this API. diff --git a/test/parallel/test-crypto-prime.js b/test/parallel/test-crypto-prime.js index 749221b4514ef3..2d3f39aec15a08 100644 --- a/test/parallel/test-crypto-prime.js +++ b/test/parallel/test-crypto-prime.js @@ -41,7 +41,7 @@ const pCheckPrime = promisify(checkPrime); }); }); -[-1, 0].forEach((i) => { +[-1, 0, 2 ** 31, 2 ** 31 + 1, 2 ** 32 - 1, 2 ** 32].forEach((i) => { assert.throws(() => generatePrime(i, common.mustNotCall()), { code: 'ERR_OUT_OF_RANGE' }); diff --git a/test/parallel/test-crypto.js b/test/parallel/test-crypto.js index 58441be4d093f0..a8ceb169de2b3d 100644 --- a/test/parallel/test-crypto.js +++ b/test/parallel/test-crypto.js @@ -121,6 +121,19 @@ function validateList(list) { const cryptoCiphers = crypto.getCiphers(); assert(crypto.getCiphers().includes('aes-128-cbc')); validateList(cryptoCiphers); +// Make sure all of the ciphers are supported by OpenSSL +for (const algo of cryptoCiphers) { + const { ivLength, keyLength, mode } = crypto.getCipherInfo(algo); + let options; + if (mode === 'ccm') + options = { authTagLength: 8 }; + else if (mode === 'ocb' || algo === 'chacha20-poly1305') + options = { authTagLength: 16 }; + crypto.createCipheriv(algo, + crypto.randomBytes(keyLength), + crypto.randomBytes(ivLength || 0), + options); +} // Assume that we have at least AES256-SHA. const tlsCiphers = tls.getCiphers(); @@ -140,6 +153,9 @@ assert(!crypto.getHashes().includes('SHA256')); assert(crypto.getHashes().includes('RSA-SHA1')); assert(!crypto.getHashes().includes('rsa-sha1')); validateList(crypto.getHashes()); +// Make sure all of the hashes are supported by OpenSSL +for (const algo of crypto.getHashes()) + crypto.createHash(algo); // Assume that we have at least secp384r1. 
assert.notStrictEqual(crypto.getCurves().length, 0); diff --git a/test/parallel/test-eslint-documented-deprecation-codes.js b/test/parallel/test-eslint-documented-deprecation-codes.js new file mode 100644 index 00000000000000..45fec7786a28ac --- /dev/null +++ b/test/parallel/test-eslint-documented-deprecation-codes.js @@ -0,0 +1,42 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +if (!common.hasIntl) + common.skip('missing Intl'); +common.skipIfEslintMissing(); + +const RuleTester = require('../../tools/node_modules/eslint').RuleTester; +const rule = require('../../tools/eslint-rules/documented-deprecation-codes'); + +const mdFile = 'doc/api/deprecations.md'; + +const invalidCode = 'UNDOCUMENTED INVALID CODE'; + +new RuleTester().run('documented-deprecation-codes', rule, { + valid: [ + ` + deprecate(function() { + return this.getHeaders(); + }, 'OutgoingMessage.prototype._headers is deprecated', 'DEP0066') + `, + ], + invalid: [ + { + code: ` + deprecate(function foo(){}, 'bar', '${invalidCode}'); + `, + errors: [ + { + message: `"${invalidCode}" does not match the expected pattern`, + line: 2 + }, + { + message: `"${invalidCode}" is not documented in ${mdFile}`, + line: 2 + }, + ] + }, + ] +}); diff --git a/test/parallel/test-eslint-prefer-primordials.js b/test/parallel/test-eslint-prefer-primordials.js index 61b6b6327279cc..30c8cd25355c5a 100644 --- a/test/parallel/test-eslint-prefer-primordials.js +++ b/test/parallel/test-eslint-prefer-primordials.js @@ -99,6 +99,34 @@ new RuleTester({ `, options: [{ name: 'Function' }], }, + { + code: 'function identifier() {}', + options: [{ name: 'identifier' }] + }, + { + code: 'function* identifier() {}', + options: [{ name: 'identifier' }] + }, + { + code: 'class identifier {}', + options: [{ name: 'identifier' }] + }, + { + code: 'new class { identifier(){} }', + options: [{ name: 'identifier' }] + }, + { + code: 'const a = { identifier: \'4\' }', + options: [{ name: 'identifier' }] + }, + { + code: 'identifier:{const a = 4}', + options: [{ name: 'identifier' }] + }, + { + code: 'switch(0){case identifier:}', + options: [{ name: 'identifier' }] + }, ], invalid: [ { diff --git a/test/parallel/test-eventemitter-asyncresource.js b/test/parallel/test-eventemitter-asyncresource.js index ae86f3608b7ffd..4cd2bc69207219 100644 --- a/test/parallel/test-eventemitter-asyncresource.js +++ b/test/parallel/test-eventemitter-asyncresource.js @@ -10,6 +10,7 @@ const { const { deepStrictEqual, strictEqual, + throws, } = require('assert'); const { @@ -130,3 +131,29 @@ function makeHook(trackedTypes) { ], ])); })().then(common.mustCall()); + +// Member methods ERR_INVALID_THIS +throws( + () => EventEmitterAsyncResource.prototype.emit(), + { code: 'ERR_INVALID_THIS' } +); + +throws( + () => EventEmitterAsyncResource.prototype.emitDestroy(), + { code: 'ERR_INVALID_THIS' } +); + +throws( + () => Reflect.get(EventEmitterAsyncResource.prototype, 'asyncId', {}), + { code: 'ERR_INVALID_THIS' } +); + +throws( + () => Reflect.get(EventEmitterAsyncResource.prototype, 'triggerAsyncId', {}), + { code: 'ERR_INVALID_THIS' } +); + +throws( + () => Reflect.get(EventEmitterAsyncResource.prototype, 'asyncResource', {}), + { code: 'ERR_INVALID_THIS' } +); diff --git a/test/parallel/test-eventtarget.js b/test/parallel/test-eventtarget.js index d3b1ee7358a104..0211dc964d253a 100644 --- a/test/parallel/test-eventtarget.js +++ b/test/parallel/test-eventtarget.js @@ -408,6 +408,13 @@ let asyncTest = Promise.resolve(); 
target.onfoo = common.mustCall(); target.dispatchEvent(new Event('foo')); } + +{ + const target = new EventTarget(); + defineEventHandler(target, 'foo'); + strictEqual(target.onfoo, null); +} + { const target = new EventTarget(); defineEventHandler(target, 'foo'); @@ -623,14 +630,14 @@ let asyncTest = Promise.resolve(); strictEqual(et.constructor.name, 'EventTarget'); } { - // Weak event handlers work + // Weak event listeners work const et = new EventTarget(); const listener = common.mustCall(); et.addEventListener('foo', listener, { [kWeakHandler]: et }); et.dispatchEvent(new Event('foo')); } { - // Weak event handlers can be removed and weakness is not part of the key + // Weak event listeners can be removed and weakness is not part of the key const et = new EventTarget(); const listener = common.mustNotCall(); et.addEventListener('foo', listener, { [kWeakHandler]: et }); diff --git a/test/parallel/test-fetch-disabled.mjs b/test/parallel/test-fetch-disabled.mjs new file mode 100644 index 00000000000000..839cdf8f2ac947 --- /dev/null +++ b/test/parallel/test-fetch-disabled.mjs @@ -0,0 +1,9 @@ +import '../common/index.mjs'; + +import assert from 'assert'; + +assert.strictEqual(typeof globalThis.fetch, 'undefined'); +assert.strictEqual(typeof globalThis.FormData, 'undefined'); +assert.strictEqual(typeof globalThis.Headers, 'undefined'); +assert.strictEqual(typeof globalThis.Request, 'undefined'); +assert.strictEqual(typeof globalThis.Response, 'undefined'); diff --git a/test/parallel/test-fetch.mjs b/test/parallel/test-fetch.mjs new file mode 100644 index 00000000000000..d435ec3fa6e2b5 --- /dev/null +++ b/test/parallel/test-fetch.mjs @@ -0,0 +1,33 @@ +// Flags: --experimental-fetch --no-warnings + +import '../common/index.mjs'; + +import assert from 'assert'; +import events from 'events'; +import http from 'http'; + +assert.strictEqual(typeof globalThis.fetch, 'function'); +assert.strictEqual(typeof globalThis.FormData, 'function'); +assert.strictEqual(typeof globalThis.Headers, 'function'); +assert.strictEqual(typeof globalThis.Request, 'function'); +assert.strictEqual(typeof globalThis.Response, 'function'); + +const server = http.createServer((req, res) => { + // TODO: Remove this once keep-alive behavior can be disabled from the client + // side. + res.setHeader('Keep-Alive', 'timeout=0, max=0'); + res.end('Hello world'); +}); +server.listen(0); +await events.once(server, 'listening'); +const port = server.address().port; + +const response = await fetch(`http://localhost:${port}`); + +assert(response instanceof Response); +assert.strictEqual(response.status, 200); +assert.strictEqual(response.statusText, 'OK'); +const body = await response.text(); +assert.strictEqual(body, 'Hello world'); + +server.close(); diff --git a/test/parallel/test-fs-cp.mjs b/test/parallel/test-fs-cp.mjs index 804b5a1f4c322c..dfe6254c6b6494 100644 --- a/test/parallel/test-fs-cp.mjs +++ b/test/parallel/test-fs-cp.mjs @@ -95,6 +95,77 @@ function nextdir() { } +// It throws error when verbatimSymlinks is not a boolean. +{ + const src = './test/fixtures/copy/kitchen-sink'; + [1, [], {}, null, 1n, undefined, null, Symbol(), '', () => {}] + .forEach((verbatimSymlinks) => { + assert.throws( + () => cpSync(src, src, { verbatimSymlinks }), + { code: 'ERR_INVALID_ARG_TYPE' } + ); + }); +} + + +// It throws an error when both dereference and verbatimSymlinks are enabled. 
+{ + const src = './test/fixtures/copy/kitchen-sink'; + assert.throws( + () => cpSync(src, src, { dereference: true, verbatimSymlinks: true }), + { code: 'ERR_INCOMPATIBLE_OPTION_PAIR' } + ); +} + + +// It resolves relative symlinks to their absolute path by default. +{ + const src = nextdir(); + mkdirSync(src, { recursive: true }); + writeFileSync(join(src, 'foo.js'), 'foo', 'utf8'); + symlinkSync('foo.js', join(src, 'bar.js')); + + const dest = nextdir(); + mkdirSync(dest, { recursive: true }); + + cpSync(src, dest, { recursive: true }); + const link = readlinkSync(join(dest, 'bar.js')); + assert.strictEqual(link, join(src, 'foo.js')); +} + + +// It resolves relative symlinks when verbatimSymlinks is false. +{ + const src = nextdir(); + mkdirSync(src, { recursive: true }); + writeFileSync(join(src, 'foo.js'), 'foo', 'utf8'); + symlinkSync('foo.js', join(src, 'bar.js')); + + const dest = nextdir(); + mkdirSync(dest, { recursive: true }); + + cpSync(src, dest, { recursive: true, verbatimSymlinks: false }); + const link = readlinkSync(join(dest, 'bar.js')); + assert.strictEqual(link, join(src, 'foo.js')); +} + + +// It does not resolve relative symlinks when verbatimSymlinks is true. +{ + const src = nextdir(); + mkdirSync(src, { recursive: true }); + writeFileSync(join(src, 'foo.js'), 'foo', 'utf8'); + symlinkSync('foo.js', join(src, 'bar.js')); + + const dest = nextdir(); + mkdirSync(dest, { recursive: true }); + + cpSync(src, dest, { recursive: true, verbatimSymlinks: true }); + const link = readlinkSync(join(dest, 'bar.js')); + assert.strictEqual(link, 'foo.js'); +} + + // It throws error when src and dest are identical. { const src = './test/fixtures/copy/kitchen-sink'; diff --git a/test/parallel/test-fs-readSync-optional-params.js b/test/parallel/test-fs-readSync-optional-params.js index 37d3d24911db51..00f1a5531cf6ea 100644 --- a/test/parallel/test-fs-readSync-optional-params.js +++ b/test/parallel/test-fs-readSync-optional-params.js @@ -5,23 +5,53 @@ const fixtures = require('../common/fixtures'); const fs = require('fs'); const assert = require('assert'); const filepath = fixtures.path('x.txt'); -const fd = fs.openSync(filepath, 'r'); const expected = Buffer.from('xyz\n'); function runTest(defaultBuffer, options) { - const result = fs.readSync(fd, defaultBuffer, options); - assert.strictEqual(result, expected.length); - assert.deepStrictEqual(defaultBuffer, expected); + let fd; + try { + fd = fs.openSync(filepath, 'r'); + const result = fs.readSync(fd, defaultBuffer, options); + assert.strictEqual(result, expected.length); + assert.deepStrictEqual(defaultBuffer, expected); + } finally { + if (fd != null) fs.closeSync(fd); + } } -// Test passing in an empty options object -runTest(Buffer.allocUnsafe(expected.length), { position: 0 }); +for (const options of [ -// Test not passing in any options object -runTest(Buffer.allocUnsafe(expected.length)); + // Test options object + { offset: 0 }, + { length: expected.length }, + { position: 0 }, + { offset: 0, length: expected.length }, + { offset: 0, position: 0 }, + { length: expected.length, position: 0 }, + { offset: 0, length: expected.length, position: 0 }, -// Test passing in options -runTest(Buffer.allocUnsafe(expected.length), { offset: 0, - length: expected.length, - position: 0 }); + { offset: null }, + { position: null }, + { position: -1 }, + { position: 0n }, + + // Test default params + {}, + null, + undefined, + + // Test if bad params are interpreted as default (not mandatory) + false, + true, + Infinity, + 42n, + Symbol(), 
+ + // Test even more malicious corner cases + '4'.repeat(expected.length), + new String('4444'), + [4, 4, 4, 4], +]) { + runTest(Buffer.allocUnsafe(expected.length), options); +} diff --git a/test/parallel/test-fs-writev-promises.js b/test/parallel/test-fs-writev-promises.js index 7c610adb4e50c2..650f00dae74b69 100644 --- a/test/parallel/test-fs-writev-promises.js +++ b/test/parallel/test-fs-writev-promises.js @@ -47,4 +47,13 @@ tmpdir.refresh(); assert(Buffer.concat(bufferArr).equals(await fs.readFile(filename))); handle.close(); } + + { + // Writev with empty array behavior + const handle = await fs.open(getFileName(), 'w'); + const result = await handle.writev([]); + assert.strictEqual(result.bytesWritten, 0); + assert.strictEqual(result.buffers.length, 0); + handle.close(); + } })().then(common.mustCall()); diff --git a/test/parallel/test-fs-writev-sync.js b/test/parallel/test-fs-writev-sync.js index 33b76c7af25d9f..1db437972ed29d 100644 --- a/test/parallel/test-fs-writev-sync.js +++ b/test/parallel/test-fs-writev-sync.js @@ -56,11 +56,21 @@ const getFileName = (i) => path.join(tmpdir.path, `writev_sync_${i}.txt`); assert(Buffer.concat(bufferArr).equals(fs.readFileSync(filename))); } +// fs.writevSync with empty array of buffers +{ + const filename = getFileName(3); + const fd = fs.openSync(filename, 'w'); + const written = fs.writevSync(fd, []); + assert.strictEqual(written, 0); + fs.closeSync(fd); + +} + /** * Testing with wrong input types */ { - const filename = getFileName(3); + const filename = getFileName(4); const fd = fs.openSync(filename, 'w'); [false, 'test', {}, [{}], ['sdf'], null, undefined].forEach((i) => { diff --git a/test/parallel/test-fs-writev.js b/test/parallel/test-fs-writev.js index a516f9c328f1b2..7ea52ef77ac4ee 100644 --- a/test/parallel/test-fs-writev.js +++ b/test/parallel/test-fs-writev.js @@ -57,11 +57,30 @@ const getFileName = (i) => path.join(tmpdir.path, `writev_${i}.txt`); fs.writev(fd, bufferArr, done); } + +// fs.writev with empty array of buffers +{ + const filename = getFileName(3); + const fd = fs.openSync(filename, 'w'); + const bufferArr = []; + let afterSyncCall = false; + + const done = common.mustSucceed((written, buffers) => { + assert.strictEqual(buffers.length, 0); + assert.strictEqual(written, 0); + assert(afterSyncCall); + fs.closeSync(fd); + }); + + fs.writev(fd, bufferArr, done); + afterSyncCall = true; +} + /** * Testing with wrong input types */ { - const filename = getFileName(3); + const filename = getFileName(4); const fd = fs.openSync(filename, 'w'); [false, 'test', {}, [{}], ['sdf'], null, undefined].forEach((i) => { diff --git a/test/parallel/test-global-webcrypto-classes.js b/test/parallel/test-global-webcrypto-classes.js new file mode 100644 index 00000000000000..083592bd92278c --- /dev/null +++ b/test/parallel/test-global-webcrypto-classes.js @@ -0,0 +1,13 @@ +// Flags: --experimental-global-webcrypto --expose-internals +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const webcrypto = require('internal/crypto/webcrypto'); + +assert.strictEqual(Crypto, webcrypto.Crypto); +assert.strictEqual(CryptoKey, webcrypto.CryptoKey); +assert.strictEqual(SubtleCrypto, webcrypto.SubtleCrypto); diff --git a/test/parallel/test-global-webcrypto.js b/test/parallel/test-global-webcrypto.js new file mode 100644 index 00000000000000..70f84a1b3a0643 --- /dev/null +++ b/test/parallel/test-global-webcrypto.js @@ -0,0 +1,13 @@ +// Flags: 
--experimental-global-webcrypto +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const crypto = require('crypto'); + +assert.strictEqual(globalThis.crypto, crypto.webcrypto); +assert.strictEqual(Crypto, crypto.webcrypto.constructor); +assert.strictEqual(SubtleCrypto, crypto.webcrypto.subtle.constructor); diff --git a/test/parallel/test-http-agent-getname.js b/test/parallel/test-http-agent-getname.js index ab946a4bde3ddf..14cfcfaabdf6f4 100644 --- a/test/parallel/test-http-agent-getname.js +++ b/test/parallel/test-http-agent-getname.js @@ -18,7 +18,13 @@ assert.strictEqual( 'localhost:80:192.168.1.1' ); -// empty +// empty argument +assert.strictEqual( + agent.getName(), + 'localhost::' +); + +// empty options assert.strictEqual( agent.getName({}), 'localhost::' diff --git a/test/parallel/test-http-agent-timeout.js b/test/parallel/test-http-agent-timeout.js index 07c189e9745330..5eb2abe9bb7e69 100644 --- a/test/parallel/test-http-agent-timeout.js +++ b/test/parallel/test-http-agent-timeout.js @@ -34,7 +34,7 @@ const http = require('http'); } { - // Ensure that timeouted sockets are not reused. + // Ensure that timed-out sockets are not reused. const agent = new http.Agent({ keepAlive: true, timeout: 50 }); diff --git a/test/parallel/test-http-outgoing-internal-headernames-getter.js b/test/parallel/test-http-outgoing-internal-headernames-getter.js index 4a56a1301050b5..2d8238066e0cd4 100644 --- a/test/parallel/test-http-outgoing-internal-headernames-getter.js +++ b/test/parallel/test-http-outgoing-internal-headernames-getter.js @@ -2,6 +2,7 @@ const common = require('../common'); const { OutgoingMessage } = require('http'); +const assert = require('assert'); const warn = 'OutgoingMessage.prototype._headerNames is deprecated'; common.expectWarning('DeprecationWarning', warn, 'DEP0066'); @@ -11,3 +12,12 @@ common.expectWarning('DeprecationWarning', warn, 'DEP0066'); const outgoingMessage = new OutgoingMessage(); outgoingMessage._headerNames; // eslint-disable-line no-unused-expressions } + +{ + // Tests _headerNames getter result after setting a header. 
+ const outgoingMessage = new OutgoingMessage(); + outgoingMessage.setHeader('key', 'value'); + const expect = Object.create(null); + expect.key = 'key'; + assert.deepStrictEqual(outgoingMessage._headerNames, expect); +} diff --git a/test/parallel/test-http-perf_hooks.js b/test/parallel/test-http-perf_hooks.js index 826211472f222c..0708a1e8c06f5a 100644 --- a/test/parallel/test-http-perf_hooks.js +++ b/test/parallel/test-http-perf_hooks.js @@ -5,13 +5,9 @@ const assert = require('assert'); const http = require('http'); const { PerformanceObserver } = require('perf_hooks'); - +const entries = []; const obs = new PerformanceObserver(common.mustCallAtLeast((items) => { - items.getEntries().forEach((entry) => { - assert.strictEqual(entry.entryType, 'http'); - assert.strictEqual(typeof entry.startTime, 'number'); - assert.strictEqual(typeof entry.duration, 'number'); - }); + entries.push(...items.getEntries()); })); obs.observe({ type: 'http' }); @@ -57,3 +53,20 @@ server.listen(0, common.mustCall(async () => { ]); server.close(); })); + +process.on('exit', () => { + let numberOfHttpClients = 0; + let numberOfHttpRequests = 0; + entries.forEach((entry) => { + assert.strictEqual(entry.entryType, 'http'); + assert.strictEqual(typeof entry.startTime, 'number'); + assert.strictEqual(typeof entry.duration, 'number'); + if (entry.name === 'HttpClient') { + numberOfHttpClients++; + } else if (entry.name === 'HttpRequest') { + numberOfHttpRequests++; + } + }); + assert.strictEqual(numberOfHttpClients, 2); + assert.strictEqual(numberOfHttpRequests, 2); +}); diff --git a/test/parallel/test-http2-exceeds-server-trailer-size.js b/test/parallel/test-http2-exceeds-server-trailer-size.js new file mode 100644 index 00000000000000..87c1070afbb7a4 --- /dev/null +++ b/test/parallel/test-http2-exceeds-server-trailer-size.js @@ -0,0 +1,51 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const { createServer, constants, connect } = require('http2'); + +const server = createServer(); + +server.on('stream', (stream, headers) => { + stream.respond(undefined, { waitForTrailers: true }); + + stream.on('data', common.mustNotCall()); + + stream.on('wantTrailers', common.mustCall(() => { + // Trigger a frame error by sending a trailer that is too large + stream.sendTrailers({ 'test-trailer': 'X'.repeat(64 * 1024) }); + })); + + stream.on('frameError', common.mustCall((frameType, errorCode) => { + assert.strictEqual(errorCode, constants.NGHTTP2_FRAME_SIZE_ERROR); + })); + + stream.on('error', common.expectsError({ + code: 'ERR_HTTP2_STREAM_ERROR', + })); + + stream.on('close', common.mustCall()); + + stream.end(); +}); + +server.listen(0, () => { + const clientSession = connect(`http://localhost:${server.address().port}`); + + clientSession.on('frameError', common.mustNotCall()); + clientSession.on('close', common.mustCall(() => { + server.close(); + })); + + const clientStream = clientSession.request(); + + clientStream.on('close', common.mustCall()); + // These events mustn't be called once the frame size error is from the server + clientStream.on('frameError', common.mustNotCall()); + clientStream.on('error', common.mustNotCall()); + + clientStream.end(); +}); diff --git a/test/parallel/test-http2-goaway-delayed-request.js b/test/parallel/test-http2-goaway-delayed-request.js new file mode 100644 index 00000000000000..7afadbe80186ee --- /dev/null +++ b/test/parallel/test-http2-goaway-delayed-request.js @@ -0,0 +1,22 @@ 
+'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const http2 = require('http2'); + +const server = http2.createServer(); + +server.listen(0, () => { + const client = http2.connect(`http://localhost:${server.address().port}`); + client.on('close', common.mustCall(() => { + server.close(); + })); + + // The client.close() is executed before the socket is able to make request + const stream = client.request(); + stream.on('error', common.expectsError({ code: 'ERR_HTTP2_GOAWAY_SESSION' })); + + setImmediate(() => client.close()); +}); diff --git a/test/parallel/test-http2-options-max-headers-block-length.js b/test/parallel/test-http2-options-max-headers-block-length.js index 11632c6e825c53..af1cc6f9bc4860 100644 --- a/test/parallel/test-http2-options-max-headers-block-length.js +++ b/test/parallel/test-http2-options-max-headers-block-length.js @@ -32,12 +32,12 @@ server.listen(0, common.mustCall(() => { })); req.on('frameError', common.mustCall((type, code) => { - assert.strictEqual(code, h2.constants.NGHTTP2_ERR_FRAME_SIZE_ERROR); + assert.strictEqual(code, h2.constants.NGHTTP2_FRAME_SIZE_ERROR); })); req.on('error', common.expectsError({ code: 'ERR_HTTP2_STREAM_ERROR', name: 'Error', - message: 'Stream closed with error code NGHTTP2_REFUSED_STREAM' + message: 'Stream closed with error code NGHTTP2_FRAME_SIZE_ERROR' })); })); diff --git a/test/parallel/test-https-agent-getname.js b/test/parallel/test-https-agent-getname.js index 6f8c32b299a669..2a13ab1c6f47ee 100644 --- a/test/parallel/test-https-agent-getname.js +++ b/test/parallel/test-https-agent-getname.js @@ -9,6 +9,12 @@ const https = require('https'); const agent = new https.Agent(); +// empty argument +assert.strictEqual( + agent.getName(), + 'localhost::::::::::::::::::::::' +); + // empty options assert.strictEqual( agent.getName({}), diff --git a/test/parallel/test-intl.js b/test/parallel/test-intl.js index 956383c5281389..7d1742f2c7d1c6 100644 --- a/test/parallel/test-intl.js +++ b/test/parallel/test-intl.js @@ -104,6 +104,12 @@ if (!common.hasIntl) { const numberFormat = new Intl.NumberFormat(['en']).format(12345.67890); assert.strictEqual(numberFormat, '12,345.679'); } + // If list is specified and doesn't contain 'en-US' then return. 
+ if (process.config.variables.icu_locales && !haveLocale('en-US')) { + common.printSkipMessage('detailed Intl tests because American English is ' + + 'not listed as supported.'); + return; + } // Number format resolved options { const numberFormat = new Intl.NumberFormat('en-US', { style: 'percent' }); diff --git a/test/parallel/test-net-connect-keepalive.js b/test/parallel/test-net-connect-keepalive.js new file mode 100644 index 00000000000000..3e439c647c36be --- /dev/null +++ b/test/parallel/test-net-connect-keepalive.js @@ -0,0 +1,56 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +const truthyValues = [true, 1, 'true', {}, []]; +const delays = [[123, 0], [456123, 456], [-123000, 0], [undefined, 0]]; +const falseyValues = [false, 0, '']; + +const genSetKeepAlive = (desiredEnable, desiredDelay) => (enable, delay) => { + assert.strictEqual(enable, desiredEnable); + assert.strictEqual(delay, desiredDelay); +}; + +for (const value of truthyValues) { + for (const delay of delays) { + const server = net.createServer(); + + server.listen(0, common.mustCall(function() { + const port = server.address().port; + + const client = net.connect( + { port, keepAlive: value, keepAliveInitialDelay: delay[0] }, + common.mustCall(() => client.end()) + ); + + client._handle.setKeepAlive = common.mustCall( + genSetKeepAlive(true, delay[1]) + ); + + client.on('end', common.mustCall(function() { + server.close(); + })); + })); + } +} + +for (const value of falseyValues) { + const server = net.createServer(); + + server.listen(0, common.mustCall(function() { + const port = server.address().port; + + const client = net.connect( + { port, keepAlive: value }, + common.mustCall(() => client.end()) + ); + + client._handle.setKeepAlive = common.mustNotCall(); + + client.on('end', common.mustCall(function() { + server.close(); + })); + })); +} diff --git a/test/parallel/test-net-connect-nodelay.js b/test/parallel/test-net-connect-nodelay.js new file mode 100644 index 00000000000000..6810e339e2b6c4 --- /dev/null +++ b/test/parallel/test-net-connect-nodelay.js @@ -0,0 +1,49 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +const truthyValues = [true, 1, 'true', {}, []]; +const falseyValues = [false, 0, '']; +const genSetNoDelay = (desiredArg) => (enable) => { + assert.strictEqual(enable, desiredArg); +}; + +for (const value of truthyValues) { + const server = net.createServer(); + + server.listen(0, common.mustCall(function() { + const port = server.address().port; + + const client = net.connect( + { port, noDelay: value }, + common.mustCall(() => client.end()) + ); + + client._handle.setNoDelay = common.mustCall(genSetNoDelay(true)); + + client.on('end', common.mustCall(function() { + server.close(); + })); + })); +} + +for (const value of falseyValues) { + const server = net.createServer(); + + server.listen(0, common.mustCall(function() { + const port = server.address().port; + + const client = net.connect( + { port, noDelay: value }, + common.mustCall(() => client.end()) + ); + + client._handle.setNoDelay = common.mustNotCall(); + + client.on('end', common.mustCall(function() { + server.close(); + })); + })); +} diff --git a/test/parallel/test-performance-timeline.mjs b/test/parallel/test-performance-timeline.mjs new file mode 100644 index 00000000000000..57e0f6f0b7b6da --- /dev/null +++ b/test/parallel/test-performance-timeline.mjs @@ -0,0 +1,50 @@ +// This file may 
needs to be updated to wpt: +// https://github.com/web-platform-tests/wpt + +import '../common/index.mjs'; +import assert from 'assert'; + +import { performance } from 'perf_hooks'; +import { setTimeout } from 'timers/promises'; + +// Order by startTime +performance.mark('one'); +await setTimeout(50); +performance.mark('two'); +await setTimeout(50); +performance.mark('three'); +await setTimeout(50); +performance.measure('three', 'three'); +await setTimeout(50); +performance.measure('two', 'two'); +await setTimeout(50); +performance.measure('one', 'one'); +const entries = performance.getEntriesByType('measure'); +assert.deepStrictEqual(entries.map((x) => x.name), ['one', 'two', 'three']); +const allEntries = performance.getEntries(); +assert.deepStrictEqual(allEntries.map((x) => x.name), ['one', 'one', 'two', 'two', 'three', 'three']); + +performance.mark('a'); +await setTimeout(50); +performance.measure('a', 'a'); +await setTimeout(50); +performance.mark('a'); +await setTimeout(50); +performance.measure('a', 'one'); +const entriesByName = performance.getEntriesByName('a'); +assert.deepStrictEqual(entriesByName.map((x) => x.entryType), ['measure', 'mark', 'measure', 'mark']); + +// getEntriesBy[Name|Type](undefined) +performance.mark(undefined); +assert.strictEqual(performance.getEntriesByName(undefined).length, 1); +assert.strictEqual(performance.getEntriesByType(undefined).length, 0); +assert.throws(() => performance.getEntriesByName(), { + name: 'TypeError', + message: 'The "name" argument must be specified', + code: 'ERR_MISSING_ARGS' +}); +assert.throws(() => performance.getEntriesByType(), { + name: 'TypeError', + message: 'The "type" argument must be specified', + code: 'ERR_MISSING_ARGS' +}); diff --git a/test/parallel/test-process-env-tz.js b/test/parallel/test-process-env-tz.js index da716299c9f622..b0188ab9c267f8 100644 --- a/test/parallel/test-process-env-tz.js +++ b/test/parallel/test-process-env-tz.js @@ -31,19 +31,19 @@ if (date.toString().includes('(Central European Time)') || common.skip('tzdata too old'); } -assert.strictEqual( - date.toString().replace('Central European Summer Time', 'CEST'), - 'Sat Apr 14 2018 14:34:56 GMT+0200 (CEST)'); +assert.match( + date.toString(), + /^Sat Apr 14 2018 14:34:56 GMT\+0200 \(.+\)$/); process.env.TZ = 'Europe/London'; -assert.strictEqual( - date.toString().replace('British Summer Time', 'BST'), - 'Sat Apr 14 2018 13:34:56 GMT+0100 (BST)'); +assert.match( + date.toString(), + /^Sat Apr 14 2018 13:34:56 GMT\+0100 \(.+\)$/); process.env.TZ = 'Etc/UTC'; -assert.strictEqual( - date.toString().replace('Coordinated Universal Time', 'UTC'), - 'Sat Apr 14 2018 12:34:56 GMT+0000 (UTC)'); +assert.match( + date.toString(), + /^Sat Apr 14 2018 12:34:56 GMT\+0000 \(.+\)$/); // Just check that deleting the environment variable doesn't crash the process. // We can't really check the result of date.toString() because we don't know diff --git a/test/parallel/test-repl-history-navigation.js b/test/parallel/test-repl-history-navigation.js index 527cf235bddd21..29cb7816f0feb0 100644 --- a/test/parallel/test-repl-history-navigation.js +++ b/test/parallel/test-repl-history-navigation.js @@ -393,7 +393,7 @@ const tests = [ // 10. Word right. Cleanup '\x1B[0K', '\x1B[3G', '\x1B[7C', ' // n', '\x1B[10G', // 11. ESCAPE - '\x1B[0K', ' // n', '\x1B[10G', '\x1B[0K', + '\x1B[0K', // 12. 
ENTER '\r\n', 'Uncaught ReferenceError: functio is not defined\n', diff --git a/test/parallel/test-repl-tab-complete.js b/test/parallel/test-repl-tab-complete.js index aa5c7958a58b2e..270fb768b030cf 100644 --- a/test/parallel/test-repl-tab-complete.js +++ b/test/parallel/test-repl-tab-complete.js @@ -558,6 +558,26 @@ testMe.complete('obj.', common.mustCall(function(error, data) { putIn.run(['.clear']); testMe.complete('Buffer.prototype.', common.mustCall()); +// Make sure repl gives correct autocomplete on literals +testMe.complete('``.a', common.mustCall((err, data) => { + assert.strictEqual(data[0].includes('``.at'), true); +})); +testMe.complete('\'\'.a', common.mustCall((err, data) => { + assert.strictEqual(data[0].includes('\'\'.at'), true); +})); +testMe.complete('"".a', common.mustCall((err, data) => { + assert.strictEqual(data[0].includes('"".at'), true); +})); +testMe.complete('("").a', common.mustCall((err, data) => { + assert.strictEqual(data[0].includes('("").at'), true); +})); +testMe.complete('[].a', common.mustCall((err, data) => { + assert.strictEqual(data[0].includes('[].at'), true); +})); +testMe.complete('{}.a', common.mustCall((err, data) => { + assert.deepStrictEqual(data[0], []); +})); + const testNonGlobal = repl.start({ input: putIn, output: putIn, diff --git a/test/parallel/test-set-incoming-message-header.js b/test/parallel/test-set-incoming-message-header.js new file mode 100644 index 00000000000000..9ac05a8138d445 --- /dev/null +++ b/test/parallel/test-set-incoming-message-header.js @@ -0,0 +1,27 @@ +'use strict'; + +require('../common'); +const { IncomingMessage } = require('http'); +const assert = require('assert'); + +// Headers setter function set a header correctly +{ + const im = new IncomingMessage(); + im.headers = { key: 'value' }; + assert.deepStrictEqual(im.headers, { key: 'value' }); +} + +// Trailers setter function set a header correctly +{ + const im = new IncomingMessage(); + im.trailers = { key: 'value' }; + assert.deepStrictEqual(im.trailers, { key: 'value' }); +} + +// _addHeaderLines function set a header correctly +{ + const im = new IncomingMessage(); + im.headers = { key1: 'value1' }; + im._addHeaderLines(['key2', 'value2'], 2); + assert.deepStrictEqual(im.headers, { key1: 'value1', key2: 'value2' }); +} diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs new file mode 100644 index 00000000000000..382ec7a8af04b6 --- /dev/null +++ b/test/parallel/test-stream-asIndexedPairs.mjs @@ -0,0 +1,47 @@ +import '../common/index.mjs'; +import { Readable } from 'stream'; +import { deepStrictEqual, rejects } from 'assert'; + +{ + // asIndexedPairs with a synchronous stream + const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); + const empty = await Readable.from([]).asIndexedPairs().toArray(); + deepStrictEqual(empty, []); +} + +{ + // asIndexedPairs works an asynchronous streams + const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x); + const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); + const empty = await asyncFrom([]).asIndexedPairs().toArray(); + deepStrictEqual(empty, []); +} + +{ + // Does not enumerate an infinite stream + const infinite = () => Readable.from(async function* () { + while (true) yield 1; + }()); + const pairs = await infinite().asIndexedPairs().take(3).toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 1], [2, 1]]); + 
const empty = await infinite().asIndexedPairs().take(0).toArray(); + deepStrictEqual(empty, []); +} + +{ + // AbortSignal + await rejects(async () => { + const ac = new AbortController(); + const { signal } = ac; + const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); + ac.abort(); + await p; + }, { name: 'AbortError' }); + + await rejects(async () => { + const signal = AbortSignal.abort(); + await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); + }, /AbortError/); +} diff --git a/test/parallel/test-stream-construct-async-error.js b/test/parallel/test-stream-construct-async-error.js index ea2d8740e29c94..3fe81b4ebe2d6b 100644 --- a/test/parallel/test-stream-construct-async-error.js +++ b/test/parallel/test-stream-construct-async-error.js @@ -12,7 +12,6 @@ const assert = require('assert'); { class Foo extends Duplex { async _destroy(err, cb) { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); throw new Error('boom'); } @@ -31,7 +30,6 @@ const assert = require('assert'); { class Foo extends Duplex { async _destroy(err, cb) { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); } } @@ -46,7 +44,6 @@ const assert = require('assert'); { class Foo extends Duplex { async _construct() { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); } @@ -64,7 +61,6 @@ const assert = require('assert'); { class Foo extends Duplex { async _construct(callback) { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); callback(); } @@ -88,7 +84,6 @@ const assert = require('assert'); }); async _final() { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); } } @@ -105,7 +100,6 @@ const assert = require('assert'); }); async _final(callback) { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); callback(); } @@ -123,7 +117,6 @@ const assert = require('assert'); }); async _final() { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); throw new Error('boom'); } diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js new file mode 100644 index 00000000000000..ddeb6054a78164 --- /dev/null +++ b/test/parallel/test-stream-drop-take.js @@ -0,0 +1,96 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const { deepStrictEqual, rejects, throws } = require('assert'); + +const { from } = Readable; + +const fromAsync = (...args) => from(...args).map(async (x) => x); + +const naturals = () => from(async function*() { + let i = 1; + while (true) { + yield i++; + } +}()); + +{ + // Synchronous streams + (async () => { + deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]); + deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]); + deepStrictEqual(await from([]).drop(2).toArray(), []); + deepStrictEqual(await from([]).take(1).toArray(), []); + deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2]); + deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]); + deepStrictEqual(await from([1, 2]).take(0).toArray(), []); + })().then(common.mustCall()); + // Asynchronous streams + (async () => { + deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]); + deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]); + deepStrictEqual(await 
fromAsync([]).drop(2).toArray(), []); + deepStrictEqual(await fromAsync([]).take(1).toArray(), []); + deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]); + deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]); + deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []); + })().then(common.mustCall()); + // Infinite streams + // Asynchronous streams + (async () => { + deepStrictEqual(await naturals().take(1).toArray(), [1]); + deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]); + const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; + deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10); + deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]); + })().then(common.mustCall()); +} + +{ + // Coercion + (async () => { + // The spec made me do this ^^ + deepStrictEqual(await naturals().take('cat').toArray(), []); + deepStrictEqual(await naturals().take('2').toArray(), [1, 2]); + deepStrictEqual(await naturals().take(true).toArray(), [1]); + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + rejects( + Readable.from([1, 2, 3]).take(1, { signal: ac.signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); + rejects( + Readable.from([1, 2, 3]).drop(1, { signal: ac.signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} + +{ + // Support for AbortSignal, already aborted + const signal = AbortSignal.abort(); + rejects( + Readable.from([1, 2, 3]).take(1, { signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); +} + +{ + // Error cases + const invalidArgs = [ + -1, + -Infinity, + -40, + ]; + + for (const example of invalidArgs) { + throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/); + } +} diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js index 100921a766977e..494c94f02f8cb0 100644 --- a/test/parallel/test-stream-filter.js +++ b/test/parallel/test-stream-filter.js @@ -87,20 +87,11 @@ const { setTimeout } = require('timers/promises'); { // Error cases - assert.rejects(async () => { - // eslint-disable-next-line no-unused-vars - for await (const unused of Readable.from([1]).filter(1)); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); - assert.rejects(async () => { - // eslint-disable-next-line no-unused-vars - for await (const _ of Readable.from([1]).filter((x) => x, { - concurrency: 'Foo' - })); - }, /ERR_OUT_OF_RANGE/).then(common.mustCall()); - assert.rejects(async () => { - // eslint-disable-next-line no-unused-vars - for await (const _ of Readable.from([1]).filter((x) => x, 1)); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).filter((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/); } { // Test result is a Readable diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js new file mode 100644 index 00000000000000..952043b07b35f7 --- /dev/null +++ b/test/parallel/test-stream-flatMap.js @@ -0,0 +1,122 @@ +'use strict'; + +const common = require('../common'); +const fixtures = require('../common/fixtures'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); +const { setTimeout } = require('timers/promises'); +const { createReadStream } = require('fs'); + 
+function oneTo5() { + return Readable.from([1, 2, 3, 4, 5]); +} + +{ + // flatMap works on synchronous streams with a synchronous mapper + (async () => { + assert.deepStrictEqual( + await oneTo5().flatMap((x) => [x + x]).toArray(), + [2, 4, 6, 8, 10] + ); + assert.deepStrictEqual( + await oneTo5().flatMap(() => []).toArray(), + [] + ); + assert.deepStrictEqual( + await oneTo5().flatMap((x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + })().then(common.mustCall()); +} + + +{ + // flatMap works on sync/async streams with an asynchronous mapper + (async () => { + assert.deepStrictEqual( + await oneTo5().flatMap(async (x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + const asyncOneTo5 = oneTo5().map(async (x) => x); + assert.deepStrictEqual( + await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + })().then(common.mustCall()); +} +{ + // flatMap works on a stream where mapping returns a stream + (async () => { + const result = await oneTo5().flatMap(async (x) => { + return Readable.from([x, x]); + }).toArray(); + assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]); + })().then(common.mustCall()); + // flatMap works on an objectMode stream where mappign returns a stream + (async () => { + const result = await oneTo5().flatMap(() => { + return createReadStream(fixtures.path('x.txt')); + }).toArray(); + // The resultant stream is in object mode so toArray shouldn't flatten + assert.strictEqual(result.length, 5); + assert.deepStrictEqual( + Buffer.concat(result).toString(), + 'xyz\n'.repeat(5) + ); + + })().then(common.mustCall()); + +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { signal }); + }), { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + console.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); + + queueMicrotask(() => { + ac.abort(); + }); +} + +{ + // Already aborted AbortSignal + const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { signal }); + }), { signal: AbortSignal.abort() }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + console.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); +} + +{ + // Error cases + assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/); +} +{ + // Test result is a Readable + const stream = oneTo5().flatMap((x) => x); + assert.strictEqual(stream.readable, true); +} diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js new file mode 100644 index 00000000000000..ae9dfd431f9f37 --- /dev/null +++ b/test/parallel/test-stream-forEach.js @@ -0,0 +1,131 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); +const { once } = require('events'); + +{ + // forEach works on synchronous streams with a synchronous predicate + const stream = Readable.from([1, 2, 3]); + const result = [1, 2, 3]; + (async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())); + 
})().then(common.mustCall()); +} + +{ + // forEach works an asynchronous streams + const stream = Readable.from([1, 2, 3]).filter(async (x) => { + await Promise.resolve(); + return true; + }); + const result = [1, 2, 3]; + (async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())); + })().then(common.mustCall()); +} + +{ + // forEach works on asynchronous streams with a asynchronous forEach fn + const stream = Readable.from([1, 2, 3]).filter(async (x) => { + await Promise.resolve(); + return true; + }); + const result = [1, 2, 3]; + (async () => { + await stream.forEach(async (value) => { + await Promise.resolve(); + assert.strictEqual(value, result.shift()); + }); + })().then(common.mustCall()); +} + +{ + // forEach works on an infinite stream + const ac = new AbortController(); + const { signal } = ac; + const stream = Readable.from(async function* () { + while (true) yield 1; + }(), { signal }); + let i = 0; + assert.rejects(stream.forEach(common.mustCall((x) => { + i++; + if (i === 10) ac.abort(); + assert.strictEqual(x, 1); + }, 10)), { name: 'AbortError' }).then(common.mustCall()); +} + +{ + // Emitting an error during `forEach` + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach(async (x) => { + if (x === 3) { + stream.emit('error', new Error('boom')); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `forEach` (sync) + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach((x) => { + if (x === 3) { + throw new Error('boom'); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `forEach` (async) + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach(async (x) => { + if (x === 3) { + return Promise.reject(new Error('boom')); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + let calls = 0; + const forEachPromise = + Readable.from([1, 2, 3, 4]).forEach(async (_, { signal }) => { + calls++; + await once(signal, 'abort'); + }, { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + await forEachPromise; + }, { + name: 'AbortError', + }).then(common.mustCall()); + + setImmediate(() => { + ac.abort(); + assert.strictEqual(calls, 2); + }); +} + +{ + // Error cases + assert.rejects(async () => { + await Readable.from([1]).forEach(1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).forEach((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).forEach((x) => x, 1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); +} +{ + // Test result is a Promise + const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true); + assert.strictEqual(typeof stream.then, 'function'); +} diff --git a/test/parallel/test-stream-map.js b/test/parallel/test-stream-map.js index 2d5c5894e1eb0f..22e7e7f4e0da2c 100644 --- a/test/parallel/test-stream-map.js +++ b/test/parallel/test-stream-map.js @@ -86,20 +86,11 @@ const { setTimeout } = require('timers/promises'); { // Error cases - assert.rejects(async () => { - // eslint-disable-next-line no-unused-vars - for await (const unused of Readable.from([1]).map(1)); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); - assert.rejects(async () => { - // eslint-disable-next-line no-unused-vars - for await (const _ of Readable.from([1]).map((x) => x, 
{ - concurrency: 'Foo' - })); - }, /ERR_OUT_OF_RANGE/).then(common.mustCall()); - assert.rejects(async () => { - // eslint-disable-next-line no-unused-vars - for await (const _ of Readable.from([1]).map((x) => x, 1)); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.throws(() => Readable.from([1]).map(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).map((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).map((x) => x, 1), /ERR_INVALID_ARG_TYPE/); } { // Test result is a Readable diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js new file mode 100644 index 00000000000000..a8b41efa28415d --- /dev/null +++ b/test/parallel/test-stream-reduce.js @@ -0,0 +1,130 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); + +function sum(p, c) { + return p + c; +} + +{ + // Does the same thing as `(await stream.toArray()).reduce(...)` + (async () => { + const tests = [ + [[], sum, 0], + [[1], sum, 0], + [[1, 2, 3, 4, 5], sum, 0], + [[...Array(100).keys()], sum, 0], + [['a', 'b', 'c'], sum, ''], + [[1, 2], sum], + [[1, 2, 3], (x, y) => y], + ]; + for (const [values, fn, initial] of tests) { + const streamReduce = await Readable.from(values) + .reduce(fn, initial); + const arrayReduce = values.reduce(fn, initial); + assert.deepStrictEqual(streamReduce, arrayReduce); + } + // Does the same thing as `(await stream.toArray()).reduce(...)` with an + // asynchronous reducer + for (const [values, fn, initial] of tests) { + const streamReduce = await Readable.from(values) + .map(async (x) => x) + .reduce(fn, initial); + const arrayReduce = values.reduce(fn, initial); + assert.deepStrictEqual(streamReduce, arrayReduce); + } + })().then(common.mustCall()); +} +{ + // Works with an async reducer, with or without initial value + (async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0); + assert.strictEqual(six, 6); + })().then(common.mustCall()); + (async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c); + assert.strictEqual(six, 6); + })().then(common.mustCall()); +} +{ + // Works lazily + assert.rejects(Readable.from([1, 2, 3, 4, 5, 6]) + .map(common.mustCall((x) => { + return x; + }, 3)) // Two consumed and one buffered by `map` due to default concurrency + .reduce(async (p, c) => { + if (p === 1) { + throw new Error('boom'); + } + return c; + }, 0) + , /boom/).then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + assert.rejects(async () => { + await Readable.from([1, 2, 3]).reduce(async (p, c) => { + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: ac.signal }); + }, { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} + + +{ + // Support for AbortSignal - pre aborted + const stream = Readable.from([1, 2, 3]); + assert.rejects(async () => { + await stream.reduce(async (p, c) => { + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: AbortSignal.abort() }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + assert.strictEqual(stream.destroyed, true); + })); +} + +{ + // Support for AbortSignal - deep + const stream = Readable.from([1, 2, 3]); + assert.rejects(async () => { + await stream.reduce(async (p, 
c, { signal }) => { + signal.addEventListener('abort', common.mustCall(), { once: true }); + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: AbortSignal.abort() }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + assert.strictEqual(stream.destroyed, true); + })); +} + +{ + // Error cases + assert.rejects(() => Readable.from([]).reduce(1), /TypeError/); + assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/); +} + +{ + // Test result is a Promise + const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0); + assert.ok(result instanceof Promise); +} diff --git a/test/parallel/test-stream-some-every.js b/test/parallel/test-stream-some-every.js new file mode 100644 index 00000000000000..c2be5ea955bcd2 --- /dev/null +++ b/test/parallel/test-stream-some-every.js @@ -0,0 +1,95 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); + +function oneTo5() { + return Readable.from([1, 2, 3, 4, 5]); +} + +function oneTo5Async() { + return oneTo5().map(async (x) => { + await Promise.resolve(); + return x; + }); +} +{ + // Some and every work with a synchronous stream and predicate + (async () => { + assert.strictEqual(await oneTo5().some((x) => x > 3), true); + assert.strictEqual(await oneTo5().every((x) => x > 3), false); + assert.strictEqual(await oneTo5().some((x) => x > 6), false); + assert.strictEqual(await oneTo5().every((x) => x < 6), true); + assert.strictEqual(await Readable.from([]).some((x) => true), false); + assert.strictEqual(await Readable.from([]).every((x) => true), true); + })().then(common.mustCall()); +} + +{ + // Some and every work with an asynchronous stream and synchronous predicate + (async () => { + assert.strictEqual(await oneTo5Async().some((x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every((x) => x > 3), false); + assert.strictEqual(await oneTo5Async().some((x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every((x) => x < 6), true); + })().then(common.mustCall()); +} + +{ + // Some and every work on asynchronous streams with an asynchronous predicate + (async () => { + assert.strictEqual(await oneTo5().some(async (x) => x > 3), true); + assert.strictEqual(await oneTo5().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5().every(async (x) => x < 6), true); + })().then(common.mustCall()); +} + +{ + // Some and every short circuit + (async () => { + await oneTo5().some(common.mustCall((x) => x > 2, 3)); + await oneTo5().every(common.mustCall((x) => x < 3, 3)); + // When short circuit isn't possible the whole stream is iterated + await oneTo5().some(common.mustCall((x) => x > 6, 5)); + // The stream is destroyed afterwards + const stream = oneTo5(); + await stream.some(common.mustCall((x) => x > 2, 3)); + assert.strictEqual(stream.destroyed, true); + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + assert.rejects(Readable.from([1, 2, 3]).some( + () => new Promise(() => {}), + { signal: ac.signal } + ), { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} +{ + // Support for pre-aborted AbortSignal + assert.rejects(Readable.from([1, 2, 3]).some( + () => new Promise(() => {}), + { signal: AbortSignal.abort() } + ), { + name: 'AbortError', + }).then(common.mustCall()); +} +{ + // Error cases + 
assert.rejects(async () => { + await Readable.from([1]).every(1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).every((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/).then(common.mustCall()); +} diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js new file mode 100644 index 00000000000000..3bd15e7c0fbf34 --- /dev/null +++ b/test/parallel/test-stream-toArray.js @@ -0,0 +1,79 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); + +{ + // Works on a synchronous stream + (async () => { + const tests = [ + [], + [1], + [1, 2, 3], + Array(100).fill().map((_, i) => i), + ]; + for (const test of tests) { + const stream = Readable.from(test); + const result = await stream.toArray(); + assert.deepStrictEqual(result, test); + } + })().then(common.mustCall()); +} + +{ + // Works on a non-object-mode stream and flattens it + (async () => { + const stream = Readable.from( + [Buffer.from([1, 2, 3]), Buffer.from([4, 5, 6])] + , { objectMode: false }); + const result = await stream.toArray(); + assert.strictEqual(Buffer.isBuffer(result), true); + assert.deepStrictEqual(Array.from(result), [1, 2, 3, 4, 5, 6]); + })().then(common.mustCall()); +} + +{ + // Works on an asynchronous stream + (async () => { + const tests = [ + [], + [1], + [1, 2, 3], + Array(100).fill().map((_, i) => i), + ]; + for (const test of tests) { + const stream = Readable.from(test).map((x) => Promise.resolve(x)); + const result = await stream.toArray(); + assert.deepStrictEqual(result, test); + } + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + let stream; + assert.rejects(async () => { + stream = Readable.from([1, 2, 3]).map(async (x) => { + if (x === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(x); + }); + await stream.toArray({ signal: ac.signal }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + // Only stops toArray, does not destory the stream + assert(stream.destroyed, false); + })); + ac.abort(); +} +{ + // Test result is a Promise + const result = Readable.from([1, 2, 3, 4, 5]).toArray(); + assert.strictEqual(result instanceof Promise, true); +} diff --git a/test/parallel/test-stream-writable-final-async.js b/test/parallel/test-stream-writable-final-async.js index 5571b794fd4fbd..c17b843322222e 100644 --- a/test/parallel/test-stream-writable-final-async.js +++ b/test/parallel/test-stream-writable-final-async.js @@ -9,7 +9,6 @@ const { setTimeout } = require('timers/promises'); { class Foo extends Duplex { async _final(callback) { - // eslint-disable-next-line no-restricted-syntax await setTimeout(common.platformTimeout(1)); callback(); } diff --git a/test/parallel/test-string-decoder.js b/test/parallel/test-string-decoder.js index be876f46e5af02..02f0a3a718bdec 100644 --- a/test/parallel/test-string-decoder.js +++ b/test/parallel/test-string-decoder.js @@ -210,6 +210,13 @@ if (common.enoughTestMem) { ); } +assert.throws( + () => new StringDecoder('utf8').__proto__.write(Buffer.from('abc')), // eslint-disable-line no-proto + { + code: 'ERR_INVALID_THIS', + } +); + // Test verifies that StringDecoder will correctly decode the given input // buffer with the given encoding to the expected output. It will attempt all // possible ways to write() the input buffer, see writeSequences(). 
The diff --git a/test/parallel/test-trace-events-all.js b/test/parallel/test-trace-events-all.js index b9f9b70a61b81b..8e836561ff7091 100644 --- a/test/parallel/test-trace-events-all.js +++ b/test/parallel/test-trace-events-all.js @@ -27,7 +27,7 @@ proc.once('exit', common.mustCall(() => { return false; if (trace.cat !== 'v8') return false; - if (trace.name !== 'V8.GCScavenger') + if (!trace.name.startsWith('V8.')) return false; return true; })); diff --git a/test/parallel/test-trace-events-v8.js b/test/parallel/test-trace-events-v8.js index 5d06124d6879b5..e5fc8b6c726600 100644 --- a/test/parallel/test-trace-events-v8.js +++ b/test/parallel/test-trace-events-v8.js @@ -29,7 +29,7 @@ proc.once('exit', common.mustCall(() => { return false; if (trace.cat !== 'v8') return false; - if (trace.name !== 'V8.GCScavenger') + if (!trace.name.startsWith('V8.')) return false; return true; })); diff --git a/test/parallel/test-url-null-char.js b/test/parallel/test-url-null-char.js new file mode 100644 index 00000000000000..468080844d534b --- /dev/null +++ b/test/parallel/test-url-null-char.js @@ -0,0 +1,8 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +assert.throws( + () => { new URL('a\0b'); }, + { input: 'a\0b' } +); diff --git a/test/parallel/test-url-parse-format.js b/test/parallel/test-url-parse-format.js index e1cf80a2778abd..99a6ace23a2fb3 100644 --- a/test/parallel/test-url-parse-format.js +++ b/test/parallel/test-url-parse-format.js @@ -977,6 +977,21 @@ const parseTests = { path: '/everybody', href: '//fhqwhgads@example.com/everybody#to-the-limit' }, + + '\bhttp://example.com/\b': { + protocol: 'http:', + slashes: true, + auth: null, + host: 'example.com', + port: null, + hostname: 'example.com', + hash: null, + search: null, + query: null, + pathname: '/', + path: '/', + href: 'http://example.com/' + } }; for (const u in parseTests) { diff --git a/test/parallel/test-url-relative.js b/test/parallel/test-url-relative.js index 1eb2bdc58fc3a6..9a398a0e508f33 100644 --- a/test/parallel/test-url-relative.js +++ b/test/parallel/test-url-relative.js @@ -53,6 +53,7 @@ const relativeTests = [ ['/foo/bar/baz', '/../etc/passwd', '/etc/passwd'], ['http://localhost', 'file:///Users/foo', 'file:///Users/foo'], ['http://localhost', 'file://foo/Users', 'file://foo/Users'], + ['https://registry.npmjs.org', '@foo/bar', 'https://registry.npmjs.org/@foo/bar'], ]; relativeTests.forEach(function(relativeTest) { const a = url.resolve(relativeTest[0], relativeTest[1]); diff --git a/test/parallel/test-vm-basic.js b/test/parallel/test-vm-basic.js index c6dadce9ad0263..13d212f0079ca7 100644 --- a/test/parallel/test-vm-basic.js +++ b/test/parallel/test-vm-basic.js @@ -323,6 +323,14 @@ const vm = require('vm'); global ); + // Test compileFunction produceCachedData option + const result = vm.compileFunction('console.log("Hello, World!")', [], { + produceCachedData: true, + }); + + assert.ok(result.cachedDataProduced); + assert.ok(result.cachedData.length > 0); + // Resetting value Error.stackTraceLimit = oldLimit; } diff --git a/test/parallel/test-webcrypto-derivebits-ecdh.js b/test/parallel/test-webcrypto-derivebits-ecdh.js index 64cbae7cec6a03..166da81e3e4e6d 100644 --- a/test/parallel/test-webcrypto-derivebits-ecdh.js +++ b/test/parallel/test-webcrypto-derivebits-ecdh.js @@ -6,7 +6,8 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const { subtle, getRandomValues } = require('crypto').webcrypto; +const { webcrypto } = require('crypto'); +const { 
subtle } = webcrypto; const kTests = [ { @@ -250,7 +251,7 @@ async function prepareKeys() { { // Public is a secret key - const keyData = getRandomValues(new Uint8Array(32)); + const keyData = webcrypto.getRandomValues(new Uint8Array(32)); const key = await subtle.importKey( 'raw', keyData, diff --git a/test/parallel/test-webcrypto-derivekey-ecdh.js b/test/parallel/test-webcrypto-derivekey-ecdh.js index bdd9bd7588a763..42c8d250f42b06 100644 --- a/test/parallel/test-webcrypto-derivekey-ecdh.js +++ b/test/parallel/test-webcrypto-derivekey-ecdh.js @@ -6,7 +6,8 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const { subtle, getRandomValues } = require('crypto').webcrypto; +const { webcrypto } = require('crypto'); +const { subtle } = webcrypto; const kTests = [ { @@ -226,7 +227,7 @@ async function prepareKeys() { { // Public is a secret key - const keyData = getRandomValues(new Uint8Array(32)); + const keyData = webcrypto.getRandomValues(new Uint8Array(32)); const key = await subtle.importKey( 'raw', keyData, diff --git a/test/parallel/test-webcrypto-encrypt-decrypt-aes.js b/test/parallel/test-webcrypto-encrypt-decrypt-aes.js index 7adb18918d2205..885cded906b079 100644 --- a/test/parallel/test-webcrypto-encrypt-decrypt-aes.js +++ b/test/parallel/test-webcrypto-encrypt-decrypt-aes.js @@ -6,7 +6,8 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const { getRandomValues, subtle } = require('crypto').webcrypto; +const { webcrypto } = require('crypto'); +const { subtle } = webcrypto; async function testEncrypt({ keyBuffer, algorithm, plaintext, result }) { // Using a copy of plaintext to prevent tampering of the original @@ -213,8 +214,8 @@ async function testDecrypt({ keyBuffer, algorithm, result }) { ['encrypt', 'decrypt'], ); - const iv = getRandomValues(new Uint8Array(12)); - const aad = getRandomValues(new Uint8Array(32)); + const iv = webcrypto.getRandomValues(new Uint8Array(12)); + const aad = webcrypto.getRandomValues(new Uint8Array(32)); const encrypted = await subtle.encrypt( { @@ -224,7 +225,7 @@ async function testDecrypt({ keyBuffer, algorithm, result }) { tagLength: 128 }, secretKey, - getRandomValues(new Uint8Array(32)) + webcrypto.getRandomValues(new Uint8Array(32)) ); await subtle.decrypt( diff --git a/test/parallel/test-webcrypto-encrypt-decrypt.js b/test/parallel/test-webcrypto-encrypt-decrypt.js index 50fa99a999cd92..cc997e7d2e6d26 100644 --- a/test/parallel/test-webcrypto-encrypt-decrypt.js +++ b/test/parallel/test-webcrypto-encrypt-decrypt.js @@ -6,14 +6,15 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const { subtle, getRandomValues } = require('crypto').webcrypto; +const { webcrypto } = require('crypto'); +const { subtle } = webcrypto; // This is only a partial test. The WebCrypto Web Platform Tests // will provide much greater coverage. 
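
A pattern repeated across these webcrypto tests is replacing the destructured `getRandomValues` with calls through the `webcrypto` object. The new test-webcrypto-getRandomValues.js above suggests why: the function now validates its receiver and rejects detached calls with ERR_INVALID_THIS. A minimal sketch of the difference, assuming a Node.js build with the WebCrypto API:

const { webcrypto } = require('crypto');
const { getRandomValues } = webcrypto;

// Invoked with `webcrypto` as the receiver: fills the buffer as expected.
webcrypto.getRandomValues(new Uint8Array(16));

// Detached from its object: the receiver check fails.
try {
  getRandomValues(new Uint8Array(16));
} catch (err) {
  console.log(err.code); // expected: 'ERR_INVALID_THIS'
}
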
// Test Encrypt/Decrypt RSA-OAEP { - const buf = getRandomValues(new Uint8Array(50)); + const buf = webcrypto.getRandomValues(new Uint8Array(50)); async function test() { const ec = new TextEncoder(); @@ -44,8 +45,8 @@ const { subtle, getRandomValues } = require('crypto').webcrypto; // Test Encrypt/Decrypt AES-CTR { - const buf = getRandomValues(new Uint8Array(50)); - const counter = getRandomValues(new Uint8Array(16)); + const buf = webcrypto.getRandomValues(new Uint8Array(50)); + const counter = webcrypto.getRandomValues(new Uint8Array(16)); async function test() { const key = await subtle.generateKey({ @@ -71,8 +72,8 @@ const { subtle, getRandomValues } = require('crypto').webcrypto; // Test Encrypt/Decrypt AES-CBC { - const buf = getRandomValues(new Uint8Array(50)); - const iv = getRandomValues(new Uint8Array(16)); + const buf = webcrypto.getRandomValues(new Uint8Array(50)); + const iv = webcrypto.getRandomValues(new Uint8Array(16)); async function test() { const key = await subtle.generateKey({ @@ -98,8 +99,8 @@ const { subtle, getRandomValues } = require('crypto').webcrypto; // Test Encrypt/Decrypt AES-GCM { - const buf = getRandomValues(new Uint8Array(50)); - const iv = getRandomValues(new Uint8Array(12)); + const buf = webcrypto.getRandomValues(new Uint8Array(50)); + const iv = webcrypto.getRandomValues(new Uint8Array(12)); async function test() { const key = await subtle.generateKey({ diff --git a/test/parallel/test-webcrypto-export-import.js b/test/parallel/test-webcrypto-export-import.js index d7db433b364011..b4fd26b0cda6ae 100644 --- a/test/parallel/test-webcrypto-export-import.js +++ b/test/parallel/test-webcrypto-export-import.js @@ -6,11 +6,12 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const { subtle, getRandomValues } = require('crypto').webcrypto; +const { webcrypto } = require('crypto'); +const { subtle } = webcrypto; { async function test() { - const keyData = getRandomValues(new Uint8Array(32)); + const keyData = webcrypto.getRandomValues(new Uint8Array(32)); await Promise.all([1, null, undefined, {}, []].map((format) => assert.rejects( subtle.importKey(format, keyData, {}, false, ['wrapKey']), { @@ -82,7 +83,7 @@ const { subtle, getRandomValues } = require('crypto').webcrypto; // Import/Export HMAC Secret Key { async function test() { - const keyData = getRandomValues(new Uint8Array(32)); + const keyData = webcrypto.getRandomValues(new Uint8Array(32)); const key = await subtle.importKey( 'raw', keyData, { @@ -112,7 +113,7 @@ const { subtle, getRandomValues } = require('crypto').webcrypto; // Import/Export AES Secret Key { async function test() { - const keyData = getRandomValues(new Uint8Array(32)); + const keyData = webcrypto.getRandomValues(new Uint8Array(32)); const key = await subtle.importKey( 'raw', keyData, { diff --git a/test/parallel/test-webcrypto-getRandomValues.js b/test/parallel/test-webcrypto-getRandomValues.js new file mode 100644 index 00000000000000..049cdcc847feb1 --- /dev/null +++ b/test/parallel/test-webcrypto-getRandomValues.js @@ -0,0 +1,11 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const { getRandomValues } = require('crypto').webcrypto; + +assert.throws(() => getRandomValues(new Uint8Array()), { code: 'ERR_INVALID_THIS' }); diff --git a/test/parallel/test-webcrypto-random.js b/test/parallel/test-webcrypto-random.js index e17cc834b6c2bf..c3fc6aaab2eb1e 100644 --- 
a/test/parallel/test-webcrypto-random.js +++ b/test/parallel/test-webcrypto-random.js @@ -7,7 +7,7 @@ if (!common.hasCrypto) const { Buffer } = require('buffer'); const assert = require('assert'); -const { getRandomValues } = require('crypto').webcrypto; +const { webcrypto } = require('crypto'); [ undefined, null, '', 1, {}, [], @@ -16,14 +16,14 @@ const { getRandomValues } = require('crypto').webcrypto; new DataView(new ArrayBuffer(1)), ].forEach((i) => { assert.throws( - () => getRandomValues(i), + () => webcrypto.getRandomValues(i), { name: 'TypeMismatchError', code: 17 }, ); }); { const buf = new Uint8Array(0); - getRandomValues(buf); + webcrypto.getRandomValues(buf); } const intTypedConstructors = [ @@ -41,7 +41,7 @@ const intTypedConstructors = [ for (const ctor of intTypedConstructors) { const buf = new ctor(10); const before = Buffer.from(buf.buffer).toString('hex'); - getRandomValues(buf); + webcrypto.getRandomValues(buf); const after = Buffer.from(buf.buffer).toString('hex'); assert.notStrictEqual(before, after); } @@ -49,7 +49,7 @@ for (const ctor of intTypedConstructors) { { const buf = new Uint16Array(10); const before = Buffer.from(buf).toString('hex'); - getRandomValues(buf); + webcrypto.getRandomValues(buf); const after = Buffer.from(buf).toString('hex'); assert.notStrictEqual(before, after); } @@ -63,7 +63,7 @@ for (const ctor of intTypedConstructors) { } if (kData !== undefined) { - assert.throws(() => getRandomValues(kData), { + assert.throws(() => webcrypto.getRandomValues(kData), { code: 22 }); } diff --git a/test/parallel/test-whatwg-readablebytestream.js b/test/parallel/test-whatwg-readablebytestream.js index dd804ff212822d..83ec85fae61405 100644 --- a/test/parallel/test-whatwg-readablebytestream.js +++ b/test/parallel/test-whatwg-readablebytestream.js @@ -232,3 +232,33 @@ class Source { code: 'ERR_INVALID_STATE', }); } + +{ + const stream = new ReadableStream({ + type: 'bytes', + pull(c) { + const v = new Uint8Array(c.byobRequest.view.buffer, 0, 3); + v.set([20, 21, 22]); + c.byobRequest.respondWithNewView(v); + }, + }); + const buffer = new ArrayBuffer(10); + const view = new Uint8Array(buffer, 0, 3); + view.set([10, 11, 12]); + const reader = stream.getReader({ mode: 'byob' }); + reader.read(view); +} + +{ + const stream = new ReadableStream({ + type: 'bytes', + autoAllocateChunkSize: 10, + pull(c) { + const v = new Uint8Array(c.byobRequest.view.buffer, 0, 3); + v.set([20, 21, 22]); + c.byobRequest.respondWithNewView(v); + }, + }); + const reader = stream.getReader(); + reader.read(); +} diff --git a/test/parallel/test-whatwg-readablestream.js b/test/parallel/test-whatwg-readablestream.js index 13261fe6b7ca84..cef3eca6ed2733 100644 --- a/test/parallel/test-whatwg-readablestream.js +++ b/test/parallel/test-whatwg-readablestream.js @@ -80,6 +80,36 @@ const { assert(r.locked); } +{ + // Throw error and return rejected promise in `cancel()` method + // would execute same cleanup code + const r1 = new ReadableStream({ + cancel: () => { + return Promise.reject('Cancel Error'); + }, + }); + r1.cancel().finally(common.mustCall(() => { + const controllerState = r1[kState].controller[kState]; + + assert.strictEqual(controllerState.pullAlgorithm, undefined); + assert.strictEqual(controllerState.cancelAlgorithm, undefined); + assert.strictEqual(controllerState.sizeAlgorithm, undefined); + })).catch(() => {}); + + const r2 = new ReadableStream({ + cancel() { + throw new Error('Cancel Error'); + } + }); + r2.cancel().finally(common.mustCall(() => { + const controllerState = 
r2[kState].controller[kState]; + + assert.strictEqual(controllerState.pullAlgorithm, undefined); + assert.strictEqual(controllerState.cancelAlgorithm, undefined); + assert.strictEqual(controllerState.sizeAlgorithm, undefined); + })).catch(() => {}); +} + { const source = { start: common.mustCall((controller) => { diff --git a/test/parallel/test-whatwg-webstreams-adapters-to-streamduplex.js b/test/parallel/test-whatwg-webstreams-adapters-to-streamduplex.js new file mode 100644 index 00000000000000..15ac9f832714e9 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-adapters-to-streamduplex.js @@ -0,0 +1,166 @@ +// Flags: --no-warnings --expose-internals +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); + +const { + TransformStream, +} = require('stream/web'); + +const { + newStreamDuplexFromReadableWritablePair, +} = require('internal/webstreams/adapters'); + +const { + finished, + pipeline, + Readable, + Writable, +} = require('stream'); + +const { + kState, +} = require('internal/webstreams/util'); + +{ + const transform = new TransformStream(); + const duplex = newStreamDuplexFromReadableWritablePair(transform); + + assert(transform.readable.locked); + assert(transform.writable.locked); + + duplex.destroy(); + + duplex.on('close', common.mustCall(() => { + assert.strictEqual(transform.readable[kState].state, 'closed'); + assert.strictEqual(transform.writable[kState].state, 'errored'); + })); +} + +{ + const error = new Error('boom'); + const transform = new TransformStream(); + const duplex = newStreamDuplexFromReadableWritablePair(transform); + + assert(transform.readable.locked); + assert(transform.writable.locked); + + duplex.destroy(error); + duplex.on('error', common.mustCall((reason) => { + assert.strictEqual(reason, error); + })); + + duplex.on('close', common.mustCall(() => { + assert.strictEqual(transform.readable[kState].state, 'closed'); + assert.strictEqual(transform.writable[kState].state, 'errored'); + assert.strictEqual(transform.writable[kState].storedError, error); + })); +} + +{ + const transform = new TransformStream(); + const duplex = new newStreamDuplexFromReadableWritablePair(transform); + + duplex.end(); + duplex.resume(); + + duplex.on('close', common.mustCall(() => { + assert.strictEqual(transform.readable[kState].state, 'closed'); + assert.strictEqual(transform.writable[kState].state, 'closed'); + })); +} + +{ + const ec = new TextEncoder(); + const dc = new TextDecoder(); + const transform = new TransformStream({ + transform(chunk, controller) { + const text = dc.decode(chunk); + controller.enqueue(ec.encode(text.toUpperCase())); + } + }); + const duplex = new newStreamDuplexFromReadableWritablePair(transform, { + encoding: 'utf8', + }); + + duplex.end('hello'); + duplex.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, 'HELLO'); + })); + duplex.on('end', common.mustCall()); + + duplex.on('close', common.mustCall(() => { + assert.strictEqual(transform.readable[kState].state, 'closed'); + assert.strictEqual(transform.writable[kState].state, 'closed'); + })); +} + +{ + const ec = new TextEncoder(); + const dc = new TextDecoder(); + const transform = new TransformStream({ + transform: common.mustCall((chunk, controller) => { + const text = dc.decode(chunk); + controller.enqueue(ec.encode(text.toUpperCase())); + }) + }); + const duplex = new newStreamDuplexFromReadableWritablePair(transform, { + encoding: 'utf8', + }); + + finished(duplex, common.mustCall()); + + duplex.end('hello'); + 
duplex.resume(); +} + +{ + const ec = new TextEncoder(); + const dc = new TextDecoder(); + const transform = new TransformStream({ + transform: common.mustCall((chunk, controller) => { + const text = dc.decode(chunk); + controller.enqueue(ec.encode(text.toUpperCase())); + }) + }); + const duplex = new newStreamDuplexFromReadableWritablePair(transform, { + encoding: 'utf8', + }); + + const readable = new Readable({ + read() { + readable.push(Buffer.from('hello')); + readable.push(null); + } + }); + + const writable = new Writable({ + write: common.mustCall((chunk, encoding, callback) => { + assert.strictEqual(dc.decode(chunk), 'HELLO'); + assert.strictEqual(encoding, 'buffer'); + callback(); + }) + }); + + finished(duplex, common.mustCall()); + pipeline(readable, duplex, writable, common.mustCall()); +} + +{ + const transform = new TransformStream(); + const duplex = newStreamDuplexFromReadableWritablePair(transform); + duplex.setEncoding('utf-8'); + duplex.on('data', common.mustCall((data) => { + assert.strictEqual(data, 'hello'); + }, 5)); + + duplex.write(Buffer.from('hello')); + duplex.write(Buffer.from('hello')); + duplex.write(Buffer.from('hello')); + duplex.write(Buffer.from('hello')); + duplex.write(Buffer.from('hello')); + + duplex.end(); +} diff --git a/test/parallel/test-whatwg-webstreams-adapters-to-streamwritable.js b/test/parallel/test-whatwg-webstreams-adapters-to-streamwritable.js new file mode 100644 index 00000000000000..495eef73f79272 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-adapters-to-streamwritable.js @@ -0,0 +1,234 @@ +// Flags: --no-warnings --expose-internals +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); + +const { + WritableStream, +} = require('stream/web'); + +const { + newStreamWritableFromWritableStream, +} = require('internal/webstreams/adapters'); + +const { + finished, + pipeline, + Readable, +} = require('stream'); + +const { + kState, +} = require('internal/webstreams/util'); + +class TestSource { + constructor() { + this.chunks = []; + } + + start(c) { + this.controller = c; + this.started = true; + } + + write(chunk) { + this.chunks.push(chunk); + } + + close() { + this.closed = true; + } + + abort(reason) { + this.abortReason = reason; + } +} + +[1, {}, false, []].forEach((arg) => { + assert.throws(() => newStreamWritableFromWritableStream(arg), { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +{ + // Ending the stream.Writable should close the writableStream + const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = newStreamWritableFromWritableStream(writableStream); + + assert(writableStream.locked); + + writable.end('chunk'); + + writable.on('close', common.mustCall(() => { + assert(writableStream.locked); + assert.strictEqual(writableStream[kState].state, 'closed'); + assert.strictEqual(source.chunks.length, 1); + assert.deepStrictEqual(source.chunks[0], Buffer.from('chunk')); + })); +} + +{ + // Destroying the stream.Writable without an error should close + // the writableStream with no error. 
+ const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = newStreamWritableFromWritableStream(writableStream); + + assert(writableStream.locked); + + writable.destroy(); + + writable.on('close', common.mustCall(() => { + assert(writableStream.locked); + assert.strictEqual(writableStream[kState].state, 'closed'); + assert.strictEqual(source.chunks.length, 0); + })); +} + +{ + // Destroying the stream.Writable with an error should error + // the writableStream + const error = new Error('boom'); + const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = newStreamWritableFromWritableStream(writableStream); + + assert(writableStream.locked); + + writable.destroy(error); + + writable.on('error', common.mustCall((reason) => { + assert.strictEqual(reason, error); + })); + + writable.on('close', common.mustCall(() => { + assert(writableStream.locked); + assert.strictEqual(writableStream[kState].state, 'errored'); + assert.strictEqual(writableStream[kState].storedError, error); + assert.strictEqual(source.chunks.length, 0); + })); +} + +{ + // Attempting to close, abort, or getWriter on writableStream + // should fail because it is locked. An internal error in + // writableStream should error the writable. + const error = new Error('boom'); + const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = newStreamWritableFromWritableStream(writableStream); + + assert(writableStream.locked); + + assert.rejects(writableStream.close(), { + code: 'ERR_INVALID_STATE', + }); + + assert.rejects(writableStream.abort(), { + code: 'ERR_INVALID_STATE', + }); + + assert.throws(() => writableStream.getWriter(), { + code: 'ERR_INVALID_STATE', + }); + + writable.on('error', common.mustCall((reason) => { + assert.strictEqual(error, reason); + })); + + source.controller.error(error); +} + +{ + const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = newStreamWritableFromWritableStream(writableStream); + + writable.on('error', common.mustNotCall()); + writable.on('finish', common.mustCall()); + writable.on('close', common.mustCall(() => { + assert.strictEqual(source.chunks.length, 1); + assert.deepStrictEqual(source.chunks[0], Buffer.from('hello')); + })); + + writable.write('hello', common.mustCall()); + writable.end(); +} + +{ + const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = + newStreamWritableFromWritableStream(writableStream, { + decodeStrings: false, + }); + + writable.on('error', common.mustNotCall()); + writable.on('finish', common.mustCall()); + writable.on('close', common.mustCall(() => { + assert.strictEqual(source.chunks.length, 1); + assert.strictEqual(source.chunks[0], 'hello'); + })); + + writable.write('hello', common.mustCall()); + writable.end(); +} + +{ + const source = new TestSource(); + const writableStream = new WritableStream(source); + const writable = + newStreamWritableFromWritableStream( + writableStream, { + objectMode: true + }); + assert(writable.writableObjectMode); + + writable.on('error', common.mustNotCall()); + writable.on('finish', common.mustCall()); + writable.on('close', common.mustCall(() => { + assert.strictEqual(source.chunks.length, 1); + assert.strictEqual(source.chunks[0], 'hello'); + })); + + writable.write('hello', common.mustCall()); + writable.end(); +} + +{ + const writableStream = new WritableStream({ + write: 
common.mustCall(5), + close: common.mustCall(), + }); + const writable = newStreamWritableFromWritableStream(writableStream); + + finished(writable, common.mustCall()); + + writable.write('hello'); + writable.write('hello'); + writable.write('hello'); + writable.write('world'); + writable.write('world'); + writable.end(); +} + +{ + const writableStream = new WritableStream({ + write: common.mustCall(2), + close: common.mustCall(), + }); + const writable = newStreamWritableFromWritableStream(writableStream); + + const readable = new Readable({ + read() { + readable.push(Buffer.from('hello')); + readable.push(Buffer.from('world')); + readable.push(null); + } + }); + + pipeline(readable, writable, common.mustCall()); +} diff --git a/test/parallel/test-windows-failed-heap-allocation.js b/test/parallel/test-windows-failed-heap-allocation.js index 56a941ce58d072..be901b7dc2242c 100644 --- a/test/parallel/test-windows-failed-heap-allocation.js +++ b/test/parallel/test-windows-failed-heap-allocation.js @@ -19,8 +19,8 @@ const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); // --max-old-space-size=3 is the min 'old space' in V8, explodes fast -const cmd = `"${process.execPath}" --max-old-space-size=3 "${__filename}"`; -exec(`${cmd} heapBomb`, { cwd: tmpdir.path }, common.mustCall((err) => { +const cmd = `"${process.execPath}" --max-old-space-size=30 "${__filename}"`; +exec(`${cmd} heapBomb`, { cwd: tmpdir.path }, common.mustCall((err, stdout, stderr) => { const msg = `Wrong exit code of ${err.code}! Expected 134 for abort`; // Note: common.nodeProcessAborted() is not asserted here because it // returns true on 134 as well as 0x80000003 (V8's base::OS::Abort) diff --git a/test/parallel/test-worker-message-port-close.js b/test/parallel/test-worker-message-port-close.js index 6abc01d1b7b568..6562824d6a9ed3 100644 --- a/test/parallel/test-worker-message-port-close.js +++ b/test/parallel/test-worker-message-port-close.js @@ -39,3 +39,11 @@ function dummy() {} message: 'Cannot send data on closed MessagePort' }); } + +// Refs: https://github.com/nodejs/node/issues/42296 +{ + const ch = new MessageChannel(); + ch.port1.onmessage = common.mustNotCall(); + ch.port2.close(); + ch.port2.postMessage('fhqwhgads'); +} diff --git a/test/parallel/test-x509-escaping.js b/test/parallel/test-x509-escaping.js index ba11dde79b0821..99418e4c0bf21c 100644 --- a/test/parallel/test-x509-escaping.js +++ b/test/parallel/test-x509-escaping.js @@ -424,6 +424,15 @@ const { hasOpenSSL3 } = common; assert.strictEqual(certX509.subject, `CN=${servername}`); assert.strictEqual(certX509.subjectAltName, 'DNS:evil.example.com'); + // The newer X509Certificate API allows customizing this behavior: + assert.strictEqual(certX509.checkHost(servername), servername); + assert.strictEqual(certX509.checkHost(servername, { subject: 'default' }), + undefined); + assert.strictEqual(certX509.checkHost(servername, { subject: 'always' }), + servername); + assert.strictEqual(certX509.checkHost(servername, { subject: 'never' }), + undefined); + // Try connecting to a server that uses the self-signed certificate. 
const server = tls.createServer({ key, cert }, common.mustNotCall()); server.listen(common.mustCall(() => { @@ -454,6 +463,15 @@ const { hasOpenSSL3 } = common; assert.strictEqual(certX509.subject, `CN=${servername}`); assert.strictEqual(certX509.subjectAltName, 'IP Address:1.2.3.4'); + // The newer X509Certificate API allows customizing this behavior: + assert.strictEqual(certX509.checkHost(servername), servername); + assert.strictEqual(certX509.checkHost(servername, { subject: 'default' }), + servername); + assert.strictEqual(certX509.checkHost(servername, { subject: 'always' }), + servername); + assert.strictEqual(certX509.checkHost(servername, { subject: 'never' }), + undefined); + // Connect to a server that uses the self-signed certificate. const server = tls.createServer({ key, cert }, common.mustCall((socket) => { socket.destroy(); diff --git a/test/pummel/test-policy-integrity-dep.js b/test/pummel/test-policy-integrity-dep.js index 1b64e2bc99b1ea..ec58462335cd56 100644 --- a/test/pummel/test-policy-integrity-dep.js +++ b/test/pummel/test-policy-integrity-dep.js @@ -174,6 +174,7 @@ function drainQueue() { console.log('exit code:', status, 'signal:', signal); console.log(`stdout: ${Buffer.concat(stdout)}`); console.log(`stderr: ${Buffer.concat(stderr)}`); + process.kill(process.pid, 'SIGKILL'); throw e; } fs.rmSync(configDirPath, { maxRetries: 3, recursive: true, force: true }); diff --git a/test/sequential/test-child-process-execsync.js b/test/sequential/test-child-process-execsync.js index bc589efb8b5b64..5512eaeed7af35 100644 --- a/test/sequential/test-child-process-execsync.js +++ b/test/sequential/test-child-process-execsync.js @@ -60,7 +60,7 @@ try { assert.ok(caught, 'execSync should throw'); const end = Date.now() - start; assert(end < SLEEP); - assert(err.status > 128 || err.signal); + assert(err.status > 128 || err.signal, `status: ${err.status}, signal: ${err.signal}`); } assert.throws(function() { diff --git a/test/sequential/test-debugger-exec.js b/test/sequential/test-debugger-exec.js index 68a9b37d09d6aa..4057dd03785e7c 100644 --- a/test/sequential/test-debugger-exec.js +++ b/test/sequential/test-debugger-exec.js @@ -27,6 +27,14 @@ const assert = require('assert'); 'works w/o paren' ); }) + .then(() => cli.command('p [typeof heartbeat, typeof process.exit]')) + .then(() => { + assert.match( + cli.output, + /\[ 'function', 'function' \]/, + 'works w/o paren, short' + ); + }) .then(() => cli.command('repl')) .then(() => { assert.match( @@ -54,6 +62,14 @@ const assert = require('assert'); 'works w/ paren' ); }) + .then(() => cli.command('p("[typeof heartbeat, typeof process.exit]")')) + .then(() => { + assert.match( + cli.output, + /\[ 'function', 'function' \]/, + 'works w/ paren, short' + ); + }) .then(() => cli.command('cont')) .then(() => cli.command('exec [typeof heartbeat, typeof process.exit]')) .then(() => { diff --git a/test/sequential/test-tls-psk-client.js b/test/sequential/test-tls-psk-client.js index af9629dab9dc23..24d38d042938c0 100644 --- a/test/sequential/test-tls-psk-client.js +++ b/test/sequential/test-tls-psk-client.js @@ -33,7 +33,7 @@ const cleanUp = (err) => { process.exitCode = err ? 
1 : 0; }; -const timeout = setTimeout(() => cleanUp('Timeouted'), 5000); +const timeout = setTimeout(() => cleanUp('Timed out'), 5000); function waitForPort(port, cb) { const socket = net.connect(common.PORT, () => { diff --git a/test/wpt/status/webmessaging/broadcastchannel.json b/test/wpt/status/webmessaging/broadcastchannel.json new file mode 100644 index 00000000000000..1cc693407a2359 --- /dev/null +++ b/test/wpt/status/webmessaging/broadcastchannel.json @@ -0,0 +1,26 @@ +{ + "basics.any.js": { + "fail": { + "unexpected": [ + "assert_equals: origin expected \"https://example.com\" but got \"\"", + "assert_equals: target for event 0 expected object \"[object EventTarget]\" but got object \"[object EventTarget]\"" + ] + } + }, + "interface.any.js": { + "fail": { + "unexpected": [ + "assert_throws_dom: function \"() => c.postMessage('')\" threw object \"Error: BroadcastChannel is closed.\" that is not a DOMException InvalidStateError: property \"code\" is equal to 0, expected 11", + "assert_not_equals: got disallowed value undefined", + "assert_throws_dom: function \"() => c.postMessage(new Symbol())\" threw object \"Error: BroadcastChannel is closed.\" that is not a DOMException InvalidStateError: property \"code\" is equal to 0, expected 11" + ] + } + }, + "origin.window.js": { + "fail": { + "expected": [ + "document is not defined" + ] + } + } +} diff --git a/test/wpt/test-broadcastchannel.js b/test/wpt/test-broadcastchannel.js new file mode 100644 index 00000000000000..988a6270a936d6 --- /dev/null +++ b/test/wpt/test-broadcastchannel.js @@ -0,0 +1,13 @@ +'use strict'; + +require('../common'); +const { WPTRunner } = require('../common/wpt'); + +const runner = new WPTRunner('webmessaging/broadcastchannel'); + +runner.setInitScript(` + const { BroadcastChannel } = require('worker_threads'); + global.BroadcastChannel = BroadcastChannel; +`); + +runner.runJsTests(); diff --git a/test/wpt/test-url.js b/test/wpt/test-url.js index f64189ea3f66d6..18101990a0b2af 100644 --- a/test/wpt/test-url.js +++ b/test/wpt/test-url.js @@ -21,6 +21,15 @@ runner.setScriptModifier((obj) => { // created via `document.createElement`. So we need to ignore them and just // test `URL`. obj.code = obj.code.replace(/\["url", "a", "area"\]/, '[ "url" ]'); + } else if (typeof FormData === 'undefined' && + obj.filename.includes('urlsearchparams-constructor.any.js')) { + // TODO(XadillaX): Remove this `else if` after `FormData` is supported. + + // Ignore test named `URLSearchParams constructor, FormData.` because we do + // not have `FormData`. + obj.code = obj.code.replace( + /('URLSearchParams constructor, object\.'\);[\w\W]+)test\(function\(\) {[\w\W]*?}, 'URLSearchParams constructor, FormData\.'\);/, + '$1'); } }); runner.pretendGlobalThisAs('Window'); diff --git a/test/wpt/test-webcrypto.js b/test/wpt/test-webcrypto.js index e8707a464f434c..c1ee6402c5a75c 100644 --- a/test/wpt/test-webcrypto.js +++ b/test/wpt/test-webcrypto.js @@ -9,45 +9,6 @@ const { WPTRunner } = require('../common/wpt'); const runner = new WPTRunner('WebCryptoAPI'); // Set Node.js flags required for the tests. -runner.setFlags(['--expose-internals']); - -// Set a script that will be executed in the worker before running the tests. 
-runner.setInitScript(` - const { - Crypto, - SubtleCrypto, - crypto, - } = require('internal/crypto/webcrypto'); - const { internalBinding } = require('internal/test/binding'); - const { DOMException } = internalBinding('messaging'); - global.DOMException = DOMException; - - Object.defineProperties(global, { - Crypto: { - value: Crypto, - configurable: true, - writable: true, - enumerable: false, - }, - SubtleCrypto: { - value: SubtleCrypto, - configurable: true, - writable: true, - enumerable: false, - }, - CryptoKey: { - value: crypto.CryptoKey, - configurable: true, - writable: true, - enumerable: false, - }, - crypto: { - value: crypto, - configurable: true, - writable: true, - enumerable: false, - }, - }); -`); +runner.setFlags(['--experimental-global-webcrypto']); runner.runJsTests(); diff --git a/tools/.eslintrc.yaml b/tools/.eslintrc.yaml index d2fe553393693d..de30cf6d123f33 100644 --- a/tools/.eslintrc.yaml +++ b/tools/.eslintrc.yaml @@ -5,11 +5,11 @@ env: rules: camelcase: - error - - properties: 'never' + - properties: never ignoreDestructuring: true - allow: ['child_process'] + allow: [child_process] no-unused-vars: - error - - args: 'after-used' + - args: after-used prefer-arrow-callback: error no-var: error diff --git a/tools/doc/deprecationCodes.mjs b/tools/doc/deprecationCodes.mjs new file mode 100644 index 00000000000000..6715bb04b60c26 --- /dev/null +++ b/tools/doc/deprecationCodes.mjs @@ -0,0 +1,92 @@ +import fs from 'fs'; +import { resolve } from 'path'; +import assert from 'assert'; + +import { unified } from 'unified'; +import remarkParse from 'remark-parse'; + +const source = resolve(process.argv[2]); + +const skipDeprecationComment = /^$/; + +const generateDeprecationCode = (codeAsNumber) => + `DEP${codeAsNumber.toString().padStart(4, '0')}`; + +const addMarkdownPathToErrorStack = (error, node) => { + const { line, column } = node.position.start; + const [header, ...lines] = error.stack.split('\n'); + error.stack = + header + + `\n at (${source}:${line}:${column})\n` + + lines.join('\n'); + return error; +}; + +const testHeading = (headingNode, expectedDeprecationCode) => { + try { + assert.strictEqual( + headingNode?.children[0]?.value.substring(0, 9), + `${expectedDeprecationCode}: `, + 'Ill-formed or out-of-order deprecation code.' + ); + } catch (e) { + throw addMarkdownPathToErrorStack(e, headingNode); + } +}; + +const testYAMLComment = (commentNode) => { + try { + assert.match( + commentNode?.value?.substring(0, 21), + /^'; + file.toc = file.tocPicker = ''; } }; } @@ -508,9 +511,12 @@ function altDocs(filename, docCreated, versions) { const list = versions.filter(isDocInVersion).map(wrapInListItem).join('\n'); return list ? ` -
  • - View another version -
      ${list}
    +
  • + + + Other versions + +
      ${list}
  • ` : ''; } @@ -518,3 +524,47 @@ function altDocs(filename, docCreated, versions) { function editOnGitHub(filename) { return `
  • Edit on GitHub
  • `; } + +function gtocPicker(id) { + if (id === 'index') { + return ''; + } + + // Highlight the current module and add a link to the index + const gtoc = gtocHTML.replace( + `class="nav-${id}"`, `class="nav-${id} active"` + ).replace('', ` +
  • + Index +
  • + + `); + + return ` +
  • + + + Index + + +
    ${gtoc}
    +
  • + `; +} + +function tocPicker(id, content) { + if (id === 'index') { + return ''; + } + + return ` +
  • + + + Table of contents + + +
    ${content.tocPicker}
    +
  • + `; +} diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index ac1745798cab1f..629441b0bd142f 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -11,7 +11,7 @@ "node-doc-generator": "generate.js" }, "devDependencies": { - "highlight.js": "^11.4.0", + "highlight.js": "^11.5.0", "js-yaml": "^4.1.0", "rehype-raw": "^6.1.1", "rehype-stringify": "^9.0.3", @@ -21,7 +21,7 @@ "remark-parse": "^10.0.1", "remark-rehype": "^10.1.0", "to-vfile": "^7.2.3", - "unified": "^10.1.1", + "unified": "^10.1.2", "unist-util-select": "^4.0.1", "unist-util-visit": "^4.1.0" }, @@ -416,9 +416,9 @@ } }, "node_modules/highlight.js": { - "version": "11.4.0", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.4.0.tgz", - "integrity": "sha512-nawlpCBCSASs7EdvZOYOYVkJpGmAOKMYZgZtUqSRqodZE0GRVcFKwo1RcpeOemqh9hyttTdd5wDBwHkuSyUfnA==", + "version": "11.5.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.5.0.tgz", + "integrity": "sha512-SM6WDj5/C+VfIY8pZ6yW6Xa0Fm1tniYVYWYW1Q/DcMnISZFrC3aQAZZZFAAZtybKNrGId3p/DNbFTtcTXXgYBw==", "dev": true, "engines": { "node": ">=12.0.0" @@ -1537,9 +1537,9 @@ } }, "node_modules/trough": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/trough/-/trough-2.0.2.tgz", - "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", "dev": true, "funding": { "type": "github", @@ -1547,9 +1547,9 @@ } }, "node_modules/unified": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.1.tgz", - "integrity": "sha512-v4ky1+6BN9X3pQrOdkFIPWAaeDsHPE1svRDxq7YpTc2plkIqFMwukfqM+l0ewpP9EfwARlt9pPFAeWYhHm8X9w==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", @@ -1626,9 +1626,9 @@ } }, "node_modules/unist-util-stringify-position": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", - "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.2.tgz", + "integrity": "sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==", "dev": true, "dependencies": { "@types/unist": "^2.0.0" @@ -1700,9 +1700,9 @@ } }, "node_modules/vfile": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.0.tgz", - "integrity": "sha512-Tj44nY/48OQvarrE4FAjUfrv7GZOYzPbl5OD65HxVKwLJKMPU7zmfV8cCgCnzKWnSfYG2f3pxu+ALqs7j22xQQ==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.2.tgz", + "integrity": "sha512-w0PLIugRY3Crkgw89TeMvHCzqCs/zpreR31hl4D92y6SOE07+bfJe+dK5Q2akwS+i/c801kzjoOr9gMcTe6IAA==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", @@ -1730,9 +1730,9 @@ } }, "node_modules/vfile-message": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.0.tgz", - "integrity": 
"sha512-4QJbBk+DkPEhBXq3f260xSaWtjE4gPKOfulzfMFF8ZNwaPZieWsg3iVlcmF04+eebzpcpeXOOFMfrYzJHVYg+g==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.2.tgz", + "integrity": "sha512-QjSNP6Yxzyycd4SVOtmKKyTsSvClqBPJcd00Z0zuPj3hOIjg0rUPG6DbFGPvUKRgYyaIWLPKpuEclcuvb3H8qA==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", @@ -2057,9 +2057,9 @@ } }, "highlight.js": { - "version": "11.4.0", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.4.0.tgz", - "integrity": "sha512-nawlpCBCSASs7EdvZOYOYVkJpGmAOKMYZgZtUqSRqodZE0GRVcFKwo1RcpeOemqh9hyttTdd5wDBwHkuSyUfnA==", + "version": "11.5.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.5.0.tgz", + "integrity": "sha512-SM6WDj5/C+VfIY8pZ6yW6Xa0Fm1tniYVYWYW1Q/DcMnISZFrC3aQAZZZFAAZtybKNrGId3p/DNbFTtcTXXgYBw==", "dev": true }, "html-void-elements": { @@ -2785,15 +2785,15 @@ } }, "trough": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/trough/-/trough-2.0.2.tgz", - "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", "dev": true }, "unified": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.1.tgz", - "integrity": "sha512-v4ky1+6BN9X3pQrOdkFIPWAaeDsHPE1svRDxq7YpTc2plkIqFMwukfqM+l0ewpP9EfwARlt9pPFAeWYhHm8X9w==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", "dev": true, "requires": { "@types/unist": "^2.0.0", @@ -2846,9 +2846,9 @@ } }, "unist-util-stringify-position": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", - "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.2.tgz", + "integrity": "sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==", "dev": true, "requires": { "@types/unist": "^2.0.0" @@ -2900,9 +2900,9 @@ } }, "vfile": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.0.tgz", - "integrity": "sha512-Tj44nY/48OQvarrE4FAjUfrv7GZOYzPbl5OD65HxVKwLJKMPU7zmfV8cCgCnzKWnSfYG2f3pxu+ALqs7j22xQQ==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.2.tgz", + "integrity": "sha512-w0PLIugRY3Crkgw89TeMvHCzqCs/zpreR31hl4D92y6SOE07+bfJe+dK5Q2akwS+i/c801kzjoOr9gMcTe6IAA==", "dev": true, "requires": { "@types/unist": "^2.0.0", @@ -2922,9 +2922,9 @@ } }, "vfile-message": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.0.tgz", - "integrity": "sha512-4QJbBk+DkPEhBXq3f260xSaWtjE4gPKOfulzfMFF8ZNwaPZieWsg3iVlcmF04+eebzpcpeXOOFMfrYzJHVYg+g==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.2.tgz", + "integrity": "sha512-QjSNP6Yxzyycd4SVOtmKKyTsSvClqBPJcd00Z0zuPj3hOIjg0rUPG6DbFGPvUKRgYyaIWLPKpuEclcuvb3H8qA==", "dev": true, "requires": { "@types/unist": "^2.0.0", diff --git a/tools/doc/package.json 
b/tools/doc/package.json
index af39f927b4dc56..9856d6149845d8 100644
--- a/tools/doc/package.json
+++ b/tools/doc/package.json
@@ -7,7 +7,7 @@
     "node": ">=14.8.0"
   },
   "devDependencies": {
-    "highlight.js": "^11.4.0",
+    "highlight.js": "^11.5.0",
     "js-yaml": "^4.1.0",
     "rehype-raw": "^6.1.1",
     "rehype-stringify": "^9.0.3",
@@ -17,7 +17,7 @@
     "remark-parse": "^10.0.1",
     "remark-rehype": "^10.1.0",
     "to-vfile": "^7.2.3",
-    "unified": "^10.1.1",
+    "unified": "^10.1.2",
     "unist-util-select": "^4.0.1",
     "unist-util-visit": "^4.1.0"
   },
diff --git a/tools/doc/type-parser.mjs b/tools/doc/type-parser.mjs
index 296eec53c201e2..71a4eb98976da6 100644
--- a/tools/doc/type-parser.mjs
+++ b/tools/doc/type-parser.mjs
@@ -236,35 +236,40 @@ const customTypesMap = {
   'zlib options': 'zlib.html#class-options',
 
   'ReadableStream':
-    'webstreams.md#class-readablestream',
+    'webstreams.html#class-readablestream',
   'ReadableStreamDefaultReader':
-    'webstreams.md#class-readablestreamdefaultreader',
+    'webstreams.html#class-readablestreamdefaultreader',
   'ReadableStreamBYOBReader':
-    'webstreams.md#class-readablestreambyobreader',
+    'webstreams.html#class-readablestreambyobreader',
   'ReadableStreamDefaultController':
-    'webstreams.md#class-readablestreamdefaultcontroller',
+    'webstreams.html#class-readablestreamdefaultcontroller',
   'ReadableByteStreamController':
-    'webstreams.md#class-readablebytestreamcontroller',
+    'webstreams.html#class-readablebytestreamcontroller',
   'ReadableStreamBYOBRequest':
-    'webstreams.md#class-readablestreambyobrequest',
+    'webstreams.html#class-readablestreambyobrequest',
   'WritableStream':
-    'webstreams.md#class-writablestream',
+    'webstreams.html#class-writablestream',
   'WritableStreamDefaultWriter':
-    'webstreams.md#class-writablestreamdefaultwriter',
+    'webstreams.html#class-writablestreamdefaultwriter',
   'WritableStreamDefaultController':
-    'webstreams.md#class-writablestreamdefaultcontroller',
+    'webstreams.html#class-writablestreamdefaultcontroller',
   'TransformStream':
-    'webstreams.md#class-transformstream',
+    'webstreams.html#class-transformstream',
   'TransformStreamDefaultController':
-    'webstreams.md#class-transformstreamdefaultcontroller',
+    'webstreams.html#class-transformstreamdefaultcontroller',
   'ByteLengthQueuingStrategy':
-    'webstreams.md#class-bytelengthqueuingstrategy',
+    'webstreams.html#class-bytelengthqueuingstrategy',
   'CountQueuingStrategy':
-    'webstreams.md#class-countqueuingstrategy',
+    'webstreams.html#class-countqueuingstrategy',
   'TextEncoderStream':
-    'webstreams.md#class-textencoderstream',
+    'webstreams.html#class-textencoderstream',
   'TextDecoderStream':
-    'webstreams.md#class-textdecoderstream',
+    'webstreams.html#class-textdecoderstream',
+
+  'FormData': 'https://developer.mozilla.org/en-US/docs/Web/API/FormData',
+  'Headers': 'https://developer.mozilla.org/en-US/docs/Web/API/Headers',
+  'Response': 'https://developer.mozilla.org/en-US/docs/Web/API/Response',
+  'Request': 'https://developer.mozilla.org/en-US/docs/Web/API/Request',
 };
 
 const arrayPart = /(?:\[])+$/;
diff --git a/tools/eslint-rules/documented-deprecation-codes.js b/tools/eslint-rules/documented-deprecation-codes.js
new file mode 100644
index 00000000000000..3317f3c983cc5e
--- /dev/null
+++ b/tools/eslint-rules/documented-deprecation-codes.js
@@ -0,0 +1,37 @@
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+const { isDefiningDeprecation } = require('./rules-utils.js');
+
+const patternToMatch = /^DEP\d+$/;
+
+const mdFile = 'doc/api/deprecations.md';
+const doc = fs.readFileSync(path.resolve(__dirname, '../..', mdFile), 'utf8');
+
+function isInDoc(code) {
+  return doc.includes(`### ${code}:`);
+}
+
+function getDeprecationCode(node) {
+  return node.expression.arguments[2].value;
+}
+
+module.exports = {
+  create: function(context) {
+    return {
+      ExpressionStatement: function(node) {
+        if (!isDefiningDeprecation(node) || !getDeprecationCode(node)) return;
+        const code = getDeprecationCode(node);
+        if (!patternToMatch.test(code)) {
+          const message = `"${code}" does not match the expected pattern`;
+          context.report({ node, message });
+        }
+        if (!isInDoc(code)) {
+          const message = `"${code}" is not documented in ${mdFile}`;
+          context.report({ node, message });
+        }
+      },
+    };
+  },
+};
diff --git a/tools/eslint-rules/prefer-primordials.js b/tools/eslint-rules/prefer-primordials.js
index b30527eaf423a4..d2531556de225d 100644
--- a/tools/eslint-rules/prefer-primordials.js
+++ b/tools/eslint-rules/prefer-primordials.js
@@ -1,6 +1,6 @@
 /**
  * @fileoverview We shouldn't use global built-in object for security and
- *               performance reason. This linter rule reports replacable codes
+ *               performance reason. This linter rule reports replaceable codes
  *               that can be replaced with primordials.
  * @author Leko
  */
@@ -57,8 +57,18 @@ function getDestructuringAssignmentParent(scope, node) {
   return declaration.defs[0].node.init;
 }
 
-const identifierSelector =
-  '[type!=VariableDeclarator][type!=MemberExpression]>Identifier';
+const parentSelectors = [
+  // We want to select identifiers that refer to other references, not the ones
+  // that create a new reference.
+  'ClassDeclaration',
+  'FunctionDeclaration',
+  'LabeledStatement',
+  'MemberExpression',
+  'MethodDefinition',
+  'SwitchCase',
+  'VariableDeclarator',
+];
+const identifierSelector = parentSelectors.map((selector) => `[type!=${selector}]`).join('') + '>Identifier';
 
 module.exports = {
   meta: {
@@ -90,6 +100,11 @@ module.exports = {
         reported = new Set();
       },
       [identifierSelector](node) {
+        if (node.parent.type === 'Property' && node.parent.key === node) {
+          // If the identifier is the key for this property declaration, it
+          // can't be referring to a primordials member.
+          return;
+        }
         if (reported.has(node.range[0])) {
           return;
         }
diff --git a/tools/eslint-rules/rules-utils.js b/tools/eslint-rules/rules-utils.js
index 1cba9218a1d4b2..c5362c96cdaebe 100644
--- a/tools/eslint-rules/rules-utils.js
+++ b/tools/eslint-rules/rules-utils.js
@@ -20,6 +20,14 @@ module.exports.isDefiningError = function(node) {
          node.expression.arguments.length !== 0;
 };
 
+module.exports.isDefiningDeprecation = function(node) {
+  return node.expression &&
+    node.expression.type === 'CallExpression' &&
+    node.expression.callee &&
+    node.expression.callee.name.endsWith('deprecate') &&
+    node.expression.arguments.length !== 0;
+};
+
 /**
  * Returns true if any of the passed in modules are used in
  * require calls.
diff --git a/tools/license-builder.sh b/tools/license-builder.sh
index 31c525c0ec4a64..4aa7224d8e396d 100755
--- a/tools/license-builder.sh
+++ b/tools/license-builder.sh
@@ -63,6 +63,8 @@ licenseText="$(cat deps/llhttp/LICENSE-MIT)"
 addlicense "llhttp" "deps/llhttp" "$licenseText"
 licenseText="$(cat "${rootdir}"/deps/corepack/LICENSE.md)"
 addlicense "corepack" "deps/corepack" "$licenseText"
+licenseText="$(cat "${rootdir}"/deps/undici/LICENSE)"
+addlicense "undici" "deps/undici" "$licenseText"
 licenseText="$(cat "${rootdir}"/deps/openssl/openssl/LICENSE.txt)"
 addlicense "OpenSSL" "deps/openssl" "$licenseText"
 licenseText="$(curl -sL https://raw.githubusercontent.com/bestiejs/punycode.js/HEAD/LICENSE-MIT.txt)"
diff --git a/tools/lint-md/lint-md.mjs b/tools/lint-md/lint-md.mjs
index 666b67dcc774e9..ab6150f996e66b 100644
--- a/tools/lint-md/lint-md.mjs
+++ b/tools/lint-md/lint-md.mjs
@@ -172,9 +172,9 @@ function wrap(middleware, callback) {
     parameters.push(done);
   }
   try {
-    result = middleware(...parameters);
+    result = middleware.apply(this, parameters);
   } catch (error) {
-    const exception = error;
+    const exception = (error);
     if (fnExpectsCallback && called) {
       throw exception
     }
@@ -201,18 +201,17 @@
   }
 }
 
-var own$8 = {}.hasOwnProperty;
 function stringifyPosition(value) {
   if (!value || typeof value !== 'object') {
     return ''
   }
-  if (own$8.call(value, 'position') || own$8.call(value, 'type')) {
+  if ('position' in value || 'type' in value) {
     return position(value.position)
   }
-  if (own$8.call(value, 'start') || own$8.call(value, 'end')) {
+  if ('start' in value || 'end' in value) {
    return position(value)
   }
-  if (own$8.call(value, 'line') || own$8.call(value, 'column')) {
+  if ('line' in value || 'column' in value) {
    return point$1(value)
   }
   return ''
@@ -229,19 +228,18 @@ function index(value) {
 
 class VFileMessage extends Error {
   constructor(reason, place, origin) {
-    var parts = [null, null];
-    var position = {
+    const parts = [null, null];
+    let position = {
       start: {line: null, column: null},
       end: {line: null, column: null}
     };
-    var index;
     super();
     if (typeof place === 'string') {
       origin = place;
-      place = null;
+      place = undefined;
     }
     if (typeof origin === 'string') {
-      index = origin.indexOf(':');
+      const index = origin.indexOf(':');
       if (index === -1) {
         parts[1] = origin;
       } else {
@@ -484,7 +482,7 @@ function base() {
         continue
       }
       if (options[0] === true) {
-        options[1] = undefined;
+        options[0] = undefined;
       }
       const transformer = attacher.call(processor, ...options);
       if (typeof transformer === 'function') {
@@ -12629,12 +12627,12 @@ const remarkLintListItemBulletIndent = lintRule(
 );
 var remarkLintListItemBulletIndent$1 = remarkLintListItemBulletIndent;
 
-var pointStart = point('start');
-var pointEnd = point('end');
+const pointStart = point('start');
+const pointEnd = point('end');
 function point(type) {
   return point
   function point(node) {
-    var point = (node && node.position && node.position[type]) || {};
+    const point = (node && node.position && node.position[type]) || {};
     return {
       line: point.line || null,
       column: point.column || null,
@@ -20745,6 +20743,7 @@ const plugins = [
         "powershell",
         "r",
         "text",
+        "ts",
       ],
     },
   ],
@@ -20783,6 +20782,7 @@ const plugins = [
       { yes: "RFC" },
       { no: "[Rr][Ff][Cc]\\d+", yes: "RFC " },
       { yes: "Unix" },
+      { yes: "Valgrind" },
       { yes: "V8" },
     ],
   ],
@@ -21226,29 +21226,33 @@ function stringWidth(string, options = {}) {
   if (typeof string !== 'string' || string.length === 0) {
     return 0;
   }
+  options = {
+    ambiguousIsNarrow: true,
+ ...options + }; string = stripAnsi(string); if (string.length === 0) { return 0; } string = string.replace(emojiRegex(), ' '); - const ambiguousCharWidth = options.ambiguousIsNarrow ? 1 : 2; + const ambiguousCharacterWidth = options.ambiguousIsNarrow ? 1 : 2; let width = 0; - for (let index = 0; index < string.length; index++) { - const codePoint = string.codePointAt(index); + for (const character of string) { + const codePoint = character.codePointAt(0); if (codePoint <= 0x1F || (codePoint >= 0x7F && codePoint <= 0x9F)) { continue; } if (codePoint >= 0x300 && codePoint <= 0x36F) { continue; } - const code = eastAsianWidth.eastAsianWidth(string.charAt(index)); + const code = eastAsianWidth.eastAsianWidth(character); switch (code) { case 'F': case 'W': width += 2; break; case 'A': - width += ambiguousCharWidth; + width += ambiguousCharacterWidth; break; default: width += 1; diff --git a/tools/lint-md/package-lock.json b/tools/lint-md/package-lock.json index f77182a6392fe7..4246a50ba64c93 100644 --- a/tools/lint-md/package-lock.json +++ b/tools/lint-md/package-lock.json @@ -9,23 +9,23 @@ "version": "1.0.0", "dependencies": { "remark-parse": "^10.0.1", - "remark-preset-lint-node": "^3.3.0", + "remark-preset-lint-node": "^3.3.1", "remark-stringify": "^10.0.2", "to-vfile": "^7.2.3", - "unified": "^10.1.1", + "unified": "^10.1.2", "vfile-reporter": "^7.0.3" }, "devDependencies": { - "@rollup/plugin-commonjs": "^21.0.1", + "@rollup/plugin-commonjs": "^21.0.2", "@rollup/plugin-node-resolve": "^13.1.3", - "rollup": "^2.67.0", + "rollup": "^2.70.1", "rollup-plugin-cleanup": "^3.2.1" } }, "node_modules/@rollup/plugin-commonjs": { - "version": "21.0.1", - "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-21.0.1.tgz", - "integrity": "sha512-EA+g22lbNJ8p5kuZJUYyhhDK7WgJckW5g4pNN7n4mAFUM96VuwUnNT3xr2Db2iCZPI1pJPbGyfT5mS9T1dHfMg==", + "version": "21.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-21.0.2.tgz", + "integrity": "sha512-d/OmjaLVO4j/aQX69bwpWPpbvI3TJkQuxoAk7BH8ew1PyoMBLTOuvJTjzG8oEoW7drIIqB0KCJtfFLu/2GClWg==", "dev": true, "dependencies": { "@rollup/pluginutils": "^3.1.0", @@ -129,9 +129,9 @@ "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==" }, "node_modules/@types/node": { - "version": "17.0.15", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.15.tgz", - "integrity": "sha512-zWt4SDDv1S9WRBNxLFxFRHxdD9tvH8f5/kg5/IaLFdnSNXsDY4eL3Q3XXN+VxUnWIhyVFDwcsmAprvwXoM/ClA==", + "version": "17.0.21", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz", + "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==", "dev": true }, "node_modules/@types/resolve": { @@ -242,9 +242,9 @@ "dev": true }, "node_modules/debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dependencies": { "ms": "2.1.2" }, @@ -514,12 +514,12 @@ } }, "node_modules/magic-string": { - "version": "0.25.7", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz", - "integrity": 
"sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz", + "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==", "dev": true, "dependencies": { - "sourcemap-codec": "^1.4.4" + "sourcemap-codec": "^1.4.8" } }, "node_modules/markdown-table": { @@ -581,15 +581,17 @@ } }, "node_modules/mdast-util-gfm": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.0.tgz", - "integrity": "sha512-wMwejlTN3EQADPFuvxe8lmGsay3+f6gSJKdAHR6KBJzpcxvsjJSILB9K6u6G7eQLC7iOTyVIHYGui9uBc9r1Tg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.1.tgz", + "integrity": "sha512-42yHBbfWIFisaAfV1eixlabbsa6q7vHeSPY+cg+BBjX51M8xhgMacqH9g6TftB/9+YkcI0ooV4ncfrJslzm/RQ==", "dependencies": { + "mdast-util-from-markdown": "^1.0.0", "mdast-util-gfm-autolink-literal": "^1.0.0", "mdast-util-gfm-footnote": "^1.0.0", "mdast-util-gfm-strikethrough": "^1.0.0", "mdast-util-gfm-table": "^1.0.0", - "mdast-util-gfm-task-list-item": "^1.0.0" + "mdast-util-gfm-task-list-item": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" }, "funding": { "type": "opencollective", @@ -639,11 +641,12 @@ } }, "node_modules/mdast-util-gfm-table": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.3.tgz", - "integrity": "sha512-B/tgpJjND1qIZM2WZst+NYnb0notPE6m0J+YOe3NOHXyEmvK38ytxaOsgz4BvrRPQQcNbRrTzSHMPnBkj1fCjg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.4.tgz", + "integrity": "sha512-aEuoPwZyP4iIMkf2cLWXxx3EQ6Bmh2yKy9MVCg4i6Sd3cX80dcLEfXO/V4ul3pGH9czBK4kp+FAl+ZHmSUt9/w==", "dependencies": { "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", "mdast-util-to-markdown": "^1.3.0" }, "funding": { @@ -1256,9 +1259,9 @@ ] }, "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "dependencies": { "brace-expansion": "^1.1.7" @@ -2089,44 +2092,44 @@ } }, "node_modules/remark-preset-lint-node": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.3.0.tgz", - "integrity": "sha512-JPjXould+7VTpwj+YJHSoPiGwKLpmLAZJRveU/dT7mCDOdSSORe/SGo9fJDm6owUReg50b5AG2AY8nlReytHcA==", - "dependencies": { - "js-yaml": "^4.0.0", - "remark-gfm": "^3.0.0", - "remark-lint-blockquote-indentation": "^3.0.0", - "remark-lint-checkbox-character-style": "^4.0.0", - "remark-lint-checkbox-content-indent": "^4.0.0", - "remark-lint-code-block-style": "^3.0.0", - "remark-lint-definition-spacing": "^3.0.0", - "remark-lint-fenced-code-flag": "^3.0.0", - "remark-lint-fenced-code-marker": "^3.0.0", - "remark-lint-file-extension": "^2.0.0", - "remark-lint-final-definition": "^3.0.0", - "remark-lint-first-heading-level": "^3.0.0", - "remark-lint-heading-style": "^3.0.0", - "remark-lint-list-item-indent": "^3.0.0", - "remark-lint-maximum-line-length": "^3.0.0", - 
"remark-lint-no-consecutive-blank-lines": "^4.0.0", - "remark-lint-no-file-name-articles": "^2.0.0", - "remark-lint-no-file-name-consecutive-dashes": "^2.0.0", - "remark-lint-no-file-name-outer-dashes": "^2.0.0", - "remark-lint-no-heading-indent": "^4.0.0", - "remark-lint-no-multiple-toplevel-headings": "^3.0.0", - "remark-lint-no-shell-dollars": "^3.0.0", - "remark-lint-no-table-indentation": "^4.0.0", - "remark-lint-no-tabs": "^3.0.0", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.3.1.tgz", + "integrity": "sha512-TEkyYgmaiWd/oKy0i5Nyn/CW1nfufqtWna4WaO689bsE7fbzPxsmiHY7Q8hRq9KAkneknrJOKJKgcRBGKMuivQ==", + "dependencies": { + "js-yaml": "^4.1.0", + "remark-gfm": "^3.0.1", + "remark-lint-blockquote-indentation": "^3.1.1", + "remark-lint-checkbox-character-style": "^4.1.1", + "remark-lint-checkbox-content-indent": "^4.1.1", + "remark-lint-code-block-style": "^3.1.0", + "remark-lint-definition-spacing": "^3.1.1", + "remark-lint-fenced-code-flag": "^3.1.1", + "remark-lint-fenced-code-marker": "^3.1.1", + "remark-lint-file-extension": "^2.1.1", + "remark-lint-final-definition": "^3.1.1", + "remark-lint-first-heading-level": "^3.1.1", + "remark-lint-heading-style": "^3.1.1", + "remark-lint-list-item-indent": "^3.1.1", + "remark-lint-maximum-line-length": "^3.1.2", + "remark-lint-no-consecutive-blank-lines": "^4.1.2", + "remark-lint-no-file-name-articles": "^2.1.1", + "remark-lint-no-file-name-consecutive-dashes": "^2.1.1", + "remark-lint-no-file-name-outer-dashes": "^2.1.1", + "remark-lint-no-heading-indent": "^4.1.1", + "remark-lint-no-multiple-toplevel-headings": "^3.1.1", + "remark-lint-no-shell-dollars": "^3.1.1", + "remark-lint-no-table-indentation": "^4.1.1", + "remark-lint-no-tabs": "^3.1.1", "remark-lint-no-trailing-spaces": "^2.0.1", - "remark-lint-prohibited-strings": "^3.0.0", - "remark-lint-rule-style": "^3.0.0", - "remark-lint-strong-marker": "^3.0.0", - "remark-lint-table-cell-padding": "^4.0.0", - "remark-lint-table-pipes": "^4.0.0", - "remark-lint-unordered-list-marker-style": "^3.0.0", - "remark-preset-lint-recommended": "^6.1.1", - "semver": "^7.3.2", - "unified-lint-rule": "^2.0.0", + "remark-lint-prohibited-strings": "^3.1.0", + "remark-lint-rule-style": "^3.1.1", + "remark-lint-strong-marker": "^3.1.1", + "remark-lint-table-cell-padding": "^4.1.2", + "remark-lint-table-pipes": "^4.1.1", + "remark-lint-unordered-list-marker-style": "^3.1.1", + "remark-preset-lint-recommended": "^6.1.2", + "semver": "^7.3.5", + "unified-lint-rule": "^2.1.1", "unist-util-visit": "^4.1.0" }, "engines": { @@ -2193,9 +2196,9 @@ } }, "node_modules/rollup": { - "version": "2.67.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.67.0.tgz", - "integrity": "sha512-W83AaERwvDiHwHEF/dfAfS3z1Be5wf7n+pO3ZAO5IQadCT2lBTr7WQ2MwZZe+nodbD+n3HtC4OCOAdsOPPcKZQ==", + "version": "2.70.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.70.1.tgz", + "integrity": "sha512-CRYsI5EuzLbXdxC6RnYhOuRdtz4bhejPMSWjsFLfVM/7w/85n2szZv6yExqUXsBdz5KT8eoubeyDUDjhLHEslA==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ -2284,9 +2287,9 @@ "dev": true }, "node_modules/string-width": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.0.tgz", - "integrity": "sha512-7x54QnN21P+XL/v8SuNKvfgsUre6PXpN7mc77N3HlZv+f1SBRGmjxtOud2Z6FZ8DmdkD/IdjCaf9XXbnqmTZGQ==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": 
"sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -2350,18 +2353,18 @@ } }, "node_modules/trough": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/trough/-/trough-2.0.2.tgz", - "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, "node_modules/unified": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.1.tgz", - "integrity": "sha512-v4ky1+6BN9X3pQrOdkFIPWAaeDsHPE1svRDxq7YpTc2plkIqFMwukfqM+l0ewpP9EfwARlt9pPFAeWYhHm8X9w==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", "dependencies": { "@types/unist": "^2.0.0", "bail": "^2.0.0", @@ -2441,18 +2444,21 @@ } }, "node_modules/unist-util-position": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.1.tgz", - "integrity": "sha512-mgy/zI9fQ2HlbOtTdr2w9lhVaiFUHWQnZrFF2EUoVOqtAUdzqMtNiD99qA5a1IcjWVR8O6aVYE9u7Z2z1v0SQA==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.2.tgz", + "integrity": "sha512-Y6+plxR41dOLbyyqVDLuGWgXDmxdXslCSRYQkSDagBnOT9oFsQH0J8FzhirSklUEe0xZTT0WDnAE1gXPaDFljA==", + "dependencies": { + "@types/unist": "^2.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-stringify-position": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", - "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.2.tgz", + "integrity": "sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==", "dependencies": { "@types/unist": "^2.0.0" }, @@ -2519,9 +2525,9 @@ } }, "node_modules/vfile": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.0.tgz", - "integrity": "sha512-Tj44nY/48OQvarrE4FAjUfrv7GZOYzPbl5OD65HxVKwLJKMPU7zmfV8cCgCnzKWnSfYG2f3pxu+ALqs7j22xQQ==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.2.tgz", + "integrity": "sha512-w0PLIugRY3Crkgw89TeMvHCzqCs/zpreR31hl4D92y6SOE07+bfJe+dK5Q2akwS+i/c801kzjoOr9gMcTe6IAA==", "dependencies": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", @@ -2547,9 +2553,9 @@ } }, "node_modules/vfile-message": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.0.tgz", - "integrity": "sha512-4QJbBk+DkPEhBXq3f260xSaWtjE4gPKOfulzfMFF8ZNwaPZieWsg3iVlcmF04+eebzpcpeXOOFMfrYzJHVYg+g==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.2.tgz", + "integrity": "sha512-QjSNP6Yxzyycd4SVOtmKKyTsSvClqBPJcd00Z0zuPj3hOIjg0rUPG6DbFGPvUKRgYyaIWLPKpuEclcuvb3H8qA==", "dependencies": { "@types/unist": 
"^2.0.0", "unist-util-stringify-position": "^3.0.0" @@ -2632,9 +2638,9 @@ }, "dependencies": { "@rollup/plugin-commonjs": { - "version": "21.0.1", - "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-21.0.1.tgz", - "integrity": "sha512-EA+g22lbNJ8p5kuZJUYyhhDK7WgJckW5g4pNN7n4mAFUM96VuwUnNT3xr2Db2iCZPI1pJPbGyfT5mS9T1dHfMg==", + "version": "21.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-21.0.2.tgz", + "integrity": "sha512-d/OmjaLVO4j/aQX69bwpWPpbvI3TJkQuxoAk7BH8ew1PyoMBLTOuvJTjzG8oEoW7drIIqB0KCJtfFLu/2GClWg==", "dev": true, "requires": { "@rollup/pluginutils": "^3.1.0", @@ -2722,9 +2728,9 @@ "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==" }, "@types/node": { - "version": "17.0.15", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.15.tgz", - "integrity": "sha512-zWt4SDDv1S9WRBNxLFxFRHxdD9tvH8f5/kg5/IaLFdnSNXsDY4eL3Q3XXN+VxUnWIhyVFDwcsmAprvwXoM/ClA==", + "version": "17.0.21", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz", + "integrity": "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==", "dev": true }, "@types/resolve": { @@ -2811,9 +2817,9 @@ "dev": true }, "debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "requires": { "ms": "2.1.2" } @@ -2998,12 +3004,12 @@ } }, "magic-string": { - "version": "0.25.7", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz", - "integrity": "sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz", + "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==", "dev": true, "requires": { - "sourcemap-codec": "^1.4.4" + "sourcemap-codec": "^1.4.8" } }, "markdown-table": { @@ -3049,15 +3055,17 @@ } }, "mdast-util-gfm": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.0.tgz", - "integrity": "sha512-wMwejlTN3EQADPFuvxe8lmGsay3+f6gSJKdAHR6KBJzpcxvsjJSILB9K6u6G7eQLC7iOTyVIHYGui9uBc9r1Tg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.1.tgz", + "integrity": "sha512-42yHBbfWIFisaAfV1eixlabbsa6q7vHeSPY+cg+BBjX51M8xhgMacqH9g6TftB/9+YkcI0ooV4ncfrJslzm/RQ==", "requires": { + "mdast-util-from-markdown": "^1.0.0", "mdast-util-gfm-autolink-literal": "^1.0.0", "mdast-util-gfm-footnote": "^1.0.0", "mdast-util-gfm-strikethrough": "^1.0.0", "mdast-util-gfm-table": "^1.0.0", - "mdast-util-gfm-task-list-item": "^1.0.0" + "mdast-util-gfm-task-list-item": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" } }, "mdast-util-gfm-autolink-literal": { @@ -3091,11 +3099,12 @@ } }, "mdast-util-gfm-table": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.3.tgz", - "integrity": "sha512-B/tgpJjND1qIZM2WZst+NYnb0notPE6m0J+YOe3NOHXyEmvK38ytxaOsgz4BvrRPQQcNbRrTzSHMPnBkj1fCjg==", + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.4.tgz", + "integrity": "sha512-aEuoPwZyP4iIMkf2cLWXxx3EQ6Bmh2yKy9MVCg4i6Sd3cX80dcLEfXO/V4ul3pGH9czBK4kp+FAl+ZHmSUt9/w==", "requires": { "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", "mdast-util-to-markdown": "^1.3.0" } }, @@ -3446,9 +3455,9 @@ "integrity": "sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w==" }, "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "requires": { "brace-expansion": "^1.1.7" @@ -4080,44 +4089,44 @@ } }, "remark-preset-lint-node": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.3.0.tgz", - "integrity": "sha512-JPjXould+7VTpwj+YJHSoPiGwKLpmLAZJRveU/dT7mCDOdSSORe/SGo9fJDm6owUReg50b5AG2AY8nlReytHcA==", - "requires": { - "js-yaml": "^4.0.0", - "remark-gfm": "^3.0.0", - "remark-lint-blockquote-indentation": "^3.0.0", - "remark-lint-checkbox-character-style": "^4.0.0", - "remark-lint-checkbox-content-indent": "^4.0.0", - "remark-lint-code-block-style": "^3.0.0", - "remark-lint-definition-spacing": "^3.0.0", - "remark-lint-fenced-code-flag": "^3.0.0", - "remark-lint-fenced-code-marker": "^3.0.0", - "remark-lint-file-extension": "^2.0.0", - "remark-lint-final-definition": "^3.0.0", - "remark-lint-first-heading-level": "^3.0.0", - "remark-lint-heading-style": "^3.0.0", - "remark-lint-list-item-indent": "^3.0.0", - "remark-lint-maximum-line-length": "^3.0.0", - "remark-lint-no-consecutive-blank-lines": "^4.0.0", - "remark-lint-no-file-name-articles": "^2.0.0", - "remark-lint-no-file-name-consecutive-dashes": "^2.0.0", - "remark-lint-no-file-name-outer-dashes": "^2.0.0", - "remark-lint-no-heading-indent": "^4.0.0", - "remark-lint-no-multiple-toplevel-headings": "^3.0.0", - "remark-lint-no-shell-dollars": "^3.0.0", - "remark-lint-no-table-indentation": "^4.0.0", - "remark-lint-no-tabs": "^3.0.0", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.3.1.tgz", + "integrity": "sha512-TEkyYgmaiWd/oKy0i5Nyn/CW1nfufqtWna4WaO689bsE7fbzPxsmiHY7Q8hRq9KAkneknrJOKJKgcRBGKMuivQ==", + "requires": { + "js-yaml": "^4.1.0", + "remark-gfm": "^3.0.1", + "remark-lint-blockquote-indentation": "^3.1.1", + "remark-lint-checkbox-character-style": "^4.1.1", + "remark-lint-checkbox-content-indent": "^4.1.1", + "remark-lint-code-block-style": "^3.1.0", + "remark-lint-definition-spacing": "^3.1.1", + "remark-lint-fenced-code-flag": "^3.1.1", + "remark-lint-fenced-code-marker": "^3.1.1", + "remark-lint-file-extension": "^2.1.1", + "remark-lint-final-definition": "^3.1.1", + "remark-lint-first-heading-level": "^3.1.1", + "remark-lint-heading-style": "^3.1.1", + "remark-lint-list-item-indent": "^3.1.1", + "remark-lint-maximum-line-length": "^3.1.2", + "remark-lint-no-consecutive-blank-lines": "^4.1.2", + "remark-lint-no-file-name-articles": "^2.1.1", + "remark-lint-no-file-name-consecutive-dashes": "^2.1.1", + "remark-lint-no-file-name-outer-dashes": "^2.1.1", + "remark-lint-no-heading-indent": "^4.1.1", + 
"remark-lint-no-multiple-toplevel-headings": "^3.1.1", + "remark-lint-no-shell-dollars": "^3.1.1", + "remark-lint-no-table-indentation": "^4.1.1", + "remark-lint-no-tabs": "^3.1.1", "remark-lint-no-trailing-spaces": "^2.0.1", - "remark-lint-prohibited-strings": "^3.0.0", - "remark-lint-rule-style": "^3.0.0", - "remark-lint-strong-marker": "^3.0.0", - "remark-lint-table-cell-padding": "^4.0.0", - "remark-lint-table-pipes": "^4.0.0", - "remark-lint-unordered-list-marker-style": "^3.0.0", - "remark-preset-lint-recommended": "^6.1.1", - "semver": "^7.3.2", - "unified-lint-rule": "^2.0.0", + "remark-lint-prohibited-strings": "^3.1.0", + "remark-lint-rule-style": "^3.1.1", + "remark-lint-strong-marker": "^3.1.1", + "remark-lint-table-cell-padding": "^4.1.2", + "remark-lint-table-pipes": "^4.1.1", + "remark-lint-unordered-list-marker-style": "^3.1.1", + "remark-preset-lint-recommended": "^6.1.2", + "semver": "^7.3.5", + "unified-lint-rule": "^2.1.1", "unist-util-visit": "^4.1.0" } }, @@ -4167,9 +4176,9 @@ } }, "rollup": { - "version": "2.67.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.67.0.tgz", - "integrity": "sha512-W83AaERwvDiHwHEF/dfAfS3z1Be5wf7n+pO3ZAO5IQadCT2lBTr7WQ2MwZZe+nodbD+n3HtC4OCOAdsOPPcKZQ==", + "version": "2.70.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.70.1.tgz", + "integrity": "sha512-CRYsI5EuzLbXdxC6RnYhOuRdtz4bhejPMSWjsFLfVM/7w/85n2szZv6yExqUXsBdz5KT8eoubeyDUDjhLHEslA==", "dev": true, "requires": { "fsevents": "~2.3.2" @@ -4236,9 +4245,9 @@ "dev": true }, "string-width": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.0.tgz", - "integrity": "sha512-7x54QnN21P+XL/v8SuNKvfgsUre6PXpN7mc77N3HlZv+f1SBRGmjxtOud2Z6FZ8DmdkD/IdjCaf9XXbnqmTZGQ==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "requires": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -4274,14 +4283,14 @@ } }, "trough": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/trough/-/trough-2.0.2.tgz", - "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==" }, "unified": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.1.tgz", - "integrity": "sha512-v4ky1+6BN9X3pQrOdkFIPWAaeDsHPE1svRDxq7YpTc2plkIqFMwukfqM+l0ewpP9EfwARlt9pPFAeWYhHm8X9w==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", "requires": { "@types/unist": "^2.0.0", "bail": "^2.0.0", @@ -4339,14 +4348,17 @@ "integrity": "sha512-F5CZ68eYzuSvJjGhCLPL3cYx45IxkqXSetCcRgUXtbcm50X2L9oOWQlfUfDdAf+6Pd27YDblBfdtmsThXmwpbQ==" }, "unist-util-position": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.1.tgz", - "integrity": "sha512-mgy/zI9fQ2HlbOtTdr2w9lhVaiFUHWQnZrFF2EUoVOqtAUdzqMtNiD99qA5a1IcjWVR8O6aVYE9u7Z2z1v0SQA==" + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.2.tgz", + "integrity": 
"sha512-Y6+plxR41dOLbyyqVDLuGWgXDmxdXslCSRYQkSDagBnOT9oFsQH0J8FzhirSklUEe0xZTT0WDnAE1gXPaDFljA==", + "requires": { + "@types/unist": "^2.0.0" + } }, "unist-util-stringify-position": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", - "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.2.tgz", + "integrity": "sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==", "requires": { "@types/unist": "^2.0.0" } @@ -4393,9 +4405,9 @@ } }, "vfile": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.0.tgz", - "integrity": "sha512-Tj44nY/48OQvarrE4FAjUfrv7GZOYzPbl5OD65HxVKwLJKMPU7zmfV8cCgCnzKWnSfYG2f3pxu+ALqs7j22xQQ==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.2.tgz", + "integrity": "sha512-w0PLIugRY3Crkgw89TeMvHCzqCs/zpreR31hl4D92y6SOE07+bfJe+dK5Q2akwS+i/c801kzjoOr9gMcTe6IAA==", "requires": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", @@ -4413,9 +4425,9 @@ } }, "vfile-message": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.0.tgz", - "integrity": "sha512-4QJbBk+DkPEhBXq3f260xSaWtjE4gPKOfulzfMFF8ZNwaPZieWsg3iVlcmF04+eebzpcpeXOOFMfrYzJHVYg+g==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.2.tgz", + "integrity": "sha512-QjSNP6Yxzyycd4SVOtmKKyTsSvClqBPJcd00Z0zuPj3hOIjg0rUPG6DbFGPvUKRgYyaIWLPKpuEclcuvb3H8qA==", "requires": { "@types/unist": "^2.0.0", "unist-util-stringify-position": "^3.0.0" diff --git a/tools/lint-md/package.json b/tools/lint-md/package.json index 5f95fbe1abf6ed..88923351e830bd 100644 --- a/tools/lint-md/package.json +++ b/tools/lint-md/package.json @@ -7,16 +7,16 @@ }, "dependencies": { "remark-parse": "^10.0.1", - "remark-preset-lint-node": "^3.3.0", + "remark-preset-lint-node": "^3.3.1", "remark-stringify": "^10.0.2", "to-vfile": "^7.2.3", - "unified": "^10.1.1", + "unified": "^10.1.2", "vfile-reporter": "^7.0.3" }, "devDependencies": { - "@rollup/plugin-commonjs": "^21.0.1", + "@rollup/plugin-commonjs": "^21.0.2", "@rollup/plugin-node-resolve": "^13.1.3", - "rollup": "^2.67.0", + "rollup": "^2.70.1", "rollup-plugin-cleanup": "^3.2.1" } } diff --git a/tools/make-v8.sh b/tools/make-v8.sh index 978b9f00860a65..62cabc70a6eac7 100755 --- a/tools/make-v8.sh +++ b/tools/make-v8.sh @@ -9,7 +9,7 @@ cd deps/v8 || exit find . -type d -name .git -print0 | xargs -0 rm -rf ../../tools/v8/fetch_deps.py . -ARCH="`arch`" +ARCH=$(arch) if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then TARGET_ARCH=$ARCH if [ "$ARCH" = "ppc64le" ]; then @@ -18,22 +18,34 @@ if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then # set paths manually for now to use locally installed gn export BUILD_TOOLS=/home/iojs/build-tools export LD_LIBRARY_PATH="$BUILD_TOOLS:$LD_LIBRARY_PATH" - # Avoid linking to ccache symbolic links as ccache decides which - # binary to run based on the name of the link (we always name them gcc/g++). 
-  # shellcheck disable=SC2154
-  CC_PATH=`command -v "$CC" gcc | grep -v ccache | head -n 1`
-  # shellcheck disable=SC2154
-  CXX_PATH=`command -v "$CXX" g++ | grep -v ccache | head -n 1`
   rm -f "$BUILD_TOOLS/g++"
   rm -f "$BUILD_TOOLS/gcc"
-  ln -s "$CXX_PATH" "$BUILD_TOOLS/g++"
-  ln -s "$CC_PATH" "$BUILD_TOOLS/gcc"
+  # V8's build config looks for binaries called `gcc` and `g++` if not using
+  # clang. Ensure that `gcc` and `g++` point to the compilers we want to
+  # invoke, creating symbolic links placed at the front of PATH, if needed.
+  # Avoid linking to ccache symbolic links as ccache decides which binary
+  # to run based on the name of the link (i.e. `gcc`/`g++` in our case).
+  # shellcheck disable=SC2154
+  if [ "$CC" != "" ] && [ "$CC" != "gcc" ]; then
+    CC_PATH=$(command -v "$CC" gcc | grep -v ccache | head -n 1)
+    ln -s "$CC_PATH" "$BUILD_TOOLS/gcc"
+  fi
+  # shellcheck disable=SC2154
+  if [ "$CXX" != "" ] && [ "$CXX" != "g++" ]; then
+    CXX_PATH=$(command -v "$CXX" g++ | grep -v ccache | head -n 1)
+    ln -s "$CXX_PATH" "$BUILD_TOOLS/g++"
+  fi
   export PATH="$BUILD_TOOLS:$PATH"
+  # Propagate ccache to gn.
+  case "$CXX" in
+    *ccache*) CC_WRAPPER="cc_wrapper=\"ccache\"" ;;
+    *) ;;
+  esac
   g++ --version
   gcc --version
   export PKG_CONFIG_PATH=$BUILD_TOOLS/pkg-config
-  gn gen -v "out.gn/$BUILD_ARCH_TYPE" --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\" v8_enable_backtrace=true"
+  gn gen -v "out.gn/$BUILD_ARCH_TYPE" --args="is_component_build=false is_debug=false use_goma=false goma_dir=\"None\" use_custom_libcxx=false v8_target_cpu=\"$TARGET_ARCH\" target_cpu=\"$TARGET_ARCH\" v8_enable_backtrace=true $CC_WRAPPER"
   ninja -v -C "out.gn/$BUILD_ARCH_TYPE" d8 cctest inspector-test
 else
   DEPOT_TOOLS_DIR="$(cd _depot_tools && pwd)"
diff --git a/tools/node_modules/eslint/lib/cli-engine/cli-engine.js b/tools/node_modules/eslint/lib/cli-engine/cli-engine.js
index 3ae8b685cf34e6..92b8755783fb79 100644
--- a/tools/node_modules/eslint/lib/cli-engine/cli-engine.js
+++ b/tools/node_modules/eslint/lib/cli-engine/cli-engine.js
@@ -616,8 +616,8 @@ class CLIEngine {
            useEslintrc: options.useEslintrc,
            builtInRules,
            loadRules,
-            eslintRecommendedPath: path.resolve(__dirname, "../../conf/eslint-recommended.js"),
-            eslintAllPath: path.resolve(__dirname, "../../conf/eslint-all.js")
+            getEslintRecommendedConfig: () => require("../../conf/eslint-recommended.js"),
+            getEslintAllConfig: () => require("../../conf/eslint-all.js")
        });
        const fileEnumerator = new FileEnumerator({
            configArrayFactory,
diff --git a/tools/node_modules/eslint/lib/cli-engine/file-enumerator.js b/tools/node_modules/eslint/lib/cli-engine/file-enumerator.js
index f1442d150b844d..674e83e540de3d 100644
--- a/tools/node_modules/eslint/lib/cli-engine/file-enumerator.js
+++ b/tools/node_modules/eslint/lib/cli-engine/file-enumerator.js
@@ -215,8 +215,8 @@ class FileEnumerator {
            cwd = process.cwd(),
            configArrayFactory = new CascadingConfigArrayFactory({
                cwd,
-                eslintRecommendedPath: path.resolve(__dirname, "../../conf/eslint-recommended.js"),
-                eslintAllPath: path.resolve(__dirname, "../../conf/eslint-all.js")
+                getEslintRecommendedConfig: () => require("../../conf/eslint-recommended.js"),
+                getEslintAllConfig: () => require("../../conf/eslint-all.js")
            }),
            extensions = null,
            globInputPaths = true,
diff --git a/tools/node_modules/eslint/lib/cli-engine/formatters/html.js b/tools/node_modules/eslint/lib/cli-engine/formatters/html.js
index e28996f6cd2f42..b636599711772a 100644
--- a/tools/node_modules/eslint/lib/cli-engine/formatters/html.js
+++ b/tools/node_modules/eslint/lib/cli-engine/formatters/html.js
@@ -39,6 +39,8 @@ function pageTemplate(it) {
        ESLint Report
+
+