author | npm CLI robot <npm-cli+bot@github.com> | 2023-03-15 17:39:48 +0000
---|---|---
committer | Myles Borins <mylesborins@github.com> | 2023-03-27 14:41:08 -0400
commit | a4808b6c3b111608e0b9945c7f62534b285b0c96 |
tree | 53b46569b2eab81329a0636ec845556ec2b1c447 |
parent | 13767a4675f28550e3720c46c5a16349b95283a9 |
deps: upgrade npm to 9.6.2
PR-URL: https://github.com/nodejs/node/pull/47108
Reviewed-By: Yagiz Nizipli <yagiz@nizipli.com>
Reviewed-By: Mohammed Keyvanzadeh <mohammadkeyvanzade94@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
260 files changed, 3377 insertions(+), 2191 deletions(-)
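The most substantive documentation change in this diff is the rewritten `lockfileVersion` table in `package-lock-json.md` below (no version = pre-v5 shrinkwrap, `1` = npm v5/v6, `2` = npm v7/v8, `3` = npm v9). As a minimal sketch of what that field looks like in practice — not part of this commit, with `./package-lock.json` as a placeholder path — a lockfile's version can be read like this:

```js
// Minimal sketch (not part of this commit): report the lockfileVersion
// documented in the package-lock.json docs change below.
const { readFile } = require('node:fs/promises')

async function lockfileVersion (path = './package-lock.json') {
  const lock = JSON.parse(await readFile(path, 'utf8'))
  // undefined: pre-npm-v5 "ancient" shrinkwrap; 1: npm v5/v6;
  // 2: npm v7/v8 (backwards compatible to v1); 3: npm v9 (compatible to v7)
  return lock.lockfileVersion
}

lockfileVersion().then(v => console.log('lockfileVersion:', v ?? 'none (ancient)'))
```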
diff --git a/deps/npm/docs/content/commands/npm-install-ci-test.md b/deps/npm/docs/content/commands/npm-install-ci-test.md
index f4db7ee0c5..5a8095787a 100644
--- a/deps/npm/docs/content/commands/npm-install-ci-test.md
+++ b/deps/npm/docs/content/commands/npm-install-ci-test.md
@@ -9,7 +9,7 @@ description: Install a project with a clean slate and run tests
 
 ```bash
 npm install-ci-test
-alias: cit
+aliases: cit, clean-install-test, sit
 ```
 
 ### Description
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 139e6ec56e..b1754ed02c 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For
 example, running `npm ls promzard` in npm's source tree will show:
 
 ```bash
-npm@9.5.1 /path/to/npm
+npm@9.6.2 /path/to/npm
 └─┬ init-package-json@0.0.4
 └── promzard@0.1.5
 ```
diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md
index 6d6f5771e3..6f8ea0a36e 100644
--- a/deps/npm/docs/content/commands/npm.md
+++ b/deps/npm/docs/content/commands/npm.md
@@ -14,7 +14,7 @@ Note: This command is unaware of workspaces.
 
 ### Version
 
-9.5.1
+9.6.2
 
 ### Description
diff --git a/deps/npm/docs/content/configuring-npm/package-lock-json.md b/deps/npm/docs/content/configuring-npm/package-lock-json.md
index 61766dea35..8904f30887 100644
--- a/deps/npm/docs/content/configuring-npm/package-lock-json.md
+++ b/deps/npm/docs/content/configuring-npm/package-lock-json.md
@@ -112,12 +112,9 @@ the npm registry. Lockfiles generated by npm v7 will contain
 * No version provided: an "ancient" shrinkwrap file from a version of npm
   prior to npm v5.
 * `1`: The lockfile version used by npm v5 and v6.
-* `2`: The lockfile version used by npm v7, which is backwards compatible
-  to v1 lockfiles.
-* `3`: The lockfile version used by npm v7, _without_ backwards
-  compatibility affordances. This is used for the hidden lockfile at
-  `node_modules/.package-lock.json`, and will likely be used in a future
-  version of npm, once support for npm v6 is no longer relevant.
+* `2`: The lockfile version used by npm v7 and v8. Backwards compatible to v1
+  lockfiles.
+* `3`: The lockfile version used by npm v9. Backwards compatible to npm v7.
 
 npm will always attempt to get whatever data it can out of a lockfile, even
 if it is not a version that it was designed to support.
diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
index b2f6a3affb..20b001f4e8 100644
--- a/deps/npm/docs/output/commands/npm-install-ci-test.html
+++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
@@ -148,7 +148,7 @@ npm command-line interface
 <div id="_content"><h3 id="synopsis">Synopsis</h3>
 <pre><code class="language-bash">npm install-ci-test
-alias: cit
+aliases: cit, clean-install-test, sit
 </code></pre>
 <h3 id="description">Description</h3>
 <p>This command runs <code>npm ci</code> followed immediately by <code>npm test</code>.</p>
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index 26d3dce771..51142e341c 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -160,7 +160,7 @@ tree at all, use <a href="../commands/npm-explain.html"><code>npm explain</code>
 the results to only the paths to the packages named.  Note that nested
 packages will <em>also</em> show the paths to the specified packages.
 For example, running <code>npm ls promzard</code> in npm's source tree will show:</p>
-<pre><code class="language-bash">npm@9.5.1 /path/to/npm
+<pre><code class="language-bash">npm@9.6.2 /path/to/npm
 └─┬ init-package-json@0.0.4
 └── promzard@0.1.5
 </code></pre>
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index d16a1cec6c..fd8603a02d 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -150,7 +150,7 @@ npm command-line interface
 </code></pre>
 <p>Note: This command is unaware of workspaces.</p>
 <h3 id="version">Version</h3>
-<p>9.5.1</p>
+<p>9.6.2</p>
 <h3 id="description">Description</h3>
 <p>npm is the package manager for the Node JavaScript platform. It puts
 modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/configuring-npm/package-lock-json.html b/deps/npm/docs/output/configuring-npm/package-lock-json.html
index 59b757568f..7587b4ad60 100644
--- a/deps/npm/docs/output/configuring-npm/package-lock-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-lock-json.html
@@ -239,12 +239,9 @@ the npm registry. Lockfiles generated by npm v7 will contain
 <li>No version provided: an "ancient" shrinkwrap file from a version of npm
 prior to npm v5.</li>
 <li><code>1</code>: The lockfile version used by npm v5 and v6.</li>
-<li><code>2</code>: The lockfile version used by npm v7, which is backwards compatible
-to v1 lockfiles.</li>
-<li><code>3</code>: The lockfile version used by npm v7, <em>without</em> backwards
-compatibility affordances. This is used for the hidden lockfile at
-<code>node_modules/.package-lock.json</code>, and will likely be used in a future
-version of npm, once support for npm v6 is no longer relevant.</li>
+<li><code>2</code>: The lockfile version used by npm v7 and v8. Backwards compatible to v1
+lockfiles.</li>
+<li><code>3</code>: The lockfile version used by npm v9. Backwards compatible to npm v7.</li>
 </ul>
 <p>npm will always attempt to get whatever data it can out of a lockfile, even
 if it is not a version that it was designed to support.</p>
diff --git a/deps/npm/lib/commands/access.js b/deps/npm/lib/commands/access.js
index 23e51f071b..318151fc81 100644
--- a/deps/npm/lib/commands/access.js
+++ b/deps/npm/lib/commands/access.js
@@ -53,20 +53,22 @@ class Access extends BaseCommand {
       return commands
     }
 
-    switch (argv[2]) {
-      case 'grant':
-        return ['read-only', 'read-write']
-      case 'revoke':
-        return []
-      case 'list':
-      case 'ls':
-        return ['packages', 'collaborators']
-      case 'get':
-        return ['status']
-      case 'set':
-        return setCommands
-      default:
-        throw new Error(argv[2] + ' not recognized')
+    if (argv.length === 3) {
+      switch (argv[2]) {
+        case 'grant':
+          return ['read-only', 'read-write']
+        case 'revoke':
+          return []
+        case 'list':
+        case 'ls':
+          return ['packages', 'collaborators']
+        case 'get':
+          return ['status']
+        case 'set':
+          return setCommands
+        default:
+          throw new Error(argv[2] + ' not recognized')
+      }
     }
   }
 
@@ -116,11 +118,11 @@ class Access extends BaseCommand {
   }
 
   async #grant (permissions, scope, pkg) {
-    await libnpmaccess.setPermissions(scope, pkg, permissions)
+    await libnpmaccess.setPermissions(scope, pkg, permissions, this.npm.flatOptions)
   }
 
   async #revoke (scope, pkg) {
-    await libnpmaccess.removePermissions(scope, pkg)
+    await libnpmaccess.removePermissions(scope, pkg, this.npm.flatOptions)
   }
 
   async #listPackages (owner, pkg) {
diff --git a/deps/npm/lib/commands/audit.js b/deps/npm/lib/commands/audit.js
index 05830fff69..dab871669e 100644
--- a/deps/npm/lib/commands/audit.js
+++ b/deps/npm/lib/commands/audit.js
@@ -389,11 +389,12 @@ class Audit extends ArboristWorkspaceCmd {
     const argv = opts.conf.argv.remain
 
     if (argv.length === 2) {
-      return ['fix']
+      return ['fix', 'signatures']
     }
 
     switch (argv[2]) {
       case 'fix':
+      case 'signatures':
        return []
       default:
         throw Object.assign(new Error(argv[2] + ' not recognized'), {
diff --git a/deps/npm/lib/commands/completion.js b/deps/npm/lib/commands/completion.js
index f5604e099f..49a66627cc 100644
--- a/deps/npm/lib/commands/completion.js
+++ b/deps/npm/lib/commands/completion.js
@@ -79,12 +79,10 @@ class Completion extends BaseCommand {
       })
     }
 
-    const { COMP_CWORD, COMP_LINE, COMP_POINT } = process.env
+    const { COMP_CWORD, COMP_LINE, COMP_POINT, COMP_FISH } = process.env
 
     // if the COMP_* isn't in the env, then just dump the script.
-    if (COMP_CWORD === undefined ||
-        COMP_LINE === undefined ||
-        COMP_POINT === undefined) {
+    if (COMP_CWORD === undefined || COMP_LINE === undefined || COMP_POINT === undefined) {
       return dumpScript(resolve(this.npm.npmRoot, 'lib', 'utils', 'completion.sh'))
     }
 
@@ -111,6 +109,7 @@ class Completion extends BaseCommand {
     partialWords.push(partialWord)
 
     const opts = {
+      isFish: COMP_FISH === 'true',
       words,
       w,
       word,
diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js
index 51746c5e52..40e18e1ea0 100644
--- a/deps/npm/lib/commands/run-script.js
+++ b/deps/npm/lib/commands/run-script.js
@@ -51,6 +51,9 @@ class RunScript extends BaseCommand {
       // find the script name
       const json = resolve(this.npm.localPrefix, 'package.json')
       const { scripts = {} } = await rpj(json).catch(er => ({}))
+      if (opts.isFish) {
+        return Object.keys(scripts).map(s => `${s}\t${scripts[s].slice(0, 30)}`)
+      }
       return Object.keys(scripts)
     }
   }
diff --git a/deps/npm/lib/utils/audit-error.js b/deps/npm/lib/utils/audit-error.js
index 7feccc739b..aaf35566fc 100644
--- a/deps/npm/lib/utils/audit-error.js
+++ b/deps/npm/lib/utils/audit-error.js
@@ -1,4 +1,5 @@
 const log = require('./log-shim')
+const replaceInfo = require('./replace-info.js')
 
 // print an error or just nothing if the audit report has an error
 // this is called by the audit command, and by the reify-output util
@@ -24,7 +25,7 @@ const auditError = (npm, report) => {
     npm.output(JSON.stringify({
       message: error.message,
       method: error.method,
-      uri: error.uri,
+      uri: replaceInfo(error.uri),
       headers: error.headers,
       statusCode: error.statusCode,
       body,
diff --git a/deps/npm/lib/utils/cmd-list.js b/deps/npm/lib/utils/cmd-list.js
index 03fe8ed07c..68074fe9a4 100644
--- a/deps/npm/lib/utils/cmd-list.js
+++ b/deps/npm/lib/utils/cmd-list.js
@@ -36,7 +36,7 @@ const aliases = {
   v: 'view',
   run: 'run-script',
   'clean-install': 'ci',
-  'clean-install-test': 'cit',
+  'clean-install-test': 'install-ci-test',
   x: 'exec',
   why: 'explain',
   la: 'll',
@@ -62,7 +62,7 @@ const aliases = {
   upgrade: 'update',
   udpate: 'update',
   rum: 'run-script',
-  sit: 'cit',
+  sit: 'install-ci-test',
   urn: 'run-script',
   ogr: 'org',
   'add-user': 'adduser',
diff --git a/deps/npm/lib/utils/completion.fish b/deps/npm/lib/utils/completion.fish
new file mode 100644
index 0000000000..5e274ad77e
--- /dev/null
+++ b/deps/npm/lib/utils/completion.fish
@@ -0,0 +1,40 @@
+# npm completions for Fish shell
+# This script is a work in progress and does not fall under the normal semver contract as the rest of npm.
+
+# __fish_npm_needs_command taken from:
+# https://stackoverflow.com/questions/16657803/creating-autocomplete-script-with-sub-commands
+function __fish_npm_needs_command
+  set -l cmd (commandline -opc)
+
+  if test (count $cmd) -eq 1
+    return 0
+  end
+
+  return 1
+end
+
+# Taken from https://github.com/fish-shell/fish-shell/blob/HEAD/share/completions/npm.fish
+function __fish_complete_npm -d "Complete the commandline using npm's 'completion' tool"
+  # tell npm we are fish shell
+  set -lx COMP_FISH true
+  if command -sq npm
+    # npm completion is bash-centric, so we need to translate fish's "commandline" stuff to bash's $COMP_* stuff
+    # COMP_LINE is an array with the words in the commandline
+    set -lx COMP_LINE (commandline -opc)
+    # COMP_CWORD is the index of the current word in COMP_LINE
+    # bash starts arrays with 0, so subtract 1
+    set -lx COMP_CWORD (math (count $COMP_LINE) - 1)
+    # COMP_POINT is the index of point/cursor when the commandline is viewed as a string
+    set -lx COMP_POINT (commandline -C)
+    # If the cursor is after the last word, the empty token will disappear in the expansion
+    # Readd it
+    if test (commandline -ct) = ""
+      set COMP_CWORD (math $COMP_CWORD + 1)
+      set COMP_LINE $COMP_LINE ""
+    end
+    command npm completion -- $COMP_LINE 2>/dev/null
+  end
+end
+
+# flush out what ships with fish
+complete -e npm
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 732813c2b8..8d87e40f24 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM-ACCESS" "1" "February 2023" "" ""
+.TH "NPM-ACCESS" "1" "March 2023" "" ""
 .SH "NAME"
 \fBnpm-access\fR - Set access level on published packages
 .SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index 5a4ee97566..d123f80979 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM-ADDUSER" "1" "February 2023" "" ""
+.TH "NPM-ADDUSER" "1" "March 2023" "" ""
 .SH "NAME"
 \fBnpm-adduser\fR - Add a registry user account
 .SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index f0c8475937..084ca09733 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -1,4 +1,4 @@
-.TH "NPM-AUDIT" "1" "February 2023" "" ""
+.TH "NPM-AUDIT" "1" "March 2023" "" ""
 .SH "NAME"
 \fBnpm-audit\fR - Run a security audit
 .SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index 1f2ee1642d..a48dd27ebd 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM-BUGS" "1" "February 2023" "" ""
+.TH "NPM-BUGS" "1" "March 2023" "" ""
 .SH "NAME"
 \fBnpm-bugs\fR - Report bugs for a package in a web browser
 .SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index 5559b3be35..d28a915c1b 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM-CACHE" "1" "February 2023" "" ""
+.TH "NPM-CACHE" "1" "March 2023" "" ""
 .SH "NAME"
 \fBnpm-cache\fR - Manipulates packages cache
 .SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1
index 66b688f971..cc1a7b2471 100644
--- a/deps/npm/man/man1/npm-ci.1
+++ b/deps/npm/man/man1/npm-ci.1
@@ -1,4 +1,4 @@
-.TH "NPM-CI" "1" "February 2023" "" ""
+.TH "NPM-CI" "1" "March 2023" "" ""
 .SH "NAME"
 \fBnpm-ci\fR - Clean install a project
 .SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-completion.1
b/deps/npm/man/man1/npm-completion.1 index b82899fd35..9a86532597 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "February 2023" "" "" +.TH "NPM-COMPLETION" "1" "March 2023" "" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index 0cf96143ff..d8a23d49c6 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "February 2023" "" "" +.TH "NPM-CONFIG" "1" "March 2023" "" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index 302d63b3c2..7e9b6c1ed1 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "February 2023" "" "" +.TH "NPM-DEDUPE" "1" "March 2023" "" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index d525e703e8..0a5b8e497b 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEPRECATE" "1" "February 2023" "" "" +.TH "NPM-DEPRECATE" "1" "March 2023" "" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index 5d2c240f83..360b202a60 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "February 2023" "" "" +.TH "NPM-DIFF" "1" "March 2023" "" "" .SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index bd700f0fef..ab0b91e90d 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "February 2023" "" "" +.TH "NPM-DIST-TAG" "1" "March 2023" "" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index 6ee27d9348..b62d222cd3 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "February 2023" "" "" +.TH "NPM-DOCS" "1" "March 2023" "" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index d1b5a41654..1cb73f9c5b 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "February 2023" "" "" +.TH "NPM-DOCTOR" "1" "March 2023" "" "" .SH "NAME" \fBnpm-doctor\fR - Check your npm environment .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index de723dfabe..5e56a2fc90 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "February 2023" "" "" +.TH "NPM-EDIT" "1" "March 2023" "" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index 10d06ae263..d2627715dd 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "February 2023" "" "" +.TH "NPM-EXEC" "1" "March 2023" "" "" .SH "NAME" 
\fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index a8f5e0e2b6..4623c49f5a 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "February 2023" "" "" +.TH "NPM-EXPLAIN" "1" "March 2023" "" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index 82452e0cba..cfa05d1b5e 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "February 2023" "" "" +.TH "NPM-EXPLORE" "1" "March 2023" "" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index 4834f54df6..cd808c61bc 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "February 2023" "" "" +.TH "NPM-FIND-DUPES" "1" "March 2023" "" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index a0a31eebd4..896eb052c0 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "February 2023" "" "" +.TH "NPM-FUND" "1" "March 2023" "" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 846f507f51..51c948d072 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "February 2023" "" "" +.TH "NPM-HELP-SEARCH" "1" "March 2023" "" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index cffb33d49f..90cd1c1f2c 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "February 2023" "" "" +.TH "NPM-HELP" "1" "March 2023" "" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index 0cd0935d94..9ae87f10bb 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "February 2023" "" "" +.TH "NPM-HOOK" "1" "March 2023" "" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 763e2914fa..09c5305b2c 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,4 +1,4 @@ -.TH "NPM-INIT" "1" "February 2023" "" "" +.TH "NPM-INIT" "1" "March 2023" "" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index fbe641322d..ba98387c83 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "February 2023" "" "" +.TH "NPM-INSTALL-CI-TEST" "1" "March 2023" "" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" @@ -7,7 +7,7 @@ .nf npm install-ci-test -alias: cit +aliases: cit, clean-install-test, sit .fi .RE .SS "Description" diff 
--git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 528ec9b75a..73ff246c9e 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "February 2023" "" "" +.TH "NPM-INSTALL-TEST" "1" "March 2023" "" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 4eb982a825..1cbf4a6523 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "February 2023" "" "" +.TH "NPM-INSTALL" "1" "March 2023" "" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index 84c3e7fd7c..efd5925f87 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "February 2023" "" "" +.TH "NPM-LINK" "1" "March 2023" "" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1 index 83535a9344..dc154a2cd6 100644 --- a/deps/npm/man/man1/npm-login.1 +++ b/deps/npm/man/man1/npm-login.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "February 2023" "" "" +.TH "NPM-LOGIN" "1" "March 2023" "" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index 945b6ef8e0..c55de1b109 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "February 2023" "" "" +.TH "NPM-LOGOUT" "1" "March 2023" "" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index ac59cba39a..ebc3f41d46 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "February 2023" "" "" +.TH "NPM-LS" "1" "March 2023" "" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@9.5.1 /path/to/npm +npm@9.6.2 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index 2e952687b3..0c54d854a9 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "February 2023" "" "" +.TH "NPM-ORG" "1" "March 2023" "" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index 683321321f..6fce766932 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "February 2023" "" "" +.TH "NPM-OUTDATED" "1" "March 2023" "" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index bf73df68cd..adb0168df4 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "February 2023" "" "" +.TH "NPM-OWNER" "1" "March 2023" "" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index e6b19f2eb0..18c1a786c5 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ 
b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "February 2023" "" "" +.TH "NPM-PACK" "1" "March 2023" "" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index 4ae24a8cd6..166edeb769 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "February 2023" "" "" +.TH "NPM-PING" "1" "March 2023" "" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index d67f473563..9c18b6dd6d 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "February 2023" "" "" +.TH "NPM-PKG" "1" "March 2023" "" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index a14aec7c6b..2bb8b31bf4 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "February 2023" "" "" +.TH "NPM-PREFIX" "1" "March 2023" "" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index 40bc3df8e8..0d24f9d05b 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "February 2023" "" "" +.TH "NPM-PROFILE" "1" "March 2023" "" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index 86546ac112..d0afcb49aa 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "February 2023" "" "" +.TH "NPM-PRUNE" "1" "March 2023" "" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index 97b912987d..e434b4f2a7 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "February 2023" "" "" +.TH "NPM-PUBLISH" "1" "March 2023" "" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1 index 24ba7853ad..91459b2a27 100644 --- a/deps/npm/man/man1/npm-query.1 +++ b/deps/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "February 2023" "" "" +.TH "NPM-QUERY" "1" "March 2023" "" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index 1c00566dd1..8f4d6d75a4 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "February 2023" "" "" +.TH "NPM-REBUILD" "1" "March 2023" "" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 65bb96687f..0588d9b595 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "February 2023" "" "" +.TH "NPM-REPO" "1" "March 2023" "" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index 9323e2841a..a05bc3f189 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ 
b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "February 2023" "" "" +.TH "NPM-RESTART" "1" "March 2023" "" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index bd943bb95a..af4c9f323f 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "February 2023" "" "" +.TH "NPM-ROOT" "1" "March 2023" "" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index afcf4d655b..a905e08ae6 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "February 2023" "" "" +.TH "NPM-RUN-SCRIPT" "1" "March 2023" "" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index 2241f81e22..9c172a44b6 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "February 2023" "" "" +.TH "NPM-SEARCH" "1" "March 2023" "" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 5192bc1222..7981f03b16 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "February 2023" "" "" +.TH "NPM-SHRINKWRAP" "1" "March 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index ecb2ffe84f..1f0392794b 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "February 2023" "" "" +.TH "NPM-STAR" "1" "March 2023" "" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index 49ffc90e03..c2472bc237 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "February 2023" "" "" +.TH "NPM-STARS" "1" "March 2023" "" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index ecf3ad2838..83905a1bd6 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "February 2023" "" "" +.TH "NPM-START" "1" "March 2023" "" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 4a5f604941..c04cffa982 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "February 2023" "" "" +.TH "NPM-STOP" "1" "March 2023" "" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 30399634bc..435f882a60 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "February 2023" "" "" +.TH "NPM-TEAM" "1" "March 2023" "" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index ddd45f0757..48100ddd2e 100644 --- 
a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "February 2023" "" "" +.TH "NPM-TEST" "1" "March 2023" "" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 202f55bd69..380160743e 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" "February 2023" "" "" +.TH "NPM-TOKEN" "1" "March 2023" "" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 9c90a98632..b8f291cbed 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNINSTALL" "1" "February 2023" "" "" +.TH "NPM-UNINSTALL" "1" "March 2023" "" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index 8b250dd67b..0b5bae99d9 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "February 2023" "" "" +.TH "NPM-UNPUBLISH" "1" "March 2023" "" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index c76968c905..7e01bf1a96 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "February 2023" "" "" +.TH "NPM-UNSTAR" "1" "March 2023" "" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index 7819631243..1ac95a9b5a 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "February 2023" "" "" +.TH "NPM-UPDATE" "1" "March 2023" "" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index 66bfdbdc59..9f13e68d7f 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "February 2023" "" "" +.TH "NPM-VERSION" "1" "March 2023" "" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index 6075c163d0..5da1474d51 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "February 2023" "" "" +.TH "NPM-VIEW" "1" "March 2023" "" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index d96a364877..d20de132af 100644 --- a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "February 2023" "" "" +.TH "NPM-WHOAMI" "1" "March 2023" "" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 631f484d45..cd6b3e58c9 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "February 2023" "" "" +.TH "NPM" "1" "March 2023" "" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. 
.SS "Version" .P -9.5.1 +9.6.2 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index 079dffeecd..bf5e9bf8ea 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "February 2023" "" "" +.TH "NPX" "1" "March 2023" "" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index 50bf4c8c7a..403d66ee7f 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "February 2023" "" "" +.TH "FOLDERS" "5" "March 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index 68af465f5e..3145d5ac91 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "February 2023" "" "" +.TH "INSTALL" "5" "March 2023" "" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index 50bf4c8c7a..403d66ee7f 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "February 2023" "" "" +.TH "FOLDERS" "5" "March 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index 8f7dd8fd5b..b9c12028df 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "February 2023" "" "" +.TH "PACKAGE.JSON" "5" "March 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index 12a5dc0588..28f1fb6185 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "February 2023" "" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "March 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index a568984bc8..d4ab9f6577 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "February 2023" "" "" +.TH "NPMRC" "5" "March 2023" "" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index 8f7dd8fd5b..b9c12028df 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "February 2023" "" "" +.TH "PACKAGE.JSON" "5" "March 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 20be57b80d..d6b65de211 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "February 2023" "" "" +.TH "PACKAGE-LOCK.JSON" "5" "March 2023" "" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" @@ -67,9 +67,9 @@ No version 
provided: an "ancient" shrinkwrap file from a version of npm prior to .IP \(bu 4 \fB1\fR: The lockfile version used by npm v5 and v6. .IP \(bu 4 -\fB2\fR: The lockfile version used by npm v7, which is backwards compatible to v1 lockfiles. +\fB2\fR: The lockfile version used by npm v7 and v8. Backwards compatible to v1 lockfiles. .IP \(bu 4 -\fB3\fR: The lockfile version used by npm v7, \fIwithout\fR backwards compatibility affordances. This is used for the hidden lockfile at \fBnode_modules/.package-lock.json\fR, and will likely be used in a future version of npm, once support for npm v6 is no longer relevant. +\fB3\fR: The lockfile version used by npm v9. Backwards compatible to npm v7. .RE 0 .P diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index dc4261c280..93fd95d791 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "February 2023" "" "" +.TH "CONFIG" "7" "March 2023" "" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7 index f1670a3a20..2d5c77e671 100644 --- a/deps/npm/man/man7/dependency-selectors.7 +++ b/deps/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "February 2023" "" "" +.TH "QUERYING" "7" "March 2023" "" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index a15142ba20..83d9c5a399 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "February 2023" "" "" +.TH "DEVELOPERS" "7" "March 2023" "" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7 index d1cc3bdf00..4d1000caa9 100644 --- a/deps/npm/man/man7/logging.7 +++ b/deps/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "February 2023" "" "" +.TH "LOGGING" "7" "March 2023" "" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index 7aa5da2ede..ea8e56ad71 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "February 2023" "" "" +.TH "ORGS" "7" "March 2023" "" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7 index 4ae0095bd1..c3eb5e458f 100644 --- a/deps/npm/man/man7/package-spec.7 +++ b/deps/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "February 2023" "" "" +.TH "PACKAGE-SPEC" "7" "March 2023" "" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index e67c8bb2b0..b8926cb2f5 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "February 2023" "" "" +.TH "REGISTRY" "7" "March 2023" "" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index 82e39c8cb1..97066d4d97 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "February 2023" "" "" +.TH "REMOVAL" "7" "March 2023" "" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" diff 
--git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7
index 670b2f88c8..6b58330b7b 100644
--- a/deps/npm/man/man7/scope.7
+++ b/deps/npm/man/man7/scope.7
@@ -1,4 +1,4 @@
-.TH "SCOPE" "7" "February 2023" "" ""
+.TH "SCOPE" "7" "March 2023" "" ""
 .SH "NAME"
 \fBscope\fR - Scoped packages
 .SS "Description"
diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7
index 5f0ee7735d..d4e5de7e97 100644
--- a/deps/npm/man/man7/scripts.7
+++ b/deps/npm/man/man7/scripts.7
@@ -1,4 +1,4 @@
-.TH "SCRIPTS" "7" "February 2023" "" ""
+.TH "SCRIPTS" "7" "March 2023" "" ""
 .SH "NAME"
 \fBscripts\fR - How npm handles the "scripts" field
 .SS "Description"
diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7
index 6e1e9bf59e..7db3d49cc2 100644
--- a/deps/npm/man/man7/workspaces.7
+++ b/deps/npm/man/man7/workspaces.7
@@ -1,4 +1,4 @@
-.TH "WORKSPACES" "7" "February 2023" "" ""
+.TH "WORKSPACES" "7" "March 2023" "" ""
 .SH "NAME"
 \fBworkspaces\fR - Working with workspaces
 .SS "Description"
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index 2ea66ac336..0d936d8ef7 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -1243,7 +1243,7 @@ This is a one-time fix-up, please be patient...
     if (isWorkspace) {
       const existingNode = this.idealTree.edgesOut.get(spec.name).to
       if (existingNode && existingNode.isWorkspace && existingNode.satisfies(edge)) {
-        return edge.to
+        return existingNode
       }
     }
 
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 87993cca87..760fa977ec 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -535,9 +535,14 @@ module.exports = cls => class Reifier extends cls {
           await this[_renamePath](d, retired)
         }
       }
-      const made = await mkdir(node.path, { recursive: true })
       this[_sparseTreeDirs].add(node.path)
-      this[_sparseTreeRoots].add(made)
+      const made = await mkdir(node.path, { recursive: true })
+      // if the directory already exists, made will be undefined. if that's the case
+      // we don't want to remove it because we aren't the ones who created it so we
+      // omit it from the _sparseTreeRoots
+      if (made) {
+        this[_sparseTreeRoots].add(made)
+      }
     }))
       .then(() => process.emit('timeEnd', 'reify:createSparse'))
   }
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index aaa69e0484..5d1a9362e2 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -1,11 +1,11 @@
 {
   "name": "@npmcli/arborist",
-  "version": "6.2.3",
+  "version": "6.2.5",
   "description": "Manage node_modules trees",
   "dependencies": {
     "@isaacs/string-locale-compare": "^1.1.0",
     "@npmcli/fs": "^3.1.0",
-    "@npmcli/installed-package-contents": "^2.0.0",
+    "@npmcli/installed-package-contents": "^2.0.2",
     "@npmcli/map-workspaces": "^3.0.2",
     "@npmcli/metavuln-calculator": "^5.0.0",
     "@npmcli/name-from-folder": "^2.0.0",
@@ -39,7 +39,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.4",
+    "@npmcli/template-oss": "4.12.0",
     "benchmark": "^2.1.4",
     "chalk": "^4.1.0",
     "minify-registry-metadata": "^3.0.0",
@@ -98,7 +98,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.4",
+    "version": "4.12.0",
     "content": "../../scripts/template-oss/index.js"
   }
 }
diff --git a/deps/npm/node_modules/@npmcli/config/lib/parse-field.js b/deps/npm/node_modules/@npmcli/config/lib/parse-field.js
index 0c905bf23c..099b0b4eaf 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/parse-field.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/parse-field.js
@@ -20,6 +20,7 @@ const parseField = (f, key, opts, listElement = false) => {
   const isUmask = typeList.has(typeDefs.Umask.type)
   const isNumber = typeList.has(typeDefs.Number.type)
   const isList = !listElement && typeList.has(Array)
+  const isDate = typeList.has(typeDefs.Date.type)
 
   if (Array.isArray(f)) {
     return !isList ? f : f.map(field => parseField(field, key, opts, true))
@@ -53,6 +54,10 @@ const parseField = (f, key, opts, listElement = false) => {
 
   f = envReplace(f, env)
 
+  if (isDate) {
+    return new Date(f)
+  }
+
   if (isPath) {
     const homePattern = platform === 'win32' ? /^~(\/|\\)/ : /^~\//
     if (homePattern.test(f) && home) {
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index 38c063e358..a5b48d3309 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/config",
-  "version": "6.1.3",
+  "version": "6.1.4",
   "files": [
     "bin/",
     "lib/"
@@ -33,7 +33,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.4",
+    "@npmcli/template-oss": "4.12.0",
     "tap": "^16.3.4"
   },
   "dependencies": {
@@ -50,6 +50,6 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.4"
+    "version": "4.12.0"
   }
 }
diff --git a/deps/npm/node_modules/@npmcli/installed-package-contents/lib/index.js b/deps/npm/node_modules/@npmcli/installed-package-contents/lib/index.js
index e2c545b5ab..20b25c4bc8 100755
--- a/deps/npm/node_modules/@npmcli/installed-package-contents/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/installed-package-contents/lib/index.js
@@ -1,3 +1,5 @@
+#!/usr/bin/env node
+
 // to GET CONTENTS for folder at PATH (which may be a PACKAGE):
 // - if PACKAGE, read path/package.json
 // - if bins in ../node_modules/.bin, add those to result
diff --git a/deps/npm/node_modules/@npmcli/installed-package-contents/package.json b/deps/npm/node_modules/@npmcli/installed-package-contents/package.json
index aac2de1304..3554754123 100644
--- a/deps/npm/node_modules/@npmcli/installed-package-contents/package.json
+++ b/deps/npm/node_modules/@npmcli/installed-package-contents/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/installed-package-contents",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "description": "Get the list of files installed in a package in node_modules, including bundled dependencies",
   "author": "GitHub Inc.",
   "main": "lib/index.js",
@@ -19,8 +19,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.6.2",
-    "mkdirp": "^1.0.4",
+    "@npmcli/template-oss": "4.11.4",
     "require-inject": "^1.4.4",
     "tap": "^16.3.0"
   },
@@ -41,7 +40,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.6.2"
+    "version": "4.11.4"
   },
   "tap": {
     "nyc-arg": [
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE b/deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000..e9e7c1679a
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship.
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
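As an illustration of the appendix's instructions, this is what the boilerplate looks like when dropped into a TypeScript source file; the file name is hypothetical, and the compiled `@sigstore/protobuf-specs` files later in this diff carry exactly this header:

```ts
// example.ts (hypothetical file name, per the appendix's advice to keep
// a file or class name near the notice for easier identification)
/*
Copyright 2023 The Sigstore Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
export {}; // placeholder module body
```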
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts index 81422a0075..81422a0075 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js index 715bb1aa5b..715bb1aa5b 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/envelope.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts index 1b4ed47aad..1b4ed47aad 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js index f9b57cccdc..f9b57cccdc 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/api/field_behavior.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts index ef43bf01c1..ef43bf01c1 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js index b8cfc86ab9..b8cfc86ab9 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/descriptor.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts index 1ab812b4a9..1ab812b4a9 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js index 159135fe87..159135fe87 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/google/protobuf/timestamp.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.d.ts 
b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts index 51f748f459..51f748f459 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js index 1ef3e1b335..1ef3e1b335 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_bundle.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts index 0d8c2d5ebd..0d8c2d5ebd 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js index 63ace8db58..63ace8db58 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_common.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts index 9e33bb80e2..74eb82513d 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.d.ts @@ -98,12 +98,19 @@ export interface TransparencyLogEntry { */ inclusionProof: InclusionProof | undefined; /** - * The canonicalized Rekor entry body, used for SET verification. This - * is the same as the body returned by Rekor. It's included here for - * cases where the client cannot deterministically reconstruct the - * bundle from the other fields. Clients MUST verify that the signature - * referenced in the canonicalized_body matches the signature provided - * in the bundle content. + * The canonicalized transparency log entry, used to reconstruct + * the Signed Entry Timestamp (SET) during verification. + * The contents of this field are the same as the `body` field in + * a Rekor response, meaning that it does **not** include the "full" + * canonicalized form (of log index, ID, etc.) which are + * exposed as separate fields. The verifier is responsible for + * combining the `canonicalized_body`, `log_index`, `log_id`, + * and `integrated_time` into the payload that the SET's signature + * is generated over. + + * + + * Clients MUST verify that the signature referenced in the + + * `canonicalized_body` matches the signature provided in the + + * `Bundle.content`.
*/ canonicalizedBody: Buffer; } diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js index bffc7700ed..bffc7700ed 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_rekor.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts index 152d08f5c6..152d08f5c6 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js index 05e566767c..05e566767c 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_trustroot.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts index 8ee32d8e66..8ee32d8e66 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.d.ts +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.d.ts diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js index b99a305ba5..b99a305ba5 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/__generated__/sigstore_verification.js +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts new file mode 100644 index 0000000000..f87f0aba29 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.d.ts @@ -0,0 +1,6 @@ +export * from './__generated__/envelope'; +export * from './__generated__/sigstore_bundle'; +export * from './__generated__/sigstore_common'; +export * from './__generated__/sigstore_rekor'; +export * from './__generated__/sigstore_trustroot'; +export * from './__generated__/sigstore_verification'; diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js new file mode 100644 index 0000000000..eafb768c48 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/index.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +__exportStar(require("./__generated__/envelope"), exports); +__exportStar(require("./__generated__/sigstore_bundle"), exports); +__exportStar(require("./__generated__/sigstore_common"), exports); +__exportStar(require("./__generated__/sigstore_rekor"), exports); +__exportStar(require("./__generated__/sigstore_trustroot"), exports); +__exportStar(require("./__generated__/sigstore_verification"), exports); diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json new file mode 100644 index 0000000000..7cb4aa9c53 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/protobuf-specs", + "version": "0.1.0", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/protobuf-specs.git" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/sigstore/protobuf-specs/issues" + }, + "homepage": "https://github.com/sigstore/protobuf-specs#readme", + "devDependencies": { + "@tsconfig/node14": "^1.0.3", + "@types/node": "^18.14.0", + "typescript": "^4.9.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/deps/npm/node_modules/@tufjs/models/LICENSE b/deps/npm/node_modules/@tufjs/models/LICENSE new file mode 100644 index 0000000000..420700f5d3 --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/dist/models/base.d.ts b/deps/npm/node_modules/@tufjs/models/dist/base.d.ts index 4c5e0aaf4f..4cc2395328 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/base.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/base.d.ts @@ -1,5 +1,5 @@ -import { JSONObject, JSONValue } from '../utils/types'; import { Signature } from './signature'; +import { JSONObject, JSONValue } from './utils'; export interface Signable { signatures: Record<string, Signature>; signed: Signed; @@ -10,6 +10,13 @@ export interface SignedOptions { expires?: string; unrecognizedFields?: Record<string, JSONValue>; } +export declare enum MetadataKind { + Root = "root", + Timestamp = "timestamp", + Snapshot = "snapshot", + Targets = "targets" +} +export declare function isMetadataKind(value: unknown): value is MetadataKind; /*** * A base class for the signed part of TUF metadata. * diff --git a/deps/npm/node_modules/tuf-js/dist/models/base.js b/deps/npm/node_modules/@tufjs/models/dist/base.js index 7658567e2d..d89a089c33 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/base.js +++ b/deps/npm/node_modules/@tufjs/models/dist/base.js @@ -3,11 +3,23 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Signed = void 0; +exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const utils_1 = require("../utils"); +const error_1 = require("./error"); +const utils_1 = require("./utils"); const SPECIFICATION_VERSION = ['1', '0', '31']; +var MetadataKind; +(function (MetadataKind) { + MetadataKind["Root"] = "root"; + MetadataKind["Timestamp"] = "timestamp"; + MetadataKind["Snapshot"] = "snapshot"; + MetadataKind["Targets"] = "targets"; +})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {})); +function isMetadataKind(value) { + return (typeof value === 'string' && + Object.values(MetadataKind).includes(value)); +} +exports.isMetadataKind = isMetadataKind; /*** * A base class for the signed part of TUF metadata. 
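The `base.d.ts` and `base.js` hunks above relocate `MetadataKind` and its type guard from `utils/types` into `base`. The compiled JavaScript is the standard output of a TypeScript string enum plus a user-defined type guard; a minimal sketch of the equivalent source, reconstructed from the compiled output rather than copied from the tuf-js sources:

```ts
export enum MetadataKind {
  Root = 'root',
  Timestamp = 'timestamp',
  Snapshot = 'snapshot',
  Targets = 'targets',
}

// Narrows an unknown value (e.g. the `_type` field of parsed JSON)
// to the enum, exactly as the compiled guard above does.
export function isMetadataKind(value: unknown): value is MetadataKind {
  return (
    typeof value === 'string' &&
    (Object.values(MetadataKind) as string[]).includes(value)
  );
}
```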
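Stepping back to the rewritten `TransparencyLogEntry` comment in the `sigstore_rekor.d.ts` hunk earlier in this diff: the verifier has to recombine `canonicalized_body`, `log_index`, `log_id`, and `integrated_time` into the payload the Signed Entry Timestamp was computed over. A rough sketch of what that reconstruction could look like; the field encodings, canonical-JSON layout, and helper names are assumptions drawn from the comment's wording, not sigstore's actual verifier:

```ts
import { verify, KeyObject } from 'crypto';

// Assumed field shapes, loosely following the TransparencyLogEntry interface.
interface EntryFields {
  canonicalizedBody: Buffer; // the Rekor `body` bytes
  logIndex: number;
  logId: string; // assumed hex-encoded log key id
  integratedTime: number;
}

// Rebuild the document the SET was signed over. Listing the keys in
// sorted order stands in for true canonical JSON (an approximation).
function setPayload(e: EntryFields): Buffer {
  return Buffer.from(
    JSON.stringify({
      body: e.canonicalizedBody.toString('base64'),
      integratedTime: e.integratedTime,
      logID: e.logId,
      logIndex: e.logIndex,
    })
  );
}

function verifySet(e: EntryFields, set: Buffer, rekorKey: KeyObject): boolean {
  return verify('sha256', setPayload(e), rekorKey, set);
}
```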
* diff --git a/deps/npm/node_modules/tuf-js/dist/models/delegations.d.ts b/deps/npm/node_modules/@tufjs/models/dist/delegations.d.ts index b53862aa86..357e9dfeb8 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/delegations.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/delegations.d.ts @@ -1,6 +1,6 @@ -import { JSONObject, JSONValue } from '../utils/types'; import { Key } from './key'; import { DelegatedRole, SuccinctRoles } from './role'; +import { JSONObject, JSONValue } from './utils'; type DelegatedRoleMap = Record<string, DelegatedRole>; type KeyMap = Record<string, Key>; interface DelegationsOptions { diff --git a/deps/npm/node_modules/tuf-js/dist/models/delegations.js b/deps/npm/node_modules/@tufjs/models/dist/delegations.js index 302bd52d8d..7165f1e244 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/delegations.js +++ b/deps/npm/node_modules/@tufjs/models/dist/delegations.js @@ -5,10 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Delegations = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); +const error_1 = require("./error"); const key_1 = require("./key"); const role_1 = require("./role"); +const utils_1 = require("./utils"); /** * A container object storing information about all delegations. * @@ -67,7 +67,7 @@ class Delegations { static fromJSON(data) { const { keys, roles, succinct_roles, ...unrecognizedFields } = data; let succinctRoles; - if ((0, guard_1.isObject)(succinct_roles)) { + if (utils_1.guard.isObject(succinct_roles)) { succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); } return new Delegations({ @@ -89,7 +89,7 @@ function rolesToJSON(roles) { return Object.values(roles).map((role) => role.toJSON()); } function keysFromJSON(data) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('keys is malformed'); } return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ @@ -99,8 +99,8 @@ function keysFromJSON(data) { } function rolesFromJSON(data) { let roleMap; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectArray)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectArray(data)) { throw new TypeError('roles is malformed'); } roleMap = data.reduce((acc, role) => { diff --git a/deps/npm/node_modules/@tufjs/models/dist/error.d.ts b/deps/npm/node_modules/@tufjs/models/dist/error.d.ts new file mode 100644 index 0000000000..e03d05a381 --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/dist/error.d.ts @@ -0,0 +1,12 @@ +export declare class ValueError extends Error { +} +export declare class RepositoryError extends Error { +} +export declare class UnsignedMetadataError extends RepositoryError { +} +export declare class LengthOrHashMismatchError extends RepositoryError { +} +export declare class CryptoError extends Error { +} +export declare class UnsupportedAlgorithmError extends CryptoError { +} diff --git a/deps/npm/node_modules/@tufjs/models/dist/error.js b/deps/npm/node_modules/@tufjs/models/dist/error.js new file mode 100644 index 0000000000..ba80698747 --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/dist/error.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = 
exports.RepositoryError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error about metadata object with insufficient threshold of signatures. +class UnsignedMetadataError extends RepositoryError { +} +exports.UnsignedMetadataError = UnsignedMetadataError; +// An error while checking the length and hash values of an object. +class LengthOrHashMismatchError extends RepositoryError { +} +exports.LengthOrHashMismatchError = LengthOrHashMismatchError; +class CryptoError extends Error { +} +exports.CryptoError = CryptoError; +class UnsupportedAlgorithmError extends CryptoError { +} +exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/deps/npm/node_modules/tuf-js/dist/models/file.d.ts b/deps/npm/node_modules/@tufjs/models/dist/file.d.ts index 9678cf1efe..7abeb2bb03 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/file.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/file.d.ts @@ -1,7 +1,7 @@ /// <reference types="node" /> /// <reference types="node" /> import { Readable } from 'stream'; -import { JSONObject, JSONValue } from '../utils/types'; +import { JSONObject, JSONValue } from './utils'; interface MetaFileOptions { version: number; length?: number; diff --git a/deps/npm/node_modules/tuf-js/dist/models/file.js b/deps/npm/node_modules/@tufjs/models/dist/file.js index d6d535f6ca..b35fe5950b 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/file.js +++ b/deps/npm/node_modules/@tufjs/models/dist/file.js @@ -6,8 +6,8 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.TargetFile = exports.MetaFile = void 0; const crypto_1 = __importDefault(require("crypto")); const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); +const error_1 = require("./error"); +const utils_1 = require("./utils"); // A container with information about a particular metadata file. // // This class is used for Timestamp and Snapshot metadata. 
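The comments in the new `error.js` spell out the hierarchy: `UnsignedMetadataError` and `LengthOrHashMismatchError` are both `RepositoryError`s, so callers can catch broadly or narrowly. A self-contained sketch of the intended `instanceof` narrowing, mirroring the classes above rather than importing them:

```ts
// Mirrors the hierarchy declared in error.d.ts above.
class RepositoryError extends Error {}
class UnsignedMetadataError extends RepositoryError {}
class LengthOrHashMismatchError extends RepositoryError {}

function describe(err: unknown): string {
  if (err instanceof UnsignedMetadataError) {
    return 'metadata did not meet its signature threshold';
  }
  if (err instanceof RepositoryError) {
    // also covers LengthOrHashMismatchError and future subclasses
    return 'repository-side failure';
  }
  throw err; // not ours to classify
}

console.log(describe(new LengthOrHashMismatchError('bad length')));
```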
@@ -75,10 +75,10 @@ class MetaFile { if (typeof version !== 'number') { throw new TypeError('version must be a number'); } - if ((0, guard_1.isDefined)(length) && typeof length !== 'number') { + if (utils_1.guard.isDefined(length) && typeof length !== 'number') { throw new TypeError('length must be a number'); } - if ((0, guard_1.isDefined)(hashes) && !(0, guard_1.isStringRecord)(hashes)) { + if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { throw new TypeError('hashes must be string keys and values'); } return new MetaFile({ @@ -163,7 +163,7 @@ class TargetFile { if (typeof length !== 'number') { throw new TypeError('length must be a number'); } - if (!(0, guard_1.isStringRecord)(hashes)) { + if (!utils_1.guard.isStringRecord(hashes)) { throw new TypeError('hashes must have string keys and values'); } return new TargetFile({ diff --git a/deps/npm/node_modules/@tufjs/models/dist/index.d.ts b/deps/npm/node_modules/@tufjs/models/dist/index.d.ts new file mode 100644 index 0000000000..f9768beaea --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/dist/index.d.ts @@ -0,0 +1,10 @@ +export { MetadataKind } from './base'; +export { ValueError } from './error'; +export { MetaFile, TargetFile } from './file'; +export { Key } from './key'; +export { Metadata } from './metadata'; +export { Root } from './root'; +export { Signature } from './signature'; +export { Snapshot } from './snapshot'; +export { Targets } from './targets'; +export { Timestamp } from './timestamp'; diff --git a/deps/npm/node_modules/@tufjs/models/dist/index.js b/deps/npm/node_modules/@tufjs/models/dist/index.js new file mode 100644 index 0000000000..a4dc783659 --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; +var base_1 = require("./base"); +Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); +var key_1 = require("./key"); +Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); +var metadata_1 = require("./metadata"); +Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); +var root_1 = require("./root"); +Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); +var signature_1 = require("./signature"); +Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); +var snapshot_1 = require("./snapshot"); +Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); +var targets_1 = require("./targets"); +Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); +var 
timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/deps/npm/node_modules/tuf-js/dist/models/key.d.ts b/deps/npm/node_modules/@tufjs/models/dist/key.d.ts index 160407ae70..9f90b7ee89 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/key.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/key.d.ts @@ -1,5 +1,5 @@ -import { JSONObject, JSONValue } from '../utils/types'; import { Signable } from './base'; +import { JSONObject, JSONValue } from './utils'; export interface KeyOptions { keyID: string; keyType: string; diff --git a/deps/npm/node_modules/tuf-js/dist/models/key.js b/deps/npm/node_modules/@tufjs/models/dist/key.js index 33ff514fc1..5e55b09d7c 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/key.js +++ b/deps/npm/node_modules/@tufjs/models/dist/key.js @@ -1,37 +1,13 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Key = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); -const key_1 = require("../utils/key"); -const signer = __importStar(require("../utils/signer")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const key_1 = require("./utils/key"); // A container class representing the public portion of a Key. 
class Key { constructor(options) { @@ -57,7 +33,7 @@ class Key { }); const signedData = metadata.signed.toJSON(); try { - if (!signer.verifySignature(signedData, publicKey, signature.sig)) { + if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); } } @@ -94,7 +70,7 @@ class Key { if (typeof scheme !== 'string') { throw new TypeError('scheme must be a string'); } - if (!(0, guard_1.isStringRecord)(keyval)) { + if (!utils_1.guard.isStringRecord(keyval)) { throw new TypeError('keyval must be a string record'); } return new Key({ diff --git a/deps/npm/node_modules/tuf-js/dist/models/metadata.d.ts b/deps/npm/node_modules/@tufjs/models/dist/metadata.d.ts index 39abf03406..55c9294a29 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/metadata.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/metadata.d.ts @@ -1,10 +1,11 @@ -import { JSONObject, JSONValue, MetadataKind } from '../utils/types'; -import { Signable } from './base'; +/// <reference types="node" /> +import { MetadataKind, Signable } from './base'; import { Root } from './root'; import { Signature } from './signature'; import { Snapshot } from './snapshot'; import { Targets } from './targets'; import { Timestamp } from './timestamp'; +import { JSONObject, JSONValue } from './utils'; type MetadataType = Root | Timestamp | Snapshot | Targets; /*** * A container for signed TUF metadata. @@ -35,8 +36,10 @@ export declare class Metadata<T extends MetadataType> implements Signable { signatures: Record<string, Signature>; unrecognizedFields: Record<string, JSONValue>; constructor(signed: T, signatures?: Record<string, Signature>, unrecognizedFields?: Record<string, JSONValue>); + sign(signer: (data: Buffer) => Signature, append?: boolean): void; verifyDelegate(delegatedRole: string, delegatedMetadata: Metadata<MetadataType>): void; equals(other: T): boolean; + toJSON(): JSONObject; static fromJSON(type: MetadataKind.Root, data: JSONObject): Metadata<Root>; static fromJSON(type: MetadataKind.Timestamp, data: JSONObject): Metadata<Timestamp>; static fromJSON(type: MetadataKind.Snapshot, data: JSONObject): Metadata<Snapshot>; diff --git a/deps/npm/node_modules/tuf-js/dist/models/metadata.js b/deps/npm/node_modules/@tufjs/models/dist/metadata.js index 11c3c54682..945d3a42a7 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/metadata.js +++ b/deps/npm/node_modules/@tufjs/models/dist/metadata.js @@ -5,14 +5,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Metadata = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); +const base_1 = require("./base"); +const error_1 = require("./error"); const root_1 = require("./root"); const signature_1 = require("./signature"); const snapshot_1 = require("./snapshot"); const targets_1 = require("./targets"); const timestamp_1 = require("./timestamp"); +const utils_1 = require("./utils"); +const json_1 = require("./utils/json"); /*** * A container for signed TUF metadata. 
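The new `sign` method declared in the `metadata.d.ts` hunk above (and implemented in the `metadata.js` hunk that follows) takes a callback from the canonicalized signed bytes to a `Signature`, which keeps key material out of the model layer. A hedged usage sketch; the signer callback and key id are placeholders, not part of the tuf-js API:

```ts
import { Metadata, Root, Signature } from '@tufjs/models';

declare const root: Metadata<Root>;               // loaded elsewhere
declare function signBytes(data: Buffer): string; // hypothetical signer

// Default append = true adds to the existing signatures map,
// keyed by the returned signature's keyID.
root.sign(
  (bytes) => new Signature({ keyID: 'example-key-id', sig: signBytes(bytes) })
);

// append = false clears the map first, leaving only the new signature.
root.sign(
  (bytes) => new Signature({ keyID: 'example-key-id', sig: signBytes(bytes) }),
  false
);
```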
* @@ -43,15 +44,23 @@ class Metadata { this.signatures = signatures || {}; this.unrecognizedFields = unrecognizedFields || {}; } + sign(signer, append = true) { + const bytes = (0, json_1.canonicalize)(this.signed.toJSON()); + const signature = signer(bytes); + if (!append) { + this.signatures = {}; + } + this.signatures[signature.keyID] = signature; + } verifyDelegate(delegatedRole, delegatedMetadata) { let role; let keys = {}; switch (this.signed.type) { - case types_1.MetadataKind.Root: + case base_1.MetadataKind.Root: keys = this.signed.keys; role = this.signed.roles[delegatedRole]; break; - case types_1.MetadataKind.Targets: + case base_1.MetadataKind.Targets: if (!this.signed.delegations) { throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); } @@ -98,9 +107,19 @@ class Metadata { util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); } + toJSON() { + const signatures = Object.values(this.signatures).map((signature) => { + return signature.toJSON(); + }); + return { + signatures, + signed: this.signed.toJSON(), + ...this.unrecognizedFields, + }; + } static fromJSON(type, data) { const { signed, signatures, ...rest } = data; - if (!(0, guard_1.isDefined)(signed) || !(0, guard_1.isObject)(signed)) { + if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { throw new TypeError('signed is not defined'); } if (type !== signed._type) { @@ -108,16 +127,16 @@ class Metadata { } let signedObj; switch (type) { - case types_1.MetadataKind.Root: + case base_1.MetadataKind.Root: signedObj = root_1.Root.fromJSON(signed); break; - case types_1.MetadataKind.Timestamp: + case base_1.MetadataKind.Timestamp: signedObj = timestamp_1.Timestamp.fromJSON(signed); break; - case types_1.MetadataKind.Snapshot: + case base_1.MetadataKind.Snapshot: signedObj = snapshot_1.Snapshot.fromJSON(signed); break; - case types_1.MetadataKind.Targets: + case base_1.MetadataKind.Targets: signedObj = targets_1.Targets.fromJSON(signed); break; default: @@ -129,7 +148,7 @@ class Metadata { } exports.Metadata = Metadata; function signaturesFromJSON(data) { - if (!(0, guard_1.isObjectArray)(data)) { + if (!utils_1.guard.isObjectArray(data)) { throw new TypeError('signatures is not an array'); } return data.reduce((acc, sigData) => { diff --git a/deps/npm/node_modules/tuf-js/dist/models/role.d.ts b/deps/npm/node_modules/@tufjs/models/dist/role.d.ts index 4575300fb9..b3a6efae2c 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/role.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/role.d.ts @@ -1,4 +1,4 @@ -import { JSONObject, JSONValue } from '../utils/types'; +import { JSONObject, JSONValue } from './utils'; export declare const TOP_LEVEL_ROLE_NAMES: string[]; export interface RoleOptions { keyIDs: string[]; diff --git a/deps/npm/node_modules/tuf-js/dist/models/role.js b/deps/npm/node_modules/@tufjs/models/dist/role.js index da80a09b8b..143c5dc608 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/role.js +++ b/deps/npm/node_modules/@tufjs/models/dist/role.js @@ -7,8 +7,8 @@ exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL const crypto_1 = __importDefault(require("crypto")); const minimatch_1 = __importDefault(require("minimatch")); const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); +const error_1 = require("./error"); +const utils_1 = require("./utils"); 
exports.TOP_LEVEL_ROLE_NAMES = [ 'root', 'targets', @@ -51,7 +51,7 @@ class Role { } static fromJSON(data) { const { keyids, threshold, ...rest } = data; - if (!(0, guard_1.isStringArray)(keyids)) { + if (!utils_1.guard.isStringArray(keyids)) { throw new TypeError('keyids must be an array'); } if (typeof threshold !== 'number') { @@ -128,7 +128,7 @@ class DelegatedRole extends Role { } static fromJSON(data) { const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; - if (!(0, guard_1.isStringArray)(keyids)) { + if (!utils_1.guard.isStringArray(keyids)) { throw new TypeError('keyids must be an array of strings'); } if (typeof threshold !== 'number') { @@ -140,10 +140,11 @@ class DelegatedRole extends Role { if (typeof terminating !== 'boolean') { throw new TypeError('terminating must be a boolean'); } - if ((0, guard_1.isDefined)(paths) && !(0, guard_1.isStringArray)(paths)) { + if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { throw new TypeError('paths must be an array of strings'); } - if ((0, guard_1.isDefined)(path_hash_prefixes) && !(0, guard_1.isStringArray)(path_hash_prefixes)) { + if (utils_1.guard.isDefined(path_hash_prefixes) && + !utils_1.guard.isStringArray(path_hash_prefixes)) { throw new TypeError('path_hash_prefixes must be an array of strings'); } return new DelegatedRole({ @@ -274,7 +275,7 @@ class SuccinctRoles extends Role { } static fromJSON(data) { const { keyids, threshold, bit_length, name_prefix, ...rest } = data; - if (!(0, guard_1.isStringArray)(keyids)) { + if (!utils_1.guard.isStringArray(keyids)) { throw new TypeError('keyids must be an array of strings'); } if (typeof threshold !== 'number') { diff --git a/deps/npm/node_modules/tuf-js/dist/models/root.d.ts b/deps/npm/node_modules/@tufjs/models/dist/root.d.ts index 66356628f4..eb5eb8dede 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/root.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/root.d.ts @@ -1,7 +1,7 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { Key } from './key'; import { Role } from './role'; +import { JSONObject } from './utils'; type KeyMap = Record<string, Key>; type RoleMap = Record<string, Role>; export interface RootOptions extends SignedOptions { @@ -21,6 +21,7 @@ export declare class Root extends Signed { readonly roles: RoleMap; readonly consistentSnapshot: boolean; constructor(options: RootOptions); + addKey(key: Key, role: string): void; equals(other: Root): boolean; toJSON(): JSONObject; static fromJSON(data: JSONObject): Root; diff --git a/deps/npm/node_modules/tuf-js/dist/models/root.js b/deps/npm/node_modules/@tufjs/models/dist/root.js index 574ec1acdc..36d0ef0f18 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/root.js +++ b/deps/npm/node_modules/@tufjs/models/dist/root.js @@ -5,12 +5,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Root = void 0; const util_1 = __importDefault(require("util")); -const error_1 = require("../error"); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); +const error_1 = require("./error"); const key_1 = require("./key"); const role_1 = require("./role"); +const utils_1 = require("./utils"); /** * A container for the signed part of root metadata. 
* @@ -20,7 +19,7 @@ const role_1 = require("./role"); class Root extends base_1.Signed { constructor(options) { super(options); - this.type = types_1.MetadataKind.Root; + this.type = base_1.MetadataKind.Root; this.keys = options.keys || {}; this.consistentSnapshot = options.consistentSnapshot ?? true; if (!options.roles) { @@ -37,6 +36,15 @@ class Root extends base_1.Signed { this.roles = options.roles; } } + addKey(key, role) { + if (!this.roles[role]) { + throw new error_1.ValueError(`role ${role} does not exist`); + } + if (!this.roles[role].keyIDs.includes(key.keyID)) { + this.roles[role].keyIDs.push(key.keyID); + } + this.keys[key.keyID] = key; + } equals(other) { if (!(other instanceof Root)) { return false; @@ -48,6 +56,7 @@ class Root extends base_1.Signed { } toJSON() { return { + _type: this.type, spec_version: this.specVersion, version: this.version, expires: this.expires, @@ -81,8 +90,8 @@ function rolesToJSON(roles) { } function keysFromJSON(data) { let keys; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('keys must be an object'); } keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ @@ -94,8 +103,8 @@ function keysFromJSON(data) { } function rolesFromJSON(data) { let roles; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('roles must be an object'); } roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ diff --git a/deps/npm/node_modules/tuf-js/dist/models/signature.d.ts b/deps/npm/node_modules/@tufjs/models/dist/signature.d.ts index 1d78e2d8e5..dbeabbef87 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/signature.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/signature.d.ts @@ -1,4 +1,4 @@ -import { JSONObject } from '../utils/types'; +import { JSONObject } from './utils'; export interface SignatureOptions { keyID: string; sig: string; @@ -15,5 +15,6 @@ export declare class Signature { readonly keyID: string; readonly sig: string; constructor(options: SignatureOptions); + toJSON(): JSONObject; static fromJSON(data: JSONObject): Signature; } diff --git a/deps/npm/node_modules/tuf-js/dist/models/signature.js b/deps/npm/node_modules/@tufjs/models/dist/signature.js index 9550fa7b55..33eb204eb0 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/signature.js +++ b/deps/npm/node_modules/@tufjs/models/dist/signature.js @@ -15,6 +15,12 @@ class Signature { this.keyID = keyID; this.sig = sig; } + toJSON() { + return { + keyid: this.keyID, + sig: this.sig, + }; + } static fromJSON(data) { const { keyid, sig } = data; if (typeof keyid !== 'string') { diff --git a/deps/npm/node_modules/tuf-js/dist/models/snapshot.d.ts b/deps/npm/node_modules/@tufjs/models/dist/snapshot.d.ts index 79bc073595..bcc780aee0 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/snapshot.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/snapshot.d.ts @@ -1,6 +1,6 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { MetaFile } from './file'; +import { JSONObject } from './utils'; type MetaFileMap = Record<string, MetaFile>; export interface SnapshotOptions extends SignedOptions { meta?: MetaFileMap; diff --git a/deps/npm/node_modules/tuf-js/dist/models/snapshot.js 
b/deps/npm/node_modules/@tufjs/models/dist/snapshot.js index 0945a28cd0..e90ea8e729 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/snapshot.js +++ b/deps/npm/node_modules/@tufjs/models/dist/snapshot.js @@ -5,10 +5,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Snapshot = void 0; const util_1 = __importDefault(require("util")); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); const file_1 = require("./file"); +const utils_1 = require("./utils"); /** * A container for the signed part of snapshot metadata. * @@ -19,7 +18,7 @@ const file_1 = require("./file"); class Snapshot extends base_1.Signed { constructor(opts) { super(opts); - this.type = types_1.MetadataKind.Snapshot; + this.type = base_1.MetadataKind.Snapshot; this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; } equals(other) { @@ -30,6 +29,7 @@ class Snapshot extends base_1.Signed { } toJSON() { return { + _type: this.type, meta: metaToJSON(this.meta), spec_version: this.specVersion, version: this.version, @@ -56,8 +56,8 @@ function metaToJSON(meta) { } function metaFromJSON(data) { let meta; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('meta field is malformed'); } else { @@ -66,6 +66,6 @@ function metaFromJSON(data) { [path]: file_1.MetaFile.fromJSON(metadata), }), {}); } - return meta; } + return meta; } diff --git a/deps/npm/node_modules/tuf-js/dist/models/targets.d.ts b/deps/npm/node_modules/@tufjs/models/dist/targets.d.ts index 24dba9ac71..442f9e4439 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/targets.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/targets.d.ts @@ -1,7 +1,7 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { Delegations } from './delegations'; import { TargetFile } from './file'; +import { JSONObject } from './utils'; type TargetFileMap = Record<string, TargetFile>; interface TargetsOptions extends SignedOptions { targets?: TargetFileMap; @@ -12,6 +12,7 @@ export declare class Targets extends Signed { readonly targets: TargetFileMap; readonly delegations?: Delegations; constructor(options: TargetsOptions); + addTarget(target: TargetFile): void; equals(other: Targets): boolean; toJSON(): JSONObject; static fromJSON(data: JSONObject): Targets; diff --git a/deps/npm/node_modules/tuf-js/dist/models/targets.js b/deps/npm/node_modules/@tufjs/models/dist/targets.js index 90a2528764..54bd8f8c55 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/targets.js +++ b/deps/npm/node_modules/@tufjs/models/dist/targets.js @@ -5,11 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Targets = void 0; const util_1 = __importDefault(require("util")); -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); const delegations_1 = require("./delegations"); const file_1 = require("./file"); +const utils_1 = require("./utils"); // Container for the signed part of targets metadata. 
// // Targets contains verifying information about target files and also delegates @@ -17,10 +16,13 @@ const file_1 = require("./file"); class Targets extends base_1.Signed { constructor(options) { super(options); - this.type = types_1.MetadataKind.Targets; + this.type = base_1.MetadataKind.Targets; this.targets = options.targets || {}; this.delegations = options.delegations; } + addTarget(target) { + this.targets[target.path] = target; + } equals(other) { if (!(other instanceof Targets)) { return false; @@ -31,6 +33,7 @@ class Targets extends base_1.Signed { } toJSON() { const json = { + _type: this.type, spec_version: this.specVersion, version: this.version, expires: this.expires, @@ -62,8 +65,8 @@ function targetsToJSON(targets) { } function targetsFromJSON(data) { let targets; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObjectRecord)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { throw new TypeError('targets must be an object'); } else { @@ -77,8 +80,8 @@ function targetsFromJSON(data) { } function delegationsFromJSON(data) { let delegations; - if ((0, guard_1.isDefined)(data)) { - if (!(0, guard_1.isObject)(data)) { + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObject(data)) { throw new TypeError('delegations must be an object'); } else { diff --git a/deps/npm/node_modules/tuf-js/dist/models/timestamp.d.ts b/deps/npm/node_modules/@tufjs/models/dist/timestamp.d.ts index 481ada8e23..9ab012b891 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/timestamp.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/timestamp.d.ts @@ -1,6 +1,6 @@ -import { JSONObject, MetadataKind } from '../utils/types'; -import { Signed, SignedOptions } from './base'; +import { MetadataKind, Signed, SignedOptions } from './base'; import { MetaFile } from './file'; +import { JSONObject } from './utils'; interface TimestampOptions extends SignedOptions { snapshotMeta?: MetaFile; } diff --git a/deps/npm/node_modules/tuf-js/dist/models/timestamp.js b/deps/npm/node_modules/@tufjs/models/dist/timestamp.js index 84f681b68d..9880c4c9fc 100644 --- a/deps/npm/node_modules/tuf-js/dist/models/timestamp.js +++ b/deps/npm/node_modules/@tufjs/models/dist/timestamp.js @@ -1,10 +1,9 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.Timestamp = void 0; -const guard_1 = require("../utils/guard"); -const types_1 = require("../utils/types"); const base_1 = require("./base"); const file_1 = require("./file"); +const utils_1 = require("./utils"); /** * A container for the signed part of timestamp metadata. 
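Together with `sign`, the new `Root.addKey` and `Targets.addTarget` helpers above give the models a small mutation API. A hedged sketch of how they compose; the `TargetFile` option names are inferred from the `fromJSON` validation shown earlier and may not match the real constructor:

```ts
import { Key, Root, TargetFile, Targets } from '@tufjs/models';

declare const root: Root;
declare const targets: Targets;
declare const onlineKey: Key;

// Throws ValueError if the named role is absent; otherwise records the
// key and appends its keyID to the role's keyIDs (if not already there).
root.addKey(onlineKey, 'timestamp');

// addTarget stores the file keyed by its path.
const file = new TargetFile({
  path: 'pkg/app-1.0.0.tgz', // assumption: option name `path`
  length: 1024,
  hashes: { sha256: 'abc123' }, // placeholder digest
});
targets.addTarget(file);
```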
* @@ -14,7 +13,7 @@ const file_1 = require("./file"); class Timestamp extends base_1.Signed { constructor(options) { super(options); - this.type = types_1.MetadataKind.Timestamp; + this.type = base_1.MetadataKind.Timestamp; this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); } equals(other) { @@ -25,6 +24,7 @@ class Timestamp extends base_1.Signed { } toJSON() { return { + _type: this.type, spec_version: this.specVersion, version: this.version, expires: this.expires, @@ -45,9 +45,9 @@ class Timestamp extends base_1.Signed { exports.Timestamp = Timestamp; function snapshotMetaFromJSON(data) { let snapshotMeta; - if ((0, guard_1.isDefined)(data)) { + if (utils_1.guard.isDefined(data)) { const snapshotData = data['snapshot.json']; - if (!(0, guard_1.isDefined)(snapshotData) || !(0, guard_1.isObject)(snapshotData)) { + if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { throw new TypeError('missing snapshot.json in meta'); } else { diff --git a/deps/npm/node_modules/tuf-js/dist/utils/guard.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/guard.d.ts index 17bc4ce3c7..60c80e1607 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/guard.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/guard.d.ts @@ -1,8 +1,7 @@ -import { JSONObject, MetadataKind } from './types'; +import { JSONObject } from './types'; export declare function isDefined<T>(val: T | undefined): val is T; export declare function isObject(value: unknown): value is JSONObject; export declare function isStringArray(value: unknown): value is string[]; export declare function isObjectArray(value: unknown): value is JSONObject[]; export declare function isStringRecord(value: unknown): value is Record<string, string>; export declare function isObjectRecord(value: unknown): value is Record<string, JSONObject>; -export declare function isMetadataKind(value: unknown): value is MetadataKind; diff --git a/deps/npm/node_modules/tuf-js/dist/utils/guard.js b/deps/npm/node_modules/@tufjs/models/dist/utils/guard.js index f2207af186..efe5588523 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/guard.js +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/guard.js @@ -1,7 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isMetadataKind = exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0; -const types_1 = require("./types"); +exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0; function isDefined(val) { return val !== undefined; } @@ -32,8 +31,3 @@ function isObjectRecord(value) { Object.values(value).every((v) => typeof v === 'object' && v !== null)); } exports.isObjectRecord = isObjectRecord; -function isMetadataKind(value) { - return (typeof value === 'string' && - Object.values(types_1.MetadataKind).includes(value)); -} -exports.isMetadataKind = isMetadataKind; diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/index.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/index.d.ts new file mode 100644 index 0000000000..7dbbd1eeec --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/index.d.ts @@ -0,0 +1,3 @@ +export * as guard from './guard'; +export { JSONObject, JSONValue } from './types'; +export * as crypto from './verify'; diff --git a/deps/npm/node_modules/tuf-js/dist/utils/index.js 
b/deps/npm/node_modules/@tufjs/models/dist/utils/index.js index 604696a305..872aae2804 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/index.js +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/index.js @@ -23,9 +23,6 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.types = exports.signer = exports.json = exports.guard = exports.config = void 0; -exports.config = __importStar(require("./config")); +exports.crypto = exports.guard = void 0; exports.guard = __importStar(require("./guard")); -exports.json = __importStar(require("./json")); -exports.signer = __importStar(require("./signer")); -exports.types = __importStar(require("./types")); +exports.crypto = __importStar(require("./verify")); diff --git a/deps/npm/node_modules/tuf-js/dist/utils/json.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/json.d.ts index ecddbee17c..ecddbee17c 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/json.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/json.d.ts diff --git a/deps/npm/node_modules/tuf-js/dist/utils/json.js b/deps/npm/node_modules/@tufjs/models/dist/utils/json.js index 30f82ea4c3..30f82ea4c3 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/json.js +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/json.js diff --git a/deps/npm/node_modules/tuf-js/dist/utils/key.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/key.d.ts index 7b631281a3..7b631281a3 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/key.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/key.d.ts diff --git a/deps/npm/node_modules/tuf-js/dist/utils/key.js b/deps/npm/node_modules/@tufjs/models/dist/utils/key.js index 1f795ba1a2..1f795ba1a2 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/key.js +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/key.js diff --git a/deps/npm/node_modules/tuf-js/dist/utils/oid.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/oid.d.ts index f20456a978..f20456a978 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/oid.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/oid.d.ts diff --git a/deps/npm/node_modules/tuf-js/dist/utils/oid.js b/deps/npm/node_modules/@tufjs/models/dist/utils/oid.js index e1bb7af5e5..e1bb7af5e5 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/oid.js +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/oid.js diff --git a/deps/npm/node_modules/tuf-js/dist/utils/types.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/types.d.ts index 24319ddf7b..dd3964ec57 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/types.d.ts +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/types.d.ts @@ -1,9 +1,3 @@ -export declare enum MetadataKind { - Root = "root", - Timestamp = "timestamp", - Snapshot = "snapshot", - Targets = "targets" -} export type JSONObject = { [key: string]: JSONValue; }; diff --git a/deps/npm/node_modules/@tufjs/models/dist/utils/types.js b/deps/npm/node_modules/@tufjs/models/dist/utils/types.js new file mode 100644 index 0000000000..c8ad2e549b --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/deps/npm/node_modules/tuf-js/dist/utils/signer.d.ts b/deps/npm/node_modules/@tufjs/models/dist/utils/verify.d.ts index 376ef113c4..376ef113c4 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/signer.d.ts +++ 
b/deps/npm/node_modules/@tufjs/models/dist/utils/verify.d.ts diff --git a/deps/npm/node_modules/tuf-js/dist/utils/signer.js b/deps/npm/node_modules/@tufjs/models/dist/utils/verify.js index d3b2e7515d..d3b2e7515d 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/signer.js +++ b/deps/npm/node_modules/@tufjs/models/dist/utils/verify.js diff --git a/deps/npm/node_modules/@tufjs/models/package.json b/deps/npm/node_modules/@tufjs/models/package.json new file mode 100644 index 0000000000..f3746c2704 --- /dev/null +++ b/deps/npm/node_modules/@tufjs/models/package.json @@ -0,0 +1,41 @@ +{ + "name": "@tufjs/models", + "version": "1.0.0", + "description": "TUF metadata models", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --build", + "clean": "rm -rf dist && rm tsconfig.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/packages/models#readme", + "devDependencies": { + "@types/minimatch": "^5.1.2", + "@types/node": "^18.14.1", + "typescript": "^4.9.5" + }, + "dependencies": { + "minimatch": "^6.1.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/deps/npm/node_modules/agentkeepalive/package.json b/deps/npm/node_modules/agentkeepalive/package.json index efa561d2c6..3115fee69a 100644 --- a/deps/npm/node_modules/agentkeepalive/package.json +++ b/deps/npm/node_modules/agentkeepalive/package.json @@ -1,6 +1,6 @@ { "name": "agentkeepalive", - "version": "4.2.1", + "version": "4.3.0", "description": "Missing keepalive http.Agent", "main": "index.js", "browser": "browser.js", @@ -11,12 +11,12 @@ "lib" ], "scripts": { + "contributor": "git-contributor", "test": "npm run lint && egg-bin test --full-trace", "test-local": "egg-bin test --full-trace", "cov": "cross-env DEBUG=agentkeepalive egg-bin cov --full-trace", "ci": "npm run lint && npm run cov", - "lint": "eslint lib test index.js", - "autod": "autod" + "lint": "eslint lib test index.js" }, "repository": { "type": "git", @@ -36,17 +36,16 @@ ], "dependencies": { "debug": "^4.1.0", - "depd": "^1.1.2", + "depd": "^2.0.0", "humanize-ms": "^1.2.1" }, "devDependencies": { - "autod": "^3.0.1", "coffee": "^5.3.0", "cross-env": "^6.0.3", "egg-bin": "^4.9.0", - "egg-ci": "^1.10.0", "eslint": "^5.7.0", "eslint-config-egg": "^7.1.0", + "git-contributor": "^2.0.0", "mm": "^2.4.1", "pedding": "^1.1.0", "typescript": "^3.8.3" @@ -54,13 +53,6 @@ "engines": { "node": ">= 8.0.0" }, - "ci": { - "type": "github", - "os": { - "github": "linux" - }, - "version": "8, 10, 12, 14, 16" - }, - "author": "fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)", + "author": "fengmk2 <fengmk2@gmail.com> (https://github.com/fengmk2)", "license": "MIT" } diff --git a/deps/npm/node_modules/depd/History.md b/deps/npm/node_modules/depd/History.md index 507ecb8de2..cd9ebaaa99 100644 --- a/deps/npm/node_modules/depd/History.md +++ b/deps/npm/node_modules/depd/History.md @@ -1,3 +1,10 @@ +2.0.0 / 2018-10-26 +================== + + * Drop support for Node.js 0.6 + * Replace internal `eval` usage with `Function` constructor + * Use instance methods on `process` to check for listeners + 1.1.2 / 2018-01-11 ================== diff --git 
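The depd 2.0.0 changelog above is entirely internal (dropping Node.js 0.6, removing `eval`, using `process` instance methods), so the deprecate API that agentkeepalive relies on is unchanged across its `^1.1.2` to `^2.0.0` bump. For context, the documented usage pattern (module and function names here are illustrative):

```js
const depd = require('depd')
const deprecate = depd('my-module') // namespace shown in warnings

function oldApi () {
  deprecate('oldApi() is deprecated, use newApi() instead')
  return newApi()
}

function newApi () {
  return 42
}

// Instead of stderr output, deprecations can be observed programmatically:
process.on('deprecation', (err) => {
  console.log('caught:', err.message, 'namespace:', err.namespace)
})

oldApi()
```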
a/deps/npm/node_modules/depd/LICENSE b/deps/npm/node_modules/depd/LICENSE index 84441fbb57..248de7af2b 100644 --- a/deps/npm/node_modules/depd/LICENSE +++ b/deps/npm/node_modules/depd/LICENSE @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2014-2017 Douglas Christopher Wilson +Copyright (c) 2014-2018 Douglas Christopher Wilson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/deps/npm/node_modules/depd/index.js b/deps/npm/node_modules/depd/index.js index d758d3c8f5..1bf2fcfdef 100644 --- a/deps/npm/node_modules/depd/index.js +++ b/deps/npm/node_modules/depd/index.js @@ -1,6 +1,6 @@ /*! * depd - * Copyright(c) 2014-2017 Douglas Christopher Wilson + * Copyright(c) 2014-2018 Douglas Christopher Wilson * MIT Licensed */ @@ -8,8 +8,6 @@ * Module dependencies. */ -var callSiteToString = require('./lib/compat').callSiteToString -var eventListenerCount = require('./lib/compat').eventListenerCount var relative = require('path').relative /** @@ -92,7 +90,7 @@ function createStackString (stack) { } for (var i = 0; i < stack.length; i++) { - str += '\n at ' + callSiteToString(stack[i]) + str += '\n at ' + stack[i].toString() } return str @@ -129,11 +127,30 @@ function depd (namespace) { } /** + * Determine if event emitter has listeners of a given type. + * + * The way to do this check is done three different ways in Node.js >= 0.8 + * so this consolidates them into a minimal set using instance methods. + * + * @param {EventEmitter} emitter + * @param {string} type + * @returns {boolean} + * @private + */ + +function eehaslisteners (emitter, type) { + var count = typeof emitter.listenerCount !== 'function' + ? emitter.listeners(type).length + : emitter.listenerCount(type) + + return count > 0 +} + +/** * Determine if namespace is ignored. 
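The new `eehaslisteners` helper above collapses the old lazy compat shim into a single runtime feature test: use `emitter.listenerCount()` where it exists, otherwise count the `listeners()` array. The same test in isolation (emitter and event name are illustrative):

```js
const EventEmitter = require('events')

function eehaslisteners (emitter, type) {
  // listenerCount() is an instance method on modern Node.js; older
  // releases only expose the listeners array, hence the fallback.
  const count = typeof emitter.listenerCount !== 'function'
    ? emitter.listeners(type).length
    : emitter.listenerCount(type)
  return count > 0
}

const ee = new EventEmitter()
console.log(eehaslisteners(ee, 'deprecation')) // false
ee.on('deprecation', () => {})
console.log(eehaslisteners(ee, 'deprecation')) // true
```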
*/ function isignored (namespace) { - /* istanbul ignore next: tested in a child processs */ if (process.noDeprecation) { // --no-deprecation support return true @@ -150,7 +167,6 @@ function isignored (namespace) { */ function istraced (namespace) { - /* istanbul ignore next: tested in a child processs */ if (process.traceDeprecation) { // --trace-deprecation support return true @@ -167,7 +183,7 @@ function istraced (namespace) { */ function log (message, site) { - var haslisteners = eventListenerCount(process, 'deprecation') !== 0 + var haslisteners = eehaslisteners(process, 'deprecation') // abort early if no destination if (!haslisteners && this._ignored) { @@ -310,7 +326,7 @@ function formatPlain (msg, caller, stack) { // add stack trace if (this._traced) { for (var i = 0; i < stack.length; i++) { - formatted += '\n at ' + callSiteToString(stack[i]) + formatted += '\n at ' + stack[i].toString() } return formatted @@ -335,7 +351,7 @@ function formatColor (msg, caller, stack) { // add stack trace if (this._traced) { for (var i = 0; i < stack.length; i++) { - formatted += '\n \x1b[36mat ' + callSiteToString(stack[i]) + '\x1b[39m' // cyan + formatted += '\n \x1b[36mat ' + stack[i].toString() + '\x1b[39m' // cyan } return formatted @@ -400,18 +416,18 @@ function wrapfunction (fn, message) { } var args = createArgumentsString(fn.length) - var deprecate = this // eslint-disable-line no-unused-vars var stack = getStack() var site = callSiteLocation(stack[1]) site.name = fn.name - // eslint-disable-next-line no-eval - var deprecatedfn = eval('(function (' + args + ') {\n' + + // eslint-disable-next-line no-new-func + var deprecatedfn = new Function('fn', 'log', 'deprecate', 'message', 'site', '"use strict"\n' + + 'return function (' + args + ') {' + 'log.call(deprecate, message, site)\n' + 'return fn.apply(this, arguments)\n' + - '})') + '}')(fn, log, this, message, site) return deprecatedfn } diff --git a/deps/npm/node_modules/depd/lib/compat/callsite-tostring.js b/deps/npm/node_modules/depd/lib/compat/callsite-tostring.js deleted file mode 100644 index 73186dc644..0000000000 --- a/deps/npm/node_modules/depd/lib/compat/callsite-tostring.js +++ /dev/null @@ -1,103 +0,0 @@ -/*! - * depd - * Copyright(c) 2014 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module exports. - */ - -module.exports = callSiteToString - -/** - * Format a CallSite file location to a string. - */ - -function callSiteFileLocation (callSite) { - var fileName - var fileLocation = '' - - if (callSite.isNative()) { - fileLocation = 'native' - } else if (callSite.isEval()) { - fileName = callSite.getScriptNameOrSourceURL() - if (!fileName) { - fileLocation = callSite.getEvalOrigin() - } - } else { - fileName = callSite.getFileName() - } - - if (fileName) { - fileLocation += fileName - - var lineNumber = callSite.getLineNumber() - if (lineNumber != null) { - fileLocation += ':' + lineNumber - - var columnNumber = callSite.getColumnNumber() - if (columnNumber) { - fileLocation += ':' + columnNumber - } - } - } - - return fileLocation || 'unknown source' -} - -/** - * Format a CallSite to a string. 
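The `wrapfunction` hunk above trades `eval` for the `Function` constructor while still minting a wrapper whose `length` matches the wrapped function. A standalone sketch of the technique, with depd's `createArgumentsString` inlined as an assumption about its output:

```js
function wrapfunction (fn, onCall) {
  // stand-in for depd's createArgumentsString(fn.length): "arg0, arg1, ..."
  const args = Array.from({ length: fn.length }, (_, i) => 'arg' + i).join(', ')

  // eslint-disable-next-line no-new-func
  return new Function('fn', 'onCall',
    '"use strict"\n' +
    'return function (' + args + ') {\n' +
    '  onCall()\n' +
    '  return fn.apply(this, arguments)\n' +
    '}')(fn, onCall)
}

const wrapped = wrapfunction(function add (a, b) { return a + b }, () => console.log('deprecated'))
console.log(wrapped.length) // 2 — arity preserved, unlike a (...args) wrapper
console.log(wrapped(1, 2))  // logs 'deprecated', then 3
```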
- */ - -function callSiteToString (callSite) { - var addSuffix = true - var fileLocation = callSiteFileLocation(callSite) - var functionName = callSite.getFunctionName() - var isConstructor = callSite.isConstructor() - var isMethodCall = !(callSite.isToplevel() || isConstructor) - var line = '' - - if (isMethodCall) { - var methodName = callSite.getMethodName() - var typeName = getConstructorName(callSite) - - if (functionName) { - if (typeName && functionName.indexOf(typeName) !== 0) { - line += typeName + '.' - } - - line += functionName - - if (methodName && functionName.lastIndexOf('.' + methodName) !== functionName.length - methodName.length - 1) { - line += ' [as ' + methodName + ']' - } - } else { - line += typeName + '.' + (methodName || '<anonymous>') - } - } else if (isConstructor) { - line += 'new ' + (functionName || '<anonymous>') - } else if (functionName) { - line += functionName - } else { - addSuffix = false - line += fileLocation - } - - if (addSuffix) { - line += ' (' + fileLocation + ')' - } - - return line -} - -/** - * Get constructor name of reviver. - */ - -function getConstructorName (obj) { - var receiver = obj.receiver - return (receiver.constructor && receiver.constructor.name) || null -} diff --git a/deps/npm/node_modules/depd/lib/compat/event-listener-count.js b/deps/npm/node_modules/depd/lib/compat/event-listener-count.js deleted file mode 100644 index 3a8925d136..0000000000 --- a/deps/npm/node_modules/depd/lib/compat/event-listener-count.js +++ /dev/null @@ -1,22 +0,0 @@ -/*! - * depd - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module exports. - * @public - */ - -module.exports = eventListenerCount - -/** - * Get the count of listeners on an event emitter of a specific type. - */ - -function eventListenerCount (emitter, type) { - return emitter.listeners(type).length -} diff --git a/deps/npm/node_modules/depd/lib/compat/index.js b/deps/npm/node_modules/depd/lib/compat/index.js deleted file mode 100644 index 955b3336b2..0000000000 --- a/deps/npm/node_modules/depd/lib/compat/index.js +++ /dev/null @@ -1,79 +0,0 @@ -/*! - * depd - * Copyright(c) 2014-2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module dependencies. - * @private - */ - -var EventEmitter = require('events').EventEmitter - -/** - * Module exports. - * @public - */ - -lazyProperty(module.exports, 'callSiteToString', function callSiteToString () { - var limit = Error.stackTraceLimit - var obj = {} - var prep = Error.prepareStackTrace - - function prepareObjectStackTrace (obj, stack) { - return stack - } - - Error.prepareStackTrace = prepareObjectStackTrace - Error.stackTraceLimit = 2 - - // capture the stack - Error.captureStackTrace(obj) - - // slice the stack - var stack = obj.stack.slice() - - Error.prepareStackTrace = prep - Error.stackTraceLimit = limit - - return stack[0].toString ? toString : require('./callsite-tostring') -}) - -lazyProperty(module.exports, 'eventListenerCount', function eventListenerCount () { - return EventEmitter.listenerCount || require('./event-listener-count') -}) - -/** - * Define a lazy property. 
- */ - -function lazyProperty (obj, prop, getter) { - function get () { - var val = getter() - - Object.defineProperty(obj, prop, { - configurable: true, - enumerable: true, - value: val - }) - - return val - } - - Object.defineProperty(obj, prop, { - configurable: true, - enumerable: true, - get: get - }) -} - -/** - * Call toString() on the obj - */ - -function toString (obj) { - return obj.toString() -} diff --git a/deps/npm/node_modules/depd/package.json b/deps/npm/node_modules/depd/package.json index 5e3c863216..3857e19918 100644 --- a/deps/npm/node_modules/depd/package.json +++ b/deps/npm/node_modules/depd/package.json @@ -1,7 +1,7 @@ { "name": "depd", "description": "Deprecate all the things", - "version": "1.1.2", + "version": "2.0.0", "author": "Douglas Christopher Wilson <doug@somethingdoug.com>", "license": "MIT", "keywords": [ @@ -13,13 +13,17 @@ "devDependencies": { "benchmark": "2.1.4", "beautify-benchmark": "0.2.4", - "eslint": "3.19.0", - "eslint-config-standard": "7.1.0", + "eslint": "5.7.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.14.0", "eslint-plugin-markdown": "1.0.0-beta.7", - "eslint-plugin-promise": "3.6.0", - "eslint-plugin-standard": "3.0.1", + "eslint-plugin-node": "7.0.1", + "eslint-plugin-promise": "4.0.1", + "eslint-plugin-standard": "4.0.0", "istanbul": "0.4.5", - "mocha": "~1.21.5" + "mocha": "5.2.0", + "safe-buffer": "5.1.2", + "uid-safe": "2.1.5" }, "files": [ "lib/", @@ -29,13 +33,13 @@ "Readme.md" ], "engines": { - "node": ">= 0.6" + "node": ">= 0.8" }, "scripts": { "bench": "node benchmark/index.js", "lint": "eslint --plugin markdown --ext js,md .", "test": "mocha --reporter spec --bail test/", - "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --no-exit test/", - "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot test/" + "test-ci": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter spec test/ && istanbul report lcovonly text-summary", + "test-cov": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter dot test/ && istanbul report lcov text-summary" } } diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index ae4cb8b21e..30f83a49e5 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -17,7 +17,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "nock": "^13.3.0", "tap": "^16.3.4" }, @@ -41,7 +41,7 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index cdd01c25c3..32f845e483 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "5.0.11", + "version": "5.0.13", "description": "The registry diff", "repository": { "type": "git", @@ -42,13 +42,13 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "tap": "^16.3.4" }, "dependencies": { - "@npmcli/arborist": "^6.2.3", + "@npmcli/arborist": "^6.2.5", "@npmcli/disparity-colors": "^3.0.0", - "@npmcli/installed-package-contents": "^2.0.0", + "@npmcli/installed-package-contents": "^2.0.2", "binary-extensions": "^2.2.0", "diff": "^5.1.0", "minimatch": "^6.1.6", @@ -58,7 +58,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index f13b16ed17..5e1d5643cd 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,6 +1,6 @@ { "name": "libnpmexec", - "version": "5.0.11", + "version": "5.0.13", "files": [ "bin/", "lib/" @@ -52,7 +52,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "bin-links": "^4.0.1", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", @@ -60,7 +60,7 @@ "tap": "^16.3.4" }, "dependencies": { - "@npmcli/arborist": "^6.2.3", + "@npmcli/arborist": "^6.2.5", "@npmcli/run-script": "^6.0.0", "chalk": "^4.1.0", "ci-info": "^3.7.1", @@ -75,7 +75,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" } } diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index 36ec66de75..dbb9d1f903 100644 --- a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "4.0.11", + "version": "4.0.13", "main": "lib/index.js", "files": [ "bin/", @@ -41,18 +41,18 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "tap": "^16.3.4" }, "dependencies": { - "@npmcli/arborist": "^6.2.3" + "@npmcli/arborist": "^6.2.5" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index 493b64359c..cd3fb93a34 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -35,7 +35,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "nock": "^13.3.0", "tap": "^16.3.4" }, @@ -44,7 +44,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index 97d957492e..28ed6b7a3c 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -28,7 +28,7 @@ ], "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "minipass": "^4.0.2", "nock": "^13.3.0", "tap": "^16.3.4" @@ -49,7 +49,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index ef256ad38d..23cd712a7a 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "5.0.11", + "version": "5.0.13", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -23,7 +23,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "nock": "^13.3.0", "spawk": "^1.7.1", "tap": "^16.3.4" @@ -36,7 +36,7 @@ "bugs": "https://github.com/npm/libnpmpack/issues", "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { - "@npmcli/arborist": "^6.2.3", + "@npmcli/arborist": "^6.2.5", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^10.1.0", "pacote": "^15.0.8" @@ -46,7 +46,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmpublish/lib/provenance.js b/deps/npm/node_modules/libnpmpublish/lib/provenance.js index d11d210478..1eb870da5f 100644 --- a/deps/npm/node_modules/libnpmpublish/lib/provenance.js +++ b/deps/npm/node_modules/libnpmpublish/lib/provenance.js @@ -4,39 +4,40 @@ const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json' const INTOTO_STATEMENT_TYPE = 'https://in-toto.io/Statement/v0.1' const SLSA_PREDICATE_TYPE = 'https://slsa.dev/provenance/v0.2' -const BUILDER_ID_PREFIX = 'https://github.com/npm/cli' +const BUILDER_ID = 'https://github.com/actions/runner' const BUILD_TYPE_PREFIX = 'https://github.com/npm/cli/gha' -const BUILD_TYPE_VERSION = 'v1' +const BUILD_TYPE_VERSION = 'v2' const generateProvenance = async (subject, opts) => { const { env } = process + /* istanbul ignore next - not covering missing env var case */ + const [workflowPath] = (env.GITHUB_WORKFLOW_REF || '') + .replace(env.GITHUB_REPOSITORY + '/', '') + .split('@') const payload = { _type: INTOTO_STATEMENT_TYPE, subject, predicateType: SLSA_PREDICATE_TYPE, predicate: { - buildType: `${BUILD_TYPE_PREFIX}@${BUILD_TYPE_VERSION}`, - builder: { id: `${BUILDER_ID_PREFIX}@${opts.npmVersion}` }, + buildType: `${BUILD_TYPE_PREFIX}/${BUILD_TYPE_VERSION}`, + builder: { id: BUILDER_ID }, invocation: { configSource: { uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`, digest: { sha1: env.GITHUB_SHA, }, - entryPoint: env.GITHUB_WORKFLOW_REF, + entryPoint: workflowPath, }, parameters: {}, environment: { - GITHUB_ACTOR_ID: env.GITHUB_ACTOR_ID, GITHUB_EVENT_NAME: env.GITHUB_EVENT_NAME, GITHUB_REF: env.GITHUB_REF, - GITHUB_REF_TYPE: env.GITHUB_REF_TYPE, GITHUB_REPOSITORY: env.GITHUB_REPOSITORY, GITHUB_REPOSITORY_ID: env.GITHUB_REPOSITORY_ID, GITHUB_REPOSITORY_OWNER_ID: env.GITHUB_REPOSITORY_OWNER_ID, GITHUB_RUN_ATTEMPT: env.GITHUB_RUN_ATTEMPT, GITHUB_RUN_ID: env.GITHUB_RUN_ID, - GITHUB_RUN_NUMBER: env.GITHUB_RUN_NUMBER, GITHUB_SHA: env.GITHUB_SHA, GITHUB_WORKFLOW_REF: env.GITHUB_WORKFLOW_REF, GITHUB_WORKFLOW_SHA: env.GITHUB_WORKFLOW_SHA, @@ -53,7 +54,7 @@ const generateProvenance = async (subject, opts) => { }, materials: [ { - uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}`, + uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`, digest: { sha1: env.GITHUB_SHA, }, diff --git a/deps/npm/node_modules/libnpmpublish/lib/publish.js b/deps/npm/node_modules/libnpmpublish/lib/publish.js index 353688a10e..25dedb2363 100644 --- a/deps/npm/node_modules/libnpmpublish/lib/publish.js +++ b/deps/npm/node_modules/libnpmpublish/lib/publish.js @@ -1,6 +1,7 @@ const { fixer } = require('normalize-package-data') const npmFetch = require('npm-registry-fetch') const npa = require('npm-package-arg') +const log = require('proc-log') const semver = require('semver') const { URL } = require('url') const ssri = require('ssri') @@ -8,6 +9,8 @@ const ciInfo = require('ci-info') const { generateProvenance } = require('./provenance') +const TLOG_BASE_URL = 'https://rekor.sigstore.dev/api/v1/log/entries' + const publish = async (manifest, tarballData, opts) => { if (manifest.private) { throw Object.assign( @@ -141,15 +144,23 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => { digest: { sha512: integrity.sha512[0].hexDigest() }, } - // Ensure that we're running in GHA and an OIDC token is available, - // 
currently the only supported build environment - if (ciInfo.name !== 'GitHub Actions' || !process.env.ACTIONS_ID_TOKEN_REQUEST_URL) { + // Ensure that we're running in GHA, currently the only supported build environment + if (ciInfo.name !== 'GitHub Actions') { throw Object.assign( new Error('Automatic provenance generation not supported outside of GitHub Actions'), { code: 'EUSAGE' } ) } + // Ensure that the GHA OIDC token is available + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL) { + throw Object.assign( + /* eslint-disable-next-line max-len */ + new Error('Provenance generation in GitHub Actions requires "write" access to the "id-token" permission'), + { code: 'EUSAGE' } + ) + } + const visibility = await npmFetch.json(`${registry}/-/package/${spec.escapedName}/visibility`, opts) if (!visibility.public && opts.provenance === true && opts.access !== 'public') { @@ -161,6 +172,16 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => { } const provenanceBundle = await generateProvenance([subject], opts) + /* eslint-disable-next-line max-len */ + log.notice('publish', 'Signed provenance statement with source and build information from GitHub Actions') + + const tlogEntry = provenanceBundle?.verificationMaterial?.tlogEntries[0] + /* istanbul ignore else */ + if (tlogEntry) { + const logUrl = `${TLOG_BASE_URL}?logIndex=${tlogEntry.logIndex}` + log.notice('publish', `Provenance statement published to transparency log: ${logUrl}`) + } + const serializedBundle = JSON.stringify(provenanceBundle) root._attachments[provenanceBundleName] = { content_type: provenanceBundle.mediaType, diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index 1b6a53eae6..7c04d2adb9 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "7.1.0", + "version": "7.1.2", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -25,7 +25,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/mock-registry": "^1.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "lodash.clonedeep": "^4.5.0", "nock": "^13.3.0", "tap": "^16.3.4" @@ -42,6 +42,7 @@ "normalize-package-data": "^5.0.0", "npm-package-arg": "^10.1.0", "npm-registry-fetch": "^14.0.3", + "proc-log": "^3.0.0", "semver": "^7.3.7", "sigstore": "^1.0.0", "ssri": "^10.0.1" @@ -51,7 +52,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index 51e1d0adf9..8ccd77541e 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -26,7 +26,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "nock": "^13.3.0", "tap": "^16.3.4" }, @@ -45,7 +45,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
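Two derivations in the libnpmpublish hunks above are worth unpacking: the SLSA `entryPoint` is now the workflow path stripped out of `GITHUB_WORKFLOW_REF`, and the new notice builds a Rekor transparency-log URL from the bundle's log index. A sketch with illustrative values:

```js
// Illustrative environment, shaped like GitHub Actions provides it:
const env = {
  GITHUB_WORKFLOW_REF: 'npm/cli/.github/workflows/release.yml@refs/heads/main',
  GITHUB_REPOSITORY: 'npm/cli',
}

// entryPoint derivation from the provenance.js hunk:
const [workflowPath] = (env.GITHUB_WORKFLOW_REF || '')
  .replace(env.GITHUB_REPOSITORY + '/', '')
  .split('@')
console.log(workflowPath) // '.github/workflows/release.yml'

// transparency-log URL from the publish.js hunk (logIndex value is made up):
const TLOG_BASE_URL = 'https://rekor.sigstore.dev/api/v1/log/entries'
const tlogEntry = { logIndex: 12345678 }
console.log(`${TLOG_BASE_URL}?logIndex=${tlogEntry.logIndex}`)
// https://rekor.sigstore.dev/api/v1/log/entries?logIndex=12345678
```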
Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 4d98dc9dc5..333647b127 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -16,7 +16,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "nock": "^13.3.0", "tap": "^16.3.4" }, @@ -39,7 +39,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" }, "tap": { diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index 8fce14cebf..2e80f8c3c1 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -32,7 +32,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "require-inject": "^1.4.4", "tap": "^16.3.4" }, @@ -48,7 +48,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "../../scripts/template-oss/index.js" } } diff --git a/deps/npm/node_modules/lru-cache/index.d.ts b/deps/npm/node_modules/lru-cache/index.d.ts index e8af7c4de8..b58395e04a 100644 --- a/deps/npm/node_modules/lru-cache/index.d.ts +++ b/deps/npm/node_modules/lru-cache/index.d.ts @@ -112,7 +112,7 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { * `cache.set(key, undefined)`. Use {@link has} to determine whether a key is * present in the cache at all. */ - public get(key: K, options?: LRUCache.GetOptions): V | undefined + public get(key: K, options?: LRUCache.GetOptions<V>): V | undefined /** * Like {@link get} but doesn't update recency or delete stale items. @@ -129,7 +129,7 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { * Will not update item age unless {@link updateAgeOnHas} is set in the * options or constructor. */ - public has(key: K, options?: LRUCache.HasOptions): boolean + public has(key: K, options?: LRUCache.HasOptions<V>): boolean /** * Deletes a key out of the cache. @@ -158,7 +158,7 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { key: K, cache: this ) => boolean | undefined | void, - options?: LRUCache.GetOptions + options?: LRUCache.GetOptions<V> ): V | undefined /** @@ -271,6 +271,20 @@ declare class LRUCache<K, V> implements Iterable<[K, V]> { public prune(): boolean /** + * Make an asynchronous cached fetch using the {@link fetchMethod} function. + * + * If multiple fetches for the same key are issued, then they will all be + * coalesced into a single call to fetchMethod. + * + * Note that this means that handling options such as + * {@link allowStaleOnFetchAbort}, {@link signal}, and + * {@link allowStaleOnFetchRejection} will be determined by the FIRST fetch() + * call for a given key. + * + * This is a known (fixable) shortcoming which will be addresed on when + * someone complains about it, as the fix would involve added complexity and + * may not be worth the costs for this edge case. 
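The doc comment above spells out the coalescing contract: concurrent `fetch()` calls for one key share a single `fetchMethod` dispatch, with the first caller's options winning. A runnable sketch:

```js
const LRUCache = require('lru-cache')

let dispatches = 0
const cache = new LRUCache({
  max: 100,
  fetchMethod: async (key) => {
    dispatches++ // one dispatch per key, no matter how many callers wait on it
    return key.toUpperCase()
  },
})

async function main () {
  const [a, b] = await Promise.all([cache.fetch('x'), cache.fetch('x')])
  console.log(a, b, dispatches) // 'X' 'X' 1
}
main()
```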
+ * * since: 7.6.0 */ public fetch( @@ -521,6 +535,8 @@ declare namespace LRUCache { * Set to true to suppress the deletion of stale data when a * {@link fetchMethod} throws an error or returns a rejected promise * + * This may be overridden in the {@link fetchMethod}. + * * @default false * @since 7.10.0 */ @@ -533,12 +549,60 @@ declare namespace LRUCache { * ONLY be returned in the case that the fetch fails, not any other * times. * + * This may be overridden in the {@link fetchMethod}. + * * @default false * @since 7.16.0 */ allowStaleOnFetchRejection?: boolean /** + * + * Set to true to ignore the `abort` event emitted by the `AbortSignal` + * object passed to {@link fetchMethod}, and still cache the + * resulting resolution value, as long as it is not `undefined`. + * + * When used on its own, this means aborted {@link fetch} calls are not + * immediately resolved or rejected when they are aborted, and instead take + * the full time to await. + * + * When used with {@link allowStaleOnFetchAbort}, aborted {@link fetch} + * calls will resolve immediately to their stale cached value or + * `undefined`, and will continue to process and eventually update the + * cache when they resolve, as long as the resulting value is not + * `undefined`, thus supporting a "return stale on timeout while + * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal. + * + * **Note**: regardless of this setting, an `abort` event _is still emitted + * on the `AbortSignal` object_, so may result in invalid results when + * passed to other underlying APIs that use AbortSignals. + * + * This may be overridden in the {@link fetchMethod} or the call to + * {@link fetch}. + * + * @default false + * @since 7.17.0 + */ + ignoreFetchAbort?: boolean + + /** + * Set to true to return a stale value from the cache when the + * `AbortSignal` passed to the {@link fetchMethod} dispatches an `'abort'` + * event, whether user-triggered, or due to internal cache behavior. + * + * Unless {@link ignoreFetchAbort} is also set, the underlying + * {@link fetchMethod} will still be considered canceled, and its return + * value will be ignored and not cached. + * + * This may be overridden in the {@link fetchMethod} or the call to + * {@link fetch}. + * + * @default false + * @since 7.17.0 + */ + allowStaleOnFetchAbort?: boolean + + /** * Set to any value in the constructor or {@link fetch} options to * pass arbitrary data to the {@link fetchMethod} in the {@link context} * options field. @@ -585,24 +649,27 @@ declare namespace LRUCache { start?: LRUMilliseconds noDisposeOnSet?: boolean noUpdateTTL?: boolean + status?: Status<V> } /** * options which override the options set in the LRUCAche constructor * when calling {@link has}. */ - interface HasOptions { + interface HasOptions<V> { updateAgeOnHas?: boolean + status: Status<V> } /** * options which override the options set in the LRUCache constructor * when calling {@link get}. */ - interface GetOptions { + interface GetOptions<V> { allowStale?: boolean updateAgeOnGet?: boolean noDeleteOnStaleGet?: boolean + status?: Status<V> } /** @@ -618,8 +685,8 @@ declare namespace LRUCache { * * May be mutated by the {@link fetchMethod} to affect the behavior of the * resulting {@link set} operation on resolution, or in the case of - * {@link noDeleteOnFetchRejection} and {@link allowStaleOnFetchRejection}, - * the handling of failure. + * {@link noDeleteOnFetchRejection}, {@link ignoreFetchAbort}, and + * {@link allowStaleOnFetchRejection}, the handling of failure. 
*/ interface FetcherFetchOptions<K, V> { allowStale?: boolean @@ -632,6 +699,139 @@ declare namespace LRUCache { noUpdateTTL?: boolean noDeleteOnFetchRejection?: boolean allowStaleOnFetchRejection?: boolean + ignoreFetchAbort?: boolean + allowStaleOnFetchAbort?: boolean + status?: Status<V> + } + + /** + * Status object that may be passed to {@link fetch}, {@link get}, + * {@link set}, and {@link has}. + */ + interface Status<V> { + /** + * The status of a set() operation. + * + * - add: the item was not found in the cache, and was added + * - update: the item was in the cache, with the same value provided + * - replace: the item was in the cache, and replaced + * - miss: the item was not added to the cache for some reason + */ + set?: 'add' | 'update' | 'replace' | 'miss' + + /** + * the ttl stored for the item, or undefined if ttls are not used. + */ + ttl?: LRUMilliseconds + + /** + * the start time for the item, or undefined if ttls are not used. + */ + start?: LRUMilliseconds + + /** + * The timestamp used for TTL calculation + */ + now?: LRUMilliseconds + + /** + * the remaining ttl for the item, or undefined if ttls are not used. + */ + remainingTTL?: LRUMilliseconds + + /** + * The calculated size for the item, if sizes are used. + */ + size?: LRUSize + + /** + * A flag indicating that the item was not stored, due to exceeding the + * {@link maxEntrySize} + */ + maxEntrySizeExceeded?: true + + /** + * The old value, specified in the case of `set:'update'` or + * `set:'replace'` + */ + oldValue?: V + + /** + * The results of a {@link has} operation + * + * - hit: the item was found in the cache + * - stale: the item was found in the cache, but is stale + * - miss: the item was not found in the cache + */ + has?: 'hit' | 'stale' | 'miss' + + /** + * The status of a {@link fetch} operation. + * Note that this can change as the underlying fetch() moves through + * various states. + * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' + + /** + * The {@link fetchMethod} was called + */ + fetchDispatched?: true + + /** + * The cached value was updated after a successful call to fetchMethod + */ + fetchUpdated?: true + + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link fetchMethod}, or the reason for an AbortSignal. + */ + fetchError?: Error + + /** + * The fetch received an abort signal + */ + fetchAborted?: true + + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue. + */ + fetchAbortIgnored?: true + + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true + + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true + + /** + * The status of a {@link get} operation. + * + * - fetching: The item is currently being fetched. If a previous value is + * present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. + * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss' + + /** + * A fetch or get operation returned a stale value. 
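`ignoreFetchAbort` and `allowStaleOnFetchAbort`, documented above, combine into the "return stale on timeout while refreshing" pattern the docs describe. A sketch; `slowLookup` is a hypothetical loader, and `AbortSignal.timeout()` assumes a recent Node.js:

```js
const LRUCache = require('lru-cache')

// Hypothetical slow loader; a well-behaved one would honor its AbortSignal.
const slowLookup = (key, { signal } = {}) =>
  new Promise((resolve) => setTimeout(() => resolve(key.toUpperCase()), 200))

const cache = new LRUCache({
  max: 100,
  ttl: 60_000,
  allowStale: true,              // stale values may be served at all
  ignoreFetchAbort: true,        // let the aborted fetch run to completion...
  allowStaleOnFetchAbort: true,  // ...while callers get the stale value right away
  fetchMethod: (key, staleValue, { signal }) => slowLookup(key, { signal }),
})

// With a stale entry cached, this resolves after ~50ms with the stale value;
// the 200ms refresh keeps running and updates the cache when it settles.
cache.fetch('key', { signal: AbortSignal.timeout(50) }).then(console.log)
```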
+ */ + returnedStale?: true } /** @@ -645,6 +845,8 @@ declare namespace LRUCache { interface FetchOptions<K, V> extends FetcherFetchOptions<K, V> { forceRefresh?: boolean fetchContext?: any + signal?: AbortSignal + status?: Status<V> } interface FetcherOptions<K, V> { diff --git a/deps/npm/node_modules/lru-cache/index.js b/deps/npm/node_modules/lru-cache/index.js index f4be3476d4..48e99fe5e5 100644 --- a/deps/npm/node_modules/lru-cache/index.js +++ b/deps/npm/node_modules/lru-cache/index.js @@ -18,7 +18,8 @@ const AC = hasAbortController this.signal = new AS() } abort(reason = new Error('This operation was aborted')) { - this.signal.reason = reason + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true this.signal.dispatchEvent({ type: 'abort', target: this.signal, @@ -168,6 +169,8 @@ class LRUCache { noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, } = options // deprecated options, don't trigger a warning for getting them if @@ -238,6 +241,8 @@ class LRUCache { this.noUpdateTTL = !!noUpdateTTL this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort // NB: maxEntrySize is set to maxSize if it's set if (this.maxEntrySize !== 0) { @@ -331,6 +336,15 @@ class LRUCache { this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 } + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + // debounce calls to perf.now() to 1s so we're not hitting // that costly call repeatedly. 
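This is where the new `Status` object from the typings gets wired in: `statusTTL` and the widened `addItemSize` fill in TTL and size fields on whatever `status` object the caller passes. Observing a call, sketched:

```js
const LRUCache = require('lru-cache')
const cache = new LRUCache({ max: 10, ttl: 60_000 })

const status = {}
cache.set('a', 1, { status })
console.log(status.set) // 'add' the first time; 'replace' or 'update' afterwards

cache.has('a', { status })
console.log(status.has) // 'hit' (with ttl/start/now filled in by statusTTL)

cache.get('a', { status })
console.log(status.get) // 'hit' | 'stale' | 'miss'
```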
let cachedNow = 0 @@ -372,6 +386,7 @@ class LRUCache { } } updateItemAge(_index) {} + statusTTL(_status, _index) {} setItemTTL(_index, _ttl, _start) {} isStale(_index) { return false @@ -411,7 +426,7 @@ class LRUCache { } return size } - this.addItemSize = (index, size) => { + this.addItemSize = (index, size, status) => { this.sizes[index] = size if (this.maxSize) { const maxSize = this.maxSize - this.sizes[index] @@ -420,6 +435,10 @@ class LRUCache { } } this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } } } removeItemSize(_index) {} @@ -469,19 +488,30 @@ class LRUCache { } isValidIndex(index) { - return this.keyMap.get(this.keyList[index]) === index + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) } *entries() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } } *rentries() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } @@ -489,14 +519,20 @@ class LRUCache { *keys() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } } *rkeys() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } @@ -504,14 +540,20 @@ class LRUCache { *values() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } } *rvalues() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } @@ -521,9 +563,14 @@ class LRUCache { return this.entries() } - find(fn, getOptions = {}) { + find(fn, getOptions) { for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { return this.get(this.keyList[i], getOptions) } } @@ -531,13 +578,23 @@ class LRUCache { forEach(fn, thisp = this) { for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } rforEach(fn, thisp = this) { for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } @@ -608,12 +665,17 @@ class LRUCache { size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + status, } = {} ) { size = this.requireSize(k, v, size, sizeCalculation) // if the item doesn't fit, don't do anything // NB: maxEntrySize set to maxSize by default if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } // have to delete, in case a background fetch is there already. // in non-async cases, this is a no-op this.delete(k) @@ -630,7 +692,10 @@ class LRUCache { this.prev[index] = this.tail this.tail = index this.size++ - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } noUpdateTTL = false } else { // update @@ -649,7 +714,17 @@ class LRUCache { } this.removeItemSize(index) this.valList[index] = v - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' } } if (ttl !== 0 && this.ttl === 0 && !this.ttls) { @@ -658,6 +733,7 @@ class LRUCache { if (!noUpdateTTL) { this.setItemTTL(index, ttl, start) } + this.statusTTL(status, index) if (this.disposeAfter) { while (this.disposed.length) { this.disposeAfter(...this.disposed.shift()) @@ -713,15 +789,22 @@ class LRUCache { return head } - has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) { + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { const index = this.keyMap.get(k) if (index !== undefined) { if (!this.isStale(index)) { if (updateAgeOnHas) { this.updateItemAge(index) } + if (status) status.has = 'hit' + this.statusTTL(status, index) return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) } + } else if (status) { + status.has = 'miss' } return false } @@ -742,51 +825,109 @@ class LRUCache { return v } const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } const fetchOpts = { signal: ac.signal, options, context, } - const cb = v => { - if (!ac.signal.aborted) { - this.set(k, v, fetchOpts.options) - return v - } else { - return eb(ac.signal.reason) + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } } + return v } const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { 
aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection if (this.valList[index] === p) { // if we allow stale on fetch rejections, then we need to ensure that // the stale value is not removed from the cache when the fetch fails. - const noDelete = - options.noDeleteOnFetchRejection || - options.allowStaleOnFetchRejection const del = !noDelete || p.__staleWhileFetching === undefined if (del) { this.delete(k) - } else { + } else if (!allowStaleAborted) { // still replace the *promise* with the stale value, // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. this.valList[index] = p.__staleWhileFetching } } - if (options.allowStaleOnFetchRejection) { + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } return p.__staleWhileFetching } else if (p.__returned === p) { throw er } } const pcall = (res, rej) => { - ac.signal.addEventListener('abort', () => res()) - this.fetchMethod(k, v, fetchOpts).then(res, rej) + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) } + if (options.status) options.status.fetchDispatched = true const p = new Promise(pcall).then(cb, eb) p.__abortController = ac p.__staleWhileFetching = v p.__returned = null if (index === undefined) { - this.set(k, p, fetchOpts.options) + // internal, don't expose status. + this.set(k, p, { ...fetchOpts.options, status: undefined }) index = this.keyMap.get(k) } else { this.valList[index] = p @@ -825,16 +966,21 @@ class LRUCache { // fetch exclusive options noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, fetchContext = this.fetchContext, forceRefresh = false, + status, signal, } = {} ) { if (!this.fetchMethod) { + if (status) status.fetch = 'get' return this.get(k, { allowStale, updateAgeOnGet, noDeleteOnStaleGet, + status, }) } @@ -849,38 +995,53 @@ class LRUCache { noUpdateTTL, noDeleteOnFetchRejection, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, signal, } let index = this.keyMap.get(k) if (index === undefined) { + if (status) status.fetch = 'miss' const p = this.backgroundFetch(k, index, options, fetchContext) return (p.__returned = p) } else { // in cache, maybe already fetching const v = this.valList[index] if (this.isBackgroundFetch(v)) { - return allowStale && v.__staleWhileFetching !== undefined - ? v.__staleWhileFetching - : (v.__returned = v) + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? 
v.__staleWhileFetching : (v.__returned = v) } // if we force a refresh, that means do NOT serve the cached value, // unless we are already in the process of refreshing the cache. - if (!forceRefresh && !this.isStale(index)) { + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' this.moveToTail(index) if (updateAgeOnGet) { this.updateItemAge(index) } + this.statusTTL(status, index) return v } // ok, it is stale or a forced refresh, and not already fetching. // refresh the cache. const p = this.backgroundFetch(k, index, options, fetchContext) - return allowStale && p.__staleWhileFetching !== undefined - ? p.__staleWhileFetching - : (p.__returned = p) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p) } } @@ -890,28 +1051,39 @@ class LRUCache { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, } = {} ) { const index = this.keyMap.get(k) if (index !== undefined) { const value = this.valList[index] const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) if (this.isStale(index)) { + if (status) status.get = 'stale' // delete only if not an in-flight background fetch if (!fetching) { if (!noDeleteOnStaleGet) { this.delete(k) } + if (status) status.returnedStale = allowStale return allowStale ? value : undefined } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } return allowStale ? value.__staleWhileFetching : undefined } } else { + if (status) status.get = 'hit' // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching, - // so we just return undefined + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. 
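The `get()` change above closes a small gap: an entry that is fresh but mid-`fetch()` because of `forceRefresh: true` now returns its previous value (the promise's `__staleWhileFetching`) instead of `undefined`. Sketched against a cache with a `fetchMethod`, as in the earlier examples:

```js
async function refreshInBackground (cache, key) {
  // Dispatch fetchMethod even though the entry is present and not stale:
  const pending = cache.fetch(key, { forceRefresh: true })

  // While the refresh is in flight, get() now serves the previous value
  // instead of undefined:
  console.log(cache.get(key))

  return pending // resolves to the refreshed value
}
```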
if (fetching) { - return undefined + return value.__staleWhileFetching } this.moveToTail(index) if (updateAgeOnGet) { @@ -919,6 +1091,8 @@ class LRUCache { } return value } + } else if (status) { + status.get = 'miss' } } diff --git a/deps/npm/node_modules/lru-cache/index.mjs b/deps/npm/node_modules/lru-cache/index.mjs index e69e77fbb3..4a0b4813ec 100644 --- a/deps/npm/node_modules/lru-cache/index.mjs +++ b/deps/npm/node_modules/lru-cache/index.mjs @@ -18,7 +18,8 @@ const AC = hasAbortController this.signal = new AS() } abort(reason = new Error('This operation was aborted')) { - this.signal.reason = reason + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true this.signal.dispatchEvent({ type: 'abort', target: this.signal, @@ -168,6 +169,8 @@ class LRUCache { noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, } = options // deprecated options, don't trigger a warning for getting them if @@ -238,6 +241,8 @@ class LRUCache { this.noUpdateTTL = !!noUpdateTTL this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort // NB: maxEntrySize is set to maxSize if it's set if (this.maxEntrySize !== 0) { @@ -331,6 +336,15 @@ class LRUCache { this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 } + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + // debounce calls to perf.now() to 1s so we're not hitting // that costly call repeatedly. 
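The index.mjs hunks replay the CommonJS changes for the ESM build, starting with the `AbortController` polyfill fix: `abort()` now preserves the first reason and actually sets `signal.aborted` before dispatching. Platform AbortControllers already behave this way, sketched for comparison:

```js
const ac = new AbortController()
ac.signal.addEventListener('abort', () => {
  console.log('aborted with:', ac.signal.reason.message)
})
ac.abort(new Error('first'))
ac.abort(new Error('second')) // no-op: the first reason sticks
console.log(ac.signal.aborted) // true — the old polyfill never set this flag
```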
let cachedNow = 0 @@ -372,6 +386,7 @@ class LRUCache { } } updateItemAge(_index) {} + statusTTL(_status, _index) {} setItemTTL(_index, _ttl, _start) {} isStale(_index) { return false @@ -411,7 +426,7 @@ class LRUCache { } return size } - this.addItemSize = (index, size) => { + this.addItemSize = (index, size, status) => { this.sizes[index] = size if (this.maxSize) { const maxSize = this.maxSize - this.sizes[index] @@ -420,6 +435,10 @@ class LRUCache { } } this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } } } removeItemSize(_index) {} @@ -469,19 +488,30 @@ class LRUCache { } isValidIndex(index) { - return this.keyMap.get(this.keyList[index]) === index + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) } *entries() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } } *rentries() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield [this.keyList[i], this.valList[i]] } } @@ -489,14 +519,20 @@ class LRUCache { *keys() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } } *rkeys() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.keyList[i] } } @@ -504,14 +540,20 @@ class LRUCache { *values() { for (const i of this.indexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } } *rvalues() { for (const i of this.rindexes()) { - if (!this.isBackgroundFetch(this.valList[i])) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { yield this.valList[i] } } @@ -521,9 +563,14 @@ class LRUCache { return this.entries() } - find(fn, getOptions = {}) { + find(fn, getOptions) { for (const i of this.indexes()) { - if (fn(this.valList[i], this.keyList[i], this)) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { return this.get(this.keyList[i], getOptions) } } @@ -531,13 +578,23 @@ class LRUCache { forEach(fn, thisp = this) { for (const i of this.indexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } rforEach(fn, thisp = this) { for (const i of this.rindexes()) { - fn.call(thisp, this.valList[i], this.keyList[i], this) + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) } } @@ -608,12 +665,17 @@ class LRUCache { size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + status, } = {} ) { size = this.requireSize(k, v, size, sizeCalculation) // if the item doesn't fit, don't do anything // NB: maxEntrySize set to maxSize by default if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } // have to delete, in case a background fetch is there already. // in non-async cases, this is a no-op this.delete(k) @@ -630,7 +692,10 @@ class LRUCache { this.prev[index] = this.tail this.tail = index this.size++ - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } noUpdateTTL = false } else { // update @@ -649,7 +714,17 @@ class LRUCache { } this.removeItemSize(index) this.valList[index] = v - this.addItemSize(index, size) + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' } } if (ttl !== 0 && this.ttl === 0 && !this.ttls) { @@ -658,6 +733,7 @@ class LRUCache { if (!noUpdateTTL) { this.setItemTTL(index, ttl, start) } + this.statusTTL(status, index) if (this.disposeAfter) { while (this.disposed.length) { this.disposeAfter(...this.disposed.shift()) @@ -713,15 +789,22 @@ class LRUCache { return head } - has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) { + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { const index = this.keyMap.get(k) if (index !== undefined) { if (!this.isStale(index)) { if (updateAgeOnHas) { this.updateItemAge(index) } + if (status) status.has = 'hit' + this.statusTTL(status, index) return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) } + } else if (status) { + status.has = 'miss' } return false } @@ -742,51 +825,109 @@ class LRUCache { return v } const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } const fetchOpts = { signal: ac.signal, options, context, } - const cb = v => { - if (!ac.signal.aborted) { - this.set(k, v, fetchOpts.options) - return v - } else { - return eb(ac.signal.reason) + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } } + return v } const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { 
aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection if (this.valList[index] === p) { // if we allow stale on fetch rejections, then we need to ensure that // the stale value is not removed from the cache when the fetch fails. - const noDelete = - options.noDeleteOnFetchRejection || - options.allowStaleOnFetchRejection const del = !noDelete || p.__staleWhileFetching === undefined if (del) { this.delete(k) - } else { + } else if (!allowStaleAborted) { // still replace the *promise* with the stale value, // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. this.valList[index] = p.__staleWhileFetching } } - if (options.allowStaleOnFetchRejection) { + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } return p.__staleWhileFetching } else if (p.__returned === p) { throw er } } const pcall = (res, rej) => { - ac.signal.addEventListener('abort', () => res()) - this.fetchMethod(k, v, fetchOpts).then(res, rej) + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) } + if (options.status) options.status.fetchDispatched = true const p = new Promise(pcall).then(cb, eb) p.__abortController = ac p.__staleWhileFetching = v p.__returned = null if (index === undefined) { - this.set(k, p, fetchOpts.options) + // internal, don't expose status. + this.set(k, p, { ...fetchOpts.options, status: undefined }) index = this.keyMap.get(k) } else { this.valList[index] = p @@ -825,16 +966,21 @@ class LRUCache { // fetch exclusive options noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, fetchContext = this.fetchContext, forceRefresh = false, + status, signal, } = {} ) { if (!this.fetchMethod) { + if (status) status.fetch = 'get' return this.get(k, { allowStale, updateAgeOnGet, noDeleteOnStaleGet, + status, }) } @@ -849,38 +995,53 @@ class LRUCache { noUpdateTTL, noDeleteOnFetchRejection, allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, signal, } let index = this.keyMap.get(k) if (index === undefined) { + if (status) status.fetch = 'miss' const p = this.backgroundFetch(k, index, options, fetchContext) return (p.__returned = p) } else { // in cache, maybe already fetching const v = this.valList[index] if (this.isBackgroundFetch(v)) { - return allowStale && v.__staleWhileFetching !== undefined - ? v.__staleWhileFetching - : (v.__returned = v) + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? 
v.__staleWhileFetching : (v.__returned = v) } // if we force a refresh, that means do NOT serve the cached value, // unless we are already in the process of refreshing the cache. - if (!forceRefresh && !this.isStale(index)) { + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' this.moveToTail(index) if (updateAgeOnGet) { this.updateItemAge(index) } + this.statusTTL(status, index) return v } // ok, it is stale or a forced refresh, and not already fetching. // refresh the cache. const p = this.backgroundFetch(k, index, options, fetchContext) - return allowStale && p.__staleWhileFetching !== undefined - ? p.__staleWhileFetching - : (p.__returned = p) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p) } } @@ -890,28 +1051,39 @@ class LRUCache { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, } = {} ) { const index = this.keyMap.get(k) if (index !== undefined) { const value = this.valList[index] const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) if (this.isStale(index)) { + if (status) status.get = 'stale' // delete only if not an in-flight background fetch if (!fetching) { if (!noDeleteOnStaleGet) { this.delete(k) } + if (status) status.returnedStale = allowStale return allowStale ? value : undefined } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } return allowStale ? value.__staleWhileFetching : undefined } } else { + if (status) status.get = 'hit' // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching, - // so we just return undefined + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. if (fetching) { - return undefined + return value.__staleWhileFetching } this.moveToTail(index) if (updateAgeOnGet) { @@ -919,6 +1091,8 @@ class LRUCache { } return value } + } else if (status) { + status.get = 'miss' } } diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json index fb90c93901..9684991727 100644 --- a/deps/npm/node_modules/lru-cache/package.json +++ b/deps/npm/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.16.2", + "version": "7.18.3", "author": "Isaac Z. 
Schlueter <i@izs.me>", "keywords": [ "mru", @@ -13,7 +13,7 @@ "build": "npm run prepare", "pretest": "npm run prepare", "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.mjs", + "prepare": "node ./scripts/transpile-to-esm.js", "size": "size-limit", "test": "tap", "snap": "tap", diff --git a/deps/npm/node_modules/minipass/index.d.ts b/deps/npm/node_modules/minipass/index.d.ts index 93a06eb357..539fbca570 100644 --- a/deps/npm/node_modules/minipass/index.d.ts +++ b/deps/npm/node_modules/minipass/index.d.ts @@ -26,21 +26,23 @@ declare namespace Minipass { type BufferOrString = Buffer | string - interface StringOptions { + interface SharedOptions { + async?: boolean + signal?: AbortSignal + } + + interface StringOptions extends SharedOptions { encoding: BufferEncoding objectMode?: boolean - async?: boolean } - interface BufferOptions { + interface BufferOptions extends SharedOptions { encoding?: null | 'buffer' objectMode?: boolean - async?: boolean } - interface ObjectModeOptions { + interface ObjectModeOptions extends SharedOptions { objectMode: true - async?: boolean } interface PipeOptions { @@ -70,6 +72,7 @@ declare class Minipass< readonly flowing: boolean readonly writable: boolean readonly readable: boolean + readonly aborted: boolean readonly paused: boolean readonly emittedEnd: boolean readonly destroyed: boolean diff --git a/deps/npm/node_modules/minipass/index.js b/deps/npm/node_modules/minipass/index.js index 5d45de8d39..97b23068c9 100644 --- a/deps/npm/node_modules/minipass/index.js +++ b/deps/npm/node_modules/minipass/index.js @@ -8,7 +8,8 @@ const proc = } const EE = require('events') const Stream = require('stream') -const SD = require('string_decoder').StringDecoder +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder const EOF = Symbol('EOF') const MAYBE_EMIT_END = Symbol('maybeEmitEnd') @@ -38,6 +39,9 @@ const EMITDATA = Symbol('emitData') const EMITEND = Symbol('emitEnd') const EMITEND2 = Symbol('emitEnd2') const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') const defer = fn => Promise.resolve().then(fn) @@ -93,7 +97,7 @@ class PipeProxyErrors extends Pipe { } } -module.exports = class Minipass extends Stream { +class Minipass extends Stream { constructor(options) { super() this[FLOWING] = false @@ -122,6 +126,14 @@ module.exports = class Minipass extends Stream { if (options && options.debugExposePipes === true) { Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } } get bufferLength() { @@ -168,7 +180,20 @@ module.exports = class Minipass extends Stream { this[ASYNC] = this[ASYNC] || !!a } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + write(chunk, encoding, cb) { + if (this[ABORTED]) return false if (this[EOF]) throw new Error('write after end') if (this[DESTROYED]) { @@ -342,21 +367,20 @@ module.exports = class Minipass extends Stream { } [BUFFERSHIFT]() { - if (this[BUFFER].length) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - } + if (this[OBJECTMODE]) 
this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length return this[BUFFER].shift() } [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') } [FLUSHCHUNK](chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false + this.emit('data', chunk) + return this.flowing } pipe(dest, opts) { @@ -437,7 +461,7 @@ module.exports = class Minipass extends Stream { if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) return else if (ev === 'data') { - return !data + return !this[OBJECTMODE] && !data ? false : this[ASYNC] ? defer(() => this[EMITDATA](data)) @@ -454,7 +478,10 @@ module.exports = class Minipass extends Stream { } else if (ev === 'error') { this[EMITTED_ERROR] = data super.emit(ERROR, data) - const ret = super.emit('error', data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false this[MAYBE_EMIT_END]() return ret } else if (ev === 'resume') { @@ -659,8 +686,12 @@ module.exports = class Minipass extends Stream { (s instanceof Minipass || s instanceof Stream || (s instanceof EE && - (typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function')))) // writable + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) ) } } + +module.exports = Minipass diff --git a/deps/npm/node_modules/minipass/index.mjs b/deps/npm/node_modules/minipass/index.mjs new file mode 100644 index 0000000000..a669403664 --- /dev/null +++ b/deps/npm/node_modules/minipass/index.mjs @@ -0,0 +1,697 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
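The `signal` wiring added to the Minipass constructor above gives the stream cooperative cancellation: aborting the signal emits 'abort' with the reason, destroys the stream, and makes further `write()` calls return false. A sketch, assuming the vendored minipass 4.2 and a Node version with a global AbortController:

```js
const Minipass = require('minipass')

const ac = new AbortController()
const mp = new Minipass({ signal: ac.signal })

mp.on('abort', reason => console.error('aborted:', reason))
mp.on('error', () => {}) // destroy(reason) also surfaces the reason as an error

mp.write('accepted while live')
ac.abort()
mp.write('silently dropped') // returns false once the stream is aborted
```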
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? 
stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +export default Minipass diff --git a/deps/npm/node_modules/minipass/package.json b/deps/npm/node_modules/minipass/package.json index 43051ad5b6..4a6246df54 100644 --- a/deps/npm/node_modules/minipass/package.json +++ b/deps/npm/node_modules/minipass/package.json @@ -1,12 +1,27 @@ { "name": "minipass", - "version": "4.0.3", + "version": "4.2.4", "description": "minimal implementation of a PassThrough stream", - "main": "index.js", - "types": "index.d.ts", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, "devDependencies": { "@types/node": "^17.0.41", "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", "prettier": "^2.6.2", "tap": "^16.2.0", "through2": "^2.0.3", @@ -15,6 +30,10 @@ "typescript": "^4.7.3" }, "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", "test": "tap", "preversion": "npm test", "postversion": "npm publish", @@ -34,7 +53,8 @@ "license": "ISC", "files": [ "index.d.ts", - "index.js" + "index.js", + "index.mjs" ], "tap": { "check-coverage": true diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js index 6752519225..c51c67d414 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_duplex.js @@ -18,66 +18,54 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. + // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototypal inheritance, this class // prototypally inherits from Readable, and then parasitically from // Writable. 
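The minipass package.json hunk above pairs the new index.mjs with a conditional `exports` map, so both module systems resolve to a native entry point. Under Node's standard resolution rules this should behave roughly as sketched:

```js
// CommonJS: the "require" condition resolves to the CJS build.
const Minipass = require('minipass') // -> ./index.js

// ESM: the "import" condition resolves to the new native module.
// import Minipass from 'minipass'   // -> ./index.mjs
```

Both conditions share `./index.d.ts` for types, per the map above.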
+ 'use strict'; -/*<replacement>*/ +/*<replacement>*/ var objectKeys = Object.keys || function (obj) { var keys = []; - - for (var key in obj) { - keys.push(key); - } - + for (var key in obj) keys.push(key); return keys; }; /*</replacement>*/ - module.exports = Duplex; - -var Readable = require('./_stream_readable'); - -var Writable = require('./_stream_writable'); - +const Readable = require('./_stream_readable'); +const Writable = require('./_stream_writable'); require('inherits')(Duplex, Readable); - { // Allow the keys array to be GC'ed. - var keys = objectKeys(Writable.prototype); - + const keys = objectKeys(Writable.prototype); for (var v = 0; v < keys.length; v++) { - var method = keys[v]; + const method = keys[v]; if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; } } - function Duplex(options) { if (!(this instanceof Duplex)) return new Duplex(options); Readable.call(this, options); Writable.call(this, options); this.allowHalfOpen = true; - if (options) { if (options.readable === false) this.readable = false; if (options.writable === false) this.writable = false; - if (options.allowHalfOpen === false) { this.allowHalfOpen = false; this.once('end', onend); } } } - Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { return this._writableState.highWaterMark; } }); @@ -95,44 +83,43 @@ Object.defineProperty(Duplex.prototype, 'writableLength', { // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { return this._writableState.length; } -}); // the no-half-open enforcer +}); +// the no-half-open enforcer function onend() { // If the writable side ended, then we're ok. - if (this._writableState.ended) return; // no more data can be written. - // But allow more writes to happen in this tick. + if (this._writableState.ended) return; + // no more data can be written. + // But allow more writes to happen in this tick. 
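The `allowHalfOpen` flag and the `onend` enforcer above decide whether ending the readable side of a Duplex automatically ends the writable side on the next tick. A sketch of the non-half-open behavior, assuming the readable-stream package this file belongs to:

```js
const { Duplex } = require('readable-stream')

const d = new Duplex({
  allowHalfOpen: false, // installs the once('end', onend) enforcer above
  read() {},
  write(chunk, enc, cb) { cb() },
})

d.on('finish', () => console.log('writable side ended automatically'))
d.push(null) // end the readable side...
d.resume()   // ...and consume it so 'end' (and then onend) can fire
```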
process.nextTick(onEndNT, this); } - function onEndNT(self) { self.end(); } - Object.defineProperty(Duplex.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { if (this._readableState === undefined || this._writableState === undefined) { return false; } - return this._readableState.destroyed && this._writableState.destroyed; }, - set: function set(value) { + set(value) { // we ignore the value if the stream // has not been initialized yet if (this._readableState === undefined || this._writableState === undefined) { return; - } // backward compatibility, the user is explicitly - // managing destroyed - + } + // backward compatibility, the user is explicitly + // managing destroyed this._readableState.destroyed = value; this._writableState.destroyed = value; } diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js index 32e7414c5a..38a1eaac80 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_passthrough.js @@ -18,22 +18,20 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. + // a passthrough stream. // basically just the most minimal sort of Transform stream. // Every written chunk gets output as-is. + 'use strict'; module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - +const Transform = require('./_stream_transform'); require('inherits')(PassThrough, Transform); - function PassThrough(options) { if (!(this instanceof PassThrough)) return new PassThrough(options); Transform.call(this, options); } - PassThrough.prototype._transform = function (chunk, encoding, cb) { cb(null, chunk); };
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js index 192d451488..8e3af6c9ed 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_readable.js @@ -18,49 +18,40 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. + 'use strict'; module.exports = Readable; -/*<replacement>*/ +/*<replacement>*/ var Duplex; /*</replacement>*/ Readable.ReadableState = ReadableState; -/*<replacement>*/ - -var EE = require('events').EventEmitter; +/*<replacement>*/ +const EE = require('events').EventEmitter; var EElistenerCount = function EElistenerCount(emitter, type) { return emitter.listeners(type).length; }; /*</replacement>*/ /*<replacement>*/ - - var Stream = require('./internal/streams/stream'); /*</replacement>*/ - -var Buffer = require('buffer').Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - +const Buffer = require('buffer').Buffer; +const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } - function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } -/*<replacement>*/ - - -var debugUtil = require('util'); - -var debug; +/*<replacement>*/ +const debugUtil = require('util'); +let debug; if (debugUtil && debugUtil.debuglog) { debug = debugUtil.debuglog('stream'); } else { @@ -68,60 +59,57 @@ if (debugUtil && debugUtil.debuglog) { } /*</replacement>*/ - -var BufferList = require('./internal/streams/buffer_list'); - -var destroyImpl = require('./internal/streams/destroy'); - -var _require = require('./internal/streams/state'), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = require('../errors').codes, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. - - -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; - +const BufferList = require('./internal/streams/buffer_list'); +const destroyImpl = require('./internal/streams/destroy'); +const _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +const _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. 
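PassThrough, shown a few hunks above, is the smallest possible Transform: its `_transform` just echoes each chunk back out. That makes it a convenient tap point in a pipeline, as in this sketch:

```js
const { PassThrough } = require('readable-stream')

const tap = new PassThrough()
tap.on('data', chunk => console.log('observed %d bytes', chunk.length))

// Identity transform in the middle of a pipeline:
process.stdin.pipe(tap).pipe(process.stdout)
```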
+let StringDecoder; +let createReadableStreamAsyncIterator; +let from; require('inherits')(Readable, Stream); - -var errorOrDestroy = destroyImpl.errorOrDestroy; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; - +const errorOrDestroy = destroyImpl.errorOrDestroy; +const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; function prependListener(emitter, event, fn) { // Sadly this is not cacheable as some libraries bundle their own // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any // userland ones. NEVER DO THIS. This is here only because this code needs // to continue to work with older versions of Node.js that do not include // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } - function ReadableState(options, stream, isDuplex) { Duplex = Duplex || require('./_stream_duplex'); - options = options || {}; // Duplex streams are both readable and writable, but share + options = options || {}; + + // Duplex streams are both readable and writable, but share // the same options object. // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream. // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to + // object stream flag. Used to make read(n) ignore n and to // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // A linked list is used to store data chunks instead of an array because the // linked list can remove elements from the beginning faster than // array.shift() - this.buffer = new BufferList(); this.length = 0; this.pipes = null; @@ -129,141 +117,136 @@ function ReadableState(options, stream, isDuplex) { this.flowing = null; this.ended = false; this.endEmitted = false; - this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted // immediately, or on a later tick. 
We set this to true at first, because // any actions that shouldn't happen until "later" should generally also // not happen before the first read call. + this.sync = true; - this.sync = true; // whenever we return null, then we set a flag to say + // whenever we return null, then we set a flag to say // that we're awaiting a 'readable' event emission. - this.needReadable = false; this.emittedReadable = false; this.readableListening = false; this.resumeScheduled = false; - this.paused = true; // Should close be emitted on destroy. Defaults to true. + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; - this.autoDestroy = !!options.autoDestroy; // has it been destroyed + // has it been destroyed + this.destroyed = false; - this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s - - this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + // if true, a maybeReadMore has been scheduled this.readingMore = false; this.decoder = null; this.encoding = null; - if (options.encoding) { if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; this.decoder = new StringDecoder(options.encoding); this.encoding = options.encoding; } } - function Readable(options) { Duplex = Duplex || require('./_stream_duplex'); - if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 + if (!(this instanceof Readable)) return new Readable(options); - var isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); // legacy + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + const isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + // legacy this.readable = true; - if (options) { if (typeof options.read === 'function') this._read = options.read; if (typeof options.destroy === 'function') this._destroy = options.destroy; } - Stream.call(this); } - Object.defineProperty(Readable.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { if (this._readableState === undefined) { return false; } - return this._readableState.destroyed; }, - set: function set(value) { + set(value) { // we ignore the value if the stream // has not been initialized yet if (!this._readableState) { return; - } // backward compatibility, the user is explicitly - // managing destroyed - + } + // backward compatibility, the user is explicitly + // 
managing destroyed this._readableState.destroyed = value; } }); Readable.prototype.destroy = destroyImpl.destroy; Readable.prototype._undestroy = destroyImpl.undestroy; - Readable.prototype._destroy = function (err, cb) { cb(err); -}; // Manually shove something into the read() buffer. +}; + +// Manually shove something into the read() buffer. // This returns true if the highWaterMark has not been hit yet, // similar to how Writable.write() returns true if you should // write() some more. - - Readable.prototype.push = function (chunk, encoding) { var state = this._readableState; var skipChunkCheck; - if (!state.objectMode) { if (typeof chunk === 'string') { encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { chunk = Buffer.from(chunk, encoding); encoding = ''; } - skipChunkCheck = true; } } else { skipChunkCheck = true; } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; // Unshift should *always* be something directly out of read() - +}; +// Unshift should *always* be something directly out of read() Readable.prototype.unshift = function (chunk) { return readableAddChunk(this, chunk, null, true, false); }; - function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { debug('readableAddChunk', chunk); var state = stream._readableState; - if (chunk === null) { state.reading = false; onEofChunk(stream, state); } else { var er; if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { errorOrDestroy(stream, er); } else if (state.objectMode || chunk && chunk.length > 0) { if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { chunk = _uint8ArrayToBuffer(chunk); } - if (addToFront) { if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); } else if (state.ended) { @@ -272,7 +255,6 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { return false; } else { state.reading = false; - if (state.decoder && !encoding) { chunk = state.decoder.write(chunk); if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); @@ -284,14 +266,13 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { state.reading = false; maybeReadMore(stream, state); } - } // We can push more data if we are below the highWaterMark. + } + + // We can push more data if we are below the highWaterMark. // Also, if we have no data yet, we can stand some more bytes. // This is to work around cases where hwm=0, such as the repl. - - return !state.ended && (state.length < state.highWaterMark || state.length === 0); } - function addChunk(stream, state, chunk, addToFront) { if (state.flowing && state.length === 0 && !state.sync) { state.awaitDrain = 0; @@ -302,50 +283,42 @@ function addChunk(stream, state, chunk, addToFront) { if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); if (state.needReadable) emitReadable(stream); } - maybeReadMore(stream, state); } - function chunkInvalid(state, chunk) { var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); } - return er; } - Readable.prototype.isPaused = function () { return this._readableState.flowing === false; -}; // backwards compatibility. - +}; +// backwards compatibility. 
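The `push()`/`readableAddChunk()` path above is the producer side of the interface: a `_read()` implementation pushes chunks until it pushes `null` for EOF, and `push()`'s return value signals back-pressure against the high-water mark. A sketch:

```js
const { Readable } = require('readable-stream')

let i = 0
const src = new Readable({
  read() {
    // push() returns false once the buffer reaches highWaterMark
    this.push(i < 3 ? `chunk ${i++}\n` : null) // null signals EOF
  },
})

src.on('data', c => process.stdout.write(c))
src.on('end', () => console.log('done'))
```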
Readable.prototype.setEncoding = function (enc) { if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - var decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 - - this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: - - var p = this._readableState.buffer.head; - var content = ''; - + const decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + let p = this._readableState.buffer.head; + let content = ''; while (p !== null) { content += decoder.write(p.data); p = p.next; } - this._readableState.buffer.clear(); - if (content !== '') this._readableState.buffer.push(content); this._readableState.length = content.length; return this; -}; // Don't raise the hwm > 1GB - - -var MAX_HWM = 0x40000000; +}; +// Don't raise the hwm > 1GB +const MAX_HWM = 0x40000000; function computeNewHighWaterMark(n) { if (n >= MAX_HWM) { // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. @@ -361,55 +334,54 @@ function computeNewHighWaterMark(n) { n |= n >>> 16; n++; } - return n; -} // This function is designed to be inlinable, so please take care when making -// changes to the function body. - +} +// This function is designed to be inlinable, so please take care when making +// changes to the function body. function howMuchToRead(n, state) { if (n <= 0 || state.length === 0 && state.ended) return 0; if (state.objectMode) return 1; - if (n !== n) { // Only flow one buffer at a time if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } // If we're asking for more than the current hwm, then raise the hwm. - - + } + // If we're asking for more than the current hwm, then raise the hwm. if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; // Don't have enough - + if (n <= state.length) return n; + // Don't have enough if (!state.ended) { state.needReadable = true; return 0; } - return state.length; -} // you can override either this method, or the async _read(n) below. - +} +// you can override either this method, or the async _read(n) below. Readable.prototype.read = function (n) { debug('read', n); n = parseInt(n, 10); var state = this._readableState; var nOrig = n; - if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we // already have a bunch of data in the buffer, then just trigger // the 'readable' event and move on. - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { debug('read: emitReadable', state.length, state.ended); if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); return null; } + n = howMuchToRead(n, state); - n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. - + // if we've ended, and we're now clear, then finish it up. 
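The bit-twiddling in `computeNewHighWaterMark()` above rounds an oversized `read(n)` request up to the next power of two, capped at `MAX_HWM` (1 GiB), before `howMuchToRead()` adopts it as the new high-water mark. The rounding, restated on its own for clarity:

```js
// Equivalent restatement of the rounding in computeNewHighWaterMark(),
// minus the 1 GiB cap; illustrative only.
function nextPowerOfTwo(n) {
  n--
  n |= n >>> 1
  n |= n >>> 2
  n |= n >>> 4
  n |= n >>> 8
  n |= n >>> 16
  return n + 1
}

console.log(nextPowerOfTwo(5))    // 8
console.log(nextPowerOfTwo(4096)) // 4096 (already a power of two)
```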
if (n === 0 && state.ended) { if (state.length === 0) endReadable(this); return null; - } // All the actual chunk generation logic needs to be + } + + // All the actual chunk generation logic needs to be // *below* the call to _read. The reason is that in certain // synthetic stream cases, such as passthrough streams, _read // may be a completely synchronous operation which may change @@ -430,40 +402,37 @@ Readable.prototype.read = function (n) { // 'readable' etc. // // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. - + // if we need a readable event, then we need to do some reading. var doRead = state.needReadable; - debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + debug('need readable', doRead); + // if we currently have less than the highWaterMark, then also read some if (state.length === 0 || state.length - n < state.highWaterMark) { doRead = true; debug('length less than watermark', doRead); - } // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - + } + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. if (state.ended || state.reading) { doRead = false; debug('reading or ended', doRead); } else if (doRead) { debug('do read'); state.reading = true; - state.sync = true; // if the length is currently zero, then we *need* a readable event. - - if (state.length === 0) state.needReadable = true; // call internal read method - + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method this._read(state.highWaterMark); - - state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); } - var ret; if (n > 0) ret = fromList(n, state);else ret = null; - if (ret === null) { state.needReadable = state.length <= state.highWaterMark; n = 0; @@ -471,34 +440,28 @@ Readable.prototype.read = function (n) { state.length -= n; state.awaitDrain = 0; } - if (state.length === 0) { // If we have nothing in the buffer, then we want to know // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + if (!state.ended) state.needReadable = true; + // If we tried to read() past the EOF, then emit end on the next tick. if (nOrig !== n && state.ended) endReadable(this); } - if (ret !== null) this.emit('data', ret); return ret; }; - function onEofChunk(stream, state) { debug('onEofChunk'); if (state.ended) return; - if (state.decoder) { var chunk = state.decoder.end(); - if (chunk && chunk.length) { state.buffer.push(chunk); state.length += state.objectMode ? 1 : chunk.length; } } - state.ended = true; - if (state.sync) { // if we are sync, wait until next tick to emit the data. // Otherwise we risk emitting data in the flow() @@ -507,61 +470,56 @@ function onEofChunk(stream, state) { } else { // emit 'readable' now to make sure it gets picked up. 
state.needReadable = false; - if (!state.emittedReadable) { state.emittedReadable = true; emitReadable_(stream); } } -} // Don't emit readable right away in sync mode, because this can trigger +} + +// Don't emit readable right away in sync mode, because this can trigger // another read() call => stack overflow. This way, it might trigger // a nextTick recursion warning, but that's not so bad. - - function emitReadable(stream) { var state = stream._readableState; debug('emitReadable', state.needReadable, state.emittedReadable); state.needReadable = false; - if (!state.emittedReadable) { debug('emitReadable', state.flowing); state.emittedReadable = true; process.nextTick(emitReadable_, stream); } } - function emitReadable_(stream) { var state = stream._readableState; debug('emitReadable_', state.destroyed, state.length, state.ended); - if (!state.destroyed && (state.length || state.ended)) { stream.emit('readable'); state.emittedReadable = false; - } // The stream needs another readable event if + } + + // The stream needs another readable event if // 1. It is not flowing, as the flow mechanism will take // care of it. // 2. It is not ended. // 3. It is below the highWaterMark, so we can schedule // another readable later. - - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; flow(stream); -} // at this point, the user has presumably seen the 'readable' event, +} + +// at this point, the user has presumably seen the 'readable' event, // and called read() to consume some data. that may have triggered // in turn another _read(n) call, in which case reading = true if // it's in progress. // However, if we're not ended, or reading, and the length < hwm, // then go ahead and try to read some more preemptively. - - function maybeReadMore(stream, state) { if (!state.readingMore) { state.readingMore = true; process.nextTick(maybeReadMore_, stream, state); } } - function maybeReadMore_(stream, state) { // Attempt to read more data if we should. // @@ -587,52 +545,45 @@ function maybeReadMore_(stream, state) { // read()s. The execution ends in this method again after the _read() ends // up calling push() with more data. while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - var len = state.length; + const len = state.length; debug('maybeReadMore read 0'); stream.read(0); - if (len === state.length) // didn't get any data, stop spinning. + if (len === state.length) + // didn't get any data, stop spinning. break; } - state.readingMore = false; -} // abstract method. to be overridden in specific implementation classes. +} + +// abstract method. to be overridden in specific implementation classes. // call cb(er, data) where data is <= n in length. // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. - - Readable.prototype._read = function (n) { errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); }; - Readable.prototype.pipe = function (dest, pipeOpts) { var src = this; var state = this._readableState; - switch (state.pipesCount) { case 0: state.pipes = dest; break; - case 1: state.pipes = [state.pipes, dest]; break; - default: state.pipes.push(dest); break; } - state.pipesCount += 1; debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; var endFn = doEnd ? 
onend : unpipe; if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { debug('onunpipe'); - if (readable === src) { if (unpipeInfo && unpipeInfo.hasUnpiped === false) { unpipeInfo.hasUnpiped = true; @@ -640,23 +591,21 @@ Readable.prototype.pipe = function (dest, pipeOpts) { } } } - function onend() { debug('onend'); dest.end(); - } // when the dest drains, it reduces the awaitDrain counter + } + + // when the dest drains, it reduces the awaitDrain counter // on the source. This would be more elegant with a .once() // handler in flow(), but adding and removing repeatedly is // too slow. - - var ondrain = pipeOnDrain(src); dest.on('drain', ondrain); var cleanedUp = false; - function cleanup() { - debug('cleanup'); // cleanup event handlers once the pipe is broken - + debug('cleanup'); + // cleanup event handlers once the pipe is broken dest.removeListener('close', onclose); dest.removeListener('finish', onfinish); dest.removeListener('drain', ondrain); @@ -665,22 +614,20 @@ Readable.prototype.pipe = function (dest, pipeOpts) { src.removeListener('end', onend); src.removeListener('end', unpipe); src.removeListener('data', ondata); - cleanedUp = true; // if the reader is waiting for a drain event from this + cleanedUp = true; + + // if the reader is waiting for a drain event from this // specific writer, then it would cause it to never start // flowing again. // So, if this is awaiting a drain, then we just call it now. // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); } - src.on('data', ondata); - function ondata(chunk) { debug('ondata'); var ret = dest.write(chunk); debug('dest.write', ret); - if (ret === false) { // If the user unpiped during `dest.write()`, it is possible // to get stuck in a permanently paused state if that write @@ -690,87 +637,84 @@ Readable.prototype.pipe = function (dest, pipeOpts) { debug('false write response, pause', state.awaitDrain); state.awaitDrain++; } - src.pause(); } - } // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - + } + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. function onerror(er) { debug('onerror', er); unpipe(); dest.removeListener('error', onerror); if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); - } // Make sure our error handler is attached before userland ones. - + } - prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + // Both close and finish should trigger unpipe, but only once. function onclose() { dest.removeListener('finish', onfinish); unpipe(); } - dest.once('close', onclose); - function onfinish() { debug('onfinish'); dest.removeListener('close', onclose); unpipe(); } - dest.once('finish', onfinish); - function unpipe() { debug('unpipe'); src.unpipe(dest); - } // tell the dest that it's being piped to - + } - dest.emit('pipe', src); // start the flow if it hasn't been started already. + // tell the dest that it's being piped to + dest.emit('pipe', src); + // start the flow if it hasn't been started already. 
if (!state.flowing) { debug('pipe resume'); src.resume(); } - return dest; }; - function pipeOnDrain(src) { return function pipeOnDrainFunctionResult() { var state = src._readableState; debug('pipeOnDrain', state.awaitDrain); if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { state.flowing = true; flow(src); } }; } - Readable.prototype.unpipe = function (dest) { var state = this._readableState; var unpipeInfo = { hasUnpiped: false - }; // if we're not piping anywhere, then do nothing. + }; - if (state.pipesCount === 0) return this; // just one destination. most common case. + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + // just one destination. most common case. if (state.pipesCount === 1) { // passed in one, but it's not the right one. if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; // got a match. + if (!dest) dest = state.pipes; + // got a match. state.pipes = null; state.pipesCount = 0; state.flowing = false; if (dest) dest.emit('unpipe', this, unpipeInfo); return this; - } // slow case. multiple pipe destinations. + } + // slow case. multiple pipe destinations. if (!dest) { // remove all. @@ -779,17 +723,13 @@ Readable.prototype.unpipe = function (dest) { state.pipes = null; state.pipesCount = 0; state.flowing = false; - - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - } - + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); return this; - } // try to find the right one. - + } + // try to find the right one. var index = indexOf(state.pipes, dest); if (index === -1) return this; state.pipes.splice(index, 1); @@ -797,19 +737,19 @@ Readable.prototype.unpipe = function (dest) { if (state.pipesCount === 1) state.pipes = state.pipes[0]; dest.emit('unpipe', this, unpipeInfo); return this; -}; // set up data events if they are asked for -// Ensure readable listeners eventually get something - +}; +// set up data events if they are asked for +// Ensure readable listeners eventually get something Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - var state = this._readableState; - + const res = Stream.prototype.on.call(this, ev, fn); + const state = this._readableState; if (ev === 'data') { // update readableListening so that resume() may be a no-op // a few lines down. This is needed to support once('readable'). 
- state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + state.readableListening = this.listenerCount('readable') > 0; + // Try start flowing on next tick if stream isn't explicitly paused if (state.flowing !== false) this.resume(); } else if (ev === 'readable') { if (!state.endEmitted && !state.readableListening) { @@ -817,7 +757,6 @@ Readable.prototype.on = function (ev, fn) { state.flowing = false; state.emittedReadable = false; debug('on readable', state.length, state.reading); - if (state.length) { emitReadable(this); } else if (!state.reading) { @@ -825,15 +764,11 @@ Readable.prototype.on = function (ev, fn) { } } } - return res; }; - Readable.prototype.addListener = Readable.prototype.on; - Readable.prototype.removeListener = function (ev, fn) { - var res = Stream.prototype.removeListener.call(this, ev, fn); - + const res = Stream.prototype.removeListener.call(this, ev, fn); if (ev === 'readable') { // We need to check if there is someone still listening to // readable and reset the state. However this needs to happen @@ -843,13 +778,10 @@ Readable.prototype.removeListener = function (ev, fn) { // effect. process.nextTick(updateReadableListening, this); } - return res; }; - Readable.prototype.removeAllListeners = function (ev) { - var res = Stream.prototype.removeAllListeners.apply(this, arguments); - + const res = Stream.prototype.removeAllListeners.apply(this, arguments); if (ev === 'readable' || ev === undefined) { // We need to check if there is someone still listening to // readable and reset the state. However this needs to happen @@ -859,121 +791,102 @@ Readable.prototype.removeAllListeners = function (ev) { // effect. process.nextTick(updateReadableListening, this); } - return res; }; - function updateReadableListening(self) { - var state = self._readableState; + const state = self._readableState; state.readableListening = self.listenerCount('readable') > 0; - if (state.resumeScheduled && !state.paused) { // flowing needs to be set to true now, otherwise // the upcoming resume will not flow. - state.flowing = true; // crude way to check if we should resume + state.flowing = true; + + // crude way to check if we should resume } else if (self.listenerCount('data') > 0) { self.resume(); } } - function nReadingNextTick(self) { debug('readable nexttick read 0'); self.read(0); -} // pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. - +} +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
Readable.prototype.resume = function () { var state = this._readableState; - if (!state.flowing) { - debug('resume'); // we flow only if there is no one listening + debug('resume'); + // we flow only if there is no one listening // for readable, but we still have to call // resume() - state.flowing = !state.readableListening; resume(this, state); } - state.paused = false; return this; }; - function resume(stream, state) { if (!state.resumeScheduled) { state.resumeScheduled = true; process.nextTick(resume_, stream, state); } } - function resume_(stream, state) { debug('resume', state.reading); - if (!state.reading) { stream.read(0); } - state.resumeScheduled = false; stream.emit('resume'); flow(stream); if (state.flowing && !state.reading) stream.read(0); } - Readable.prototype.pause = function () { debug('call pause flowing=%j', this._readableState.flowing); - if (this._readableState.flowing !== false) { debug('pause'); this._readableState.flowing = false; this.emit('pause'); } - this._readableState.paused = true; return this; }; - function flow(stream) { - var state = stream._readableState; + const state = stream._readableState; debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} - while (state.flowing && stream.read() !== null) { - ; - } -} // wrap an old-style stream as the async data source. +// wrap an old-style stream as the async data source. // This is *not* part of the readable stream interface. // It is an ugly unfortunate mess of history. - - Readable.prototype.wrap = function (stream) { - var _this = this; - var state = this._readableState; var paused = false; - stream.on('end', function () { + stream.on('end', () => { debug('wrapped end'); - if (state.decoder && !state.ended) { var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); + if (chunk && chunk.length) this.push(chunk); } - - _this.push(null); + this.push(null); }); - stream.on('data', function (chunk) { + stream.on('data', chunk => { debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + if (state.decoder) chunk = state.decoder.write(chunk); + // don't skip over falsy values in objectMode if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = _this.push(chunk); - + var ret = this.push(chunk); if (!ret) { paused = true; stream.pause(); } - }); // proxy all the other methods. - // important when wrapping filters and duplexes. + }); + // proxy all the other methods. + // important when wrapping filters and duplexes. for (var i in stream) { if (this[i] === undefined && typeof stream[i] === 'function') { this[i] = function methodWrap(method) { @@ -982,37 +895,32 @@ Readable.prototype.wrap = function (stream) { }; }(i); } - } // proxy certain important events. - + } + // proxy certain important events. for (var n = 0; n < kProxyEvents.length; n++) { stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } // when we try to consume some more bytes, simply unpause the - // underlying stream. - + } - this._read = function (n) { + // when we try to consume some more bytes, simply unpause the + // underlying stream. 
+ this._read = n => { debug('wrapped _read', n); - if (paused) { paused = false; stream.resume(); } }; - return this; }; - if (typeof Symbol === 'function') { Readable.prototype[Symbol.asyncIterator] = function () { if (createReadableStreamAsyncIterator === undefined) { createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); } - return createReadableStreamAsyncIterator(this); }; } - Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in @@ -1044,22 +952,24 @@ Object.defineProperty(Readable.prototype, 'readableFlowing', { this._readableState.flowing = state; } } -}); // exposed for testing purposes only. +}); +// exposed for testing purposes only. Readable._fromList = fromList; Object.defineProperty(Readable.prototype, 'readableLength', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function get() { + get() { return this._readableState.length; } -}); // Pluck off n bytes from an array of buffers. +}); + +// Pluck off n bytes from an array of buffers. // Length is the combined lengths of all the buffers in the list. // This function is designed to be inlinable, so please take care when making // changes to the function body. - function fromList(n, state) { // nothing buffered if (state.length === 0) return null; @@ -1074,51 +984,43 @@ function fromList(n, state) { } return ret; } - function endReadable(stream) { var state = stream._readableState; debug('endReadable', state.endEmitted); - if (!state.endEmitted) { state.ended = true; process.nextTick(endReadableNT, state, stream); } } - function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. + debug('endReadableNT', state.endEmitted, state.length); + // Check that we didn't get one last unshift. if (!state.endEmitted && state.length === 0) { state.endEmitted = true; stream.readable = false; stream.emit('end'); - if (state.autoDestroy) { // In case of duplex streams we need a way to detect // if the writable side is ready for autoDestroy as well - var wState = stream._writableState; - + const wState = stream._writableState; if (!wState || wState.autoDestroy && wState.finished) { stream.destroy(); } } } } - if (typeof Symbol === 'function') { Readable.from = function (iterable, opts) { if (from === undefined) { from = require('./internal/streams/from'); } - return from(Readable, iterable, opts); }; } - function indexOf(xs, x) { for (var i = 0, l = xs.length; i < l; i++) { if (xs[i] === x) return i; } - return -1; }
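The setEncoding() change at the top of this file is easiest to see from the consumer side. A minimal sketch, not part of the patch; it assumes the standalone readable-stream package is installed, though Node's built-in stream module behaves the same:

```js
// Illustrative sketch only -- not part of this diff.
// Assumes: `npm install readable-stream`.
const { Readable } = require('readable-stream');

const r = new Readable({ read() {} }); // no-op _read; we push manually
r.push(Buffer.from('hello, '));
r.push(Buffer.from('world'));

// Re-decodes the two already-buffered Buffers into one stored string,
// which is what the `while (p !== null)` loop in setEncoding() does.
r.setEncoding('utf8');
console.log(r.read()); // 'hello, world' -- a string, not a Buffer
```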
diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js
index 41a738c4e9..a2fcca2193 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_transform.js
@@ -18,6 +18,7 @@
 // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 // USE OR OTHER DEALINGS IN THE SOFTWARE.
+
 // a transform stream is a readable/writable stream where you do
 // something with the data.  Sometimes it's called a "filter",
 // but that's not a great name for it, since that implies a thing where
@@ -59,42 +60,36 @@
 // However, even in such a pathological case, only a single written chunk
 // would be consumed, and then the rest would wait (un-transformed) until
 // the results of the previous transformed chunk were consumed.
+
 'use strict';

 module.exports = Transform;
-
-var _require$codes = require('../errors').codes,
-    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
-    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
-    ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
-    ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
-
-var Duplex = require('./_stream_duplex');
-
+const _require$codes = require('../errors').codes,
+  ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
+  ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
+  ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
+  ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
+const Duplex = require('./_stream_duplex');
 require('inherits')(Transform, Duplex);
-
 function afterTransform(er, data) {
   var ts = this._transformState;
   ts.transforming = false;
   var cb = ts.writecb;
-
   if (cb === null) {
     return this.emit('error', new ERR_MULTIPLE_CALLBACK());
   }
-
   ts.writechunk = null;
   ts.writecb = null;
-  if (data != null) // single equals check for both `null` and `undefined`
+  if (data != null)
+    // single equals check for both `null` and `undefined`
     this.push(data);
   cb(er);
   var rs = this._readableState;
   rs.reading = false;
-
   if (rs.needReadable || rs.length < rs.highWaterMark) {
     this._read(rs.highWaterMark);
   }
 }
-
 function Transform(options) {
   if (!(this instanceof Transform)) return new Transform(options);
   Duplex.call(this, options);
@@ -105,39 +100,38 @@ function Transform(options) {
     writecb: null,
     writechunk: null,
     writeencoding: null
-  }; // start out asking for a readable event once data is transformed.
+  };
+
+  // start out asking for a readable event once data is transformed.
+  this._readableState.needReadable = true;

-  this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
+  // we have implemented the _read method, and done the other things
   // that Readable wants before the first _read call, so unset the
   // sync guard flag.
-
   this._readableState.sync = false;
-
   if (options) {
     if (typeof options.transform === 'function') this._transform = options.transform;
     if (typeof options.flush === 'function') this._flush = options.flush;
-  } // When the writable side finishes, then flush out anything remaining.
-
+  }
+  // When the writable side finishes, then flush out anything remaining.
   this.on('prefinish', prefinish);
 }
-
 function prefinish() {
-  var _this = this;
-
   if (typeof this._flush === 'function' && !this._readableState.destroyed) {
-    this._flush(function (er, data) {
-      done(_this, er, data);
+    this._flush((er, data) => {
+      done(this, er, data);
     });
   } else {
     done(this, null, null);
   }
 }
-
 Transform.prototype.push = function (chunk, encoding) {
   this._transformState.needTransform = false;
   return Duplex.prototype.push.call(this, chunk, encoding);
-}; // This is the part where you do stuff!
+};
+
+// This is the part where you do stuff!
 // override this function in implementation classes.
 // 'chunk' is an input chunk.
 //
@@ -147,33 +141,27 @@ Transform.prototype.push = function (chunk, encoding) {
 // Call `cb(err)` when you are done with this chunk.  If you pass
 // an error, then that'll put the hurt on the whole operation.  If you
 // never call cb(), then you'll never get another chunk.
-
-
 Transform.prototype._transform = function (chunk, encoding, cb) {
   cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
 };
-
 Transform.prototype._write = function (chunk, encoding, cb) {
   var ts = this._transformState;
   ts.writecb = cb;
   ts.writechunk = chunk;
   ts.writeencoding = encoding;
-
   if (!ts.transforming) {
     var rs = this._readableState;
     if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
   }
-}; // Doesn't matter what the args are here.
+};
+
+// Doesn't matter what the args are here.
 // _transform does all the work.
 // That we got here means that the readable side wants more data.
-
-
 Transform.prototype._read = function (n) {
   var ts = this._transformState;
-
   if (ts.writechunk !== null && !ts.transforming) {
     ts.transforming = true;
-
     this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
   } else {
     // mark that we need a transform, so that any data that comes in
@@ -181,20 +169,20 @@ Transform.prototype._read = function (n) {
     ts.needTransform = true;
   }
 };
-
 Transform.prototype._destroy = function (err, cb) {
-  Duplex.prototype._destroy.call(this, err, function (err2) {
+  Duplex.prototype._destroy.call(this, err, err2 => {
     cb(err2);
   });
 };
-
 function done(stream, er, data) {
   if (er) return stream.emit('error', er);
-  if (data != null) // single equals check for both `null` and `undefined`
-    stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
+  if (data != null)
+    // single equals check for both `null` and `undefined`
+    stream.push(data);
+
+  // TODO(BridgeAR): Write a test for these two error cases
   // if there's nothing in the write buffer, then that means
   // that nothing more will ever be provided
-
   if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
   if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
   return stream.push(null);
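The _transform contract documented in this file can be exercised with a small sketch (again assuming the standalone readable-stream package): push any output for the written chunk, then call cb exactly once; a second call is what afterTransform() rejects with ERR_MULTIPLE_CALLBACK.

```js
// Illustrative sketch only -- not part of this diff.
// Assumes: `npm install readable-stream`.
const { Transform } = require('readable-stream');

const upper = new Transform({
  transform(chunk, encoding, cb) {
    // Push the transformed data, then signal completion exactly once.
    this.push(chunk.toString().toUpperCase());
    cb();
  }
});

upper.on('data', d => console.log(d.toString())); // 'ABC'
upper.write('abc');
upper.end();
```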
diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js
index a2634d7c24..feece02279 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/_stream_writable.js
@@ -18,185 +18,188 @@
 // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 // USE OR OTHER DEALINGS IN THE SOFTWARE.
+
 // A bit simpler than readable streams.
 // Implement an async ._write(chunk, encoding, cb), and it'll handle all
 // the drain event emission and buffering.
+
 'use strict';

 module.exports = Writable;
-/* <replacement> */
+/* <replacement> */
 function WriteReq(chunk, encoding, cb) {
   this.chunk = chunk;
   this.encoding = encoding;
   this.callback = cb;
   this.next = null;
-} // It seems a linked list but it is not
-// there will be only 2 of these for each stream
-
+}
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
 function CorkedRequest(state) {
-  var _this = this;
-
   this.next = null;
   this.entry = null;
-
-  this.finish = function () {
-    onCorkedFinish(_this, state);
+  this.finish = () => {
+    onCorkedFinish(this, state);
   };
 }
 /* </replacement> */

 /*<replacement>*/
-
 var Duplex;
 /*</replacement>*/

 Writable.WritableState = WritableState;
-/*<replacement>*/

-var internalUtil = {
+/*<replacement>*/
+const internalUtil = {
   deprecate: require('util-deprecate')
 };
 /*</replacement>*/

 /*<replacement>*/
-
 var Stream = require('./internal/streams/stream');
 /*</replacement>*/
-
-var Buffer = require('buffer').Buffer;
-
-var OurUint8Array = global.Uint8Array || function () {};
-
+const Buffer = require('buffer').Buffer;
+const OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
 function _uint8ArrayToBuffer(chunk) {
   return Buffer.from(chunk);
 }
-
 function _isUint8Array(obj) {
   return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
 }
-
-var destroyImpl = require('./internal/streams/destroy');
-
-var _require = require('./internal/streams/state'),
-    getHighWaterMark = _require.getHighWaterMark;
-
-var _require$codes = require('../errors').codes,
-    ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
-    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
-    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
-    ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
-    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
-    ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
-    ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
-    ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
-
-var errorOrDestroy = destroyImpl.errorOrDestroy;
-
+const destroyImpl = require('./internal/streams/destroy');
+const _require = require('./internal/streams/state'),
+  getHighWaterMark = _require.getHighWaterMark;
+const _require$codes = require('../errors').codes,
+  ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
+  ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
+  ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
+  ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
+  ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
+  ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
+  ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
+  ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
+const errorOrDestroy = destroyImpl.errorOrDestroy;
 require('inherits')(Writable, Stream);
-
 function nop() {}
-
 function WritableState(options, stream, isDuplex) {
   Duplex = Duplex || require('./_stream_duplex');
-  options = options || {}; // Duplex streams are both readable and writable, but share
+  options = options || {};
+
+  // Duplex streams are both readable and writable, but share
   // the same options object.
   // However, some cases require setting options to different
   // values for the readable and the writable sides of the duplex stream,
   // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
+  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;

-  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
+  // object stream flag to indicate whether or not this stream
   // contains buffers or objects.
-
   this.objectMode = !!options.objectMode;
-  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
+  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+  // the point at which write() starts returning false
   // Note: 0 is a valid value, means that we always return false if
   // the entire buffer is not flushed immediately on write()
+  this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);

-  this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
+  // if _final has been called
+  this.finalCalled = false;

-  this.finalCalled = false; // drain event flag.
+  // drain event flag.
+  this.needDrain = false;
+  // at the start of calling end()
+  this.ending = false;
+  // when end() has been called, and returned
+  this.ended = false;
+  // when 'finish' is emitted
+  this.finished = false;

-  this.needDrain = false; // at the start of calling end()
+  // has it been destroyed
+  this.destroyed = false;

-  this.ending = false; // when end() has been called, and returned
-
-  this.ended = false; // when 'finish' is emitted
-
-  this.finished = false; // has it been destroyed
-
-  this.destroyed = false; // should we decode strings into buffers before passing to _write?
+  // should we decode strings into buffers before passing to _write?
   // this is here so that some node-core streams can optimize string
   // handling at a lower level.
-
   var noDecode = options.decodeStrings === false;
-  this.decodeStrings = !noDecode; // Crypto is kind of old and crusty.  Historically, its default string
+  this.decodeStrings = !noDecode;
+
+  // Crypto is kind of old and crusty.  Historically, its default string
   // encoding is 'binary' so we have to make this configurable.
   // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';

-  this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
+  // not an actual buffer we keep track of, but a measurement
   // of how much we're waiting to get pushed to some underlying
   // socket or file.
+  this.length = 0;

-  this.length = 0; // a flag to see when we're in the middle of a write.
+  // a flag to see when we're in the middle of a write.
+  this.writing = false;

-  this.writing = false; // when true all writes will be buffered until .uncork() call
+  // when true all writes will be buffered until .uncork() call
+  this.corked = 0;

-  this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
+  // a flag to be able to tell if the onwrite cb is called immediately,
   // or on a later tick.  We set this to true at first, because any
   // actions that shouldn't happen until "later" should generally also
   // not happen before the first write call.
+  this.sync = true;

-  this.sync = true; // a flag to know if we're processing previously buffered items, which
+  // a flag to know if we're processing previously buffered items, which
   // may call the _write() callback in the same tick, so that we don't
   // end up in an overlapped onwrite situation.
+  this.bufferProcessing = false;

-  this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
-
+  // the callback that's passed to _write(chunk,cb)
   this.onwrite = function (er) {
     onwrite(stream, er);
-  }; // the callback that the user supplies to write(chunk,encoding,cb)
-
+  };

-  this.writecb = null; // the amount that is being written when _write is called.
+  // the callback that the user supplies to write(chunk,encoding,cb)
+  this.writecb = null;

+  // the amount that is being written when _write is called.
   this.writelen = 0;
   this.bufferedRequest = null;
-  this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
+  this.lastBufferedRequest = null;
+
+  // number of pending user-supplied write callbacks
   // this must be 0 before 'finish' can be emitted
+  this.pendingcb = 0;

-  this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
+  // emit prefinish if the only thing we're waiting for is _write cbs
   // This is relevant for synchronous Transform streams
+  this.prefinished = false;

-  this.prefinished = false; // True if the error was already emitted and should not be thrown again
+  // True if the error was already emitted and should not be thrown again
+  this.errorEmitted = false;

-  this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
+  // Should close be emitted on destroy. Defaults to true.
+  this.emitClose = options.emitClose !== false;

-  this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
+  // Should .destroy() be called after 'finish' (and potentially 'end')
+  this.autoDestroy = !!options.autoDestroy;

-  this.autoDestroy = !!options.autoDestroy; // count buffered requests
+  // count buffered requests
+  this.bufferedRequestCount = 0;

-  this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
+  // allocate the first CorkedRequest, there is always
   // one allocated and free to use, and we maintain at most two
-
   this.corkedRequestsFree = new CorkedRequest(this);
 }
-
 WritableState.prototype.getBuffer = function getBuffer() {
   var current = this.bufferedRequest;
   var out = [];
-
   while (current) {
     out.push(current);
     current = current.next;
   }
-
   return out;
 };
-
 (function () {
   try {
     Object.defineProperty(WritableState.prototype, 'buffer', {
@@ -205,12 +208,11 @@ WritableState.prototype.getBuffer = function getBuffer() {
     }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
     });
   } catch (_) {}
-})(); // Test _writableState for inheritance to account for Duplex streams,
-// whose prototype chain only points to Readable.
-
+})();
+// Test _writableState for inheritance to account for Duplex streams,
+// whose prototype chain only points to Readable.
 var realHasInstance;
-
 if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
   realHasInstance = Function.prototype[Symbol.hasInstance];
   Object.defineProperty(Writable, Symbol.hasInstance, {
@@ -225,81 +227,73 @@ if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.protot
     return object instanceof this;
   };
 }
-
 function Writable(options) {
-  Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
+  Duplex = Duplex || require('./_stream_duplex');
+
+  // Writable ctor is applied to Duplexes, too.
   // `realHasInstance` is necessary because using plain `instanceof`
   // would return false, as no `_writableState` property is attached.
+
   // Trying to use the custom `instanceof` for Writable here will also break the
   // Node.js LazyTransform implementation, which has a non-trivial getter for
   // `_writableState` that would lead to infinite recursion.
+
   // Checking for a Stream.Duplex instance is faster here instead of inside
   // the WritableState constructor, at least with V8 6.5
-
-  var isDuplex = this instanceof Duplex;
+  const isDuplex = this instanceof Duplex;
   if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
-  this._writableState = new WritableState(options, this, isDuplex); // legacy.
+  this._writableState = new WritableState(options, this, isDuplex);

+  // legacy.
   this.writable = true;
-
   if (options) {
     if (typeof options.write === 'function') this._write = options.write;
     if (typeof options.writev === 'function') this._writev = options.writev;
     if (typeof options.destroy === 'function') this._destroy = options.destroy;
     if (typeof options.final === 'function') this._final = options.final;
   }
-
   Stream.call(this);
-} // Otherwise people can pipe Writable streams, which is just wrong.
-
+}
+// Otherwise people can pipe Writable streams, which is just wrong.
 Writable.prototype.pipe = function () {
   errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
 };
-
 function writeAfterEnd(stream, cb) {
-  var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
-
+  var er = new ERR_STREAM_WRITE_AFTER_END();
+  // TODO: defer error events consistently everywhere, not just the cb
   errorOrDestroy(stream, er);
   process.nextTick(cb, er);
-} // Checks that a user-supplied chunk is valid, especially for the particular
+}
+
+// Checks that a user-supplied chunk is valid, especially for the particular
 // mode the stream is in. Currently this means that `null` is never accepted
 // and undefined/non-string values are only allowed in object mode.
-
-
 function validChunk(stream, state, chunk, cb) {
   var er;
-
   if (chunk === null) {
     er = new ERR_STREAM_NULL_VALUES();
   } else if (typeof chunk !== 'string' && !state.objectMode) {
     er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
   }
-
   if (er) {
     errorOrDestroy(stream, er);
     process.nextTick(cb, er);
     return false;
   }
-
   return true;
 }
-
 Writable.prototype.write = function (chunk, encoding, cb) {
   var state = this._writableState;
   var ret = false;
-
   var isBuf = !state.objectMode && _isUint8Array(chunk);
-
   if (isBuf && !Buffer.isBuffer(chunk)) {
     chunk = _uint8ArrayToBuffer(chunk);
   }
-
   if (typeof encoding === 'function') {
     cb = encoding;
     encoding = null;
   }
-
   if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
   if (typeof cb !== 'function') cb = nop;
   if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
@@ -308,20 +302,16 @@ Writable.prototype.write = function (chunk, encoding, cb) {
   }
   return ret;
 };
-
 Writable.prototype.cork = function () {
   this._writableState.corked++;
 };
-
 Writable.prototype.uncork = function () {
   var state = this._writableState;
-
   if (state.corked) {
     state.corked--;
     if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
   }
 };
-
 Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
   // node::ParseEncoding() requires lower case.
   if (typeof encoding === 'string') encoding = encoding.toLowerCase();
@@ -329,7 +319,6 @@ Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
   this._writableState.defaultEncoding = encoding;
   return this;
 };
-
 Object.defineProperty(Writable.prototype, 'writableBuffer', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
@@ -339,15 +328,12 @@ Object.defineProperty(Writable.prototype, 'writableBuffer', {
     return this._writableState && this._writableState.getBuffer();
   }
 });
-
 function decodeChunk(state, chunk, encoding) {
   if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
     chunk = Buffer.from(chunk, encoding);
   }
-
   return chunk;
 }
-
 Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
@@ -356,51 +342,45 @@ Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
   get: function get() {
     return this._writableState.highWaterMark;
   }
-}); // if we're already writing something, then just put this
+});
+
+// if we're already writing something, then just put this
 // in the queue, and wait our turn.  Otherwise, call _write
 // If we return false, then we need a drain event, so set that flag.
-
 function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
   if (!isBuf) {
     var newChunk = decodeChunk(state, chunk, encoding);
-
     if (chunk !== newChunk) {
       isBuf = true;
       encoding = 'buffer';
       chunk = newChunk;
     }
   }
-
   var len = state.objectMode ? 1 : chunk.length;
   state.length += len;
-  var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
-
+  var ret = state.length < state.highWaterMark;
+  // we must ensure that previous needDrain will not be reset to false.
   if (!ret) state.needDrain = true;
-
   if (state.writing || state.corked) {
     var last = state.lastBufferedRequest;
     state.lastBufferedRequest = {
-      chunk: chunk,
-      encoding: encoding,
-      isBuf: isBuf,
+      chunk,
+      encoding,
+      isBuf,
       callback: cb,
       next: null
     };
-
     if (last) {
       last.next = state.lastBufferedRequest;
     } else {
       state.bufferedRequest = state.lastBufferedRequest;
     }
-
     state.bufferedRequestCount += 1;
   } else {
     doWrite(stream, state, false, len, chunk, encoding, cb);
   }
-
   return ret;
 }
-
 function doWrite(stream, state, writev, len, chunk, encoding, cb) {
   state.writelen = len;
   state.writecb = cb;
@@ -409,16 +389,14 @@ function doWrite(stream, state, writev, len, chunk, encoding, cb) {
   if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
   state.sync = false;
 }
-
 function onwriteError(stream, state, sync, er, cb) {
   --state.pendingcb;
-
   if (sync) {
     // defer the callback if we are being called synchronously
     // to avoid piling up things on the stack
-    process.nextTick(cb, er); // this can emit finish, and it will always happen
+    process.nextTick(cb, er);
+
+    // this can emit finish, and it will always happen
     // after error
-
     process.nextTick(finishMaybe, stream, state);
     stream._writableState.errorEmitted = true;
     errorOrDestroy(stream, er);
@@ -427,20 +405,18 @@ function onwriteError(stream, state, sync, er, cb) {
     // it is async
     cb(er);
     stream._writableState.errorEmitted = true;
-    errorOrDestroy(stream, er); // this can emit finish, but finish must
+    errorOrDestroy(stream, er);
+
+    // this can emit finish, but finish must
     // always follow error
-
     finishMaybe(stream, state);
   }
 }
-
 function onwriteStateUpdate(state) {
   state.writing = false;
   state.writecb = null;
   state.length -= state.writelen;
   state.writelen = 0;
 }
-
 function onwrite(stream, er) {
   var state = stream._writableState;
   var sync = state.sync;
@@ -450,11 +426,9 @@ function onwrite(stream, er) {
   if (er) onwriteError(stream, state, sync, er, cb);else {
     // Check if we're actually ready to finish, but don't emit yet
     var finished = needFinish(state) || stream.destroyed;
-
     if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
       clearBuffer(stream, state);
     }
-
     if (sync) {
       process.nextTick(afterWrite, stream, state, finished, cb);
     } else {
@@ -462,29 +436,27 @@ function onwrite(stream, er) {
     }
   }
 }
-
 function afterWrite(stream, state, finished, cb) {
   if (!finished) onwriteDrain(stream, state);
   state.pendingcb--;
   cb();
   finishMaybe(stream, state);
-} // Must force callback to be called on nextTick, so that we don't
+}
+
+// Must force callback to be called on nextTick, so that we don't
 // emit 'drain' before the write() consumer gets the 'false' return
 // value, and has a chance to attach a 'drain' listener.
-
-
 function onwriteDrain(stream, state) {
   if (state.length === 0 && state.needDrain) {
     state.needDrain = false;
     stream.emit('drain');
   }
-} // if there's something in the buffer waiting, then process it
-
+}
+// if there's something in the buffer waiting, then process it
 function clearBuffer(stream, state) {
   state.bufferProcessing = true;
   var entry = state.bufferedRequest;
-
   if (stream._writev && entry && entry.next) {
     // Fast case, write everything using _writev()
     var l = state.bufferedRequestCount;
@@ -493,28 +465,25 @@ function clearBuffer(stream, state) {
     holder.entry = entry;
     var count = 0;
     var allBuffers = true;
-
     while (entry) {
       buffer[count] = entry;
       if (!entry.isBuf) allBuffers = false;
       entry = entry.next;
       count += 1;
     }
-
     buffer.allBuffers = allBuffers;
-    doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
-    // as the hot path ends with doWrite
+    doWrite(stream, state, true, state.length, buffer, '', holder.finish);
+
+    // doWrite is almost always async, defer these to save a bit of time
+    // as the hot path ends with doWrite
     state.pendingcb++;
     state.lastBufferedRequest = null;
-
     if (holder.next) {
       state.corkedRequestsFree = holder.next;
       holder.next = null;
     } else {
       state.corkedRequestsFree = new CorkedRequest(state);
     }
-
     state.bufferedRequestCount = 0;
   } else {
     // Slow case, write chunks one-by-one
@@ -525,32 +494,26 @@ function clearBuffer(stream, state) {
       var len = state.objectMode ? 1 : chunk.length;
       doWrite(stream, state, false, len, chunk, encoding, cb);
       entry = entry.next;
-      state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
+      state.bufferedRequestCount--;
+
+      // if we didn't call the onwrite immediately, then
       // it means that we need to wait until it does.
       // also, that means that the chunk and cb are currently
       // being processed, so move the buffer counter past them.
-
       if (state.writing) {
         break;
       }
     }
-
     if (entry === null) state.lastBufferedRequest = null;
   }
-
   state.bufferedRequest = entry;
   state.bufferProcessing = false;
 }
-
 Writable.prototype._write = function (chunk, encoding, cb) {
   cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
 };
-
 Writable.prototype._writev = null;
-
 Writable.prototype.end = function (chunk, encoding, cb) {
   var state = this._writableState;
-
   if (typeof chunk === 'function') {
     cb = chunk;
     chunk = null;
@@ -559,47 +522,41 @@ Writable.prototype.end = function (chunk, encoding, cb) {
     cb = encoding;
     encoding = null;
   }
+  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);

-  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
-
+  // .end() fully uncorks
   if (state.corked) {
     state.corked = 1;
     this.uncork();
-  } // ignore unnecessary end() calls.
-
+  }
+  // ignore unnecessary end() calls.
   if (!state.ending) endWritable(this, state, cb);
   return this;
 };
-
 Object.defineProperty(Writable.prototype, 'writableLength', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
   // userland will fail
   enumerable: false,
-  get: function get() {
+  get() {
     return this._writableState.length;
   }
 });
-
 function needFinish(state) {
   return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
 }
-
 function callFinal(stream, state) {
-  stream._final(function (err) {
+  stream._final(err => {
     state.pendingcb--;
-
     if (err) {
       errorOrDestroy(stream, err);
     }
-
     state.prefinished = true;
     stream.emit('prefinish');
     finishMaybe(stream, state);
   });
 }
-
 function prefinish(stream, state) {
   if (!state.prefinished && !state.finalCalled) {
     if (typeof stream._final === 'function' && !state.destroyed) {
@@ -612,86 +569,72 @@ function prefinish(stream, state) {
     }
   }
 }
-
 function finishMaybe(stream, state) {
   var need = needFinish(state);
-
   if (need) {
     prefinish(stream, state);
-
     if (state.pendingcb === 0) {
       state.finished = true;
       stream.emit('finish');
-
       if (state.autoDestroy) {
         // In case of duplex streams we need a way to detect
         // if the readable side is ready for autoDestroy as well
-        var rState = stream._readableState;
-
+        const rState = stream._readableState;
         if (!rState || rState.autoDestroy && rState.endEmitted) {
           stream.destroy();
         }
       }
     }
   }
-
   return need;
 }
-
 function endWritable(stream, state, cb) {
   state.ending = true;
   finishMaybe(stream, state);
-
   if (cb) {
     if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
   }
-
   state.ended = true;
   stream.writable = false;
 }
-
 function onCorkedFinish(corkReq, state, err) {
   var entry = corkReq.entry;
   corkReq.entry = null;
-
   while (entry) {
     var cb = entry.callback;
     state.pendingcb--;
     cb(err);
     entry = entry.next;
-  } // reuse the free corkReq.
-
+  }
+  // reuse the free corkReq.
   state.corkedRequestsFree.next = corkReq;
 }
-
 Object.defineProperty(Writable.prototype, 'destroyed', {
   // making it explicit this property is not enumerable
   // because otherwise some prototype manipulation in
   // userland will fail
   enumerable: false,
-  get: function get() {
+  get() {
     if (this._writableState === undefined) {
       return false;
     }
-
     return this._writableState.destroyed;
   },
-  set: function set(value) {
+  set(value) {
     // we ignore the value if the stream
     // has not been initialized yet
     if (!this._writableState) {
       return;
-    } // backward compatibility, the user is explicitly
-    // managing destroyed
-
+    }
+    // backward compatibility, the user is explicitly
+    // managing destroyed
    this._writableState.destroyed = value;
   }
 });
 Writable.prototype.destroy = destroyImpl.destroy;
 Writable.prototype._undestroy = destroyImpl.undestroy;
-
 Writable.prototype._destroy = function (err, cb) {
   cb(err);
 };
\ No newline at end of file
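The backpressure rule implemented by writeOrBuffer() above is observable from user code: write() returns false once the buffered length reaches highWaterMark and needDrain is set, and onwriteDrain() emits 'drain' once the buffer empties. A sketch under the same assumption (standalone readable-stream package; Node's built-in stream module behaves identically):

```js
// Illustrative sketch only -- not part of this diff.
// Assumes: `npm install readable-stream`.
const { Writable } = require('readable-stream');

const w = new Writable({
  highWaterMark: 4, // bytes
  write(chunk, encoding, cb) {
    setImmediate(cb); // pretend the underlying sink completes asynchronously
  }
});

// false: state.length (4) is no longer < highWaterMark (4)
console.log(w.write('abcd'));
// fires after the pending write's cb runs and the buffer drains to 0
w.on('drain', () => console.log('drained'));
```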
diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js
index 9fb615a2f3..bcae6108c0 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/async_iterator.js
@@ -1,34 +1,26 @@
 'use strict';

-var _Object$setPrototypeO;
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-var finished = require('./end-of-stream');
-
-var kLastResolve = Symbol('lastResolve');
-var kLastReject = Symbol('lastReject');
-var kError = Symbol('error');
-var kEnded = Symbol('ended');
-var kLastPromise = Symbol('lastPromise');
-var kHandlePromise = Symbol('handlePromise');
-var kStream = Symbol('stream');
-
+const finished = require('./end-of-stream');
+const kLastResolve = Symbol('lastResolve');
+const kLastReject = Symbol('lastReject');
+const kError = Symbol('error');
+const kEnded = Symbol('ended');
+const kLastPromise = Symbol('lastPromise');
+const kHandlePromise = Symbol('handlePromise');
+const kStream = Symbol('stream');
 function createIterResult(value, done) {
   return {
-    value: value,
-    done: done
+    value,
+    done
   };
 }
-
 function readAndResolve(iter) {
-  var resolve = iter[kLastResolve];
-
+  const resolve = iter[kLastResolve];
   if (resolve !== null) {
-    var data = iter[kStream].read(); // we defer if data is null
+    const data = iter[kStream].read();
+
+    // we defer if data is null
     // we can be expecting either 'end' or
     // 'error'
-
     if (data !== null) {
       iter[kLastPromise] = null;
       iter[kLastResolve] = null;
@@ -37,171 +29,157 @@ function readAndResolve(iter) {
     }
   }
 }
-
 function onReadable(iter) {
   // we wait for the next tick, because it might
   // emit an error with process.nextTick
   process.nextTick(readAndResolve, iter);
 }
-
 function wrapForNext(lastPromise, iter) {
-  return function (resolve, reject) {
-    lastPromise.then(function () {
+  return (resolve, reject) => {
+    lastPromise.then(() => {
       if (iter[kEnded]) {
         resolve(createIterResult(undefined, true));
         return;
       }
-
       iter[kHandlePromise](resolve, reject);
     }, reject);
   };
 }
-
-var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
-var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
+const AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
+const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({
   get stream() {
     return this[kStream];
   },
-
-  next: function next() {
-    var _this = this;
-
+  next() {
     // if we have detected an error in the meanwhile
     // reject straight away
-    var error = this[kError];
-
+    const error = this[kError];
     if (error !== null) {
       return Promise.reject(error);
     }
-
     if (this[kEnded]) {
       return Promise.resolve(createIterResult(undefined, true));
     }
-
     if (this[kStream].destroyed) {
       // We need to defer via nextTick because if .destroy(err) is
       // called, the error will be emitted via nextTick, and
       // we cannot guarantee that there is no error lingering around
       // waiting to be emitted.
-      return new Promise(function (resolve, reject) {
-        process.nextTick(function () {
-          if (_this[kError]) {
-            reject(_this[kError]);
+      return new Promise((resolve, reject) => {
+        process.nextTick(() => {
+          if (this[kError]) {
+            reject(this[kError]);
           } else {
             resolve(createIterResult(undefined, true));
           }
         });
       });
-    } // if we have multiple next() calls
+    }
+
+    // if we have multiple next() calls
     // we will wait for the previous Promise to finish
     // this logic is optimized to support for await loops,
     // where next() is only called once at a time
-
-
-    var lastPromise = this[kLastPromise];
-    var promise;
-
+    const lastPromise = this[kLastPromise];
+    let promise;
     if (lastPromise) {
       promise = new Promise(wrapForNext(lastPromise, this));
     } else {
       // fast path needed to support multiple this.push()
       // without triggering the next() queue
-      var data = this[kStream].read();
-
+      const data = this[kStream].read();
       if (data !== null) {
         return Promise.resolve(createIterResult(data, false));
       }
-
       promise = new Promise(this[kHandlePromise]);
     }
-
     this[kLastPromise] = promise;
     return promise;
-  }
-}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
-  return this;
-}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
-  var _this2 = this;
-
-  // destroy(err, cb) is a private API
-  // we can guarantee we have that here, because we control the
-  // Readable class this is attached to
-  return new Promise(function (resolve, reject) {
-    _this2[kStream].destroy(null, function (err) {
-      if (err) {
-        reject(err);
-        return;
-      }
-
-      resolve(createIterResult(undefined, true));
+  },
+  [Symbol.asyncIterator]() {
+    return this;
+  },
+  return() {
+    // destroy(err, cb) is a private API
+    // we can guarantee we have that here, because we control the
+    // Readable class this is attached to
+    return new Promise((resolve, reject) => {
+      this[kStream].destroy(null, err => {
+        if (err) {
+          reject(err);
+          return;
+        }
+        resolve(createIterResult(undefined, true));
+      });
     });
-  });
-}), _Object$setPrototypeO), AsyncIteratorPrototype);
-
-var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
-  var _Object$create;
-
-  var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
-    value: stream,
-    writable: true
-  }), _defineProperty(_Object$create, kLastResolve, {
-    value: null,
-    writable: true
-  }), _defineProperty(_Object$create, kLastReject, {
-    value: null,
-    writable: true
-  }), _defineProperty(_Object$create, kError, {
-    value: null,
-    writable: true
-  }), _defineProperty(_Object$create, kEnded, {
-    value: stream._readableState.endEmitted,
-    writable: true
-  }), _defineProperty(_Object$create, kHandlePromise, {
-    value: function value(resolve, reject) {
-      var data = iterator[kStream].read();
-
-      if (data) {
-        iterator[kLastPromise] = null;
-        iterator[kLastResolve] = null;
-        iterator[kLastReject] = null;
-        resolve(createIterResult(data, false));
-      } else {
-        iterator[kLastResolve] = resolve;
-        iterator[kLastReject] = reject;
-      }
+  }
+}, AsyncIteratorPrototype);
+const createReadableStreamAsyncIterator = stream => {
+  const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, {
+    [kStream]: {
+      value: stream,
+      writable: true
+    },
+    [kLastResolve]: {
+      value: null,
+      writable: true
+    },
+    [kLastReject]: {
+      value: null,
+      writable: true
     },
-    writable: true
-  }), _Object$create));
+    [kError]: {
+      value: null,
+      writable: true
+    },
+    [kEnded]: {
+      value: stream._readableState.endEmitted,
+      writable: true
+    },
+    // the function passed to new Promise
+    // is cached so we avoid allocating a new
+    // closure at every run
+    [kHandlePromise]: {
+      value: (resolve, reject) => {
+        const data = iterator[kStream].read();
+        if (data) {
+          iterator[kLastPromise] = null;
+          iterator[kLastResolve] = null;
+          iterator[kLastReject] = null;
+          resolve(createIterResult(data, false));
+        } else {
+          iterator[kLastResolve] = resolve;
+          iterator[kLastReject] = reject;
+        }
+      },
+      writable: true
+    }
+  });
   iterator[kLastPromise] = null;
-  finished(stream, function (err) {
+  finished(stream, err => {
     if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
-      var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
+      const reject = iterator[kLastReject];
+
+      // reject if we are waiting for data in the Promise
       // returned by next() and store the error
-
       if (reject !== null) {
         iterator[kLastPromise] = null;
         iterator[kLastResolve] = null;
         iterator[kLastReject] = null;
         reject(err);
       }
-
       iterator[kError] = err;
       return;
     }
-
-    var resolve = iterator[kLastResolve];
-
+    const resolve = iterator[kLastResolve];
     if (resolve !== null) {
       iterator[kLastPromise] = null;
       iterator[kLastResolve] = null;
       iterator[kLastReject] = null;
       resolve(createIterResult(undefined, true));
     }
-
     iterator[kEnded] = true;
   });
   stream.on('readable', onReadable.bind(null, iterator));
   return iterator;
 };
-
 module.exports = createReadableStreamAsyncIterator;
\ No newline at end of file
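The iterator built above is what drives for await...of over a stream: each loop turn calls the prototype's next(), and end-of-stream resolves the pending promise with { done: true }. A short sketch, assuming readable-stream v3 (whose Readable.from appears in the _stream_readable.js hunks earlier in this diff):

```js
// Illustrative sketch only -- not part of this diff.
// Assumes: `npm install readable-stream`.
const { Readable } = require('readable-stream');

(async () => {
  const r = Readable.from(['a', 'b', 'c']);
  for await (const chunk of r) {
    console.log(chunk); // 'a', then 'b', then 'c'
  }
})();
```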
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js index cdea425f19..352ac3438e 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -1,210 +1,155 @@ 'use strict'; -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -var _require = require('buffer'), - Buffer = _require.Buffer; - -var _require2 = require('util'), - inspect = _require2.inspect; - -var custom = inspect && inspect.custom || 'inspect'; - +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const _require = require('buffer'), + Buffer = _require.Buffer; +const _require2 = require('util'), + inspect = _require2.inspect; +const custom = inspect && inspect.custom || 'inspect'; function copyBuffer(src, target, offset) { Buffer.prototype.copy.call(src, target, offset); } - -module.exports = -/*#__PURE__*/ -function () { - function BufferList() { - _classCallCheck(this, BufferList); - +module.exports = class BufferList { + constructor() { this.head = null; this.tail = null; this.length = 0; } - - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; - } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - - while (p = p.next) { - ret += s + p.data; - } - - return ret; + push(v) { + const entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + unshift(v) { + const entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + shift() { + if (this.length === 0) return; + const ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + clear() { + this.head = this.tail = null; + this.length = 0; + } + join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + concat(n) { + if (this.length === 0) return Buffer.alloc(0); + const ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p 
= this.head; - var i = 0; - - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - - return ret; - } // Consumes a specified amount of bytes or characters from the buffered data. - - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; - - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } + return ret; + } - return ret; + // Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); } - }, { - key: "first", - value: function first() { - return this.head.data; - } // Consumes a specified amount of characters from the buffered data. - - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } + return ret; + } + first() { + return this.head.data; + } - break; + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + const str = p.data; + const nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); } - - ++c; + break; } + ++c; + } + this.length -= c; + return ret; + } - this.length -= c; - return ret; - } // Consumes a specified amount of bytes from the buffered data. - - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - - break; + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + const buf = p.data; + const nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); } - - ++c; + break; } - - this.length -= c; - return ret; - } // Make sure the linked list only shows the minimal necessary information. - - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread({}, options, { - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - })); + ++c; } - }]); + this.length -= c; + return ret; + } - return BufferList; -}();
\ No newline at end of file + // Make sure the linked list only shows the minimal necessary information. + [custom](_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } +};
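The `BufferList` rewrite above is behavior-preserving: a singly linked list of chunks in which `length` counts entries (not bytes), `concat(n)` flattens `n` bytes into one buffer, and `consume(n)` slices, shifts, or spans chunks depending on where `n` falls. A rough usage sketch; the deep require path is an internal detail of the package and is shown only for illustration:

```js
// Internal module path - not a public API, may change between versions.
const BufferList = require('readable-stream/lib/internal/streams/buffer_list');

const list = new BufferList();
list.push(Buffer.from('hello '));
list.push(Buffer.from('world'));

console.log(list.length);                       // 2 (linked-list entries, not bytes)
console.log(list.concat(11).toString());        // 'hello world' (non-destructive copy)
console.log(list.consume(6, false).toString()); // 'hello ' (exact-chunk fast path: shift)
console.log(list.first().toString());           // 'world'
```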
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js index 3268a16f3b..7e8275567d 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -1,11 +1,9 @@ -'use strict'; // undocumented cb() API, needed for core, not for public API +'use strict'; +// undocumented cb() API, needed for core, not for public API function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - + const readableDestroyed = this._readableState && this._readableState.destroyed; + const writableDestroyed = this._writableState && this._writableState.destroyed; if (readableDestroyed || writableDestroyed) { if (cb) { cb(err); @@ -17,53 +15,48 @@ function destroy(err, cb) { process.nextTick(emitErrorNT, this, err); } } - return this; - } // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks + } + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks if (this._readableState) { this._readableState.destroyed = true; - } // if this is a duplex stream mark the writable part as destroyed as well - + } + // if this is a duplex stream mark the writable part as destroyed as well if (this._writableState) { this._writableState.destroyed = true; } - - this._destroy(err || null, function (err) { + this._destroy(err || null, err => { if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); + if (!this._writableState) { + process.nextTick(emitErrorAndCloseNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, this, err); } else { - process.nextTick(emitCloseNT, _this); + process.nextTick(emitCloseNT, this); } } else if (cb) { - process.nextTick(emitCloseNT, _this); + process.nextTick(emitCloseNT, this); cb(err); } else { - process.nextTick(emitCloseNT, _this); + process.nextTick(emitCloseNT, this); } }); - return this; } - function emitErrorAndCloseNT(self, err) { emitErrorNT(self, err); emitCloseNT(self); } - function emitCloseNT(self) { if (self._writableState && !self._writableState.emitClose) return; if (self._readableState && !self._readableState.emitClose) return; self.emit('close'); } - function undestroy() { if (this._readableState) { this._readableState.destroyed = false; @@ -71,7 +64,6 @@ function undestroy() { this._readableState.ended = false; this._readableState.endEmitted = false; } - if (this._writableState) { this._writableState.destroyed = false; this._writableState.ended = false; @@ -82,24 +74,22 @@ function undestroy() { this._writableState.errorEmitted = false; } } - function emitErrorNT(self, err) { self.emit('error', err); } - function errorOrDestroy(stream, err) { // We have tests that rely on errors being emitted // in the same tick, so changing this is semver major. 
// For now when you opt-in to autoDestroy we allow // the error to be emitted nextTick. In a future // semver major update we should change the default to this. - var rState = stream._readableState; - var wState = stream._writableState; + + const rState = stream._readableState; + const wState = stream._writableState; if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); } - module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy + destroy, + undestroy, + errorOrDestroy };
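The `destroy()` above is installed as a stream method by readable-stream, so its branches are observable from the public API. A small sketch, assuming readable-stream v3's `Readable`: with an error and no callback, the `emitErrorAndCloseNT` branch emits `'error'` and then `'close'` on the next tick.

```js
const { Readable } = require('readable-stream');

const r = new Readable({ read() {} });
r.on('error', (err) => console.log('error:', err.message)); // 'boom'
r.on('close', () => console.log('closed'));

// No callback + an error: destroy() defers to emitErrorAndCloseNT
r.destroy(new Error('boom'));
```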
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js index 831f286d98..b6d101691f 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -1,78 +1,62 @@ // Ported from https://github.com/mafintosh/end-of-stream with // permission from the author, Mathias Buus (@mafintosh). -'use strict'; -var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +'use strict'; +const ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; function once(callback) { - var called = false; + let called = false; return function () { if (called) return; called = true; - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } - callback.apply(this, args); }; } - function noop() {} - function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function'; } - function eos(stream, opts, callback) { if (typeof opts === 'function') return eos(stream, null, opts); if (!opts) opts = {}; callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; - - var onlegacyfinish = function onlegacyfinish() { + let readable = opts.readable || opts.readable !== false && stream.readable; + let writable = opts.writable || opts.writable !== false && stream.writable; + const onlegacyfinish = () => { if (!stream.writable) onfinish(); }; - var writableEnded = stream._writableState && stream._writableState.finished; - - var onfinish = function onfinish() { + const onfinish = () => { writable = false; writableEnded = true; if (!readable) callback.call(stream); }; - var readableEnded = stream._readableState && stream._readableState.endEmitted; - - var onend = function onend() { + const onend = () => { readable = false; readableEnded = true; if (!writable) callback.call(stream); }; - - var onerror = function onerror(err) { + const onerror = err => { callback.call(stream, err); }; - - var onclose = function onclose() { - var err; - + const onclose = () => { + let err; if (readable && !readableEnded) { if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); return callback.call(stream, err); } - if (writable && !writableEnded) { if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); return callback.call(stream, err); } }; - - var onrequest = function onrequest() { + const onrequest = () => { stream.req.on('finish', onfinish); }; - if (isRequest(stream)) { stream.on('complete', onfinish); stream.on('abort', onclose); @@ -82,7 +66,6 @@ function eos(stream, opts, callback) { stream.on('end', onlegacyfinish); stream.on('close', onlegacyfinish); } - stream.on('end', onend); stream.on('finish', onfinish); if (opts.error !== false) stream.on('error', onerror); @@ -100,5 +83,4 @@ function eos(stream, opts, callback) { stream.removeListener('close', onclose); }; } - module.exports = eos;
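The `eos` helper above is what readable-stream exposes publicly as `finished`: it attaches `'end'`/`'finish'`/`'error'`/`'close'` listeners and invokes the callback exactly once, with `ERR_STREAM_PREMATURE_CLOSE` if the stream closed before completing. A usage sketch via the package export:

```js
const fs = require('fs');
const { finished } = require('readable-stream');

const read = fs.createReadStream(__filename);
read.resume(); // drain the stream so it can reach 'end'

finished(read, (err) => {
  if (err) return console.error('stream failed:', err);
  console.log('stream is done reading');
});
```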
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js index 6c41284416..4ca2cd1996 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/from.js @@ -1,52 +1,42 @@ 'use strict'; function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; - +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +const ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; function from(Readable, iterable, opts) { - var iterator; - + let iterator; if (iterable && typeof iterable.next === 'function') { iterator = iterable; } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - - var readable = new Readable(_objectSpread({ + const readable = new Readable(_objectSpread({ objectMode: true - }, opts)); // Reading boolean to protect against _read + }, opts)); + // Reading boolean to protect against _read // being called before last iteration completion. - - var reading = false; - + let reading = false; readable._read = function () { if (!reading) { reading = true; next(); } }; - function next() { return _next2.apply(this, arguments); } - function _next2() { _next2 = _asyncToGenerator(function* () { try { - var _ref = yield iterator.next(), - value = _ref.value, - done = _ref.done; - + const _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; if (done) { readable.push(null); - } else if (readable.push((yield value))) { + } else if (readable.push(yield value)) { next(); } else { reading = false; @@ -57,8 +47,6 @@ function from(Readable, iterable, opts) { }); return _next2.apply(this, arguments); } - return readable; } - module.exports = from;
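The `from()` above backs `Readable.from`: it pulls one `iterator.next()` per `_read()` call and uses the `reading` flag so that backpressure (a `push()` returning `false`) pauses iteration instead of triggering overlapping pulls. A sketch using the Node core entry point:

```js
const { Readable } = require('stream');

async function* generate() {
  yield 'hello';
  yield 'world';
}

// objectMode defaults to true inside from(), so strings pass through intact
const stream = Readable.from(generate());
stream.on('data', (chunk) => console.log(chunk));
```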
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js index 6589909889..272546db82 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -1,88 +1,78 @@ // Ported from https://github.com/mafintosh/pump with // permission from the author, Mathias Buus (@mafintosh). -'use strict'; -var eos; +'use strict'; +let eos; function once(callback) { - var called = false; + let called = false; return function () { if (called) return; called = true; - callback.apply(void 0, arguments); + callback(...arguments); }; } - -var _require$codes = require('../../../errors').codes, - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; - +const _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; function noop(err) { // Rethrow the error if it exists to avoid swallowing it if (err) throw err; } - function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function'; } - function destroyer(stream, reading, writing, callback) { callback = once(callback); - var closed = false; - stream.on('close', function () { + let closed = false; + stream.on('close', () => { closed = true; }); if (eos === undefined) eos = require('./end-of-stream'); eos(stream, { readable: reading, writable: writing - }, function (err) { + }, err => { if (err) return callback(err); closed = true; callback(); }); - var destroyed = false; - return function (err) { + let destroyed = false; + return err => { if (closed) return; if (destroyed) return; - destroyed = true; // request.destroy just do .end - .abort is what we want + destroyed = true; + // request.destroy just do .end - .abort is what we want if (isRequest(stream)) return stream.abort(); if (typeof stream.destroy === 'function') return stream.destroy(); callback(err || new ERR_STREAM_DESTROYED('pipe')); }; } - function call(fn) { fn(); } - function pipe(from, to) { return from.pipe(to); } - function popCallback(streams) { if (!streams.length) return noop; if (typeof streams[streams.length - 1] !== 'function') return noop; return streams.pop(); } - function pipeline() { for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { streams[_key] = arguments[_key]; } - - var callback = popCallback(streams); + const callback = popCallback(streams); if (Array.isArray(streams[0])) streams = streams[0]; - if (streams.length < 2) { throw new ERR_MISSING_ARGS('streams'); } - - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; + let error; + const destroys = streams.map(function (stream, i) { + const reading = i < streams.length - 1; + const writing = i > 0; return destroyer(stream, reading, writing, function (err) { if (!error) error = err; if (err) destroys.forEach(call); @@ -93,5 +83,4 @@ function pipeline() { }); return streams.reduce(pipe); } - module.exports = pipeline;
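The `pipeline()` above mirrors `stream.pipeline` from core: streams are piped left to right, and a `destroyer` is wired onto each so that a failure anywhere tears the whole chain down and surfaces the first error in the final callback. A sketch (the file names are placeholders):

```js
const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('readable-stream');

pipeline(
  fs.createReadStream('input.txt'),
  zlib.createGzip(),
  fs.createWriteStream('input.txt.gz'),
  (err) => {
    if (err) return console.error('pipeline failed:', err);
    console.log('pipeline succeeded');
  }
);
```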
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js index 19887eb8a9..8a994b4422 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/lib/internal/streams/state.js @@ -1,27 +1,22 @@ 'use strict'; -var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; - +const ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; function highWaterMarkFrom(options, isDuplex, duplexKey) { return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; } - function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); if (hwm != null) { if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; + const name = isDuplex ? duplexKey : 'highWaterMark'; throw new ERR_INVALID_OPT_VALUE(name, hwm); } - return Math.floor(hwm); - } // Default value - + } + // Default value return state.objectMode ? 16 : 16 * 1024; } - module.exports = { - getHighWaterMark: getHighWaterMark + getHighWaterMark };
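The `getHighWaterMark()` above resolves an explicit option, rejects negative or non-integer values with `ERR_INVALID_OPT_VALUE`, and otherwise falls back to 16 objects in object mode or 16 KiB in byte mode. The defaults are observable through the public getter:

```js
const { Readable } = require('readable-stream');

// Byte mode: default high-water mark is 16 * 1024 bytes
console.log(new Readable({ read() {} }).readableHighWaterMark); // 16384

// Object mode: default is 16 objects
console.log(new Readable({ objectMode: true, read() {} }).readableHighWaterMark); // 16
```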
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/package.json b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/package.json index 0b0c4bd207..4cd81b628d 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/readable-stream/package.json +++ b/deps/npm/node_modules/node-gyp/node_modules/readable-stream/package.json @@ -1,6 +1,6 @@ { "name": "readable-stream", - "version": "3.6.0", + "version": "3.6.1", "description": "Streams3, a user-land copy of the stream library from Node.js", "main": "readable.js", "engines": { diff --git a/deps/npm/node_modules/sigstore/LICENSE b/deps/npm/node_modules/sigstore/LICENSE index d645695673..e9e7c1679a 100644 --- a/deps/npm/node_modules/sigstore/LICENSE +++ b/deps/npm/node_modules/sigstore/LICENSE @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2023 The Sigstore Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/deps/npm/node_modules/sigstore/README.md b/deps/npm/node_modules/sigstore/README.md index 0a8b690423..5ade0dc5ab 100644 --- a/deps/npm/node_modules/sigstore/README.md +++ b/deps/npm/node_modules/sigstore/README.md @@ -1,4 +1,4 @@ -# sigstore-js +# sigstore-js · [![npm version](https://img.shields.io/npm/v/sigstore.svg?style=flat)](https://www.npmjs.com/package/sigstore) [![CI Status](https://github.com/sigstore/sigstore-js/workflows/CI/badge.svg)](https://github.com/sigstore/sigstore-js/actions/workflows/ci.yml) [![Smoke Test Status](https://github.com/sigstore/sigstore-js/workflows/smoke-test/badge.svg)](https://github.com/sigstore/sigstore-js/actions/workflows/smoke-test.yml) A JavaScript library for generating and verifying Sigstore signatures. One of the intended uses is to sign and verify npm packages but it can be used to sign @@ -101,29 +101,16 @@ sigstore <command> <artifact> ## Development -### Updating protobufs +### Changesets +If you are contributing a user-facing or noteworthy change that should be added to the changelog, you should include a changeset with your PR by running the following command: -[Docker](https://docs.docker.com/engine/install/) is required to generate protobufs for the `.sigstore` bundle format. - -Install Docker on MacOS using [Homebrew](https://brew.sh/): - -``` -brew install --cask docker && open -a Docker +```console +npx changeset add ``` -View [Docker install instructions](https://docs.docker.com/engine/install/) for other platforms. - -#### Updating Sigstore Protobufs - -Update the Git `REF` in `hack/generate-bundle-types` from the [sigstore/protobuf-specs][5] repository. +Follow the prompts to specify whether the change is a major, minor or patch change. This will create a file in the `.changesets` directory of the repo. This change should be committed and included with your PR. -Generate TypeScript protobufs (should update files in scr/types/sigstore/\_\_generated\_\_): - -``` -npm run codegen:bundle -``` - -#### Updating Rekor Types +### Updating Rekor Types Update the git `REF` in `hack/generate-rekor-types` from the [sigstore/rekor][1] repository. @@ -135,16 +122,10 @@ npm run codegen:rekor ### Release Steps -1. Update the version inside `package.json` and run `npm i` to update `package-lock.json`. -2. PR the changes above and merge to the "main" branch when approved. -3. 
Use either the "[Create a new release](https://github.com/sigstore/sigstore-js/releases/new)" link or the `gh release create` CLI command to start a new release. -4. Tag the release with a label matching the version in the `package.json` (with a `v` prefix). -5. Add a title matching the tag. -6. Add release notes. -7. Mark as pre-release as appropriate. -8. Publish release. +Whenever a new changeset is merged to the "main" branch, the `release` workflow will open a PR (or append to the existing PR if one is already open) with the all of the pending changesets. + +Publishing a release simply requires that you approve/merge this PR. This will trigger the publishing of the package to the npm registry and the creation of the GitHub release. -After publishing the release, a new npm package will be built and published automatically to the npm registry. ## Licensing `sigstore-js` is licensed under the Apache 2.0 License. diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.d.ts b/deps/npm/node_modules/sigstore/dist/sigstore.d.ts index bb3034383e..22c5d2a45f 100644 --- a/deps/npm/node_modules/sigstore/dist/sigstore.d.ts +++ b/deps/npm/node_modules/sigstore/dist/sigstore.d.ts @@ -8,6 +8,10 @@ export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev"; interface TLogOptions { rekorURL?: string; } +interface TUFOptions { + tufMirrorURL?: string; + tufRootPath?: string; +} export type SignOptions = { fulcioURL?: string; identityToken?: string; @@ -23,7 +27,7 @@ export type VerifyOptions = { certificateIdentityURI?: string; certificateOIDs?: Record<string, string>; keySelector?: KeySelector; -} & TLogOptions; +} & TLogOptions & TUFOptions; type Bundle = sigstore.SerializedBundle; export declare function sign(payload: Buffer, options?: SignOptions): Promise<Bundle>; export declare function attest(payload: Buffer, payloadType: string, options?: SignOptions): Promise<Bundle>; diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.js b/deps/npm/node_modules/sigstore/dist/sigstore.js index ef8fb2058a..34b269aadd 100644 --- a/deps/npm/node_modules/sigstore/dist/sigstore.js +++ b/deps/npm/node_modules/sigstore/dist/sigstore.js @@ -42,15 +42,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -const fs_1 = __importDefault(require("fs")); -const os_1 = __importDefault(require("os")); -const path_1 = __importDefault(require("path")); const ca_1 = require("./ca"); const identity_1 = __importDefault(require("./identity")); const sign_1 = require("./sign"); const tlog_1 = require("./tlog"); const tuf = __importStar(require("./tuf")); const sigstore = __importStar(require("./types/sigstore")); +const util_1 = require("./util"); const verify_1 = require("./verify"); exports.utils = __importStar(require("./sigstore-utils")); exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; @@ -65,6 +63,7 @@ function createTLogClient(options) { rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL, }); } +const tufCacheDir = util_1.appdata.appDataPath('sigstore-js'); async function sign(payload, options = {}) { const ca = createCAClient(options); const tlog = createTLogClient(options); @@ -92,8 +91,10 @@ async function attest(payload, payloadType, options = {}) { } exports.attest = attest; async function verify(bundle, payload, options = {}) { - const cacheDir = defaultCacheDir(); - const trustedRoot = await tuf.getTrustedRoot(cacheDir); + const trustedRoot = await tuf.getTrustedRoot(tufCacheDir, { + mirrorURL: options.tufMirrorURL, + rootPath: options.tufRootPath, + }); const verifier = new verify_1.Verifier(trustedRoot, options.keySelector); const deserializedBundle = sigstore.bundleFromJSON(bundle); const opts = collectArtifactVerificationOptions(options); @@ -119,16 +120,6 @@ function configureIdentityProviders(options) { } return idps; } -function defaultCacheDir() { - let cacheRootDir = os_1.default.homedir(); - try { - fs_1.default.accessSync(os_1.default.homedir(), fs_1.default.constants.W_OK | fs_1.default.constants.R_OK); - } - catch (e) { - cacheRootDir = os_1.default.tmpdir(); - } - return path_1.default.join(cacheRootDir, '.sigstore', 'js-root'); -} // Assembles the AtifactVerificationOptions from the supplied VerifyOptions. function collectArtifactVerificationOptions(options) { // The trusted signers are only used if the options contain a certificate diff --git a/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js b/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js index 5383a37009..61923a61cd 100644 --- a/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js +++ b/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js @@ -1,5 +1,5 @@ "use strict"; -/* tslint:disable */ +/* eslint-disable */ /** * This file was automatically generated by json-schema-to-typescript. * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, diff --git a/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js b/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js index 5383a37009..61923a61cd 100644 --- a/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js +++ b/deps/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js @@ -1,5 +1,5 @@ "use strict"; -/* tslint:disable */ +/* eslint-disable */ /** * This file was automatically generated by json-schema-to-typescript. * DO NOT MODIFY IT BY HAND. 
Instead, modify the source JSONSchema file, diff --git a/deps/npm/node_modules/sigstore/dist/tuf/index.d.ts b/deps/npm/node_modules/sigstore/dist/tuf/index.d.ts index 349ff08b3b..455fc6af06 100644 --- a/deps/npm/node_modules/sigstore/dist/tuf/index.d.ts +++ b/deps/npm/node_modules/sigstore/dist/tuf/index.d.ts @@ -1,2 +1,6 @@ import * as sigstore from '../types/sigstore'; -export declare function getTrustedRoot(cacheDir: string): Promise<sigstore.TrustedRoot>; +export interface TUFOptions { + mirrorURL?: string; + rootPath?: string; +} +export declare function getTrustedRoot(cachePath: string, options?: TUFOptions): Promise<sigstore.TrustedRoot>; diff --git a/deps/npm/node_modules/sigstore/dist/tuf/index.js b/deps/npm/node_modules/sigstore/dist/tuf/index.js index 1aea238ef3..824bce9105 100644 --- a/deps/npm/node_modules/sigstore/dist/tuf/index.js +++ b/deps/npm/node_modules/sigstore/dist/tuf/index.js @@ -1,4 +1,27 @@ "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; @@ -22,55 +45,62 @@ limitations under the License. const fs_1 = __importDefault(require("fs")); const path_1 = __importDefault(require("path")); const tuf_js_1 = require("tuf-js"); -const trustroot_1 = require("./trustroot"); -async function getTrustedRoot(cacheDir) { - initTufCache(cacheDir); - const repoMap = initRepoMap(cacheDir); - const repoClients = Object.entries(repoMap.repositories).map(([name, urls]) => initClient(name, urls[0], cacheDir)); - // TODO: Add support for multiple repositories. For now, we just use the first - // one (the production Sigstore TUF repository). 
- const fetcher = new trustroot_1.TrustedRootFetcher(repoClients[0]); - return fetcher.getTrustedRoot(); +const sigstore = __importStar(require("../types/sigstore")); +const target_1 = require("./target"); +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +const DEFAULT_MIRROR_URL = 'https://sigstore-tuf-root.storage.googleapis.com'; +const DEFAULT_TUF_ROOT_PATH = '../../store/public-good-instance-root.json'; +async function getTrustedRoot(cachePath, options = {}) { + const tufRootPath = options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH); + const mirrorURL = options.mirrorURL || DEFAULT_MIRROR_URL; + initTufCache(cachePath, tufRootPath); + const remote = initRemoteConfig(cachePath, mirrorURL); + const repoClient = initClient(cachePath, remote); + const trustedRoot = await (0, target_1.getTarget)(repoClient, TRUSTED_ROOT_TARGET); + return sigstore.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); } exports.getTrustedRoot = getTrustedRoot; -// Initializes the root TUF cache directory -function initTufCache(cacheDir) { - if (!fs_1.default.existsSync(cacheDir)) { - fs_1.default.mkdirSync(cacheDir, { recursive: true }); +// Initializes the TUF cache directory structure including the initial +// root.json file. If the cache directory does not exist, it will be +// created. If the targets directory does not exist, it will be created. +// If the root.json file does not exist, it will be copied from the +// rootPath argument. +function initTufCache(cachePath, tufRootPath) { + const targetsPath = path_1.default.join(cachePath, 'targets'); + const cachedRootPath = path_1.default.join(cachePath, 'root.json'); + if (!fs_1.default.existsSync(cachePath)) { + fs_1.default.mkdirSync(cachePath, { recursive: true }); } -} -// Initializes the repo map (copying it to the cache root dir) and returns the -// content of the repository map. -function initRepoMap(rootDir) { - const mapDest = path_1.default.join(rootDir, 'map.json'); - if (!fs_1.default.existsSync(mapDest)) { - const mapSrc = require.resolve('../../store/map.json'); - fs_1.default.copyFileSync(mapSrc, mapDest); + if (!fs_1.default.existsSync(targetsPath)) { + fs_1.default.mkdirSync(targetsPath); } - const buf = fs_1.default.readFileSync(mapDest); - return JSON.parse(buf.toString('utf-8')); + if (!fs_1.default.existsSync(cachedRootPath)) { + fs_1.default.copyFileSync(tufRootPath, cachedRootPath); + } + return cachePath; } -function initClient(name, url, rootDir) { - const repoCachePath = path_1.default.join(rootDir, name); - const targetCachePath = path_1.default.join(repoCachePath, 'targets'); - const tufRootDest = path_1.default.join(repoCachePath, 'root.json'); - // Only copy the TUF trusted root if it doesn't already exist. It's possible - // that the cached root has already been updated, so we don't want to roll it - // back. - if (!fs_1.default.existsSync(tufRootDest)) { - const tufRootSrc = require.resolve(`../../store/${name}-root.json`); - fs_1.default.mkdirSync(repoCachePath); - fs_1.default.copyFileSync(tufRootSrc, tufRootDest); +// Initializes the remote.json file, which contains the URL of the TUF +// repository. If the file does not exist, it will be created. If the file +// exists, it will be parsed and returned. 
+function initRemoteConfig(rootDir, mirrorURL) { + let remoteConfig; + const remoteConfigPath = path_1.default.join(rootDir, 'remote.json'); + if (fs_1.default.existsSync(remoteConfigPath)) { + const data = fs_1.default.readFileSync(remoteConfigPath, 'utf-8'); + remoteConfig = JSON.parse(data); } - if (!fs_1.default.existsSync(targetCachePath)) { - fs_1.default.mkdirSync(targetCachePath); + if (!remoteConfig) { + remoteConfig = { mirror: mirrorURL }; + fs_1.default.writeFileSync(remoteConfigPath, JSON.stringify(remoteConfig)); } - // TODO: Is there some better way to derive the base URL for the targets? - // Hard-coding for now based on current Sigstore TUF repo layout. + return remoteConfig; +} +function initClient(cachePath, remote) { + const baseURL = remote.mirror; return new tuf_js_1.Updater({ - metadataBaseUrl: url, - targetBaseUrl: `${url}/targets`, - metadataDir: repoCachePath, - targetDir: targetCachePath, + metadataBaseUrl: baseURL, + targetBaseUrl: `${baseURL}/targets`, + metadataDir: cachePath, + targetDir: path_1.default.join(cachePath, 'targets'), }); } diff --git a/deps/npm/node_modules/sigstore/dist/tuf/target.d.ts b/deps/npm/node_modules/sigstore/dist/tuf/target.d.ts new file mode 100644 index 0000000000..aed81654f3 --- /dev/null +++ b/deps/npm/node_modules/sigstore/dist/tuf/target.d.ts @@ -0,0 +1,2 @@ +import { Updater } from 'tuf-js'; +export declare function getTarget(tuf: Updater, targetPath: string): Promise<string>; diff --git a/deps/npm/node_modules/sigstore/dist/tuf/target.js b/deps/npm/node_modules/sigstore/dist/tuf/target.js new file mode 100644 index 0000000000..ac708cdbcf --- /dev/null +++ b/deps/npm/node_modules/sigstore/dist/tuf/target.js @@ -0,0 +1,60 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getTarget = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const error_1 = require("../error"); +// Returns the local path to the specified target. If the target is not yet +// cached locally, the provided TUF Updater will be used to download and +// cache the target. 
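// An illustrative sketch (not part of this module) of how the new
// flow drives getTarget(), assuming a tuf-js Updater configured as
// in initClient() above:
//
//   const raw = await getTarget(updater, 'trusted_root.json');
//   const trustedRoot = sigstore.TrustedRoot.fromJSON(JSON.parse(raw));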
+async function getTarget(tuf, targetPath) { + const path = await getTargetPath(tuf, targetPath); + try { + return fs_1.default.readFileSync(path, 'utf-8'); + } + catch (err) { + throw new error_1.InternalError(`error reading trusted root: ${err}`); + } +} +exports.getTarget = getTarget; +async function getTargetPath(tuf, target) { + let targetInfo; + try { + targetInfo = await tuf.refresh().then(() => tuf.getTargetInfo(target)); + } + catch (err) { + throw new error_1.InternalError(`error refreshing TUF metadata: ${err}`); + } + if (!targetInfo) { + throw new error_1.InternalError(`target ${target} not found`); + } + let path = await tuf.findCachedTarget(targetInfo); + // An empty path here means the target has not been cached locally, or is + // out of date. In either case, we need to download it. + if (!path) { + try { + path = await tuf.downloadTarget(targetInfo); + } + catch (err) { + throw new error_1.InternalError(`error downloading target: ${err}`); + } + } + return path; +} diff --git a/deps/npm/node_modules/sigstore/dist/tuf/trustroot.d.ts b/deps/npm/node_modules/sigstore/dist/tuf/trustroot.d.ts deleted file mode 100644 index 615fffae62..0000000000 --- a/deps/npm/node_modules/sigstore/dist/tuf/trustroot.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Updater } from 'tuf-js'; -import * as sigstore from '../types/sigstore'; -export declare class TrustedRootFetcher { - private tuf; - constructor(tuf: Updater); - getTrustedRoot(): Promise<sigstore.TrustedRoot>; - private allTargets; - private getTLogKeys; - private getCAKeys; - private readTargetBytes; -} diff --git a/deps/npm/node_modules/sigstore/dist/tuf/trustroot.js b/deps/npm/node_modules/sigstore/dist/tuf/trustroot.js deleted file mode 100644 index dcf491cdae..0000000000 --- a/deps/npm/node_modules/sigstore/dist/tuf/trustroot.js +++ /dev/null @@ -1,163 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TrustedRootFetcher = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -const fs_1 = __importDefault(require("fs")); -const error_1 = require("../error"); -const sigstore = __importStar(require("../types/sigstore")); -const util_1 = require("../util"); -const TRUSTED_ROOT_MEDIA_TYPE = 'application/vnd.dev.sigstore.trustedroot+json;version=0.1'; -// Type guard for SigstoreTargetMetadata -function isTargetMetadata(m) { - return (m !== undefined && - m !== null && - typeof m === 'object' && - 'status' in m && - 'usage' in m && - 'uri' in m); -} -class TrustedRootFetcher { - constructor(tuf) { - this.tuf = tuf; - } - // Assembles a TrustedRoot from the targets in the TUF repo - async getTrustedRoot() { - // Get all available targets - const targets = await this.allTargets(); - const cas = await this.getCAKeys(targets, 'Fulcio'); - const ctlogs = await this.getTLogKeys(targets, 'CTFE'); - const tlogs = await this.getTLogKeys(targets, 'Rekor'); - return { - mediaType: TRUSTED_ROOT_MEDIA_TYPE, - certificateAuthorities: cas, - ctlogs: ctlogs, - tlogs: tlogs, - timestampAuthorities: [], - }; - } - // Retrieves the list of TUF targets. - // NOTE: This is a HACK to get around the fact that the TUF library doesn't - // expose the list of targets. This is a temporary solution until TUF comes up - // with a story for target discovery. - // https://docs.google.com/document/d/1rWHAM2qCUtnjWD4lOrGWE2EIDLoA7eSy4-jB66Wgh0o - async allTargets() { - try { - await this.tuf.refresh(); - } - catch (e) { - throw new error_1.InternalError('error refreshing trust metadata'); - } - return Object.values( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - this.tuf.trustedSet.targets?.signed.targets || {}); - } - // Filters the supplied list of targets to those with the specified usage - // and returns a new TransparencyLogInstance for each with the associated - // public key populated. - async getTLogKeys(targets, usage) { - const filteredTargets = filterByUsage(targets, usage); - return Promise.all(filteredTargets.map(async (target) => { - const keyBytes = await this.readTargetBytes(target); - const uri = isTargetMetadata(target.custom.sigstore) - ? target.custom.sigstore.uri - : ''; - // The log ID is not present in the Sigstore target metadata, but - // can be derived by hashing the contents of the public key. - return { - baseUrl: uri, - hashAlgorithm: sigstore.HashAlgorithm.SHA2_256, - logId: { keyId: util_1.crypto.hash(keyBytes) }, - publicKey: { - keyDetails: sigstore.PublicKeyDetails.PKIX_ECDSA_P256_SHA_256, - rawBytes: keyBytes, - }, - }; - })); - } - // Filters the supplied list of targets to those with the specified usage - // and returns a new CertificateAuthority populated with all of the associated - // certificates. - // NOTE: The Sigstore target metadata does NOT provide any mechanism to link - // related certificates (e.g. a root and intermediate). As a result, we - // assume that all certificates located here are part of the same chain. - // This works out OK since our certificate chain verification code tries all - // possible permutations of the certificates until it finds one that results - // in a valid, trusted chain. 
- async getCAKeys(targets, usage) { - const filteredTargets = filterByUsage(targets, usage); - const certs = await Promise.all(filteredTargets.map(async (target) => await this.readTargetBytes(target))); - return [ - { - uri: '', - subject: undefined, - validFor: { start: new Date(0) }, - certChain: { - certificates: certs.map((cert) => ({ rawBytes: cert })), - }, - }, - ]; - } - // Reads the contents of the specified target file as a DER-encoded buffer. - async readTargetBytes(target) { - try { - let path = await this.tuf.findCachedTarget(target); - // An empty path here means the target has not been cached locally, or is - // out of date. In either case, we need to download it. - if (!path) { - path = await this.tuf.downloadTarget(target); - } - const file = fs_1.default.readFileSync(path); - return util_1.pem.toDER(file.toString('utf-8')); - } - catch (err) { - throw new error_1.InternalError(`error reading key/certificate for ${target.path}`); - } - } -} -exports.TrustedRootFetcher = TrustedRootFetcher; -function filterByUsage(targets, usage) { - return targets.filter((target) => { - const meta = target.custom.sigstore; - return isTargetMetadata(meta) && meta.usage === usage; - }); -} diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts index 26dd2150d5..70b2896fbd 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts +++ b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts @@ -1,21 +1,13 @@ /// <reference types="node" /> +import { ArtifactVerificationOptions, Bundle, Envelope, TransparencyLogEntry, VerificationMaterial } from '@sigstore/protobuf-specs'; import { Entry } from '../../tlog'; import { x509Certificate } from '../../x509/cert'; import { SignatureMaterial } from '../signature'; import { WithRequired } from '../utility'; import { ValidBundle } from './validate'; -import { Envelope } from './__generated__/envelope'; -import { Bundle, VerificationMaterial } from './__generated__/sigstore_bundle'; -import { TransparencyLogEntry } from './__generated__/sigstore_rekor'; -import { ArtifactVerificationOptions } from './__generated__/sigstore_verification'; +export * from '@sigstore/protobuf-specs'; export * from './serialized'; export * from './validate'; -export * from './__generated__/envelope'; -export * from './__generated__/sigstore_bundle'; -export * from './__generated__/sigstore_common'; -export { TransparencyLogEntry } from './__generated__/sigstore_rekor'; -export * from './__generated__/sigstore_trustroot'; -export * from './__generated__/sigstore_verification'; export declare const bundleToJSON: (message: Bundle) => unknown; export declare const bundleFromJSON: (obj: any) => ValidBundle; export declare const envelopeToJSON: (message: Envelope) => unknown; diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js index df07d6dc9f..55df7e744d 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js +++ b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js @@ -14,32 +14,39 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.signingCertificate = exports.bundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = 
exports.isBundleWithCertificateChain = exports.isBundleWithVerificationMaterial = exports.envelopeFromJSON = exports.envelopeToJSON = exports.bundleFromJSON = exports.bundleToJSON = exports.TransparencyLogEntry = void 0; +exports.signingCertificate = exports.bundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.isBundleWithVerificationMaterial = exports.envelopeFromJSON = exports.envelopeToJSON = exports.bundleFromJSON = exports.bundleToJSON = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); const util_1 = require("../../util"); const cert_1 = require("../../x509/cert"); const validate_1 = require("./validate"); -const envelope_1 = require("./__generated__/envelope"); -const sigstore_bundle_1 = require("./__generated__/sigstore_bundle"); -const sigstore_common_1 = require("./__generated__/sigstore_common"); +__exportStar(require("@sigstore/protobuf-specs"), exports); __exportStar(require("./serialized"), exports); __exportStar(require("./validate"), exports); -__exportStar(require("./__generated__/envelope"), exports); -__exportStar(require("./__generated__/sigstore_bundle"), exports); -__exportStar(require("./__generated__/sigstore_common"), exports); -var sigstore_rekor_1 = require("./__generated__/sigstore_rekor"); -Object.defineProperty(exports, "TransparencyLogEntry", { enumerable: true, get: function () { return sigstore_rekor_1.TransparencyLogEntry; } }); -__exportStar(require("./__generated__/sigstore_trustroot"), exports); -__exportStar(require("./__generated__/sigstore_verification"), exports); -exports.bundleToJSON = sigstore_bundle_1.Bundle.toJSON; +exports.bundleToJSON = protobuf_specs_1.Bundle.toJSON; // eslint-disable-next-line @typescript-eslint/no-explicit-any const bundleFromJSON = (obj) => { - const bundle = sigstore_bundle_1.Bundle.fromJSON(obj); + const bundle = protobuf_specs_1.Bundle.fromJSON(obj); (0, validate_1.assertValidBundle)(bundle); return bundle; }; exports.bundleFromJSON = bundleFromJSON; -exports.envelopeToJSON = envelope_1.Envelope.toJSON; -exports.envelopeFromJSON = envelope_1.Envelope.fromJSON; +exports.envelopeToJSON = protobuf_specs_1.Envelope.toJSON; +exports.envelopeFromJSON = protobuf_specs_1.Envelope.fromJSON; const BUNDLE_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; // Type guard for narrowing a Bundle to a BundleWithVerificationMaterial function isBundleWithVerificationMaterial(bundle) { @@ -80,7 +87,7 @@ exports.bundle = { $case: 'messageSignature', messageSignature: { messageDigest: { - algorithm: sigstore_common_1.HashAlgorithm.SHA2_256, + algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, digest: digest, }, signature: signature.signature, diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts index fd0a354282..7d8316fd2e 100644 --- 
a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts +++ b/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts @@ -1,6 +1,5 @@ +import { Bundle, MessageSignature, VerificationMaterial } from '@sigstore/protobuf-specs'; import { WithRequired } from '../utility'; -import { Bundle, VerificationMaterial } from './__generated__/sigstore_bundle'; -import { MessageSignature } from './__generated__/sigstore_common'; export type ValidBundle = Bundle & { verificationMaterial: VerificationMaterial & { content: NonNullable<VerificationMaterial['content']>; diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js b/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js index a19d8ad3ec..efd873ab65 100644 --- a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js +++ b/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js @@ -1,21 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.assertValidBundle = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ const error_1 = require("../../error"); // Performs basic validation of a Sigstore bundle to ensure that all required // fields are populated. This is not a complete validation of the bundle, but diff --git a/deps/npm/node_modules/sigstore/dist/util/appdata.d.ts b/deps/npm/node_modules/sigstore/dist/util/appdata.d.ts new file mode 100644 index 0000000000..dcdaeef418 --- /dev/null +++ b/deps/npm/node_modules/sigstore/dist/util/appdata.d.ts @@ -0,0 +1 @@ +export declare function appDataPath(name: string): string; diff --git a/deps/npm/node_modules/sigstore/dist/util/appdata.js b/deps/npm/node_modules/sigstore/dist/util/appdata.js new file mode 100644 index 0000000000..d0c7f6f079 --- /dev/null +++ b/deps/npm/node_modules/sigstore/dist/util/appdata.js @@ -0,0 +1,26 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.appDataPath = void 0; +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +function appDataPath(name) { + const homedir = os_1.default.homedir(); + switch (process.platform) { + case 'darwin': { + const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); + return path_1.default.join(appSupport, name); + } + case 'win32': { + const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); + return path_1.default.join(localAppData, name, 'Data'); + } + default: { + const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); + return path_1.default.join(localData, name); + } + } +} +exports.appDataPath = appDataPath; diff --git a/deps/npm/node_modules/sigstore/dist/util/index.d.ts b/deps/npm/node_modules/sigstore/dist/util/index.d.ts index 786a19630c..02e4ddc69b 100644 --- a/deps/npm/node_modules/sigstore/dist/util/index.d.ts +++ b/deps/npm/node_modules/sigstore/dist/util/index.d.ts @@ -1,3 +1,4 @@ +export * as appdata from './appdata'; export * as crypto from './crypto'; export * as dsse from './dsse'; export * as encoding from './encoding'; diff --git a/deps/npm/node_modules/sigstore/dist/util/index.js b/deps/npm/node_modules/sigstore/dist/util/index.js index 2c02116cbf..74ef9c0b11 100644 --- a/deps/npm/node_modules/sigstore/dist/util/index.js +++ b/deps/npm/node_modules/sigstore/dist/util/index.js @@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; +exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.appdata = void 0; /* Copyright 2022 The Sigstore Authors. @@ -39,6 +39,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ +exports.appdata = __importStar(require("./appdata")); exports.crypto = __importStar(require("./crypto")); exports.dsse = __importStar(require("./dsse")); exports.encoding = __importStar(require("./encoding")); diff --git a/deps/npm/node_modules/sigstore/dist/verify.js b/deps/npm/node_modules/sigstore/dist/verify.js index 1bcef03b5f..9d21b553ac 100644 --- a/deps/npm/node_modules/sigstore/dist/verify.js +++ b/deps/npm/node_modules/sigstore/dist/verify.js @@ -37,7 +37,7 @@ class Verifier { // Verifies the bundle signature, the bundle's certificate chain (if present) // and the bundle's transparency log entries. verify(bundle, options, data) { - this.verifyArtifactSignature(bundle, options, data); + this.verifyArtifactSignature(bundle, data); if (sigstore.isBundleWithCertificateChain(bundle)) { this.verifySigningCertificate(bundle, options); } @@ -45,8 +45,8 @@ class Verifier { } // Performs bundle signature verification. Determines the type of the bundle // content and delegates to the appropriate signature verification function. 
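
(Stepping back to the `appdata` util added earlier in this hunk: it resolves a per-platform data directory. A standalone restatement with a usage line, mirroring the switch in the new `util/appdata.js`; the example output in the final comment assumes a Linux home directory with `XDG_DATA_HOME` unset:)

```js
// Mirrors the platform switch in the new util/appdata.js.
const os = require('os')
const path = require('path')

function appDataPath (name) {
  const homedir = os.homedir()
  switch (process.platform) {
    case 'darwin': // ~/Library/Application Support/<name>
      return path.join(homedir, 'Library', 'Application Support', name)
    case 'win32': { // %LOCALAPPDATA%\<name>\Data, with a fallback
      const localAppData = process.env.LOCALAPPDATA || path.join(homedir, 'AppData', 'Local')
      return path.join(localAppData, name, 'Data')
    }
    default: { // $XDG_DATA_HOME/<name>, with a fallback
      const localData = process.env.XDG_DATA_HOME || path.join(homedir, '.local', 'share')
      return path.join(localData, name)
    }
  }
}

console.log(appDataPath('sigstore')) // e.g. /home/alice/.local/share/sigstore
```
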
- verifyArtifactSignature(bundle, options, data) { - const publicKey = this.getPublicKey(bundle, options); + verifyArtifactSignature(bundle, data) { + const publicKey = this.getPublicKey(bundle); switch (bundle.content?.$case) { case 'messageSignature': if (!data) { @@ -79,7 +79,7 @@ class Verifier { // Returns the public key which will be used to verify the bundle signature. // The public key is selected based on the verification material in the bundle // and the options provided. - getPublicKey(bundle, options) { + getPublicKey(bundle) { // Select the key which will be used to verify the signature switch (bundle.verificationMaterial?.content?.$case) { // If the bundle contains a certificate chain, the public key is the @@ -89,7 +89,7 @@ class Verifier { // If the bundle contains a public key hint, the public key is selected // from the list of trusted keys in the options case 'publicKey': - return getPublicKeyFromHint(bundle.verificationMaterial.content.publicKey, options, this.keySelector); + return getPublicKeyFromHint(bundle.verificationMaterial.content.publicKey, this.keySelector); } } } @@ -101,7 +101,7 @@ function getPublicKeyFromCertificateChain(certificateChain) { } // Retrieves the public key through the key selector callback, passing the // public key hint from the bundle -function getPublicKeyFromHint(publicKeyID, options, keySelector) { +function getPublicKeyFromHint(publicKeyID, keySelector) { const key = keySelector(publicKeyID.hint); if (!key) { throw new error_1.VerificationError('no public key found for signature verification'); diff --git a/deps/npm/node_modules/sigstore/package.json b/deps/npm/node_modules/sigstore/package.json index 1a5960822e..b0e856df9a 100644 --- a/deps/npm/node_modules/sigstore/package.json +++ b/deps/npm/node_modules/sigstore/package.json @@ -1,6 +1,6 @@ { "name": "sigstore", - "version": "1.0.0", + "version": "1.1.1", "description": "code-signing for npm packages", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -12,7 +12,7 @@ "lint": "eslint --fix --ext .ts src/**", "lint:check": "eslint --max-warnings 0 --ext .ts src/**", "format": "prettier --write \"src/**/*\"", - "codegen:sigstore": "./hack/generate-sigstore-types", + "release": "npm run build && changeset publish", "codegen:rekor": "./hack/generate-rekor-types" }, "bin": { @@ -22,6 +22,9 @@ "type": "git", "url": "git+https://github.com/sigstore/sigstore-js.git" }, + "publishConfig": { + "provenance": true + }, "files": [ "dist", "store" @@ -33,6 +36,7 @@ }, "homepage": "https://github.com/sigstore/sigstore-js#readme", "devDependencies": { + "@changesets/cli": "^2.26.0", "@tsconfig/node14": "^1.0.3", "@types/jest": "^29.4.0", "@types/make-fetch-happen": "^10.0.0", @@ -43,13 +47,14 @@ "eslint-config-prettier": "^8.5.0", "eslint-plugin-prettier": "^4.0.0", "jest": "^29.4.1", - "json-schema-to-typescript": "^11.0.2", + "json-schema-to-typescript": "^12.0.0", "nock": "^13.2.4", "prettier": "^2.6.2", "ts-jest": "^29.0.5", "typescript": "^4.7.2" }, "dependencies": { + "@sigstore/protobuf-specs": "^0.1.0", "make-fetch-happen": "^11.0.1", "tuf-js": "^1.0.0" }, diff --git a/deps/npm/node_modules/sigstore/store/map.json b/deps/npm/node_modules/sigstore/store/map.json deleted file mode 100644 index 620bf0bedb..0000000000 --- a/deps/npm/node_modules/sigstore/store/map.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "repositories": { - "public-good-instance": [ - "https://sigstore-tuf-root.storage.googleapis.com" - ] - }, - "mapping": [ - { - "paths": [ - "*" - ], - "repositories": [ - 
"public-good-instance" - ], - "terminating": true, - "threshold": 1 - } - ] -} diff --git a/deps/npm/node_modules/spdx-correct/index.js b/deps/npm/node_modules/spdx-correct/index.js index c51a79f5d1..4d9037e0cc 100644 --- a/deps/npm/node_modules/spdx-correct/index.js +++ b/deps/npm/node_modules/spdx-correct/index.js @@ -25,6 +25,18 @@ function valid (string) { } } +// Sorting function that orders the given array of transpositions such +// that a transposition with the longer pattern comes before a transposition +// with a shorter pattern. This is to prevent e.g. the transposition +// ["General Public License", "GPL"] from matching to "Lesser General Public License" +// before a longer and more accurate transposition ["Lesser General Public License", "LGPL"] +// has a chance to be recognized. +function sortTranspositions(a, b) { + var length = b[0].length - a[0].length + if (length !== 0) return length + return a[0].toUpperCase().localeCompare(b[0].toUpperCase()) +} + // Common transpositions of license identifier acronyms var transpositions = [ ['APGL', 'AGPL'], @@ -41,8 +53,17 @@ var transpositions = [ ['GUN', 'GPL'], ['+', ''], ['GNU GPL', 'GPL'], + ['GNU LGPL', 'LGPL'], ['GNU/GPL', 'GPL'], ['GNU GLP', 'GPL'], + ['GNU LESSER GENERAL PUBLIC LICENSE', 'LGPL'], + ['GNU Lesser General Public License', 'LGPL'], + ['GNU LESSER GENERAL PUBLIC LICENSE', 'LGPL-2.1'], + ['GNU Lesser General Public License', 'LGPL-2.1'], + ['LESSER GENERAL PUBLIC LICENSE', 'LGPL'], + ['Lesser General Public License', 'LGPL'], + ['LESSER GENERAL PUBLIC LICENSE', 'LGPL-2.1'], + ['Lesser General Public License', 'LGPL-2.1'], ['GNU General Public License', 'GPL'], ['Gnu public license', 'GPL'], ['GNU Public License', 'GPL'], @@ -51,8 +72,9 @@ var transpositions = [ ['Mozilla Public License', 'MPL'], ['Universal Permissive License', 'UPL'], ['WTH', 'WTF'], + ['WTFGPL', 'WTFPL'], ['-License', ''] -] +].sort(sortTranspositions) var TRANSPOSED = 0 var CORRECT = 1 @@ -254,7 +276,7 @@ var lastResorts = [ ['MPL', 'MPL-2.0'], ['X11', 'X11'], ['ZLIB', 'Zlib'] -].concat(licensesWithOneVersion) +].concat(licensesWithOneVersion).sort(sortTranspositions) var SUBSTRING = 0 var IDENTIFIER = 1 diff --git a/deps/npm/node_modules/spdx-correct/package.json b/deps/npm/node_modules/spdx-correct/package.json index 35c68bdaa6..d77615638b 100644 --- a/deps/npm/node_modules/spdx-correct/package.json +++ b/deps/npm/node_modules/spdx-correct/package.json @@ -1,24 +1,17 @@ { "name": "spdx-correct", "description": "correct invalid SPDX expressions", - "version": "3.1.1", - "author": "Kyle E. Mitchell <kyle@kemitchell.com> (https://kemitchell.com)", - "contributors": [ - "Kyle E. 
Mitchell <kyle@kemitchell.com> (https://kemitchell.com)", - "Christian Zommerfelds <aero_super@yahoo.com>", - "Tal Einat <taleinat@gmail.com>", - "Dan Butvinik <butvinik@outlook.com>" - ], + "version": "3.2.0", "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" }, "devDependencies": { - "defence-cli": "^2.0.1", + "defence-cli": "^3.0.1", "replace-require-self": "^1.0.0", - "standard": "^11.0.0", - "standard-markdown": "^4.0.2", - "tape": "^4.9.0" + "standard": "^14.3.4", + "standard-markdown": "^6.0.0", + "tape": "^5.0.1" }, "files": [ "index.js" diff --git a/deps/npm/node_modules/tuf-js/LICENSE b/deps/npm/node_modules/tuf-js/LICENSE index f28ab0914a..420700f5d3 100644 --- a/deps/npm/node_modules/tuf-js/LICENSE +++ b/deps/npm/node_modules/tuf-js/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright GitHub +Copyright (c) 2022 GitHub and the TUF Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/deps/npm/node_modules/tuf-js/dist/utils/config.d.ts b/deps/npm/node_modules/tuf-js/dist/config.d.ts index 2a906c7c28..2a906c7c28 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/config.d.ts +++ b/deps/npm/node_modules/tuf-js/dist/config.d.ts diff --git a/deps/npm/node_modules/tuf-js/dist/utils/config.js b/deps/npm/node_modules/tuf-js/dist/config.js index c2d970e256..c2d970e256 100644 --- a/deps/npm/node_modules/tuf-js/dist/utils/config.js +++ b/deps/npm/node_modules/tuf-js/dist/config.js diff --git a/deps/npm/node_modules/tuf-js/dist/error.d.ts b/deps/npm/node_modules/tuf-js/dist/error.d.ts index 130e49ab9d..3a42f0441b 100644 --- a/deps/npm/node_modules/tuf-js/dist/error.d.ts +++ b/deps/npm/node_modules/tuf-js/dist/error.d.ts @@ -6,20 +6,12 @@ export declare class PersistError extends Error { } export declare class RepositoryError extends Error { } -export declare class UnsignedMetadataError extends RepositoryError { -} export declare class BadVersionError extends RepositoryError { } export declare class EqualVersionError extends BadVersionError { } export declare class ExpiredMetadataError extends RepositoryError { } -export declare class LengthOrHashMismatchError extends RepositoryError { -} -export declare class CryptoError extends Error { -} -export declare class UnsupportedAlgorithmError extends CryptoError { -} export declare class DownloadError extends Error { } export declare class DownloadLengthMismatchError extends DownloadError { diff --git a/deps/npm/node_modules/tuf-js/dist/error.js b/deps/npm/node_modules/tuf-js/dist/error.js index ce7ca5ea06..f4b10fa202 100644 --- a/deps/npm/node_modules/tuf-js/dist/error.js +++ b/deps/npm/node_modules/tuf-js/dist/error.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.UnsignedMetadataError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; +exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0; // An error 
about insufficient values class ValueError extends Error { } @@ -17,10 +17,6 @@ exports.PersistError = PersistError; class RepositoryError extends Error { } exports.RepositoryError = RepositoryError; -// An error about metadata object with insufficient threshold of signatures. -class UnsignedMetadataError extends RepositoryError { -} -exports.UnsignedMetadataError = UnsignedMetadataError; // An error for metadata that contains an invalid version number. class BadVersionError extends RepositoryError { } @@ -33,16 +29,6 @@ exports.EqualVersionError = EqualVersionError; class ExpiredMetadataError extends RepositoryError { } exports.ExpiredMetadataError = ExpiredMetadataError; -// An error while checking the length and hash values of an object. -class LengthOrHashMismatchError extends RepositoryError { -} -exports.LengthOrHashMismatchError = LengthOrHashMismatchError; -class CryptoError extends Error { -} -exports.CryptoError = CryptoError; -class UnsupportedAlgorithmError extends CryptoError { -} -exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; //----- Download Errors ------------------------------------------------------- // An error occurred while attempting to download a file. class DownloadError extends Error { diff --git a/deps/npm/node_modules/tuf-js/dist/fetcher.d.ts b/deps/npm/node_modules/tuf-js/dist/fetcher.d.ts index 2b52cbef52..126e9eb11a 100644 --- a/deps/npm/node_modules/tuf-js/dist/fetcher.d.ts +++ b/deps/npm/node_modules/tuf-js/dist/fetcher.d.ts @@ -1,7 +1,11 @@ /// <reference types="node" /> /// <reference types="node" /> type DownloadFileHandler<T> = (file: string) => Promise<T>; -export declare abstract class BaseFetcher { +export interface Fetcher { + downloadFile<T>(url: string, maxLength: number, handler: DownloadFileHandler<T>): Promise<T>; + downloadBytes(url: string, maxLength: number): Promise<Buffer>; +} +export declare abstract class BaseFetcher implements Fetcher { abstract fetch(url: string): Promise<NodeJS.ReadableStream>; downloadFile<T>(url: string, maxLength: number, handler: DownloadFileHandler<T>): Promise<T>; downloadBytes(url: string, maxLength: number): Promise<Buffer>; @@ -10,7 +14,7 @@ interface FetcherOptions { timeout?: number; retries?: number; } -export declare class Fetcher extends BaseFetcher { +export declare class DefaultFetcher extends BaseFetcher { private timeout?; private retries?; constructor(options?: FetcherOptions); diff --git a/deps/npm/node_modules/tuf-js/dist/fetcher.js b/deps/npm/node_modules/tuf-js/dist/fetcher.js index cb42ab22a1..7a7405ac53 100644 --- a/deps/npm/node_modules/tuf-js/dist/fetcher.js +++ b/deps/npm/node_modules/tuf-js/dist/fetcher.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Fetcher = exports.BaseFetcher = void 0; +exports.DefaultFetcher = exports.BaseFetcher = void 0; const fs_1 = __importDefault(require("fs")); const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); const util_1 = __importDefault(require("util")); @@ -51,7 +51,7 @@ class BaseFetcher { } } exports.BaseFetcher = BaseFetcher; -class Fetcher extends BaseFetcher { +class DefaultFetcher extends BaseFetcher { constructor(options = {}) { super(); this.timeout = options.timeout; @@ -68,7 +68,7 @@ class Fetcher extends BaseFetcher { return response.body; } } -exports.Fetcher = Fetcher; +exports.DefaultFetcher = DefaultFetcher; const writeBufferToStream = async (stream, buffer) => { return new Promise((resolve, reject) => { stream.write(buffer, (err) => { diff --git a/deps/npm/node_modules/tuf-js/dist/index.d.ts b/deps/npm/node_modules/tuf-js/dist/index.d.ts index bfe3adcac2..b4d1bc2b9c 100644 --- a/deps/npm/node_modules/tuf-js/dist/index.d.ts +++ b/deps/npm/node_modules/tuf-js/dist/index.d.ts @@ -1,3 +1,3 @@ -export { BaseFetcher } from './fetcher'; -export { TargetFile } from './models/file'; +export { TargetFile } from '@tufjs/models'; +export { BaseFetcher, Fetcher } from './fetcher'; export { Updater } from './updater'; diff --git a/deps/npm/node_modules/tuf-js/dist/index.js b/deps/npm/node_modules/tuf-js/dist/index.js index 6245d1724a..5a83b91f35 100644 --- a/deps/npm/node_modules/tuf-js/dist/index.js +++ b/deps/npm/node_modules/tuf-js/dist/index.js @@ -1,9 +1,9 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.Updater = exports.TargetFile = exports.BaseFetcher = void 0; +exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0; +var models_1 = require("@tufjs/models"); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } }); var fetcher_1 = require("./fetcher"); Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } }); -var file_1 = require("./models/file"); -Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); var updater_1 = require("./updater"); Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } }); diff --git a/deps/npm/node_modules/tuf-js/dist/models/index.d.ts b/deps/npm/node_modules/tuf-js/dist/models/index.d.ts deleted file mode 100644 index 58d7791592..0000000000 --- a/deps/npm/node_modules/tuf-js/dist/models/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { Metadata } from './metadata'; -export { Root } from './root'; -export { Snapshot } from './snapshot'; -export { Targets } from './targets'; -export { Timestamp } from './timestamp'; diff --git a/deps/npm/node_modules/tuf-js/dist/models/index.js b/deps/npm/node_modules/tuf-js/dist/models/index.js deleted file mode 100644 index aa3d828cf9..0000000000 --- a/deps/npm/node_modules/tuf-js/dist/models/index.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = exports.Targets = exports.Snapshot = exports.Root = exports.Metadata = void 0; -var metadata_1 = require("./metadata"); -Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); -var root_1 = require("./root"); 
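
(Returning to the fetcher changes above: exporting the `Fetcher` interface and renaming the concrete class to `DefaultFetcher` makes it easier to plug in a custom transport. A hedged sketch of one, using only APIs visible in this diff; the global `fetch` and `Readable.fromWeb` calls assume Node 18+, and the URLs and paths are hypothetical:)

```js
// Sketch: a custom fetcher for tuf-js. BaseFetcher supplies downloadFile
// and downloadBytes on top of the abstract fetch(), which must resolve
// to a Node readable stream.
const { Readable } = require('stream')
const { BaseFetcher, Updater } = require('tuf-js')

class LoggingFetcher extends BaseFetcher {
  async fetch (url) {
    console.log('TUF fetch:', url)
    const res = await fetch(url)
    if (!res.ok) throw new Error(`HTTP ${res.status} fetching ${url}`)
    return Readable.fromWeb(res.body) // convert web stream to Node stream
  }
}

const updater = new Updater({
  metadataDir: './tuf-cache', // must already contain a trusted root.json
  metadataBaseUrl: 'https://tuf.example.com/metadata',
  fetcher: new LoggingFetcher(), // any object satisfying Fetcher works too
})
```
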
-Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); -var snapshot_1 = require("./snapshot"); -Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); -var targets_1 = require("./targets"); -Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); -var timestamp_1 = require("./timestamp"); -Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/deps/npm/node_modules/tuf-js/dist/store.d.ts b/deps/npm/node_modules/tuf-js/dist/store.d.ts index a6e20ae559..aed13b300d 100644 --- a/deps/npm/node_modules/tuf-js/dist/store.d.ts +++ b/deps/npm/node_modules/tuf-js/dist/store.d.ts @@ -1,5 +1,5 @@ /// <reference types="node" /> -import { Metadata, Root, Snapshot, Targets, Timestamp } from './models'; +import { Metadata, Root, Snapshot, Targets, Timestamp } from '@tufjs/models'; export declare class TrustedMetadataStore { private trustedSet; private referenceTime; diff --git a/deps/npm/node_modules/tuf-js/dist/store.js b/deps/npm/node_modules/tuf-js/dist/store.js index 351a196173..8567336108 100644 --- a/deps/npm/node_modules/tuf-js/dist/store.js +++ b/deps/npm/node_modules/tuf-js/dist/store.js @@ -1,9 +1,8 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.TrustedMetadataStore = void 0; +const models_1 = require("@tufjs/models"); const error_1 = require("./error"); -const models_1 = require("./models"); -const types_1 = require("./utils/types"); class TrustedMetadataStore { constructor(rootData) { this.trustedSet = {}; @@ -32,18 +31,18 @@ class TrustedMetadataStore { } updateRoot(bytesBuffer) { const data = JSON.parse(bytesBuffer.toString('utf8')); - const newRoot = models_1.Metadata.fromJSON(types_1.MetadataKind.Root, data); - if (newRoot.signed.type != types_1.MetadataKind.Root) { + const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (newRoot.signed.type != models_1.MetadataKind.Root) { throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`); } // Client workflow 5.4: check for arbitrary software attack - this.root.verifyDelegate(types_1.MetadataKind.Root, newRoot); + this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot); // Client workflow 5.5: check for rollback attack if (newRoot.signed.version != this.root.signed.version + 1) { throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`); } // Check that new root is signed by self - newRoot.verifyDelegate(types_1.MetadataKind.Root, newRoot); + newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot); // Client workflow 5.7: set new root as trusted root this.trustedSet.root = newRoot; return newRoot; @@ -56,12 +55,12 @@ class TrustedMetadataStore { throw new error_1.ExpiredMetadataError('Final root.json is expired'); } const data = JSON.parse(bytesBuffer.toString('utf8')); - const newTimestamp = models_1.Metadata.fromJSON(types_1.MetadataKind.Timestamp, data); - if (newTimestamp.signed.type != types_1.MetadataKind.Timestamp) { + const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data); + if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) { throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`); } // Client workflow 5.4.2: check for arbitrary software attack - 
this.root.verifyDelegate(types_1.MetadataKind.Timestamp, newTimestamp); + this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp); if (this.timestamp) { // Prevent rolling back timestamp version // Client workflow 5.4.3.1: check for rollback attack @@ -104,12 +103,12 @@ class TrustedMetadataStore { snapshotMeta.verify(bytesBuffer); } const data = JSON.parse(bytesBuffer.toString('utf8')); - const newSnapshot = models_1.Metadata.fromJSON(types_1.MetadataKind.Snapshot, data); - if (newSnapshot.signed.type != types_1.MetadataKind.Snapshot) { + const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data); + if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) { throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`); } // Client workflow 5.5.3: check for arbitrary software attack - this.root.verifyDelegate(types_1.MetadataKind.Snapshot, newSnapshot); + this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot); // version check against meta version (5.5.4) is deferred to allow old // snapshot to be used in rollback protection // Client workflow 5.5.5: check for rollback attack @@ -149,8 +148,8 @@ class TrustedMetadataStore { // Client workflow 5.6.2: check against snapshot role's targets hash meta.verify(bytesBuffer); const data = JSON.parse(bytesBuffer.toString('utf8')); - const newDelegate = models_1.Metadata.fromJSON(types_1.MetadataKind.Targets, data); - if (newDelegate.signed.type != types_1.MetadataKind.Targets) { + const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data); + if (newDelegate.signed.type != models_1.MetadataKind.Targets) { throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`); } // Client workflow 5.6.3: check for arbitrary software attack @@ -170,11 +169,11 @@ class TrustedMetadataStore { // Note that an expired initial root is still considered valid. 
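
(The version comparisons above are the heart of TUF's rollback protection. A minimal restatement with plain objects, keeping the workflow step numbers from the comments in store.js; note that the real client treats an equal timestamp version as `EqualVersionError`, meaning "already up to date", not an attack:)

```js
// Illustrative restatement of the rollback checks above, using plain
// objects instead of the Metadata models.
function checkRootRollback (trusted, incoming) {
  // Client workflow 5.5: root versions must increase by exactly one.
  if (incoming.version !== trusted.version + 1) {
    throw new Error(`Expected version ${trusted.version + 1}, got ${incoming.version}`)
  }
}

function checkTimestampRollback (trusted, incoming) {
  // Client workflow 5.4.3.1: a lower version is a rollback attack.
  // (Equal versions short-circuit the update in the real client.)
  if (incoming.version < trusted.version) {
    throw new Error(`New timestamp version ${incoming.version} must be >= ${trusted.version}`)
  }
}

checkRootRollback({ version: 3 }, { version: 4 })       // ok
checkTimestampRollback({ version: 7 }, { version: 8 })  // ok
```
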
loadTrustedRoot(bytesBuffer) { const data = JSON.parse(bytesBuffer.toString('utf8')); - const root = models_1.Metadata.fromJSON(types_1.MetadataKind.Root, data); - if (root.signed.type != types_1.MetadataKind.Root) { + const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data); + if (root.signed.type != models_1.MetadataKind.Root) { throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`); } - root.verifyDelegate(types_1.MetadataKind.Root, root); + root.verifyDelegate(models_1.MetadataKind.Root, root); this.trustedSet['root'] = root; } checkFinalTimestamp() { diff --git a/deps/npm/node_modules/tuf-js/dist/updater.d.ts b/deps/npm/node_modules/tuf-js/dist/updater.d.ts index e49dca22a4..9da17d7471 100644 --- a/deps/npm/node_modules/tuf-js/dist/updater.d.ts +++ b/deps/npm/node_modules/tuf-js/dist/updater.d.ts @@ -1,12 +1,12 @@ -import { BaseFetcher } from './fetcher'; -import { TargetFile } from './models/file'; -import { Config } from './utils/config'; +import { TargetFile } from '@tufjs/models'; +import { Config } from './config'; +import { Fetcher } from './fetcher'; export interface UpdaterOptions { metadataDir: string; metadataBaseUrl: string; targetDir?: string; targetBaseUrl?: string; - fetcher?: BaseFetcher; + fetcher?: Fetcher; config?: Partial<Config>; } export declare class Updater { diff --git a/deps/npm/node_modules/tuf-js/dist/updater.js b/deps/npm/node_modules/tuf-js/dist/updater.js index 9f33c667ce..7f8b6bedee 100644 --- a/deps/npm/node_modules/tuf-js/dist/updater.js +++ b/deps/npm/node_modules/tuf-js/dist/updater.js @@ -24,13 +24,13 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Updater = void 0; +const models_1 = require("@tufjs/models"); const fs = __importStar(require("fs")); const path = __importStar(require("path")); +const config_1 = require("./config"); const error_1 = require("./error"); const fetcher_1 = require("./fetcher"); const store_1 = require("./store"); -const config_1 = require("./utils/config"); -const types_1 = require("./utils/types"); class Updater { constructor(options) { const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options; @@ -38,12 +38,12 @@ class Updater { this.metadataBaseUrl = metadataBaseUrl; this.targetDir = targetDir; this.targetBaseUrl = targetBaseUrl; - const data = this.loadLocalMetadata(types_1.MetadataKind.Root); + const data = this.loadLocalMetadata(models_1.MetadataKind.Root); this.trustedSet = new store_1.TrustedMetadataStore(data); this.config = { ...config_1.defaultConfig, ...config }; this.fetcher = fetcher || - new fetcher_1.Fetcher({ + new fetcher_1.DefaultFetcher({ timeout: this.config.fetchTimeout, retries: this.config.fetchRetries, }); @@ -52,7 +52,7 @@ class Updater { await this.loadRoot(); await this.loadTimestamp(); await this.loadSnapshot(); - await this.loadTargets(types_1.MetadataKind.Targets, types_1.MetadataKind.Root); + await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root); } // Returns the TargetFile instance with information for the given target path. 
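
(The refresh sequence above, root then timestamp then snapshot then targets, is the complete client workflow. An end-to-end usage sketch: `refresh`, `findCachedTarget`, and `downloadTarget` all appear elsewhere in this diff, while `getTargetInfo` is an assumed name for the accessor described by the comment above:)

```js
// Usage sketch for the tuf-js Updater; URLs and paths are hypothetical.
const { Updater } = require('tuf-js')

async function fetchTarget (targetPath) {
  const updater = new Updater({
    metadataDir: './tuf-cache', // must hold a trusted root.json already
    metadataBaseUrl: 'https://tuf.example.com/metadata',
    targetDir: './targets',
    targetBaseUrl: 'https://tuf.example.com/targets',
  })
  await updater.refresh() // root -> timestamp -> snapshot -> targets
  const info = await updater.getTargetInfo(targetPath)
  if (!info) throw new Error(`no such target: ${targetPath}`)
  // Serve from cache when a verified copy is already on disk.
  return (await updater.findCachedTarget(info)) || updater.downloadTarget(info)
}
```
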
// @@ -123,7 +123,7 @@ class Updater { // Client workflow 5.3.4 - 5.4.7 this.trustedSet.updateRoot(bytesData); // Client workflow 5.3.8: persist root metadata file - this.persistMetadata(types_1.MetadataKind.Root, bytesData); + this.persistMetadata(models_1.MetadataKind.Root, bytesData); } catch (error) { break; @@ -135,7 +135,7 @@ class Updater { async loadTimestamp() { // Load local and remote timestamp metadata try { - const data = this.loadLocalMetadata(types_1.MetadataKind.Timestamp); + const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp); this.trustedSet.updateTimestamp(data); } catch (error) { @@ -159,14 +159,14 @@ class Updater { throw error; } // Client workflow 5.4.5: persist timestamp metadata - this.persistMetadata(types_1.MetadataKind.Timestamp, bytesData); + this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData); } // Load local and remote snapshot metadata. // Client workflow 5.5: update snapshot role async loadSnapshot() { //Load local (and if needed remote) snapshot metadata try { - const data = this.loadLocalMetadata(types_1.MetadataKind.Snapshot); + const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot); this.trustedSet.updateSnapshot(data, true); } catch (error) { @@ -185,7 +185,7 @@ class Updater { // Client workflow 5.5.2 - 5.5.6 this.trustedSet.updateSnapshot(bytesData); // Client workflow 5.5.7: persist snapshot metadata file - this.persistMetadata(types_1.MetadataKind.Snapshot, bytesData); + this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData); } catch (error) { throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`); @@ -236,8 +236,8 @@ class Updater { // is needed to load and verify the delegated targets metadata. const delegationsToVisit = [ { - roleName: types_1.MetadataKind.Targets, - parentRoleName: types_1.MetadataKind.Root, + roleName: models_1.MetadataKind.Targets, + parentRoleName: models_1.MetadataKind.Root, }, ]; const visitedRoleNames = new Set(); diff --git a/deps/npm/node_modules/tuf-js/dist/utils/index.d.ts b/deps/npm/node_modules/tuf-js/dist/utils/index.d.ts deleted file mode 100644 index e2232bc5cc..0000000000 --- a/deps/npm/node_modules/tuf-js/dist/utils/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -export * as config from './config'; -export * as guard from './guard'; -export * as json from './json'; -export * as signer from './signer'; -export * as types from './types'; diff --git a/deps/npm/node_modules/tuf-js/dist/utils/types.js b/deps/npm/node_modules/tuf-js/dist/utils/types.js deleted file mode 100644 index 469f580743..0000000000 --- a/deps/npm/node_modules/tuf-js/dist/utils/types.js +++ /dev/null @@ -1,10 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MetadataKind = void 0; -var MetadataKind; -(function (MetadataKind) { - MetadataKind["Root"] = "root"; - MetadataKind["Timestamp"] = "timestamp"; - MetadataKind["Snapshot"] = "snapshot"; - MetadataKind["Targets"] = "targets"; -})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {})); diff --git a/deps/npm/node_modules/tuf-js/package.json b/deps/npm/node_modules/tuf-js/package.json index 758e71223e..29436c760f 100644 --- a/deps/npm/node_modules/tuf-js/package.json +++ b/deps/npm/node_modules/tuf-js/package.json @@ -1,56 +1,42 @@ { "name": "tuf-js", - "version": "1.0.0", + "version": "1.1.1", "description": "JavaScript implementation of The Update Framework (TUF)", "main": "dist/index.js", "types": "dist/index.d.ts", "scripts": { - "build": "tsc", - "test": 
"jest", - "test:watch": "jest --watch", - "test:ci": "jest --maxWorkers=2 --coverage", - "lint": "eslint --fix --ext .ts src/**", - "lint:check": "eslint --max-warnings 0 --ext .ts src/**", - "format": "prettier --write \"src/**/*\"" + "build": "tsc --build", + "clean": "rm -rf dist", + "test": "jest" }, "repository": { "type": "git", - "url": "git+https://github.com/github/tuf-js.git" + "url": "git+https://github.com/theupdateframework/tuf-js.git" }, "files": [ "dist" ], "keywords": [ - "tuf" + "tuf", + "security", + "update" ], "author": "bdehamer@github.com", "license": "MIT", "bugs": { - "url": "https://github.com/github/tuf-js/issues" + "url": "https://github.com/theupdateframework/tuf-js/issues" }, - "homepage": "https://github.com/github/tuf-js#readme", + "homepage": "https://github.com/theupdateframework/tuf-js/packages/client#readme", "devDependencies": { - "@tsconfig/node14": "^1.0.3", - "@types/jest": "^28.1.8", - "@types/lodash.isequal": "^4.5.6", + "@tufjs/repo-mock": "1.0.0", "@types/make-fetch-happen": "^10.0.1", - "@types/minimatch": "^5.1.2", - "@types/node": "^18.11.10", - "@typescript-eslint/eslint-plugin": "^5.45.0", - "@typescript-eslint/parser": "^5.45.0", - "eslint": "^8.28.0", - "eslint-config-prettier": "^8.5.0", - "eslint-plugin-prettier": "^4.2.1", - "http-server": "^14.1.1", - "jest": "^28.1.3", + "@types/node": "^18.14.5", "nock": "^13.2.9", - "prettier": "^2.8.0", - "ts-jest": "^28.0.8", - "typescript": "^4.9.3" + "typescript": "^4.9.5" }, "dependencies": { "make-fetch-happen": "^11.0.1", - "minimatch": "^6.1.0" + "@tufjs/models": "1.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/deps/npm/package.json b/deps/npm/package.json index 851d7de8b8..bfd702e4e7 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "9.5.1", + "version": "9.6.2", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -53,8 +53,8 @@ }, "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^6.2.3", - "@npmcli/config": "^6.1.3", + "@npmcli/arborist": "^6.2.5", + "@npmcli/config": "^6.1.4", "@npmcli/map-workspaces": "^3.0.2", "@npmcli/package-json": "^3.0.0", "@npmcli/run-script": "^6.0.0", @@ -76,19 +76,19 @@ "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", "libnpmaccess": "^7.0.2", - "libnpmdiff": "^5.0.11", - "libnpmexec": "^5.0.11", - "libnpmfund": "^4.0.11", + "libnpmdiff": "^5.0.13", + "libnpmexec": "^5.0.13", + "libnpmfund": "^4.0.13", "libnpmhook": "^9.0.3", "libnpmorg": "^5.0.3", - "libnpmpack": "^5.0.11", - "libnpmpublish": "^7.1.0", + "libnpmpack": "^5.0.13", + "libnpmpublish": "^7.1.2", "libnpmsearch": "^6.0.2", "libnpmteam": "^5.0.3", "libnpmversion": "^4.0.2", "make-fetch-happen": "^11.0.3", "minimatch": "^6.2.0", - "minipass": "^4.0.3", + "minipass": "^4.2.4", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^9.3.1", @@ -194,7 +194,7 @@ "@npmcli/git": "^4.0.1", "@npmcli/mock-registry": "^1.0.0", "@npmcli/promise-spawn": "^6.0.2", - "@npmcli/template-oss": "4.11.4", + "@npmcli/template-oss": "4.12.0", "licensee": "^10.0.0", "nock": "^13.3.0", "npm-packlist": "^7.0.4", @@ -248,7 +248,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.11.4", + "version": "4.12.0", "content": "./scripts/template-oss/root.js" }, "license": "Artistic-2.0", diff --git a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs index dfd287eaa6..270d9b631e 100644 --- a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs @@ -384,7 +384,7 @@ Object { "c": "config", "cit": "install-ci-test", "clean-install": "ci", - "clean-install-test": "cit", + "clean-install-test": "install-ci-test", "create": "init", "ddp": "dedupe", "dist-tags": "dist-tag", @@ -421,7 +421,7 @@ Object { "s": "search", "se": "search", "show": "view", - "sit": "cit", + "sit": "install-ci-test", "t": "test", "tst": "test", "udpate": "update", @@ -3239,14 +3239,14 @@ Options: [-w|--workspace <workspace-name> [-w|--workspace <workspace-name> ...]] [-ws|--workspaces] [--include-workspace-root] [--install-links] -alias: cit +aliases: cit, clean-install-test, sit Run "npm help install-ci-test" for more info \`\`\`bash npm install-ci-test -alias: cit +aliases: cit, clean-install-test, sit \`\`\` #### \`save\` diff --git a/deps/npm/test/lib/commands/access.js b/deps/npm/test/lib/commands/access.js index b0057545ba..d1839aaaef 100644 --- a/deps/npm/test/lib/commands/access.js +++ b/deps/npm/test/lib/commands/access.js @@ -30,6 +30,7 @@ t.test('completion', async t => { ]) testComp(['npm', 'access', 'grant'], ['read-only', 'read-write']) testComp(['npm', 'access', 'revoke'], []) + testComp(['npm', 'access', 'grant', ''], []) await t.rejects( access.completion({ conf: { argv: { remain: ['npm', 'access', 'foobar'] } } }), @@ -70,10 +71,16 @@ t.test('grant', t => { }) t.test('read-only', async t => { - const { npm } = await loadMockNpm(t) + const authToken = 'abcd1234' + const { npm } = await loadMockNpm(t, { + config: { + '//registry.npmjs.org/:_authToken': authToken, + }, + }) const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry'), + authorization: authToken, }) const permissions = 'read-only' registry.setPermissions({ spec: '@npmcli/test-package', team: '@npm:test-team', permissions }) @@ -84,10 +91,16 @@ t.test('grant', t => { t.test('revoke', t => { t.test('success', async t => { - const { npm } = await loadMockNpm(t) + const authToken = 'abcd1234' + const { npm } = await loadMockNpm(t, { + config: { + '//registry.npmjs.org/:_authToken': authToken, + }, + }) const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry'), + authorization: authToken, }) registry.removePermissions({ spec: '@npmcli/test-package', team: '@npm:test-team' }) await npm.exec('access', ['revoke', '@npm:test-team', '@npmcli/test-package']) diff --git a/deps/npm/test/lib/commands/run-script.js b/deps/npm/test/lib/commands/run-script.js index a265db3cc0..6e2bf22add 100644 --- a/deps/npm/test/lib/commands/run-script.js +++ b/deps/npm/test/lib/commands/run-script.js @@ -34,12 +34,12 @@ const mockRs = async (t, { windows = false, runScript, ...opts } = {}) => { } t.test('completion', async t => { - const completion = async (t, remain, pkg) => { + const completion = async (t, remain, pkg, isFish = false) => { const { npm } = await mockRs(t, pkg ? 
{ prefixDir: { 'package.json': JSON.stringify(pkg) } } : {} ) const cmd = await npm.cmd('run-script') - return cmd.completion({ conf: { argv: { remain } } }) + return cmd.completion({ conf: { argv: { remain } }, isFish }) } t.test('already have a script name', async t => { @@ -60,6 +60,13 @@ t.test('completion', async t => { }) t.strictSame(res, ['hello', 'world']) }) + + t.test('fish shell', async t => { + const res = await completion(t, ['npm', 'run'], { + scripts: { hello: 'echo hello', world: 'echo world' }, + }, true) + t.strictSame(res, ['hello\techo hello', 'world\techo world']) + }) }) t.test('fail if no package.json', async t => { diff --git a/deps/npm/test/lib/utils/audit-error.js b/deps/npm/test/lib/utils/audit-error.js index 46a9dbc38c..1cb29a0857 100644 --- a/deps/npm/test/lib/utils/audit-error.js +++ b/deps/npm/test/lib/utils/audit-error.js @@ -87,7 +87,7 @@ t.test('error, audit command, json', async t => { message: 'message', body: { response: 'body' }, method: 'POST', - uri: 'https://example.com/not/a/registry', + uri: 'https://username:password@example.com/not/a/registry', headers: { head: ['ers'], }, @@ -101,7 +101,7 @@ t.test('error, audit command, json', async t => { '{\n' + ' "message": "message",\n' + ' "method": "POST",\n' + - ' "uri": "https://example.com/not/a/registry",\n' + + ' "uri": "https://username:***@example.com/not/a/registry",\n' + ' "headers": {\n' + ' "head": [\n' + ' "ers"\n' +
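
(One last note on the audit-error change above: the expected output now masks the password portion of the registry URI. A standalone sketch of that redaction using the WHATWG URL API; npm's actual sanitization lives in its own error-message cleaning code, so this is just the idea:)

```js
// Sketch: replace the password in a URL's userinfo with '***' before
// logging, matching the updated test expectation above.
function redactUrlPassword (uri) {
  try {
    const url = new URL(uri)
    if (url.password) url.password = '***'
    return url.toString()
  } catch {
    return uri // not a parseable URL; leave it alone
  }
}

console.log(redactUrlPassword('https://username:password@example.com/not/a/registry'))
// -> https://username:***@example.com/not/a/registry
```
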