Diffstat (limited to 'deps/npm/node_modules/@npmcli')
-rw-r--r--  deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js | 121
-rw-r--r--  deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js     |  30
-rw-r--r--  deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js            |  47
-rw-r--r--  deps/npm/node_modules/@npmcli/arborist/lib/node.js                       |  37
-rw-r--r--  deps/npm/node_modules/@npmcli/arborist/package.json                      |   4
-rw-r--r--  deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js           |  15
-rw-r--r--  deps/npm/node_modules/@npmcli/run-script/lib/run-script.js               |   3
-rw-r--r--  deps/npm/node_modules/@npmcli/run-script/package.json                    |   2
8 files changed, 186 insertions(+), 73 deletions(-)
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index 54a6ff3375..579d5740da 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -7,6 +7,10 @@ const semver = require('semver')
const promiseCallLimit = require('promise-call-limit')
const getPeerSet = require('../peer-set.js')
const realpath = require('../../lib/realpath.js')
+const walkUpPath = require('walk-up-path')
+const { dirname, resolve } = require('path')
+const { promisify } = require('util')
+const readdir = promisify(require('readdir-scoped-modules'))
const debug = require('../debug.js')
const fromPath = require('../from-path.js')
@@ -182,8 +186,10 @@ module.exports = cls => class IdealTreeBuilder extends cls {
process.emit('time', 'idealTree')
- if (!options.add && !options.rm && this[_global])
- return Promise.reject(new Error('global requires an add or rm option'))
+ if (!options.add && !options.rm && !options.update && this[_global]) {
+ const er = new Error('global requires add, rm, or update option')
+ return Promise.reject(er)
+ }
// first get the virtual tree, if possible. If there's a lockfile, then
// that defines the ideal tree, unless the root package.json is not
@@ -305,7 +311,6 @@ module.exports = cls => class IdealTreeBuilder extends cls {
// cases we don't use a lockfile anyway.
// Load on a new Arborist object, so the Nodes aren't the same,
// or else it'll get super confusing when we change them!
- // Only have to mapWorkspaces if we didn't get it from actual or virtual
.then(async root => {
if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk)
await new this.constructor(this.options).loadActual({ root })
@@ -322,10 +327,10 @@ module.exports = cls => class IdealTreeBuilder extends cls {
}
[_globalRootNode] () {
- const root = this[_rootNodeFromPackage]({})
+ const root = this[_rootNodeFromPackage]({ dependencies: {} })
// this is a gross kludge to handle the fact that we don't save
// metadata on the root node in global installs, because the "root"
- // node is something like /usr/local/lib/node_modules.
+ // node is something like /usr/local/lib.
const meta = new Shrinkwrap({ path: this.path })
meta.reset()
root.meta = meta
@@ -353,9 +358,19 @@ module.exports = cls => class IdealTreeBuilder extends cls {
// If we have a list of package names to update, and we know it's
// going to update them wherever they are, add any paths into those
// named nodes to the buildIdealTree queue.
- if (this[_updateNames].length)
+ if (!this[_global] && this[_updateNames].length)
this[_queueNamedUpdates]()
+ // global updates only update the globalTop nodes, but we need to know
+ // that they're there, and not reinstall the world unnecessarily.
+ if (this[_global] && (this[_updateAll] || this[_updateNames].length)) {
+ const nm = resolve(this.path, 'node_modules')
+ for (const name of await readdir(nm)) {
+ if (this[_updateAll] || this[_updateNames].includes(name))
+ this.idealTree.package.dependencies[name] = '*'
+ }
+ }
+
if (this.auditReport && this.auditReport.size > 0)
this[_queueVulnDependents](options)
@@ -563,7 +578,8 @@ module.exports = cls => class IdealTreeBuilder extends cls {
const { meta, inventory } = this.idealTree
const ancient = meta.ancientLockfile
const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)
- if (inventory.size === 0 || !(ancient || old && this[_complete]))
+
+ if (inventory.size === 0 || !ancient && !(old && this[_complete]))
return
// if the lockfile is from node v5 or earlier, then we'll have to reload
@@ -604,6 +620,10 @@ This is a one-time fix-up, please be patient...
})
}
await promiseCallLimit(queue)
+ // yes, yes, this isn't the "original" version, but now that it's been
+ // upgraded, we need to make sure we don't do the work to upgrade it
+ // again, since it's now as new as can be.
+ meta.originalLockfileVersion = 2
this.finishTracker('idealTree:inflate')
process.emit('timeEnd', 'idealTree:inflate')
}
@@ -790,6 +810,11 @@ This is a one-time fix-up, please be patient...
}
await Promise.all(promises)
+ for (const { to } of node.edgesOut.values()) {
+ if (to && to.isLink)
+ this[_linkNodes].add(to)
+ }
+
return this[_buildDepStep]()
}
@@ -1065,14 +1090,22 @@ This is a one-time fix-up, please be patient...
let target
let canPlace = null
+ let isSource = false
+ const source = this[_peerSetSource].get(dep)
for (let check = start; check; check = check.resolveParent) {
+ // we always give the FIRST place we possibly *can* put this a little
+ // extra prioritization with peer dep overrides and deduping
+ if (check === source)
+ isSource = true
+
// if the current location has a peerDep on it, then we can't place here
// this is pretty rare to hit, since we always prefer deduping peers.
const checkEdge = check.edgesOut.get(edge.name)
if (!check.isTop && checkEdge && checkEdge.peer)
continue
- const cp = this[_canPlaceDep](dep, check, edge, peerEntryEdge, peerPath)
+ const cp = this[_canPlaceDep](dep, check, edge, peerEntryEdge, peerPath, isSource)
+ isSource = false
// anything other than a conflict is fine to proceed with
if (cp !== CONFLICT) {
@@ -1144,7 +1177,7 @@ This is a one-time fix-up, please be patient...
const oldDeps = []
for (const [name, edge] of oldChild.edgesOut.entries()) {
if (!newDep.edgesOut.has(name) && edge.to)
- oldDeps.push(edge.to)
+ oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to))
}
newDep.replace(oldChild)
this[_pruneForReplacement](newDep, oldDeps)
@@ -1245,14 +1278,17 @@ This is a one-time fix-up, please be patient...
// deps that the new node doesn't depend on but the old one did.
const invalidDeps = new Set([...node.edgesOut.values()]
.filter(e => e.to && !e.valid).map(e => e.to))
- for (const dep of oldDeps)
- invalidDeps.add(dep)
+ for (const dep of oldDeps) {
+ const set = gatherDepSet([dep], e => e.to !== dep && e.valid)
+ for (const dep of set)
+ invalidDeps.add(dep)
+ }
// ignore dependency edges from the node being replaced, but
// otherwise filter the set down to just the set with no
// dependencies from outside the set, except the node in question.
const deps = gatherDepSet(invalidDeps, edge =>
- edge.from !== node && edge.to !== node)
+ edge.from !== node && edge.to !== node && edge.valid)
// now just delete whatever's left, because it's junk
for (const dep of deps)
@@ -1279,7 +1315,7 @@ This is a one-time fix-up, please be patient...
// checking, because either we're leaving it alone, or it won't work anyway.
// When we check peers, we pass along the peerEntryEdge to track the
// original edge that caused us to load the family of peer dependencies.
- [_canPlaceDep] (dep, target, edge, peerEntryEdge = null, peerPath = []) {
+ [_canPlaceDep] (dep, target, edge, peerEntryEdge = null, peerPath = [], isSource = false) {
/* istanbul ignore next */
debug(() => {
if (!dep)
@@ -1287,8 +1323,16 @@ This is a one-time fix-up, please be patient...
})
const entryEdge = peerEntryEdge || edge
const source = this[_peerSetSource].get(dep)
- const isSource = target === source
- const { isRoot, isWorkspace } = source || {}
+ isSource = isSource || target === source
+ // if we're overriding the source, then we care if the *target* is
+ // ours, even if it wasn't actually the original source, since we
+ // are depending on something that has a dep that can't go in its own
+ // folder. for example, a -> b, b -> PEER(a). Even though a is the
+ // source, b has to be installed up a level, and if the root package
+ // depends on a, and it has a conflict, it's our problem. So, the root
+ // (or whatever is bringing in a) becomes the "effective source" for
+ // the purposes of this calculation.
+ const { isRoot, isWorkspace } = isSource ? target : source || {}
const isMine = isRoot || isWorkspace
// Useful testing thingie right here.
@@ -1313,7 +1357,7 @@ This is a one-time fix-up, please be patient...
const { version: newVer } = dep
const tryReplace = curVer && newVer && semver.gte(newVer, curVer)
if (tryReplace && dep.canReplace(current)) {
- const res = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath)
+ const res = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath, isSource)
/* istanbul ignore else - It's extremely rare that a replaceable
* node would be a conflict, if the current one wasn't a conflict,
* but it is theoretically possible if peer deps are pinned. In
@@ -1333,7 +1377,7 @@ This is a one-time fix-up, please be patient...
// a bit harder to be singletons.
const preferDedupe = this[_preferDedupe] || edge.peer
if (preferDedupe && !tryReplace && dep.canReplace(current)) {
- const res = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath)
+ const res = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath, isSource)
/* istanbul ignore else - It's extremely rare that a replaceable
* node would be a conflict, if the current one wasn't a conflict,
* but it is theoretically possible if peer deps are pinned. In
@@ -1401,7 +1445,7 @@ This is a one-time fix-up, please be patient...
}
}
if (canReplace) {
- const ret = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath)
+ const ret = this[_canPlacePeers](dep, target, edge, REPLACE, peerEntryEdge, peerPath, isSource)
/* istanbul ignore else - extremely rare that the peer set would
* conflict if we can replace the node in question, but theoretically
* possible, if peer deps are pinned aggressively. */
@@ -1462,14 +1506,14 @@ This is a one-time fix-up, please be patient...
}
// no objections! ok to place here
- return this[_canPlacePeers](dep, target, edge, OK, peerEntryEdge, peerPath)
+ return this[_canPlacePeers](dep, target, edge, OK, peerEntryEdge, peerPath, isSource)
}
// make sure the family of peer deps can live here alongside it.
// this doesn't guarantee that THIS solution will be the one we take,
// but it does establish that SOME solution exists at this level in
// the tree.
- [_canPlacePeers] (dep, target, edge, ret, peerEntryEdge, peerPath) {
+ [_canPlacePeers] (dep, target, edge, ret, peerEntryEdge, peerPath, isSource) {
// do not go in cycles when we're resolving a peer group
if (!dep.parent || peerEntryEdge && peerPath.includes(dep))
return ret
@@ -1481,7 +1525,7 @@ This is a one-time fix-up, please be patient...
if (!peerEdge.peer || !peerEdge.to)
continue
const peer = peerEdge.to
- const canPlacePeer = this[_canPlaceDep](peer, target, peerEdge, entryEdge, peerPath)
+ const canPlacePeer = this[_canPlaceDep](peer, target, peerEdge, entryEdge, peerPath, isSource)
if (canPlacePeer !== CONFLICT)
continue
@@ -1514,34 +1558,35 @@ This is a one-time fix-up, please be patient...
const external = /^\.\.(\/|$)/.test(loc)
- if (external && !this[_follow]) {
- // outside the root, somebody else's problem, ignore it
- continue
- }
-
if (!link.target.parent && !link.target.fsParent) {
- // the fsParent MUST be some node in the tree, possibly the root.
- // find it by walking up. Note that this is where its deps may
- // end up being installed, if possible.
- const parts = loc.split('/')
- for (let p = parts.length - 1; p > -1; p--) {
- const path = parts.slice(0, p).join('/')
- if (!path && external)
- break
+ // the fsParent likely some node in the tree, possibly the root,
+ // unless it is external. find it by walking up. Note that this
+ // is where its deps may end up being installed, if possible.
+ for (const p of walkUpPath(dirname(realpath))) {
+ const path = relpath(this.path, p)
const node = !path ? this.idealTree
: this.idealTree.inventory.get(path)
if (node) {
link.target.fsParent = node
this.addTracker('idealTree', link.target.name, link.target.location)
this[_depsQueue].push(link.target)
- p = -1
+ break
}
}
}
- // didn't find a parent for it, but we're filling in external
- // link targets, so go ahead and process it.
- if (this[_follow] && !link.target.parent && !link.target.fsParent) {
+ // outside the root, somebody else's problem, ignore it
+ if (external && !this[_follow])
+ continue
+
+ // didn't find a parent for it or it has not been seen yet
+ // so go ahead and process it.
+ const unseenLink = (link.target.parent || link.target.fsParent)
+ && !this[_depsSeen].has(link.target)
+ if (this[_follow]
+ && !link.target.parent
+ && !link.target.fsParent
+ || unseenLink) {
this.addTracker('idealTree', link.target.name, link.target.location)
this[_depsQueue].push(link.target)
}
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
index 14c9609275..e335bdadd4 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
@@ -185,12 +185,6 @@ module.exports = cls => class VirtualLoader extends cls {
// links is the set of metadata, and nodes is the map of non-Link nodes
// Set the targets to nodes in the set, if we have them (we might not)
async [resolveLinks] (links, nodes) {
- // now we've loaded the root, and all real nodes
- // link up the links
- const {meta} = this.virtualTree
- const {loadedFromDisk, originalLockfileVersion} = meta
- const oldLockfile = loadedFromDisk && !(originalLockfileVersion >= 2)
-
for (const [location, meta] of links.entries()) {
const targetPath = resolve(this.path, meta.resolved)
const targetLoc = relpath(this.path, targetPath)
@@ -198,27 +192,31 @@ module.exports = cls => class VirtualLoader extends cls {
const link = this[loadLink](location, targetLoc, target, meta)
nodes.set(location, link)
nodes.set(targetLoc, link.target)
- // legacy shrinkwraps do not store all the info we need for the target.
- // if we're loading from disk, and have a link in place, we need to
- // look in that actual folder (or at least try to) in order to get
- // the dependencies of the link target and load it properly.
- if (oldLockfile) {
- const pj = link.realpath + '/package.json'
- const pkg = await rpj(pj).catch(() => null)
- if (pkg)
- link.target.package = pkg
- }
+
+ // we always need to read the package.json for link targets
+ // because they can be changed by the local user
+ const pj = link.realpath + '/package.json'
+ const pkg = await rpj(pj).catch(() => null)
+ if (pkg)
+ link.target.package = pkg
}
}
[assignParentage] (nodes) {
for (const [location, node] of nodes) {
+ // Skip assignment of parentage for the root package
+ if (!location)
+ continue
const { path, name } = node
for (const p of walkUp(dirname(path))) {
const ploc = relpath(this.path, p)
const parent = nodes.get(ploc)
if (!parent)
continue
+ // Safety check: avoid self-assigning nodes as their own parents
+ /* istanbul ignore if - should be obviated by parentage skip check */
+ if (parent === node)
+ continue
const locTest = `${ploc}/node_modules/${name}`.replace(/^\//, '')
const ptype = location === locTest
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 57ecf071fb..92943554b4 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -10,10 +10,10 @@ const {dirname, resolve, relative} = require('path')
const {depth: dfwalk} = require('treeverse')
const fs = require('fs')
const {promisify} = require('util')
-const rename = promisify(fs.rename)
const symlink = promisify(fs.symlink)
const writeFile = promisify(fs.writeFile)
const mkdirp = require('mkdirp-infer-owner')
+const moveFile = require('@npmcli/move-file')
const rimraf = promisify(require('rimraf'))
const packageContents = require('@npmcli/installed-package-contents')
@@ -60,6 +60,7 @@ const _rollbackRetireShallowNodes = Symbol.for('rollbackRetireShallowNodes')
const _rollbackCreateSparseTree = Symbol.for('rollbackCreateSparseTree')
const _rollbackMoveBackRetiredUnchanged = Symbol.for('rollbackMoveBackRetiredUnchanged')
const _saveIdealTree = Symbol.for('saveIdealTree')
+const _saveLockFile = Symbol('saveLockFile')
const _copyIdealToActual = Symbol('copyIdealToActual')
const _addOmitsToTrashList = Symbol('addOmitsToTrashList')
const _packageLockOnly = Symbol('packageLockOnly')
@@ -172,7 +173,7 @@ module.exports = cls => class Reifier extends cls {
ignoreMissing: true,
global: true,
filter: (node, kid) => !node.isRoot ? true
- : this[_explicitRequests].has(kid),
+ : (node.edgesOut.has(kid) || this[_explicitRequests].has(kid)),
} : { ignoreMissing: true }
if (!this[_global]) {
@@ -182,7 +183,9 @@ module.exports = cls => class Reifier extends cls {
// the global install space tends to have a lot of stuff in it. don't
// load all of it, just what we care about. we won't be saving a
- // hidden lockfile in there anyway.
+ // hidden lockfile in there anyway. Note that we have to load ideal
+ // BEFORE loading actual, so that the actualOpt can use the
+ // explicitRequests which is set during buildIdealTree
return this.buildIdealTree(bitOpt)
.then(() => this.loadActual(actualOpt))
.then(() => process.emit('timeEnd', 'reify:loadTrees'))
@@ -251,7 +254,7 @@ module.exports = cls => class Reifier extends cls {
}
[_renamePath] (from, to, didMkdirp = false) {
- return rename(from, to)
+ return moveFile(from, to)
.catch(er => {
// Occasionally an expected bin file might not exist in the package,
// or a shim/symlink might have been moved aside. If we've already
@@ -261,7 +264,7 @@ module.exports = cls => class Reifier extends cls {
return didMkdirp ? null : mkdirp(dirname(to)).then(() =>
this[_renamePath](from, to, true))
} else if (er.code === 'EEXIST')
- return rimraf(to).then(() => rename(from, to))
+ return rimraf(to).then(() => moveFile(from, to))
else
throw er
})
@@ -427,7 +430,7 @@ module.exports = cls => class Reifier extends cls {
const dir = dirname(node.path)
const target = node.realpath
const rel = relative(dir, target)
- return symlink(rel, node.path, 'dir')
+ return symlink(rel, node.path, 'junction')
}
[_warnDeprecated] (node) {
@@ -436,23 +439,29 @@ module.exports = cls => class Reifier extends cls {
this.log.warn('deprecated', `${_id}: ${deprecated}`)
}
- [_loadAncientPackageDetails] (node) {
+ async [_loadAncientPackageDetails] (node, forceReload = false) {
// If we're loading from a v1 lockfile, load details from the package.json
// that weren't recorded in the old format.
const {meta} = this.idealTree
const ancient = meta.ancientLockfile
const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)
+
// already replaced with the manifest if it's truly ancient
- if (old && !ancient) {
+ if (node.path && (forceReload || (old && !ancient))) {
// XXX should have a shared location where package.json is read,
// so we don't ever read the same pj more than necessary.
- return rpj(node.path + '/package.json').then(pkg => {
+ let pkg
+ try {
+ pkg = await rpj(node.path + '/package.json')
+ } catch (err) {}
+
+ if (pkg) {
node.package.bin = pkg.bin
node.package.os = pkg.os
node.package.cpu = pkg.cpu
node.package.engines = pkg.engines
meta.add(node)
- })
+ }
}
}
@@ -839,12 +848,28 @@ module.exports = cls => class Reifier extends cls {
format: (this[_formatPackageLock] && format) ? format
: this[_formatPackageLock],
}
+
return Promise.all([
- this[_usePackageLock] && this.idealTree.meta.save(saveOpt),
+ this[_saveLockFile](saveOpt),
writeFile(pj, json),
]).then(() => process.emit('timeEnd', 'reify:save'))
}
+ async [_saveLockFile] (saveOpt) {
+ if (!this[_usePackageLock])
+ return
+
+ const { meta } = this.idealTree
+
+ // might have to update metadata for bins and stuff that gets lost
+ if (meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)) {
+ for (const node of this.idealTree.inventory.values())
+ await this[_loadAncientPackageDetails](node, true)
+ }
+
+ return meta.save(saveOpt)
+ }
+
[_copyIdealToActual] () {
// save the ideal's meta as a hidden lockfile after we actualize it
this.idealTree.meta.filename =
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/node.js b/deps/npm/node_modules/@npmcli/arborist/lib/node.js
index a783ce9c97..e4ba3ac42b 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/node.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/node.js
@@ -35,6 +35,7 @@ const {normalize} = require('read-package-json-fast')
const {getPaths: getBinPaths} = require('bin-links')
const npa = require('npm-package-arg')
const debug = require('./debug.js')
+const gatherDepSet = require('./gather-dep-set.js')
const {resolve, relative, dirname, basename} = require('path')
const _package = Symbol('_package')
@@ -566,13 +567,32 @@ class Node {
}
debug(() => {
- if (!this.fsParent && this.realpath.indexOf(fsParent.realpath) !== 0)
- throw new Error('attempting to set fsParent improperly')
+ if (fsParent === this)
+ throw new Error('setting node to its own fsParent')
+
+ if (fsParent.realpath === this.realpath)
+ throw new Error('setting fsParent to same path')
+
+ // the initial set MUST be an actual walk-up from the realpath
+ // subsequent sets will re-root on the new fsParent's path.
+ if (!this[_fsParent] && this.realpath.indexOf(fsParent.realpath) !== 0) {
+ throw Object.assign(new Error('setting fsParent improperly'), {
+ path: this.path,
+ realpath: this.realpath,
+ fsParent: {
+ path: fsParent.path,
+ realpath: fsParent.realpath,
+ },
+ })
+ }
if (fsParent.isLink)
- throw new Error('attempting to set fsParent to link node')
+ throw new Error('setting fsParent to link node')
})
+ if (this === fsParent || fsParent.realpath === this.realpath)
+ return
+
// prune off the original location, so we don't leave edges lying around
if (current)
this.fsParent = null
@@ -621,8 +641,14 @@ class Node {
if (node.name !== this.name)
return false
+ // gather up all the deps of this node and that are only depended
+ // upon by deps of this node. those ones don't count, since
+ // they'll be replaced if this node is replaced anyway.
+ const depSet = gatherDepSet([this], e => e.to !== this && e.valid)
+
for (const edge of this.edgesIn) {
- if (!edge.satisfiedBy(node))
+ // only care about edges that don't originate from this node
+ if (!depSet.has(edge.from) && !edge.satisfiedBy(node))
return false
}
@@ -731,6 +757,9 @@ class Node {
set parent (parent) {
const oldParent = this[_parent]
+ if (this === parent)
+ return
+
// link nodes can't contain children directly.
// children go under the link target.
if (parent) {
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index fe72f409c4..6dca9abe50 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -1,11 +1,12 @@
{
"name": "@npmcli/arborist",
- "version": "1.0.8",
+ "version": "1.0.11",
"description": "Manage node_modules trees",
"dependencies": {
"@npmcli/installed-package-contents": "^1.0.5",
"@npmcli/map-workspaces": "^1.0.1",
"@npmcli/metavuln-calculator": "^1.0.0",
+ "@npmcli/move-file": "^1.0.1",
"@npmcli/name-from-folder": "^1.0.1",
"@npmcli/node-gyp": "^1.0.0",
"@npmcli/run-script": "^1.7.2",
@@ -68,6 +69,7 @@
"node-arg": [
"--unhandled-rejections=strict"
],
+ "after": "test/fixtures/cleanup.js",
"coverage-map": "map.js",
"esm": false,
"timeout": "120"
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
index 5980376f55..47f386304e 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -20,6 +20,9 @@ const runScriptPkg = async options => {
stdioString = false,
// note: only used when stdio:inherit
banner = true,
+ // how long to wait for a process.kill signal
+ // only exposed here so that we can make the test go a bit faster.
+ signalTimeout = 500,
} = options
const {scripts = {}, gypfile} = pkg
@@ -68,7 +71,17 @@ const runScriptPkg = async options => {
if (p.stdin)
p.stdin.end()
- return p
+ return p.catch(er => {
+ const { signal } = er
+ if (stdio === 'inherit' && signal) {
+ process.kill(process.pid, signal)
+ // just in case we don't die, reject after 500ms
+ // this also keeps the node process open long enough to actually
+ // get the signal, rather than terminating gracefully.
+ return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout))
+ } else
+ throw er
+ })
}
module.exports = runScriptPkg
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/run-script.js b/deps/npm/node_modules/@npmcli/run-script/lib/run-script.js
index 3be39b0ba8..af33d2113f 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/run-script.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/run-script.js
@@ -1,6 +1,7 @@
const rpj = require('read-package-json-fast')
const runScriptPkg = require('./run-script-pkg.js')
const validateOptions = require('./validate-options.js')
+const isServerPackage = require('./is-server-package.js')
const runScript = options => {
validateOptions(options)
@@ -9,4 +10,4 @@ const runScript = options => {
: rpj(path + '/package.json').then(pkg => runScriptPkg({...options, pkg}))
}
-module.exports = runScript
+module.exports = Object.assign(runScript, { isServerPackage })
diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json
index 5ec3bb5b66..c8a052f036 100644
--- a/deps/npm/node_modules/@npmcli/run-script/package.json
+++ b/deps/npm/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/run-script",
- "version": "1.7.4",
+ "version": "1.7.5",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
"license": "ISC",