Diffstat (limited to 'deps/npm/node_modules/cacache')
-rw-r--r--  deps/npm/node_modules/cacache/CHANGELOG.md  356
-rw-r--r--  deps/npm/node_modules/cacache/README.md  51
-rw-r--r--  deps/npm/node_modules/cacache/lib/content/write.js  6
-rw-r--r--  deps/npm/node_modules/cacache/lib/entry-index.js  12
-rw-r--r--  deps/npm/node_modules/cacache/lib/util/fix-owner.js  106
-rw-r--r--  deps/npm/node_modules/cacache/lib/util/tmp.js  11
-rw-r--r--  deps/npm/node_modules/cacache/lib/verify.js  16
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/LICENSE  21
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/README.md  373
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/changelog.md  67
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/common.js  240
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/glob.js  790
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/package.json  76
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/glob/sync.js  486
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/lru-cache/LICENSE  15
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/lru-cache/README.md  166
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/lru-cache/index.js  334
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/lru-cache/package.json  67
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/yallist/LICENSE  15
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/yallist/README.md  204
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/yallist/iterator.js  8
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/yallist/package.json  62
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/yallist/yallist.js  376
-rw-r--r--  deps/npm/node_modules/cacache/package.json  38
-rw-r--r--  deps/npm/node_modules/cacache/put.js  2
25 files changed, 347 insertions, 3551 deletions
diff --git a/deps/npm/node_modules/cacache/CHANGELOG.md b/deps/npm/node_modules/cacache/CHANGELOG.md
index e140576c30..de84bdbf13 100644
--- a/deps/npm/node_modules/cacache/CHANGELOG.md
+++ b/deps/npm/node_modules/cacache/CHANGELOG.md
@@ -2,108 +2,168 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-### [11.3.3](https://github.com/zkat/cacache/compare/v11.3.2...v11.3.3) (2019-06-17)
+### [12.0.2](https://github.com/npm/cacache/compare/v12.0.1...v12.0.2) (2019-07-19)
+
+
+
+### [12.0.1](https://github.com/npm/cacache/compare/v12.0.0...v12.0.1) (2019-07-19)
+
+* **deps** Abstracted out `lib/util/infer-owner.js` to
+ [@npmcli/infer-owner](https://www.npmjs.com/package/@npmcli/infer-owner)
+ so that it could be more easily used in other parts of the npm CLI.
+
+
+## [12.0.0](https://github.com/npm/cacache/compare/v11.3.3...v12.0.0) (2019-07-15)
+
+
+### Features
+
+* infer uid/gid instead of accepting as options ([ac84d14](https://github.com/npm/cacache/commit/ac84d14))
+* **i18n:** add another error message ([676cb32](https://github.com/npm/cacache/commit/676cb32))
+
+
+### BREAKING CHANGES
+
+* the uid gid options are no longer respected or
+necessary. As of this change, cacache will always match the cache
+contents to the ownership of the cache directory (or its parent
+directory), regardless of what the caller passes in.
+
+Reasoning:
+
+The number one reason to use a uid or gid option was to keep root-owned
+files from causing problems in the cache. In npm's case, this meant
+that CLI's ./lib/command.js had to work out the appropriate uid and gid,
+then pass it to the libnpmcommand module, which had to in turn pass the
+uid and gid to npm-registry-fetch, which then passed it to
+make-fetch-happen, which passed it to cacache. (For package fetching,
+pacote would be in that mix as well.)
+
+Added to that, `cacache.rm()` will actually _write_ a file into the
+cache index, but has no way to accept an option so that its call to
+entry-index.js will write the index with the appropriate uid/gid.
+Little ownership bugs were all over the place, and tricky to trace
+through. (Why should make-fetch-happen even care about accepting or
+passing uids and gids? It's an http library.)
+
+This change allows us to keep the cache from having mixed ownership in
+any situation.
+
+Of course, this _does_ mean that if you have a root-owned but
+user-writable folder (for example, `/tmp`), then the cache will try to
+chown everything to root.
+
+The solution is for the user to create a folder, make it user-owned, and
+use that, rather than relying on cacache to create the root cache folder.
+
+If we decide to restore the uid/gid opts, and use ownership inferrence
+only when uid/gid are unset, then take care to also make rm take an
+option object, and pass it through to entry-index.js.
+
+
+
+### [11.3.3](https://github.com/npm/cacache/compare/v11.3.2...v11.3.3) (2019-06-17)
### Bug Fixes
-* **audit:** npm audit fix ([200a6d5](https://github.com/zkat/cacache/commit/200a6d5))
-* **config:** Add ssri config 'error' option ([#146](https://github.com/zkat/cacache/issues/146)) ([47de8f5](https://github.com/zkat/cacache/commit/47de8f5))
-* **deps:** npm audit fix ([481a7dc](https://github.com/zkat/cacache/commit/481a7dc))
-* **standard:** standard --fix ([7799149](https://github.com/zkat/cacache/commit/7799149))
-* **write:** avoid another cb never called situation ([5156561](https://github.com/zkat/cacache/commit/5156561))
+* **audit:** npm audit fix ([200a6d5](https://github.com/npm/cacache/commit/200a6d5))
+* **config:** Add ssri config 'error' option ([#146](https://github.com/npm/cacache/issues/146)) ([47de8f5](https://github.com/npm/cacache/commit/47de8f5))
+* **deps:** npm audit fix ([481a7dc](https://github.com/npm/cacache/commit/481a7dc))
+* **standard:** standard --fix ([7799149](https://github.com/npm/cacache/commit/7799149))
+* **write:** avoid another cb never called situation ([5156561](https://github.com/npm/cacache/commit/5156561))
<a name="11.3.2"></a>
-## [11.3.2](https://github.com/zkat/cacache/compare/v11.3.1...v11.3.2) (2018-12-21)
+## [11.3.2](https://github.com/npm/cacache/compare/v11.3.1...v11.3.2) (2018-12-21)
### Bug Fixes
-* **get:** make sure to handle errors in the .then ([b10bcd0](https://github.com/zkat/cacache/commit/b10bcd0))
+* **get:** make sure to handle errors in the .then ([b10bcd0](https://github.com/npm/cacache/commit/b10bcd0))
<a name="11.3.1"></a>
-## [11.3.1](https://github.com/zkat/cacache/compare/v11.3.0...v11.3.1) (2018-11-05)
+## [11.3.1](https://github.com/npm/cacache/compare/v11.3.0...v11.3.1) (2018-11-05)
### Bug Fixes
-* **get:** export hasContent.sync properly ([d76c920](https://github.com/zkat/cacache/commit/d76c920))
+* **get:** export hasContent.sync properly ([d76c920](https://github.com/npm/cacache/commit/d76c920))
<a name="11.3.0"></a>
-# [11.3.0](https://github.com/zkat/cacache/compare/v11.2.0...v11.3.0) (2018-11-05)
+# [11.3.0](https://github.com/npm/cacache/compare/v11.2.0...v11.3.0) (2018-11-05)
### Features
-* **get:** add sync API for reading ([db1e094](https://github.com/zkat/cacache/commit/db1e094))
+* **get:** add sync API for reading ([db1e094](https://github.com/npm/cacache/commit/db1e094))
<a name="11.2.0"></a>
-# [11.2.0](https://github.com/zkat/cacache/compare/v11.1.0...v11.2.0) (2018-08-08)
+# [11.2.0](https://github.com/npm/cacache/compare/v11.1.0...v11.2.0) (2018-08-08)
### Features
-* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/zkat/cacache/commit/fe638b6))
+* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/npm/cacache/commit/fe638b6))
<a name="11.1.0"></a>
-# [11.1.0](https://github.com/zkat/cacache/compare/v11.0.3...v11.1.0) (2018-08-01)
+# [11.1.0](https://github.com/npm/cacache/compare/v11.0.3...v11.1.0) (2018-08-01)
### Features
-* **read:** add sync support for low-level content read ([b43af83](https://github.com/zkat/cacache/commit/b43af83))
+* **read:** add sync support for low-level content read ([b43af83](https://github.com/npm/cacache/commit/b43af83))
<a name="11.0.3"></a>
-## [11.0.3](https://github.com/zkat/cacache/compare/v11.0.2...v11.0.3) (2018-08-01)
+## [11.0.3](https://github.com/npm/cacache/compare/v11.0.2...v11.0.3) (2018-08-01)
### Bug Fixes
-* **config:** add ssri config options ([#136](https://github.com/zkat/cacache/issues/136)) ([10d5d9a](https://github.com/zkat/cacache/commit/10d5d9a))
-* **perf:** refactor content.read to avoid lstats ([c5ac10e](https://github.com/zkat/cacache/commit/c5ac10e))
-* **test:** oops when removing safe-buffer ([1950490](https://github.com/zkat/cacache/commit/1950490))
+* **config:** add ssri config options ([#136](https://github.com/npm/cacache/issues/136)) ([10d5d9a](https://github.com/npm/cacache/commit/10d5d9a))
+* **perf:** refactor content.read to avoid lstats ([c5ac10e](https://github.com/npm/cacache/commit/c5ac10e))
+* **test:** oops when removing safe-buffer ([1950490](https://github.com/npm/cacache/commit/1950490))
<a name="11.0.2"></a>
-## [11.0.2](https://github.com/zkat/cacache/compare/v11.0.1...v11.0.2) (2018-05-07)
+## [11.0.2](https://github.com/npm/cacache/compare/v11.0.1...v11.0.2) (2018-05-07)
### Bug Fixes
-* **verify:** size param no longer lost in a verify ([#131](https://github.com/zkat/cacache/issues/131)) ([c614a19](https://github.com/zkat/cacache/commit/c614a19)), closes [#130](https://github.com/zkat/cacache/issues/130)
+* **verify:** size param no longer lost in a verify ([#131](https://github.com/npm/cacache/issues/131)) ([c614a19](https://github.com/npm/cacache/commit/c614a19)), closes [#130](https://github.com/npm/cacache/issues/130)
<a name="11.0.1"></a>
-## [11.0.1](https://github.com/zkat/cacache/compare/v11.0.0...v11.0.1) (2018-04-10)
+## [11.0.1](https://github.com/npm/cacache/compare/v11.0.0...v11.0.1) (2018-04-10)
<a name="11.0.0"></a>
-# [11.0.0](https://github.com/zkat/cacache/compare/v10.0.4...v11.0.0) (2018-04-09)
+# [11.0.0](https://github.com/npm/cacache/compare/v10.0.4...v11.0.0) (2018-04-09)
### Features
-* **opts:** use figgy-pudding for opts ([#128](https://github.com/zkat/cacache/issues/128)) ([33d4eed](https://github.com/zkat/cacache/commit/33d4eed))
+* **opts:** use figgy-pudding for opts ([#128](https://github.com/npm/cacache/issues/128)) ([33d4eed](https://github.com/npm/cacache/commit/33d4eed))
### meta
-* drop support for node@4 ([529f347](https://github.com/zkat/cacache/commit/529f347))
+* drop support for node@4 ([529f347](https://github.com/npm/cacache/commit/529f347))
### BREAKING CHANGES
@@ -113,52 +173,52 @@ All notable changes to this project will be documented in this file. See [standa
<a name="10.0.4"></a>
-## [10.0.4](https://github.com/zkat/cacache/compare/v10.0.3...v10.0.4) (2018-02-16)
+## [10.0.4](https://github.com/npm/cacache/compare/v10.0.3...v10.0.4) (2018-02-16)
<a name="10.0.3"></a>
-## [10.0.3](https://github.com/zkat/cacache/compare/v10.0.2...v10.0.3) (2018-02-16)
+## [10.0.3](https://github.com/npm/cacache/compare/v10.0.2...v10.0.3) (2018-02-16)
### Bug Fixes
-* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/zkat/cacache/commit/fa918f5))
+* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/npm/cacache/commit/fa918f5))
<a name="10.0.2"></a>
-## [10.0.2](https://github.com/zkat/cacache/compare/v10.0.1...v10.0.2) (2018-01-07)
+## [10.0.2](https://github.com/npm/cacache/compare/v10.0.1...v10.0.2) (2018-01-07)
### Bug Fixes
-* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/zkat/cacache/commit/347dc36))
+* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/npm/cacache/commit/347dc36))
<a name="10.0.1"></a>
-## [10.0.1](https://github.com/zkat/cacache/compare/v10.0.0...v10.0.1) (2017-11-15)
+## [10.0.1](https://github.com/npm/cacache/compare/v10.0.0...v10.0.1) (2017-11-15)
### Bug Fixes
-* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/zkat/cacache/commit/073fbe1))
+* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/npm/cacache/commit/073fbe1))
<a name="10.0.0"></a>
-# [10.0.0](https://github.com/zkat/cacache/compare/v9.3.0...v10.0.0) (2017-10-23)
+# [10.0.0](https://github.com/npm/cacache/compare/v9.3.0...v10.0.0) (2017-10-23)
### Features
-* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/zkat/cacache/commit/fdbb4e5))
+* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/npm/cacache/commit/fdbb4e5))
### Performance Improvements
-* more copyFile benchmarks ([63787bb](https://github.com/zkat/cacache/commit/63787bb))
+* more copyFile benchmarks ([63787bb](https://github.com/npm/cacache/commit/63787bb))
### BREAKING CHANGES
@@ -168,132 +228,132 @@ All notable changes to this project will be documented in this file. See [standa
<a name="9.3.0"></a>
-# [9.3.0](https://github.com/zkat/cacache/compare/v9.2.9...v9.3.0) (2017-10-07)
+# [9.3.0](https://github.com/npm/cacache/compare/v9.2.9...v9.3.0) (2017-10-07)
### Features
-* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/zkat/cacache/commit/067b5f6))
+* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/npm/cacache/commit/067b5f6))
<a name="9.2.9"></a>
-## [9.2.9](https://github.com/zkat/cacache/compare/v9.2.8...v9.2.9) (2017-06-17)
+## [9.2.9](https://github.com/npm/cacache/compare/v9.2.8...v9.2.9) (2017-06-17)
<a name="9.2.8"></a>
-## [9.2.8](https://github.com/zkat/cacache/compare/v9.2.7...v9.2.8) (2017-06-05)
+## [9.2.8](https://github.com/npm/cacache/compare/v9.2.7...v9.2.8) (2017-06-05)
### Bug Fixes
-* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/zkat/cacache/commit/c3232ea))
+* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/npm/cacache/commit/c3232ea))
<a name="9.2.7"></a>
-## [9.2.7](https://github.com/zkat/cacache/compare/v9.2.6...v9.2.7) (2017-06-05)
+## [9.2.7](https://github.com/npm/cacache/compare/v9.2.6...v9.2.7) (2017-06-05)
### Bug Fixes
-* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/zkat/cacache/commit/4131196))
+* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/npm/cacache/commit/4131196))
<a name="9.2.6"></a>
-## [9.2.6](https://github.com/zkat/cacache/compare/v9.2.5...v9.2.6) (2017-05-31)
+## [9.2.6](https://github.com/npm/cacache/compare/v9.2.5...v9.2.6) (2017-05-31)
### Bug Fixes
-* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/zkat/cacache/commit/5209ffe))
+* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/npm/cacache/commit/5209ffe))
<a name="9.2.5"></a>
-## [9.2.5](https://github.com/zkat/cacache/compare/v9.2.4...v9.2.5) (2017-05-25)
+## [9.2.5](https://github.com/npm/cacache/compare/v9.2.4...v9.2.5) (2017-05-25)
### Bug Fixes
-* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/zkat/cacache/commit/84e1d7e))
+* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/npm/cacache/commit/84e1d7e))
<a name="9.2.4"></a>
-## [9.2.4](https://github.com/zkat/cacache/compare/v9.2.3...v9.2.4) (2017-05-24)
+## [9.2.4](https://github.com/npm/cacache/compare/v9.2.3...v9.2.4) (2017-05-24)
### Bug Fixes
-* **deps:** bumping deps ([bbccb12](https://github.com/zkat/cacache/commit/bbccb12))
+* **deps:** bumping deps ([bbccb12](https://github.com/npm/cacache/commit/bbccb12))
<a name="9.2.3"></a>
-## [9.2.3](https://github.com/zkat/cacache/compare/v9.2.2...v9.2.3) (2017-05-24)
+## [9.2.3](https://github.com/npm/cacache/compare/v9.2.2...v9.2.3) (2017-05-24)
### Bug Fixes
-* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/zkat/cacache/commit/ac90bc0))
+* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/npm/cacache/commit/ac90bc0))
<a name="9.2.2"></a>
-## [9.2.2](https://github.com/zkat/cacache/compare/v9.2.1...v9.2.2) (2017-05-14)
+## [9.2.2](https://github.com/npm/cacache/compare/v9.2.1...v9.2.2) (2017-05-14)
### Bug Fixes
-* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/zkat/cacache/commit/519b4ee))
+* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/npm/cacache/commit/519b4ee))
<a name="9.2.1"></a>
-## [9.2.1](https://github.com/zkat/cacache/compare/v9.2.0...v9.2.1) (2017-05-14)
+## [9.2.1](https://github.com/npm/cacache/compare/v9.2.0...v9.2.1) (2017-05-14)
### Bug Fixes
-* **docs:** fixing translation messup ([bb9e4f9](https://github.com/zkat/cacache/commit/bb9e4f9))
+* **docs:** fixing translation messup ([bb9e4f9](https://github.com/npm/cacache/commit/bb9e4f9))
<a name="9.2.0"></a>
-# [9.2.0](https://github.com/zkat/cacache/compare/v9.1.0...v9.2.0) (2017-05-14)
+# [9.2.0](https://github.com/npm/cacache/compare/v9.1.0...v9.2.0) (2017-05-14)
### Features
-* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/zkat/cacache/commit/531f9a4))
+* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/npm/cacache/commit/531f9a4))
<a name="9.1.0"></a>
-# [9.1.0](https://github.com/zkat/cacache/compare/v9.0.0...v9.1.0) (2017-05-14)
+# [9.1.0](https://github.com/npm/cacache/compare/v9.0.0...v9.1.0) (2017-05-14)
### Features
-* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/zkat/cacache/commit/323b90c))
+* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/npm/cacache/commit/323b90c))
<a name="9.0.0"></a>
-# [9.0.0](https://github.com/zkat/cacache/compare/v8.0.0...v9.0.0) (2017-04-28)
+# [9.0.0](https://github.com/npm/cacache/compare/v8.0.0...v9.0.0) (2017-04-28)
### Bug Fixes
-* **memoization:** actually use the LRU ([0e55dc9](https://github.com/zkat/cacache/commit/0e55dc9))
+* **memoization:** actually use the LRU ([0e55dc9](https://github.com/npm/cacache/commit/0e55dc9))
### Features
-* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/zkat/cacache/commit/e5614c7))
+* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/npm/cacache/commit/e5614c7))
### BREAKING CHANGES
@@ -303,12 +363,12 @@ All notable changes to this project will be documented in this file. See [standa
<a name="8.0.0"></a>
-# [8.0.0](https://github.com/zkat/cacache/compare/v7.1.0...v8.0.0) (2017-04-22)
+# [8.0.0](https://github.com/npm/cacache/compare/v7.1.0...v8.0.0) (2017-04-22)
### Features
-* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/zkat/cacache/commit/bad6c49)), closes [#87](https://github.com/zkat/cacache/issues/87)
+* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/npm/cacache/commit/bad6c49)), closes [#87](https://github.com/npm/cacache/issues/87)
### BREAKING CHANGES
@@ -318,78 +378,78 @@ All notable changes to this project will be documented in this file. See [standa
<a name="7.1.0"></a>
-# [7.1.0](https://github.com/zkat/cacache/compare/v7.0.5...v7.1.0) (2017-04-20)
+# [7.1.0](https://github.com/npm/cacache/compare/v7.0.5...v7.1.0) (2017-04-20)
### Features
-* **size:** handle content size info (#49) ([91230af](https://github.com/zkat/cacache/commit/91230af))
+* **size:** handle content size info (#49) ([91230af](https://github.com/npm/cacache/commit/91230af))
<a name="7.0.5"></a>
-## [7.0.5](https://github.com/zkat/cacache/compare/v7.0.4...v7.0.5) (2017-04-18)
+## [7.0.5](https://github.com/npm/cacache/compare/v7.0.4...v7.0.5) (2017-04-18)
### Bug Fixes
-* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/zkat/cacache/commit/6d13e8e))
-* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/zkat/cacache/commit/3624fc5))
+* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/npm/cacache/commit/6d13e8e))
+* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/npm/cacache/commit/3624fc5))
<a name="7.0.4"></a>
-## [7.0.4](https://github.com/zkat/cacache/compare/v7.0.3...v7.0.4) (2017-04-15)
+## [7.0.4](https://github.com/npm/cacache/compare/v7.0.3...v7.0.4) (2017-04-15)
### Bug Fixes
-* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/zkat/cacache/commit/d49bbcd))
+* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/npm/cacache/commit/d49bbcd))
<a name="7.0.3"></a>
-## [7.0.3](https://github.com/zkat/cacache/compare/v7.0.2...v7.0.3) (2017-04-05)
+## [7.0.3](https://github.com/npm/cacache/compare/v7.0.2...v7.0.3) (2017-04-05)
### Bug Fixes
-* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/zkat/cacache/commit/9d4f0a5))
+* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/npm/cacache/commit/9d4f0a5))
<a name="7.0.2"></a>
-## [7.0.2](https://github.com/zkat/cacache/compare/v7.0.1...v7.0.2) (2017-04-03)
+## [7.0.2](https://github.com/npm/cacache/compare/v7.0.1...v7.0.2) (2017-04-03)
### Bug Fixes
-* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/zkat/cacache/commit/8dc2e62))
+* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/npm/cacache/commit/8dc2e62))
<a name="7.0.1"></a>
-## [7.0.1](https://github.com/zkat/cacache/compare/v7.0.0...v7.0.1) (2017-04-03)
+## [7.0.1](https://github.com/npm/cacache/compare/v7.0.0...v7.0.1) (2017-04-03)
### Bug Fixes
-* **docs:** fix header name conflict in readme ([afcd456](https://github.com/zkat/cacache/commit/afcd456))
+* **docs:** fix header name conflict in readme ([afcd456](https://github.com/npm/cacache/commit/afcd456))
<a name="7.0.0"></a>
-# [7.0.0](https://github.com/zkat/cacache/compare/v6.3.0...v7.0.0) (2017-04-03)
+# [7.0.0](https://github.com/npm/cacache/compare/v6.3.0...v7.0.0) (2017-04-03)
### Bug Fixes
-* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/zkat/cacache/commit/d2e9b6a))
+* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/npm/cacache/commit/d2e9b6a))
### Features
-* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/zkat/cacache/commit/b1e731f))
+* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/npm/cacache/commit/b1e731f))
### BREAKING CHANGES
@@ -425,150 +485,150 @@ All notable changes to this project will be documented in this file. See [standa
<a name="6.3.0"></a>
-# [6.3.0](https://github.com/zkat/cacache/compare/v6.2.0...v6.3.0) (2017-04-01)
+# [6.3.0](https://github.com/npm/cacache/compare/v6.2.0...v6.3.0) (2017-04-01)
### Bug Fixes
-* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/zkat/cacache/commit/4670e9b))
-* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/zkat/cacache/commit/b9d2fa2))
-* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/zkat/cacache/commit/d8ac5aa))
+* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/npm/cacache/commit/4670e9b))
+* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/npm/cacache/commit/b9d2fa2))
+* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/npm/cacache/commit/d8ac5aa))
### Features
-* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/zkat/cacache/commit/dc6482d))
+* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/npm/cacache/commit/dc6482d))
<a name="6.2.0"></a>
-# [6.2.0](https://github.com/zkat/cacache/compare/v6.1.2...v6.2.0) (2017-03-15)
+# [6.2.0](https://github.com/npm/cacache/compare/v6.1.2...v6.2.0) (2017-03-15)
### Bug Fixes
-* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/zkat/cacache/commit/f8e0f25))
-* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/zkat/cacache/commit/6818521))
+* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/npm/cacache/commit/f8e0f25))
+* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/npm/cacache/commit/6818521))
### Features
-* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/zkat/cacache/commit/c42da71))
+* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/npm/cacache/commit/c42da71))
<a name="6.1.2"></a>
-## [6.1.2](https://github.com/zkat/cacache/compare/v6.1.1...v6.1.2) (2017-03-13)
+## [6.1.2](https://github.com/npm/cacache/compare/v6.1.1...v6.1.2) (2017-03-13)
### Bug Fixes
-* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/zkat/cacache/commit/d6eb2f0))
+* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/npm/cacache/commit/d6eb2f0))
<a name="6.1.1"></a>
-## [6.1.1](https://github.com/zkat/cacache/compare/v6.1.0...v6.1.1) (2017-03-13)
+## [6.1.1](https://github.com/npm/cacache/compare/v6.1.0...v6.1.1) (2017-03-13)
### Bug Fixes
-* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/zkat/cacache/commit/0b7faf6))
-* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/zkat/cacache/commit/0640bc4))
+* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/npm/cacache/commit/0b7faf6))
+* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/npm/cacache/commit/0640bc4))
<a name="6.1.0"></a>
-# [6.1.0](https://github.com/zkat/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
+# [6.1.0](https://github.com/npm/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
### Bug Fixes
-* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/zkat/cacache/commit/ef4f70a))
-* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/zkat/cacache/commit/6ab8132))
+* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/npm/cacache/commit/ef4f70a))
+* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/npm/cacache/commit/6ab8132))
### Features
-* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/zkat/cacache/commit/d5d25ba)), closes [#66](https://github.com/zkat/cacache/issues/66)
-* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/zkat/cacache/commit/45ad77a))
+* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/npm/cacache/commit/d5d25ba)), closes [#66](https://github.com/npm/cacache/issues/66)
+* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/npm/cacache/commit/45ad77a))
<a name="6.0.2"></a>
-## [6.0.2](https://github.com/zkat/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)
+## [6.0.2](https://github.com/npm/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)
### Bug Fixes
-* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/zkat/cacache/commit/c3644e5))
+* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/npm/cacache/commit/c3644e5))
<a name="6.0.1"></a>
-## [6.0.1](https://github.com/zkat/cacache/compare/v6.0.0...v6.0.1) (2017-03-05)
+## [6.0.1](https://github.com/npm/cacache/compare/v6.0.0...v6.0.1) (2017-03-05)
### Bug Fixes
-* **docs:** Missed spots in README ([8ffb7fa](https://github.com/zkat/cacache/commit/8ffb7fa))
+* **docs:** Missed spots in README ([8ffb7fa](https://github.com/npm/cacache/commit/8ffb7fa))
<a name="6.0.0"></a>
-# [6.0.0](https://github.com/zkat/cacache/compare/v5.0.3...v6.0.0) (2017-03-05)
+# [6.0.0](https://github.com/npm/cacache/compare/v5.0.3...v6.0.0) (2017-03-05)
### Bug Fixes
-* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/zkat/cacache/commit/2f72d0a))
-* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/zkat/cacache/commit/fa8f3c3))
-* **deps:** removed `format-number[@2](https://github.com/2).0.2` ([1187791](https://github.com/zkat/cacache/commit/1187791))
-* **deps:** removed inflight[@1](https://github.com/1).0.6 ([0d1819c](https://github.com/zkat/cacache/commit/0d1819c))
-* **deps:** rimraf[@2](https://github.com/2).6.1 ([9efab6b](https://github.com/zkat/cacache/commit/9efab6b))
-* **deps:** standard[@9](https://github.com/9).0.0 ([4202cba](https://github.com/zkat/cacache/commit/4202cba))
-* **deps:** tap[@10](https://github.com/10).3.0 ([aa03088](https://github.com/zkat/cacache/commit/aa03088))
-* **deps:** weallcontribute[@1](https://github.com/1).0.8 ([ad4f4dc](https://github.com/zkat/cacache/commit/ad4f4dc))
-* **docs:** add security note to hashKey ([03f81ba](https://github.com/zkat/cacache/commit/03f81ba))
-* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/zkat/cacache/commit/ea00ba6))
-* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/zkat/cacache/commit/45997d8))
-* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/zkat/cacache/commit/fb8cb4d))
-* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/zkat/cacache/commit/bbc5fca))
-* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/zkat/cacache/commit/b1d3888))
-* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/zkat/cacache/commit/d26cdf9))
-* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/zkat/cacache/commit/79a8891))
-* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/zkat/cacache/commit/65f6632))
-* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/zkat/cacache/commit/daf9e08))
-* **put-stream:** use new promise API for moves ([1d36013](https://github.com/zkat/cacache/commit/1d36013))
-* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/zkat/cacache/commit/c60a2fa))
-* **verify:** tiny typo fix ([db22d05](https://github.com/zkat/cacache/commit/db22d05))
+* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/npm/cacache/commit/2f72d0a))
+* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/npm/cacache/commit/fa8f3c3))
+* **deps:** removed `format-number@2.0.2` ([1187791](https://github.com/npm/cacache/commit/1187791))
+* **deps:** removed inflight@1.0.6 ([0d1819c](https://github.com/npm/cacache/commit/0d1819c))
+* **deps:** rimraf@2.6.1 ([9efab6b](https://github.com/npm/cacache/commit/9efab6b))
+* **deps:** standard@9.0.0 ([4202cba](https://github.com/npm/cacache/commit/4202cba))
+* **deps:** tap@10.3.0 ([aa03088](https://github.com/npm/cacache/commit/aa03088))
+* **deps:** weallcontribute@1.0.8 ([ad4f4dc](https://github.com/npm/cacache/commit/ad4f4dc))
+* **docs:** add security note to hashKey ([03f81ba](https://github.com/npm/cacache/commit/03f81ba))
+* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/npm/cacache/commit/ea00ba6))
+* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/npm/cacache/commit/45997d8))
+* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/npm/cacache/commit/fb8cb4d))
+* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/npm/cacache/commit/bbc5fca))
+* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/npm/cacache/commit/b1d3888))
+* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/npm/cacache/commit/d26cdf9))
+* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/npm/cacache/commit/79a8891))
+* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/npm/cacache/commit/65f6632))
+* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/npm/cacache/commit/daf9e08))
+* **put-stream:** use new promise API for moves ([1d36013](https://github.com/npm/cacache/commit/1d36013))
+* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/npm/cacache/commit/c60a2fa))
+* **verify:** tiny typo fix ([db22d05](https://github.com/npm/cacache/commit/db22d05))
### Features
-* **api:** converted external api ([7bf032f](https://github.com/zkat/cacache/commit/7bf032f))
-* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/zkat/cacache/commit/8d2c5b6))
-* **cache:** add versioning to content and index ([31bc549](https://github.com/zkat/cacache/commit/31bc549))
-* **content:** collate content files into subdirs ([c094d9f](https://github.com/zkat/cacache/commit/c094d9f))
-* **deps:** [@npmcorp](https://github.com/npmcorp)/move[@1](https://github.com/1).0.0 ([bdd00bf](https://github.com/zkat/cacache/commit/bdd00bf))
-* **deps:** bluebird[@3](https://github.com/3).4.7 ([3a17aff](https://github.com/zkat/cacache/commit/3a17aff))
-* **deps:** promise-inflight[@1](https://github.com/1).0.1 ([a004fe6](https://github.com/zkat/cacache/commit/a004fe6))
-* **get:** added memoization support for get ([c77d794](https://github.com/zkat/cacache/commit/c77d794))
-* **get:** export hasContent ([2956ec3](https://github.com/zkat/cacache/commit/2956ec3))
-* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/zkat/cacache/commit/b639746))
-* **index:** collate index files into subdirs ([e8402a5](https://github.com/zkat/cacache/commit/e8402a5))
-* **index:** promisify entry index ([cda3335](https://github.com/zkat/cacache/commit/cda3335))
-* **memo:** added memoization lib ([da07b92](https://github.com/zkat/cacache/commit/da07b92))
-* **memo:** export memoization api ([954b1b3](https://github.com/zkat/cacache/commit/954b1b3))
-* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/zkat/cacache/commit/5cf4616))
-* **perf:** bulk content write api ([51b536e](https://github.com/zkat/cacache/commit/51b536e))
-* **put:** added memoization support to put ([b613a70](https://github.com/zkat/cacache/commit/b613a70))
-* **read:** switched to promises ([a869362](https://github.com/zkat/cacache/commit/a869362))
-* **rm:** added memoization support to rm ([4205cf0](https://github.com/zkat/cacache/commit/4205cf0))
-* **rm:** switched to promises ([a000d24](https://github.com/zkat/cacache/commit/a000d24))
-* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/zkat/cacache/commit/9517cd7))
-* **util:** use promises for api ([ae204bb](https://github.com/zkat/cacache/commit/ae204bb))
-* **verify:** converted to Promises ([f0b3974](https://github.com/zkat/cacache/commit/f0b3974))
+* **api:** converted external api ([7bf032f](https://github.com/npm/cacache/commit/7bf032f))
+* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/npm/cacache/commit/8d2c5b6))
+* **cache:** add versioning to content and index ([31bc549](https://github.com/npm/cacache/commit/31bc549))
+* **content:** collate content files into subdirs ([c094d9f](https://github.com/npm/cacache/commit/c094d9f))
+* **deps:** `@npmcorp/move@1.0.0` ([bdd00bf](https://github.com/npm/cacache/commit/bdd00bf))
+* **deps:** `bluebird@3.4.7` ([3a17aff](https://github.com/npm/cacache/commit/3a17aff))
+* **deps:** `promise-inflight@1.0.1` ([a004fe6](https://github.com/npm/cacache/commit/a004fe6))
+* **get:** added memoization support for get ([c77d794](https://github.com/npm/cacache/commit/c77d794))
+* **get:** export hasContent ([2956ec3](https://github.com/npm/cacache/commit/2956ec3))
+* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/npm/cacache/commit/b639746))
+* **index:** collate index files into subdirs ([e8402a5](https://github.com/npm/cacache/commit/e8402a5))
+* **index:** promisify entry index ([cda3335](https://github.com/npm/cacache/commit/cda3335))
+* **memo:** added memoization lib ([da07b92](https://github.com/npm/cacache/commit/da07b92))
+* **memo:** export memoization api ([954b1b3](https://github.com/npm/cacache/commit/954b1b3))
+* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/npm/cacache/commit/5cf4616))
+* **perf:** bulk content write api ([51b536e](https://github.com/npm/cacache/commit/51b536e))
+* **put:** added memoization support to put ([b613a70](https://github.com/npm/cacache/commit/b613a70))
+* **read:** switched to promises ([a869362](https://github.com/npm/cacache/commit/a869362))
+* **rm:** added memoization support to rm ([4205cf0](https://github.com/npm/cacache/commit/4205cf0))
+* **rm:** switched to promises ([a000d24](https://github.com/npm/cacache/commit/a000d24))
+* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/npm/cacache/commit/9517cd7))
+* **util:** use promises for api ([ae204bb](https://github.com/npm/cacache/commit/ae204bb))
+* **verify:** converted to Promises ([f0b3974](https://github.com/npm/cacache/commit/f0b3974))
### BREAKING CHANGES
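
The v12 breaking change described in the changelog above removes `uid`/`gid` from the public options entirely; ownership of new cache files is instead matched to whoever owns the cache directory (or its nearest existing parent). A minimal sketch of a caller after this change, using cacache's documented `put`/`get` API (the cache path, key, and data are illustrative):

```javascript
const cacache = require('cacache')

const cache = '/tmp/my-cache' // illustrative path
const key = 'registry.npmjs.org|lodash' // illustrative key

cacache.put(cache, key, Buffer.from('tarball bytes'))
  .then(integrity => {
    // Everything written under `cache` is chowned to the owner of the
    // cache directory itself, even when this process runs under sudo.
    return cacache.get(cache, key)
  })
  .then(({ data, integrity }) => console.log(integrity, data.length))
```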
diff --git a/deps/npm/node_modules/cacache/README.md b/deps/npm/node_modules/cacache/README.md
index 971b7b0b74..7f8ec5eec0 100644
--- a/deps/npm/node_modules/cacache/README.md
+++ b/deps/npm/node_modules/cacache/README.md
@@ -1,12 +1,16 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
+# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest)
-[`cacache`](https://github.com/zkat/cacache) is a Node.js library for managing
+[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing
local key and content address caches. It's really fast, really good at
concurrency, and it will never give you corrupted data, even if cache files
get corrupted or manipulated.
-It was originally written to be used as [npm](https://npm.im)'s local cache, but
-can just as easily be used on its own.
+On systems that support user and group settings on files, cacache will
+match the `uid` and `gid` values to the folder where the cache lives, even
+when running as `root`.
+
+It was written to be used as [npm](https://npm.im)'s local cache, but can
+just as easily be used on its own.
_Translations: [español](README.es.md)_
@@ -414,13 +418,6 @@ may also use any anagram of `'modnar'` to use this feature.
Currently only supports one algorithm at a time (i.e., an array length of
exactly `1`). Has no effect if `opts.integrity` is present.
-##### `opts.uid`/`opts.gid`
-
-If provided, cacache will do its best to make sure any new files added to the
-cache use this particular `uid`/`gid` combination. This can be used,
-for example, to drop permissions when someone uses `sudo`, but cacache makes
-no assumptions about your needs here.
-
##### `opts.memoize`
Default: null
@@ -498,10 +495,11 @@ Completely resets the in-memory entry cache.
Returns a unique temporary directory inside the cache's `tmp` dir. This
directory will use the same safe user assignment that all the other stuff use.
-Once the directory is made, it's the user's responsibility that all files within
-are made according to the same `opts.gid`/`opts.uid` settings that would be
-passed in. If not, you can ask cacache to do it for you by calling
-[`tmp.fix()`](#tmp-fix), which will fix all tmp directory permissions.
+Once the directory is made, it's the user's responsibility that all files
+within are given the appropriate `gid`/`uid` ownership settings to match
+the rest of the cache. If not, you can ask cacache to do it for you by
+calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
+permissions.
If you want automatic cleanup of this directory, use
[`tmp.withTmp()`](#with-tpm)
@@ -514,6 +512,27 @@ cacache.tmp.mkdir(cache).then(dir => {
})
```
+#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
+
+Sets the `uid` and `gid` properties on all files and folders within the tmp
+folder to match the rest of the cache.
+
+Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
+[`tmp.withTmp`](#with-tmp).
+
+##### Example
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+ writeFile(path.join(dir, 'file'), someData).then(() => {
+ // make sure we didn't just put a root-owned file in the cache
+ cacache.tmp.fix().then(() => {
+ // all uids and gids match now
+ })
+ })
+})
+```
+
#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
@@ -591,8 +610,6 @@ of entries removed, etc.
##### Options
-* `opts.uid` - uid to assign to cache and its contents
-* `opts.gid` - gid to assign to cache and its contents
* `opts.filter` - receives a formatted entry. Return false to remove it.
Note: might be called more than once on the same entry.
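
With `opts.uid`/`opts.gid` dropped from the verify options above, a caller now only tunes `filter` (plus logging). A hedged usage sketch; the cache path and filter predicate are illustrative, and `stats.totalEntries` follows the counter incremented in the verify.js diff further down:

```javascript
const cacache = require('cacache')

cacache.verify('/tmp/my-cache', {
  // illustrative predicate; per the note above it may run more than once
  // for the same entry
  filter: entry => !entry.key.startsWith('obsolete:')
}).then(stats => {
  console.log('entries kept:', stats.totalEntries)
})
```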
diff --git a/deps/npm/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/cacache/lib/content/write.js
index 150371cb85..4d96a3cffe 100644
--- a/deps/npm/node_modules/cacache/lib/content/write.js
+++ b/deps/npm/node_modules/cacache/lib/content/write.js
@@ -121,7 +121,7 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
function makeTmp (cache, opts) {
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
return fixOwner.mkdirfix(
- path.dirname(tmpTarget), opts.uid, opts.gid
+ cache, path.dirname(tmpTarget)
).then(() => ({
target: tmpTarget,
moved: false
@@ -134,14 +134,14 @@ function moveToDestination (tmp, cache, sri, opts, errCheck) {
const destDir = path.dirname(destination)
return fixOwner.mkdirfix(
- destDir, opts.uid, opts.gid
+ cache, destDir
).then(() => {
errCheck && errCheck()
return moveFile(tmp.target, destination)
}).then(() => {
errCheck && errCheck()
tmp.moved = true
- return fixOwner.chownr(destination, opts.uid, opts.gid)
+ return fixOwner.chownr(cache, destination)
})
}
diff --git a/deps/npm/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/cacache/lib/entry-index.js
index d2549e7136..dee1824b1d 100644
--- a/deps/npm/node_modules/cacache/lib/entry-index.js
+++ b/deps/npm/node_modules/cacache/lib/entry-index.js
@@ -32,9 +32,7 @@ module.exports.NotFoundError = class NotFoundError extends Error {
const IndexOpts = figgyPudding({
metadata: {},
- size: {},
- uid: {},
- gid: {}
+ size: {}
})
module.exports.insert = insert
@@ -49,7 +47,7 @@ function insert (cache, key, integrity, opts) {
metadata: opts.metadata
}
return fixOwner.mkdirfix(
- path.dirname(bucket), opts.uid, opts.gid
+ cache, path.dirname(bucket)
).then(() => {
const stringified = JSON.stringify(entry)
// NOTE - Cleverness ahoy!
@@ -63,7 +61,7 @@ function insert (cache, key, integrity, opts) {
bucket, `\n${hashEntry(stringified)}\t${stringified}`
)
}).then(
- () => fixOwner.chownr(bucket, opts.uid, opts.gid)
+ () => fixOwner.chownr(cache, bucket)
).catch({ code: 'ENOENT' }, () => {
// There's a class of race conditions that happen when things get deleted
// during fixOwner, or between the two mkdirfix/chownr calls.
@@ -86,13 +84,13 @@ function insertSync (cache, key, integrity, opts) {
size: opts.size,
metadata: opts.metadata
}
- fixOwner.mkdirfix.sync(path.dirname(bucket), opts.uid, opts.gid)
+ fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
const stringified = JSON.stringify(entry)
fs.appendFileSync(
bucket, `\n${hashEntry(stringified)}\t${stringified}`
)
try {
- fixOwner.chownr.sync(bucket, opts.uid, opts.gid)
+ fixOwner.chownr.sync(cache, bucket)
} catch (err) {
if (err.code !== 'ENOENT') {
throw err
diff --git a/deps/npm/node_modules/cacache/lib/util/fix-owner.js b/deps/npm/node_modules/cacache/lib/util/fix-owner.js
index 563724ca6e..c91c709d95 100644
--- a/deps/npm/node_modules/cacache/lib/util/fix-owner.js
+++ b/deps/npm/node_modules/cacache/lib/util/fix-owner.js
@@ -5,83 +5,115 @@ const BB = require('bluebird')
const chownr = BB.promisify(require('chownr'))
const mkdirp = BB.promisify(require('mkdirp'))
const inflight = require('promise-inflight')
+const inferOwner = require('infer-owner')
+
+// Memoize getuid()/getgid() calls.
+// patch process.setuid/setgid to invalidate cached value on change
+const self = { uid: null, gid: null }
+const getSelf = () => {
+ if (typeof self.uid !== 'number') {
+ self.uid = process.getuid()
+ const setuid = process.setuid
+ process.setuid = (uid) => {
+ self.uid = null
+ process.setuid = setuid
+ return process.setuid(uid)
+ }
+ }
+ if (typeof self.gid !== 'number') {
+ self.gid = process.getgid()
+ const setgid = process.setgid
+ process.setgid = (gid) => {
+ self.gid = null
+ process.setgid = setgid
+ return process.setgid(gid)
+ }
+ }
+}
module.exports.chownr = fixOwner
-function fixOwner (filepath, uid, gid) {
+function fixOwner (cache, filepath) {
if (!process.getuid) {
// This platform doesn't need ownership fixing
return BB.resolve()
}
- if (typeof uid !== 'number' && typeof gid !== 'number') {
- // There's no permissions override. Nothing to do here.
- return BB.resolve()
- }
- if ((typeof uid === 'number' && process.getuid() === uid) &&
- (typeof gid === 'number' && process.getgid() === gid)) {
+ return BB.resolve(inferOwner(cache)).then(owner => {
+ const { uid, gid } = owner
+ getSelf()
+
// No need to override if it's already what we used.
- return BB.resolve()
- }
- return inflight(
- 'fixOwner: fixing ownership on ' + filepath,
- () => chownr(
- filepath,
- typeof uid === 'number' ? uid : process.getuid(),
- typeof gid === 'number' ? gid : process.getgid()
- ).catch({ code: 'ENOENT' }, () => null)
- )
+ if (self.uid === uid && self.gid === gid) {
+ return
+ }
+
+ return inflight(
+ 'fixOwner: fixing ownership on ' + filepath,
+ () => chownr(
+ filepath,
+ typeof uid === 'number' ? uid : self.uid,
+ typeof gid === 'number' ? gid : self.gid
+ ).catch({ code: 'ENOENT' }, () => null)
+ )
+ })
}
module.exports.chownr.sync = fixOwnerSync
-function fixOwnerSync (filepath, uid, gid) {
+function fixOwnerSync (cache, filepath) {
if (!process.getuid) {
// This platform doesn't need ownership fixing
return
}
- if (typeof uid !== 'number' && typeof gid !== 'number') {
- // There's no permissions override. Nothing to do here.
- return
- }
- if ((typeof uid === 'number' && process.getuid() === uid) &&
- (typeof gid === 'number' && process.getgid() === gid)) {
+ const { uid, gid } = inferOwner.sync(cache)
+ getSelf()
+ if (self.uid === uid && self.gid === gid) {
// No need to override if it's already what we used.
return
}
try {
chownr.sync(
filepath,
- typeof uid === 'number' ? uid : process.getuid(),
- typeof gid === 'number' ? gid : process.getgid()
+ typeof uid === 'number' ? uid : self.uid,
+ typeof gid === 'number' ? gid : self.gid
)
} catch (err) {
+ // only catch ENOENT, any other error is a problem.
if (err.code === 'ENOENT') {
return null
}
+ throw err
}
}
module.exports.mkdirfix = mkdirfix
-function mkdirfix (p, uid, gid, cb) {
- return mkdirp(p).then(made => {
- if (made) {
- return fixOwner(made, uid, gid).then(() => made)
- }
- }).catch({ code: 'EEXIST' }, () => {
- // There's a race in mkdirp!
- return fixOwner(p, uid, gid).then(() => null)
+function mkdirfix (cache, p, cb) {
+ // we have to infer the owner _before_ making the directory, even though
+ // we aren't going to use the results, since the cache itself might not
+ // exist yet. If we mkdirp it, then our current uid/gid will be assumed
+ // to be correct if it creates the cache folder in the process.
+ return BB.resolve(inferOwner(cache)).then(() => {
+ return mkdirp(p).then(made => {
+ if (made) {
+ return fixOwner(cache, made).then(() => made)
+ }
+ }).catch({ code: 'EEXIST' }, () => {
+ // There's a race in mkdirp!
+ return fixOwner(cache, p).then(() => null)
+ })
})
}
module.exports.mkdirfix.sync = mkdirfixSync
-function mkdirfixSync (p, uid, gid) {
+function mkdirfixSync (cache, p) {
try {
+ inferOwner.sync(cache)
const made = mkdirp.sync(p)
if (made) {
- fixOwnerSync(made, uid, gid)
+ fixOwnerSync(cache, made)
return made
}
} catch (err) {
if (err.code === 'EEXIST') {
- fixOwnerSync(p, uid, gid)
+ fixOwnerSync(cache, p)
return null
} else {
throw err
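
The rewritten fix-owner never consults caller-supplied ids: it asks `infer-owner` who owns the cache directory and memoizes the process's own uid/gid through `getSelf()`, only calling `chownr` when the two differ. A standalone sketch of the inference step (the cache path is illustrative):

```javascript
const inferOwner = require('infer-owner')

const cache = '/tmp/my-cache/_cacache' // illustrative; need not exist yet

// Async form used by fixOwner above: resolves to the uid/gid of the
// nearest existing directory at or above `cache`, which is then handed
// to chownr unless it already matches the memoized process ids.
inferOwner(cache).then(({ uid, gid }) => {
  console.log('cache contents will be owned by', uid, gid)
})

// Sync form used by fixOwnerSync/mkdirfixSync.
const { uid, gid } = inferOwner.sync(cache)
```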
diff --git a/deps/npm/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/cacache/lib/util/tmp.js
index 65fc4b297e..78494b8eae 100644
--- a/deps/npm/node_modules/cacache/lib/util/tmp.js
+++ b/deps/npm/node_modules/cacache/lib/util/tmp.js
@@ -9,16 +9,14 @@ const rimraf = BB.promisify(require('rimraf'))
const uniqueFilename = require('unique-filename')
const TmpOpts = figgyPudding({
- tmpPrefix: {},
- uid: {},
- gid: {}
+ tmpPrefix: {}
})
module.exports.mkdir = mktmpdir
function mktmpdir (cache, opts) {
opts = TmpOpts(opts)
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(tmpTarget, opts.uid, opts.gid).then(() => {
+ return fixOwner.mkdirfix(cache, tmpTarget).then(() => {
return tmpTarget
})
}
@@ -34,7 +32,6 @@ function withTmp (cache, opts, cb) {
}
module.exports.fix = fixtmpdir
-function fixtmpdir (cache, opts) {
- opts = TmpOpts(opts)
- return fixOwner(path.join(cache, 'tmp'), opts.uid, opts.gid)
+function fixtmpdir (cache) {
+ return fixOwner(cache, path.join(cache, 'tmp'))
}
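
With `uid`/`gid` gone from `TmpOpts`, only `tmpPrefix` remains. A small usage sketch of the tmp helpers as documented in the README diff above, assuming `withTmp` removes the directory once the callback's returned promise settles (the cache path and scratch file are illustrative):

```javascript
const path = require('path')
const fs = require('fs')
const cacache = require('cacache')

const cache = '/tmp/my-cache' // illustrative

cacache.tmp.withTmp(cache, { tmpPrefix: 'unpack' }, dir => {
  // scratch files written here disappear along with the tmp dir
  return fs.promises.writeFile(path.join(dir, 'scratch.json'), '{}')
}).then(() => {
  // for manually managed tmp dirs, fix() re-chowns <cache>/tmp to match
  // the cache directory's owner
  return cacache.tmp.fix(cache)
})
```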
diff --git a/deps/npm/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/cacache/lib/verify.js
index 8eaab0b7da..617d38db12 100644
--- a/deps/npm/node_modules/cacache/lib/verify.js
+++ b/deps/npm/node_modules/cacache/lib/verify.js
@@ -22,9 +22,7 @@ const VerifyOpts = figgyPudding({
filter: {},
log: {
default: { silly () {} }
- },
- uid: {},
- gid: {}
+ }
})
module.exports = verify
@@ -67,9 +65,9 @@ function markEndTime (cache, opts) {
function fixPerms (cache, opts) {
opts.log.silly('verify', 'fixing cache permissions')
- return fixOwner.mkdirfix(cache, opts.uid, opts.gid).then(() => {
+ return fixOwner.mkdirfix(cache, cache).then(() => {
// TODO - fix file permissions too
- return fixOwner.chownr(cache, opts.uid, opts.gid)
+ return fixOwner.chownr(cache, cache)
}).then(() => null)
}
@@ -195,8 +193,6 @@ function rebuildBucket (cache, bucket, stats, opts) {
const content = contentPath(cache, entry.integrity)
return fs.statAsync(content).then(() => {
return index.insert(cache, entry.key, entry.integrity, {
- uid: opts.uid,
- gid: opts.gid,
metadata: entry.metadata,
size: entry.size
}).then(() => { stats.totalEntries++ })
@@ -216,7 +212,11 @@ function cleanTmp (cache, opts) {
function writeVerifile (cache, opts) {
const verifile = path.join(cache, '_lastverified')
opts.log.silly('verify', 'writing verifile to ' + verifile)
- return fs.writeFileAsync(verifile, '' + (+(new Date())))
+ try {
+ return fs.writeFileAsync(verifile, '' + (+(new Date())))
+ } finally {
+ fixOwner.chownr.sync(cache, verifile)
+ }
}
module.exports.lastRun = lastRun
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/LICENSE b/deps/npm/node_modules/cacache/node_modules/glob/LICENSE
deleted file mode 100644
index 42ca266df1..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-## Glob Logo
-
-Glob's logo created by Tanya Brassie <http://tanyabrassie.com/>, licensed
-under a Creative Commons Attribution-ShareAlike 4.0 International License
-https://creativecommons.org/licenses/by-sa/4.0/
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/README.md b/deps/npm/node_modules/cacache/node_modules/glob/README.md
deleted file mode 100644
index e71b967ea2..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/README.md
+++ /dev/null
@@ -1,373 +0,0 @@
-# Glob
-
-Match files using the patterns the shell uses, like stars and stuff.
-
-[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master)
-
-This is a glob implementation in JavaScript. It uses the `minimatch`
-library to do its matching.
-
-![](logo/glob.png)
-
-## Usage
-
-Install with npm
-
-```
-npm i glob
-```
-
-```javascript
-var glob = require("glob")
-
-// options is optional
-glob("**/*.js", options, function (er, files) {
- // files is an array of filenames.
- // If the `nonull` option is set, and nothing
- // was found, then files is ["**/*.js"]
- // er is an error object or null.
-})
-```
-
-## Glob Primer
-
-"Globs" are the patterns you type when you do stuff like `ls *.js` on
-the command line, or put `build/*` in a `.gitignore` file.
-
-Before parsing the path part patterns, braced sections are expanded
-into a set. Braced sections start with `{` and end with `}`, with any
-number of comma-delimited sections within. Braced sections may contain
-slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
-
-The following characters have special magic meaning when used in a
-path portion:
-
-* `*` Matches 0 or more characters in a single path portion
-* `?` Matches 1 character
-* `[...]` Matches a range of characters, similar to a RegExp range.
- If the first character of the range is `!` or `^` then it matches
- any character not in the range.
-* `!(pattern|pattern|pattern)` Matches anything that does not match
- any of the patterns provided.
-* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
- patterns provided.
-* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
- patterns provided.
-* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
-* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
- provided
-* `**` If a "globstar" is alone in a path portion, then it matches
- zero or more directories and subdirectories searching for matches.
- It does not crawl symlinked directories.
-
-### Dots
-
-If a file or directory path portion has a `.` as the first character,
-then it will not match any glob pattern unless that pattern's
-corresponding path part also has a `.` as its first character.
-
-For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
-However the pattern `a/*/c` would not, because `*` does not start with
-a dot character.
-
-You can make glob treat dots as normal characters by setting
-`dot:true` in the options.
-
-### Basename Matching
-
-If you set `matchBase:true` in the options, and the pattern has no
-slashes in it, then it will seek for any file anywhere in the tree
-with a matching basename. For example, `*.js` would match
-`test/simple/basic.js`.
-
-### Empty Sets
-
-If no matching files are found, then an empty array is returned. This
-differs from the shell, where the pattern itself is returned. For
-example:
-
- $ echo a*s*d*f
- a*s*d*f
-
-To get the bash-style behavior, set the `nonull:true` in the options.
-
-### See Also:
-
-* `man sh`
-* `man bash` (Search for "Pattern Matching")
-* `man 3 fnmatch`
-* `man 5 gitignore`
-* [minimatch documentation](https://github.com/isaacs/minimatch)
-
-## glob.hasMagic(pattern, [options])
-
-Returns `true` if there are any special characters in the pattern, and
-`false` otherwise.
-
-Note that the options affect the results. If `noext:true` is set in
-the options object, then `+(a|b)` will not be considered a magic
-pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
-then that is considered magical, unless `nobrace:true` is set in the
-options.
-
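For illustration, the results one would expect under the defaults described above (a sketch, not an exhaustive list):

```javascript
var glob = require("glob")

glob.hasMagic("a/b/c.js")                  // false: no special characters
glob.hasMagic("a/*/c.js")                  // true: contains *
glob.hasMagic("+(a|b)")                    // true by default
glob.hasMagic("+(a|b)", { noext: true })   // false: extglobs are not magic here
glob.hasMagic("a/{b,c}")                   // true, unless nobrace:true is set
```
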
-## glob(pattern, [options], cb)
-
-* `pattern` `{String}` Pattern to be matched
-* `options` `{Object}`
-* `cb` `{Function}`
- * `err` `{Error | null}`
- * `matches` `{Array<String>}` filenames found matching the pattern
-
-Perform an asynchronous glob search.
-
-## glob.sync(pattern, [options])
-
-* `pattern` `{String}` Pattern to be matched
-* `options` `{Object}`
-* return: `{Array<String>}` filenames found matching the pattern
-
-Perform a synchronous glob search.
-
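A minimal synchronous sketch (the pattern and `ignore` value are only examples):

```javascript
var glob = require("glob")

var files = glob.sync("**/*.js", { ignore: "node_modules/**" })
console.log("found", files.length, "JavaScript files")
```
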
-## Class: glob.Glob
-
-Create a Glob object by instantiating the `glob.Glob` class.
-
-```javascript
-var Glob = require("glob").Glob
-var mg = new Glob(pattern, options, cb)
-```
-
-It's an EventEmitter, and starts walking the filesystem to find matches
-immediately.
-
-### new glob.Glob(pattern, [options], [cb])
-
-* `pattern` `{String}` pattern to search for
-* `options` `{Object}`
-* `cb` `{Function}` Called when an error occurs, or matches are found
- * `err` `{Error | null}`
- * `matches` `{Array<String>}` filenames found matching the pattern
-
-Note that if the `sync` flag is set in the options, then matches will
-be immediately available on the `g.found` member.
-
-### Properties
-
-* `minimatch` The minimatch object that the glob uses.
-* `options` The options object passed in.
-* `aborted` Boolean which is set to true when calling `abort()`. There
- is no way at this time to continue a glob search after aborting, but
- you can re-use the statCache to avoid having to duplicate syscalls.
-* `cache` Convenience object. Each field has the following possible
- values:
- * `false` - Path does not exist
- * `true` - Path exists
- * `'FILE'` - Path exists, and is not a directory
- * `'DIR'` - Path exists, and is a directory
- * `[file, entries, ...]` - Path exists, is a directory, and the
- array value is the results of `fs.readdir`
-* `statCache` Cache of `fs.stat` results, to prevent statting the same
- path multiple times.
-* `symlinks` A record of which paths are symbolic links, which is
- relevant in resolving `**` patterns.
-* `realpathCache` An optional object which is passed to `fs.realpath`
- to minimize unnecessary syscalls. It is stored on the instantiated
- Glob object, and may be re-used.
-
-### Events
-
-* `end` When the matching is finished, this is emitted with all the
- matches found. If the `nonull` option is set, and no match was found,
- then the `matches` list contains the original pattern. The matches
- are sorted, unless the `nosort` flag is set.
-* `match` Every time a match is found, this is emitted with the specific
- thing that matched. It is not deduplicated or resolved to a realpath.
-* `error` Emitted when an unexpected error is encountered, or whenever
- any fs error occurs if `options.strict` is set.
-* `abort` When `abort()` is called, this event is raised.
-
-### Methods
-
-* `pause` Temporarily stop the search
-* `resume` Resume the search
-* `abort` Stop the search forever
-
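Putting the class, events, and methods above together, a hedged sketch of typical usage (the pattern is arbitrary):

```javascript
var Glob = require("glob").Glob

var g = new Glob("**/*.md", { nodir: true })

g.on("match", function (file) {
  console.log("matched:", file)
})

g.on("end", function (files) {
  console.log("done,", files.length, "matches")
})

g.on("error", function (er) {
  console.error("glob failed:", er)
  g.abort()   // stop the search; it cannot be resumed afterwards
})
```
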
-### Options
-
-All the options that can be passed to Minimatch can also be passed to
-Glob to change pattern matching behavior. Also, some have been added,
-or have glob-specific ramifications.
-
-All options are false by default, unless otherwise noted.
-
-All options are added to the Glob object, as well.
-
-If you are running many `glob` operations, you can pass a Glob object
-as the `options` argument to a subsequent operation to shortcut some
-`stat` and `readdir` calls. At the very least, you may pass in shared
-`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
-parallel glob operations will be sped up by sharing information about
-the filesystem.
-
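For example, a sketch of reusing one search's caches for a second search (the patterns are hypothetical):

```javascript
var glob = require("glob")

var first = new glob.Glob("lib/**/*.js", function (er, jsFiles) {
  if (er) throw er
  // Passing the first Glob object as the options argument shares its
  // cache, statCache, symlinks, and realpathCache with the second search.
  glob("lib/**/*.json", first, function (er, jsonFiles) {
    if (er) throw er
    console.log(jsFiles.length, jsonFiles.length)
  })
})
```
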
-* `cwd` The current working directory in which to search. Defaults
- to `process.cwd()`.
-* `root` The place where patterns starting with `/` will be mounted
- onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
- systems, and `C:\` or some such on Windows.)
-* `dot` Include `.dot` files in normal matches and `globstar` matches.
- Note that an explicit dot in a portion of the pattern will always
- match dot files.
-* `nomount` By default, a pattern starting with a forward-slash will be
- "mounted" onto the root setting, so that a valid filesystem path is
- returned. Set this flag to disable that behavior.
-* `mark` Add a `/` character to directory matches. Note that this
- requires additional stat calls.
-* `nosort` Don't sort the results.
-* `stat` Set to true to stat *all* results. This reduces performance
- somewhat, and is completely unnecessary, unless `readdir` is presumed
- to be an untrustworthy indicator of file existence.
-* `silent` When an unusual error is encountered when attempting to
- read a directory, a warning will be printed to stderr. Set the
- `silent` option to true to suppress these warnings.
-* `strict` When an unusual error is encountered when attempting to
- read a directory, the process will just continue on in search of
- other matches. Set the `strict` option to raise an error in these
- cases.
-* `cache` See `cache` property above. Pass in a previously generated
- cache object to save some fs calls.
-* `statCache` A cache of results of filesystem information, to prevent
- unnecessary stat calls. While it should not normally be necessary
- to set this, you may pass the statCache from one glob() call to the
- options object of another, if you know that the filesystem will not
- change between calls. (See "Race Conditions" below.)
-* `symlinks` A cache of known symbolic links. You may pass in a
- previously generated `symlinks` object to save `lstat` calls when
- resolving `**` matches.
-* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
-* `nounique` In some cases, brace-expanded patterns can result in the
- same file showing up multiple times in the result set. By default,
- this implementation prevents duplicates in the result set. Set this
- flag to disable that behavior.
-* `nonull` Set to never return an empty set, instead returning a set
- containing the pattern itself. This is the default in glob(3).
-* `debug` Set to enable debug logging in minimatch and glob.
-* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
-* `noglobstar` Do not match `**` against multiple filenames. (Ie,
- treat it as a normal `*` instead.)
-* `noext` Do not match `+(a|b)` "extglob" patterns.
-* `nocase` Perform a case-insensitive match. Note: on
- case-insensitive filesystems, non-magic patterns will match by
- default, since `stat` and `readdir` will not raise errors.
-* `matchBase` Perform a basename-only match if the pattern does not
- contain any slash characters. That is, `*.js` would be treated as
- equivalent to `**/*.js`, matching all js files in all directories.
-* `nodir` Do not match directories, only files. (Note: to match
- *only* directories, simply put a `/` at the end of the pattern.)
-* `ignore` Add a pattern or an array of glob patterns to exclude matches.
- Note: `ignore` patterns are *always* in `dot:true` mode, regardless
- of any other settings.
-* `follow` Follow symlinked directories when expanding `**` patterns.
- Note that this can result in a lot of duplicate references in the
- presence of cyclic links.
-* `realpath` Set to true to call `fs.realpath` on all of the results.
- In the case of a symlink that cannot be resolved, the full absolute
- path to the matched entry is returned (though it will usually be a
- broken symlink)
-* `absolute` Set to true to always receive absolute paths for matched
- files. Unlike `realpath`, this also affects the values returned in
- the `match` event.
-
-## Comparisons to other fnmatch/glob implementations
-
-While strict compliance with the existing standards is a worthwhile
-goal, some discrepancies exist between node-glob and other
-implementations, and are intentional.
-
-The double-star character `**` is supported by default, unless the
-`noglobstar` flag is set. This is supported in the manner of bsdglob
-and bash 4.3, where `**` only has special significance if it is the only
-thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
-`a/**b` will not.
-
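A small sketch of the difference (assuming the listed files exist):

```javascript
var glob = require("glob")

// ** is only special when it is the whole path part:
glob.sync("a/**/b")   // can match a/b, a/x/b, a/x/y/b, ...
glob.sync("a/**b")    // here ** behaves like a single *, so a/x/y/b is not matched
```
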
-Note that symlinked directories are not crawled as part of a `**`,
-though their contents may match against subsequent portions of the
-pattern. This prevents infinite loops and duplicates and the like.
-
-If an escaped pattern has no matches, and the `nonull` flag is set,
-then glob returns the pattern as-provided, rather than
-interpreting the character escapes. For example,
-`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
-`"*a?"`. This is akin to setting the `nullglob` option in bash, except
-that it does not resolve escaped pattern characters.
-
-If brace expansion is not disabled, then it is performed before any
-other interpretation of the glob pattern. Thus, a pattern like
-`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
-**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
-checked for validity. Since those two are valid, matching proceeds.
-
-### Comments and Negation
-
-Previously, this module let you mark a pattern as a "comment" if it
-started with a `#` character, or a "negated" pattern if it started
-with a `!` character.
-
-These options were deprecated in version 5, and removed in version 6.
-
-To specify things that should not match, use the `ignore` option.
-
-## Windows
-
-**Please only use forward-slashes in glob expressions.**
-
-Though Windows uses either `/` or `\` as its path separator, only `/`
-characters are used by this glob implementation. You must use
-forward-slashes **only** in glob expressions. Back-slashes will always
-be interpreted as escape characters, not path separators.
-
-Results from absolute patterns such as `/foo/*` are mounted onto the
-root setting using `path.join`. On Windows, this will by default result
-in `/foo/*` matching `C:\foo\bar.txt`.
-
-## Race Conditions
-
-Glob searching, by its very nature, is susceptible to race conditions,
-since it relies on directory walking and such.
-
-As a result, it is possible that a file that exists when glob looks for
-it may have been deleted or modified by the time it returns the result.
-
-As part of its internal implementation, this program caches all stat
-and readdir calls that it makes, in order to cut down on system
-overhead. However, this also makes it even more susceptible to races,
-especially if the cache or statCache objects are reused between glob
-calls.
-
-Users are thus advised not to use a glob result as a guarantee of
-filesystem state in the face of rapid changes. For the vast majority
-of operations, this is never a problem.
-
-## Glob Logo
-Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo).
-
-The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).
-
-## Contributing
-
-Any change to behavior (including bugfixes) must come with a test.
-
-Patches that fail tests or reduce performance will be rejected.
-
-```
-# to run tests
-npm test
-
-# to re-generate test fixtures
-npm run test-regen
-
-# to benchmark against bash/zsh
-npm run bench
-
-# to profile javascript
-npm run prof
-```
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/changelog.md b/deps/npm/node_modules/cacache/node_modules/glob/changelog.md
deleted file mode 100644
index 41636771e3..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/changelog.md
+++ /dev/null
@@ -1,67 +0,0 @@
-## 7.0
-
-- Raise error if `options.cwd` is specified, and not a directory
-
-## 6.0
-
-- Remove comment and negation pattern support
-- Ignore patterns are always in `dot:true` mode
-
-## 5.0
-
-- Deprecate comment and negation patterns
-- Fix regression in `mark` and `nodir` options from making all cache
- keys absolute path.
-- Abort if `fs.readdir` returns an error that's unexpected
-- Don't emit `match` events for ignored items
-- Treat ENOTSUP like ENOTDIR in readdir
-
-## 4.5
-
-- Add `options.follow` to always follow directory symlinks in globstar
-- Add `options.realpath` to call `fs.realpath` on all results
-- Always cache based on absolute path
-
-## 4.4
-
-- Add `options.ignore`
-- Fix handling of broken symlinks
-
-## 4.3
-
-- Bump minimatch to 2.x
-- Pass all tests on Windows
-
-## 4.2
-
-- Add `glob.hasMagic` function
-- Add `options.nodir` flag
-
-## 4.1
-
-- Refactor sync and async implementations for performance
-- Throw if callback provided to sync glob function
-- Treat symbolic links in globstar results the same as Bash 4.3
-
-## 4.0
-
-- Use `^` for dependency versions (bumped major because this breaks
- older npm versions)
-- Ensure callbacks are only ever called once
-- switch to ISC license
-
-## 3.x
-
-- Rewrite in JavaScript
-- Add support for setting root, cwd, and windows support
-- Cache many fs calls
-- Add globstar support
-- emit match events
-
-## 2.x
-
-- Use `glob.h` and `fnmatch.h` from NetBSD
-
-## 1.x
-
-- `glob.h` static binding.
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/common.js b/deps/npm/node_modules/cacache/node_modules/glob/common.js
deleted file mode 100644
index 66651bb3aa..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/common.js
+++ /dev/null
@@ -1,240 +0,0 @@
-exports.alphasort = alphasort
-exports.alphasorti = alphasorti
-exports.setopts = setopts
-exports.ownProp = ownProp
-exports.makeAbs = makeAbs
-exports.finish = finish
-exports.mark = mark
-exports.isIgnored = isIgnored
-exports.childrenIgnored = childrenIgnored
-
-function ownProp (obj, field) {
- return Object.prototype.hasOwnProperty.call(obj, field)
-}
-
-var path = require("path")
-var minimatch = require("minimatch")
-var isAbsolute = require("path-is-absolute")
-var Minimatch = minimatch.Minimatch
-
-function alphasorti (a, b) {
- return a.toLowerCase().localeCompare(b.toLowerCase())
-}
-
-function alphasort (a, b) {
- return a.localeCompare(b)
-}
-
-function setupIgnores (self, options) {
- self.ignore = options.ignore || []
-
- if (!Array.isArray(self.ignore))
- self.ignore = [self.ignore]
-
- if (self.ignore.length) {
- self.ignore = self.ignore.map(ignoreMap)
- }
-}
-
-// ignore patterns are always in dot:true mode.
-function ignoreMap (pattern) {
- var gmatcher = null
- if (pattern.slice(-3) === '/**') {
- var gpattern = pattern.replace(/(\/\*\*)+$/, '')
- gmatcher = new Minimatch(gpattern, { dot: true })
- }
-
- return {
- matcher: new Minimatch(pattern, { dot: true }),
- gmatcher: gmatcher
- }
-}
-
-function setopts (self, pattern, options) {
- if (!options)
- options = {}
-
- // base-matching: just use globstar for that.
- if (options.matchBase && -1 === pattern.indexOf("/")) {
- if (options.noglobstar) {
- throw new Error("base matching requires globstar")
- }
- pattern = "**/" + pattern
- }
-
- self.silent = !!options.silent
- self.pattern = pattern
- self.strict = options.strict !== false
- self.realpath = !!options.realpath
- self.realpathCache = options.realpathCache || Object.create(null)
- self.follow = !!options.follow
- self.dot = !!options.dot
- self.mark = !!options.mark
- self.nodir = !!options.nodir
- if (self.nodir)
- self.mark = true
- self.sync = !!options.sync
- self.nounique = !!options.nounique
- self.nonull = !!options.nonull
- self.nosort = !!options.nosort
- self.nocase = !!options.nocase
- self.stat = !!options.stat
- self.noprocess = !!options.noprocess
- self.absolute = !!options.absolute
-
- self.maxLength = options.maxLength || Infinity
- self.cache = options.cache || Object.create(null)
- self.statCache = options.statCache || Object.create(null)
- self.symlinks = options.symlinks || Object.create(null)
-
- setupIgnores(self, options)
-
- self.changedCwd = false
- var cwd = process.cwd()
- if (!ownProp(options, "cwd"))
- self.cwd = cwd
- else {
- self.cwd = path.resolve(options.cwd)
- self.changedCwd = self.cwd !== cwd
- }
-
- self.root = options.root || path.resolve(self.cwd, "/")
- self.root = path.resolve(self.root)
- if (process.platform === "win32")
- self.root = self.root.replace(/\\/g, "/")
-
- // TODO: is an absolute `cwd` supposed to be resolved against `root`?
- // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
- self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
- if (process.platform === "win32")
- self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
- self.nomount = !!options.nomount
-
- // disable comments and negation in Minimatch.
- // Note that they are not supported in Glob itself anyway.
- options.nonegate = true
- options.nocomment = true
-
- self.minimatch = new Minimatch(pattern, options)
- self.options = self.minimatch.options
-}
-
-function finish (self) {
- var nou = self.nounique
- var all = nou ? [] : Object.create(null)
-
- for (var i = 0, l = self.matches.length; i < l; i ++) {
- var matches = self.matches[i]
- if (!matches || Object.keys(matches).length === 0) {
- if (self.nonull) {
- // do like the shell, and spit out the literal glob
- var literal = self.minimatch.globSet[i]
- if (nou)
- all.push(literal)
- else
- all[literal] = true
- }
- } else {
- // had matches
- var m = Object.keys(matches)
- if (nou)
- all.push.apply(all, m)
- else
- m.forEach(function (m) {
- all[m] = true
- })
- }
- }
-
- if (!nou)
- all = Object.keys(all)
-
- if (!self.nosort)
- all = all.sort(self.nocase ? alphasorti : alphasort)
-
- // at *some* point we statted all of these
- if (self.mark) {
- for (var i = 0; i < all.length; i++) {
- all[i] = self._mark(all[i])
- }
- if (self.nodir) {
- all = all.filter(function (e) {
- var notDir = !(/\/$/.test(e))
- var c = self.cache[e] || self.cache[makeAbs(self, e)]
- if (notDir && c)
- notDir = c !== 'DIR' && !Array.isArray(c)
- return notDir
- })
- }
- }
-
- if (self.ignore.length)
- all = all.filter(function(m) {
- return !isIgnored(self, m)
- })
-
- self.found = all
-}
-
-function mark (self, p) {
- var abs = makeAbs(self, p)
- var c = self.cache[abs]
- var m = p
- if (c) {
- var isDir = c === 'DIR' || Array.isArray(c)
- var slash = p.slice(-1) === '/'
-
- if (isDir && !slash)
- m += '/'
- else if (!isDir && slash)
- m = m.slice(0, -1)
-
- if (m !== p) {
- var mabs = makeAbs(self, m)
- self.statCache[mabs] = self.statCache[abs]
- self.cache[mabs] = self.cache[abs]
- }
- }
-
- return m
-}
-
-// lotta situps...
-function makeAbs (self, f) {
- var abs = f
- if (f.charAt(0) === '/') {
- abs = path.join(self.root, f)
- } else if (isAbsolute(f) || f === '') {
- abs = f
- } else if (self.changedCwd) {
- abs = path.resolve(self.cwd, f)
- } else {
- abs = path.resolve(f)
- }
-
- if (process.platform === 'win32')
- abs = abs.replace(/\\/g, '/')
-
- return abs
-}
-
-
-// Return true if the pattern ends with a globstar '**', for the accompanying parent directory.
-// E.g. if node_modules/** is the pattern, add 'node_modules' to the ignore list along with its contents.
-function isIgnored (self, path) {
- if (!self.ignore.length)
- return false
-
- return self.ignore.some(function(item) {
- return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
- })
-}
-
-function childrenIgnored (self, path) {
- if (!self.ignore.length)
- return false
-
- return self.ignore.some(function(item) {
- return !!(item.gmatcher && item.gmatcher.match(path))
- })
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/glob.js b/deps/npm/node_modules/cacache/node_modules/glob/glob.js
deleted file mode 100644
index 58dec0f6c2..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/glob.js
+++ /dev/null
@@ -1,790 +0,0 @@
-// Approach:
-//
-// 1. Get the minimatch set
-// 2. For each pattern in the set, PROCESS(pattern, false)
-// 3. Store matches per-set, then uniq them
-//
-// PROCESS(pattern, inGlobStar)
-// Get the first [n] items from pattern that are all strings
-// Join these together. This is PREFIX.
-// If there is no more remaining, then stat(PREFIX) and
-// add to matches if it succeeds. END.
-//
-// If inGlobStar and PREFIX is symlink and points to dir
-// set ENTRIES = []
-// else readdir(PREFIX) as ENTRIES
-// If fail, END
-//
-// with ENTRIES
-// If pattern[n] is GLOBSTAR
-// // handle the case where the globstar match is empty
-// // by pruning it out, and testing the resulting pattern
-// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
-// // handle other cases.
-// for ENTRY in ENTRIES (not dotfiles)
-// // attach globstar + tail onto the entry
-// // Mark that this entry is a globstar match
-// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
-//
-// else // not globstar
-// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
-// Test ENTRY against pattern[n]
-// If fails, continue
-// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
-//
-// Caveat:
-// Cache all stats and readdirs results to minimize syscall. Since all
-// we ever care about is existence and directory-ness, we can just keep
-// `true` for files, and [children,...] for directories, or `false` for
-// things that don't exist.
-
-module.exports = glob
-
-var fs = require('fs')
-var rp = require('fs.realpath')
-var minimatch = require('minimatch')
-var Minimatch = minimatch.Minimatch
-var inherits = require('inherits')
-var EE = require('events').EventEmitter
-var path = require('path')
-var assert = require('assert')
-var isAbsolute = require('path-is-absolute')
-var globSync = require('./sync.js')
-var common = require('./common.js')
-var alphasort = common.alphasort
-var alphasorti = common.alphasorti
-var setopts = common.setopts
-var ownProp = common.ownProp
-var inflight = require('inflight')
-var util = require('util')
-var childrenIgnored = common.childrenIgnored
-var isIgnored = common.isIgnored
-
-var once = require('once')
-
-function glob (pattern, options, cb) {
- if (typeof options === 'function') cb = options, options = {}
- if (!options) options = {}
-
- if (options.sync) {
- if (cb)
- throw new TypeError('callback provided to sync glob')
- return globSync(pattern, options)
- }
-
- return new Glob(pattern, options, cb)
-}
-
-glob.sync = globSync
-var GlobSync = glob.GlobSync = globSync.GlobSync
-
-// old api surface
-glob.glob = glob
-
-function extend (origin, add) {
- if (add === null || typeof add !== 'object') {
- return origin
- }
-
- var keys = Object.keys(add)
- var i = keys.length
- while (i--) {
- origin[keys[i]] = add[keys[i]]
- }
- return origin
-}
-
-glob.hasMagic = function (pattern, options_) {
- var options = extend({}, options_)
- options.noprocess = true
-
- var g = new Glob(pattern, options)
- var set = g.minimatch.set
-
- if (!pattern)
- return false
-
- if (set.length > 1)
- return true
-
- for (var j = 0; j < set[0].length; j++) {
- if (typeof set[0][j] !== 'string')
- return true
- }
-
- return false
-}
-
-glob.Glob = Glob
-inherits(Glob, EE)
-function Glob (pattern, options, cb) {
- if (typeof options === 'function') {
- cb = options
- options = null
- }
-
- if (options && options.sync) {
- if (cb)
- throw new TypeError('callback provided to sync glob')
- return new GlobSync(pattern, options)
- }
-
- if (!(this instanceof Glob))
- return new Glob(pattern, options, cb)
-
- setopts(this, pattern, options)
- this._didRealPath = false
-
- // process each pattern in the minimatch set
- var n = this.minimatch.set.length
-
- // The matches are stored as {<filename>: true,...} so that
- // duplicates are automagically pruned.
- // Later, we do an Object.keys() on these.
- // Keep them as a list so we can fill in when nonull is set.
- this.matches = new Array(n)
-
- if (typeof cb === 'function') {
- cb = once(cb)
- this.on('error', cb)
- this.on('end', function (matches) {
- cb(null, matches)
- })
- }
-
- var self = this
- this._processing = 0
-
- this._emitQueue = []
- this._processQueue = []
- this.paused = false
-
- if (this.noprocess)
- return this
-
- if (n === 0)
- return done()
-
- var sync = true
- for (var i = 0; i < n; i ++) {
- this._process(this.minimatch.set[i], i, false, done)
- }
- sync = false
-
- function done () {
- --self._processing
- if (self._processing <= 0) {
- if (sync) {
- process.nextTick(function () {
- self._finish()
- })
- } else {
- self._finish()
- }
- }
- }
-}
-
-Glob.prototype._finish = function () {
- assert(this instanceof Glob)
- if (this.aborted)
- return
-
- if (this.realpath && !this._didRealpath)
- return this._realpath()
-
- common.finish(this)
- this.emit('end', this.found)
-}
-
-Glob.prototype._realpath = function () {
- if (this._didRealpath)
- return
-
- this._didRealpath = true
-
- var n = this.matches.length
- if (n === 0)
- return this._finish()
-
- var self = this
- for (var i = 0; i < this.matches.length; i++)
- this._realpathSet(i, next)
-
- function next () {
- if (--n === 0)
- self._finish()
- }
-}
-
-Glob.prototype._realpathSet = function (index, cb) {
- var matchset = this.matches[index]
- if (!matchset)
- return cb()
-
- var found = Object.keys(matchset)
- var self = this
- var n = found.length
-
- if (n === 0)
- return cb()
-
- var set = this.matches[index] = Object.create(null)
- found.forEach(function (p, i) {
- // If there's a problem with the stat, then it means that
- // one or more of the links in the realpath couldn't be
- // resolved. just return the abs value in that case.
- p = self._makeAbs(p)
- rp.realpath(p, self.realpathCache, function (er, real) {
- if (!er)
- set[real] = true
- else if (er.syscall === 'stat')
- set[p] = true
- else
- self.emit('error', er) // srsly wtf right here
-
- if (--n === 0) {
- self.matches[index] = set
- cb()
- }
- })
- })
-}
-
-Glob.prototype._mark = function (p) {
- return common.mark(this, p)
-}
-
-Glob.prototype._makeAbs = function (f) {
- return common.makeAbs(this, f)
-}
-
-Glob.prototype.abort = function () {
- this.aborted = true
- this.emit('abort')
-}
-
-Glob.prototype.pause = function () {
- if (!this.paused) {
- this.paused = true
- this.emit('pause')
- }
-}
-
-Glob.prototype.resume = function () {
- if (this.paused) {
- this.emit('resume')
- this.paused = false
- if (this._emitQueue.length) {
- var eq = this._emitQueue.slice(0)
- this._emitQueue.length = 0
- for (var i = 0; i < eq.length; i ++) {
- var e = eq[i]
- this._emitMatch(e[0], e[1])
- }
- }
- if (this._processQueue.length) {
- var pq = this._processQueue.slice(0)
- this._processQueue.length = 0
- for (var i = 0; i < pq.length; i ++) {
- var p = pq[i]
- this._processing--
- this._process(p[0], p[1], p[2], p[3])
- }
- }
- }
-}
-
-Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
- assert(this instanceof Glob)
- assert(typeof cb === 'function')
-
- if (this.aborted)
- return
-
- this._processing++
- if (this.paused) {
- this._processQueue.push([pattern, index, inGlobStar, cb])
- return
- }
-
- //console.error('PROCESS %d', this._processing, pattern)
-
- // Get the first [n] parts of pattern that are all strings.
- var n = 0
- while (typeof pattern[n] === 'string') {
- n ++
- }
- // now n is the index of the first one that is *not* a string.
-
- // see if there's anything else
- var prefix
- switch (n) {
- // if not, then this is rather simple
- case pattern.length:
- this._processSimple(pattern.join('/'), index, cb)
- return
-
- case 0:
- // pattern *starts* with some non-trivial item.
- // going to readdir(cwd), but not include the prefix in matches.
- prefix = null
- break
-
- default:
- // pattern has some string bits in the front.
- // whatever it starts with, whether that's 'absolute' like /foo/bar,
- // or 'relative' like '../baz'
- prefix = pattern.slice(0, n).join('/')
- break
- }
-
- var remain = pattern.slice(n)
-
- // get the list of entries.
- var read
- if (prefix === null)
- read = '.'
- else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
- if (!prefix || !isAbsolute(prefix))
- prefix = '/' + prefix
- read = prefix
- } else
- read = prefix
-
- var abs = this._makeAbs(read)
-
- //if ignored, skip _processing
- if (childrenIgnored(this, read))
- return cb()
-
- var isGlobStar = remain[0] === minimatch.GLOBSTAR
- if (isGlobStar)
- this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
- else
- this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
-}
-
-Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
- var self = this
- this._readdir(abs, inGlobStar, function (er, entries) {
- return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
- })
-}
-
-Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
-
- // if the abs isn't a dir, then nothing can match!
- if (!entries)
- return cb()
-
- // It will only match dot entries if it starts with a dot, or if
- // dot is set. Stuff like @(.foo|.bar) isn't allowed.
- var pn = remain[0]
- var negate = !!this.minimatch.negate
- var rawGlob = pn._glob
- var dotOk = this.dot || rawGlob.charAt(0) === '.'
-
- var matchedEntries = []
- for (var i = 0; i < entries.length; i++) {
- var e = entries[i]
- if (e.charAt(0) !== '.' || dotOk) {
- var m
- if (negate && !prefix) {
- m = !e.match(pn)
- } else {
- m = e.match(pn)
- }
- if (m)
- matchedEntries.push(e)
- }
- }
-
- //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
-
- var len = matchedEntries.length
- // If there are no matched entries, then nothing matches.
- if (len === 0)
- return cb()
-
- // if this is the last remaining pattern bit, then no need for
- // an additional stat *unless* the user has specified mark or
- // stat explicitly. We know they exist, since readdir returned
- // them.
-
- if (remain.length === 1 && !this.mark && !this.stat) {
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- if (prefix) {
- if (prefix !== '/')
- e = prefix + '/' + e
- else
- e = prefix + e
- }
-
- if (e.charAt(0) === '/' && !this.nomount) {
- e = path.join(this.root, e)
- }
- this._emitMatch(index, e)
- }
- // This was the last one, and no stats were needed
- return cb()
- }
-
- // now test all matched entries as stand-ins for that part
- // of the pattern.
- remain.shift()
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- var newPattern
- if (prefix) {
- if (prefix !== '/')
- e = prefix + '/' + e
- else
- e = prefix + e
- }
- this._process([e].concat(remain), index, inGlobStar, cb)
- }
- cb()
-}
-
-Glob.prototype._emitMatch = function (index, e) {
- if (this.aborted)
- return
-
- if (isIgnored(this, e))
- return
-
- if (this.paused) {
- this._emitQueue.push([index, e])
- return
- }
-
- var abs = isAbsolute(e) ? e : this._makeAbs(e)
-
- if (this.mark)
- e = this._mark(e)
-
- if (this.absolute)
- e = abs
-
- if (this.matches[index][e])
- return
-
- if (this.nodir) {
- var c = this.cache[abs]
- if (c === 'DIR' || Array.isArray(c))
- return
- }
-
- this.matches[index][e] = true
-
- var st = this.statCache[abs]
- if (st)
- this.emit('stat', e, st)
-
- this.emit('match', e)
-}
-
-Glob.prototype._readdirInGlobStar = function (abs, cb) {
- if (this.aborted)
- return
-
- // follow all symlinked directories forever
- // just proceed as if this is a non-globstar situation
- if (this.follow)
- return this._readdir(abs, false, cb)
-
- var lstatkey = 'lstat\0' + abs
- var self = this
- var lstatcb = inflight(lstatkey, lstatcb_)
-
- if (lstatcb)
- fs.lstat(abs, lstatcb)
-
- function lstatcb_ (er, lstat) {
- if (er && er.code === 'ENOENT')
- return cb()
-
- var isSym = lstat && lstat.isSymbolicLink()
- self.symlinks[abs] = isSym
-
- // If it's not a symlink or a dir, then it's definitely a regular file.
- // don't bother doing a readdir in that case.
- if (!isSym && lstat && !lstat.isDirectory()) {
- self.cache[abs] = 'FILE'
- cb()
- } else
- self._readdir(abs, false, cb)
- }
-}
-
-Glob.prototype._readdir = function (abs, inGlobStar, cb) {
- if (this.aborted)
- return
-
- cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
- if (!cb)
- return
-
- //console.error('RD %j %j', +inGlobStar, abs)
- if (inGlobStar && !ownProp(this.symlinks, abs))
- return this._readdirInGlobStar(abs, cb)
-
- if (ownProp(this.cache, abs)) {
- var c = this.cache[abs]
- if (!c || c === 'FILE')
- return cb()
-
- if (Array.isArray(c))
- return cb(null, c)
- }
-
- var self = this
- fs.readdir(abs, readdirCb(this, abs, cb))
-}
-
-function readdirCb (self, abs, cb) {
- return function (er, entries) {
- if (er)
- self._readdirError(abs, er, cb)
- else
- self._readdirEntries(abs, entries, cb)
- }
-}
-
-Glob.prototype._readdirEntries = function (abs, entries, cb) {
- if (this.aborted)
- return
-
- // if we haven't asked to stat everything, then just
- // assume that everything in there exists, so we can avoid
- // having to stat it a second time.
- if (!this.mark && !this.stat) {
- for (var i = 0; i < entries.length; i ++) {
- var e = entries[i]
- if (abs === '/')
- e = abs + e
- else
- e = abs + '/' + e
- this.cache[e] = true
- }
- }
-
- this.cache[abs] = entries
- return cb(null, entries)
-}
-
-Glob.prototype._readdirError = function (f, er, cb) {
- if (this.aborted)
- return
-
- // handle errors, and cache the information
- switch (er.code) {
- case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
- case 'ENOTDIR': // totally normal. means it *does* exist.
- var abs = this._makeAbs(f)
- this.cache[abs] = 'FILE'
- if (abs === this.cwdAbs) {
- var error = new Error(er.code + ' invalid cwd ' + this.cwd)
- error.path = this.cwd
- error.code = er.code
- this.emit('error', error)
- this.abort()
- }
- break
-
- case 'ENOENT': // not terribly unusual
- case 'ELOOP':
- case 'ENAMETOOLONG':
- case 'UNKNOWN':
- this.cache[this._makeAbs(f)] = false
- break
-
- default: // some unusual error. Treat as failure.
- this.cache[this._makeAbs(f)] = false
- if (this.strict) {
- this.emit('error', er)
- // If the error is handled, then we abort
- // if not, we threw out of here
- this.abort()
- }
- if (!this.silent)
- console.error('glob error', er)
- break
- }
-
- return cb()
-}
-
-Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
- var self = this
- this._readdir(abs, inGlobStar, function (er, entries) {
- self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
- })
-}
-
-
-Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
- //console.error('pgs2', prefix, remain[0], entries)
-
- // no entries means not a dir, so it can never have matches
- // foo.txt/** doesn't match foo.txt
- if (!entries)
- return cb()
-
- // test without the globstar, and with every child both below
- // and replacing the globstar.
- var remainWithoutGlobStar = remain.slice(1)
- var gspref = prefix ? [ prefix ] : []
- var noGlobStar = gspref.concat(remainWithoutGlobStar)
-
- // the noGlobStar pattern exits the inGlobStar state
- this._process(noGlobStar, index, false, cb)
-
- var isSym = this.symlinks[abs]
- var len = entries.length
-
- // If it's a symlink, and we're in a globstar, then stop
- if (isSym && inGlobStar)
- return cb()
-
- for (var i = 0; i < len; i++) {
- var e = entries[i]
- if (e.charAt(0) === '.' && !this.dot)
- continue
-
- // these two cases enter the inGlobStar state
- var instead = gspref.concat(entries[i], remainWithoutGlobStar)
- this._process(instead, index, true, cb)
-
- var below = gspref.concat(entries[i], remain)
- this._process(below, index, true, cb)
- }
-
- cb()
-}
-
-Glob.prototype._processSimple = function (prefix, index, cb) {
- // XXX review this. Shouldn't it be doing the mounting etc
- // before doing stat? kinda weird?
- var self = this
- this._stat(prefix, function (er, exists) {
- self._processSimple2(prefix, index, er, exists, cb)
- })
-}
-Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
-
- //console.error('ps2', prefix, exists)
-
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- // If it doesn't exist, then just mark the lack of results
- if (!exists)
- return cb()
-
- if (prefix && isAbsolute(prefix) && !this.nomount) {
- var trail = /[\/\\]$/.test(prefix)
- if (prefix.charAt(0) === '/') {
- prefix = path.join(this.root, prefix)
- } else {
- prefix = path.resolve(this.root, prefix)
- if (trail)
- prefix += '/'
- }
- }
-
- if (process.platform === 'win32')
- prefix = prefix.replace(/\\/g, '/')
-
- // Mark this as a match
- this._emitMatch(index, prefix)
- cb()
-}
-
-// Returns either 'DIR', 'FILE', or false
-Glob.prototype._stat = function (f, cb) {
- var abs = this._makeAbs(f)
- var needDir = f.slice(-1) === '/'
-
- if (f.length > this.maxLength)
- return cb()
-
- if (!this.stat && ownProp(this.cache, abs)) {
- var c = this.cache[abs]
-
- if (Array.isArray(c))
- c = 'DIR'
-
- // It exists, but maybe not how we need it
- if (!needDir || c === 'DIR')
- return cb(null, c)
-
- if (needDir && c === 'FILE')
- return cb()
-
- // otherwise we have to stat, because maybe c=true
- // if we know it exists, but not what it is.
- }
-
- var exists
- var stat = this.statCache[abs]
- if (stat !== undefined) {
- if (stat === false)
- return cb(null, stat)
- else {
- var type = stat.isDirectory() ? 'DIR' : 'FILE'
- if (needDir && type === 'FILE')
- return cb()
- else
- return cb(null, type, stat)
- }
- }
-
- var self = this
- var statcb = inflight('stat\0' + abs, lstatcb_)
- if (statcb)
- fs.lstat(abs, statcb)
-
- function lstatcb_ (er, lstat) {
- if (lstat && lstat.isSymbolicLink()) {
- // If it's a symlink, then treat it as the target, unless
- // the target does not exist, then treat it as a file.
- return fs.stat(abs, function (er, stat) {
- if (er)
- self._stat2(f, abs, null, lstat, cb)
- else
- self._stat2(f, abs, er, stat, cb)
- })
- } else {
- self._stat2(f, abs, er, lstat, cb)
- }
- }
-}
-
-Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
- if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
- this.statCache[abs] = false
- return cb()
- }
-
- var needDir = f.slice(-1) === '/'
- this.statCache[abs] = stat
-
- if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
- return cb(null, false, stat)
-
- var c = true
- if (stat)
- c = stat.isDirectory() ? 'DIR' : 'FILE'
- this.cache[abs] = this.cache[abs] || c
-
- if (needDir && c === 'FILE')
- return cb()
-
- return cb(null, c, stat)
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/package.json b/deps/npm/node_modules/cacache/node_modules/glob/package.json
deleted file mode 100644
index eaead84e56..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/package.json
+++ /dev/null
@@ -1,76 +0,0 @@
-{
- "_from": "glob@^7.1.4",
- "_id": "glob@7.1.4",
- "_inBundle": false,
- "_integrity": "sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==",
- "_location": "/cacache/glob",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "glob@^7.1.4",
- "name": "glob",
- "escapedName": "glob",
- "rawSpec": "^7.1.4",
- "saveSpec": null,
- "fetchSpec": "^7.1.4"
- },
- "_requiredBy": [
- "/cacache"
- ],
- "_resolved": "https://registry.npmjs.org/glob/-/glob-7.1.4.tgz",
- "_shasum": "aa608a2f6c577ad357e1ae5a5c26d9a8d1969255",
- "_spec": "glob@^7.1.4",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/node-glob/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "fs.realpath": "^1.0.0",
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^3.0.4",
- "once": "^1.3.0",
- "path-is-absolute": "^1.0.0"
- },
- "deprecated": false,
- "description": "a little globber",
- "devDependencies": {
- "mkdirp": "0",
- "rimraf": "^2.2.8",
- "tap": "^12.0.1",
- "tick": "0.0.6"
- },
- "engines": {
- "node": "*"
- },
- "files": [
- "glob.js",
- "sync.js",
- "common.js"
- ],
- "homepage": "https://github.com/isaacs/node-glob#readme",
- "license": "ISC",
- "main": "glob.js",
- "name": "glob",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/node-glob.git"
- },
- "scripts": {
- "bench": "bash benchmark.sh",
- "benchclean": "node benchclean.js",
- "prepublish": "npm run benchclean",
- "prof": "bash prof.sh && cat profile.txt",
- "profclean": "rm -f v8.log profile.txt",
- "test": "tap test/*.js --cov",
- "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js"
- },
- "version": "7.1.4"
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/glob/sync.js b/deps/npm/node_modules/cacache/node_modules/glob/sync.js
deleted file mode 100644
index c952134baa..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/glob/sync.js
+++ /dev/null
@@ -1,486 +0,0 @@
-module.exports = globSync
-globSync.GlobSync = GlobSync
-
-var fs = require('fs')
-var rp = require('fs.realpath')
-var minimatch = require('minimatch')
-var Minimatch = minimatch.Minimatch
-var Glob = require('./glob.js').Glob
-var util = require('util')
-var path = require('path')
-var assert = require('assert')
-var isAbsolute = require('path-is-absolute')
-var common = require('./common.js')
-var alphasort = common.alphasort
-var alphasorti = common.alphasorti
-var setopts = common.setopts
-var ownProp = common.ownProp
-var childrenIgnored = common.childrenIgnored
-var isIgnored = common.isIgnored
-
-function globSync (pattern, options) {
- if (typeof options === 'function' || arguments.length === 3)
- throw new TypeError('callback provided to sync glob\n'+
- 'See: https://github.com/isaacs/node-glob/issues/167')
-
- return new GlobSync(pattern, options).found
-}
-
-function GlobSync (pattern, options) {
- if (!pattern)
- throw new Error('must provide pattern')
-
- if (typeof options === 'function' || arguments.length === 3)
- throw new TypeError('callback provided to sync glob\n'+
- 'See: https://github.com/isaacs/node-glob/issues/167')
-
- if (!(this instanceof GlobSync))
- return new GlobSync(pattern, options)
-
- setopts(this, pattern, options)
-
- if (this.noprocess)
- return this
-
- var n = this.minimatch.set.length
- this.matches = new Array(n)
- for (var i = 0; i < n; i ++) {
- this._process(this.minimatch.set[i], i, false)
- }
- this._finish()
-}
-
-GlobSync.prototype._finish = function () {
- assert(this instanceof GlobSync)
- if (this.realpath) {
- var self = this
- this.matches.forEach(function (matchset, index) {
- var set = self.matches[index] = Object.create(null)
- for (var p in matchset) {
- try {
- p = self._makeAbs(p)
- var real = rp.realpathSync(p, self.realpathCache)
- set[real] = true
- } catch (er) {
- if (er.syscall === 'stat')
- set[self._makeAbs(p)] = true
- else
- throw er
- }
- }
- })
- }
- common.finish(this)
-}
-
-
-GlobSync.prototype._process = function (pattern, index, inGlobStar) {
- assert(this instanceof GlobSync)
-
- // Get the first [n] parts of pattern that are all strings.
- var n = 0
- while (typeof pattern[n] === 'string') {
- n ++
- }
- // now n is the index of the first one that is *not* a string.
-
- // See if there's anything else
- var prefix
- switch (n) {
- // if not, then this is rather simple
- case pattern.length:
- this._processSimple(pattern.join('/'), index)
- return
-
- case 0:
- // pattern *starts* with some non-trivial item.
- // going to readdir(cwd), but not include the prefix in matches.
- prefix = null
- break
-
- default:
- // pattern has some string bits in the front.
- // whatever it starts with, whether that's 'absolute' like /foo/bar,
- // or 'relative' like '../baz'
- prefix = pattern.slice(0, n).join('/')
- break
- }
-
- var remain = pattern.slice(n)
-
- // get the list of entries.
- var read
- if (prefix === null)
- read = '.'
- else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
- if (!prefix || !isAbsolute(prefix))
- prefix = '/' + prefix
- read = prefix
- } else
- read = prefix
-
- var abs = this._makeAbs(read)
-
- //if ignored, skip processing
- if (childrenIgnored(this, read))
- return
-
- var isGlobStar = remain[0] === minimatch.GLOBSTAR
- if (isGlobStar)
- this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
- else
- this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
-}
-
-
-GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
- var entries = this._readdir(abs, inGlobStar)
-
- // if the abs isn't a dir, then nothing can match!
- if (!entries)
- return
-
- // It will only match dot entries if it starts with a dot, or if
- // dot is set. Stuff like @(.foo|.bar) isn't allowed.
- var pn = remain[0]
- var negate = !!this.minimatch.negate
- var rawGlob = pn._glob
- var dotOk = this.dot || rawGlob.charAt(0) === '.'
-
- var matchedEntries = []
- for (var i = 0; i < entries.length; i++) {
- var e = entries[i]
- if (e.charAt(0) !== '.' || dotOk) {
- var m
- if (negate && !prefix) {
- m = !e.match(pn)
- } else {
- m = e.match(pn)
- }
- if (m)
- matchedEntries.push(e)
- }
- }
-
- var len = matchedEntries.length
- // If there are no matched entries, then nothing matches.
- if (len === 0)
- return
-
- // if this is the last remaining pattern bit, then no need for
- // an additional stat *unless* the user has specified mark or
- // stat explicitly. We know they exist, since readdir returned
- // them.
-
- if (remain.length === 1 && !this.mark && !this.stat) {
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- if (prefix) {
- if (prefix.slice(-1) !== '/')
- e = prefix + '/' + e
- else
- e = prefix + e
- }
-
- if (e.charAt(0) === '/' && !this.nomount) {
- e = path.join(this.root, e)
- }
- this._emitMatch(index, e)
- }
- // This was the last one, and no stats were needed
- return
- }
-
- // now test all matched entries as stand-ins for that part
- // of the pattern.
- remain.shift()
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- var newPattern
- if (prefix)
- newPattern = [prefix, e]
- else
- newPattern = [e]
- this._process(newPattern.concat(remain), index, inGlobStar)
- }
-}
-
-
-GlobSync.prototype._emitMatch = function (index, e) {
- if (isIgnored(this, e))
- return
-
- var abs = this._makeAbs(e)
-
- if (this.mark)
- e = this._mark(e)
-
- if (this.absolute) {
- e = abs
- }
-
- if (this.matches[index][e])
- return
-
- if (this.nodir) {
- var c = this.cache[abs]
- if (c === 'DIR' || Array.isArray(c))
- return
- }
-
- this.matches[index][e] = true
-
- if (this.stat)
- this._stat(e)
-}
-
-
-GlobSync.prototype._readdirInGlobStar = function (abs) {
- // follow all symlinked directories forever
- // just proceed as if this is a non-globstar situation
- if (this.follow)
- return this._readdir(abs, false)
-
- var entries
- var lstat
- var stat
- try {
- lstat = fs.lstatSync(abs)
- } catch (er) {
- if (er.code === 'ENOENT') {
- // lstat failed, doesn't exist
- return null
- }
- }
-
- var isSym = lstat && lstat.isSymbolicLink()
- this.symlinks[abs] = isSym
-
- // If it's not a symlink or a dir, then it's definitely a regular file.
- // don't bother doing a readdir in that case.
- if (!isSym && lstat && !lstat.isDirectory())
- this.cache[abs] = 'FILE'
- else
- entries = this._readdir(abs, false)
-
- return entries
-}
-
-GlobSync.prototype._readdir = function (abs, inGlobStar) {
- var entries
-
- if (inGlobStar && !ownProp(this.symlinks, abs))
- return this._readdirInGlobStar(abs)
-
- if (ownProp(this.cache, abs)) {
- var c = this.cache[abs]
- if (!c || c === 'FILE')
- return null
-
- if (Array.isArray(c))
- return c
- }
-
- try {
- return this._readdirEntries(abs, fs.readdirSync(abs))
- } catch (er) {
- this._readdirError(abs, er)
- return null
- }
-}
-
-GlobSync.prototype._readdirEntries = function (abs, entries) {
- // if we haven't asked to stat everything, then just
- // assume that everything in there exists, so we can avoid
- // having to stat it a second time.
- if (!this.mark && !this.stat) {
- for (var i = 0; i < entries.length; i ++) {
- var e = entries[i]
- if (abs === '/')
- e = abs + e
- else
- e = abs + '/' + e
- this.cache[e] = true
- }
- }
-
- this.cache[abs] = entries
-
- // mark and cache dir-ness
- return entries
-}
-
-GlobSync.prototype._readdirError = function (f, er) {
- // handle errors, and cache the information
- switch (er.code) {
- case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
- case 'ENOTDIR': // totally normal. means it *does* exist.
- var abs = this._makeAbs(f)
- this.cache[abs] = 'FILE'
- if (abs === this.cwdAbs) {
- var error = new Error(er.code + ' invalid cwd ' + this.cwd)
- error.path = this.cwd
- error.code = er.code
- throw error
- }
- break
-
- case 'ENOENT': // not terribly unusual
- case 'ELOOP':
- case 'ENAMETOOLONG':
- case 'UNKNOWN':
- this.cache[this._makeAbs(f)] = false
- break
-
- default: // some unusual error. Treat as failure.
- this.cache[this._makeAbs(f)] = false
- if (this.strict)
- throw er
- if (!this.silent)
- console.error('glob error', er)
- break
- }
-}
-
-GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
-
- var entries = this._readdir(abs, inGlobStar)
-
- // no entries means not a dir, so it can never have matches
- // foo.txt/** doesn't match foo.txt
- if (!entries)
- return
-
- // test without the globstar, and with every child both below
- // and replacing the globstar.
- var remainWithoutGlobStar = remain.slice(1)
- var gspref = prefix ? [ prefix ] : []
- var noGlobStar = gspref.concat(remainWithoutGlobStar)
-
- // the noGlobStar pattern exits the inGlobStar state
- this._process(noGlobStar, index, false)
-
- var len = entries.length
- var isSym = this.symlinks[abs]
-
- // If it's a symlink, and we're in a globstar, then stop
- if (isSym && inGlobStar)
- return
-
- for (var i = 0; i < len; i++) {
- var e = entries[i]
- if (e.charAt(0) === '.' && !this.dot)
- continue
-
- // these two cases enter the inGlobStar state
- var instead = gspref.concat(entries[i], remainWithoutGlobStar)
- this._process(instead, index, true)
-
- var below = gspref.concat(entries[i], remain)
- this._process(below, index, true)
- }
-}
-
-GlobSync.prototype._processSimple = function (prefix, index) {
- // XXX review this. Shouldn't it be doing the mounting etc
- // before doing stat? kinda weird?
- var exists = this._stat(prefix)
-
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- // If it doesn't exist, then just mark the lack of results
- if (!exists)
- return
-
- if (prefix && isAbsolute(prefix) && !this.nomount) {
- var trail = /[\/\\]$/.test(prefix)
- if (prefix.charAt(0) === '/') {
- prefix = path.join(this.root, prefix)
- } else {
- prefix = path.resolve(this.root, prefix)
- if (trail)
- prefix += '/'
- }
- }
-
- if (process.platform === 'win32')
- prefix = prefix.replace(/\\/g, '/')
-
- // Mark this as a match
- this._emitMatch(index, prefix)
-}
-
-// Returns either 'DIR', 'FILE', or false
-GlobSync.prototype._stat = function (f) {
- var abs = this._makeAbs(f)
- var needDir = f.slice(-1) === '/'
-
- if (f.length > this.maxLength)
- return false
-
- if (!this.stat && ownProp(this.cache, abs)) {
- var c = this.cache[abs]
-
- if (Array.isArray(c))
- c = 'DIR'
-
- // It exists, but maybe not how we need it
- if (!needDir || c === 'DIR')
- return c
-
- if (needDir && c === 'FILE')
- return false
-
- // otherwise we have to stat, because maybe c=true
- // if we know it exists, but not what it is.
- }
-
- var exists
- var stat = this.statCache[abs]
- if (!stat) {
- var lstat
- try {
- lstat = fs.lstatSync(abs)
- } catch (er) {
- if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
- this.statCache[abs] = false
- return false
- }
- }
-
- if (lstat && lstat.isSymbolicLink()) {
- try {
- stat = fs.statSync(abs)
- } catch (er) {
- stat = lstat
- }
- } else {
- stat = lstat
- }
- }
-
- this.statCache[abs] = stat
-
- var c = true
- if (stat)
- c = stat.isDirectory() ? 'DIR' : 'FILE'
-
- this.cache[abs] = this.cache[abs] || c
-
- if (needDir && c === 'FILE')
- return false
-
- return c
-}
-
-GlobSync.prototype._mark = function (p) {
- return common.mark(this, p)
-}
-
-GlobSync.prototype._makeAbs = function (f) {
- return common.makeAbs(this, f)
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/lru-cache/LICENSE b/deps/npm/node_modules/cacache/node_modules/lru-cache/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/cacache/node_modules/lru-cache/README.md b/deps/npm/node_modules/cacache/node_modules/lru-cache/README.md
deleted file mode 100644
index 435dfebb7e..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/lru-cache/README.md
+++ /dev/null
@@ -1,166 +0,0 @@
-# lru cache
-
-A cache object that deletes the least-recently-used items.
-
-[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache)
-
-## Installation:
-
-```
-npm install lru-cache --save
-```
-
-## Usage:
-
-```javascript
-var LRU = require("lru-cache")
- , options = { max: 500
- , length: function (n, key) { return n * 2 + key.length }
- , dispose: function (key, n) { n.close() }
- , maxAge: 1000 * 60 * 60 }
- , cache = new LRU(options)
- , otherCache = new LRU(50) // sets just the max size
-
-cache.set("key", "value")
-cache.get("key") // "value"
-
-// non-string keys ARE fully supported
-// but note that it must be THE SAME object, not
-// just a JSON-equivalent object.
-var someObject = { a: 1 }
-cache.set(someObject, 'a value')
-// Object keys are not toString()-ed
-cache.set('[object Object]', 'a different value')
-assert.equal(cache.get(someObject), 'a value')
-// A similar object with same keys/values won't work,
-// because it's a different object identity
-assert.equal(cache.get({ a: 1 }), undefined)
-
-cache.reset() // empty the cache
-```
-
-If you put more stuff in it, then items will fall out.
-
-If you try to put an oversized thing in it, then it'll fall out right
-away.
-
-## Options
-
-* `max` The maximum size of the cache, checked by applying the length
- function to all values in the cache. Not setting this is kind of
- silly, since that's the whole purpose of this lib, but it defaults
- to `Infinity`. Setting it to a non-number or negative number will
- throw a `TypeError`. Setting it to 0 makes it `Infinity`.
-* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
- as they age, but if you try to get an item that is too old, it'll
- drop it and return undefined instead of giving it to you.
- Setting this to a negative value will make everything seem old!
- Setting it to a non-number will throw a `TypeError`.
-* `length` Function that is used to calculate the length of stored
- items. If you're storing strings or buffers, then you probably want
- to do something like `function(n, key){return n.length}`. The default is
- `function(){return 1}`, which is fine if you want to store `max`
- like-sized things. The item is passed as the first argument, and
- the key is passed as the second argument.
-* `dispose` Function that is called on items when they are dropped
- from the cache. This can be handy if you want to close file
- descriptors or do other cleanup tasks when items are no longer
- accessible. Called with `key, value`. It's called *before*
- actually removing the item from the internal cache, so if you want
- to immediately put it back in, you'll have to do that in a
- `nextTick` or `setTimeout` callback or it won't do anything.
-* `stale` By default, if you set a `maxAge`, it'll only actually pull
- stale items out of the cache when you `get(key)`. (That is, it's
- not pre-emptively doing a `setTimeout` or anything.) If you set
- `stale:true`, it'll return the stale value before deleting it. If
- you don't set this, then it'll return `undefined` when you try to
- get a stale entry, as if it had already been deleted.
-* `noDisposeOnSet` By default, if you set a `dispose()` method, then
- it'll be called whenever a `set()` operation overwrites an existing
- key. If you set this option, `dispose()` will only be called when a
- key falls out of the cache, not when it is overwritten.
-* `updateAgeOnGet` When using time-expiring entries with `maxAge`,
- setting this to `true` will make each item's effective time update
- to the current time whenever it is retrieved from cache, causing it
- to not expire. (It can still fall out of cache based on recency of
- use, of course.)
-
-## API
-
-* `set(key, value, maxAge)`
-* `get(key) => value`
-
- Both of these will update the "recently used"-ness of the key.
- They do what you think. `maxAge` is optional and overrides the
- cache `maxAge` option if provided.
-
- If the key is not found, `get()` will return `undefined`.
-
- The key and val can be any value.
-
-* `peek(key)`
-
- Returns the key value (or `undefined` if not found) without
- updating the "recently used"-ness of the key.
-
- (If you find yourself using this a lot, you *might* be using the
- wrong sort of data structure, but there are some use cases where
- it's handy.)
-
-* `del(key)`
-
- Deletes a key out of the cache.
-
-* `reset()`
-
- Clear the cache entirely, throwing away all values.
-
-* `has(key)`
-
- Check if a key is in the cache, without updating the recent-ness
- or deleting it for being stale.
-
-* `forEach(function(value,key,cache), [thisp])`
-
- Just like `Array.prototype.forEach`. Iterates over all the keys
- in the cache, in order of recent-ness. (Ie, more recently used
- items are iterated over first.)
-
-* `rforEach(function(value,key,cache), [thisp])`
-
- The same as `cache.forEach(...)` but items are iterated over in
- reverse order. (ie, less recently used items are iterated over
- first.)
-
-* `keys()`
-
- Return an array of the keys in the cache.
-
-* `values()`
-
- Return an array of the values in the cache.
-
-* `length`
-
-  Return the total length of objects in the cache, taking into account
-  the `length` option function.
-
-* `itemCount`
-
-  Return the total number of objects currently in the cache. Note that
-  `stale` items (see options) are included in this count.
-
-* `dump()`
-
- Return an array of the cache entries ready for serialization and usage
-  with `destinationCache.load(arr)`.
-
-* `load(cacheEntriesArray)`
-
-  Loads another cache's entries array, obtained with `sourceCache.dump()`,
-  into the cache. The destination cache is reset before the new entries are loaded.
-
-* `prune()`
-
-  Manually iterates over the entire cache, proactively pruning old entries.
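For reference, here is a minimal usage sketch of the lru-cache API documented in the README removed above; the option values, keys, and ages are illustrative only and are not taken from cacache itself:

```javascript
// Minimal usage sketch of the lru-cache API described in the README above.
// All sizes, keys, and ages are illustrative, not taken from cacache.
const LRU = require('lru-cache')

const cache = new LRU({
  max: 100,                              // total allowed "length" across all entries
  length: (value, key) => value.length,  // count string values by character count
  maxAge: 1000 * 60,                     // entries become stale after one minute
  updateAgeOnGet: true                   // reads refresh an entry's age
})

cache.set('greeting', 'hello')           // uses the cache-wide maxAge
cache.set('short-lived', 'bye', 500)     // per-call maxAge override, in ms

console.log(cache.get('greeting'))       // 'hello'
console.log(cache.has('short-lived'))    // true until ~500ms have passed

// dump() produces a serializable array; load() rehydrates it into another cache.
const snapshot = cache.dump()
const restored = new LRU({ max: 100 })
restored.load(snapshot)
console.log(restored.keys())             // includes 'greeting' while still fresh
```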
diff --git a/deps/npm/node_modules/cacache/node_modules/lru-cache/index.js b/deps/npm/node_modules/cacache/node_modules/lru-cache/index.js
deleted file mode 100644
index 573b6b85b9..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,334 +0,0 @@
-'use strict'
-
-// A linked list to keep track of recently-used-ness
-const Yallist = require('yallist')
-
-const MAX = Symbol('max')
-const LENGTH = Symbol('length')
-const LENGTH_CALCULATOR = Symbol('lengthCalculator')
-const ALLOW_STALE = Symbol('allowStale')
-const MAX_AGE = Symbol('maxAge')
-const DISPOSE = Symbol('dispose')
-const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
-const LRU_LIST = Symbol('lruList')
-const CACHE = Symbol('cache')
-const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
-
-const naiveLength = () => 1
-
-// lruList is a yallist where the head is the youngest
-// item, and the tail is the oldest. the list contains the Hit
-// objects as the entries.
-// Each Hit object has a reference to its Yallist.Node. This
-// never changes.
-//
-// cache is a Map (or PseudoMap) that matches the keys to
-// the Yallist.Node object.
-class LRUCache {
- constructor (options) {
- if (typeof options === 'number')
- options = { max: options }
-
- if (!options)
- options = {}
-
- if (options.max && (typeof options.max !== 'number' || options.max < 0))
- throw new TypeError('max must be a non-negative number')
- // Kind of weird to have a default max of Infinity, but oh well.
- const max = this[MAX] = options.max || Infinity
-
- const lc = options.length || naiveLength
- this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
- this[ALLOW_STALE] = options.stale || false
- if (options.maxAge && typeof options.maxAge !== 'number')
- throw new TypeError('maxAge must be a number')
- this[MAX_AGE] = options.maxAge || 0
- this[DISPOSE] = options.dispose
- this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
- this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
- this.reset()
- }
-
- // resize the cache when the max changes.
- set max (mL) {
- if (typeof mL !== 'number' || mL < 0)
- throw new TypeError('max must be a non-negative number')
-
- this[MAX] = mL || Infinity
- trim(this)
- }
- get max () {
- return this[MAX]
- }
-
- set allowStale (allowStale) {
- this[ALLOW_STALE] = !!allowStale
- }
- get allowStale () {
- return this[ALLOW_STALE]
- }
-
- set maxAge (mA) {
- if (typeof mA !== 'number')
- throw new TypeError('maxAge must be a non-negative number')
-
- this[MAX_AGE] = mA
- trim(this)
- }
- get maxAge () {
- return this[MAX_AGE]
- }
-
- // resize the cache when the lengthCalculator changes.
- set lengthCalculator (lC) {
- if (typeof lC !== 'function')
- lC = naiveLength
-
- if (lC !== this[LENGTH_CALCULATOR]) {
- this[LENGTH_CALCULATOR] = lC
- this[LENGTH] = 0
- this[LRU_LIST].forEach(hit => {
- hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
- this[LENGTH] += hit.length
- })
- }
- trim(this)
- }
- get lengthCalculator () { return this[LENGTH_CALCULATOR] }
-
- get length () { return this[LENGTH] }
- get itemCount () { return this[LRU_LIST].length }
-
- rforEach (fn, thisp) {
- thisp = thisp || this
- for (let walker = this[LRU_LIST].tail; walker !== null;) {
- const prev = walker.prev
- forEachStep(this, fn, walker, thisp)
- walker = prev
- }
- }
-
- forEach (fn, thisp) {
- thisp = thisp || this
- for (let walker = this[LRU_LIST].head; walker !== null;) {
- const next = walker.next
- forEachStep(this, fn, walker, thisp)
- walker = next
- }
- }
-
- keys () {
- return this[LRU_LIST].toArray().map(k => k.key)
- }
-
- values () {
- return this[LRU_LIST].toArray().map(k => k.value)
- }
-
- reset () {
- if (this[DISPOSE] &&
- this[LRU_LIST] &&
- this[LRU_LIST].length) {
- this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
- }
-
- this[CACHE] = new Map() // hash of items by key
- this[LRU_LIST] = new Yallist() // list of items in order of use recency
- this[LENGTH] = 0 // length of items in the list
- }
-
- dump () {
- return this[LRU_LIST].map(hit =>
- isStale(this, hit) ? false : {
- k: hit.key,
- v: hit.value,
- e: hit.now + (hit.maxAge || 0)
- }).toArray().filter(h => h)
- }
-
- dumpLru () {
- return this[LRU_LIST]
- }
-
- set (key, value, maxAge) {
- maxAge = maxAge || this[MAX_AGE]
-
- if (maxAge && typeof maxAge !== 'number')
- throw new TypeError('maxAge must be a number')
-
- const now = maxAge ? Date.now() : 0
- const len = this[LENGTH_CALCULATOR](value, key)
-
- if (this[CACHE].has(key)) {
- if (len > this[MAX]) {
- del(this, this[CACHE].get(key))
- return false
- }
-
- const node = this[CACHE].get(key)
- const item = node.value
-
- // dispose of the old one before overwriting
- // split out into 2 ifs for better coverage tracking
- if (this[DISPOSE]) {
- if (!this[NO_DISPOSE_ON_SET])
- this[DISPOSE](key, item.value)
- }
-
- item.now = now
- item.maxAge = maxAge
- item.value = value
- this[LENGTH] += len - item.length
- item.length = len
- this.get(key)
- trim(this)
- return true
- }
-
- const hit = new Entry(key, value, len, now, maxAge)
-
- // oversized objects fall out of cache automatically.
- if (hit.length > this[MAX]) {
- if (this[DISPOSE])
- this[DISPOSE](key, value)
-
- return false
- }
-
- this[LENGTH] += hit.length
- this[LRU_LIST].unshift(hit)
- this[CACHE].set(key, this[LRU_LIST].head)
- trim(this)
- return true
- }
-
- has (key) {
- if (!this[CACHE].has(key)) return false
- const hit = this[CACHE].get(key).value
- return !isStale(this, hit)
- }
-
- get (key) {
- return get(this, key, true)
- }
-
- peek (key) {
- return get(this, key, false)
- }
-
- pop () {
- const node = this[LRU_LIST].tail
- if (!node)
- return null
-
- del(this, node)
- return node.value
- }
-
- del (key) {
- del(this, this[CACHE].get(key))
- }
-
- load (arr) {
- // reset the cache
- this.reset()
-
- const now = Date.now()
- // A previous serialized cache has the most recent items first
- for (let l = arr.length - 1; l >= 0; l--) {
- const hit = arr[l]
- const expiresAt = hit.e || 0
- if (expiresAt === 0)
- // the item was created without expiration in a non aged cache
- this.set(hit.k, hit.v)
- else {
- const maxAge = expiresAt - now
- // dont add already expired items
- if (maxAge > 0) {
- this.set(hit.k, hit.v, maxAge)
- }
- }
- }
- }
-
- prune () {
- this[CACHE].forEach((value, key) => get(this, key, false))
- }
-}
-
-const get = (self, key, doUse) => {
- const node = self[CACHE].get(key)
- if (node) {
- const hit = node.value
- if (isStale(self, hit)) {
- del(self, node)
- if (!self[ALLOW_STALE])
- return undefined
- } else {
- if (doUse) {
- if (self[UPDATE_AGE_ON_GET])
- node.value.now = Date.now()
- self[LRU_LIST].unshiftNode(node)
- }
- }
- return hit.value
- }
-}
-
-const isStale = (self, hit) => {
- if (!hit || (!hit.maxAge && !self[MAX_AGE]))
- return false
-
- const diff = Date.now() - hit.now
- return hit.maxAge ? diff > hit.maxAge
- : self[MAX_AGE] && (diff > self[MAX_AGE])
-}
-
-const trim = self => {
- if (self[LENGTH] > self[MAX]) {
- for (let walker = self[LRU_LIST].tail;
- self[LENGTH] > self[MAX] && walker !== null;) {
- // We know that we're about to delete this one, and also
- // what the next least recently used key will be, so just
- // go ahead and set it now.
- const prev = walker.prev
- del(self, walker)
- walker = prev
- }
- }
-}
-
-const del = (self, node) => {
- if (node) {
- const hit = node.value
- if (self[DISPOSE])
- self[DISPOSE](hit.key, hit.value)
-
- self[LENGTH] -= hit.length
- self[CACHE].delete(hit.key)
- self[LRU_LIST].removeNode(node)
- }
-}
-
-class Entry {
- constructor (key, value, length, now, maxAge) {
- this.key = key
- this.value = value
- this.length = length
- this.now = now
- this.maxAge = maxAge || 0
- }
-}
-
-const forEachStep = (self, fn, node, thisp) => {
- let hit = node.value
- if (isStale(self, hit)) {
- del(self, node)
- if (!self[ALLOW_STALE])
- hit = undefined
- }
- if (hit)
- fn.call(thisp, hit.value, hit.key, self)
-}
-
-module.exports = LRUCache
diff --git a/deps/npm/node_modules/cacache/node_modules/lru-cache/package.json b/deps/npm/node_modules/cacache/node_modules/lru-cache/package.json
deleted file mode 100644
index 1d41a07afb..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,67 +0,0 @@
-{
- "_from": "lru-cache@^5.1.1",
- "_id": "lru-cache@5.1.1",
- "_inBundle": false,
- "_integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
- "_location": "/cacache/lru-cache",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "lru-cache@^5.1.1",
- "name": "lru-cache",
- "escapedName": "lru-cache",
- "rawSpec": "^5.1.1",
- "saveSpec": null,
- "fetchSpec": "^5.1.1"
- },
- "_requiredBy": [
- "/cacache"
- ],
- "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
- "_shasum": "1da27e6710271947695daf6848e847f01d84b920",
- "_spec": "lru-cache@^5.1.1",
- "_where": "/Users/aeschright/code/cli/node_modules/cacache",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me"
- },
- "bugs": {
- "url": "https://github.com/isaacs/node-lru-cache/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "yallist": "^3.0.2"
- },
- "deprecated": false,
- "description": "A cache object that deletes the least-recently-used items.",
- "devDependencies": {
- "benchmark": "^2.1.4",
- "tap": "^12.1.0"
- },
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/isaacs/node-lru-cache#readme",
- "keywords": [
- "mru",
- "lru",
- "cache"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "lru-cache",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/node-lru-cache.git"
- },
- "scripts": {
- "coveragerport": "tap --coverage-report=html",
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "snap": "TAP_SNAPSHOT=1 tap test/*.js -J",
- "test": "tap test/*.js --100 -J"
- },
- "version": "5.1.1"
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE b/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/README.md b/deps/npm/node_modules/cacache/node_modules/yallist/README.md
deleted file mode 100644
index f586101869..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/yallist/README.md
+++ /dev/null
@@ -1,204 +0,0 @@
-# yallist
-
-Yet Another Linked List
-
-There are many doubly-linked list implementations like it, but this
-one is mine.
-
-For when an array would be too big, and a Map can't be iterated in
-reverse order.
-
-
-[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
-
-## basic usage
-
-```javascript
-var yallist = require('yallist')
-var myList = yallist.create([1, 2, 3])
-myList.push('foo')
-myList.unshift('bar')
-// of course pop() and shift() are there, too
-console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
-myList.forEach(function (k) {
- // walk the list head to tail
-})
-myList.forEachReverse(function (k, index, list) {
- // walk the list tail to head
-})
-var myDoubledList = myList.map(function (k) {
- return k + k
-})
-// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
-// mapReverse is also a thing
-var myDoubledListReverse = myList.mapReverse(function (k) {
- return k + k
-}) // ['foofoo', 6, 4, 2, 'barbar']
-
-var reduced = myList.reduce(function (set, entry) {
- set += entry
- return set
-}, 'start')
-console.log(reduced) // 'startbar123foo'
-```
-
-## api
-
-The whole API is considered "public".
-
-Functions with the same name as an Array method work more or less the
-same way.
-
-There are reverse versions of most things because that's the point.
-
-### Yallist
-
-Default export, the class that holds and manages a list.
-
-Call it with either a forEach-able (like an array) or a set of
-arguments, to initialize the list.
-
-The Array-ish methods all act like you'd expect. No magic length,
-though, so if you change that it won't automatically prune or add
-empty spots.
-
-### Yallist.create(..)
-
-Alias for Yallist function. Some people like factories.
-
-#### yallist.head
-
-The first node in the list
-
-#### yallist.tail
-
-The last node in the list
-
-#### yallist.length
-
-The number of nodes in the list. (Change this at your peril. It is
-not magic like Array length.)
-
-#### yallist.toArray()
-
-Convert the list to an array.
-
-#### yallist.forEach(fn, [thisp])
-
-Call a function on each item in the list.
-
-#### yallist.forEachReverse(fn, [thisp])
-
-Call a function on each item in the list, in reverse order.
-
-#### yallist.get(n)
-
-Get the data at position `n` in the list. If you use this a lot,
-probably better off just using an Array.
-
-#### yallist.getReverse(n)
-
-Get the data at position `n`, counting from the tail.
-
-#### yallist.map(fn, thisp)
-
-Create a new Yallist with the result of calling the function on each
-item.
-
-#### yallist.mapReverse(fn, thisp)
-
-Same as `map`, but in reverse.
-
-#### yallist.pop()
-
-Get the data from the list tail, and remove the tail from the list.
-
-#### yallist.push(item, ...)
-
-Insert one or more items to the tail of the list.
-
-#### yallist.reduce(fn, initialValue)
-
-Like Array.reduce.
-
-#### yallist.reduceReverse
-
-Like Array.reduce, but in reverse.
-
-#### yallist.reverse
-
-Reverse the list in place.
-
-#### yallist.shift()
-
-Get the data from the list head, and remove the head from the list.
-
-#### yallist.slice([from], [to])
-
-Just like Array.slice, but returns a new Yallist.
-
-#### yallist.sliceReverse([from], [to])
-
-Just like yallist.slice, but the result is returned in reverse.
-
-#### yallist.toArray()
-
-Create an array representation of the list.
-
-#### yallist.toArrayReverse()
-
-Create a reversed array representation of the list.
-
-#### yallist.unshift(item, ...)
-
-Insert one or more items to the head of the list.
-
-#### yallist.unshiftNode(node)
-
-Move a Node object to the front of the list. (That is, pull it out of
-wherever it lives, and make it the new head.)
-
-If the node belongs to a different list, then that list will remove it
-first.
-
-#### yallist.pushNode(node)
-
-Move a Node object to the end of the list. (That is, pull it out of
-wherever it lives, and make it the new tail.)
-
-If the node belongs to a list already, then that list will remove it
-first.
-
-#### yallist.removeNode(node)
-
-Remove a node from the list, preserving referential integrity of head
-and tail and other nodes.
-
-Will throw an error if you try to have a list remove a node that
-doesn't belong to it.
-
-### Yallist.Node
-
-The class that holds the data and is actually the list.
-
-Call with `var n = new Node(value, previousNode, nextNode)`
-
-Note that if you do direct operations on Nodes themselves, it's very
-easy to get into weird states where the list is broken. Be careful :)
-
-#### node.next
-
-The next node in the list.
-
-#### node.prev
-
-The previous node in the list.
-
-#### node.value
-
-The data the node contains.
-
-#### node.list
-
-The list to which this node belongs. (Null if it does not belong to
-any list.)
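Likewise, a small usage sketch of the yallist API documented in the README removed above; the list contents are illustrative:

```javascript
// Minimal usage sketch of the yallist API described in the README above.
// The list contents are illustrative.
const Yallist = require('yallist')

const list = Yallist.create(['a', 'b', 'c'])
list.push('d')                       // 'd' becomes the new tail
list.unshift('z')                    // 'z' becomes the new head

console.log(list.toArray())          // [ 'z', 'a', 'b', 'c', 'd' ]
console.log(list.toArrayReverse())   // [ 'd', 'c', 'b', 'a', 'z' ]

// Node-level moves avoid reallocating entries; this is how lru-cache's
// get() marks an entry as most recently used (via unshiftNode).
const node = list.tail               // the Yallist.Node holding 'd'
list.unshiftNode(node)
console.log(list.toArray())          // [ 'd', 'z', 'a', 'b', 'c' ]
```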
diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/iterator.js b/deps/npm/node_modules/cacache/node_modules/yallist/iterator.js
deleted file mode 100644
index d41c97a19f..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/yallist/iterator.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-module.exports = function (Yallist) {
- Yallist.prototype[Symbol.iterator] = function* () {
- for (let walker = this.head; walker; walker = walker.next) {
- yield walker.value
- }
- }
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/package.json
deleted file mode 100644
index 91ea442aa8..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/yallist/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "yallist@^3.0.2",
- "_id": "yallist@3.0.3",
- "_inBundle": false,
- "_integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==",
- "_location": "/cacache/yallist",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "yallist@^3.0.2",
- "name": "yallist",
- "escapedName": "yallist",
- "rawSpec": "^3.0.2",
- "saveSpec": null,
- "fetchSpec": "^3.0.2"
- },
- "_requiredBy": [
- "/cacache/lru-cache"
- ],
- "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz",
- "_shasum": "b4b049e314be545e3ce802236d6cd22cd91c3de9",
- "_spec": "yallist@^3.0.2",
- "_where": "/Users/aeschright/code/cli/node_modules/cacache/node_modules/lru-cache",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/yallist/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Yet Another Linked List",
- "devDependencies": {
- "tap": "^12.1.0"
- },
- "directories": {
- "test": "test"
- },
- "files": [
- "yallist.js",
- "iterator.js"
- ],
- "homepage": "https://github.com/isaacs/yallist#readme",
- "license": "ISC",
- "main": "yallist.js",
- "name": "yallist",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/yallist.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap test/*.js --100"
- },
- "version": "3.0.3"
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/yallist.js b/deps/npm/node_modules/cacache/node_modules/yallist/yallist.js
deleted file mode 100644
index b0ab36cf31..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/yallist/yallist.js
+++ /dev/null
@@ -1,376 +0,0 @@
-'use strict'
-module.exports = Yallist
-
-Yallist.Node = Node
-Yallist.create = Yallist
-
-function Yallist (list) {
- var self = this
- if (!(self instanceof Yallist)) {
- self = new Yallist()
- }
-
- self.tail = null
- self.head = null
- self.length = 0
-
- if (list && typeof list.forEach === 'function') {
- list.forEach(function (item) {
- self.push(item)
- })
- } else if (arguments.length > 0) {
- for (var i = 0, l = arguments.length; i < l; i++) {
- self.push(arguments[i])
- }
- }
-
- return self
-}
-
-Yallist.prototype.removeNode = function (node) {
- if (node.list !== this) {
- throw new Error('removing node which does not belong to this list')
- }
-
- var next = node.next
- var prev = node.prev
-
- if (next) {
- next.prev = prev
- }
-
- if (prev) {
- prev.next = next
- }
-
- if (node === this.head) {
- this.head = next
- }
- if (node === this.tail) {
- this.tail = prev
- }
-
- node.list.length--
- node.next = null
- node.prev = null
- node.list = null
-}
-
-Yallist.prototype.unshiftNode = function (node) {
- if (node === this.head) {
- return
- }
-
- if (node.list) {
- node.list.removeNode(node)
- }
-
- var head = this.head
- node.list = this
- node.next = head
- if (head) {
- head.prev = node
- }
-
- this.head = node
- if (!this.tail) {
- this.tail = node
- }
- this.length++
-}
-
-Yallist.prototype.pushNode = function (node) {
- if (node === this.tail) {
- return
- }
-
- if (node.list) {
- node.list.removeNode(node)
- }
-
- var tail = this.tail
- node.list = this
- node.prev = tail
- if (tail) {
- tail.next = node
- }
-
- this.tail = node
- if (!this.head) {
- this.head = node
- }
- this.length++
-}
-
-Yallist.prototype.push = function () {
- for (var i = 0, l = arguments.length; i < l; i++) {
- push(this, arguments[i])
- }
- return this.length
-}
-
-Yallist.prototype.unshift = function () {
- for (var i = 0, l = arguments.length; i < l; i++) {
- unshift(this, arguments[i])
- }
- return this.length
-}
-
-Yallist.prototype.pop = function () {
- if (!this.tail) {
- return undefined
- }
-
- var res = this.tail.value
- this.tail = this.tail.prev
- if (this.tail) {
- this.tail.next = null
- } else {
- this.head = null
- }
- this.length--
- return res
-}
-
-Yallist.prototype.shift = function () {
- if (!this.head) {
- return undefined
- }
-
- var res = this.head.value
- this.head = this.head.next
- if (this.head) {
- this.head.prev = null
- } else {
- this.tail = null
- }
- this.length--
- return res
-}
-
-Yallist.prototype.forEach = function (fn, thisp) {
- thisp = thisp || this
- for (var walker = this.head, i = 0; walker !== null; i++) {
- fn.call(thisp, walker.value, i, this)
- walker = walker.next
- }
-}
-
-Yallist.prototype.forEachReverse = function (fn, thisp) {
- thisp = thisp || this
- for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
- fn.call(thisp, walker.value, i, this)
- walker = walker.prev
- }
-}
-
-Yallist.prototype.get = function (n) {
- for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.next
- }
- if (i === n && walker !== null) {
- return walker.value
- }
-}
-
-Yallist.prototype.getReverse = function (n) {
- for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.prev
- }
- if (i === n && walker !== null) {
- return walker.value
- }
-}
-
-Yallist.prototype.map = function (fn, thisp) {
- thisp = thisp || this
- var res = new Yallist()
- for (var walker = this.head; walker !== null;) {
- res.push(fn.call(thisp, walker.value, this))
- walker = walker.next
- }
- return res
-}
-
-Yallist.prototype.mapReverse = function (fn, thisp) {
- thisp = thisp || this
- var res = new Yallist()
- for (var walker = this.tail; walker !== null;) {
- res.push(fn.call(thisp, walker.value, this))
- walker = walker.prev
- }
- return res
-}
-
-Yallist.prototype.reduce = function (fn, initial) {
- var acc
- var walker = this.head
- if (arguments.length > 1) {
- acc = initial
- } else if (this.head) {
- walker = this.head.next
- acc = this.head.value
- } else {
- throw new TypeError('Reduce of empty list with no initial value')
- }
-
- for (var i = 0; walker !== null; i++) {
- acc = fn(acc, walker.value, i)
- walker = walker.next
- }
-
- return acc
-}
-
-Yallist.prototype.reduceReverse = function (fn, initial) {
- var acc
- var walker = this.tail
- if (arguments.length > 1) {
- acc = initial
- } else if (this.tail) {
- walker = this.tail.prev
- acc = this.tail.value
- } else {
- throw new TypeError('Reduce of empty list with no initial value')
- }
-
- for (var i = this.length - 1; walker !== null; i--) {
- acc = fn(acc, walker.value, i)
- walker = walker.prev
- }
-
- return acc
-}
-
-Yallist.prototype.toArray = function () {
- var arr = new Array(this.length)
- for (var i = 0, walker = this.head; walker !== null; i++) {
- arr[i] = walker.value
- walker = walker.next
- }
- return arr
-}
-
-Yallist.prototype.toArrayReverse = function () {
- var arr = new Array(this.length)
- for (var i = 0, walker = this.tail; walker !== null; i++) {
- arr[i] = walker.value
- walker = walker.prev
- }
- return arr
-}
-
-Yallist.prototype.slice = function (from, to) {
- to = to || this.length
- if (to < 0) {
- to += this.length
- }
- from = from || 0
- if (from < 0) {
- from += this.length
- }
- var ret = new Yallist()
- if (to < from || to < 0) {
- return ret
- }
- if (from < 0) {
- from = 0
- }
- if (to > this.length) {
- to = this.length
- }
- for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
- walker = walker.next
- }
- for (; walker !== null && i < to; i++, walker = walker.next) {
- ret.push(walker.value)
- }
- return ret
-}
-
-Yallist.prototype.sliceReverse = function (from, to) {
- to = to || this.length
- if (to < 0) {
- to += this.length
- }
- from = from || 0
- if (from < 0) {
- from += this.length
- }
- var ret = new Yallist()
- if (to < from || to < 0) {
- return ret
- }
- if (from < 0) {
- from = 0
- }
- if (to > this.length) {
- to = this.length
- }
- for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
- walker = walker.prev
- }
- for (; walker !== null && i > from; i--, walker = walker.prev) {
- ret.push(walker.value)
- }
- return ret
-}
-
-Yallist.prototype.reverse = function () {
- var head = this.head
- var tail = this.tail
- for (var walker = head; walker !== null; walker = walker.prev) {
- var p = walker.prev
- walker.prev = walker.next
- walker.next = p
- }
- this.head = tail
- this.tail = head
- return this
-}
-
-function push (self, item) {
- self.tail = new Node(item, self.tail, null, self)
- if (!self.head) {
- self.head = self.tail
- }
- self.length++
-}
-
-function unshift (self, item) {
- self.head = new Node(item, null, self.head, self)
- if (!self.tail) {
- self.tail = self.head
- }
- self.length++
-}
-
-function Node (value, prev, next, list) {
- if (!(this instanceof Node)) {
- return new Node(value, prev, next, list)
- }
-
- this.list = list
- this.value = value
-
- if (prev) {
- prev.next = this
- this.prev = prev
- } else {
- this.prev = null
- }
-
- if (next) {
- next.prev = this
- this.next = next
- } else {
- this.next = null
- }
-}
-
-try {
- // add if support for Symbol.iterator is present
- require('./iterator.js')(Yallist)
-} catch (er) {}
diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json
index c5cce2b106..1df72c2d7d 100644
--- a/deps/npm/node_modules/cacache/package.json
+++ b/deps/npm/node_modules/cacache/package.json
@@ -1,26 +1,19 @@
{
- "_from": "cacache@^11.3.2",
- "_id": "cacache@11.3.3",
+ "_from": "cacache@12.0.2",
+ "_id": "cacache@12.0.2",
"_inBundle": false,
- "_integrity": "sha512-p8WcneCytvzPxhDvYp31PD039vi77I12W+/KfR9S8AZbaiARFBCpsPJS+9uhWfeBfeAtW7o/4vt3MUqLkbY6nA==",
+ "_integrity": "sha512-ifKgxH2CKhJEg6tNdAwziu6Q33EvuG26tYcda6PT3WKisZcYDXsnEdnRv67Po3yCzFfaSoMjGZzJyD2c3DT1dg==",
"_location": "/cacache",
- "_phantomChildren": {
- "fs.realpath": "1.0.0",
- "inflight": "1.0.6",
- "inherits": "2.0.3",
- "minimatch": "3.0.4",
- "once": "1.4.0",
- "path-is-absolute": "1.0.1"
- },
+ "_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "version",
"registry": true,
- "raw": "cacache@^11.3.2",
+ "raw": "cacache@12.0.2",
"name": "cacache",
"escapedName": "cacache",
- "rawSpec": "^11.3.2",
+ "rawSpec": "12.0.2",
"saveSpec": null,
- "fetchSpec": "^11.3.2"
+ "fetchSpec": "12.0.2"
},
"_requiredBy": [
"#USER",
@@ -28,16 +21,16 @@
"/make-fetch-happen",
"/pacote"
],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-11.3.3.tgz",
- "_shasum": "8bd29df8c6a718a6ebd2d010da4d7972ae3bbadc",
- "_spec": "cacache@^11.3.2",
+ "_resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.2.tgz",
+ "_shasum": "8db03205e36089a3df6954c66ce92541441ac46c",
+ "_spec": "cacache@12.0.2",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
},
"bugs": {
- "url": "https://github.com/zkat/cacache/issues"
+ "url": "https://github.com/npm/cacache/issues"
},
"bundleDependencies": false,
"cache-version": {
@@ -68,6 +61,7 @@
"figgy-pudding": "^3.5.1",
"glob": "^7.1.4",
"graceful-fs": "^4.1.15",
+ "infer-owner": "^1.0.3",
"lru-cache": "^5.1.1",
"mississippi": "^3.0.0",
"mkdirp": "^0.5.1",
@@ -97,7 +91,7 @@
"lib",
"locales"
],
- "homepage": "https://github.com/zkat/cacache#readme",
+ "homepage": "https://github.com/npm/cacache#readme",
"keywords": [
"cache",
"caching",
@@ -118,7 +112,7 @@
"name": "cacache",
"repository": {
"type": "git",
- "url": "git+https://github.com/zkat/cacache.git"
+ "url": "git+https://github.com/npm/cacache.git"
},
"scripts": {
"benchmarks": "node test/benchmarks",
@@ -131,5 +125,5 @@
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "11.3.3"
+ "version": "12.0.2"
}
diff --git a/deps/npm/node_modules/cacache/put.js b/deps/npm/node_modules/cacache/put.js
index cb4057fd5f..a400639303 100644
--- a/deps/npm/node_modules/cacache/put.js
+++ b/deps/npm/node_modules/cacache/put.js
@@ -16,8 +16,6 @@ const PutOpts = figgyPudding({
pickAlgorithm: {},
size: {},
tmpPrefix: {},
- uid: {},
- gid: {},
single: {},
sep: {},
error: {},
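Since `uid` and `gid` are removed from `PutOpts` in this hunk (and `infer-owner` is added to cacache's dependencies in the package.json hunk above), callers simply stop passing ownership options. A minimal sketch, assuming the public `cacache.put(cache, key, data, opts)` signature; the cache path, key, and metadata below are illustrative:

```javascript
// Illustrative sketch of a cacache.put() call after this change: no uid/gid
// options. The cache path, key, and metadata are made up for the example.
const cacache = require('cacache')

const cachePath = '/tmp/example-cache'

cacache.put(cachePath, 'registry:example', Buffer.from('file contents'), {
  metadata: { url: 'https://example.com/example.tgz' }
  // note: no uid or gid here; those keys were removed from PutOpts above
}).then(integrity => {
  console.log('stored with integrity', integrity)
})
```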