Diffstat (limited to 'deps/npm/node_modules/cacache')
-rw-r--r--  deps/npm/node_modules/cacache/CHANGELOG.md | 60
-rw-r--r--  deps/npm/node_modules/cacache/README.md | 4
-rw-r--r--  deps/npm/node_modules/cacache/get.js | 36
-rw-r--r--  deps/npm/node_modules/cacache/lib/content/read.js | 43
-rw-r--r--  deps/npm/node_modules/cacache/lib/content/write.js | 4
-rw-r--r--  deps/npm/node_modules/cacache/lib/entry-index.js | 27
-rw-r--r--  deps/npm/node_modules/cacache/lib/util/tmp.js | 12
-rw-r--r--  deps/npm/node_modules/cacache/lib/verify.js | 40
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/ssri/CHANGELOG.md | 190
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/ssri/LICENSE.md | 16
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/ssri/README.md | 462
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/ssri/index.js | 334
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/ssri/package.json | 89
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/y18n/LICENSE | 13
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/y18n/README.md | 91
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/y18n/index.js | 172
-rw-r--r--  deps/npm/node_modules/cacache/node_modules/y18n/package.json | 65
-rw-r--r--  deps/npm/node_modules/cacache/package.json | 68
-rw-r--r--  deps/npm/node_modules/cacache/put.js | 28
19 files changed, 226 insertions, 1528 deletions
diff --git a/deps/npm/node_modules/cacache/CHANGELOG.md b/deps/npm/node_modules/cacache/CHANGELOG.md
index 0903d1d578..f04bdea0c4 100644
--- a/deps/npm/node_modules/cacache/CHANGELOG.md
+++ b/deps/npm/node_modules/cacache/CHANGELOG.md
@@ -2,6 +2,66 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+<a name="11.0.2"></a>
+## [11.0.2](https://github.com/zkat/cacache/compare/v11.0.1...v11.0.2) (2018-05-07)
+
+
+### Bug Fixes
+
+* **verify:** size param no longer lost in a verify ([#131](https://github.com/zkat/cacache/issues/131)) ([c614a19](https://github.com/zkat/cacache/commit/c614a19)), closes [#130](https://github.com/zkat/cacache/issues/130)
+
+
+
+<a name="11.0.1"></a>
+## [11.0.1](https://github.com/zkat/cacache/compare/v11.0.0...v11.0.1) (2018-04-10)
+
+
+
+<a name="11.0.0"></a>
+# [11.0.0](https://github.com/zkat/cacache/compare/v10.0.4...v11.0.0) (2018-04-09)
+
+
+### Features
+
+* **opts:** use figgy-pudding for opts ([#128](https://github.com/zkat/cacache/issues/128)) ([33d4eed](https://github.com/zkat/cacache/commit/33d4eed))
+
+
+### meta
+
+* drop support for node@4 ([529f347](https://github.com/zkat/cacache/commit/529f347))
+
+
+### BREAKING CHANGES
+
+* node@4 is no longer supported
+
+
+
+<a name="10.0.4"></a>
+## [10.0.4](https://github.com/zkat/cacache/compare/v10.0.3...v10.0.4) (2018-02-16)
+
+
+
+<a name="10.0.3"></a>
+## [10.0.3](https://github.com/zkat/cacache/compare/v10.0.2...v10.0.3) (2018-02-16)
+
+
+### Bug Fixes
+
+* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/zkat/cacache/commit/fa918f5))
+
+
+
+<a name="10.0.2"></a>
+## [10.0.2](https://github.com/zkat/cacache/compare/v10.0.1...v10.0.2) (2018-01-07)
+
+
+### Bug Fixes
+
+* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/zkat/cacache/commit/347dc36))
+
+
+
<a name="10.0.1"></a>
## [10.0.1](https://github.com/zkat/cacache/compare/v10.0.0...v10.0.1) (2017-11-15)
diff --git a/deps/npm/node_modules/cacache/README.md b/deps/npm/node_modules/cacache/README.md
index ea69b8f540..4b284588a6 100644
--- a/deps/npm/node_modules/cacache/README.md
+++ b/deps/npm/node_modules/cacache/README.md
@@ -6,7 +6,7 @@ concurrency, and it will never give you corrupted data, even if cache files
get corrupted or manipulated.
It was originally written to be used as [npm](https://npm.im)'s local cache, but
-can just as easily be used on its own
+can just as easily be used on its own.
_Translations: [español](README.es.md)_
@@ -117,7 +117,7 @@ translations. To use the English API as documented in this README, use
`require('cacache')`, but may change in the future.
cacache also supports other languages! You can find the list of currently
-supported ones my looking in `./locales` in the source directory. You can use
+supported ones by looking in `./locales` in the source directory. You can use
the API in that language with `require('cacache/<lang>')`.
Want to add support for a new language? Please go ahead! You should be able to
diff --git a/deps/npm/node_modules/cacache/get.js b/deps/npm/node_modules/cacache/get.js
index 2bb3afa528..7bafe128e4 100644
--- a/deps/npm/node_modules/cacache/get.js
+++ b/deps/npm/node_modules/cacache/get.js
@@ -2,6 +2,7 @@
const BB = require('bluebird')
+const figgyPudding = require('figgy-pudding')
const fs = require('fs')
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
@@ -10,6 +11,12 @@ const pipeline = require('mississippi').pipeline
const read = require('./lib/content/read')
const through = require('mississippi').through
+const GetOpts = figgyPudding({
+ integrity: {},
+ memoize: {},
+ size: {}
+})
+
module.exports = function get (cache, key, opts) {
return getData(false, cache, key, opts)
}
@@ -17,11 +24,11 @@ module.exports.byDigest = function getByDigest (cache, digest, opts) {
return getData(true, cache, digest, opts)
}
function getData (byDigest, cache, key, opts) {
- opts = opts || {}
+ opts = GetOpts(opts)
const memoized = (
byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
+ ? memo.get.byDigest(cache, key, opts)
+ : memo.get(cache, key, opts)
)
if (memoized && opts.memoize !== false) {
return BB.resolve(byDigest ? memoized : {
@@ -58,7 +65,7 @@ function getData (byDigest, cache, key, opts) {
module.exports.stream = getStream
function getStream (cache, key, opts) {
- opts = opts || {}
+ opts = GetOpts(opts)
let stream = through()
const memoized = memo.get(cache, key, opts)
if (memoized && opts.memoize !== false) {
@@ -91,7 +98,6 @@ function getStream (cache, key, opts) {
} else {
memoStream = through()
}
- opts.size = opts.size == null ? entry.size : opts.size
stream.emit('metadata', entry.metadata)
stream.emit('integrity', entry.integrity)
stream.emit('size', entry.size)
@@ -101,7 +107,9 @@ function getStream (cache, key, opts) {
ev === 'size' && cb(entry.size)
})
pipe(
- read.readStream(cache, entry.integrity, opts),
+ read.readStream(cache, entry.integrity, opts.concat({
+ size: opts.size == null ? entry.size : opts.size
+ })),
memoStream,
stream
)
@@ -111,7 +119,7 @@ function getStream (cache, key, opts) {
module.exports.stream.byDigest = getStreamDigest
function getStreamDigest (cache, integrity, opts) {
- opts = opts || {}
+ opts = GetOpts(opts)
const memoized = memo.get.byDigest(cache, integrity, opts)
if (memoized && opts.memoize !== false) {
const stream = through()
@@ -143,7 +151,7 @@ function getStreamDigest (cache, integrity, opts) {
module.exports.info = info
function info (cache, key, opts) {
- opts = opts || {}
+ opts = GetOpts(opts)
const memoized = memo.get(cache, key, opts)
if (memoized && opts.memoize !== false) {
return BB.resolve(memoized.entry)
@@ -161,7 +169,7 @@ module.exports.copy.byDigest = function cpDigest (cache, digest, dest, opts) {
return copy(true, cache, digest, dest, opts)
}
function copy (byDigest, cache, key, dest, opts) {
- opts = opts || {}
+ opts = GetOpts(opts)
if (read.copy) {
return (
byDigest ? BB.resolve(null) : index.find(cache, key, opts)
@@ -180,11 +188,11 @@ function copy (byDigest, cache, key, dest, opts) {
} else {
return getData(byDigest, cache, key, opts).then(res => {
return fs.writeFileAsync(dest, byDigest ? res : res.data)
- .then(() => byDigest ? key : {
- metadata: res.metadata,
- size: res.size,
- integrity: res.integrity
- })
+ .then(() => byDigest ? key : {
+ metadata: res.metadata,
+ size: res.size,
+ integrity: res.integrity
+ })
})
}
}
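
The get.js hunks above replace the `opts = opts || {}` guard with a figgy-pudding spec. A minimal sketch of the behaviour those hunks rely on, reusing the same `GetOpts` definition: only declared keys are carried, and `opts.concat()` layers an override instead of mutating shared options, which is why the stream path now calls `opts.concat({size: ...})` rather than assigning `opts.size`.

```javascript
// Sketch of the figgy-pudding pattern introduced above (same GetOpts spec).
const figgyPudding = require('figgy-pudding')

const GetOpts = figgyPudding({
  integrity: {},
  memoize: {},
  size: {}
})

const opts = GetOpts({ memoize: false })   // accepts a plain object or undefined
console.log(opts.memoize)                  // false
console.log(opts.size)                     // undefined -- declared but not set

// concat() returns a new layered pudding; the original is left untouched, so a
// per-entry value like entry.size no longer leaks back into the caller's opts.
const withSize = opts.concat({ size: 1024 })
console.log(withSize.size)                 // 1024
console.log(opts.size)                     // still undefined
```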
diff --git a/deps/npm/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/cacache/lib/content/read.js
index b09ad5cb40..5c1a6f2f29 100644
--- a/deps/npm/node_modules/cacache/lib/content/read.js
+++ b/deps/npm/node_modules/cacache/lib/content/read.js
@@ -3,6 +3,7 @@
const BB = require('bluebird')
const contentPath = require('./path')
+const figgyPudding = require('figgy-pudding')
const fs = require('graceful-fs')
const PassThrough = require('stream').PassThrough
const pipe = BB.promisify(require('mississippi').pipe)
@@ -11,9 +12,13 @@ const Y = require('../util/y.js')
BB.promisifyAll(fs)
+const ReadOpts = figgyPudding({
+ size: {}
+})
+
module.exports = read
function read (cache, integrity, opts) {
- opts = opts || {}
+ opts = ReadOpts(opts)
return pickContentSri(cache, integrity).then(content => {
const sri = content.sri
const cpath = contentPath(cache, sri)
@@ -32,7 +37,7 @@ function read (cache, integrity, opts) {
module.exports.stream = readStream
module.exports.readStream = readStream
function readStream (cache, integrity, opts) {
- opts = opts || {}
+ opts = ReadOpts(opts)
const stream = new PassThrough()
pickContentSri(
cache, integrity
@@ -56,7 +61,7 @@ if (fs.copyFile) {
module.exports.copy = copy
}
function copy (cache, integrity, dest, opts) {
- opts = opts || {}
+ opts = ReadOpts(opts)
return pickContentSri(cache, integrity).then(content => {
const sri = content.sri
const cpath = contentPath(cache, sri)
@@ -68,17 +73,17 @@ module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
if (!integrity) { return BB.resolve(false) }
return pickContentSri(cache, integrity)
- .catch({code: 'ENOENT'}, () => false)
- .catch({code: 'EPERM'}, err => {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }).then(content => {
- if (!content.sri) return false
- return ({ sri: content.sri, size: content.stat.size })
- })
+ .catch({code: 'ENOENT'}, () => false)
+ .catch({code: 'EPERM'}, err => {
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
+ }
+ }).then(content => {
+ if (!content.sri) return false
+ return ({ sri: content.sri, size: content.stat.size })
+ })
}
module.exports._pickContentSri = pickContentSri
@@ -95,6 +100,16 @@ function pickContentSri (cache, integrity) {
return BB.any(sri[sri.pickAlgorithm()].map(meta => {
return pickContentSri(cache, meta)
}))
+ .catch(err => {
+ if ([].some.call(err, e => e.code === 'ENOENT')) {
+ throw Object.assign(
+ new Error('No matching content found for ' + sri.toString()),
+ {code: 'ENOENT'}
+ )
+ } else {
+ throw err[0]
+ }
+ })
}
}
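
The new catch block in `pickContentSri` matches the 10.0.3 changelog entry above ("rethrow aggregate errors as ENOENT"): when every candidate hash rejects, Bluebird's `BB.any()` fails with an array-like AggregateError, which `[].some.call(err, ...)` scans for ENOENTs before re-throwing a single error with `code: 'ENOENT'`. A small sketch of what that buys callers; the cache path and digest here are hypothetical.

```javascript
// Sketch, not part of the patch: with aggregate errors collapsed to ENOENT,
// callers keep a single, predictable error code to match on.
const read = require('./lib/content/read')   // as required from inside cacache

read('/tmp/my-cache', 'sha512-deadbeef')     // hypothetical cache path + digest
  .then(data => console.log('found', data.length, 'bytes'))
  // read() returns a Bluebird promise, so the predicate form of catch works:
  .catch({ code: 'ENOENT' }, () => console.log('content not in cache'))
```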
diff --git a/deps/npm/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/cacache/lib/content/write.js
index a79ae92902..c71363413c 100644
--- a/deps/npm/node_modules/cacache/lib/content/write.js
+++ b/deps/npm/node_modules/cacache/lib/content/write.js
@@ -28,7 +28,9 @@ function write (cache, data, opts) {
if (typeof opts.size === 'number' && data.length !== opts.size) {
return BB.reject(sizeError(opts.size, data.length))
}
- const sri = ssri.fromData(data, opts)
+ const sri = ssri.fromData(data, {
+ algorithms: opts.algorithms
+ })
if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
return BB.reject(checksumError(opts.integrity, sri))
}
diff --git a/deps/npm/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/cacache/lib/entry-index.js
index face0fe79c..43fa7b95b1 100644
--- a/deps/npm/node_modules/cacache/lib/entry-index.js
+++ b/deps/npm/node_modules/cacache/lib/entry-index.js
@@ -4,6 +4,7 @@ const BB = require('bluebird')
const contentPath = require('./content/path')
const crypto = require('crypto')
+const figgyPudding = require('figgy-pudding')
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
const hashToSegments = require('./util/hash-to-segments')
@@ -29,9 +30,16 @@ module.exports.NotFoundError = class NotFoundError extends Error {
}
}
+const IndexOpts = figgyPudding({
+ metadata: {},
+ size: {},
+ uid: {},
+ gid: {}
+})
+
module.exports.insert = insert
function insert (cache, key, integrity, opts) {
- opts = opts || {}
+ opts = IndexOpts(opts)
const bucket = bucketPath(cache, key)
const entry = {
key,
@@ -116,9 +124,10 @@ function lsStream (cache) {
}, new Map())
return getKeyToEntry.then(reduced => {
- return Array.from(reduced.values()).map(
- entry => stream.push(formatEntry(cache, entry))
- )
+ for (let entry of reduced.values()) {
+ const formatted = formatEntry(cache, entry)
+ formatted && stream.push(formatted)
+ }
}).catch({code: 'ENOENT'}, nop)
})
})
@@ -196,9 +205,9 @@ function hashEntry (str) {
function hash (str, digest) {
return crypto
- .createHash(digest)
- .update(str)
- .digest('hex')
+ .createHash(digest)
+ .update(str)
+ .digest('hex')
}
function formatEntry (cache, entry) {
@@ -216,8 +225,8 @@ function formatEntry (cache, entry) {
function readdirOrEmpty (dir) {
return readdirAsync(dir)
- .catch({code: 'ENOENT'}, () => [])
- .catch({code: 'ENOTDIR'}, () => [])
+ .catch({code: 'ENOENT'}, () => [])
+ .catch({code: 'ENOTDIR'}, () => [])
}
function nop () {
diff --git a/deps/npm/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/cacache/lib/util/tmp.js
index 4fc4512cc8..65fc4b297e 100644
--- a/deps/npm/node_modules/cacache/lib/util/tmp.js
+++ b/deps/npm/node_modules/cacache/lib/util/tmp.js
@@ -2,14 +2,21 @@
const BB = require('bluebird')
+const figgyPudding = require('figgy-pudding')
const fixOwner = require('./fix-owner')
const path = require('path')
const rimraf = BB.promisify(require('rimraf'))
const uniqueFilename = require('unique-filename')
+const TmpOpts = figgyPudding({
+ tmpPrefix: {},
+ uid: {},
+ gid: {}
+})
+
module.exports.mkdir = mktmpdir
function mktmpdir (cache, opts) {
- opts = opts || {}
+ opts = TmpOpts(opts)
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
return fixOwner.mkdirfix(tmpTarget, opts.uid, opts.gid).then(() => {
return tmpTarget
@@ -22,11 +29,12 @@ function withTmp (cache, opts, cb) {
cb = opts
opts = null
}
- opts = opts || {}
+ opts = TmpOpts(opts)
return BB.using(mktmpdir(cache, opts).disposer(rimraf), cb)
}
module.exports.fix = fixtmpdir
function fixtmpdir (cache, opts) {
+ opts = TmpOpts(opts)
return fixOwner(path.join(cache, 'tmp'), opts.uid, opts.gid)
}
diff --git a/deps/npm/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/cacache/lib/verify.js
index 6a01004c97..3468bc6b8e 100644
--- a/deps/npm/node_modules/cacache/lib/verify.js
+++ b/deps/npm/node_modules/cacache/lib/verify.js
@@ -3,6 +3,7 @@
const BB = require('bluebird')
const contentPath = require('./content/path')
+const figgyPudding = require('figgy-pudding')
const finished = BB.promisify(require('mississippi').finished)
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
@@ -14,10 +15,22 @@ const ssri = require('ssri')
BB.promisifyAll(fs)
+const VerifyOpts = figgyPudding({
+ concurrency: {
+ default: 20
+ },
+ filter: {},
+ log: {
+ default: { silly () {} }
+ },
+ uid: {},
+ gid: {}
+})
+
module.exports = verify
function verify (cache, opts) {
- opts = opts || {}
- opts.log && opts.log.silly('verify', 'verifying cache at', cache)
+ opts = VerifyOpts(opts)
+ opts.log.silly('verify', 'verifying cache at', cache)
return BB.reduce([
markStartTime,
fixPerms,
@@ -40,7 +53,7 @@ function verify (cache, opts) {
})
}, {}).tap(stats => {
stats.runTime.total = stats.endTime - stats.startTime
- opts.log && opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
+ opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
})
}
@@ -53,7 +66,7 @@ function markEndTime (cache, opts) {
}
function fixPerms (cache, opts) {
- opts.log && opts.log.silly('verify', 'fixing cache permissions')
+ opts.log.silly('verify', 'fixing cache permissions')
return fixOwner.mkdirfix(cache, opts.uid, opts.gid).then(() => {
// TODO - fix file permissions too
return fixOwner.chownr(cache, opts.uid, opts.gid)
@@ -70,11 +83,11 @@ function fixPerms (cache, opts) {
// 5. If content is not marked as live, rimraf it.
//
function garbageCollect (cache, opts) {
- opts.log && opts.log.silly('verify', 'garbage collecting content')
+ opts.log.silly('verify', 'garbage collecting content')
const indexStream = index.lsStream(cache)
const liveContent = new Set()
indexStream.on('data', entry => {
- if (opts && opts.filter && !opts.filter(entry)) { return }
+ if (opts.filter && !opts.filter(entry)) { return }
liveContent.add(entry.integrity.toString())
})
return finished(indexStream).then(() => {
@@ -117,7 +130,7 @@ function garbageCollect (cache, opts) {
})
})
}
- }, {concurrency: opts.concurrency || 20}))
+ }, {concurrency: opts.concurrency}))
})
})
}
@@ -141,7 +154,7 @@ function verifyContent (filepath, sri) {
}
function rebuildIndex (cache, opts) {
- opts.log && opts.log.silly('verify', 'rebuilding index')
+ opts.log.silly('verify', 'rebuilding index')
return index.ls(cache).then(entries => {
const stats = {
missingContent: 0,
@@ -153,7 +166,7 @@ function rebuildIndex (cache, opts) {
if (entries.hasOwnProperty(k)) {
const hashed = index._hashKey(k)
const entry = entries[k]
- const excluded = opts && opts.filter && !opts.filter(entry)
+ const excluded = opts.filter && !opts.filter(entry)
excluded && stats.rejectedEntries++
if (buckets[hashed] && !excluded) {
buckets[hashed].push(entry)
@@ -170,7 +183,7 @@ function rebuildIndex (cache, opts) {
}
return BB.map(Object.keys(buckets), key => {
return rebuildBucket(cache, buckets[key], stats, opts)
- }, {concurrency: opts.concurrency || 20}).then(() => stats)
+ }, {concurrency: opts.concurrency}).then(() => stats)
})
}
@@ -184,7 +197,8 @@ function rebuildBucket (cache, bucket, stats, opts) {
return index.insert(cache, entry.key, entry.integrity, {
uid: opts.uid,
gid: opts.gid,
- metadata: entry.metadata
+ metadata: entry.metadata,
+ size: entry.size
}).then(() => { stats.totalEntries++ })
}).catch({code: 'ENOENT'}, () => {
stats.rejectedEntries++
@@ -195,13 +209,13 @@ function rebuildBucket (cache, bucket, stats, opts) {
}
function cleanTmp (cache, opts) {
- opts.log && opts.log.silly('verify', 'cleaning tmp directory')
+ opts.log.silly('verify', 'cleaning tmp directory')
return rimraf(path.join(cache, 'tmp'))
}
function writeVerifile (cache, opts) {
const verifile = path.join(cache, '_lastverified')
- opts.log && opts.log.silly('verify', 'writing verifile to ' + verifile)
+ opts.log.silly('verify', 'writing verifile to ' + verifile)
return fs.writeFileAsync(verifile, '' + (+(new Date())))
}
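
The `VerifyOpts` pudding above declares defaults (`concurrency: 20`, a no-op `log.silly`), which is what lets these hunks drop the scattered `opts.log && ...` guards and `|| 20` fallbacks; passing `size: entry.size` through `index.insert` during the rebuild is the 11.0.2 fix ("size param no longer lost in a verify"). A short sketch of the default behaviour, reusing the same spec:

```javascript
// Sketch only: defaults declared on the pudding replace the inline guards.
const figgyPudding = require('figgy-pudding')

const VerifyOpts = figgyPudding({
  concurrency: { default: 20 },
  filter: {},
  log: { default: { silly () {} } },
  uid: {},
  gid: {}
})

const opts = VerifyOpts({})                    // nothing provided by the caller
console.log(opts.concurrency)                  // 20 -- spec default applies
opts.log.silly('verify', 'silently ignored')   // safe: default log is a no-op

// A caller-supplied logger simply wins over the default:
const loud = VerifyOpts({ log: { silly: (...args) => console.log(...args) } })
loud.log.silly('verify', 'verifying cache at', '/tmp/my-cache')
```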
diff --git a/deps/npm/node_modules/cacache/node_modules/ssri/CHANGELOG.md b/deps/npm/node_modules/cacache/node_modules/ssri/CHANGELOG.md
deleted file mode 100644
index 7ae2b000dc..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/ssri/CHANGELOG.md
+++ /dev/null
@@ -1,190 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/ssri/compare/v4.1.6...v5.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#9) ([c82983a](https://github.com/zkat/ssri/commit/c82983a))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="4.1.6"></a>
-## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07)
-
-
-### Bug Fixes
-
-* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe))
-
-
-
-<a name="4.1.5"></a>
-## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05)
-
-
-### Bug Fixes
-
-* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e))
-
-
-
-<a name="4.1.4"></a>
-## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e))
-
-
-
-<a name="4.1.3"></a>
-## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b))
-
-
-
-<a name="4.1.2"></a>
-## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18)
-
-
-### Bug Fixes
-
-* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805))
-
-
-
-<a name="4.1.1"></a>
-## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12)
-
-
-### Bug Fixes
-
-* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e))
-
-
-
-<a name="4.1.0"></a>
-# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07)
-
-
-### Features
-
-* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** should have changed the error code before. oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa))
-
-
-### BREAKING CHANGES
-
-* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors
-
-
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03)
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f))
-
-
-### Features
-
-* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74))
-* **fromHex:** ssri.fromHex to make it easier to generate them from hex valus ([049b89e](https://github.com/zkat/ssri/commit/049b89e))
-* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c))
-* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba))
-* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f))
-* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b))
-* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48))
-* **pickAlgorithm:** Intergrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796))
-* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad))
-
-
-### BREAKING CHANGES
-
-* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`.
-* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream.
-
-To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same.
-* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match.
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24)
-
-
-### Bug Fixes
-
-* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c))
-
-
-### Features
-
-* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421))
-* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7))
-* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7))
-* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c))
-* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30))
-* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658))
-
-
-### BREAKING CHANGES
-
-* **pickAlgo:** ssri will prioritize specific hashes now
-* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead.
-* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`.
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-03-23)
-
-
-### Features
-
-* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b))
-
-
-### BREAKING CHANGES
-
-* **api:** Initial API established.
diff --git a/deps/npm/node_modules/cacache/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/cacache/node_modules/ssri/LICENSE.md
deleted file mode 100644
index 8d28acf866..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/cacache/node_modules/ssri/README.md b/deps/npm/node_modules/cacache/node_modules/ssri/README.md
deleted file mode 100644
index f2fc035da5..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/ssri/README.md
+++ /dev/null
@@ -1,462 +0,0 @@
-# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest)
-
-[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource
-Integrity, is a Node.js utility for parsing, manipulating, serializing,
-generating, and verifying [Subresource
-Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes.
-
-## Install
-
-`$ npm install --save ssri`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * Parsing & Serializing
- * [`parse`](#parse)
- * [`stringify`](#stringify)
- * [`Integrity#concat`](#integrity-concat)
- * [`Integrity#toString`](#integrity-to-string)
- * [`Integrity#toJSON`](#integrity-to-json)
- * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm)
- * [`Integrity#hexDigest`](#integrity-hex-digest)
- * Integrity Generation
- * [`fromHex`](#from-hex)
- * [`fromData`](#from-data)
- * [`fromStream`](#from-stream)
- * [`create`](#create)
- * Integrity Verification
- * [`checkData`](#check-data)
- * [`checkStream`](#check-stream)
- * [`integrityStream`](#integrity-stream)
-
-### Example
-
-```javascript
-const ssri = require('ssri')
-
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Parsing and serializing
-const parsed = ssri.parse(integrity)
-ssri.stringify(parsed) // === integrity (works on non-Integrity objects)
-parsed.toString() // === integrity
-
-// Async stream functions
-ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...)
-ssri.fromStream(fs.createReadStream('./my-file')).then(sri => {
- sri.toString() === integrity
-})
-fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri))
-
-// Sync data functions
-ssri.fromData(fs.readFileSync('./my-file')) // === parsed
-ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512'
-```
-
-### Features
-
-* Parses and stringifies SRI strings.
-* Generates SRI strings from raw data or Streams.
-* Strict standard compliance.
-* `?foo` metadata option support.
-* Multiple entries for the same algorithm.
-* Object-based integrity hash manipulation.
-* Small footprint: no dependencies, concise implementation.
-* Full test coverage.
-* Customizable algorithm picker.
-
-### Contributing
-
-The ssri team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity`
-
-Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
-string, an `Hash`-like with `digest` and `algorithm` fields and an optional
-`options` field, or an `Integrity`-like object. The resulting object will be an
-`Integrity` instance that has this shape:
-
-```javascript
-{
- 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
- 'sha512': [
- {algorithm: 'sha512', digest: 'c0ffee', options: []},
- {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
- ],
-}
-```
-
-If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
-single object that looks like `{algorithm, digest, options}`, as opposed to a
-larger object with multiple of these.
-
-If `opts.strict` is truthy, the resulting object will be filtered such that
-it strictly follows the Subresource Integrity spec, throwing away any entries
-with any invalid components. This also means a restricted set of algorithms
-will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
-
-Strict mode is recommended if the integrity strings are intended for use in
-browsers, or in other situations where strict adherence to the spec is needed.
-
-##### Example
-
-```javascript
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
-```
-
-#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String`
-
-This function is identical to [`Integrity#toString()`](#integrity-to-string),
-except it can be used on _any_ object that [`parse`](#parse) can handle -- that
-is, a string, an `Hash`-like, or an `Integrity`-like.
-
-The `opts.sep` option defines the string to use when joining multiple entries
-together. To be spec-compliant, this _must_ be whitespace. The default is a
-single space (`' '`).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// Useful for cleaning up input SRI strings:
-ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
-// -> 'sha512-foo sha384-bar'
-
-// Hash-like: only a single entry.
-ssri.stringify({
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
-ssri.stringify({
- 'sha512': [
- {
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
- }
- ]
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-```
-
-#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
-
-Concatenates an `Integrity` object with another IntegrityLike, or an integrity
-string.
-
-This is functionally equivalent to concatenating the string format of both
-integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string.
-
-If `opts.strict` is true, the new `Integrity` will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// This will combine the integrity checks for two different versions of
-// your index.js file so you can use a single integrity string and serve
-// either of these to clients, from a single `<script>` tag.
-const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js'))
-const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js'))
-
-// Note that browsers (and ssri) will succeed as long as ONE of the entries
-// for the *prioritized* algorithm succeeds. That is, in order for this fallback
-// to work, both desktop and mobile *must* use the same `algorithm` values.
-desktopIntegrity.concat(mobileIntegrity)
-```
-
-#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `opts.sep`, which defaults to `' '`.
-
-If you want to serialize an object that didn't come from an `ssri` function,
-use [`ssri.stringify()`](#stringify).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-ssri.parse(integrity).toString() === integrity
-```
-
-#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `' '`.
-
-This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`.
-For more info check out [toJSON() behavior on mdn](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior).
-
-##### Example
-
-```javascript
-const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"'
-
-JSON.stringify(ssri.parse(integrity)) === integrity
-```
-
-#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String`
-
-Returns the "best" algorithm from those available in the integrity object.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512
-```
-
-#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String`
-
-`Integrity` is assumed to be either a single-hash `Integrity` instance, or a
-`Hash` instance. Returns its `digest`, converted to a hex representation of the
-base64 data.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f'
-```
-
-#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity`
-
-Creates an `Integrity` object with a single entry, based on a hex-formatted
-hash. This is a utility function to help convert existing shasums to the
-Integrity format, and is roughly equivalent to something like:
-
-```javascript
-algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64')
-```
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to all generated integrity hashes generated by
-`fromData`. This is a loosely-specified feature of SRIs, and currently has no
-specified semantics besides being `?`-separated. Use at your own risk, and
-probably avoid if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-If `opts.single` is true, a single `Hash` object will be returned.
-
-##### Example
-
-```javascript
-ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef'
-```
-
-#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity`
-
-Creates an `Integrity` object from either string or `Buffer` data, calculating
-all the requested hashes and adding any specified options to the object.
-
-`opts.algorithms` determines which algorithms to generate hashes for. All
-results will be included in a single `Integrity` object. The default value for
-`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed
-in `crypto.getHashes()` for the host Node.js platform.
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to all generated integrity hashes generated by
-`fromData`. This is a loosely-specified feature of SRIs, and currently has no
-specified semantics besides being `?`-separated. Use at your own risk, and
-probably avoid if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrityObj = ssri.fromData('foobarbaz', {
- algorithms: ['sha256', 'sha384', 'sha512']
-})
-integrity.toString('\n')
-// ->
-// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=
-// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
-
-#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>`
-
-Returns a Promise of an Integrity object calculated by reading data from
-a given `stream`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-Additionally, `opts.Promise` may be passed in to inject a Promise library of
-choice. By default, ssri will use Node's built-in Promises.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-ssri.fromStream(fs.createReadStream('index.js'), {
- algorithms: ['sha1', 'sha512']
-}).then(integrity => {
- return ssri.checkStream(fs.createReadStream('index.js'), integrity)
-}) // succeeds
-```
-
-#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>`
-
-Returns a Hash object with `update(<Buffer or string>[,enc])` and `digest()` methods.
-
-
-The Hash object provides the same methods as [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash).
-`digest()` accepts no arguments and returns an Integrity object calculated by reading data from
-calls to update.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = ssri.create().update('foobarbaz').digest()
-integrity.toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
-
-#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false`
-
-Verifies `data` integrity against an `sri` argument. `data` may be either a
-`String` or a `Buffer`, and `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-If verification succeeds, `checkData` will return the name of the algorithm that
-was used for verification (a truthy value). Otherwise, it will return `false`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-##### Example
-
-```javascript
-const data = fs.readFileSync('index.js')
-ssri.checkData(data, ssri.fromData(data)) // -> 'sha512'
-ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0')
-ssri.checkData(data, 'sha1-BaDDigEST') // -> false
-```
-
-#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>`
-
-Verifies the contents of `stream` against an `sri` argument. `stream` will be
-consumed in its entirety by this process. `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-`checkStream` will return a Promise that either resolves to the
-`Hash` that succeeded verification, or, if the verification fails
-or an error happens with `stream`, the Promise will be rejected.
-
-If the Promise is rejected because verification failed, the returned error will
-have `err.code` as `EINTEGRITY`.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be returned by a rejection if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- integrity
-)
-// ->
-// Promise<{
-// algorithm: 'sha512',
-// digest: 'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
-// }>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0'
-) // -> Promise<Hash>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha1-BaDDigEST'
-) // -> Promise<Error<{code: 'EINTEGRITY'}>>
-```
-
-#### <a name="integrity-stream"></a> `> integrityStream(sri, [opts]) -> IntegrityStream`
-
-Returns a `Transform` stream that data can be piped through in order to generate
-and optionally check data integrity for piped data. When the stream completes
-successfully, it emits `size` and `integrity` events, containing the total
-number of bytes processed and a calculated `Integrity` instance based on stream
-data, respectively.
-
-If `opts.algorithms` is passed in, the listed algorithms will be calculated when
-generating the final `Integrity` instance. The default is `['sha512']`.
-
-If `opts.single` is passed in, a single `Hash` instance will be returned.
-
-If `opts.integrity` is passed in, it should be an `integrity` value understood
-by [`parse`](#parse) that the stream will check the data against. If
-verification succeeds, the integrity stream will emit a `verified` event whose
-value is a single `Hash` object that is the one that succeeded verification. If
-verification fails, the stream will error with an `EINTEGRITY` error code.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be emitted by the stream if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-fs.createReadStream('index.js')
-.pipe(ssri.checkStream(integrity))
-```
diff --git a/deps/npm/node_modules/cacache/node_modules/ssri/index.js b/deps/npm/node_modules/cacache/node_modules/ssri/index.js
deleted file mode 100644
index 8ece662ba6..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/ssri/index.js
+++ /dev/null
@@ -1,334 +0,0 @@
-'use strict'
-
-const Buffer = require('safe-buffer').Buffer
-
-const crypto = require('crypto')
-const Transform = require('stream').Transform
-
-const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
-
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([^-]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([^-]+)-([A-Za-z0-9+/]+(?:=?=?))([?\x21-\x7E]*)$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-class Hash {
- get isHash () { return true }
- constructor (hash, opts) {
- const strict = !!(opts && opts.strict)
- this.source = hash.trim()
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) { return }
- if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- this.options = rawOpts ? rawOpts.slice(1).split('?') : []
- }
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- if (opts && opts.strict) {
- // Strict mode enforces the standard as close to the foot of the
- // letter as it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
- // leave it as-is, since there's multiple standards, and the
- // specified is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- (this.options || []).every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- const options = this.options && this.options.length
- ? `?${this.options.join('?')}`
- : ''
- return `${this.algorithm}-${this.digest}${options}`
- }
-}
-
-class Integrity {
- get isIntegrity () { return true }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = opts || {}
- let sep = opts.sep || ' '
- if (opts.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
- }
- return Object.keys(this).map(k => {
- return this[k].map(hash => {
- return Hash.prototype.toString.call(hash, opts)
- }).filter(x => x.length).join(sep)
- }).filter(x => x.length).join(sep)
- }
- concat (integrity, opts) {
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
- hexDigest () {
- return parse(this, {single: true}).hexDigest()
- }
- pickAlgorithm (opts) {
- const pickAlgorithm = (opts && opts.pickAlgorithm) || getPrioritizedHash
- const keys = Object.keys(this)
- if (!keys.length) {
- throw new Error(`No algorithms available for ${
- JSON.stringify(this.toString())
- }`)
- }
- return keys.reduce((acc, algo) => {
- return pickAlgorithm(acc, algo) || acc
- })
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- opts = opts || {}
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts.single) {
- return new Hash(integrity, opts)
- }
- return integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- const optString = (opts && opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- opts = opts || {}
- const algorithms = opts.algorithms || ['sha512']
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- opts = opts || {}
- const P = opts.Promise || Promise
- const istream = integrityStream(opts)
- return new P((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => { sri = s })
- istream.on('end', () => resolve(sri))
- istream.on('data', () => {})
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- opts = opts || {}
- sri = parse(sri, opts)
- if (!Object.keys(sri).length) { return false }
- const algorithm = sri.pickAlgorithm(opts)
- const digests = sri[algorithm] || []
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- return digests.find(hash => hash.digest === digest) || false
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = opts || {}
- const P = opts.Promise || Promise
- const checker = integrityStream(Object.assign({}, opts, {
- integrity: sri
- }))
- return new P((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let sri
- checker.on('verified', s => { sri = s })
- checker.on('end', () => resolve(sri))
- checker.on('data', () => {})
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts) {
- opts = opts || {}
- // For verification
- const sri = opts.integrity && parse(opts.integrity, opts)
- const goodSri = sri && Object.keys(sri).length
- const algorithm = goodSri && sri.pickAlgorithm(opts)
- const digests = goodSri && sri[algorithm]
- // Calculating stream
- const algorithms = opts.algorithms || [algorithm || 'sha512']
- const hashes = algorithms.map(crypto.createHash)
- let streamSize = 0
- const stream = new Transform({
- transform (chunk, enc, cb) {
- streamSize += chunk.length
- hashes.forEach(h => h.update(chunk, enc))
- cb(null, chunk, enc)
- }
- }).on('end', () => {
- const optString = (opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- const newSri = parse(hashes.map((h, i) => {
- return `${algorithms[i]}-${h.digest('base64')}${optString}`
- }).join(' '), opts)
- const match = (
- // Integrity verification mode
- opts.integrity &&
- newSri[algorithm] &&
- digests &&
- digests.find(hash => {
- return newSri[algorithm].find(newhash => {
- return hash.digest === newhash.digest
- })
- })
- )
- if (typeof opts.size === 'number' && streamSize !== opts.size) {
- const err = new Error(`stream size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${streamSize}`)
- err.code = 'EBADSIZE'
- err.found = streamSize
- err.expected = opts.size
- err.sri = sri
- stream.emit('error', err)
- } else if (opts.integrity && !match) {
- const err = new Error(`${sri} integrity checksum failed when using ${algorithm}: wanted ${digests} but got ${newSri}. (${streamSize} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = digests
- err.algorithm = algorithm
- err.sri = sri
- stream.emit('error', err)
- } else {
- stream.emit('size', streamSize)
- stream.emit('integrity', newSri)
- match && stream.emit('verified', match)
- }
- })
- return stream
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- opts = opts || {}
- const algorithms = opts.algorithms || ['sha512']
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function (enc) {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- }
- }
-}
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
-]
-function getPrioritizedHash (algo1, algo2) {
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/ssri/package.json b/deps/npm/node_modules/cacache/node_modules/ssri/package.json
deleted file mode 100644
index 15b3b5648a..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/ssri/package.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
- "_from": "ssri@^5.0.0",
- "_id": "ssri@5.0.0",
- "_inBundle": false,
- "_integrity": "sha512-728D4yoQcQm1ooZvSbywLkV1RjfITZXh0oWrhM/lnsx3nAHx7LsRGJWB/YyvoceAYRq98xqbstiN4JBv1/wNHg==",
- "_location": "/cacache/ssri",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "ssri@^5.0.0",
- "name": "ssri",
- "escapedName": "ssri",
- "rawSpec": "^5.0.0",
- "saveSpec": null,
- "fetchSpec": "^5.0.0"
- },
- "_requiredBy": [
- "/cacache"
- ],
- "_resolved": "https://registry.npmjs.org/ssri/-/ssri-5.0.0.tgz",
- "_shasum": "13c19390b606c821f2a10d02b351c1729b94d8cf",
- "_spec": "ssri@^5.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/cacache",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/ssri/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "safe-buffer": "^5.1.0"
- },
- "deprecated": false,
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "devDependencies": {
- "nyc": "^10.3.2",
- "standard": "^9.0.2",
- "standard-version": "^4.1.0",
- "tap": "^10.3.3",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/zkat/ssri#readme",
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "ssri",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/ssri.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "5.0.0"
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/y18n/LICENSE b/deps/npm/node_modules/cacache/node_modules/y18n/LICENSE
deleted file mode 100644
index 3c157f0b9d..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/y18n/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any purpose
-with or without fee is hereby granted, provided that the above copyright notice
-and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
-THIS SOFTWARE.
diff --git a/deps/npm/node_modules/cacache/node_modules/y18n/README.md b/deps/npm/node_modules/cacache/node_modules/y18n/README.md
deleted file mode 100644
index 9859458f20..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/y18n/README.md
+++ /dev/null
@@ -1,91 +0,0 @@
-# y18n
-
-[![Build Status][travis-image]][travis-url]
-[![Coverage Status][coveralls-image]][coveralls-url]
-[![NPM version][npm-image]][npm-url]
-[![js-standard-style][standard-image]][standard-url]
-
-The bare-bones internationalization library used by yargs.
-
-Inspired by [i18n](https://www.npmjs.com/package/i18n).
-
-## Examples
-
-_simple string translation:_
-
-```js
-var __ = require('y18n').__
-
-console.log(__('my awesome string %s', 'foo'))
-```
-
-output:
-
-`my awesome string foo`
-
-_pluralization support:_
-
-```js
-var __n = require('y18n').__n
-
-console.log(__n('one fish %s', '%d fishes %s', 2, 'foo'))
-```
-
-output:
-
-`2 fishes foo`
-
-## JSON Language Files
-
-The JSON language files should be stored in a `./locales` folder.
-File names correspond to locales, e.g., `en.json`, `pirate.json`.
-
-When strings are observed for the first time they will be
-added to the JSON file corresponding to the current locale.
-
-## Methods
-
-### require('y18n')(config)
-
-Create an instance of y18n with the config provided, options include:
-
-* `directory`: the locale directory, default `./locales`.
-* `updateFiles`: should newly observed strings be updated in file, default `true`.
-* `locale`: what locale should be used.
-* `fallbackToLanguage`: should fallback to a language-only file (e.g. `en.json`)
- be allowed if a file matching the locale does not exist (e.g. `en_US.json`),
- default `true`.
-
-### y18n.\_\_(str, arg, arg, arg)
-
-Print a localized string, `%s` will be replaced with `arg`s.
-
-### y18n.\_\_n(singularString, pluralString, count, arg, arg, arg)
-
-Print a localized string with appropriate pluralization. If `%d` is provided
-in the string, the `count` will replace this placeholder.
-
-### y18n.setLocale(str)
-
-Set the current locale being used.
-
-### y18n.getLocale()
-
-What locale is currently being used?
-
-### y18n.updateLocale(obj)
-
-Update the current locale with the key value pairs in `obj`.
-
-## License
-
-ISC
-
-[travis-url]: https://travis-ci.org/yargs/y18n
-[travis-image]: https://img.shields.io/travis/yargs/y18n.svg
-[coveralls-url]: https://coveralls.io/github/yargs/y18n
-[coveralls-image]: https://img.shields.io/coveralls/yargs/y18n.svg
-[npm-url]: https://npmjs.org/package/y18n
-[npm-image]: https://img.shields.io/npm/v/y18n.svg
-[standard-image]: https://img.shields.io/badge/code%20style-standard-brightgreen.svg
-[standard-url]: https://github.com/feross/standard
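
Since the removed y18n README above documents the constructor options and the methods separately, here is one hedged sketch tying them together (the './locales' contents and the 'pirate' locale are illustrative, not part of the original docs):

```js
// Combines the config options and methods from the y18n README above.
var y18n = require('y18n')({
  directory: './locales',   // per-locale JSON files live here
  updateFiles: true,        // write newly observed strings back to disk
  locale: 'en',
  fallbackToLanguage: true  // fall back to en.json if en_US.json is missing
})

console.log(y18n.__('hello %s', 'world'))     // "hello world"
console.log(y18n.__n('%d cat', '%d cats', 2)) // "2 cats"

y18n.setLocale('pirate')
console.log(y18n.getLocale())                 // "pirate"

// Seed a translation in memory, then use it.
y18n.updateLocale({ 'hello %s': 'ahoy %s' })
console.log(y18n.__('hello %s', 'matey'))     // "ahoy matey"
```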
diff --git a/deps/npm/node_modules/cacache/node_modules/y18n/index.js b/deps/npm/node_modules/cacache/node_modules/y18n/index.js
deleted file mode 100644
index 91b159e342..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/y18n/index.js
+++ /dev/null
@@ -1,172 +0,0 @@
-var fs = require('fs')
-var path = require('path')
-var util = require('util')
-
-function Y18N (opts) {
- // configurable options.
- opts = opts || {}
- this.directory = opts.directory || './locales'
- this.updateFiles = typeof opts.updateFiles === 'boolean' ? opts.updateFiles : true
- this.locale = opts.locale || 'en'
- this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true
-
- // internal stuff.
- this.cache = {}
- this.writeQueue = []
-}
-
-Y18N.prototype.__ = function () {
- var args = Array.prototype.slice.call(arguments)
- var str = args.shift()
- var cb = function () {} // start with noop.
-
- if (typeof args[args.length - 1] === 'function') cb = args.pop()
- cb = cb || function () {} // noop.
-
- if (!this.cache[this.locale]) this._readLocaleFile()
-
- // we've observed a new string, update the language file.
- if (!this.cache[this.locale][str] && this.updateFiles) {
- this.cache[this.locale][str] = str
-
- // include the current directory and locale,
- // since these values could change before the
- // write is performed.
- this._enqueueWrite([this.directory, this.locale, cb])
- } else {
- cb()
- }
-
- return util.format.apply(util, [this.cache[this.locale][str] || str].concat(args))
-}
-
-Y18N.prototype._enqueueWrite = function (work) {
- this.writeQueue.push(work)
- if (this.writeQueue.length === 1) this._processWriteQueue()
-}
-
-Y18N.prototype._processWriteQueue = function () {
- var _this = this
- var work = this.writeQueue[0]
-
- // destructure the enqueued work.
- var directory = work[0]
- var locale = work[1]
- var cb = work[2]
-
- var languageFile = this._resolveLocaleFile(directory, locale)
- var serializedLocale = JSON.stringify(this.cache[locale], null, 2)
-
- fs.writeFile(languageFile, serializedLocale, 'utf-8', function (err) {
- _this.writeQueue.shift()
- if (_this.writeQueue.length > 0) _this._processWriteQueue()
- cb(err)
- })
-}
-
-Y18N.prototype._readLocaleFile = function () {
- var localeLookup = {}
- var languageFile = this._resolveLocaleFile(this.directory, this.locale)
-
- try {
- localeLookup = JSON.parse(fs.readFileSync(languageFile, 'utf-8'))
- } catch (err) {
- if (err instanceof SyntaxError) {
- err.message = 'syntax error in ' + languageFile
- }
-
- if (err.code === 'ENOENT') localeLookup = {}
- else throw err
- }
-
- this.cache[this.locale] = localeLookup
-}
-
-Y18N.prototype._resolveLocaleFile = function (directory, locale) {
- var file = path.resolve(directory, './', locale + '.json')
- if (this.fallbackToLanguage && !this._fileExistsSync(file) && ~locale.lastIndexOf('_')) {
- // attempt fallback to language only
- var languageFile = path.resolve(directory, './', locale.split('_')[0] + '.json')
- if (this._fileExistsSync(languageFile)) file = languageFile
- }
- return file
-}
-
-// this only exists because fs.existsSync() "will be deprecated"
-// see https://nodejs.org/api/fs.html#fs_fs_existssync_path
-Y18N.prototype._fileExistsSync = function (file) {
- try {
- return fs.statSync(file).isFile()
- } catch (err) {
- return false
- }
-}
-
-Y18N.prototype.__n = function () {
- var args = Array.prototype.slice.call(arguments)
- var singular = args.shift()
- var plural = args.shift()
- var quantity = args.shift()
-
- var cb = function () {} // start with noop.
- if (typeof args[args.length - 1] === 'function') cb = args.pop()
-
- if (!this.cache[this.locale]) this._readLocaleFile()
-
- var str = quantity === 1 ? singular : plural
- if (this.cache[this.locale][singular]) {
- str = this.cache[this.locale][singular][quantity === 1 ? 'one' : 'other']
- }
-
- // we've observed a new string, update the language file.
- if (!this.cache[this.locale][singular] && this.updateFiles) {
- this.cache[this.locale][singular] = {
- one: singular,
- other: plural
- }
-
- // include the current directory and locale,
- // since these values could change before the
- // write is performed.
- this._enqueueWrite([this.directory, this.locale, cb])
- } else {
- cb()
- }
-
- // if a %d placeholder is provided, add quantity
- // to the arguments expanded by util.format.
- var values = [str]
- if (~str.indexOf('%d')) values.push(quantity)
-
- return util.format.apply(util, values.concat(args))
-}
-
-Y18N.prototype.setLocale = function (locale) {
- this.locale = locale
-}
-
-Y18N.prototype.getLocale = function () {
- return this.locale
-}
-
-Y18N.prototype.updateLocale = function (obj) {
- if (!this.cache[this.locale]) this._readLocaleFile()
-
- for (var key in obj) {
- this.cache[this.locale][key] = obj[key]
- }
-}
-
-module.exports = function (opts) {
- var y18n = new Y18N(opts)
-
- // bind all functions to y18n, so that
- // they can be used in isolation.
- for (var key in y18n) {
- if (typeof y18n[key] === 'function') {
- y18n[key] = y18n[key].bind(y18n)
- }
- }
-
- return y18n
-}
diff --git a/deps/npm/node_modules/cacache/node_modules/y18n/package.json b/deps/npm/node_modules/cacache/node_modules/y18n/package.json
deleted file mode 100644
index a96457708a..0000000000
--- a/deps/npm/node_modules/cacache/node_modules/y18n/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "y18n@^3.2.1",
- "_id": "y18n@3.2.1",
- "_inBundle": false,
- "_integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=",
- "_location": "/cacache/y18n",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "y18n@^3.2.1",
- "name": "y18n",
- "escapedName": "y18n",
- "rawSpec": "^3.2.1",
- "saveSpec": null,
- "fetchSpec": "^3.2.1"
- },
- "_requiredBy": [
- "/cacache"
- ],
- "_resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz",
- "_shasum": "6d15fba884c08679c0d77e88e7759e811e07fa41",
- "_spec": "y18n@^3.2.1",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/cacache",
- "author": {
- "name": "Ben Coe",
- "email": "ben@npmjs.com"
- },
- "bugs": {
- "url": "https://github.com/yargs/y18n/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "the bare-bones internationalization library used by yargs",
- "devDependencies": {
- "chai": "^3.4.1",
- "coveralls": "^2.11.6",
- "mocha": "^2.3.4",
- "nyc": "^6.1.1",
- "rimraf": "^2.5.0",
- "standard": "^5.4.1"
- },
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/yargs/y18n",
- "keywords": [
- "i18n",
- "internationalization",
- "yargs"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "y18n",
- "repository": {
- "type": "git",
- "url": "git+ssh://git@github.com/yargs/y18n.git"
- },
- "scripts": {
- "coverage": "nyc report --reporter=text-lcov | coveralls",
- "pretest": "standard",
- "test": "nyc mocha"
- },
- "version": "3.2.1"
-}
diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json
index 221c605109..c9e61596f9 100644
--- a/deps/npm/node_modules/cacache/package.json
+++ b/deps/npm/node_modules/cacache/package.json
@@ -1,30 +1,34 @@
{
- "_from": "cacache@latest",
- "_id": "cacache@10.0.1",
+ "_args": [
+ [
+ "cacache@11.0.2",
+ "/Users/rebecca/code/npm"
+ ]
+ ],
+ "_from": "cacache@11.0.2",
+ "_id": "cacache@11.0.2",
"_inBundle": false,
- "_integrity": "sha512-dRHYcs9LvG9cHgdPzjiI+/eS7e1xRhULrcyOx04RZQsszNJXU2SL9CyG60yLnge282Qq5nwTv+ieK2fH+WPZmA==",
+ "_integrity": "sha512-hMiz7LN4w8sdfmKsvNs80ao/vf2JCGWWdpu95JyY90AJZRbZJmgE71dCefRiNf8OCqiZQDcUBfYiLlUNu4/j5A==",
"_location": "/cacache",
- "_phantomChildren": {
- "safe-buffer": "5.1.1"
- },
+ "_phantomChildren": {},
"_requested": {
- "type": "tag",
+ "type": "version",
"registry": true,
- "raw": "cacache@latest",
+ "raw": "cacache@11.0.2",
"name": "cacache",
"escapedName": "cacache",
- "rawSpec": "latest",
+ "rawSpec": "11.0.2",
"saveSpec": null,
- "fetchSpec": "latest"
+ "fetchSpec": "11.0.2"
},
"_requiredBy": [
- "#USER",
- "/"
+ "/",
+ "/make-fetch-happen",
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-10.0.1.tgz",
- "_shasum": "3e05f6e616117d9b54665b1b20c8aeb93ea5d36f",
- "_spec": "cacache@latest",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_resolved": "https://registry.npmjs.org/cacache/-/cacache-11.0.2.tgz",
+ "_spec": "11.0.2",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -32,7 +36,6 @@
"bugs": {
"url": "https://github.com/zkat/cacache/issues"
},
- "bundleDependencies": false,
"cache-version": {
"content": "2",
"index": "5"
@@ -56,33 +59,32 @@
}
],
"dependencies": {
- "bluebird": "^3.5.0",
+ "bluebird": "^3.5.1",
"chownr": "^1.0.1",
+ "figgy-pudding": "^3.1.0",
"glob": "^7.1.2",
"graceful-fs": "^4.1.11",
- "lru-cache": "^4.1.1",
- "mississippi": "^1.3.0",
+ "lru-cache": "^4.1.2",
+ "mississippi": "^3.0.0",
"mkdirp": "^0.5.1",
"move-concurrently": "^1.0.1",
"promise-inflight": "^1.0.1",
- "rimraf": "^2.6.1",
- "ssri": "^5.0.0",
+ "rimraf": "^2.6.2",
+ "ssri": "^6.0.0",
"unique-filename": "^1.1.0",
- "y18n": "^3.2.1"
+ "y18n": "^4.0.0"
},
- "deprecated": false,
"description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
"devDependencies": {
"benchmark": "^2.1.4",
- "chalk": "^2.0.1",
- "cross-env": "^5.0.1",
- "nyc": "^11.1.0",
+ "chalk": "^2.3.2",
+ "cross-env": "^5.1.4",
"require-inject": "^1.4.2",
"safe-buffer": "^5.1.1",
- "standard": "^10.0.2",
- "standard-version": "^4.2.0",
+ "standard": "^11.0.1",
+ "standard-version": "^4.3.0",
"tacks": "^1.2.2",
- "tap": "^10.7.0",
+ "tap": "^11.1.3",
"weallbehave": "^1.2.0",
"weallcontribute": "^1.0.8"
},
@@ -118,12 +120,12 @@
"benchmarks": "node test/benchmarks",
"postrelease": "npm publish && git push --follow-tags",
"prerelease": "npm t",
- "pretest": "standard lib test *.js",
+ "pretest": "standard",
"release": "standard-version -s",
- "test": "cross-env CACACHE_UPDATE_LOCALE_FILES=true nyc --all -- tap -J test/*.js",
+ "test": "cross-env CACACHE_UPDATE_LOCALE_FILES=true tap --coverage --nyc-arg=--all -J test/*.js",
"test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "10.0.1"
+ "version": "11.0.2"
}
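
The dependency changes above (in particular the new figgy-pudding dependency) are what drive the put.js rewrite in the next diff. As a rough sketch of the pattern, assuming figgy-pudding@3's factory/concat API and a trimmed-down version of the PutOpts spec from that diff:

```js
// Sketch of the figgy-pudding opts handling cacache@11 switches to.
const figgyPudding = require('figgy-pudding')

// Trimmed-down version of the PutOpts spec added in put.js below.
const PutOpts = figgyPudding({
  algorithms: { default: ['sha512'] },
  memoize: {},
  size: {}
})

const opts = PutOpts({ memoize: true })
console.log(opts.algorithms) // ['sha512'] -- declared default kicks in
console.log(opts.memoize)    // true

// concat() returns a new pudding rather than mutating the original,
// which is why the diff can drop the old "TODO - stop modifying opts".
const withSize = opts.concat({ size: 1024 })
console.log(withSize.size)   // 1024
console.log(opts.size)       // undefined -- original left untouched
```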
diff --git a/deps/npm/node_modules/cacache/put.js b/deps/npm/node_modules/cacache/put.js
index fe1293e5e7..0b0ee14978 100644
--- a/deps/npm/node_modules/cacache/put.js
+++ b/deps/npm/node_modules/cacache/put.js
@@ -1,17 +1,31 @@
'use strict'
+const figgyPudding = require('figgy-pudding')
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const write = require('./lib/content/write')
const to = require('mississippi').to
+const PutOpts = figgyPudding({
+ algorithms: {
+ default: ['sha512']
+ },
+ integrity: {},
+ memoize: {},
+ metadata: {},
+ size: {},
+ tmpPrefix: {},
+ uid: {},
+ gid: {}
+})
+
module.exports = putData
function putData (cache, key, data, opts) {
- opts = opts || {}
+ opts = PutOpts(opts)
return write(cache, data, opts).then(res => {
- // TODO - stop modifying opts
- opts.size = res.size
- return index.insert(cache, key, res.integrity, opts).then(entry => {
+ return index.insert(
+ cache, key, res.integrity, opts.concat({size: res.size})
+ ).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, data, opts)
}
@@ -22,7 +36,7 @@ function putData (cache, key, data, opts) {
module.exports.stream = putStream
function putStream (cache, key, opts) {
- opts = opts || {}
+ opts = PutOpts(opts)
let integrity
let size
const contentStream = write.stream(
@@ -45,9 +59,7 @@ function putStream (cache, key, opts) {
})
}, cb => {
contentStream.end(() => {
- // TODO - stop modifying `opts`
- opts.size = size
- index.insert(cache, key, integrity, opts).then(entry => {
+ index.insert(cache, key, integrity, opts.concat({size})).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
}