1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
|
'use strict'
var fs = require('graceful-fs')
var path = require('path')
var zlib = require('zlib')
var log = require('npmlog')
var realizePackageSpecifier = require('realize-package-specifier')
var tar = require('tar')
var once = require('once')
var semver = require('semver')
var readPackageTree = require('read-package-tree')
var readPackageJson = require('read-package-json')
var iferr = require('iferr')
var rimraf = require('rimraf')
var clone = require('lodash.clonedeep')
var validate = require('aproba')
var unpipe = require('unpipe')
var normalizePackageData = require('normalize-package-data')
var limit = require('call-limit')
var npm = require('./npm.js')
var mapToRegistry = require('./utils/map-to-registry.js')
var cache = require('./cache.js')
var cachedPackageRoot = require('./cache/cached-package-root.js')
var tempFilename = require('./utils/temp-filename.js')
var getCacheStat = require('./cache/get-stat.js')
var unpack = require('./utils/tar.js').unpack
var pulseTillDone = require('./utils/pulse-till-done.js')
var parseJSON = require('./utils/parse-json.js')
var pickManifestFromRegistryMetadata = require('./utils/pick-manifest-from-registry-metadata.js')
// Wrap `done` so that any failure is logged and the progress tracker
// (when one was provided) is closed out before the callback fires.
function andLogAndFinish (spec, tracker, done) {
  validate('SF', [spec, done])
  return finishFetch
  function finishFetch (er, pkg) {
    if (!er) return done(er, pkg)
    log.silly('fetchPackageMetaData', 'error for ' + spec, er)
    if (tracker) tracker.finish()
    return done(er, pkg)
  }
}
module.exports = limit(fetchPackageMetadata, npm.limit.fetch)
// Resolve a dependency specifier (raw string or realized specifier object)
// into its package.json metadata, annotated with request provenance.
// Callable as (spec, where, done) or (spec, where, opts, done).
function fetchPackageMetadata (spec, where, opts, done) {
  validate('SSOF|SSFZ|OSOF|OSFZ', [spec, where, opts, done])
  // Three-argument form: shift `opts` into `done`.
  if (!done) {
    done = opts
    opts = {}
  }
  var tracker = opts.tracker
  var dep
  if (typeof spec === 'object') {
    dep = spec
    spec = dep.raw
  }
  var logAndFinish = andLogAndFinish(spec, tracker, done)
  if (!dep) {
    // Raw string spec: realize it into a specifier object, then recurse.
    log.silly('fetchPackageMetaData', spec)
    return realizePackageSpecifier(spec, where, iferr(logAndFinish, function (realized) {
      fetchPackageMetadata(realized, where, {tracker: tracker}, done)
    }))
  }
  switch (dep.type) {
    case 'version':
    case 'range':
    case 'tag':
      // Registry lookups (exact version, semver range, or dist-tag).
      fetchNamedPackageData(dep, opts, addRequestedAndFinish)
      break
    case 'directory':
      // Local directory dependency: read its package.json directly.
      fetchDirectoryPackageData(dep, where, addRequestedAndFinish)
      break
    default:
      // Everything else (git, remote/local tarballs) goes through the cache.
      fetchOtherPackageData(spec, dep, where, addRequestedAndFinish)
  }
  function addRequestedAndFinish (er, pkg) {
    if (pkg) annotateMetadata(pkg, dep, spec, where)
    logAndFinish(er, pkg)
  }
}
// Stamp `pkg` with how and where it was requested so later install stages
// can reconstruct the request context, then normalize its metadata.
var annotateMetadata = module.exports.annotateMetadata = function (pkg, requested, spec, where) {
  validate('OOSS', arguments)
  pkg._requested = requested
  pkg._spec = spec
  pkg._where = where
  pkg._args = pkg._args || []
  pkg._args.push([requested, where])
  // non-npm registries can and will return unnormalized data, plus
  // even the npm registry may have package data normalized with older
  // normalization rules. This ensures we get package data in a consistent,
  // stable format.
  try {
    normalizePackageData(pkg)
  } catch (ex) {
    // Best-effort: unnormalizable metadata is passed through as-is.
  }
}
// Fetch metadata for non-registry, non-directory specs (git, remote and
// local tarballs) by adding them to the cache, then hand back a copy of
// the resulting manifest flagged as cached.
function fetchOtherPackageData (spec, dep, where, next) {
  validate('SOSF', arguments)
  log.silly('fetchOtherPackageData', spec)
  cache.add(spec, null, where, false, iferr(next, function (pkg) {
    var fromCache = clone(pkg)
    fromCache._inCache = true
    next(null, fromCache)
  }))
}
// A directory dependency's metadata is just its package.json on disk.
function fetchDirectoryPackageData (dep, where, next) {
  validate('OSF', arguments)
  log.silly('fetchDirectoryPackageData', dep.name || dep.rawSpec)
  var manifest = path.join(dep.spec, 'package.json')
  readPackageJson(manifest, false, next)
}
// Per-process memoization of registry metadata documents, keyed by
// registry URL. Entries are cloned on read so callers can't mutate the
// cached copy.
var regCache = {}

// Resolve a named registry dependency (version / range / tag specifier)
// to the manifest of one concrete published version. Calls `next` with
// (er, pkg); errors with code ETARGET when no version satisfies the spec.
function fetchNamedPackageData (dep, opts, next) {
  validate('OOF', arguments)
  log.silly('fetchNamedPackageData', dep.name || dep.rawSpec)
  mapToRegistry(dep.name || dep.rawSpec, npm.config, iferr(next, function (url, auth) {
    if (regCache[url]) {
      // Already fetched this document in this process — reuse it.
      pickVersionFromRegistryDocument(clone(regCache[url]))
    } else {
      // Default to full metadata unless the caller explicitly opted out.
      var fullMetadata = opts.fullMetadata == null ? true : opts.fullMetadata
      npm.registry.get(url, {auth: auth, fullMetadata: fullMetadata}, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
    }
    // Stamp the chosen version manifest with provenance fields and finish.
    function thenAddMetadata (pkg) {
      pkg._from = dep.raw
      pkg._resolved = pkg.dist.tarball
      pkg._shasum = pkg.dist.shasum
      next(null, pkg)
    }
    function pickVersionFromRegistryDocument (pkg) {
      // First document wins; later fetches for the same URL hit the cache.
      if (!regCache[url]) regCache[url] = pkg
      var versions = Object.keys(pkg.versions)
      // Unofficial registries sometimes publish versions that aren't valid
      // semver; warn about them, then drop them from consideration.
      var invalidVersions = versions.filter(function (v) { return !semver.valid(v) })
      if (invalidVersions.length > 0) {
        log.warn('pickVersion', 'The package %s has invalid semver-version(s): %s. This usually only happens for unofficial private registries. ' +
          'You should delete or re-publish the invalid versions.', pkg.name, invalidVersions.join(', '))
      }
      versions = versions.filter(function (v) { return semver.valid(v) })
      if (dep.type === 'tag') {
        // dist-tag lookup: the tag must point at a version that exists.
        var tagVersion = pkg['dist-tags'][dep.spec]
        if (pkg.versions[tagVersion]) return thenAddMetadata(pkg.versions[tagVersion])
      } else {
        var picked = pickManifestFromRegistryMetadata(dep.spec, npm.config.get('tag'), versions, pkg)
        if (picked) return thenAddMetadata(picked.manifest)
      }
      // We didn't manage to find a compatible version
      // If this package was requested from cache, force hitting the network
      if (pkg._cached) {
        log.silly('fetchNamedPackageData', 'No valid target from cache, forcing network')
        return npm.registry.get(url, {
          auth: auth,
          skipCache: true
        }, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
      }
      // And failing that, we error out
      var targets = versions.length
        ? 'Valid install targets:\n' + versions.join(', ') + '\n'
        : 'No valid targets found.'
      var er = new Error('No compatible version found: ' +
        dep.raw + '\n' + targets)
      er.code = 'ETARGET'
      return next(er)
    }
  }))
}
// If `pkg` hasn't been pulled through the cache yet, add it now, merge
// the cache-derived internal fields back in, and re-run `asserter`.
// Returns true when a retry was kicked off (callers should bail out).
function retryWithCached (pkg, asserter, next) {
  if (!pkg._inCache) {
    cache.add(pkg._spec, null, pkg._where, false, iferr(next, function (cached) {
      // Copy only the underscore-prefixed (npm-internal) keys over.
      var keys = Object.keys(cached)
      for (var ii = 0; ii < keys.length; ++ii) {
        var key = keys[ii]
        if (key[0] === '_') pkg[key] = cached[key]
      }
      pkg._inCache = true
      return asserter(pkg, next)
    }))
  }
  return !pkg._inCache
}
// Attach the package's npm-shrinkwrap.json (parsed) as pkg._shrinkwrap,
// or null when it has none. Reads the shrinkwrap out of the cached
// tarball. Calls `next` with (er, pkg); parse failures get type ESHRINKWRAP.
module.exports.addShrinkwrap = function addShrinkwrap (pkg, next) {
  validate('OF', arguments)
  // Already resolved (possibly to "no shrinkwrap") on an earlier call.
  if (pkg._shrinkwrap !== undefined) return next(null, pkg)
  // The registry metadata told us outright there is no shrinkwrap.
  if (pkg._hasShrinkwrap === false) {
    pkg._shrinkwrap = null
    return next(null, pkg)
  }
  // Pull the package through the cache if needed; retryWithCached
  // re-enters addShrinkwrap once the cached metadata is merged in.
  if (retryWithCached(pkg, addShrinkwrap, next)) return
  pkg._shrinkwrap = null
  // FIXME: cache the shrinkwrap directly
  var pkgname = pkg.name
  var ver = pkg.version
  var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
  untarStream(tarball, function (er, untar) {
    if (er) {
      // A non-tarball in the cache just means "nothing to read";
      // any other error is fatal.
      if (er.code === 'ENOTTARBALL') {
        pkg._shrinkwrap = null
        return next()
      }
      return next(er)
    }
    var foundShrinkwrap = false
    untar.on('entry', function (entry) {
      // Only the top-level npm-shrinkwrap.json counts. (Dot escaped so
      // the pattern can't accidentally match e.g. "npm-shrinkwrapXjson".)
      if (!/^(?:[^\/]+[\/])npm-shrinkwrap\.json$/.test(entry.path)) return
      log.silly('addShrinkwrap', 'Found shrinkwrap in ' + pkgname + ' ' + entry.path)
      foundShrinkwrap = true
      var shrinkwrap = ''
      entry.on('data', function (chunk) {
        shrinkwrap += chunk
      })
      entry.on('end', function () {
        untar.close()
        log.silly('addShrinkwrap', 'Completed reading shrinkwrap in ' + pkgname)
        try {
          pkg._shrinkwrap = parseJSON(shrinkwrap)
        } catch (ex) {
          var er = new Error('Error parsing ' + pkgname + '@' + ver + "'s npm-shrinkwrap.json: " + ex.message)
          er.type = 'ESHRINKWRAP'
          return next(er)
        }
        next(null, pkg)
      })
      entry.resume()
    })
    untar.on('end', function () {
      // Archive exhausted without a shrinkwrap entry.
      if (!foundShrinkwrap) {
        pkg._shrinkwrap = null
        next(null, pkg)
      }
    })
  })
}
// Attach the package's bundled dependency tree as pkg._bundled (an array
// of child nodes), or null when nothing is bundled. Works by unpacking
// the cached tarball to a temp dir, reading the module tree, and cleaning
// up afterwards. Calls `next` with (er, pkg).
module.exports.addBundled = function addBundled (pkg, next) {
  validate('OF', arguments)
  // Only compute this once per package object.
  if (pkg._bundled !== undefined) return next(null, pkg)
  if (!pkg.bundleDependencies) return next(null, pkg)
  // Ensure the tarball is in the cache; bail if a retry was kicked off.
  if (retryWithCached(pkg, addBundled, next)) return
  pkg._bundled = null
  var tarball = path.join(cachedPackageRoot({name: pkg.name, version: pkg.version}), 'package.tgz')
  var target = tempFilename('unpack')
  getCacheStat(iferr(next, function (cs) {
    log.verbose('addBundled', 'extract', tarball)
    unpack(tarball, target, null, null, cs.uid, cs.gid, iferr(next, function () {
      log.silly('addBundled', 'read tarball')
      readPackageTree(target, function (er, tree) {
        log.silly('cleanup', 'remove extracted module')
        // Always remove the temp extraction, whether or not the read worked.
        rimraf(target, function () {
          if (tree) pkg._bundled = tree.children
          next(null, pkg)
        })
      })
    }))
  }))
}
// FIXME: hasGzipHeader / hasTarHeader / untarStream duplicate a lot
// of code from lib/utils/tar.js– these should be brought together.

// True when the buffer starts with the gzip magic bytes (0x1F 0x8B)
// followed by the deflate compression-method byte (0x08).
function hasGzipHeader (c) {
  if (c[0] !== 0x1F) return false
  if (c[1] !== 0x8B) return false
  return c[2] === 0x08
}
// True when the tar header magic is present: the bytes "ustar" at offset
// 257, followed at offset 262 by either 0x00 0x30 0x30 (POSIX ustar) or
// 0x20 0x20 0x00 (old GNU tar).
function hasTarHeader (c) {
  var magic = [0x75, 0x73, 0x74, 0x61, 0x72] // "ustar"
  for (var ii = 0; ii < magic.length; ++ii) {
    if (c[257 + ii] !== magic[ii]) return false
  }
  var posix = c[262] === 0x00 && c[263] === 0x30 && c[264] === 0x30
  var gnu = c[262] === 0x20 && c[263] === 0x20 && c[264] === 0x00
  return posix || gnu
}
// Open `tarball` and hand back (via `cb`) a tar.Parse stream of its
// entries, sniffing the first data chunk to decide whether to gunzip
// before untarring. Errors carry a descriptive `code`:
//   EREADFILE   - the file could not be read at all
//   ENOTTARBALL - the file is neither gzip nor tar
//   EGUNZIP     - the gunzip stream failed
//   EUNTAR      - the tar parse stream failed
function untarStream (tarball, cb) {
  validate('SF', arguments)
  cb = once(cb)
  var stream
  var file = stream = fs.createReadStream(tarball)
  // Every stream in the pipeline, so close() can unhook them all.
  var tounpipe = [file]
  file.on('error', function (er) {
    er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
    er.code = 'EREADFILE'
    cb(er)
  })
  file.on('data', function OD (c) {
    // Sniff only the first chunk, then detach this listener.
    if (hasGzipHeader(c)) {
      doGunzip()
    } else if (hasTarHeader(c)) {
      doUntar()
    } else {
      if (file.close) file.close()
      if (file.destroy) file.destroy()
      var er = new Error('Non-gzip/tarball ' + tarball)
      er.code = 'ENOTTARBALL'
      return cb(er)
    }
    file.removeListener('data', OD)
    // Re-emit the sniffed chunk so the freshly attached pipeline sees
    // the file from byte zero.
    file.emit('data', c)
    cb(null, stream)
  })
  // Insert a gunzip stage, then continue to the tar parser.
  function doGunzip () {
    var gunzip = stream.pipe(zlib.createGunzip())
    gunzip.on('error', function (er) {
      er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
      er.code = 'EGUNZIP'
      cb(er)
    })
    tounpipe.push(gunzip)
    stream = gunzip
    doUntar()
  }
  // Terminal stage: pipe into a tar entry parser.
  function doUntar () {
    var untar = stream.pipe(tar.Parse())
    untar.on('error', function (er) {
      er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
      er.code = 'EUNTAR'
      cb(er)
    })
    tounpipe.push(untar)
    stream = untar
    addClose()
  }
  // Give the final stream a close() that tears the whole pipeline down.
  function addClose () {
    stream.close = function () {
      tounpipe.forEach(function (stream) {
        unpipe(stream)
      })
      if (file.close) file.close()
      if (file.destroy) file.destroy()
    }
  }
}
|