summaryrefslogtreecommitdiff
path: root/deps/npm/lib
diff options
context:
space:
mode:
Diffstat (limited to 'deps/npm/lib')
-rw-r--r--deps/npm/lib/bugs.js2
-rw-r--r--deps/npm/lib/cache.js125
-rw-r--r--deps/npm/lib/config.js7
-rw-r--r--deps/npm/lib/docs.js2
-rw-r--r--deps/npm/lib/init.js12
-rw-r--r--deps/npm/lib/install.js524
-rw-r--r--deps/npm/lib/link.js4
-rw-r--r--deps/npm/lib/ls.js280
-rw-r--r--deps/npm/lib/npm.js54
-rw-r--r--deps/npm/lib/outdated.js4
-rw-r--r--deps/npm/lib/rebuild.js3
-rw-r--r--deps/npm/lib/search.js3
-rw-r--r--deps/npm/lib/set.js1
-rw-r--r--deps/npm/lib/unbuild.js20
-rw-r--r--deps/npm/lib/uninstall.js7
-rw-r--r--deps/npm/lib/utils/cmd-shim.js8
-rw-r--r--deps/npm/lib/utils/completion/file-completion.js2
-rw-r--r--deps/npm/lib/utils/config-defs.js17
-rw-r--r--deps/npm/lib/utils/error-handler.js11
-rw-r--r--deps/npm/lib/utils/excludes.js157
-rw-r--r--deps/npm/lib/utils/exec.js20
-rw-r--r--deps/npm/lib/utils/fetch.js9
-rw-r--r--deps/npm/lib/utils/find-prefix.js5
-rw-r--r--deps/npm/lib/utils/get-agent.js62
-rw-r--r--deps/npm/lib/utils/get.js6
-rw-r--r--deps/npm/lib/utils/ini.js10
-rw-r--r--deps/npm/lib/utils/lifecycle.js39
-rw-r--r--deps/npm/lib/utils/link.js6
-rw-r--r--deps/npm/lib/utils/mkdir-p.js191
-rw-r--r--deps/npm/lib/utils/npm-registry-client/get.js12
-rw-r--r--deps/npm/lib/utils/npm-registry-client/request.js5
-rw-r--r--deps/npm/lib/utils/output.js1
-rw-r--r--deps/npm/lib/utils/read-installed.js15
-rw-r--r--deps/npm/lib/utils/read-json.js101
-rw-r--r--deps/npm/lib/utils/set.js25
-rw-r--r--deps/npm/lib/utils/tar.js628
-rw-r--r--deps/npm/lib/utils/uid-number.js55
-rw-r--r--deps/npm/lib/version.js2
-rw-r--r--deps/npm/lib/view.js5
39 files changed, 1052 insertions, 1388 deletions
diff --git a/deps/npm/lib/bugs.js b/deps/npm/lib/bugs.js
index a3a017cc02..7982746cfb 100644
--- a/deps/npm/lib/bugs.js
+++ b/deps/npm/lib/bugs.js
@@ -28,7 +28,7 @@ function bugs (args, cb) {
}
if (repo) {
if (Array.isArray(repo)) repo = repo.shift()
- if (repo.url) repo = repo.url
+ if (repo.hasOwnProperty("url")) repo = repo.url
log.verbose(repo, "repository")
if (repo && repo.match(/^(https?:\/\/|git(:\/\/|@))github.com/)) {
return open(repo.replace(/^git(@|:\/\/)/, "http://")
diff --git a/deps/npm/lib/cache.js b/deps/npm/lib/cache.js
index 3dc1fb3a0a..b62e82dd14 100644
--- a/deps/npm/lib/cache.js
+++ b/deps/npm/lib/cache.js
@@ -3,11 +3,10 @@
/*
adding a folder:
1. tar into tmp/random/package.tgz
-2. untar into tmp/random/contents/{blah}
-3. rename {blah} to "package"
-4. tar tmp/random/contents/package to cache/n/v/package.tgz
-5. untar cache/n/v/package.tgz into cache/n/v/package
-6. rm tmp/random
+2. untar into tmp/random/contents/package, stripping one dir piece
+3. tar tmp/random/contents/package to cache/n/v/package.tgz
+4. untar cache/n/v/package.tgz into cache/n/v/package
+5. rm tmp/random
Adding a url:
1. fetch to tmp/random/package.tgz
@@ -32,7 +31,7 @@ exports.read = read
exports.clean = clean
exports.unpack = unpack
-var mkdir = require("./utils/mkdir-p.js")
+var mkdir = require("mkdirp")
, exec = require("./utils/exec.js")
, fetch = require("./utils/fetch.js")
, npm = require("./npm.js")
@@ -50,6 +49,7 @@ var mkdir = require("./utils/mkdir-p.js")
, tar = require("./utils/tar.js")
, fileCompletion = require("./utils/completion/file-completion.js")
, url = require("url")
+ , chownr = require("chownr")
cache.usage = "npm cache add <tarball file>"
+ "\nnpm cache add <folder>"
@@ -95,12 +95,10 @@ function cache (args, cb) {
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
-var cacheSeen = {}
function read (name, ver, forceBypass, cb) {
if (typeof cb !== "function") cb = forceBypass, forceBypass = true
var jsonFile = path.join(npm.cache, name, ver, "package", "package.json")
function c (er, data) {
- if (!er) cacheSeen[data._id] = data
if (data) deprCheck(data)
return cb(er, data)
}
@@ -110,10 +108,6 @@ function read (name, ver, forceBypass, cb) {
return addNamed(name, ver, c)
}
- if (name+"@"+ver in cacheSeen) {
- return cb(null, cacheSeen[name+"@"+ver])
- }
-
readJson(jsonFile, function (er, data) {
if (er) return addNamed(name, ver, c)
deprCheck(data)
@@ -126,9 +120,13 @@ function ls (args, cb) {
output = output || require("./utils/output.js")
args = args.join("/").split("@").join("/")
if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
+ var prefix = npm.config.get("cache")
+ if (0 === prefix.indexOf(process.env.HOME)) {
+ prefix = "~" + prefix.substr(process.env.HOME.length)
+ }
ls_(args, npm.config.get("depth"), function(er, files) {
output.write(files.map(function (f) {
- return path.join("~/.npm", f)
+ return path.join(prefix, f)
}).join("\n").trim(), function (er) {
return cb(er, files)
})
@@ -212,7 +210,7 @@ function add (args, cb) {
// see if the spec is a url
// otherwise, treat as name@version
- var p = url.parse(spec.replace(/^git\+/, "git")) || {}
+ var p = url.parse(spec) || {}
log.verbose(p, "parsed url")
// it could be that we got name@http://blah
@@ -230,11 +228,11 @@ function add (args, cb) {
case "https:":
return addRemoteTarball(spec, null, name, cb)
case "git:":
- case "githttp:":
- case "githttps:":
- case "gitrsync:":
- case "gitftp:":
- case "gitssh:":
+ case "git+http:":
+ case "git+https:":
+ case "git+rsync:":
+ case "git+ftp:":
+ case "git+ssh:":
//p.protocol = p.protocol.replace(/^git([^:])/, "$1")
return addRemoteGit(spec, p, name, cb)
default:
@@ -636,7 +634,7 @@ function getCacheStat (cb) {
}
function makeCacheDir (cb) {
- if (!process.getuid) return mkdir(npm.cache, npm.modes.exec, cb)
+ if (!process.getuid) return mkdir(npm.cache, cb)
var uid = +process.getuid()
, gid = +process.getgid()
@@ -647,18 +645,28 @@ function makeCacheDir (cb) {
}
if (uid !== 0 || !process.env.HOME) {
cacheStat = {uid: uid, gid: gid}
- return mkdir(npm.cache, npm.modes.exec, uid, gid, function (er) {
- return cb(er, cacheStat)
- })
+ return mkdir(npm.cache, afterMkdir)
}
+
fs.stat(process.env.HOME, function (er, st) {
if (er) return log.er(cb, "homeless?")(er)
cacheStat = st
log.silly([st.uid, st.gid], "uid, gid for cache dir")
- return mkdir(npm.cache, npm.modes.exec, st.uid, st.gid, function (er) {
+ return mkdir(npm.cache, afterMkdir)
+ })
+
+ function afterMkdir (er, made) {
+ if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) {
+ return cb(er, cacheStat)
+ }
+
+ if (!made) return cb(er, cacheStat)
+
+ // ensure that the ownership is correct.
+ chownr(made, cacheStat.uid, cacheStat.gid, function (er) {
return cb(er, cacheStat)
})
- })
+ }
}
@@ -736,9 +744,20 @@ function addLocalDirectory (p, name, cb) {
, tgz = placeDirect ? placed : tmptgz
, doFancyCrap = p.indexOf(npm.tmp) !== 0
&& p.indexOf(npm.cache) !== 0
- tar.pack(tgz, p, data, doFancyCrap, function (er) {
- if (er) return log.er(cb,"couldn't pack "+p+ " to "+tgz)(er)
- addLocalTarball(tgz, name, cb)
+ getCacheStat(function (er, cs) {
+ mkdir(path.dirname(tgz), function (er, made) {
+ if (er) return cb(er)
+ tar.pack(tgz, p, data, doFancyCrap, function (er) {
+ if (er) return log.er(cb,"couldn't pack "+p+ " to "+tgz)(er)
+
+ if (er || !cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()
+
+ chownr(made || tgz, cs.uid, cs.gid, function (er) {
+ if (er) return cb(er)
+ addLocalTarball(tgz, name, cb)
+ })
+ })
+ })
})
})
}
@@ -747,36 +766,15 @@ function addTmpTarball (tgz, name, cb) {
if (!cb) cb = name, name = ""
getCacheStat(function (er, cs) {
if (er) return cb(er)
- return addTmpTarball_(tgz, name, cs.uid, cs.gid, cb)
- })
-}
-
-function addTmpTarball_ (tgz, name, uid, gid, cb) {
- var contents = path.resolve(path.dirname(tgz)) // , "contents")
- tar.unpack( tgz, path.resolve(contents, "package")
- , null, null
- , uid, gid
- , function (er) {
- if (er) return log.er(cb, "couldn't unpack "+tgz+" to "+contents)(er)
- fs.readdir(contents, function (er, folder) {
- if (er) return log.er(cb, "couldn't readdir "+contents)(er)
- log.verbose(folder, "tarball contents")
- if (folder.length > 1) {
- folder = folder.filter(function (f) {
- return !f.match(/^\.|^tmp\.tgz$/)
- })
- }
- if (folder.length > 1) {
- log.warn(folder.slice(1).join("\n")
- ,"extra junk in folder, ignoring")
+ var contents = path.dirname(tgz)
+ tar.unpack( tgz, path.resolve(contents, "package")
+ , null, null
+ , cs.uid, cs.gid
+ , function (er) {
+ if (er) {
+ return cb(er)
}
- if (!folder.length) return cb(new Error("Empty package tarball"))
- folder = path.join(contents, folder[0])
- var newName = path.join(contents, "package")
- fs.rename(folder, newName, function (er) {
- if (er) return log.er(cb, "couldn't rename "+folder+" to package")(er)
- addLocalDirectory(newName, name, cb)
- })
+ addLocalDirectory(path.resolve(contents, "package"), name, cb)
})
})
}
@@ -792,11 +790,14 @@ function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
log.error("Could not read data for "+pkg+"@"+ver)
return cb(er)
}
- tar.unpack( path.join(npm.cache, pkg, ver, "package.tgz")
- , unpackTarget
- , dMode, fMode
- , uid, gid
- , cb )
+ npm.commands.unbuild([unpackTarget], function (er) {
+ if (er) return cb(er)
+ tar.unpack( path.join(npm.cache, pkg, ver, "package.tgz")
+ , unpackTarget
+ , dMode, fMode
+ , uid, gid
+ , cb )
+ })
})
}
diff --git a/deps/npm/lib/config.js b/deps/npm/lib/config.js
index 6781679f1d..a69c30ea69 100644
--- a/deps/npm/lib/config.js
+++ b/deps/npm/lib/config.js
@@ -2,10 +2,12 @@
module.exports = config
config.usage = "npm config set <key> <value>"
- + "\nnpm config get <key>"
+ + "\nnpm config get [<key>]"
+ "\nnpm config delete <key>"
+ "\nnpm config list"
+ "\nnpm config edit"
+ + "\nnpm set <key> <value>"
+ + "\nnpm get [<key>]"
var ini = require("./utils/ini.js")
, log = require("./utils/log.js")
@@ -113,6 +115,9 @@ function del (key, cb) {
}
function set (key, val, cb) {
+ if (key === undefined) {
+ return unknown("", cb)
+ }
if (val === undefined) {
if (key.indexOf("=") !== -1) {
var k = key.split("=")
diff --git a/deps/npm/lib/docs.js b/deps/npm/lib/docs.js
index de9f71c654..8af4c1bb66 100644
--- a/deps/npm/lib/docs.js
+++ b/deps/npm/lib/docs.js
@@ -25,7 +25,7 @@ function docs (args, cb) {
if (homepage) return open(homepage, cb)
if (repo) {
if (Array.isArray(repo)) repo = repo.shift()
- if (repo.url) repo = repo.url
+ if (repo.hasOwnProperty("url")) repo = repo.url
log.verbose(repo, "repository")
if (repo) {
return open(repo.replace(/^git(@|:\/\/)/, 'http://')
diff --git a/deps/npm/lib/init.js b/deps/npm/lib/init.js
index 2ddb3e86fc..7cd7da8e27 100644
--- a/deps/npm/lib/init.js
+++ b/deps/npm/lib/init.js
@@ -99,7 +99,10 @@ function init_ (data, folder, cb) {
, function (er, r) {
if (er) return cb(er)
if (r !== "none") {
- data.repository = (data.repository || {}).url = r
+ data.repository = (data.repository || {})
+ data.repository.url = r
+ } else {
+ delete data.repository
}
cb()
}
@@ -146,13 +149,6 @@ function init_ (data, folder, cb) {
(data.scripts = data.scripts || {}).test = t
}
)
- ( read
- , [ { prompt: "What versions of node does it run on? "
- , default: data.engines && data.engines.node || (eng) } ]
- , function (nodever) {
- (data.engines = data.engines || {}).node = nodever
- }
- )
(cleanupPaths, [data, folder])
(function (cb) {
try { data = readJson.processJson(data) }
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 1fb6dc30c0..5873ca968e 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -1,14 +1,15 @@
-
// npm install <pkg> <pkg> <pkg>
//
// See doc/install.md for more description
-// Managing "family" lists...
-// every time we dive into a deeper node_modules folder, the "family"
-// list that gets passed along uses the previous "family" list as
-// it's __proto__. Any "resolved precise dependency" things that aren't
-// already on this object get added, and then that's passed to the next
-// generation of installation.
+// Managing contexts...
+// there's a lot of state associated with an "install" operation, including
+// packages that are already installed, parent packages, current shrinkwrap, and
+// so on. We maintain this state in a "context" object that gets passed around.
+// every time we dive into a deeper node_modules folder, the "family" list that
+// gets passed along uses the previous "family" list as its __proto__. Any
+// "resolved precise dependency" things that aren't already on this object get
+// added, and then that's passed to the next generation of installation.
module.exports = install
@@ -20,7 +21,9 @@ install.usage = "npm install <tarball file>"
+ "\nnpm install <pkg>@<version>"
+ "\nnpm install <pkg>@<version range>"
+ "\n\nCan specify one or more: npm install ./foo.tgz bar@stable /some/folder"
- + "\nInstalls dependencies in ./package.json if no argument supplied"
+ + "\nIf no argument is supplied and ./npm-shrinkwrap.json is "
+ + "\npresent, installs dependencies specified in the shrinkwrap."
+ + "\nOtherwise, installs dependencies from ./package.json."
install.completion = function (opts, cb) {
// install can complete to a folder with a package.json, or any package.
@@ -64,8 +67,9 @@ var npm = require("./npm.js")
, relativize = require("./utils/relativize.js")
, output
, url = require("url")
- , mkdir = require("./utils/mkdir-p.js")
+ , mkdir = require("mkdirp")
, lifecycle = require("./utils/lifecycle.js")
+ , archy = require("archy")
function install (args, cb_) {
@@ -75,12 +79,15 @@ function install (args, cb_) {
output = output || require("./utils/output.js")
var tree = treeify(installed)
- , pretty = prettify(tree, installed)
+ , pretty = prettify(tree, installed).trim()
+
+ if (pretty) output.write(pretty, afterWrite)
+ else afterWrite()
- output.write(pretty, function (er) {
+ function afterWrite (er) {
if (er) return cb_(er)
save(where, installed, tree, pretty, cb_)
- })
+ }
}
// the /path/to/node_modules/..
@@ -100,39 +107,116 @@ function install (args, cb_) {
})
}
- mkdir(where, function (er) {
+ mkdir(where, function (er, made) {
if (er) return cb(er)
// install dependencies locally by default,
// or install current folder globally
if (!args.length) {
if (npm.config.get("global")) args = ["."]
- else return readJson( path.resolve(where, "package.json")
- , { dev: !npm.config.get("production") }
- , function (er, data) {
+ else return readDependencies( null
+ , where
+ , { dev: !npm.config.get("production") }
+ , function (er, data) {
if (er) return log.er(cb, "Couldn't read dependencies.")(er)
var deps = Object.keys(data.dependencies || {})
log.verbose([where, deps], "where, deps")
- var family = {}
- , ancestors = {}
- family[data.name] = ancestors[data.name] = data.version
+ var context = { family: {}
+ , ancestors: {}
+ , explicit: false
+ , parent: data
+ , wrap: null }
+ context.family[data.name] = context.ancestors[data.name] = data.version
installManyTop(deps.map(function (dep) {
var target = data.dependencies[dep]
, parsed = url.parse(target.replace(/^git\+/, "git"))
target = dep + "@" + target
return target
- }), where, family, ancestors, false, data, cb)
+ }), where, context, cb)
})
}
// initial "family" is the name:version of the root, if it's got
- // a pacakge.json file.
+ // a package.json file.
readJson(path.resolve(where, "package.json"), function (er, data) {
if (er) data = null
- var family = {}
- , ancestors = {}
- if (data) family[data.name] = ancestors[data.name] = data.version
+ var context = { family: {}
+ , ancestors: {}
+ , explicit: true
+ , parent: data
+ , wrap: null }
+ if (data) {
+ context.family[data.name] = context.ancestors[data.name] = data.version
+ }
var fn = npm.config.get("global") ? installMany : installManyTop
- fn(args, where, family, ancestors, true, data, cb)
+ fn(args, where, context, cb)
+ })
+ })
+}
+
+// reads dependencies for the package at "where". There are several cases,
+// depending on our current state and the package's configuration:
+//
+// 1. If "context" is specified, then we examine the context to see if there's a
+// shrinkwrap there. In that case, dependencies are read from the shrinkwrap.
+// 2. Otherwise, if an npm-shrinkwrap.json file is present, dependencies are
+// read from there.
+// 3. Otherwise, dependencies come from package.json.
+//
+// Regardless of which case we fall into, "cb" is invoked with a first argument
+// describing the full package (as though readJson had been used) but with
+// "dependencies" read as described above. The second argument to "cb" is the
+// shrinkwrap to use in processing this package's dependencies, which may be
+// "wrap" (in case 1) or a new shrinkwrap (in case 2).
+function readDependencies (context, where, opts, cb) {
+ var wrap = context ? context.wrap : null
+
+ readJson( path.resolve(where, "package.json")
+ , opts
+ , function (er, data) {
+ if (er) return cb(er)
+
+ if (wrap) {
+ log.verbose([where, wrap], "readDependencies: using existing wrap")
+ var rv = {}
+ Object.keys(data).forEach(function (key) {
+ rv[key] = data[key]
+ })
+ rv.dependencies = {}
+ Object.keys(wrap).forEach(function (key) {
+ log.verbose([key, wrap[key]], "from wrap")
+ var w = wrap[key]
+ rv.dependencies[key] = w.from || w.version
+ })
+ log.verbose([rv.dependencies], "readDependencies: returned deps")
+ return cb(null, rv, wrap)
+ }
+
+ var wrapfile = path.resolve(where, "npm-shrinkwrap.json")
+
+ fs.readFile(wrapfile, "utf8", function (er, wrapjson) {
+ if (er) {
+ log.verbose("readDependencies: using package.json deps")
+ return cb(null, data, null)
+ }
+
+ try {
+ var newwrap = JSON.parse(wrapjson)
+ } catch (ex) {
+ return cb(ex)
+ }
+
+ log.info(wrapfile, "using shrinkwrap file")
+ var rv = {}
+ Object.keys(data).forEach(function (key) {
+ rv[key] = data[key]
+ })
+ rv.dependencies = {}
+ Object.keys(newwrap.dependencies || {}).forEach(function (key) {
+ var w = newwrap.dependencies[key]
+ rv.dependencies[key] = w.from || w.version
+ })
+ log.verbose([rv.dependencies], "readDependencies: returned deps")
+ return cb(null, rv, newwrap.dependencies)
})
})
}
@@ -141,17 +225,28 @@ function install (args, cb_) {
// as dependencies to a package.json file.
// This is experimental.
function save (where, installed, tree, pretty, cb) {
- if (!npm.config.get("save") || npm.config.get("global")) {
+ if (!npm.config.get("save") &&
+ !npm.config.get("save-dev") &&
+ !npm.config.get("save-optional") ||
+ npm.config.get("global")) {
return cb(null, installed, tree, pretty)
}
+
// each item in the tree is a top-level thing that should be saved
// to the package.json file.
// The relevant tree shape is { <folder>: {what:<pkg>} }
var saveTarget = path.resolve(where, "package.json")
, things = Object.keys(tree).map(function (k) {
- return tree[k].what.split("@")
+ // if "what" was a url, then save that instead.
+ var t = tree[k]
+ , u = url.parse(t.from)
+ , w = t.what.split("@")
+ if (u && u.protocol) w[1] = t.from
+ return w
}).reduce(function (set, k) {
- var rangeDescriptor = semver.gte(k[1], "0.1.0") ? "~" : ""
+ var rangeDescriptor = semver.valid(k[1]) &&
+ semver.gte(k[1], "0.1.0")
+ ? "~" : ""
set[k[0]] = rangeDescriptor + k[1]
return set
}, {})
@@ -165,13 +260,18 @@ function save (where, installed, tree, pretty, cb) {
} catch (ex) {
er = ex
}
- if (er) return cb(null, installed, tree, pretty)
+ if (er) {
+ return cb(null, installed, tree, pretty)
- var deps = npm.config.get("dev") ? "devDependencies" : "dependencies"
- deps = data[deps] = data[deps] || {}
+ }
+
+ var deps = npm.config.get("save-optional") ? "optionalDependencies"
+ : npm.config.get("save-dev") ? "devDependencies"
+ : "dependencies"
+ data[deps] = data[deps] || {}
Object.keys(things).forEach(function (t) {
- deps[t] = things[t]
+ data[deps][t] = things[t]
})
data = JSON.stringify(data, null, 2) + "\n"
fs.writeFile(saveTarget, data, function (er) {
@@ -184,26 +284,48 @@ function save (where, installed, tree, pretty, cb) {
// Outputting *all* the installed modules is a bit confusing,
// because the length of the path does not make it clear
// that the submodules are not immediately require()able.
-// TODO: Show the complete tree, ls-style.
+// TODO: Show the complete tree, ls-style, but only if --long is provided
function prettify (tree, installed) {
- if (npm.config.get("parseable")) return parseable(installed)
- return Object.keys(tree).map(function (p) {
- p = tree[p]
- var c = ""
- if (p.children && p.children.length) {
- pref = "\n"
- var l = p.children.pop()
- c = p.children.map(function (c) {
- var gc = c.children && c.children.length
- ? " (" + c.children.map(function (gc) {
- return gc.what
- }).join(" ") + ")"
- : ""
- return "\n├── " + c.what + gc
- }).join("") + "\n└── " + l.what
+ if (npm.config.get("json")) {
+ function red (set, kv) {
+ set[kv[0]] = kv[1]
+ return set
}
- return [p.what, p.where, c].join(" ")
+ tree = Object.keys(tree).map(function (p) {
+ if (!tree[p]) return null
+ var what = tree[p].what.split("@")
+ , name = what.shift()
+ , version = what.join("@")
+ , o = { name: name, version: version, from: tree[p].from }
+ o.dependencies = tree[p].children.map(function P (dep) {
+ var what = dep.what.split("@")
+ , name = what.shift()
+ , version = what.join("@")
+ , o = { version: version, from: dep.from }
+ o.dependencies = dep.children.map(P).reduce(red, {})
+ return [name, o]
+ }).reduce(red, {})
+ return o
+ })
+
+ return JSON.stringify(tree, null, 2)
+ }
+ if (npm.config.get("parseable")) return parseable(installed)
+
+ return Object.keys(tree).map(function (p) {
+ return archy({ label: tree[p].what + " " + p
+ , nodes: (tree[p].children || []).map(function P (c) {
+ if (npm.config.get("long")) {
+ return { label: c.what, nodes: c.children.map(P) }
+ }
+ var g = c.children.map(function (g) {
+ return g.what
+ }).join(", ")
+ if (g) g = " (" + g + ")"
+ return c.what + g
+ })
+ })
}).join("\n")
}
@@ -226,11 +348,13 @@ function treeify (installed) {
, parent = r[2]
, where = r[1]
, what = r[0]
+ , from = r[4]
l[where] = { parentDir: parentDir
, parent: parent
, children: []
, where: where
- , what: what }
+ , what: what
+ , from: from }
return l
}, {})
@@ -252,10 +376,9 @@ function treeify (installed) {
// just like installMany, but also add the existing packages in
// where/node_modules to the family object.
-function installManyTop (what, where, family, ancestors, explicit, parent, cb_) {
-
+function installManyTop (what, where, context, cb_) {
function cb (er, d) {
- if (explicit || er) return cb_(er, d)
+ if (context.explicit || er) return cb_(er, d)
// since this wasn't an explicit install, let's build the top
// folder, so that `npm install` also runs the lifecycle scripts.
npm.commands.build([where], false, true, function (er) {
@@ -263,7 +386,7 @@ function installManyTop (what, where, family, ancestors, explicit, parent, cb_)
})
}
- if (explicit) return next()
+ if (context.explicit) return next()
readJson(path.join(where, "package.json"), function (er, data) {
if (er) return next(er)
@@ -272,21 +395,20 @@ function installManyTop (what, where, family, ancestors, explicit, parent, cb_)
function next (er) {
if (er) return cb(er)
- installManyTop_(what, where, family, ancestors, explicit, parent, cb)
+ installManyTop_(what, where, context, cb)
}
}
-function installManyTop_ (what, where, family, ancestors, explicit, parent, cb) {
+function installManyTop_ (what, where, context, cb) {
var nm = path.resolve(where, "node_modules")
- , names = explicit
+ , names = context.explicit
? what.map(function (w) { return w.split(/@/).shift() })
: []
fs.readdir(nm, function (er, pkgs) {
- if (er) return installMany(what, where, family, ancestors, explicit, parent, cb)
+ if (er) return installMany(what, where, context, cb)
pkgs = pkgs.filter(function (p) {
return !p.match(/^[\._-]/)
- && (!explicit || names.indexOf(p) === -1)
})
asyncMap(pkgs.map(function (p) {
return path.resolve(nm, p, "package.json")
@@ -299,36 +421,44 @@ function installManyTop_ (what, where, family, ancestors, explicit, parent, cb)
// add all the existing packages to the family list.
// however, do not add to the ancestors list.
packages.forEach(function (p) {
- family[p[0]] = p[1]
+ context.family[p[0]] = p[1]
})
- return installMany(what, where, family, ancestors, explicit, parent, cb)
+ return installMany(what, where, context, cb)
})
})
}
-function installMany (what, where, family, ancestors, explicit, parent, cb) {
- // 'npm install foo' should install the version of foo
- // that satisfies the dep in the current folder.
- // This will typically return immediately, since we already read
- // this file family, and it'll be cached.
- readJson(path.resolve(where, "package.json"), function (er, data) {
+function installMany (what, where, context, cb) {
+ // readDependencies takes care of figuring out whether the list of
+ // dependencies we'll iterate below comes from an existing shrinkwrap from a
+ // parent level, a new shrinkwrap at this level, or package.json at this
+ // level, as well as which shrinkwrap (if any) our dependencies should use.
+ readDependencies(context, where, {}, function (er, data, wrap) {
if (er) data = {}
- d = data.dependencies || {}
var parent = data
+ var d = data.dependencies || {}
+
+ // if we're explicitly installing "what" into "where", then the shrinkwrap
+ // for "where" doesn't apply. This would be the case if someone were adding
+ // a new package to a shrinkwrapped package. (data.dependencies will not be
+ // used here except to indicate what packages are already present, so
+ // there's no harm in using that.)
+ if (context.explicit) wrap = null
+
// what is a list of things.
// resolve each one.
asyncMap( what
- , targetResolver(where, family, ancestors, explicit, d, parent)
+ , targetResolver(where, context, d)
, function (er, targets) {
if (er) return cb(er)
// each target will be a data object corresponding
// to a package, folder, or whatever that is in the cache now.
- var newPrev = Object.create(family)
- , newAnc = Object.create(ancestors)
+ var newPrev = Object.create(context.family)
+ , newAnc = Object.create(context.ancestors)
newAnc[data.name] = data.version
targets.forEach(function (t) {
@@ -339,25 +469,41 @@ function installMany (what, where, family, ancestors, explicit, parent, cb) {
log.info(t._id, "into "+where)
})
asyncMap(targets, function (target, cb) {
- log(target._id, "installOne")
- installOne(target, where, newPrev, newAnc, parent, cb)
+ log.info(target._id, "installOne")
+ var newWrap = wrap ? wrap[target.name].dependencies || {} : null
+ var newContext = { family: newPrev
+ , ancestors: newAnc
+ , parent: parent
+ , explicit: false
+ , wrap: newWrap }
+ installOne(target, where, newContext, cb)
}, cb)
})
})
}
-function targetResolver (where, family, ancestors, explicit, deps, parent) {
- var alreadyInstalledManually = explicit ? [] : null
+function targetResolver (where, context, deps) {
+ var alreadyInstalledManually = context.explicit ? [] : null
, nm = path.resolve(where, "node_modules")
+ , parent = context.parent
+ , wrap = context.wrap
- if (!explicit) fs.readdir(nm, function (er, inst) {
+ if (!context.explicit) fs.readdir(nm, function (er, inst) {
if (er) return alreadyInstalledManually = []
asyncMap(inst, function (pkg, cb) {
readJson(path.resolve(nm, pkg, "package.json"), function (er, d) {
+ // error means it's not a package, most likely.
if (er) return cb(null, [])
- if (semver.satisfies(d.version, deps[d.name] || "*")) {
+
+ // if it's a bundled dep, then assume that anything there is valid.
+ // otherwise, make sure that it's a semver match with what we want.
+ var bd = parent.bundleDependencies
+ if (bd && bd.indexOf(d.name) !== -1 ||
+ semver.satisfies(d.version, deps[d.name] || "*")) {
return cb(null, d.name)
}
+
+ // something is there, but it's not satisfactory. Clobber it.
return cb(null, [])
})
}, function (er, inst) {
@@ -371,6 +517,7 @@ function targetResolver (where, family, ancestors, explicit, deps, parent) {
if (!alreadyInstalledManually) return setTimeout(function () {
resolver(what, cb)
}, to++)
+
// now we know what's been installed here manually,
// or tampered with in some way that npm doesn't want to overwrite.
if (alreadyInstalledManually.indexOf(what.split("@").shift()) !== -1) {
@@ -378,23 +525,56 @@ function targetResolver (where, family, ancestors, explicit, deps, parent) {
return cb(null, [])
}
- if (family[what] && semver.satisfies(family[what], deps[what] || "")) {
+ // check for a version installed higher in the tree.
+ // If installing from a shrinkwrap, it must match exactly.
+ if (context.family[what]) {
+ if (wrap && wrap[what].version === context.family[what]) {
+ log.verbose(what, "using existing (matches shrinkwrap)")
+ return cb(null, [])
+ }
+ }
+
+ // if it's identical to its parent, then it's probably someone
+ // doing `npm install foo` inside of the foo project. Print
+ // a warning, and skip it.
+ if (parent && parent.name === what && !npm.config.get("force")) {
+ log.warn("Refusing to install "+what+" as a dependency of itself"
+ ,"install")
return cb(null, [])
}
- if (deps[what]) {
+ if (wrap) {
+ name = what.split(/@/).shift()
+ if (wrap[name]) {
+ var wrapTarget = wrap[name].from || wrap[name].version
+ log.verbose("resolving "+what+" to "+wrapTarget, "shrinkwrap")
+ what = name + "@" + wrapTarget
+ } else {
+ log.verbose("skipping "+what+" (not in shrinkwrap)", "shrinkwrap")
+ }
+ } else if (deps[what]) {
what = what + "@" + deps[what]
}
cache.add(what, function (er, data) {
if (er && parent && parent.optionalDependencies &&
- parent.optionalDependencies.hasOwnProperty(what.split("@").shift())) {
+ parent.optionalDependencies.hasOwnProperty(what.split("@")[0])) {
log.warn(what, "optional dependency failed, continuing")
+ log.verbose([what, er], "optional dependency failed, continuing")
return cb(null, [])
}
- if (!er && data && family[data.name] === data.version) {
+
+ if (!er &&
+ data &&
+ !context.explicit &&
+ context.family[data.name] === data.version &&
+ !npm.config.get("force")) {
+ log.info(data.name + "@" + data.version, "already installed")
return cb(null, [])
}
+
+ if (data) data._from = what
+
return cb(er, data)
})
}
@@ -402,20 +582,33 @@ function targetResolver (where, family, ancestors, explicit, deps, parent) {
// we've already decided to install this. if anything's in the way,
// then uninstall it first.
-function installOne (target, where, family, ancestors, parent, cb) {
+function installOne (target, where, context, cb) {
// the --link flag makes this a "link" command if it's at the
// the top level.
if (where === npm.prefix && npm.config.get("link")
&& !npm.config.get("global")) {
- return localLink(target, where, family, ancestors, parent, cb)
+ return localLink(target, where, context, cb)
}
- installOne_(target, where, family, ancestors, parent, cb)
+ installOne_(target, where, context, function (er, installedWhat) {
+
+ // check if this one is optional to its parent.
+ if (er && context.parent && context.parent.optionalDependencies &&
+ context.parent.optionalDependencies.hasOwnProperty(target.name)) {
+ log.warn(target._id, "optional dependency failed, continuing")
+ log.verbose([target._id, er], "optional dependency failed, continuing")
+ er = null
+ }
+
+ cb(er, installedWhat)
+ })
+
}
-function localLink (target, where, family, ancestors, parent, cb) {
+function localLink (target, where, context, cb) {
log.verbose(target._id, "try to link")
var jsonFile = path.resolve( npm.dir, target.name
, "package.json" )
+ , parent = context.parent
readJson(jsonFile, function (er, data) {
if (er || data._id === target._id) {
@@ -437,7 +630,7 @@ function localLink (target, where, family, ancestors, parent, cb) {
} else {
log.verbose(target._id, "install locally (no link)")
- installOne_(target, where, family, ancestors, parent, cb)
+ installOne_(target, where, context, cb)
}
})
}
@@ -458,23 +651,25 @@ function resultList (target, where, parentId) {
return [ target._id
, targetFolder
, prettyWhere && parentId
- , parentId && prettyWhere ]
+ , parentId && prettyWhere
+ , target._from ]
}
-function installOne_ (target, where, family, ancestors, parent, cb) {
+function installOne_ (target, where, context, cb) {
var nm = path.resolve(where, "node_modules")
, targetFolder = path.resolve(nm, target.name)
, prettyWhere = relativize(where, process.cwd() + "/x")
+ , parent = context.parent
if (prettyWhere === ".") prettyWhere = null
chain
( [ [checkEngine, target]
- , [checkCycle, target, ancestors]
+ , [checkPlatform, target]
+ , [checkCycle, target, context.ancestors]
, [checkGit, targetFolder]
- , [write, target, targetFolder, family, ancestors] ]
+ , [write, target, targetFolder, context] ]
, function (er, d) {
- log.verbose(target._id, "installOne cb")
if (er) return cb(er)
d.push(resultList(target, where, parent && parent._id))
cb(er, d)
@@ -499,6 +694,58 @@ function checkEngine (target, cb) {
return cb()
}
+function checkPlatform (target, cb) {
+ var platform = process.platform
+ , arch = process.arch
+ , osOk = true
+ , cpuOk = true
+ , force = npm.config.get("force")
+
+ if (force) {
+ return cb()
+ }
+
+ if (target.os) {
+ osOk = checkList(platform, target.os)
+ }
+ if (target.cpu) {
+ cpuOk = checkList(arch, target.cpu)
+ }
+ if (!osOk || !cpuOk) {
+ var er = new Error("Unsupported")
+ er.errno = npm.EBADPLATFORM
+ er.os = target.os || ['any']
+ er.cpu = target.cpu || ['any']
+ er.pkgid = target._id
+ return cb(er)
+ }
+ return cb()
+}
+
+function checkList (value, list) {
+ var tmp
+ , match = false
+ , blc = 0
+ if (typeof list === "string") {
+ list = [list]
+ }
+ if (list.length === 1 && list[0] === "any") {
+ return true;
+ }
+ for (var i = 0; i < list.length; ++i) {
+ tmp = list[i]
+ if (tmp[0] === '!') {
+ tmp = tmp.slice(1)
+ if (tmp === value) {
+ return false;
+ }
+ ++blc
+ } else {
+ match = match || tmp === value
+ }
+ }
+ return match || blc === list.length
+}
function checkCycle (target, ancestors, cb) {
// there are some very rare and pathological edge-cases where
@@ -513,8 +760,16 @@ function checkCycle (target, ancestors, cb) {
// A more correct, but more complex, solution would be to symlink
// the deeper thing into the new location.
// Will do that if anyone whines about this irl.
-
- var p = Object.getPrototypeOf(ancestors)
+ //
+ // Note: `npm install foo` inside of the `foo` package will abort
+ // earlier if `--force` is not set. However, if it IS set, then
+ // we need to still fail here, but just skip the first level. Of
+ // course, it'll still fail eventually if it's a true cycle, and
+ // leave things in an undefined state, but that's what is to be
+ // expected when `--force` is used. That is why getPrototypeOf
+ // is used *twice* here: to skip the first level of repetition.
+
+ var p = Object.getPrototypeOf(Object.getPrototypeOf(ancestors))
, name = target.name
, version = target.version
while (p && p !== Object.prototype && p[name] !== version) {
@@ -556,16 +811,17 @@ function checkGit_ (folder, cb) {
})
}
-function write (target, targetFolder, family, ancestors, cb_) {
+function write (target, targetFolder, context, cb_) {
var up = npm.config.get("unsafe-perm")
, user = up ? null : npm.config.get("user")
, group = up ? null : npm.config.get("group")
+ , family = context.family
function cb (er, data) {
// cache.unpack returns the data object, and all we care about
// is the list of installed packages from that last thing.
if (!er) return cb_(er, data)
- log.error(target._id,"error installing")
+
if (false === npm.config.get("rollback")) return cb_(er)
npm.commands.unbuild([targetFolder], function (er2) {
if (er2) log.error(er2, "error rolling back "+target._id)
@@ -573,33 +829,67 @@ function write (target, targetFolder, family, ancestors, cb_) {
})
}
+ var bundled = []
+
chain
- ( [ [ npm.commands.unbuild, [targetFolder] ]
- , [ cache.unpack, target.name, target.version, targetFolder
+ ( [ [ cache.unpack, target.name, target.version, targetFolder
, null, null, user, group ]
- , [ lifecycle, target, "preinstall", targetFolder ] ]
+ , [ fs, "writeFile"
+ , path.resolve(targetFolder, "package.json")
+ , JSON.stringify(target, null, 2) + "\n" ]
+ , [ lifecycle, target, "preinstall", targetFolder ]
+ , function (cb) {
+ if (!target.bundleDependencies) return cb()
+
+ var bd = path.resolve(targetFolder, "node_modules")
+ fs.readdir(bd, function (er, b) {
+ // nothing bundled, maybe
+ if (er) return cb()
+ bundled = b || []
+ cb()
+ })
+ } ]
// nest the chain so that we can throw away the results returned
// up until this point, since we really don't care about it.
- , function (er) {
+ , function X (er) {
if (er) return cb(er)
- var deps = Object.keys(target.dependencies || {})
- installMany(deps.filter(function (d) {
- // prefer to not install things that are satisfied by
- // something in the "family" list.
- return !semver.satisfies(family[d], target.dependencies[d])
- }).map(function (d) {
- var t = target.dependencies[d]
- , parsed = url.parse(t.replace(/^git\+/, "git"))
- t = d + "@" + t
- return t
- }), targetFolder, family, ancestors, false, target, function (er, d) {
- log.verbose(targetFolder, "about to build")
- if (er) return cb(er)
- npm.commands.build( [targetFolder]
- , npm.config.get("global")
- , true
- , function (er) { return cb(er, d) })
+
+ // before continuing to installing dependencies, check for a shrinkwrap.
+ readDependencies(context, targetFolder, {}, function (er, data, wrap) {
+ var deps = Object.keys(data.dependencies || {})
+
+ // don't install bundleDependencies, unless they're missing.
+ if (data.bundleDependencies) {
+ deps = deps.filter(function (d) {
+ return data.bundleDependencies.indexOf(d) === -1 ||
+ bundled.indexOf(d) === -1
+ })
+ }
+
+ var newcontext = { family: family
+ , ancestors: context.ancestors
+ , parent: target
+ , explicit: false
+ , wrap: wrap }
+ installMany(deps.filter(function (d) {
+ // prefer to not install things that are satisfied by
+ // something in the "family" list, unless we're installing
+ // from a shrinkwrap.
+ return wrap || !semver.satisfies(family[d], data.dependencies[d])
+ }).map(function (d) {
+ var t = data.dependencies[d]
+ , parsed = url.parse(t.replace(/^git\+/, "git"))
+ t = d + "@" + t
+ return t
+ }), targetFolder, newcontext, function (er, d) {
+ log.verbose(targetFolder, "about to build")
+ if (er) return cb(er)
+ npm.commands.build( [targetFolder]
+ , npm.config.get("global")
+ , true
+ , function (er) { return cb(er, d) })
+ })
})
- } )
+ })
}
diff --git a/deps/npm/lib/link.js b/deps/npm/lib/link.js
index fea6606666..3049884cab 100644
--- a/deps/npm/lib/link.js
+++ b/deps/npm/lib/link.js
@@ -141,6 +141,7 @@ function resultPrinter (pkg, src, dest, rp, cb) {
var where = relativize(dest, path.resolve(process.cwd(),"x"))
rp = (rp || "").trim()
src = (src || "").trim()
+ // XXX If --json is set, then look up the data from the package.json
if (npm.config.get("parseable")) {
return parseableOutput(dest, rp || src, cb)
}
@@ -150,6 +151,9 @@ function resultPrinter (pkg, src, dest, rp, cb) {
}
function parseableOutput (dest, rp, cb) {
+ // XXX this should match ls --parseable and install --parseable
+ // look up the data from package.json, format it the same way.
+ //
// link is always effectively "long", since it doesn't help much to
// *just* print the target folder.
// However, we don't actually ever read the version number, so
diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js
index 33336cf63e..276530c354 100644
--- a/deps/npm/lib/ls.js
+++ b/deps/npm/lib/ls.js
@@ -13,6 +13,7 @@ var npm = require("./npm.js")
, log = require("./utils/log.js")
, relativize = require("./utils/relativize.js")
, path = require("path")
+ , archy = require("archy")
ls.usage = "npm ls"
@@ -26,13 +27,102 @@ function ls (args, silent, cb) {
var dir = path.resolve(npm.dir, "..")
readInstalled(dir, function (er, data) {
- if (er || silent) return cb(er, data)
+ var lite = getLite(bfsify(data))
+ if (er || silent) return cb(er, data, lite)
+
var long = npm.config.get("long")
- var out = makePretty(bfsify(data), long, dir).join("\n")
- output.write(out, function (er) { cb(er, data) })
+ , json = npm.config.get("json")
+ , out
+ if (json) {
+ var seen = []
+ var d = long ? bfsify(data) : lite
+ // the raw data can be circular
+ out = JSON.stringify(d, function (k, o) {
+ if (typeof o === "object") {
+ if (-1 !== seen.indexOf(o)) return "[Circular]"
+ seen.push(o)
+ }
+ return o
+ }, 2)
+ } else if (npm.config.get("parseable")) {
+ out = makeParseable(bfsify(data), long, dir)
+ } else if (data) {
+ out = makeArchy(bfsify(data), long, dir)
+ }
+ output.write(out, function (er) { cb(er, data, lite) })
})
}
+function alphasort (a, b) {
+ a = a.toLowerCase()
+ b = b.toLowerCase()
+ return a > b ? 1
+ : a < b ? -1 : 0
+}
+
+function getLite (data, noname) {
+ var lite = {}
+ , maxDepth = npm.config.get("depth")
+ , url = require("url")
+
+ if (!noname && data.name) lite.name = data.name
+ if (data.version) lite.version = data.version
+ if (data.extraneous) {
+ lite.extraneous = true
+ lite.problems = lite.problems || []
+ lite.problems.push( "extraneous: "
+ + data.name + "@" + data.version
+ + " " + (data.path || "") )
+ }
+
+ if (data._from) {
+ var from = data._from
+ if (from.indexOf(data.name + "@") === 0) {
+ from = from.substr(data.name.length + 1)
+ }
+ var u = url.parse(from)
+ if (u.protocol) lite.from = from
+ }
+
+ if (data.invalid) {
+ lite.invalid = true
+ lite.problems = lite.problems || []
+ lite.problems.push( "invalid: "
+ + data.name + "@" + data.version
+ + " " + (data.path || "") )
+ }
+
+ if (data.dependencies) {
+ var deps = Object.keys(data.dependencies)
+ if (deps.length) lite.dependencies = deps.map(function (d) {
+ var dep = data.dependencies[d]
+ if (typeof dep === "string") {
+ lite.problems = lite.problems || []
+ var p
+ if (data.depth >= maxDepth) {
+ p = "max depth reached: "
+ } else {
+ p = "missing: "
+ }
+ p += d + "@" + dep
+ + ", required by "
+ + data.name + "@" + data.version
+ lite.problems.push(p)
+ return [d, { required: dep, missing: true }]
+ }
+ return [d, getLite(dep, true)]
+ }).reduce(function (deps, d) {
+ if (d[1].problems) {
+ lite.problems = lite.problems || []
+ lite.problems.push.apply(lite.problems, d[1].problems)
+ }
+ deps[d[0]] = d[1]
+ return deps
+ }, {})
+ }
+ return lite
+}
+
function bfsify (root, current, queue, seen) {
// walk over the data, and turn it from this:
// +-- a
@@ -45,7 +135,7 @@ function bfsify (root, current, queue, seen) {
// which looks nicer
current = current || root
queue = queue || []
- seen = seen || []
+ seen = seen || [root]
var deps = current.dependencies = current.dependencies || {}
Object.keys(deps).forEach(function (d) {
var dep = deps[d]
@@ -67,103 +157,115 @@ function bfsify (root, current, queue, seen) {
}
-function makePretty (data, long, dir, prefix, list) {
- var top = !list
- list = list || []
- prefix = prefix || ""
- list.push(format(data, long, prefix, dir))
- var deps = data.dependencies || {}
- , childPref = prefix.split("├─").join("│ ")
- .split("└─").join(" ")
- , depList = Object.keys(deps)
- , depLast = depList.length - 1
- , maxDepth = npm.config.get("depth")
- Object.keys(deps).sort(function (a, b) {
- return a > b ? 1 : -1
- }).forEach(function (d, i) {
- var depData = deps[d]
- if (typeof depData === "string") {
- if (data.depth < maxDepth) {
- var p = data.link || data.path
- log.warn("Unmet dependency in "+p, d+" "+deps[d])
- depData = npm.config.get("parseable")
- ? ( npm.config.get("long")
- ? path.resolve(data.path, "node_modules", d)
- + ":"+d+"@"+JSON.stringify(depData)+":INVALID:MISSING"
- : "" )
- : "─ \033[31;40mUNMET DEPENDENCY\033[0m "+d+" "+depData
- } else {
- if (npm.config.get("parseable")) {
- depData = path.resolve(data.path, "node_modules", d)
- + (npm.config.get("long")
- ? ":" + d + "@" + JSON.stringify(depData)
- + ":" // no realpath resolved
- + ":MAXDEPTH"
- : "")
- } else {
- depData = "─ "+d+"@'"+depData +"' (max depth reached)"
- }
- }
- }
- var c = i === depLast ? "└─" : "├─"
- makePretty(depData, long, dir, childPref + c, list)
- })
- if (top && list.length === 1 && !data._id) {
- if (!npm.config.get("parseable")) {
- list.push("(empty)")
- } else if (npm.config.get("long")) list[0] += ":EMPTY"
- }
- return list.filter(function (l) { return l && l.trim() })
+function makeArchy (data, long, dir) {
+ var out = makeArchy_(data, long, dir, 0)
+ return archy(out, "", { unicode: npm.config.get("unicode") })
}
-function ugly (data) {
+function makeArchy_ (data, long, dir, depth, parent, d) {
if (typeof data === "string") {
+ if (depth < npm.config.get("depth")) {
+ // just missing
+ var p = parent.link || parent.path
+ log.warn("Unmet dependency in "+p, d+" "+data)
+ data = "\033[31;40mUNMET DEPENDENCY\033[0m " + d + " " + data
+ } else {
+ data = d+"@'"+ data +"' (max depth reached)"
+ }
return data
}
- if (!npm.config.get("long")) return data.path
- return data.path
- + ":" + (data._id || "")
- + ":" + (data.realPath !== data.path ? data.realPath : "")
- + (data.extraneous ? ":EXTRANEOUS" : "")
- + (data.invalid ? ":INVALID" : "")
-}
+ var out = {}
+ // the top level is a bit special.
+ out.label = data._id ? data._id + " " : ""
+ if (data.link) out.label += "-> " + data.link
-function format (data, long, prefix, dir) {
- if (npm.config.get("parseable")) return ugly(data)
- if (typeof data === "string") {
- return prefix + data
- }
-// console.log([data.path, dir], "relativize")
- var depLen = Object.keys(data.dependencies).length
- , space = prefix.split("├─").join("│ ")
- .split("└─").join(" ")
- + (depLen ? "" : " ")
- , rel = relativize(data.path || "", dir)
- , l = prefix
- + (rel === "." ? "" : depLen ? "┬ " : "─ ")
- + (data._id ? data._id + " " : "")
- + (data.link ? "-> " + data.link : "") + ""
- + (rel === "." && !(long && data._id) ? dir : "")
if (data.invalid) {
- if (data.realName !== data.name) l += " ("+data.realName+")"
- l += " \033[31;40minvalid\033[0m"
+ if (data.realName !== data.name) out.label += " ("+data.realName+")"
+ out.label += " \033[31;40minvalid\033[0m"
}
- if (data.extraneous && rel !== ".") {
- l += " \033[32;40mextraneous\033[0m"
+
+ if (data.extraneous && data.path !== dir) {
+ out.label += " \033[32;40mextraneous\033[0m"
+ }
+
+ if (long) {
+ if (dir === data.path) out.label += "\n" + dir
+ out.label += "\n" + getExtras(data, dir)
+ } else if (dir === data.path) {
+ out.label += dir
+ }
+
+ // now all the children.
+ out.nodes = Object.keys(data.dependencies || {})
+ .sort(alphasort).map(function (d) {
+ return makeArchy_(data.dependencies[d], long, dir, depth + 1, data, d)
+ })
+
+ if (out.nodes.length === 0 && data.path === dir) {
+ out.nodes = ["(empty)"]
}
- if (!long || !data._id) return l
+
+ return out
+}
+
+function getExtras (data, dir) {
var extras = []
- if (rel !== ".") extras.push(rel)
- else extras.push(dir)
+ , rel = relativize(data.path || "", dir)
+ , url = require("url")
+
if (data.description) extras.push(data.description)
if (data.repository) extras.push(data.repository.url)
if (data.homepage) extras.push(data.homepage)
- extras = extras.filter(function (e) { return e })
- var lastExtra = !depLen && extras.length - 1
- l += extras.map(function (e, i) {
- var indent = !depLen ? " " : "│ "
- return "\n" + space + indent + e
- }).join("")
- return l
+ if (data._from) {
+ var from = data._from
+ if (from.indexOf(data.name + "@") === 0) {
+ from = from.substr(data.name.length + 1)
+ }
+ var u = url.parse(from)
+ if (u.protocol) extras.push(from)
+ }
+ return extras.join("\n")
+}
+
+
+function makeParseable (data, long, dir, depth, parent, d) {
+ depth = depth || 0
+
+ return [ makeParseable_(data, long, dir, depth, parent, d) ]
+ .concat(Object.keys(data.dependencies || {})
+ .sort(alphasort).map(function (d) {
+ return makeParseable(data.dependencies[d], long, dir, depth + 1, data, d)
+ }))
+ .join("\n")
+}
+
+function makeParseable_ (data, long, dir, depth, parent, d) {
+ if (typeof data === "string") {
+ if (data.depth < npm.config.get("depth")) {
+ var p = parent.link || parent.path
+ log.warn("Unmet dependency in "+p, d+" "+data)
+ data = npm.config.get("long")
+ ? path.resolve(parent.path, "node_modules", d)
+ + ":"+d+"@"+JSON.stringify(data)+":INVALID:MISSING"
+ : ""
+ } else {
+ data = path.resolve(data.path, "node_modules", d)
+ + (npm.config.get("long")
+ ? ":" + d + "@" + JSON.stringify(data)
+ + ":" // no realpath resolved
+ + ":MAXDEPTH"
+ : "")
+ }
+
+ return data
+ }
+
+ if (!npm.config.get("long")) return data.path
+
+ return data.path
+ + ":" + (data._id || "")
+ + ":" + (data.realPath !== data.path ? data.realPath : "")
+ + (data.extraneous ? ":EXTRANEOUS" : "")
+ + (data.invalid ? ":INVALID" : "")
}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index de68393d39..2db21e34de 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -17,8 +17,6 @@ require("path").SPLIT_CHAR = process.platform === "win32" ? "\\" : "/"
var EventEmitter = require("events").EventEmitter
, npm = module.exports = new EventEmitter
, config = require("./config.js")
- , set = require("./utils/set.js")
- , get = require("./utils/get.js")
, ini = require("./utils/ini.js")
, log = require("./utils/log.js")
, fs = require("graceful-fs")
@@ -27,11 +25,28 @@ var EventEmitter = require("events").EventEmitter
, which = require("which")
, semver = require("semver")
, findPrefix = require("./utils/find-prefix.js")
- , getUid = require("./utils/uid-number.js")
- , mkdir = require("./utils/mkdir-p.js")
+ , getUid = require("uid-number")
+ , mkdirp = require("mkdirp")
, slide = require("slide")
, chain = slide.chain
+// /usr/local is often a read-only fs, which is not
+// well handled by node or mkdirp. Just double-check
+// in the case of errors when making the prefix dirs.
+function mkdir (p, cb) {
+ mkdirp(p, function (er, made) {
+ // it could be that we couldn't create it, because it
+ // already exists, and is on a read-only fs.
+ if (er) {
+ return fs.stat(p, function (er2, st) {
+ if (er2 || !st.isDirectory()) return cb(er)
+ return cb(null, made)
+ })
+ }
+ return cb(er, made)
+ })
+}
+
npm.commands = {}
npm.ELIFECYCLE = {}
npm.E404 = {}
@@ -40,30 +55,7 @@ npm.EJSONPARSE = {}
npm.EISGIT = {}
npm.ECYCLE = {}
npm.ENOTSUP = {}
-
-// HACK for windows
-if (process.platform === "win32") {
- // stub in unavailable methods from process and fs binding
- if (!process.getuid) process.getuid = function() {}
- if (!process.getgid) process.getgid = function() {}
- var fsBinding = process.binding("fs")
- if (!fsBinding.chown) fsBinding.chown = function() {
- var cb = arguments[arguments.length - 1]
- if (typeof cb == "function") cb()
- }
-
- // patch rename/renameSync, but this should really be fixed in node
- var _fsRename = fs.rename
- , _fsPathPatch
- _fsPathPatch = function(p) {
- return p && p.replace(/\\/g, "/") || p;
- }
- fs.rename = function(p1, p2) {
- arguments[0] = _fsPathPatch(p1)
- arguments[1] = _fsPathPatch(p2)
- return _fsRename.apply(fs, arguments);
- }
-}
+npm.EBADPLATFORM = {}
try {
// startup, ok to do this synchronously
@@ -112,6 +104,7 @@ var commandCache = {}
, "unstar": "star" // same function
, "apihelp" : "help"
, "login": "adduser"
+ , "add-user": "adduser"
}
, aliasNames = Object.keys(aliases)
@@ -138,6 +131,7 @@ var commandCache = {}
, "unpublish"
, "owner"
, "deprecate"
+ , "shrinkwrap"
, "help"
, "help-search"
@@ -309,7 +303,7 @@ function loadPrefix (npm, conf, cb) {
})
// the prefix MUST exist, or else nothing works.
if (!npm.config.get("global")) {
- mkdir(p, npm.modes.exec, null, null, true, next)
+ mkdir(p, next)
} else {
next(er)
}
@@ -322,7 +316,7 @@ function loadPrefix (npm, conf, cb) {
, enumerable : true
})
// the prefix MUST exist, or else nothing works.
- mkdir(gp, npm.modes.exec, null, null, true, next)
+ mkdir(gp, next)
})
var i = 2
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index 496dfbd426..e883abd359 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -48,6 +48,10 @@ function makePretty (p) {
, dir = path.resolve(p[0], "node_modules", dep)
, has = p[2]
, want = p[3]
+
+ // XXX add --json support
+ // Should match (more or less) the output of ls --json
+
if (parseable) {
var str = dir
if (npm.config.get("long")) {
diff --git a/deps/npm/lib/rebuild.js b/deps/npm/lib/rebuild.js
index fc5f96825d..0e1d56a1de 100644
--- a/deps/npm/lib/rebuild.js
+++ b/deps/npm/lib/rebuild.js
@@ -31,7 +31,6 @@ function rebuild (args, cb) {
function cleanBuild (folders, set, cb) {
// https://github.com/isaacs/npm/issues/1872
- // If there's a makefile, try 'make clean'
// If there's a wscript, try 'node-waf clean'
// But don't die on either of those if they fail.
// Just a best-effort kind of deal.
@@ -41,8 +40,6 @@ function cleanBuild (folders, set, cb) {
if (er) return cb(er)
if (files.indexOf("wscript") !== -1) {
exec("node-waf", ["clean"], null, false, f, thenBuild)
- } else if (files.indexOf("Makefile") !== -1) {
- exec("make", ["clean"], null, false, f, thenBuild)
} else thenBuild()
})
function thenBuild (er) {
diff --git a/deps/npm/lib/search.js b/deps/npm/lib/search.js
index 92f4319f58..213390eb08 100644
--- a/deps/npm/lib/search.js
+++ b/deps/npm/lib/search.js
@@ -137,7 +137,8 @@ function prettify (data, args) {
, stdout = process.stdout
, cols = !tty.isatty(stdout.fd) ? Infinity
: stdout._handle ? stdout._handle.getWindowSize()[0]
- : tty.getWindowSize()[1]
+ : process.stdout.getWindowSize()[0]
+ cols = (cols == 0) ? Infinity : cols
} catch (ex) { cols = Infinity }
// name, desc, author, keywords
diff --git a/deps/npm/lib/set.js b/deps/npm/lib/set.js
index d821095157..c83602ec1f 100644
--- a/deps/npm/lib/set.js
+++ b/deps/npm/lib/set.js
@@ -8,5 +8,6 @@ var npm = require("./npm.js")
set.completion = npm.commands.config.completion
function set (args, cb) {
+ if (!args.length) return cb(set.usage)
npm.commands.config(["set"].concat(args), cb)
}
diff --git a/deps/npm/lib/unbuild.js b/deps/npm/lib/unbuild.js
index f4bd008a66..771eddf7db 100644
--- a/deps/npm/lib/unbuild.js
+++ b/deps/npm/lib/unbuild.js
@@ -3,6 +3,7 @@ unbuild.usage = "npm unbuild <folder>\n(this is plumbing)"
var readJson = require("./utils/read-json.js")
, rm = require("rimraf")
+ , gentlyRm = require("./utils/gently-rm.js")
, npm = require("./npm.js")
, path = require("path")
, fs = require("graceful-fs")
@@ -56,15 +57,20 @@ function rmBins (pkg, folder, parent, top, cb) {
if (process.platform === "win32") {
rm(path.resolve(binRoot, b) + ".cmd", cb)
} else {
- rm( path.resolve(binRoot, b)
- , { gently: !npm.config.get("force") && folder }
- , cb )
+ gentlyRm( path.resolve(binRoot, b)
+ , !npm.config.get("force") && folder
+ , cb )
}
}, cb)
}
function rmMans (pkg, folder, parent, top, cb) {
- if (!pkg.man || !top || process.platform === "win32") return cb()
+ if (!pkg.man
+ || !top
+ || process.platform === "win32"
+ || !npm.config.get("global")) {
+ return cb()
+ }
var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")
asyncMap(pkg.man, function (man, cb) {
var parseMan = man.match(/(.*)\.([0-9]+)(\.gz)?$/)
@@ -78,8 +84,8 @@ function rmMans (pkg, folder, parent, top, cb) {
: pkg.name + "-" + bn)
+ "." + sxn + gz
)
- rm( manDest
- , { gently: !npm.config.get("force") && folder }
- , cb )
+ gentlyRm( manDest
+ , !npm.config.get("force") && folder
+ , cb )
}, cb)
}
diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js
index 1b43607d6d..655e5eb968 100644
--- a/deps/npm/lib/uninstall.js
+++ b/deps/npm/lib/uninstall.js
@@ -40,7 +40,12 @@ function uninstall (args, cb) {
function uninstall_ (args, nm, cb) {
asyncMap(args, function (arg, cb) {
- var p = path.resolve(nm, arg)
+ // uninstall .. should not delete /usr/local/lib/node_modules/..
+ var p = path.join(path.resolve(nm), path.join("/", arg))
+ if (path.resolve(p) === nm) {
+ log.warn(arg, "uninstall: invalid argument")
+ return cb(null, [])
+ }
fs.lstat(p, function (er) {
if (er) {
log.warn(arg, "Not installed in "+nm)
diff --git a/deps/npm/lib/utils/cmd-shim.js b/deps/npm/lib/utils/cmd-shim.js
index a7892e8ee3..e24da36f6d 100644
--- a/deps/npm/lib/utils/cmd-shim.js
+++ b/deps/npm/lib/utils/cmd-shim.js
@@ -14,7 +14,7 @@ cmdShim.ifExists = cmdShimIfExists
var fs = require("graceful-fs")
, chain = require("slide").chain
- , mkdir = require("./mkdir-p.js")
+ , mkdir = require("mkdirp")
, rm = require("rimraf")
, log = require("./log.js")
, path = require("path")
@@ -73,16 +73,16 @@ function writeShim_ (from, to, prog, args, cb) {
target = ""
shTarget = ""
} else {
- longProg = "\"%~dp0\"\\\"" + prog + ".exe\""
+ longProg = "\"%~dp0\\" + prog + ".exe\""
shLongProg = "\"`dirname \"$0\"`/" + prog + "\""
target = "\"%~dp0\\" + target + "\""
shTarget = "\"`dirname \"$0\"`/" + shTarget + "\""
}
- // @IF EXIST "%~dp0"\"node.exe" (
+ // @IF EXIST "%~dp0\node.exe" (
// "%~dp0\node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
// ) ELSE (
- // node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+ // node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
// )
var cmd
if (longProg) {
diff --git a/deps/npm/lib/utils/completion/file-completion.js b/deps/npm/lib/utils/completion/file-completion.js
index 427efefb44..c1c241d682 100644
--- a/deps/npm/lib/utils/completion/file-completion.js
+++ b/deps/npm/lib/utils/completion/file-completion.js
@@ -1,7 +1,7 @@
module.exports = fileCompletion
var find = require("../find.js")
- , mkdir = require("../mkdir-p.js")
+ , mkdir = require("mkdirp")
, path = require("path")
function fileCompletion (root, req, depth, cb) {
diff --git a/deps/npm/lib/utils/config-defs.js b/deps/npm/lib/utils/config-defs.js
index d368c49726..18b47ecdb7 100644
--- a/deps/npm/lib/utils/config-defs.js
+++ b/deps/npm/lib/utils/config-defs.js
@@ -9,6 +9,7 @@ var path = require("path")
, os = require("os")
, nopt = require("nopt")
, log = require("./log.js")
+ , npm = require("../npm.js")
function Octal () {}
function validateOctal (data, k, val) {
@@ -139,6 +140,8 @@ Object.defineProperty(exports, "defaults", {get: function () {
, cache : process.platform === "win32"
? path.resolve(process.env.APPDATA || home || temp, "npm-cache")
: path.resolve( home || temp, ".npm")
+ , "cache-max": Infinity
+ , "cache-min": 0
, color : process.platform !== "win32" || winColor
, coverage: false
@@ -161,6 +164,7 @@ Object.defineProperty(exports, "defaults", {get: function () {
, "init.author.name" : ""
, "init.author.email" : ""
, "init.author.url" : ""
+ , json: false
, link: false
, logfd : 2
, loglevel : "http"
@@ -180,10 +184,13 @@ Object.defineProperty(exports, "defaults", {get: function () {
, proxy : process.env.HTTP_PROXY || process.env.http_proxy || null
, "https-proxy" : process.env.HTTPS_PROXY || process.env.https_proxy ||
process.env.HTTP_PROXY || process.env.http_proxy || null
+ , "user-agent" : "npm/" + npm.version + " node/" + process.version
, "rebuild-bundle" : true
, registry : "http" + (httpsOk ? "s" : "") + "://registry.npmjs.org/"
, rollback : true
, save : false
+ , "save-dev" : false
+ , "save-optional" : false
, searchopts: ""
, searchexclude: null
, searchsort: "name"
@@ -206,6 +213,7 @@ Object.defineProperty(exports, "defaults", {get: function () {
, userignorefile : path.resolve(home, ".npmignore")
, umask: 022
, version : false
+ , versions : false
, viewer: process.platform === "win32" ? "browser" : "man"
, yes: null
@@ -220,6 +228,8 @@ exports.types =
, browser : String
, ca: [null, String]
, cache : path
+ , "cache-max": Number
+ , "cache-min": Number
, color : ["always", Boolean]
, coverage: Boolean
, depth : Number
@@ -233,11 +243,13 @@ exports.types =
, globalignorefile: path
, group : [Number, String]
, "https-proxy" : [null, url]
+ , "user-agent" : String
, ignore : String
, "init.version" : [null, semver]
, "init.author.name" : String
, "init.author.email" : String
, "init.author.url" : ["", url]
+ , json: Boolean
, link: Boolean
, logfd : [Number, Stream]
, loglevel : ["silent","win","error","warn","http","info","verbose","silly"]
@@ -259,6 +271,8 @@ exports.types =
, registry : [null, url]
, rollback : Boolean
, save : Boolean
+ , "save-dev" : Boolean
+ , "save-optional" : Boolean
, searchopts : String
, searchexclude: [null, String]
, searchsort: [ "name", "-name"
@@ -279,6 +293,7 @@ exports.types =
, userignorefile : path
, umask: Octal
, version : Boolean
+ , versions : Boolean
, viewer: String
, yes: [false, null, Boolean]
, _exit : Boolean
@@ -313,6 +328,8 @@ exports.shorthands =
, porcelain : ["--parseable"]
, g : ["--global"]
, S : ["--save"]
+ , D : ["--save-dev"]
+ , O : ["--save-optional"]
, y : ["--yes"]
, n : ["--no-yes"]
}
diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js
index 22d26d8d6c..0cdc03faca 100644
--- a/deps/npm/lib/utils/error-handler.js
+++ b/deps/npm/lib/utils/error-handler.js
@@ -159,6 +159,17 @@ function errorHandler (er) {
].join("\n"))
break
+ case npm.EBADPLATFORM:
+ er.code = "EBADPLATFORM"
+ log.error([er.message
+ ,"Not compatible with your operating system or architecture: "+er.pkgid
+ ,"Valid OS: "+er.os.join(",")
+ ,"Valid Arch: "+er.cpu.join(",")
+ ,"Actual OS: "+process.platform
+ ,"Actual Arch: "+process.arch
+ ].join("\n"))
+ break
+
case "EEXIST":
case constants.EEXIST:
log.error([er.message
diff --git a/deps/npm/lib/utils/excludes.js b/deps/npm/lib/utils/excludes.js
deleted file mode 100644
index 75fe41c057..0000000000
--- a/deps/npm/lib/utils/excludes.js
+++ /dev/null
@@ -1,157 +0,0 @@
-// build up a set of exclude lists in order of precedence:
-// [ ["!foo", "bar"]
-// , ["foo", "!bar"] ]
-// being *included* will override a previous exclusion,
-// and being excluded will override a previous inclusion.
-//
-// Each time the tar file-list generator thingie enters a new directory,
-// it calls "addIgnoreFile(dir, list, cb)". If an ignore file is found,
-// then it is added to the list and the cb() is called with an
-// child of the original list, so that we don't have
-// to worry about popping it off at the right time, since other
-// directories will continue to use the original parent list.
-//
-// If no ignore file is found, then the original list is returned.
-//
-// To start off with, ~/.{npm,git}ignore is added, as is
-// prefix/{npm,git}ignore, effectively treated as if they were in the
-// base package directory.
-
-exports.addIgnoreFile = addIgnoreFile
-exports.readIgnoreFile = readIgnoreFile
-exports.parseIgnoreFile = parseIgnoreFile
-exports.test = test
-exports.filter = filter
-
-var path = require("path")
- , fs = require("graceful-fs")
- , minimatch = require("minimatch")
- , relativize = require("./relativize.js")
- , log = require("./log.js")
-
-// todo: memoize
-
-// read an ignore file, or fall back to the
-// "gitBase" file in the same directory.
-function readIgnoreFile (file, gitBase, cb) {
- //log.warn(file, "ignoreFile")
- if (!file) return cb(null, "")
- fs.readFile(file, function (er, data) {
- if (!er || !gitBase) return cb(null, data || "")
- var gitFile = path.resolve(path.dirname(file), gitBase)
- fs.readFile(gitFile, function (er, data) {
- return cb(null, data || "")
- })
- })
-}
-
-// read a file, and then return the list of patterns
-function parseIgnoreFile (file, gitBase, dir, cb) {
- readIgnoreFile(file, gitBase, function (er, data) {
- data = data ? data.toString("utf8") : ""
-
- data = data.split(/[\r\n]+/).map(function (p) {
- return p.trim()
- }).filter(function (p) {
- return p.length && p.charAt(0) !== "#"
- })
- data.dir = dir
- return cb(er, data)
- })
-}
-
-// add an ignore file to an existing list which can
-// then be passed to the test() function. If the ignore
-// file doesn't exist, then the list is unmodified. If
-// it is, then a concat-child of the original is returned,
-// so that this is suitable for walking a directory tree.
-function addIgnoreFile (file, gitBase, list, dir, cb) {
- if (typeof cb !== "function") cb = dir, dir = path.dirname(file)
- if (typeof cb !== "function") cb = list, list = []
- parseIgnoreFile(file, gitBase, dir, function (er, data) {
- if (!er && data) {
- // package.json "files" array trumps everything
- // Make sure it's always last.
- if (list.length && list[list.length-1].packageFiles) {
- list = list.concat([data, list.pop()])
- } else {
- list = list.concat([data])
- }
- }
- cb(er, list)
- })
-}
-
-
-// no IO
-// loop through the lists created in the functions above, and test to
-// see if a file should be included or not, given those exclude lists.
-function test (file, excludeList) {
- if (path.basename(file) === "package.json") return true
- // log.warn(file, "test file")
- // log.warn(excludeList, "test list")
- var incRe = /^\!(\!\!)*/
- , excluded = false
- for (var i = 0, l = excludeList.length; i < l; i ++) {
- var excludes = excludeList[i]
- , dir = excludes.dir
-
- // chop the filename down to be relative to excludeDir
- var rf = relativize(file, dir, true)
- rf = rf.replace(/^\.?\//, "")
- if (file.slice(-1) === "/") rf += "/"
-
- // log.warn([file, rf], "rf")
-
- for (var ii = 0, ll = excludes.length; ii < ll; ii ++) {
- var ex = excludes[ii].replace(/^(!*)\//, "$1")
- , inc = !!ex.match(incRe)
-
- // log.warn([ex, rf], "ex, rf")
- // excluding/including a dir excludes/includes all the files in it.
- if (ex.slice(-1) === "/") ex += "**"
-
- // if this is not an inclusion attempt, and someone else
- // excluded it, then just continue, because there's nothing
- // that can be done here to change the exclusion.
- if (!inc && excluded) continue
-
- // if it's an inclusion attempt, and the file has not been
- // excluded, then skip it, because there's no need to try again.
- if (inc && !excluded) continue
-
- // if it matches the pattern, then it should be excluded.
- excluded = !!minimatch(rf, ex, { matchBase: true })
- // log.error([rf, ex, excluded], "rf, ex, excluded")
-
- // if you include foo, then it also includes foo/bar.js
- if (inc && excluded && ex.slice(-3) !== "/**") {
- excluded = minimatch(rf, ex + "/**", { matchBase: true })
- // log.warn([rf, ex + "/**", inc, excluded], "dir without /")
- }
-
- // if you exclude foo, then it also excludes foo/bar.js
- if (!inc
- && excluded
- && ex.slice(-3) !== "/**"
- && rf.slice(-1) === "/"
- && excludes.indexOf(ex + "/**") === -1) {
- // log.warn(ex + "/**", "adding dir-matching exclude pattern")
- excludes.splice(ii, 1, ex, ex + "/**")
- ll ++
- }
- }
- // log.warn([rf, excluded, excludes], "rf, excluded, excludes")
- }
- // true if it *should* be included
- // log.warn([file, excludeList, excluded], "file, excluded")
- return !excluded
-}
-
-// returns a function suitable for Array#filter
-function filter (dir, list) { return function (file) {
- file = file.trim()
- var testFile = path.resolve(dir, file)
- if (file.slice(-1) === "/") testFile += "/"
- return file && test(testFile, list)
-}}
diff --git a/deps/npm/lib/utils/exec.js b/deps/npm/lib/utils/exec.js
index 0fa0371b2e..b9a5b69114 100644
--- a/deps/npm/lib/utils/exec.js
+++ b/deps/npm/lib/utils/exec.js
@@ -1,4 +1,3 @@
-
module.exports = exec
exec.spawn = spawn
exec.pipe = pipe
@@ -11,6 +10,7 @@ var log = require("./log.js")
, myGID = process.getgid ? process.getgid() : null
, isRoot = process.getuid && myUID === 0
, constants = require("constants")
+ , uidNumber = require("uid-number")
function exec (cmd, args, env, takeOver, cwd, uid, gid, cb) {
if (typeof cb !== "function") cb = gid, gid = null
@@ -34,6 +34,15 @@ function exec (cmd, args, env, takeOver, cwd, uid, gid, cb) {
log.verbose(uid, "Setting uid from "+myUID)
log.verbose(new Error().stack, "stack at uid setting")
}
+
+ if (uid && gid && (isNaN(uid) || isNaN(gid))) {
+ // get the numeric values
+ return uidNumber(uid, gid, function (er, uid, gid) {
+ if (er) return cb(er)
+ exec(cmd, args, env, takeOver, cwd, uid, gid, cb)
+ })
+ }
+
log.silly(cmd+" "+args.map(JSON.stringify).join(" "), "exec")
var stdout = ""
, stderr = ""
@@ -77,6 +86,7 @@ function pipe (cp1, cp2, cb) {
cb(errState = new Error(
"Failed "+(cp1.name || "<unknown>")+"\nexited with "+code))
})
+
cp2.on("exit", function (code) {
cp2._exited = true
if (errState) return
@@ -94,10 +104,10 @@ function spawn (c, a, env, takeOver, cwd, uid, gid) {
, env : env || process.env
, cwd : cwd || null }
, cp
- if (uid != null) opts.uid = uid
- if (gid != null) opts.gid = gid
- if (!isNaN(opts.uid)) opts.uid = +opts.uid
- if (!isNaN(opts.gid)) opts.gid = +opts.gid
+
+ if (uid && !isNaN(uid)) opts.uid = +uid
+ if (gid && !isNaN(gid)) opts.gid = +gid
+
var name = c +" "+ a.map(JSON.stringify).join(" ")
log.silly([c, a, opts.cwd], "spawning")
cp = child_process.spawn(c, a, opts)
diff --git a/deps/npm/lib/utils/fetch.js b/deps/npm/lib/utils/fetch.js
index ba1e567b1f..bc1c095cdf 100644
--- a/deps/npm/lib/utils/fetch.js
+++ b/deps/npm/lib/utils/fetch.js
@@ -8,16 +8,16 @@ var request = require("request")
, url = require("url")
, log = require("./log.js")
, path = require("path")
- , mkdir = require("./mkdir-p.js")
+ , mkdir = require("mkdirp")
+ , chownr = require("chownr")
, regHost
- , getAgent = require("./get-agent.js")
module.exports = fetch
function fetch (remote, local, headers, cb) {
if (typeof cb !== "function") cb = headers, headers = {}
log.verbose(local, "fetch to")
- mkdir(path.dirname(local), function (er) {
+ mkdir(path.dirname(local), function (er, made) {
if (er) return cb(er)
fetch_(remote, local, headers, cb)
})
@@ -57,8 +57,9 @@ function makeRequest (remote, fstr, headers) {
request({ url: remote
, proxy: proxy
- , agent: getAgent(remote)
, strictSSL: npm.config.get("strict-ssl")
+ , ca: remote.host === regHost ? npm.config.get("ca") : undefined
+ , headers: { "user-agent": npm.config.get("user-agent") }
, onResponse: onResponse }).pipe(fstr)
function onResponse (er, res) {
if (er) return fstr.emit("error", er)
diff --git a/deps/npm/lib/utils/find-prefix.js b/deps/npm/lib/utils/find-prefix.js
index 320456c7ea..bc2c9598db 100644
--- a/deps/npm/lib/utils/find-prefix.js
+++ b/deps/npm/lib/utils/find-prefix.js
@@ -45,6 +45,9 @@ function findPrefix_ (p, original, cb) {
return cb(null, p)
}
- return findPrefix_(path.dirname(p), original, cb)
+ var d = path.dirname(p)
+ if (d === p) return cb(null, original)
+
+ return findPrefix_(d, original, cb)
})
}
diff --git a/deps/npm/lib/utils/get-agent.js b/deps/npm/lib/utils/get-agent.js
deleted file mode 100644
index 4bc074f29b..0000000000
--- a/deps/npm/lib/utils/get-agent.js
+++ /dev/null
@@ -1,62 +0,0 @@
-// get an http/https agent
-// This is necessary for the custom CA certs in http2,
-// especially while juggling multiple different registries.
-//
-// When using http2, the agent key is just the CA setting,
-// since it can manage socket pooling across different host:port
-// options. When using the older implementation, the
-// key is ca:host:port combination.
-
-module.exports = getAgent
-
-var npm = require("../npm.js")
- , url = require("url")
- , agents = {}
- , isHttp2 = !!require("http").globalAgent
- , registry = url.parse(npm.config.get("registry") || "")
- , regCA = npm.config.get("ca")
-
-function getAgent (remote) {
- // If not doing https, then there's no CA cert to manage.
- // on http2, this will use the default global agent.
- // on http1, this is undefined, so it'll spawn based on
- // host:port if necessary.
- if (remote.protocol !== "https:") {
- return require("http").globalAgent
- }
-
- if (typeof remote === "string") {
- remote = url.parse(remote)
- }
-
- var ca
- // if this is the registry, then use the configuration ca.
- // otherwise, just use the built-in CAs that node has.
- // todo: multi-registry support.
- if (remote.hostname === registry.hostname
- && remote.port === registry.port) {
- ca = regCA
- }
-
- // no CA, just use the default agent.
- if (!ca) {
- return require("https").globalAgent
- }
-
- var hostname = remote.hostname
- , port = remote.port
- , key = agentKey(hostname, port, ca)
-
- return agents[key] = agents[key] || getAgent_(hostname, port, ca)
-}
-
-function getAgent_ (hostname, port, ca) {
- var Agent = require("https").Agent
- return new Agent({ host: hostname
- , port: port
- , ca: ca })
-}
-
-function agentKey (hostname, port, ca) {
- return JSON.stringify(isHttp2 ? ca : [hostname, port, ca])
-}
diff --git a/deps/npm/lib/utils/get.js b/deps/npm/lib/utils/get.js
deleted file mode 100644
index 3c5e4f099b..0000000000
--- a/deps/npm/lib/utils/get.js
+++ /dev/null
@@ -1,6 +0,0 @@
-
-module.exports = get
-function get (obj, key) {
- for (var i in obj) if (i.toLowerCase() === key.toLowerCase()) return obj[i]
- return undefined
-}
diff --git a/deps/npm/lib/utils/ini.js b/deps/npm/lib/utils/ini.js
index 48d4f99edf..b033b6a04d 100644
--- a/deps/npm/lib/utils/ini.js
+++ b/deps/npm/lib/utils/ini.js
@@ -323,8 +323,14 @@ function envReplace (f) {
if (typeof f !== "string" || !f) return f
// replace any ${ENV} values with the appropriate environ.
- return f.replace(/\$\{([^}]+)\}/g, function (orig, name, i, s) {
- return process.env[name] || orig
+ var envExpr = /(\\*)\$\{([^}]+)\}/g
+ return f.replace(envExpr, function (orig, esc, name, i, s) {
+ esc = esc.length && esc.length % 2
+ if (esc) return orig
+ if (undefined === process.env[name]) {
+ throw new Error("Failed to replace env in config: "+orig)
+ }
+ return process.env[name]
})
}
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
index 970157349a..92b086e699 100644
--- a/deps/npm/lib/utils/lifecycle.js
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -11,6 +11,17 @@ var log = require("./log.js")
, chain = require("slide").chain
, constants = require("constants")
, output = require("./output.js")
+ , PATH = "PATH"
+
+// windows calls its path "Path" usually, but this is not guaranteed.
+if (process.platform === "win32") {
+ PATH = "Path"
+ Object.keys(process.env).forEach(function (e) {
+ if (e.match(/^PATH$/i)) {
+ PATH = e
+ }
+ })
+}
function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
if (typeof cb !== "function") cb = failOk, failOk = false
@@ -54,16 +65,21 @@ function checkForLink (pkg, cb) {
}
function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
- var PATH = []
+ var pathArr = []
, p = wd.split("node_modules")
, acc = path.resolve(p.shift())
p.forEach(function (pp) {
- PATH.unshift(path.join(acc, "node_modules", ".bin"))
+ pathArr.unshift(path.join(acc, "node_modules", ".bin"))
acc = path.join(acc, "node_modules", pp)
})
- PATH.unshift(path.join(acc, "node_modules", ".bin"))
- if (env.PATH) PATH.push(env.PATH)
- env.PATH = PATH.join(process.platform === "win32" ? ";" : ":")
+ pathArr.unshift(path.join(acc, "node_modules", ".bin"))
+
+ // we also unshift the bundled node-gyp-bin folder so that
+ // the bundled one will be used for installing things.
+ pathArr.unshift(path.join(__dirname, "..", "..", "bin", "node-gyp-bin"))
+
+ if (env[PATH]) pathArr.push(env[PATH])
+ env[PATH] = pathArr.join(process.platform === "win32" ? ";" : ":")
var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
@@ -113,7 +129,7 @@ function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
, cmd = env.npm_lifecycle_script
, sh = "sh"
, shFlag = "-c"
-
+
if (process.platform === "win32") {
sh = "cmd"
shFlag = "/c"
@@ -121,9 +137,12 @@ function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
log.verbose(unsafe, "unsafe-perm in lifecycle")
- output.write("\n> "+pkg._id+" " + stage+" "+wd+"\n> "+cmd+"\n", function (er) {
+ var note = "\n> " + pkg._id + " " + stage + " " + wd
+ + "\n> " + cmd + "\n"
+
+ output.write(note, function (er) {
if (er) return cb(er)
-
+
exec( sh, [shFlag, cmd], env, true, wd
, user, group
, function (er, code, stdout, stderr) {
@@ -233,6 +252,10 @@ function makeEnv (data, prefix, env) {
return
}
var value = ini.get(i)
+ if (/^(log|out)fd$/.test(i) && typeof value === "object") {
+ // not an fd, a stream
+ return
+ }
if (!value) value = ""
else if (typeof value !== "string") value = JSON.stringify(value)
diff --git a/deps/npm/lib/utils/link.js b/deps/npm/lib/utils/link.js
index 9be1221f0a..7fa80d5e12 100644
--- a/deps/npm/lib/utils/link.js
+++ b/deps/npm/lib/utils/link.js
@@ -4,8 +4,8 @@ link.ifExists = linkIfExists
var fs = require("graceful-fs")
, chain = require("slide").chain
- , mkdir = require("./mkdir-p.js")
- , rm = require("rimraf")
+ , mkdir = require("mkdirp")
+ , rm = require("./gently-rm.js")
, log = require("./log.js")
, path = require("path")
, relativize = require("./relativize.js")
@@ -23,7 +23,7 @@ function link (from, to, gently, cb) {
if (npm.config.get("force")) gently = false
chain
( [ [fs, "stat", from]
- , [rm, to, { gently: gently }]
+ , [rm, to, gently]
, [mkdir, path.dirname(to)]
, [fs, "symlink", relativize(from, to), to] ]
, cb)
diff --git a/deps/npm/lib/utils/mkdir-p.js b/deps/npm/lib/utils/mkdir-p.js
deleted file mode 100644
index cc2b465fb6..0000000000
--- a/deps/npm/lib/utils/mkdir-p.js
+++ /dev/null
@@ -1,191 +0,0 @@
-
-var log = require("./log.js")
- , fs = require("graceful-fs")
- , path = require("path")
- , npm = require("../npm.js")
- , exec = require("./exec.js")
- , uidNumber = require("./uid-number.js")
- , umask = process.umask()
- , umaskOrig = umask
- , addedUmaskExit = false
- , mkdirCache = {}
-
-module.exports = mkdir
-function mkdir (ensure, mode, uid, gid, noChmod, cb_) {
- if (typeof cb_ !== "function") cb_ = noChmod, noChmod = null
- if (typeof cb_ !== "function") cb_ = gid, gid = null
- if (typeof cb_ !== "function") cb_ = uid, uid = null
- if (typeof cb_ !== "function") cb_ = mode, mode = npm.modes.exec
-
- if (mode & umask) {
- log.verbose(mode.toString(8), "umasking from "+umask.toString(8))
- process.umask(umask = 0)
- if (!addedUmaskExit) {
- addedUmaskExit = true
- process.on("exit", function () { process.umask(umask = umaskOrig) })
- }
- }
-
- ensure = path.resolve(ensure).replace(/\/+$/, '')
-
- // mkdir("/") should not do anything, since that always exists.
- if (!ensure
- || ( process.platform === "win32"
- && ensure.match(/^[a-zA-Z]:(\\|\/)?$/))) {
- return cb_()
- }
-
- if (mkdirCache.hasOwnProperty(ensure)) {
- return mkdirCache[ensure].push(cb_)
- }
- mkdirCache[ensure] = [cb_]
-
- function cb (er) {
- var cbs = mkdirCache[ensure]
- delete mkdirCache[ensure]
- cbs.forEach(function (c) { c(er) })
- }
-
- if (uid === null && gid === null) {
- return mkdir_(ensure, mode, uid, gid, noChmod, cb)
- }
-
- uidNumber(uid, gid, function (er, uid, gid) {
- if (er) return cb(er)
- mkdir_(ensure, mode, uid, gid, noChmod, cb)
- })
-}
-
-function mkdir_ (ensure, mode, uid, gid, noChmod, cb) {
- // if it's already a dir, then just check the bits and owner.
- fs.stat(ensure, function (er, s) {
- if (s && s.isDirectory()) {
- // check mode, uid, and gid.
- if ((noChmod || (s.mode & mode) === mode)
- && (typeof uid !== "number" || s.uid === uid)
- && (typeof gid !== "number" || s.gid === gid)) return cb()
- return done(ensure, mode, uid, gid, noChmod, cb)
- }
- return walkDirs(ensure, mode, uid, gid, noChmod, cb)
- })
-}
-
-function done (ensure, mode, uid, gid, noChmod, cb) {
- // now the directory has been created.
- // chown it to the desired uid/gid
- // Don't chown the npm.root dir, though, in case we're
- // in unsafe-perm mode.
- log.verbose("done: "+ensure+" "+mode.toString(8), "mkdir")
-
- // only chmod if noChmod isn't set.
- var d = done_(ensure, mode, uid, gid, cb)
- if (noChmod) return d()
- fs.chmod(ensure, mode, d)
-}
-
-function done_ (ensure, mode, uid, gid, cb) {
- return function (er) {
- if (er
- || ensure === npm.dir
- || typeof uid !== "number"
- || typeof gid !== "number"
- || npm.config.get("unsafe-perm")) return cb(er)
- uid = Math.floor(uid)
- gid = Math.floor(gid)
- fs.chown(ensure, uid, gid, cb)
- }
-}
-
-var pathSplit = process.platform === "win32" ? /\/|\\/ : "/"
-function walkDirs (ensure, mode, uid, gid, noChmod, cb) {
- var dirs = ensure.split(pathSplit)
- , walker = []
- , foundUID = null
- , foundGID = null
-
- // gobble the "/" or C: first
- walker.push(dirs.shift())
-
- // The loop that goes through and stats each dir.
- ;(function S (d) {
- // no more directory steps left.
- if (d === undefined) {
- // do the chown stuff
- return done(ensure, mode, uid, gid, noChmod, cb)
- }
-
- // get the absolute dir for the next piece being stat'd
- walker.push(d)
- var dir = walker.join(path.SPLIT_CHAR)
-
- // stat callback lambda
- fs.stat(dir, function STATCB (er, s) {
- if (er) {
- // the stat failed - directory does not exist.
-
- log.verbose(er.message, "mkdir (expected) error")
-
- // use the same uid/gid as the nearest parent, if not set.
- if (foundUID !== null) uid = foundUID
- if (foundGID !== null) gid = foundGID
-
- // make the directory
- fs.mkdir(dir, mode, function MKDIRCB (er) {
- // since stat and mkdir are done as two separate syscalls,
- // operating on a path rather than a file descriptor, it's
- // possible that the directory didn't exist when we did
- // the stat, but then *did* exist when we go to to the mkdir.
- // If we didn't care about uid/gid, we could just mkdir
- // repeatedly, failing on any error other than "EEXIST".
- if (er && er.message.indexOf("EEXIST") === 0) {
- return fs.stat(dir, STATCB)
- }
-
- // any other kind of error is not saveable.
- if (er) return cb(er)
-
- // at this point, we've just created a new directory successfully.
-
- // if we care about permissions
- if (!npm.config.get("unsafe-perm") // care about permissions
- // specified a uid and gid
- && uid !== null
- && gid !== null ) {
- // set the proper ownership
- return fs.chown(dir, uid, gid, function (er) {
- if (er) return cb(er)
- // attack the next portion of the path.
- S(dirs.shift())
- })
- } else {
- // either we don't care about ownership, or it's already right.
- S(dirs.shift())
- }
- }) // mkdir
-
- } else {
- // the stat succeeded.
- if (s.isDirectory()) {
- // if it's a directory, that's good.
- // if the uid and gid aren't already set, then try to preserve
- // the ownership on up the tree. Things in ~ remain owned by
- // the user, things in / remain owned by root, etc.
- if (uid === null && typeof s.uid === "number") foundUID = s.uid
- if (gid === null && typeof s.gid === "number") foundGID = s.gid
-
- // move onto next portion of path
- S(dirs.shift())
-
- } else {
- // the stat succeeded, but it's not a directory
- log.verbose(dir, "mkdir exists")
- log.silly(s, "stat("+dir+")")
- log.verbose(s.isDirectory(), "isDirectory()")
- cb(new Error("Failed to mkdir "+dir+": File exists"))
- }// if (isDirectory) else
- } // if (stat failed) else
- }) // stat
-
- // start the S function with the first item in the list of directories.
- })(dirs.shift())
-}
diff --git a/deps/npm/lib/utils/npm-registry-client/get.js b/deps/npm/lib/utils/npm-registry-client/get.js
index ae05ac1fc3..e0902f0276 100644
--- a/deps/npm/lib/utils/npm-registry-client/get.js
+++ b/deps/npm/lib/utils/npm-registry-client/get.js
@@ -6,8 +6,9 @@ var GET = require("./request.js").GET
, npm = require("../../npm.js")
, path = require("path")
, log = require("../log.js")
- , mkdir = require("../mkdir-p.js")
+ , mkdir = require("mkdirp")
, cacheStat = null
+ , chownr = require("chownr")
function get (project, version, timeout, nofollow, staleOk, cb) {
if (typeof cb !== "function") cb = staleOk, staleOk = false
@@ -19,6 +20,9 @@ function get (project, version, timeout, nofollow, staleOk, cb) {
throw new Error("No callback provided to registry.get")
}
+ timeout = Math.min(timeout, npm.config.get("cache-max"))
+ timeout = Math.max(timeout, npm.config.get("cache-min"))
+
if ( process.env.COMP_CWORD !== undefined
&& process.env.COMP_LINE !== undefined
&& process.env.COMP_POINT !== undefined
@@ -136,7 +140,7 @@ function get_ (uri, timeout, cache, stat, data, nofollow, staleOk, cb) {
data = remoteData
if (!data) {
- er = new Error("failed to fetch from registry: " + uri)
+ er = er || new Error("failed to fetch from registry: " + uri)
}
if (er) return cb(er, data, raw, response)
@@ -170,13 +174,13 @@ function saveToCache (cache, data, saved) {
}
function saveToCache_ (cache, data, uid, gid, saved) {
- mkdir(path.dirname(cache), npm.modes.exec, uid, gid, function (er) {
+ mkdir(path.dirname(cache), function (er, made) {
if (er) return saved()
fs.writeFile(cache, JSON.stringify(data), function (er) {
if (er || uid === null || gid === null) {
return saved()
}
- fs.chown(cache, uid, gid, saved)
+ chownr(made || cache, uid, gid, saved)
})
})
}
diff --git a/deps/npm/lib/utils/npm-registry-client/request.js b/deps/npm/lib/utils/npm-registry-client/request.js
index d98135e49e..d5122629dd 100644
--- a/deps/npm/lib/utils/npm-registry-client/request.js
+++ b/deps/npm/lib/utils/npm-registry-client/request.js
@@ -16,7 +16,6 @@ var npm = require("../../npm.js")
, stream = require("stream")
, Stream = stream.Stream
, request = require("request")
- , getAgent = require("../get-agent.js")
function regRequest (method, where, what, etag, nofollow, cb_) {
if (typeof cb_ !== "function") cb_ = nofollow, nofollow = false
@@ -85,7 +84,7 @@ function regRequest (method, where, what, etag, nofollow, cb_) {
function makeRequest (method, remote, where, what, etag, nofollow, cb) {
var opts = { url: remote
, method: method
- , agent: getAgent(remote)
+ , ca: npm.config.get("ca")
, strictSSL: npm.config.get("strict-ssl") }
, headers = opts.headers = {}
if (etag) {
@@ -95,6 +94,8 @@ function makeRequest (method, remote, where, what, etag, nofollow, cb) {
headers.accept = "application/json"
+ headers["user-agent"] = npm.config.get("user-agent")
+
opts.proxy = npm.config.get( remote.protocol === "https:"
? "https-proxy" : "proxy" )
diff --git a/deps/npm/lib/utils/output.js b/deps/npm/lib/utils/output.js
index 00da9f69f7..b705153ad5 100644
--- a/deps/npm/lib/utils/output.js
+++ b/deps/npm/lib/utils/output.js
@@ -58,6 +58,7 @@ function write (args, stream, lf, cb) {
if (!npm.config.get("unicode")) {
arg = arg.replace(/└/g, "`")
.replace(/─/g, "-")
+ .replace(/│/g, "|")
.replace(/├/g, "+")
.replace(/┬/g, "-")
}
diff --git a/deps/npm/lib/utils/read-installed.js b/deps/npm/lib/utils/read-installed.js
index 6c0ece25bc..ff220943d6 100644
--- a/deps/npm/lib/utils/read-installed.js
+++ b/deps/npm/lib/utils/read-installed.js
@@ -94,6 +94,7 @@ var npm = require("../npm.js")
, semver = require("semver")
, readJson = require("./read-json.js")
, log = require("./log.js")
+ , url = require("url")
module.exports = readInstalled
@@ -125,7 +126,8 @@ function readInstalled_ (folder, parent, name, reqver, depth, maxDepth, cb) {
})
readJson(path.resolve(folder, "package.json"), function (er, data) {
- obj = data
+ obj = copy(data)
+
if (!parent) {
obj = obj || true
er = null
@@ -253,6 +255,8 @@ function findUnmet (obj) {
continue
}
if ( typeof deps[d] === "string"
+ // url deps presumed innocent.
+ && !url.parse(deps[d]).protocol
&& !semver.satisfies(found.version, deps[d])) {
// the bad thing will happen
log.warn(obj.path + " requires "+d+"@'"+deps[d]
@@ -269,6 +273,15 @@ function findUnmet (obj) {
return obj
}
+function copy (obj) {
+ if (!obj || typeof obj !== 'object') return obj
+ if (Array.isArray(obj)) return obj.map(copy)
+
+ var o = {}
+ for (var i in obj) o[i] = copy(obj[i])
+ return o
+}
+
if (module === require.main) {
var util = require("util")
console.error("testing")
diff --git a/deps/npm/lib/utils/read-json.js b/deps/npm/lib/utils/read-json.js
index 388d6727eb..d1bba10f58 100644
--- a/deps/npm/lib/utils/read-json.js
+++ b/deps/npm/lib/utils/read-json.js
@@ -34,8 +34,33 @@ function readJson (jsonFile, opts, cb) {
var wscript = null
, contributors = null
, serverjs = null
+ , gypfile = null
- if (opts.wscript != null) {
+ if (opts.gypfile !== null && opts.gypfile !== undefined) {
+ gypfile = opts.gypfile
+ next()
+ } else {
+ var pkgdir = path.dirname(jsonFile)
+
+ function hasGyp (has) {
+ gypfile = opts.gypfile = has
+ next()
+ }
+
+ fs.readdir(pkgdir, function (er, gf) {
+ // this would be weird.
+ if (er) return hasGyp(false)
+
+ // see if there are any *.gyp files in there.
+ gf = gf.filter(function (f) {
+ return f.match(/\.gyp$/)
+ })
+ gf = gf[0]
+ return hasGyp(!!gf)
+ })
+ }
+
+ if (opts.wscript !== null && opts.wscript !== undefined) {
wscript = opts.wscript
next()
} else fs.readFile( path.join(path.dirname(jsonFile), "wscript")
@@ -47,7 +72,7 @@ function readJson (jsonFile, opts, cb) {
next()
})
- if (opts.contributors != null) {
+ if (opts.contributors !== null && opts.contributors !== undefined) {
contributors = opts.contributors
next()
} else fs.readFile( path.join(path.dirname(jsonFile), "AUTHORS")
@@ -64,7 +89,7 @@ function readJson (jsonFile, opts, cb) {
next()
})
- if (opts.serverjs != null) {
+ if (opts.serverjs !== null && opts.serverjs !== undefined) {
serverjs = opts.serverjs
next()
} else fs.stat( path.join(path.dirname(jsonFile), "server.js")
@@ -76,22 +101,55 @@ function readJson (jsonFile, opts, cb) {
})
function next () {
- if (wscript === null
- || contributors === null
- || serverjs === null) {
+ if (wscript === null ||
+ contributors === null ||
+ gypfile === null ||
+ serverjs === null) {
return
}
- fs.readFile(jsonFile, processJson(opts, function (er, data) {
+ // XXX this api here is insane. being internal is no excuse.
+ // please refactor.
+ var thenLoad = processJson(opts, function (er, data) {
if (er) return cb(er)
var doLoad = !(jsonFile.indexOf(npm.cache) === 0 &&
path.basename(path.dirname(jsonFile)) !== "package")
if (!doLoad) return cb(er, data)
loadPackageDefaults(data, path.dirname(jsonFile), cb)
- }))
+ })
+
+ fs.readFile(jsonFile, function (er, data) {
+ if (er && er.code === "ENOENT") {
+ // single-file module, maybe?
+ // check index.js for a /**package { ... } **/ section.
+ var indexFile = path.resolve(path.dirname(jsonFile), "index.js")
+ return fs.readFile(indexFile, function (er2, data) {
+ // if this doesn't work, then die with the original error.
+ if (er2) return cb(er)
+ data = parseIndex(data)
+ if (!data) return cb(er)
+ thenLoad(null, data)
+ })
+ }
+ thenLoad(er, data)
+ })
}
}
+// sync. no io.
+// /**package { "name": "foo", "version": "1.2.3", ... } **/
+function parseIndex (data) {
+ data = data.toString()
+ data = data.split(/^\/\*\*package(?:\s|$)/m)
+ if (data.length < 2) return null
+ data = data[1]
+ data = data.split(/\*\*\/$/m)
+ if (data.length < 2) return null
+ data = data[0]
+ data = data.replace(/^\s*\*/mg, "")
+ return data
+}
+
function processJson (opts, cb) {
if (typeof cb !== "function") cb = opts, opts = {}
if (typeof cb !== "function") {
@@ -113,8 +171,8 @@ function processJson (opts, cb) {
}
function processJsonString (opts, cb) { return function (er, jsonString) {
- jsonString += ""
if (er) return cb(er, jsonString)
+ jsonString += ""
var json
try {
json = JSON.parse(jsonString)
@@ -188,11 +246,12 @@ function typoWarn (json) {
}
if (typeof json.bugs === "object") {
+ // just go ahead and correct these.
Object.keys(bugsTypos).forEach(function (d) {
if (json.bugs.hasOwnProperty(d)) {
- log.warn( "package.json: bugs['" + d + "'] should probably be "
- + "bugs['" + bugsTypos[d] + "']", json._id)
- }
+ json.bugs[ bugsTypos[d] ] = json.bugs[d]
+ delete json.bugs[d]
+ }
})
}
@@ -300,6 +359,15 @@ function processObject (opts, cb) { return function (er, json) {
var scripts = json.scripts || {}
+ // if it has a bindings.gyp, then build with node-gyp
+ if (opts.gypfile && !json.prebuilt) {
+ log.verbose([json.prebuilt, opts], "has bindings.gyp")
+ if (!scripts.install && !scripts.preinstall) {
+ scripts.install = "node-gyp rebuild"
+ json.scripts = scripts
+ }
+ }
+
// if it has a wscript, then build it.
if (opts.wscript && !json.prebuilt) {
log.verbose([json.prebuilt, opts], "has wscript")
@@ -379,11 +447,9 @@ function processObject (opts, cb) { return function (er, json) {
if (opts.dev
|| npm.config.get("dev")
|| npm.config.get("npat")) {
- // log.warn(json._id, "Adding devdeps")
Object.keys(json.devDependencies || {}).forEach(function (d) {
json.dependencies[d] = json.devDependencies[d]
})
- // log.warn(json.dependencies, "Added devdeps")
}
typoWarn(json)
@@ -409,13 +475,6 @@ function processObject (opts, cb) { return function (er, json) {
var depObjectifyWarn = {}
function depObjectify (deps, d, id) {
- if ((!deps || typeof deps !== "object" || Array.isArray(deps))
- && !depObjectifyWarn[id+d]) {
- log.warn( d + " field should be hash of <name>:<version-range> pairs"
- , id )
- depObjectifyWarn[id + d] = true
- }
-
if (!deps) return {}
if (typeof deps === "string") {
deps = deps.trim().split(/[\n\r\s\t ,]+/)
diff --git a/deps/npm/lib/utils/set.js b/deps/npm/lib/utils/set.js
deleted file mode 100644
index 4d9241aab7..0000000000
--- a/deps/npm/lib/utils/set.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-module.exports = set
-var get = require("./get.js")
- , processJson = require("./read-json.js").processJson
-function set (obj, key, val) {
- for (var i in obj) {
- if (i.toLowerCase() === key.toLowerCase()) return obj[i] = val
- }
- obj[key] = val
- if (!val) return
- // if it's a package set, then assign all the versions.
- if (val.versions) return Object.keys(val.versions).forEach(function (v) {
- if (typeof val.versions[v] !== "object") return
- set(obj, key+"@"+v, val.versions[v])
- })
- // Note that this doesn't put the dist-tags there, only updates the versions
- if (key === val.name+"@"+val.version) {
- processJson(val)
- var reg = get(obj, val.name) || {}
- reg.name = reg._id = val.name
- set(obj, val.name, reg)
- reg.versions = get(reg, "versions") || {}
- if (!get(reg.versions, val.version)) set(reg.versions, val.version, val)
- }
-}
diff --git a/deps/npm/lib/utils/tar.js b/deps/npm/lib/utils/tar.js
index f315bbf961..415eb7f9e2 100644
--- a/deps/npm/lib/utils/tar.js
+++ b/deps/npm/lib/utils/tar.js
@@ -1,115 +1,39 @@
-// XXX lib/cache.js and this file need to be rewritten.
-
// commands for packing and unpacking tarballs
// this file is used by lib/cache.js
var npm = require("../npm.js")
, fs = require("graceful-fs")
- , exec = require("./exec.js")
- , find = require("./find.js")
- , mkdir = require("./mkdir-p.js")
- , asyncMap = require("slide").asyncMap
, path = require("path")
, log = require("./log.js")
- , uidNumber = require("./uid-number.js")
+ , uidNumber = require("uid-number")
, rm = require("rimraf")
, readJson = require("./read-json.js")
, relativize = require("./relativize.js")
, cache = require("../cache.js")
- , excludes = require("./excludes.js")
, myUid = process.getuid && process.getuid()
, myGid = process.getgid && process.getgid()
, tar = require("tar")
, zlib = require("zlib")
, fstream = require("fstream")
+ , Packer = require("fstream-npm")
+
+if (process.env.SUDO_UID && myUid === 0) {
+ if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
+ if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID
+}
exports.pack = pack
exports.unpack = unpack
-exports.makeList = makeList
function pack (targetTarball, folder, pkg, dfc, cb) {
+ log.verbose([targetTarball, folder], "tar.pack")
if (typeof cb !== "function") cb = dfc, dfc = true
- folder = path.resolve(folder)
-
- log.verbose(folder, "pack")
-
- if (typeof pkg === "function") {
- cb = pkg, pkg = null
- return readJson(path.resolve(folder, "package.json"), function (er, pkg) {
- if (er) return log.er(cb, "Couldn't find package.json in "+folder)(er)
- pack(targetTarball, folder, pkg, dfc, cb)
- })
- }
- log.verbose(folder+" "+targetTarball, "pack")
- var parent = path.dirname(folder)
- , addFolder = path.basename(folder)
-
- var confEx = npm.config.get("ignore")
- log.silly(folder, "makeList")
- makeList(folder, pkg, dfc, function (er, files, cleanup) {
- if (er) return cb(er)
- // log.silly(files, "files")
- return packFiles(targetTarball, parent, files, pkg, function (er) {
- if (!cleanup || !cleanup.length) return cb(er)
- // try to be a good citizen, even/especially in the event of failure.
- cleanupResolveLinkDep(cleanup, function (er2) {
- if (er || er2) {
- if (er) log(er, "packing tarball")
- if (er2) log(er2, "while cleaning up resolved deps")
- }
- return cb(er || er2)
- })
- })
- })
-}
-
-function packFiles (targetTarball, parent, files, pkg, cb_) {
-
- var p
-
- files = files.map(function (f) {
- p = f.split(/\/|\\/)[0]
- return path.resolve(parent, f)
- })
-
- parent = path.resolve(parent, p)
-
- var called = false
- function cb (er) {
- if (called) return
- called = true
- cb_(er)
- }
log.verbose(targetTarball, "tarball")
- log.verbose(parent, "parent")
- fstream.Reader({ type: "Directory"
- , path: parent
- , filter: function () {
- // files should *always* get into tarballs
- // in a user-writable state, even if they're
- // being installed from some wackey vm-mounted
- // read-only filesystem.
- this.props.mode = this.props.mode | 0200
- var inc = -1 !== files.indexOf(this.path)
+ log.verbose(folder, "folder")
+ new Packer({ path: folder, type: "Directory", isDirectory: true })
+ .on("error", log.er(cb, "error reading "+folder))
- // WARNING! Hackety hack!
- // XXX Fix this in a better way.
- // Rename .gitignore to .npmignore if there is not a
- // .npmignore file there already, the better to lock
- // down installed packages with git for deployment.
- if (this.basename === ".gitignore") {
- if (this.parent._entries.indexOf(".npmignore") !== -1) {
- return false
- }
- var d = path.dirname(this.path)
- this.basename = ".npmignore"
- this.path = path.join(d, ".npmignore")
- }
- return inc
- }
- })
- .on("error", log.er(cb, "error reading "+parent))
// By default, npm includes some proprietary attributes in the
// package tarball. This is sane, and allowed by the spec.
// However, npm *itself* excludes these from its own package,
@@ -121,11 +45,14 @@ function packFiles (targetTarball, parent, files, pkg, cb_) {
.on("error", log.er(cb, "gzip error "+targetTarball))
.pipe(fstream.Writer({ type: "File", path: targetTarball }))
.on("error", log.er(cb, "Could not write "+targetTarball))
- .on("close", cb)
+ .on("close", function () {
+ cb()
+ })
}
function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
+ log.verbose(tarball, "unpack")
if (typeof cb !== "function") cb = gid, gid = null
if (typeof cb !== "function") cb = uid, uid = null
if (typeof cb !== "function") cb = fMode, fMode = npm.modes.file
@@ -138,466 +65,139 @@ function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
}
function unpack_ ( tarball, unpackTarget, dMode, fMode, uid, gid, cb ) {
- // If the desired target is /path/to/foo,
- // then unpack into /path/to/.foo.npm/{something}
- // rename that to /path/to/foo, and delete /path/to/.foo.npm
var parent = path.dirname(unpackTarget)
, base = path.basename(unpackTarget)
- , tmp = path.resolve(parent, "___" + base + ".npm")
- mkdir(tmp, dMode || npm.modes.exec, uid, gid, function (er) {
- log.verbose([uid, gid], "unpack_ uid, gid")
- log.verbose(unpackTarget, "unpackTarget")
- if (er) return log.er(cb, "Could not create "+tmp)(er)
- // cp the gzip of the tarball, pipe the stdout into tar's stdin
+ rm(unpackTarget, function (er) {
+ if (er) return cb(er)
+
// gzip {tarball} --decompress --stdout \
// | tar -mvxpf - --strip-components=1 -C {unpackTarget}
- gunzTarPerm( tarball, tmp
+ gunzTarPerm( tarball, unpackTarget
, dMode, fMode
, uid, gid
, function (er, folder) {
if (er) return cb(er)
- log.verbose(folder, "gunzed")
-
- rm(unpackTarget, function (er) {
- if (er) return cb(er)
- log.verbose(unpackTarget, "rm'ed")
-
- moveIntoPlace(folder, unpackTarget, function (er) {
- if (er) return cb(er)
- log.verbose([folder, unpackTarget], "renamed")
- // curse you, nfs! It will lie and tell you that the
- // mv is done, when in fact, it isn't. In theory,
- // reading the file should cause it to wait until it's done.
- readJson( path.resolve(unpackTarget, "package.json")
- , function (er, data) {
- // now we read the json, so we know it's there.
- rm(tmp, function (er2) { cb(er || er2, data) })
- })
- })
- })
+ readJson(path.resolve(folder, "package.json"), cb)
})
})
}
-// on Windows, A/V software can lock the directory, causing this
-// to fail with an EACCES. Try again on failure, for up to 1 second.
-// XXX Fix this by not unpacking into a temp directory, instead just
-// renaming things on the way out of the tarball.
-function moveIntoPlace (folder, unpackTarget, cb) {
- var start = Date.now()
- fs.rename(folder, unpackTarget, function CB (er) {
- if (er
- && process.platform === "win32"
- && er.code === "EACCES"
- && Date.now() - start < 1000) {
- return fs.rename(folder, unpackTarget, CB)
- }
- cb(er)
- })
-}
-
-function gunzTarPerm (tarball, tmp, dMode, fMode, uid, gid, cb) {
+function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) {
if (!dMode) dMode = npm.modes.exec
if (!fMode) fMode = npm.modes.file
log.silly([dMode.toString(8), fMode.toString(8)], "gunzTarPerm modes")
- fs.createReadStream(tarball)
- .on("error", log.er(cb, "error reading "+tarball))
- .pipe(zlib.Unzip())
- .on("error", log.er(cb, "unzip error "+tarball))
- .pipe(tar.Extract({ type: "Directory", path: tmp }))
- .on("error", log.er(cb, "Failed unpacking "+tarball))
- .on("close", afterUntar)
-
- //
- // XXX Do all this in an Extract filter.
- //
- function afterUntar (er) {
- log.silly(er, "afterUntar")
- // if we're not doing ownership management,
- // then we're done now.
- if (er) return log.er(cb, "Failed unpacking "+tarball)(er)
-
- // HACK skip on windows
- if (npm.config.get("unsafe-perm") && process.platform !== "win32") {
- uid = process.getuid()
- gid = process.getgid()
- if (uid === 0) {
- if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
- if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
- }
- }
-
- if (process.platform === "win32") {
- return fs.readdir(tmp, function (er, files) {
- files = files.filter(function (f) {
- return f && f.indexOf("\0") === -1
- })
- cb(er, files && path.resolve(tmp, files[0]))
- })
- }
-
- find(tmp, function (f) {
- return f !== tmp
- }, function (er, files) {
- if (er) return cb(er)
- asyncMap(files, function (f, cb) {
- f = path.resolve(f)
- log.silly(f, "asyncMap in gTP")
- fs.lstat(f, function (er, stat) {
-
- if (er || stat.isSymbolicLink()) return cb(er)
- if (typeof uid === "number" && typeof gid === "number") {
- fs.chown(f, uid, gid, chown)
- } else chown()
-
- function chown (er) {
- if (er) return cb(er)
- var mode = stat.isDirectory() ? dMode : fMode
- , oldMode = stat.mode & 0777
- , newMode = (oldMode | mode) & (~npm.modes.umask)
- if (mode && newMode !== oldMode) {
- log.silly(newMode.toString(8), "chmod "+path.basename(f))
- fs.chmod(f, newMode, cb)
- } else cb()
- }
- })
- }, function (er) {
-
- if (er) return cb(er)
- if (typeof myUid === "number" && typeof myGid === "number") {
- fs.chown(tmp, myUid, myGid, chown)
- } else chown()
-
- function chown (er) {
- if (er) return cb(er)
- fs.readdir(tmp, function (er, folder) {
- folder = folder && folder.filter(function (f) {
- return f && !f.match(/^\._/)
- })
- cb(er, folder && path.resolve(tmp, folder[0]))
- })
- }
- })
- })
+ var cbCalled = false
+ function cb (er) {
+ if (cbCalled) return
+ cbCalled = true
+ cb_(er, target)
}
-}
-
-function makeList (dir, pkg, dfc, cb) {
- if (typeof cb !== "function") cb = dfc, dfc = true
- if (typeof cb !== "function") cb = pkg, pkg = null
- dir = path.resolve(dir)
-
- if (!pkg.path) pkg.path = dir
-
- var name = path.basename(dir)
-
- // since this is a top-level traversal, get the user and global
- // exclude files, as well as the "ignore" config setting.
- var confIgnore = npm.config.get("ignore").trim()
- .split(/[\n\r\s\t]+/)
- .filter(function (i) { return i.trim() })
- , userIgnore = npm.config.get("userignorefile")
- , globalIgnore = npm.config.get("globalignorefile")
- , userExclude
- , globalExclude
-
- confIgnore.dir = dir
- confIgnore.name = "confIgnore"
-
- var defIgnore = ["build/"]
- defIgnore.dir = dir
-
- // TODO: only look these up once, and cache outside this function
- excludes.parseIgnoreFile( userIgnore, null, dir
- , function (er, uex) {
- if (er) return cb(er)
- userExclude = uex
- next()
- })
- excludes.parseIgnoreFile( globalIgnore, null, dir
- , function (er, gex) {
- if (er) return cb(er)
- globalExclude = gex
- next()
- })
+ var fst = fs.createReadStream(tarball)
- function next () {
- if (!globalExclude || !userExclude) return
- var exList = [ defIgnore, confIgnore, globalExclude, userExclude ]
-
- makeList_(dir, pkg, exList, dfc, function (er, files, cleanup) {
- if (er) return cb(er)
- var dirLen = dir.replace(/(\/|\\)$/, "").length + 1
- log.silly([dir, dirLen], "dir, dirLen")
- files = files.map(function (file) {
- return path.join(name, file.substr(dirLen))
- })
- return cb(null, files, cleanup)
- })
+ // figure out who we're supposed to be, if we're not pretending
+ // to be a specific user.
+ if (npm.config.get("unsafe-perm") && process.platform !== "win32") {
+ uid = myUid
+ gid = myGid
}
-}
-
-// Patterns ending in slashes will only match targets
-// ending in slashes. To implement this, add a / to
-// the filename iff it lstats isDirectory()
-function readDir (dir, pkg, dfc, cb) {
- fs.readdir(dir, function (er, files) {
- if (er) return cb(er)
- files = files.filter(function (f) {
- return f && f.charAt(0) !== "/" && f.indexOf("\0") === -1
- })
- asyncMap(files, function (file, cb) {
- fs.lstat(path.resolve(dir, file), function (er, st) {
- if (er) return cb(null, [])
- // if it's a directory, then tack "/" onto the name
- // so that it can match dir-only patterns in the
- // include/exclude logic later.
- if (st.isDirectory()) return cb(null, file + "/")
-
- // if it's a symlink, then we need to do some more
- // complex stuff for GH-691
- if (st.isSymbolicLink()) return readSymlink(dir, file, pkg, dfc, cb)
-
- // otherwise, just let it on through.
- return cb(null, file)
- })
- }, cb)
- })
-}
-
-// just see where this link is pointing, and resolve relative paths.
-function shallowReal (link, cb) {
- link = path.resolve(link)
- fs.readlink(link, function (er, t) {
- if (er) return cb(er)
- return cb(null, path.resolve(path.dirname(link), t), t)
- })
-}
-
-function readSymlink (dir, file, pkg, dfc, cb) {
- var isNM = dfc
- && path.basename(dir) === "node_modules"
- && path.dirname(dir) === pkg.path
- // see if this thing is pointing outside of the package.
- // external symlinks are resolved for deps, ignored for other things.
- // internal symlinks are allowed through.
- var df = path.resolve(dir, file)
- shallowReal(df, function (er, r, target) {
- if (er) return cb(null, []) // wtf? exclude file.
- if (r.indexOf(dir) === 0) return cb(null, file) // internal
- if (!isNM) return cb(null, []) // external non-dep
- // now the fun stuff!
- fs.realpath(df, function (er, resolved) {
- if (er) return cb(null, []) // can't add it.
- readJson(path.resolve(resolved, "package.json"), function (er) {
- if (er) return cb(null, []) // not a package
- resolveLinkDep(dir, file, resolved, target, pkg, function (er, f, c) {
- cb(er, f, c)
- })
- })
- })
- })
-}
-
-// put the link back the way it was.
-function cleanupResolveLinkDep (cleanup, cb) {
- // cut it out of the list, so that cycles will be broken.
- if (!cleanup) return cb()
- asyncMap(cleanup, function (d, cb) {
- rm(d[1], function (er) {
- if (er) return cb(er)
- fs.symlink(d[0], d[1], cb)
- })
- }, cb)
-}
-
-function resolveLinkDep (dir, file, resolved, target, pkg, cb) {
- // we've already decided that this is a dep that will be bundled.
- // make sure the data reflects this.
- var bd = pkg.bundleDependencies || pkg.bundledDependencies || []
- delete pkg.bundledDependencies
- pkg.bundleDependencies = bd
- var f = path.resolve(dir, file)
- , cleanup = [[target, f, resolved]]
+ function extractEntry (entry) {
+ log.silly(entry.path, "extracting entry")
+ // never create things that are user-unreadable,
+ // or dirs that are user-un-listable. Only leads to headaches.
+ var originalMode = entry.mode = entry.mode || entry.props.mode
+ entry.mode = entry.mode | (entry.type === "Directory" ? dMode : fMode)
+ entry.mode = entry.mode & (~npm.modes.umask)
+ entry.props.mode = entry.mode
+ if (originalMode !== entry.mode) {
+ log.silly([entry.path, originalMode, entry.mode], "modified mode")
+ }
- if (bd.indexOf(file) === -1) {
- // then we don't do this one.
- // just move the symlink out of the way.
- return rm(f, function (er) {
- cb(er, file, cleanup)
- })
+ // if there's a specific owner uid/gid that we want, then set that
+ if (process.platform !== "win32" &&
+ typeof uid === "number" &&
+ typeof gid === "number") {
+ entry.props.uid = entry.uid = uid
+ entry.props.gid = entry.gid = gid
+ }
}
- rm(f, function (er) {
- if (er) return cb(er)
- cache.add(resolved, function (er, data) {
- if (er) return cb(er)
- cache.unpack(data.name, data.version, f, function (er, data) {
- if (er) return cb(er)
- // now clear out the cache entry, since it's weird, probably.
- // pass the cleanup object along so that the thing getting the
- // list of files knows what to clean up afterwards.
- cache.clean([data._id], function (er) { cb(er, file, cleanup) })
- })
- })
- })
-}
-
-// exList is a list of ignore lists.
-// Each exList item is an array of patterns of files to ignore
-//
-function makeList_ (dir, pkg, exList, dfc, cb) {
- var files = null
- , cleanup = null
+ var extractOpts = { type: "Directory", path: target, strip: 1 }
- readDir(dir, pkg, dfc, function (er, f, c) {
- if (er) return cb(er)
- cleanup = c
- files = f.map(function (f) {
- // no nulls in paths!
- return f.split(/\0/)[0]
- }).filter(function (f) {
- // always remove all source control folders and
- // waf/vim/OSX garbage. this is a firm requirement.
- return !( f === ".git/"
- || f === ".lock-wscript"
- || f === "CVS/"
- || f === ".svn/"
- || f === ".hg/"
- || f.match(/^\..*\.swp/)
- || f === ".DS_Store"
- || f.match(/^\._/)
- || f === "npm-debug.log"
- || f === ""
- || f.charAt(0) === "/"
- )
- })
-
- // if (files.length > 0) files.push(".")
-
- if (files.indexOf("package.json") !== -1 && dir !== pkg.path) {
- // a package.json file starts the whole exclude/include
- // logic all over. Otherwise, a parent could break its
- // deps with its files list or .npmignore file.
- readJson(path.resolve(dir, "package.json"), function (er, data) {
- if (!er && typeof data === "object") {
- data.path = dir
- return makeList(dir, data, dfc, function (er, files) {
- // these need to be mounted onto the directory now.
- cb(er, files && files.map(function (f) {
- return path.resolve(path.dirname(dir), f)
- }))
- })
- }
- next()
- })
- //next()
- } else next()
+ if (process.platform !== "win32" &&
+ typeof uid === "number" &&
+ typeof gid === "number") {
+ extractOpts.uid = uid
+ extractOpts.gid = gid
+ }
- // add a local ignore file, if found.
- if (files.indexOf(".npmignore") === -1
- && files.indexOf(".gitignore") === -1) next()
- else {
- excludes.addIgnoreFile( path.resolve(dir, ".npmignore")
- , ".gitignore"
- , exList
- , dir
- , function (er, list) {
- if (!er) exList = list
- next(er)
- })
+ extractOpts.filter = function () {
+ // symbolic links are not allowed in packages.
+ if (this.type.match(/^.*Link$/)) {
+ log.warn( this.path.substr(target.length + 1)
+ + ' -> ' + this.linkpath
+ , "excluding symbolic link")
+ return false
}
- })
+ return true
+ }
- var n = 2
- , errState = null
- function next (er) {
- if (errState) return
- if (er) return cb(errState = er, [], cleanup)
- if (-- n > 0) return
- if (!pkg) return cb(new Error("No package.json file in "+dir))
- if (pkg.path === dir && pkg.files) {
- pkg.files = pkg.files.filter(function (f) {
- f = f.trim()
- return f && f.charAt(0) !== "#"
- })
- if (!pkg.files.length) pkg.files = null
- }
- if (pkg.path === dir && pkg.files) {
- // stuff on the files list MUST be there.
- // ignore everything, then include the stuff on the files list.
- var pkgFiles = ["*"].concat(pkg.files.map(function (f) {
- return "!" + f
- }))
- pkgFiles.dir = dir
- pkgFiles.packageFiles = true
- exList.push(pkgFiles)
- }
-
- if (path.basename(dir) === "node_modules"
- && pkg.path === path.dirname(dir)
- && dfc) { // do fancy crap
- files = filterNodeModules(files, pkg)
+ fst.on("error", log.er(cb, "error reading "+tarball))
+ fst.on("data", function OD (c) {
+ // detect what it is.
+ // Then, depending on that, we'll figure out whether it's
+ // a single-file module, gzipped tarball, or naked tarball.
+ // gzipped files all start with 1f8b08
+ if (c[0] === 0x1F &&
+ c[1] === 0x8B &&
+ c[2] === 0x08) {
+ fst
+ .pipe(zlib.Unzip())
+ .on("error", log.er(cb, "unzip error "+tarball))
+ .pipe(tar.Extract(extractOpts))
+ .on("entry", extractEntry)
+ .on("error", log.er(cb, "untar error "+tarball))
+ .on("close", cb)
+ } else if (c.toString().match(/^package\//)) {
+ // naked tar
+ fst
+ .pipe(tar.Extract(extractOpts))
+ .on("entry", extractEntry)
+ .on("error", log.er(cb, "untar error "+tarball))
+ .on("close", cb)
} else {
- // If a directory is excluded, we still need to be
- // able to *include* a file within it, and have that override
- // the prior exclusion.
- //
- // This whole makeList thing probably needs to be rewritten
- files = files.filter(function (f) {
- return excludes.filter(dir, exList)(f) || f.slice(-1) === "/"
- })
- }
-
-
- asyncMap(files, function (file, cb) {
- // if this is a dir, then dive into it.
- // otherwise, don't.
- file = path.resolve(dir, file)
-
- // in 0.6.0, fs.readdir can produce some really odd results.
- // XXX: remove this and make the engines hash exclude 0.6.0
- if (file.indexOf(dir) !== 0) {
- return cb(null, [])
+ // naked js file
+ var jsOpts = { path: path.resolve(target, "index.js") }
+
+ if (process.platform !== "win32" &&
+ typeof uid === "number" &&
+ typeof gid === "number") {
+ jsOpts.uid = uid
+ jsOpts.gid = gid
}
- fs.lstat(file, function (er, st) {
- if (er) return cb(er)
- if (st.isDirectory()) {
- return makeList_(file, pkg, exList, dfc, cb)
- }
- return cb(null, file)
- })
- }, function (er, files, c) {
- if (c) cleanup = (cleanup || []).concat(c)
- if (files.length > 0) files.push(dir)
- return cb(er, files, cleanup)
- })
- }
-}
-
-// only include node_modules folder that are:
-// 1. not on the dependencies list or
-// 2. on the "bundleDependencies" list.
-function filterNodeModules (files, pkg) {
- var bd = pkg.bundleDependencies || pkg.bundledDependencies || []
- , deps = Object.keys(pkg.dependencies || {})
- .filter(function (key) { return !pkg.dependencies[key].extraneous })
- .concat(Object.keys(pkg.devDependencies || {}))
-
- delete pkg.bundledDependencies
- pkg.bundleDependencies = bd
+ fst
+ .pipe(fstream.Writer(jsOpts))
+ .on("error", log.er(cb, "copy error "+tarball))
+ .on("close", function () {
+ var j = path.resolve(target, "package.json")
+ readJson(j, function (er, d) {
+ if (er) {
+ log.error(tarball, "Not a package")
+ return cb(er)
+ }
+ fs.writeFile(j, JSON.stringify(d) + "\n", cb)
+ })
+ })
+ }
- return files.filter(function (f) {
- f = f.replace(/\/$/, "")
- return f.charAt(0) !== "."
- && f.charAt(0) !== "_"
- && bd.indexOf(f) !== -1
+ // now un-hook, and re-emit the chunk
+ fst.removeListener("data", OD)
+ fst.emit("data", c)
})
}
diff --git a/deps/npm/lib/utils/uid-number.js b/deps/npm/lib/utils/uid-number.js
deleted file mode 100644
index 3756275534..0000000000
--- a/deps/npm/lib/utils/uid-number.js
+++ /dev/null
@@ -1,55 +0,0 @@
-module.exports = uidNumber
-
-// This module calls into bin/npm-get-uid-gid.js, which sets the
-// uid and gid to the supplied argument, in order to find out their
-// numeric value. This can't be done in the main node process,
-// because otherwise npm would be running as that user.
-
-var exec = require("./exec.js")
- , path = require("path")
- , log = require("./log.js")
- , constants = require("constants")
- , npm = require("../npm.js")
- , uidSupport = process.getuid && process.setuid
- , uidCache = {}
- , gidCache = {}
-
-function uidNumber (uid, gid, cb) {
- if (!uidSupport || npm.config.get("unsafe-perm")) return cb()
- if (typeof cb !== "function") cb = gid, gid = null
- if (typeof cb !== "function") cb = uid, uid = null
- if (gid == null) gid = process.getgid()
- if (uid == null) uid = process.getuid()
- if (!isNaN(gid)) gid = +gid
- if (!isNaN(uid)) uid = +uid
-
- if (uidCache[uid]) uid = uidCache[uid]
- if (gidCache[gid]) gid = gidCache[gid]
-
- if (typeof gid === "number" && typeof uid === "number") {
- return cb(null, uid, gid)
- }
-
- var getter = path.join(__dirname, "..", "..", "bin", "npm-get-uid-gid.js")
- return exec( process.execPath, [getter, uid, gid], process.env, false
- , null, process.getuid(), process.getgid()
- , function (er, code, out, err) {
- if (er) return log.er(cb, "Could not get uid/gid "+err)(er)
- log.silly(out, "output from getuid/gid")
- out = JSON.parse(out+"")
- if (out.error) {
- if (!npm.config.get("unsafe-perm")) {
- var er = new Error(out.error)
- er.errno = out.errno
- return cb(er)
- } else {
- return cb(null, +process.getuid(), +process.getgid())
- }
- }
- if (isNaN(out.uid) || isNaN(out.gid)) return cb(new Error(
- "Could not get uid/gid: "+JSON.stringify(out)))
- uidCache[uid] = out.uid
- uidCache[gid] = out.gid
- cb(null, out.uid, out.gid)
- })
-}
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
index f13d9e1b2e..febb56d730 100644
--- a/deps/npm/lib/version.js
+++ b/deps/npm/lib/version.js
@@ -63,6 +63,6 @@ function checkGit (data, cb) {
}
function write (data, cb) {
fs.writeFile( path.join(process.cwd(), "package.json")
- , new Buffer(JSON.stringify(data, null, 2))
+ , new Buffer(JSON.stringify(data, null, 2) + "\n")
, cb )
}
diff --git a/deps/npm/lib/view.js b/deps/npm/lib/view.js
index 3e39f76de9..33a5d0df80 100644
--- a/deps/npm/lib/view.js
+++ b/deps/npm/lib/view.js
@@ -70,6 +70,7 @@ function view (args, silent, cb) {
data.versions = Object.keys(data.versions).sort(semver.compare)
if (!args.length) args = [""]
+ // remove readme unless we asked for it
if (-1 === args.indexOf("readme")) {
delete data.readme
}
@@ -81,6 +82,10 @@ function view (args, silent, cb) {
delete versions[v]
}
if (semver.satisfies(v, version)) args.forEach(function (args) {
+ // remove readme unless we asked for it
+ if (-1 === args.indexOf("readme")) {
+ delete versions[v].readme
+ }
results.push(showFields(data, versions[v], args))
})
})