author     Kat Marchán <kzm@sykosomatic.org>  2015-08-07 21:49:12 -0700
committer  James M Snell <jasnell@gmail.com>  2015-08-21 15:27:53 -0400
commit     f6935658139e91254866b65660fcbd9375e0d11b (patch)
tree       fee7127ccd3e70f7a26d37a1d190d48e1854b8d3
parent     35bbe984017c6f82ab6c57bea22da10d25413c30 (diff)
download   node-f6935658139e91254866b65660fcbd9375e0d11b.tar.gz
deps: upgrade to npm 2.13.4

PR-URL: https://github.com/joyent/node/pull/25825
Reviewed-By: James M Snell <jasnell@gmail.com>
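A minimal sanity check for this upgrade, assuming a node source checkout with this commit applied as the working directory (the path and expected output below follow from the commit, not from any additional tooling): the bundled npm version can be read directly from the vendored package.json.

    node -p "require('./deps/npm/package.json').version"   # should print 2.13.4 after this upgrade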
-rw-r--r--deps/npm/.travis.yml2
-rw-r--r--deps/npm/AUTHORS14
-rw-r--r--deps/npm/CHANGELOG.md537
-rw-r--r--deps/npm/doc/api/npm-ping.md14
-rw-r--r--deps/npm/doc/cli/npm-install.md19
-rw-r--r--deps/npm/doc/cli/npm-ping.md16
-rw-r--r--deps/npm/doc/cli/npm-run-script.md7
-rw-r--r--deps/npm/doc/cli/npm-version.md49
-rw-r--r--deps/npm/doc/files/npmrc.md3
-rw-r--r--deps/npm/doc/files/package.json.md52
-rw-r--r--deps/npm/doc/misc/npm-faq.md1
-rw-r--r--deps/npm/doc/misc/npm-index.md8
-rw-r--r--deps/npm/html/doc/README.html4
-rw-r--r--deps/npm/html/doc/api/npm-bin.html2
-rw-r--r--deps/npm/html/doc/api/npm-bugs.html2
-rw-r--r--deps/npm/html/doc/api/npm-cache.html2
-rw-r--r--deps/npm/html/doc/api/npm-commands.html2
-rw-r--r--deps/npm/html/doc/api/npm-config.html2
-rw-r--r--deps/npm/html/doc/api/npm-deprecate.html2
-rw-r--r--deps/npm/html/doc/api/npm-docs.html2
-rw-r--r--deps/npm/html/doc/api/npm-edit.html2
-rw-r--r--deps/npm/html/doc/api/npm-explore.html2
-rw-r--r--deps/npm/html/doc/api/npm-help-search.html2
-rw-r--r--deps/npm/html/doc/api/npm-init.html2
-rw-r--r--deps/npm/html/doc/api/npm-install.html2
-rw-r--r--deps/npm/html/doc/api/npm-link.html2
-rw-r--r--deps/npm/html/doc/api/npm-load.html2
-rw-r--r--deps/npm/html/doc/api/npm-ls.html2
-rw-r--r--deps/npm/html/doc/api/npm-outdated.html2
-rw-r--r--deps/npm/html/doc/api/npm-owner.html2
-rw-r--r--deps/npm/html/doc/api/npm-pack.html2
-rw-r--r--deps/npm/html/doc/api/npm-ping.html (renamed from deps/npm/html/doc/api/npm-submodule.html)31
-rw-r--r--deps/npm/html/doc/api/npm-prefix.html2
-rw-r--r--deps/npm/html/doc/api/npm-prune.html2
-rw-r--r--deps/npm/html/doc/api/npm-publish.html2
-rw-r--r--deps/npm/html/doc/api/npm-rebuild.html2
-rw-r--r--deps/npm/html/doc/api/npm-repo.html2
-rw-r--r--deps/npm/html/doc/api/npm-restart.html2
-rw-r--r--deps/npm/html/doc/api/npm-root.html2
-rw-r--r--deps/npm/html/doc/api/npm-run-script.html2
-rw-r--r--deps/npm/html/doc/api/npm-search.html2
-rw-r--r--deps/npm/html/doc/api/npm-shrinkwrap.html2
-rw-r--r--deps/npm/html/doc/api/npm-start.html2
-rw-r--r--deps/npm/html/doc/api/npm-stop.html2
-rw-r--r--deps/npm/html/doc/api/npm-tag.html2
-rw-r--r--deps/npm/html/doc/api/npm-test.html2
-rw-r--r--deps/npm/html/doc/api/npm-uninstall.html2
-rw-r--r--deps/npm/html/doc/api/npm-unpublish.html2
-rw-r--r--deps/npm/html/doc/api/npm-update.html2
-rw-r--r--deps/npm/html/doc/api/npm-version.html2
-rw-r--r--deps/npm/html/doc/api/npm-view.html2
-rw-r--r--deps/npm/html/doc/api/npm-whoami.html2
-rw-r--r--deps/npm/html/doc/api/npm.html4
-rw-r--r--deps/npm/html/doc/cli/npm-access.html2
-rw-r--r--deps/npm/html/doc/cli/npm-adduser.html2
-rw-r--r--deps/npm/html/doc/cli/npm-bin.html2
-rw-r--r--deps/npm/html/doc/cli/npm-bugs.html2
-rw-r--r--deps/npm/html/doc/cli/npm-build.html2
-rw-r--r--deps/npm/html/doc/cli/npm-bundle.html2
-rw-r--r--deps/npm/html/doc/cli/npm-cache.html2
-rw-r--r--deps/npm/html/doc/cli/npm-completion.html2
-rw-r--r--deps/npm/html/doc/cli/npm-config.html2
-rw-r--r--deps/npm/html/doc/cli/npm-dedupe.html2
-rw-r--r--deps/npm/html/doc/cli/npm-deprecate.html2
-rw-r--r--deps/npm/html/doc/cli/npm-dist-tag.html2
-rw-r--r--deps/npm/html/doc/cli/npm-docs.html2
-rw-r--r--deps/npm/html/doc/cli/npm-edit.html2
-rw-r--r--deps/npm/html/doc/cli/npm-explore.html2
-rw-r--r--deps/npm/html/doc/cli/npm-help-search.html2
-rw-r--r--deps/npm/html/doc/cli/npm-help.html2
-rw-r--r--deps/npm/html/doc/cli/npm-init.html2
-rw-r--r--deps/npm/html/doc/cli/npm-install.html23
-rw-r--r--deps/npm/html/doc/cli/npm-link.html2
-rw-r--r--deps/npm/html/doc/cli/npm-logout.html2
-rw-r--r--deps/npm/html/doc/cli/npm-ls.html4
-rw-r--r--deps/npm/html/doc/cli/npm-outdated.html2
-rw-r--r--deps/npm/html/doc/cli/npm-owner.html2
-rw-r--r--deps/npm/html/doc/cli/npm-pack.html2
-rw-r--r--deps/npm/html/doc/cli/npm-ping.html (renamed from deps/npm/html/doc/cli/npm-submodule.html)28
-rw-r--r--deps/npm/html/doc/cli/npm-prefix.html2
-rw-r--r--deps/npm/html/doc/cli/npm-prune.html2
-rw-r--r--deps/npm/html/doc/cli/npm-publish.html2
-rw-r--r--deps/npm/html/doc/cli/npm-rebuild.html2
-rw-r--r--deps/npm/html/doc/cli/npm-repo.html2
-rw-r--r--deps/npm/html/doc/cli/npm-restart.html2
-rw-r--r--deps/npm/html/doc/cli/npm-rm.html2
-rw-r--r--deps/npm/html/doc/cli/npm-root.html2
-rw-r--r--deps/npm/html/doc/cli/npm-run-script.html7
-rw-r--r--deps/npm/html/doc/cli/npm-search.html2
-rw-r--r--deps/npm/html/doc/cli/npm-shrinkwrap.html2
-rw-r--r--deps/npm/html/doc/cli/npm-star.html2
-rw-r--r--deps/npm/html/doc/cli/npm-stars.html2
-rw-r--r--deps/npm/html/doc/cli/npm-start.html2
-rw-r--r--deps/npm/html/doc/cli/npm-stop.html2
-rw-r--r--deps/npm/html/doc/cli/npm-tag.html2
-rw-r--r--deps/npm/html/doc/cli/npm-test.html2
-rw-r--r--deps/npm/html/doc/cli/npm-uninstall.html2
-rw-r--r--deps/npm/html/doc/cli/npm-unpublish.html2
-rw-r--r--deps/npm/html/doc/cli/npm-update.html2
-rw-r--r--deps/npm/html/doc/cli/npm-version.html50
-rw-r--r--deps/npm/html/doc/cli/npm-view.html2
-rw-r--r--deps/npm/html/doc/cli/npm-whoami.html2
-rw-r--r--deps/npm/html/doc/cli/npm.html10
-rw-r--r--deps/npm/html/doc/files/npm-folders.html2
-rw-r--r--deps/npm/html/doc/files/npm-global.html2
-rw-r--r--deps/npm/html/doc/files/npm-json.html53
-rw-r--r--deps/npm/html/doc/files/npmrc.html4
-rw-r--r--deps/npm/html/doc/files/package.json.html53
-rw-r--r--deps/npm/html/doc/index.html6
-rw-r--r--deps/npm/html/doc/misc/npm-coding-style.html2
-rw-r--r--deps/npm/html/doc/misc/npm-config.html2
-rw-r--r--deps/npm/html/doc/misc/npm-developers.html2
-rw-r--r--deps/npm/html/doc/misc/npm-disputes.html8
-rw-r--r--deps/npm/html/doc/misc/npm-faq.html5
-rw-r--r--deps/npm/html/doc/misc/npm-index.html6
-rw-r--r--deps/npm/html/doc/misc/npm-registry.html2
-rw-r--r--deps/npm/html/doc/misc/npm-scope.html2
-rw-r--r--deps/npm/html/doc/misc/npm-scripts.html2
-rw-r--r--deps/npm/html/doc/misc/removing-npm.html2
-rw-r--r--deps/npm/html/doc/misc/semver.html2
-rw-r--r--deps/npm/html/partial/doc/README.html2
-rw-r--r--deps/npm/html/partial/doc/api/npm-ping.html8
-rw-r--r--deps/npm/html/partial/doc/api/npm-submodule.html21
-rw-r--r--deps/npm/html/partial/doc/api/npm.html2
-rw-r--r--deps/npm/html/partial/doc/cli/npm-install.html21
-rw-r--r--deps/npm/html/partial/doc/cli/npm-ls.html2
-rw-r--r--deps/npm/html/partial/doc/cli/npm-ping.html11
-rw-r--r--deps/npm/html/partial/doc/cli/npm-run-script.html5
-rw-r--r--deps/npm/html/partial/doc/cli/npm-submodule.html21
-rw-r--r--deps/npm/html/partial/doc/cli/npm-version.html48
-rw-r--r--deps/npm/html/partial/doc/cli/npm.html8
-rw-r--r--deps/npm/html/partial/doc/files/npm-json.html51
-rw-r--r--deps/npm/html/partial/doc/files/npmrc.html2
-rw-r--r--deps/npm/html/partial/doc/files/package.json.html51
-rw-r--r--deps/npm/html/partial/doc/index.html4
-rw-r--r--deps/npm/html/partial/doc/misc/npm-disputes.html6
-rw-r--r--deps/npm/html/partial/doc/misc/npm-faq.html3
-rw-r--r--deps/npm/html/partial/doc/misc/npm-index.html4
-rw-r--r--deps/npm/lib/cache.js6
-rw-r--r--deps/npm/lib/cache/add-named.js6
-rw-r--r--deps/npm/lib/cache/add-remote-git.js36
-rw-r--r--deps/npm/lib/cache/get-stat.js74
-rw-r--r--deps/npm/lib/install.js5
-rw-r--r--deps/npm/lib/npm.js4
-rw-r--r--deps/npm/lib/outdated.js2
-rw-r--r--deps/npm/lib/ping.js20
-rw-r--r--deps/npm/lib/utils/correct-mkdir.js110
-rw-r--r--deps/npm/lib/utils/lifecycle.js5
-rw-r--r--deps/npm/lib/utils/locker.js32
-rw-r--r--deps/npm/lib/version.js177
-rw-r--r--deps/npm/lib/view.js8
-rw-r--r--deps/npm/man/man1/npm-README.138
-rw-r--r--deps/npm/man/man1/npm-access.116
-rw-r--r--deps/npm/man/man1/npm-adduser.122
-rw-r--r--deps/npm/man/man1/npm-bin.12
-rw-r--r--deps/npm/man/man1/npm-bugs.110
-rw-r--r--deps/npm/man/man1/npm-build.18
-rw-r--r--deps/npm/man/man1/npm-bundle.16
-rw-r--r--deps/npm/man/man1/npm-cache.122
-rw-r--r--deps/npm/man/man1/npm-completion.18
-rw-r--r--deps/npm/man/man1/npm-config.110
-rw-r--r--deps/npm/man/man1/npm-dedupe.16
-rw-r--r--deps/npm/man/man1/npm-deprecate.16
-rw-r--r--deps/npm/man/man1/npm-dist-tag.120
-rw-r--r--deps/npm/man/man1/npm-docs.112
-rw-r--r--deps/npm/man/man1/npm-edit.114
-rw-r--r--deps/npm/man/man1/npm-explore.18
-rw-r--r--deps/npm/man/man1/npm-help-search.16
-rw-r--r--deps/npm/man/man1/npm-help.18
-rw-r--r--deps/npm/man/man1/npm-init.14
-rw-r--r--deps/npm/man/man1/npm-install.1161
-rw-r--r--deps/npm/man/man1/npm-link.122
-rw-r--r--deps/npm/man/man1/npm-logout.18
-rw-r--r--deps/npm/man/man1/npm-ls.114
-rw-r--r--deps/npm/man/man1/npm-outdated.12
-rw-r--r--deps/npm/man/man1/npm-owner.12
-rw-r--r--deps/npm/man/man1/npm-pack.14
-rw-r--r--deps/npm/man/man1/npm-ping.123
-rw-r--r--deps/npm/man/man1/npm-prefix.18
-rw-r--r--deps/npm/man/man1/npm-prune.110
-rw-r--r--deps/npm/man/man1/npm-publish.124
-rw-r--r--deps/npm/man/man1/npm-rebuild.16
-rw-r--r--deps/npm/man/man1/npm-repo.110
-rw-r--r--deps/npm/man/man1/npm-restart.14
-rw-r--r--deps/npm/man/man1/npm-rm.12
-rw-r--r--deps/npm/man/man1/npm-root.14
-rw-r--r--deps/npm/man/man1/npm-run-script.135
-rw-r--r--deps/npm/man/man1/npm-search.16
-rw-r--r--deps/npm/man/man1/npm-shrinkwrap.162
-rw-r--r--deps/npm/man/man1/npm-star.12
-rw-r--r--deps/npm/man/man1/npm-stars.12
-rw-r--r--deps/npm/man/man1/npm-start.12
-rw-r--r--deps/npm/man/man1/npm-stop.12
-rw-r--r--deps/npm/man/man1/npm-submodule.141
-rw-r--r--deps/npm/man/man1/npm-tag.118
-rw-r--r--deps/npm/man/man1/npm-test.14
-rw-r--r--deps/npm/man/man1/npm-uninstall.114
-rw-r--r--deps/npm/man/man1/npm-unpublish.16
-rw-r--r--deps/npm/man/man1/npm-update.178
-rw-r--r--deps/npm/man/man1/npm-version.170
-rw-r--r--deps/npm/man/man1/npm-view.110
-rw-r--r--deps/npm/man/man1/npm-whoami.14
-rw-r--r--deps/npm/man/man1/npm.150
-rw-r--r--deps/npm/man/man3/npm-bin.34
-rw-r--r--deps/npm/man/man3/npm-bugs.34
-rw-r--r--deps/npm/man/man3/npm-cache.36
-rw-r--r--deps/npm/man/man3/npm-commands.36
-rw-r--r--deps/npm/man/man3/npm-config.320
-rw-r--r--deps/npm/man/man3/npm-deprecate.312
-rw-r--r--deps/npm/man/man3/npm-docs.34
-rw-r--r--deps/npm/man/man3/npm-edit.38
-rw-r--r--deps/npm/man/man3/npm-explore.34
-rw-r--r--deps/npm/man/man3/npm-help-search.32
-rw-r--r--deps/npm/man/man3/npm-init.32
-rw-r--r--deps/npm/man/man3/npm-install.32
-rw-r--r--deps/npm/man/man3/npm-link.38
-rw-r--r--deps/npm/man/man3/npm-load.34
-rw-r--r--deps/npm/man/man3/npm-ls.34
-rw-r--r--deps/npm/man/man3/npm-outdated.32
-rw-r--r--deps/npm/man/man3/npm-owner.32
-rw-r--r--deps/npm/man/man3/npm-pack.34
-rw-r--r--deps/npm/man/man3/npm-ping.317
-rw-r--r--deps/npm/man/man3/npm-prefix.32
-rw-r--r--deps/npm/man/man3/npm-prune.32
-rw-r--r--deps/npm/man/man3/npm-publish.36
-rw-r--r--deps/npm/man/man3/npm-rebuild.36
-rw-r--r--deps/npm/man/man3/npm-repo.34
-rw-r--r--deps/npm/man/man3/npm-restart.36
-rw-r--r--deps/npm/man/man3/npm-root.34
-rw-r--r--deps/npm/man/man3/npm-run-script.32
-rw-r--r--deps/npm/man/man3/npm-search.32
-rw-r--r--deps/npm/man/man3/npm-shrinkwrap.32
-rw-r--r--deps/npm/man/man3/npm-start.34
-rw-r--r--deps/npm/man/man3/npm-stop.34
-rw-r--r--deps/npm/man/man3/npm-submodule.341
-rw-r--r--deps/npm/man/man3/npm-tag.36
-rw-r--r--deps/npm/man/man3/npm-test.36
-rw-r--r--deps/npm/man/man3/npm-uninstall.32
-rw-r--r--deps/npm/man/man3/npm-unpublish.32
-rw-r--r--deps/npm/man/man3/npm-update.36
-rw-r--r--deps/npm/man/man3/npm-version.32
-rw-r--r--deps/npm/man/man3/npm-view.310
-rw-r--r--deps/npm/man/man3/npm-whoami.34
-rw-r--r--deps/npm/man/man3/npm.350
-rw-r--r--deps/npm/man/man5/npm-folders.5116
-rw-r--r--deps/npm/man/man5/npm-global.5116
-rw-r--r--deps/npm/man/man5/npm-json.5250
-rw-r--r--deps/npm/man/man5/npmrc.531
-rw-r--r--deps/npm/man/man5/package.json.5250
-rw-r--r--deps/npm/man/man7/npm-coding-style.730
-rw-r--r--deps/npm/man/man7/npm-config.7286
-rw-r--r--deps/npm/man/man7/npm-developers.794
-rw-r--r--deps/npm/man/man7/npm-disputes.732
-rw-r--r--deps/npm/man/man7/npm-faq.7132
-rw-r--r--deps/npm/man/man7/npm-index.78
-rw-r--r--deps/npm/man/man7/npm-registry.716
-rw-r--r--deps/npm/man/man7/npm-scope.732
-rw-r--r--deps/npm/man/man7/npm-scripts.790
-rw-r--r--deps/npm/man/man7/removing-npm.78
-rw-r--r--deps/npm/man/man7/semver.7252
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/.npmignore (renamed from deps/npm/node_modules/graceful-fs/.npmignore)0
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/LICENSE (renamed from deps/npm/node_modules/rimraf/node_modules/glob/LICENSE)0
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/max-open.js (renamed from deps/npm/node_modules/graceful-fs/test/max-open.js)0
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/open.js (renamed from deps/npm/node_modules/graceful-fs/test/open.js)0
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/readdir-sort.js (renamed from deps/npm/node_modules/graceful-fs/test/readdir-sort.js)0
-rw-r--r--deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/write-then-read.js (renamed from deps/npm/node_modules/graceful-fs/test/write-then-read.js)0
-rwxr-xr-xdeps/npm/node_modules/columnify/node_modules/strip-ansi/cli.js (renamed from deps/npm/node_modules/strip-ansi/cli.js)0
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/index.js (renamed from deps/npm/node_modules/strip-ansi/index.js)0
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/index.js (renamed from deps/npm/node_modules/ansi-regex/index.js)0
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/license (renamed from deps/npm/node_modules/ansi-regex/license)0
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/package.json77
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/readme.md (renamed from deps/npm/node_modules/ansi-regex/readme.md)0
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/package.json69
-rw-r--r--deps/npm/node_modules/columnify/node_modules/strip-ansi/readme.md (renamed from deps/npm/node_modules/strip-ansi/readme.md)0
-rw-r--r--deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/node_modules/clone/package.json3
-rw-r--r--deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/package.json3
-rw-r--r--deps/npm/node_modules/columnify/node_modules/wcwidth/package.json3
-rw-r--r--deps/npm/node_modules/columnify/package.json3
-rw-r--r--deps/npm/node_modules/dezalgo/.travis.yml7
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/CHANGES.md63
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/LICENSE.md3
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/README.md236
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/asap.js162
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/browser-asap.js66
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/browser-raw.js220
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/package.json77
-rw-r--r--deps/npm/node_modules/dezalgo/node_modules/asap/raw.js101
-rw-r--r--deps/npm/node_modules/dezalgo/package.json37
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/fstream-npm/fstream-npm.js4
-rw-r--r--deps/npm/node_modules/fstream-npm/package.json14
-rw-r--r--deps/npm/node_modules/fstream/examples/filter-pipe.js19
-rw-r--r--deps/npm/node_modules/fstream/examples/pipe.js17
-rw-r--r--deps/npm/node_modules/fstream/examples/symlink-write.js2
-rw-r--r--deps/npm/node_modules/fstream/lib/abstract.js32
-rw-r--r--deps/npm/node_modules/fstream/lib/dir-writer.js4
-rw-r--r--deps/npm/node_modules/fstream/lib/file-reader.js10
-rw-r--r--deps/npm/node_modules/fstream/lib/link-reader.js2
-rw-r--r--deps/npm/node_modules/fstream/lib/link-writer.js6
-rw-r--r--deps/npm/node_modules/fstream/lib/reader.js24
-rw-r--r--deps/npm/node_modules/fstream/lib/writer.js18
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/fstream/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/fstream/package.json42
-rw-r--r--deps/npm/node_modules/glob/README.md1
-rw-r--r--deps/npm/node_modules/glob/glob.js5
-rw-r--r--deps/npm/node_modules/glob/package.json23
-rw-r--r--deps/npm/node_modules/glob/sync.js3
-rw-r--r--deps/npm/node_modules/graceful-fs/LICENSE2
-rw-r--r--deps/npm/node_modules/graceful-fs/fs.js32
-rw-r--r--deps/npm/node_modules/graceful-fs/graceful-fs.js323
-rw-r--r--deps/npm/node_modules/graceful-fs/legacy-streams.js118
-rw-r--r--deps/npm/node_modules/graceful-fs/package.json67
-rw-r--r--deps/npm/node_modules/graceful-fs/polyfills.js263
-rw-r--r--deps/npm/node_modules/init-package-json/.travis.yml5
-rw-r--r--deps/npm/node_modules/init-package-json/default-input.js3
-rw-r--r--deps/npm/node_modules/init-package-json/node_modules/promzard/package.json2
-rw-r--r--deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md7
-rw-r--r--deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md29
-rw-r--r--deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js26
-rw-r--r--deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore5
-rw-r--r--deps/npm/node_modules/init-package-json/package.json48
-rw-r--r--deps/npm/node_modules/init-package-json/test/basic.js2
-rw-r--r--deps/npm/node_modules/init-package-json/test/license.js7
-rw-r--r--deps/npm/node_modules/init-package-json/test/name-spaces.js7
-rw-r--r--deps/npm/node_modules/init-package-json/test/name-uppercase.js7
-rw-r--r--deps/npm/node_modules/init-package-json/test/scope-in-config-existing-name.js30
-rw-r--r--deps/npm/node_modules/init-package-json/test/scope-in-config.js18
-rw-r--r--deps/npm/node_modules/init-package-json/test/scope.js3
-rw-r--r--deps/npm/node_modules/init-package-json/test/yes-defaults.js2
-rw-r--r--deps/npm/node_modules/lru-cache/.travis.yml8
-rw-r--r--deps/npm/node_modules/lru-cache/README.md2
-rw-r--r--deps/npm/node_modules/lru-cache/package.json23
-rw-r--r--deps/npm/node_modules/lru-cache/test/foreach.js1
-rw-r--r--deps/npm/node_modules/lru-cache/test/memory-leak.js1
-rw-r--r--deps/npm/node_modules/minimatch/browser.js64
-rw-r--r--deps/npm/node_modules/minimatch/minimatch.js59
-rw-r--r--deps/npm/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json4
-rw-r--r--deps/npm/node_modules/minimatch/package.json27
-rw-r--r--deps/npm/node_modules/node-gyp/History.md16
-rw-r--r--deps/npm/node_modules/node-gyp/README.md15
-rw-r--r--deps/npm/node_modules/node-gyp/addon.gypi2
-rw-r--r--deps/npm/node_modules/node-gyp/lib/configure.js2
-rw-r--r--deps/npm/node_modules/node-gyp/lib/install.js4
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/package.json72
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/path-array/node_modules/array-index/package.json3
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/path-array/package.json3
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/.npmignore4
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/.travis.yml5
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/Makefile (renamed from deps/npm/node_modules/semver/Makefile)0
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/README.md303
-rwxr-xr-xdeps/npm/node_modules/node-gyp/node_modules/semver/bin/semver133
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/foot.js.txt (renamed from deps/npm/node_modules/semver/foot.js.txt)0
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/head.js.txt (renamed from deps/npm/node_modules/semver/head.js.txt)1
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/package.json55
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/semver.browser.js (renamed from deps/npm/node_modules/semver/semver.browser.js)0
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/semver.browser.js.gz (renamed from deps/npm/node_modules/semver/semver.browser.js.gz)  bin  7992 -> 7992 bytes
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/semver.js1205
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/semver.min.js (renamed from deps/npm/node_modules/semver/semver.min.js)0
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/semver.min.js.gz (renamed from deps/npm/node_modules/semver/semver.min.js.gz)  bin  3790 -> 3790 bytes
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/amd.js (renamed from deps/npm/node_modules/semver/test/amd.js)0
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/big-numbers.js31
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/clean.js29
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/gtr.js173
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/index.js685
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/ltr.js181
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/major-minor-patch.js72
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/test/no-module.js (renamed from deps/npm/node_modules/semver/test/no-module.js)1
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/package.json3
-rw-r--r--deps/npm/node_modules/node-gyp/package.json20
-rw-r--r--deps/npm/node_modules/nopt/.travis.yml9
-rw-r--r--deps/npm/node_modules/nopt/README.md7
-rw-r--r--deps/npm/node_modules/nopt/package.json23
-rw-r--r--deps/npm/node_modules/normalize-git-url/.eslintrc19
-rw-r--r--deps/npm/node_modules/normalize-git-url/normalize-git-url.js33
-rw-r--r--deps/npm/node_modules/normalize-git-url/package.json10
-rw-r--r--deps/npm/node_modules/normalize-git-url/test/basic.js62
-rw-r--r--deps/npm/node_modules/normalize-package-data/README.md2
-rw-r--r--deps/npm/node_modules/normalize-package-data/lib/fixer.js18
-rw-r--r--deps/npm/node_modules/normalize-package-data/lib/warning_messages.json2
-rw-r--r--deps/npm/node_modules/normalize-package-data/package.json16
-rw-r--r--deps/npm/node_modules/normalize-package-data/test/normalize.js2
-rw-r--r--deps/npm/node_modules/npm-cache-filename/index.js8
-rw-r--r--deps/npm/node_modules/npm-cache-filename/package.json42
-rw-r--r--deps/npm/node_modules/npm-cache-filename/test.js2
-rw-r--r--deps/npm/node_modules/npm-install-checks/package.json44
-rw-r--r--deps/npm/node_modules/npm-package-arg/package.json14
-rw-r--r--deps/npm/node_modules/npm-registry-client/lib/ping.js23
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/.npmignore1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.npmignore5
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.travis.yml39
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/LICENSE18
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/README.md36
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown1651
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md58
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/duplex.js1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js82
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js27
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js959
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js197
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js520
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md3
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch604
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js107
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json37
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/util.js106
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md54
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/build/build.js208
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json19
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js3
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json38
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml (renamed from deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.travis.yml)2
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js13
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json45
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md18
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js17
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore2
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE20
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md7
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js221
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json34
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md11
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE24
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md53
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js62
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js6
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json53
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/package.json75
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/passthrough.js1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/readable.js12
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/transform.js1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/writable.js1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/package.json25
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/readme.md10
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/array.js12
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/buffer.js31
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/infer.js15
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/nothing.js25
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/objects.js29
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/server/ls.js16
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/string.js76
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/typedarray.js33
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/package.json72
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/npm-registry-client/package.json18
-rw-r--r--deps/npm/node_modules/npm-registry-client/test/fixtures/@npm/npm-registry-client/cache.json1
-rw-r--r--deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/cache.json1
-rw-r--r--deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/package.tgz  bin  58692 -> 0 bytes
-rw-r--r--deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/cache.json1
-rw-r--r--deps/npm/node_modules/npm-registry-client/test/get-403.js29
-rw-r--r--deps/npm/node_modules/npm-registry-client/test/ping.js75
-rw-r--r--deps/npm/node_modules/osenv/node_modules/os-homedir/index.js24
-rw-r--r--deps/npm/node_modules/osenv/node_modules/os-homedir/license (renamed from deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/License)4
-rw-r--r--deps/npm/node_modules/osenv/node_modules/os-homedir/package.json70
-rw-r--r--deps/npm/node_modules/osenv/node_modules/os-homedir/readme.md33
-rw-r--r--deps/npm/node_modules/osenv/node_modules/os-tmpdir/package.json5
-rw-r--r--deps/npm/node_modules/osenv/osenv.js5
-rw-r--r--deps/npm/node_modules/osenv/package.json42
-rw-r--r--deps/npm/node_modules/osenv/x.tap39
-rw-r--r--deps/npm/node_modules/read-installed/.travis.yml9
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/debuglog/package.json2
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/package.json72
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/readdir-scoped-modules/package.json2
-rw-r--r--deps/npm/node_modules/read-installed/node_modules/util-extend/package.json4
-rw-r--r--deps/npm/node_modules/read-installed/package.json18
-rw-r--r--deps/npm/node_modules/read-installed/read-installed.js16
-rw-r--r--deps/npm/node_modules/read-installed/test/issue-40.js15
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/request/.eslintrc45
-rw-r--r--deps/npm/node_modules/request/.travis.yml6
-rw-r--r--deps/npm/node_modules/request/CHANGELOG.md23
-rw-r--r--deps/npm/node_modules/request/README.md21
-rwxr-xr-xdeps/npm/node_modules/request/index.js25
-rw-r--r--deps/npm/node_modules/request/lib/copy.js10
-rw-r--r--deps/npm/node_modules/request/lib/helpers.js19
-rw-r--r--deps/npm/node_modules/request/lib/oauth.js9
-rw-r--r--deps/npm/node_modules/request/lib/redirect.js3
-rw-r--r--deps/npm/node_modules/request/lib/tunnel.js183
-rw-r--r--deps/npm/node_modules/request/node_modules/aws-sign2/package.json20
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/.jshintrc59
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/.travis.yml7
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/README.md2
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml39
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.zuul.yml1
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md32
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown1651
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md58
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js43
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js23
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js451
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js41
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js302
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/core-util-is/package.json22
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json27
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml7
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js13
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json45
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/readme.md18
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/test.js17
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/string_decoder/package.json26
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/History.md11
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/LICENSE24
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/README.md53
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/browser.js62
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/node.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/package.json53
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json67
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/readable.js8
-rw-r--r--deps/npm/node_modules/request/node_modules/bl/package.json33
-rw-r--r--deps/npm/node_modules/request/node_modules/caseless/LICENSE28
-rw-r--r--deps/npm/node_modules/request/node_modules/caseless/index.js1
-rw-r--r--deps/npm/node_modules/request/node_modules/caseless/package.json31
-rw-r--r--deps/npm/node_modules/request/node_modules/combined-stream/Readme.md11
-rw-r--r--deps/npm/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/package.json2
-rw-r--r--deps/npm/node_modules/request/node_modules/combined-stream/package.json24
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/.jscs.json103
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/.npmignore1
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/.travis.yml44
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/CHANGELOG.md68
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/LICENSE22
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/README.md61
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/component.json31
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/index.js85
-rw-r--r--deps/npm/node_modules/request/node_modules/extend/package.json73
-rw-r--r--deps/npm/node_modules/request/node_modules/forever-agent/package.json33
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/lib/browser.js1
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jscsrc3
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jshintrc25
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/CHANGELOG.md81
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md658
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/bower.json38
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/component.json16
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js1423
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json58
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/form-data/node_modules/async/support/sync-package-managers.js4
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/Readme.md132
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/lib/combined_stream.js188
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/.npmignore2
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Makefile6
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Readme.md154
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/lib/delayed_stream.js99
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json42
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/common.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-http-upload.js36
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-auto-pause.js21
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-pause.js14
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream.js48
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-handle-source-errors.js15
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-max-data-size.js18
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-pipe-resumes.js13
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-proxy-readable.js13
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/run.js7
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json37
-rw-r--r--deps/npm/node_modules/request/node_modules/form-data/package.json70
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/LICENSE34
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/README.md2
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/error.js (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/error.js)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/index.js (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/index.js)2
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cache.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cache.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cacheEntry.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cacheEntry.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/content.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/content.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cookie.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cookie.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/creator.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/creator.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/entry.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/entry.json)5
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/har.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/har.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/index.js (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/index.js)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/log.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/log.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/page.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/page.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/pageTimings.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/pageTimings.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/postData.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/postData.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/record.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/record.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/request.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/request.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/response.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/response.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/timings.json (renamed from deps/npm/node_modules/request/node_modules/har-validator/src/schemas/timings.json)0
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/LICENSE2
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/README.md20
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/changelog.md47
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js647
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js10
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/async.js80
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/bind.js9
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/captured_trace.js3
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/catch_filter.js2
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/debuggability.js17
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/direct_resolve.js5
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/finally.js7
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/map.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js9
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js88
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise_array.js2
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js23
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/reduce.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/schedule.js5
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/thenables.js7
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/util.js67
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/package.json26
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/index.js46
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/index.js111
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json43
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/readme.md6
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json29
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/cli.js45
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/index.js4
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/license (renamed from deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/License)4
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/package.json (renamed from deps/npm/node_modules/ansi-regex/package.json)28
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/readme.md31
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/index.js49
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/package.json48
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/readme.md44
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json54
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/readme.md21
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/index.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/license21
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/index.js4
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/license21
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/package.json86
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/readme.md31
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/package.json (renamed from deps/npm/node_modules/strip-ansi/package.json)56
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/readme.md33
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/cli.js29
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/index.js21
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/package.json49
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/readme.md18
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json66
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/readme.md31
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/package.json23
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-function/package.json22
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/package.json23
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json23
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc30
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json22
-rw-r--r--deps/npm/node_modules/request/node_modules/har-validator/package.json42
-rw-r--r--deps/npm/node_modules/request/node_modules/hawk/.npmignore1
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/.travis.yml1
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/Makefile8
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/README.md5
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/example/usage.js2
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/index.js1
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/lib/browser.js14
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/lib/client.js10
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/lib/server.js52
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/lib/utils.js6
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/node_modules/boom/README.md20
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/node_modules/boom/index.js1
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/node_modules/boom/lib/index.js12
-rw-r--r--deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json22
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/node_modules/boom/test/index.js1
-rw-r--r--deps/npm/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json24
-rw-r--r--deps/npm/node_modules/request/node_modules/hawk/node_modules/sntp/package.json24
-rw-r--r--deps/npm/node_modules/request/node_modules/hawk/package.json47
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/browser.js44
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/client.js807
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/crypto.js110
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/message.js261
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/readme.js113
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/server.js1678
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/uri.js845
-rwxr-xr-xdeps/npm/node_modules/request/node_modules/hawk/test/utils.js175
-rw-r--r--deps/npm/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json30
-rw-r--r--deps/npm/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json23
-rw-r--r--deps/npm/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json27
-rw-r--r--deps/npm/node_modules/request/node_modules/isstream/.jshintrc59
-rw-r--r--deps/npm/node_modules/request/node_modules/isstream/package.json23
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md44
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/LICENSE37
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/README.md9
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/index.js217
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md46
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/README.md12
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json111
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json28
-rw-r--r--deps/npm/node_modules/request/node_modules/mime-types/package.json20
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/.jshintignore1
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/.jshintrc10
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/Makefile8
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/Readme.md18
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/index.js1
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/lib/parse.js42
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/lib/utils.js16
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/package.json27
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/test/parse.js247
-rw-r--r--deps/npm/node_modules/request/node_modules/qs/test/stringify.js18
-rw-r--r--deps/npm/node_modules/request/node_modules/stringstream/package.json26
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/README.md296
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js321
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/lib/memstore.js34
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/lib/store.js6
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/package.json14
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/public-suffix.txt374
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_sorting_test.js66
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_to_json_test.js4
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/test/ietf_test.js3
-rw-r--r--deps/npm/node_modules/request/node_modules/tough-cookie/test/jar_serialization_test.js348
-rw-r--r--deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc5
-rw-r--r--deps/npm/node_modules/request/node_modules/tunnel-agent/index.js21
-rw-r--r--deps/npm/node_modules/request/node_modules/tunnel-agent/package.json44
-rw-r--r--deps/npm/node_modules/request/package.json44
-rw-r--r--deps/npm/node_modules/request/request.js224
-rw-r--r--deps/npm/node_modules/rimraf/README.md4
-rwxr-xr-xdeps/npm/node_modules/rimraf/bin.js15
-rw-r--r--deps/npm/node_modules/rimraf/node_modules/glob/README.md369
-rw-r--r--deps/npm/node_modules/rimraf/node_modules/glob/common.js237
-rw-r--r--deps/npm/node_modules/rimraf/node_modules/glob/glob.js740
-rw-r--r--deps/npm/node_modules/rimraf/node_modules/glob/package.json72
-rw-r--r--deps/npm/node_modules/rimraf/node_modules/glob/sync.js457
-rw-r--r--deps/npm/node_modules/rimraf/package.json30
-rw-r--r--deps/npm/node_modules/semver/package.json28
-rw-r--r--deps/npm/node_modules/semver/semver.js8
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/sha/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/LICENSE174
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/README.md61
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/index.js74
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/LICENSE174
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md (renamed from deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md)0
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json (renamed from deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json)37
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js (renamed from deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js)0
-rw-r--r--deps/npm/node_modules/validate-npm-package-license/package.json (renamed from deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/package.json)37
-rw-r--r--deps/npm/node_modules/validate-npm-package-name/LICENSE6
-rw-r--r--deps/npm/node_modules/validate-npm-package-name/README.md45
-rw-r--r--deps/npm/node_modules/validate-npm-package-name/node_modules/builtins/package.json29
-rw-r--r--deps/npm/node_modules/validate-npm-package-name/package.json34
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/.npmignore1
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/LICENSE15
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/README.md36
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/fs.js11
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/graceful-fs.js158
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/package.json96
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/polyfills.js254
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/max-open.js69
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/open.js39
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/readdir-sort.js20
-rw-r--r--deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/write-then-read.js47
-rw-r--r--deps/npm/package.json57
-rw-r--r--deps/npm/scripts/release.sh6
-rw-r--r--deps/npm/test/fixtures/config/userconfig-with-gc2
-rw-r--r--deps/npm/test/tap/add-remote-git-get-resolved.js73
-rw-r--r--deps/npm/test/tap/peer-deps-invalid.js36
-rw-r--r--deps/npm/test/tap/ping.js68
-rw-r--r--deps/npm/test/tap/splat-with-only-prerelease-to-latest.js81
-rw-r--r--deps/npm/test/tap/version-git-not-clean.js104
-rw-r--r--deps/npm/test/tap/version-lifecycle.js107
828 files changed, 32429 insertions, 11798 deletions
diff --git a/deps/npm/.travis.yml b/deps/npm/.travis.yml
index e971a8553..d55568298 100644
--- a/deps/npm/.travis.yml
+++ b/deps/npm/.travis.yml
@@ -8,7 +8,7 @@ env:
- DEPLOY_VERSION=testing
before_install:
- "npm config set spin false"
- - "npm install -g npm/npm"
+ - "npm install -g npm/npm#2.x"
- "sudo mkdir -p /var/run/couchdb"
script: "npm run-script test-all"
notifications:
diff --git a/deps/npm/AUTHORS b/deps/npm/AUTHORS
index 92dee7cfc..5e822a9f8 100644
--- a/deps/npm/AUTHORS
+++ b/deps/npm/AUTHORS
@@ -288,3 +288,17 @@ Cedric Nelson <cedric.nelson@gmail.com>
Kat Marchán <kzm@sykosomatic.org>
Andrew <talktome@aboutandrew.co.uk>
Eduardo Pinho <enet4mikeenet@gmail.com>
+Rachel Hutchison <rhutchix@intel.com>
+Ryan Temple <ryantemple145@gmail.com>
+Eugene Sharygin <eush77@gmail.com>
+Nick Heiner <nick.heiner@opower.com>
+James Talmage <james@talmage.io>
+jane arc <jane@uber.com>
+Joseph Dykstra <josephdykstra@gmail.com>
+Joshua Egan <josh-egan@users.noreply.github.com>
+Thomas Cort <thomasc@ssimicro.com>
+Thaddee Tyl <thaddee.tyl@gmail.com>
+Steve Klabnik <steve@steveklabnik.com>
+Andrew Murray <radarhere@gmail.com>
+Stephan Bönnemann <stephan@excellenteasy.com>
+Kyle M. Tarplee <kyle.tarplee@numerica.us>
diff --git a/deps/npm/CHANGELOG.md b/deps/npm/CHANGELOG.md
index a2b62ecf1..90141a67c 100644
--- a/deps/npm/CHANGELOG.md
+++ b/deps/npm/CHANGELOG.md
@@ -1,3 +1,530 @@
+### v2.13.4 (2015-07-30):
+
+#### JULY ENDS ON A FAIRLY QUIET NOTE
+
+Hey everyone! I hope you've had a great week. We're having a fairly small
+release this week while we wrap up Teams and Orgs (or, as we've taken to calling
+it internally, _Teens and Orcs_).
+
+In other exciting news, a bunch of us are gonna be at the [Node.js Collaborator
+Summit](https://github.com/nodejs/summit/issues/1), and you can also find us at
+[wafflejs](https://wafflejs.com/) on Wednesday. Hopefully we'll be seeing some
+of you there. :)
+
+#### THE PATCH!!!
+
+So here it is. The patch. Hope it helps. (Thanks,
+[@ktarplee](https://github.com/ktarplee)!)
+
+* [`3e58c48`](https://github.com/npm/npm/commit/3e58c4819e3cafe4ae23ab7f4a520fe09258cfd7)
+ [#9033](https://github.com/npm/npm/pull/9033) `npm version` now works on git
+ submodules
+ ([@ktarplee](https://github.com/ktarplee))
+
+#### OH AND THERE'S A DEV DEPENDENCIES UPDATE
+
+Hooray.
+
+* [`d204683`](https://github.com/npm/npm/commit/d2046839d471322e61e3ceb0f00e78e5c481f967)
+ nock@2.9.1
+ ([@pgte](https://github.com/pgte))
+
+### v2.13.3 (2015-07-23):
+
+#### I'M SAVING THE GOOD JOKES FOR MORE INTERESTING RELEASES
+
+It's pretty hard to outdo last week's release buuuuut~ I promise I'll have a
+treat when we release our shiny new **Teams and Organizations** feature! :D
+(Coming Soon™). It'll be a real *gem*.
+
+That means it's a pretty low-key release this week. We got some nice
+documentation tweaks, a few bugfixes, and other such things, though!
+
+Oh, and a _bunch of version bumps_. Thanks, `semver`!
+
+#### IT'S THE LITTLE THINGS THAT MATTER
+
+* [`2fac6ae`](https://github.com/npm/npm/commit/2fac6aeffefba2934c3db395b525d931599c34d8)
+ [#9012](https://github.com/npm/npm/issues/9012) A convenience for releases --
+ using the globally-installed npm before now was causing minor annoyances, so
+ we just use the exact same npm we're releasing to build the new release.
+ ([@zkat](https://github.com/zkat))
+
+#### WHAT DOES THIS BUTTON DO?
+
+There's a couple of doc updates! The last one might be interesting.
+
+* [`4cd3205`](https://github.com/npm/npm/commit/4cd32050c0f89b7f1ae486354fa2c35eea302ba5)
+ [#9002](https://github.com/npm/npm/issues/9002) Updated docs to list the
+ various files that npm automatically includes and excludes, regardless of
+ settings.
+ ([@SimenB](https://github.com/SimenB))
+* [`cf09e75`](https://github.com/npm/npm/commit/cf09e754931739af32647d667b671e72a4c79081)
+ [#9022](https://github.com/npm/npm/issues/9022) Document the `"access"` field
+ in `"publishConfig"`. Did you know you don't need to use `--access=public`
+ when publishing scoped packages?! Just put it in your `package.json`!
+  Go refresh yourself on scoped packages by [checking our docs](https://docs.npmjs.com/getting-started/scoped-packages) on them.
+ ([@boennemann](https://github.com/boennemann))
+* [`bfd73da`](https://github.com/npm/npm/commit/bfd73da33349cc2afb8278953b2ae16ea95023de)
+ [#9013](https://github.com/npm/npm/issues/9013) fixed typo in changelog
+ ([@radarhere](https://github.com/radarhere))
+
+#### THE SEMVER MAJOR VERSION APOCALYPSE IS UPON US
+
+Basically, `semver` is up to `@5`, and that meant we needed to go in and update a
+bunch of our dependencies manually. `node-gyp` is still pending update, since
+it's not ours, though!
+
+* [`9232e58`](https://github.com/npm/npm/commit/9232e58d54c032c23716ef976023d36a42bfdcc9)
+ [#8972](https://github.com/npm/npm/issues/8972) `init-package-json@1.7.1`
+ ([@othiym23](https://github.com/othiym23))
+* [`ba44f6b`](https://github.com/npm/npm/commit/ba44f6b4201a4faee025341b123e372d8f45b6d9)
+ [#8972](https://github.com/npm/npm/issues/8972) `normalize-package-data@2.3.1`
+ ([@othiym23](https://github.com/othiym23))
+* [`3901d3c`](https://github.com/npm/npm/commit/3901d3cf191880bb4420b1d6b8aedbcd8fc26cdf)
+ [#8972](https://github.com/npm/npm/issues/8972) `npm-install-checks@1.0.6`
+ ([@othiym23](https://github.com/othiym23))
+* [`ffcc7dd`](https://github.com/npm/npm/commit/ffcc7dd12f8bb94ff0f64c465c57e460b3f24a24)
+ [#8972](https://github.com/npm/npm/issues/8972) `npm-package-arg@4.0.2`
+ ([@othiym23](https://github.com/othiym23))
+* [`7128f9e`](https://github.com/npm/npm/commit/7128f9ec10c0c8482087511b716dbddb54249626)
+ [#8972](https://github.com/npm/npm/issues/8972) `npm-registry-client@6.5.1`
+ ([@othiym23](https://github.com/othiym23))
+* [`af28911`](https://github.com/npm/npm/commit/af28911ecd54a844f848c6ae41887097d6aa2f3b)
+ [#8972](https://github.com/npm/npm/issues/8972) `read-installed@4.0.2`
+ ([@othiym23](https://github.com/othiym23))
+* [`3cc817a`](https://github.com/npm/npm/commit/3cc817a0f34f698b580ff6ff02308700efc54f7c)
+ [#8972](https://github.com/npm/npm/issues/8972) node-gyp needs its own version
+ of semver
+ ([@othiym23](https://github.com/othiym23))
+* [`f98eccc`](https://github.com/npm/npm/commit/f98eccc6e3a6699ca0aa9ecbad93a3b995583871)
+ [#8972](https://github.com/npm/npm/issues/8972) `semver@5.0.1`: Stop including
+ browser builds.
+ ([@isaacs](https://github.com/isaacs))
+
+#### \*BUMP\*
+
+And some other version bumps for good measure.
+
+* [`254ecfb`](https://github.com/npm/npm/commit/254ecfb04f026c2fd16427db01a53600c1892c8b)
+ [#8990](https://github.com/npm/npm/issues/8990) `marked-man@0.1.5`: Fixes an
+ issue with documentation rendering where backticks in 2nd-level headers would
+ break rendering (?!?!)
+ ([@steveklabnik](https://github.com/steveklabnik))
+* [`79efd79`](https://github.com/npm/npm/commit/79efd79ac216da8cee8636fb2ed926b0196a4eb6)
+ `minimatch@2.0.10`: A pattern like `'*.!(x).!(y)'` should not match a name
+ like `'a.xyz.yab'`.
+ ([@isaacs](https://github.com/isaacs))
+* [`39c7dc9`](https://github.com/npm/npm/commit/39c7dc9a4e17cd35a5ed882ba671821c9a900f9e)
+ `request@2.60.0`: A few bug fixes and doc updates.
+ ([@simov](https://github.com/simov))
+* [`72d3c3a`](https://github.com/npm/npm/commit/72d3c3a9e1e461608aa21b14c01a650333330da9)
+ `rimraf@2.4.2`: Minor doc and dep updates
+ ([@isaacs](https://github.com/isaacs))
+* [`7513035`](https://github.com/npm/npm/commit/75130356a06f5f4fbec3786aac9f9f0b36dfe010)
+ `nock@2.9.1`
+ ([@pgte](https://github.com/pgte))
+* [`3d9aa82`](https://github.com/npm/npm/commit/3d9aa82260f0643a32c13d0c1ed16f644b6fd4ab)
+ Fixes this thing where Kat decided to save `nock` as a regular dependency ;)
+ ([@othiym23](https://github.com/othiym23))
+
+### v2.13.2 (2015-07-16):
+
+#### HOLD ON TO YOUR TENTACLES... IT'S NPM RELEASE TIME!
+
+Kat: Hooray! Full team again, and we've got a pretty small patch release this
+week, about everyone's favorite recurring issue: git URLs!
+
+Rebecca: No Way! Again?
+
+Kat: The ride never ends! In the meantime, there's some fun, exciting work in
+the background to get orgs and teams out the door. Keep an eye out for news. :)
+
+Rebecca: And make sure to keep an eye out for patches for the super-fresh
+`npm@3`!
+
+#### LET'S GIT INKY
+
+Rebecca: So what's this about another git URL issue?
+
+Kat: Welp, I apparently broke backwards-compatibility on what are actually
+invalid `git+https` URLs! So I'm making it work, but we're gonna deprecate URLs
+that look like `git+https://user@host:path/is/here`.
+
+Rebecca: What should we use instead?!
+
+Kat: Just do me a solid and use `git+ssh://user@host:path/here` or
+`git+https://user@host/absolute/https/path` instead!
+
+* [`769f06e`](https://github.com/npm/npm/commit/769f06e5455d7a9fc738379de2e05868df0dab6f)
+ Updated tests for `getResolved` so the URLs are run through
+ `normalize-git-url`.
+ ([@zkat](https://github.com/zkat))
+* [`edbae68`](https://github.com/npm/npm/commit/edbae685bf48971e878ced373d6825fc1891ee47)
+ [#8881](https://github.com/npm/npm/issues/8881) Added tests to verify that `git+https:` URLs are handled compatibly.
+ ([@zkat](https://github.com/zkat))
+
+#### NEWS FLASH! DOCUMENTATION IMPROVEMENTS!
+
+* [`bad4e014`](https://github.com/npm/npm/commit/bad4e0143cc95754a682f1da543b2b4e196e924b)
+ [#8924](https://github.com/npm/npm/pull/8924) Make sure documented default
+ values in `lib/cache.js` properly correspond to current code.
+ ([@watilde](https://github.com/watilde))
+* [`e7a11fd`](https://github.com/npm/npm/commit/e7a11fdf70e333cdfe3dac94a1a30907adb76d59)
+ [#8036](https://github.com/npm/npm/issues/8036) Clarify the documentation for
+ `.npmrc` to clarify that it's not read at the project level when doing global
+ installs.
+ ([@espadrine](https://github.com/espadrine))
+
+#### STAY FRESH~
+
+Kat: That's it for npm core changes!
+
+Rebecca: Great! Let's look at the fresh new dependencies, then!
+
+Kat: See you all next week!
+
+Both: Stay Freeesh~
+
+(some cat form of Forrest can be seen snoring in the corner)
+
+* [`bfa1f45`](https://github.com/npm/npm/commit/bfa1f45ee760d05039557d2245b7e3df9fda8def)
+ `normalize-git-url@3.0.1`: Fixes url normalization such that `git+https:`
+  accepts scp syntax, but gets converted into absolute-path `https:` URLs. Also
+ fixes scp syntax so you can have absolute paths after the `:`
+ (`git@myhost.org:/some/absolute/place.git`)
+ ([@zkat](https://github.com/zkat))
+* [`6f757d2`](https://github.com/npm/npm/commit/6f757d22b53f91da0bebec6b5d16c1f4dbe130b4)
+ `glob@5.0.15`: Better handling of ENOTSUP
+ ([@isaacs](https://github.com/isaacs))
+* [`0920819`](https://github.com/npm/npm/commit/09208197fb8b0c6d5dbf6bd7f59970cf366de989)
+ `node-gyp@2.0.2`: Fixes an issue with long paths on Win32
+ ([@TooTallNate](https://github.com/TooTallNate))
+
+### v2.13.1 (2015-07-09):
+
+#### KAUAI WAS NICE. I MISS IT.
+
+But Forrest's still kinda on vacation, and not just mentally, because he's
+hanging out with the fine meatbags at CascadiaFest. Enjoy this small bug
+release.
+
+#### MAKE OURSELVES HAPPY
+
+* [`40981f2`](https://github.com/npm/npm/commit/40981f2e0c9c12bb003ccf188169afd1d201f5af)
+ [#8862](https://github.com/npm/npm/issues/8862) Make the lifecycle's safety
+ check work with scoped packages. ([@tcort](https://github.com/tcort))
+* [`5125856`](https://github.com/npm/npm/commit/512585622481dbbda9a0306932468d59efaff658)
+ [#8855](https://github.com/npm/npm/issues/8855) Make dependency versions of
+ `"*"` match `"latest"` when all versions are prerelease.
+ ([@iarna](https://github.com/iarna))
+* [`22fdc1d`](https://github.com/npm/npm/commit/22fdc1d52602ba7098af978c75fca8f7d1060141)
+ Visually emphasize the correct way to write lifecycle scripts.
+ ([@josh-egan](https://github.com/josh-egan))
+
+#### MAKE TRAVIS HAPPY
+
+* [`413c3ac`](https://github.com/npm/npm/commit/413c3ac2ab2437f3011c6ca0d1630109ec14e604)
+ Use npm's `2.x` branch for testing its `2.x` branch.
+ ([@iarna](https://github.com/iarna))
+* [`7602f64`](https://github.com/npm/npm/commit/7602f64826f7a465d9f3a20bd87a376d992607e6)
+ Don't prompt for GnuPG passphrase in version lifecycle tests.
+ ([@othiym23](https://github.com/othiym23))
+
+#### MAKE `npm outdated` HAPPY
+
+* [`d338668`](https://github.com/npm/npm/commit/d338668601d1ebe5247a26237106e80ea8cd7f48)
+ [#8796](https://github.com/npm/npm/issues/8796) `fstream-npm@1.0.4`: When packing the
+ package tarball, npm no longer crashes for packages with certain combinations of
+ `.npmignore` entries, `.gitignore` entries, and lifecycle scripts.
+ ([@iarna](https://github.com/iarna))
+* [`dbe7c9c`](https://github.com/npm/npm/commit/dbe7c9c74734be870d16dd61b9e7f746123011f6)
+ `nock@2.7.0`: Add matching based on query strings.
+ ([@othiym23](https://github.com/othiym23))
+
+There are new versions of `strip-ansi` and `ansi-regex`, but npm only uses them
+indirectly, so we pushed them down into their dependencies where they can get
+updated at their own pace.
+
+* [`06b6ca5`](https://github.com/npm/npm/commit/06b6ca5b5333025f10c8d901628859bd4678e027)
+ undeduplicate `ansi-regex` ([@othiym23](https://github.com/othiym23))
+* [`b168e33`](https://github.com/npm/npm/commit/b168e33ad46faf47020a45f72ba8cec8c644bdb9)
+ undeduplicate `strip-ansi` ([@othiym23](https://github.com/othiym23))
+
+### v2.13.0 (2015-07-02):
+
+#### FORREST IS OUT! LET'S SNEAK IN ALL THE THINGS!
+
+Well, not _everything_. Just a couple of goodies, like the new `npm ping`
+command, and the ability to add files to the commits created by `npm version`
+with the new version hooks. There's also a couple of bugfixes in `npm` itself
+and some of its dependencies. Here we go!
+
+#### YES HELLO THIS IS NPM REGISTRY SORRY NO DOG HERE
+
+Yes, that's right! We now have a dedicated `npm ping` command. It's super simple
+and super easy. You ping. We tell you whether you pinged right by saying hello
+right back. This should help out folks dealing with things like proxy issues or
+other registry-access debugging issues. Give it a shot!
+
+This addresses [#5750](https://github.com/npm/npm/issues/5750), and will help
+with the `npm doctor` stuff described in
+[#6756](https://github.com/npm/npm/issues/6756).
+
+* [`f1f7a85`](https://github.com/npm/npm/commit/f1f7a85)
+ Add ping command to CLI
+ ([@michaelnisi](https://github.com/michaelnisi))
+* [`8cec629`](https://github.com/npm/npm/commit/8cec629)
+ Add ping command to npm-registry-client
+ ([@michaelnisi](https://github.com/michaelnisi))
+* [`0c0c92d`](https://github.com/npm/npm/commit/0c0c92d)
+ Fixed ping command issues (added docs, tests, fixed minor bugs, etc)
+ ([@zkat](https://github.com/zkat))
+
+#### I'VE WANTED THIS FOR `version` SINCE LIKE LITERALLY FOREVER AND A DAY
+
+Seriously! This patch lets you add files to the `version` commit before it's
+made, so you can add additional metadata files, more automated changes to
+`package.json`, or even generate `CHANGELOG.md` automatically pre-commit if
+you're into that sort of thing. I'm so happy this is there I can't even. Do you
+have other fun usecases for this? Tell
+[npmbot (@npmjs)](http://twitter.com/npmjs) about it!
+
+* [`582f170`](https://github.com/npm/npm/commit/582f170)
+ [#8620](https://github.com/npm/npm/issues/8620) version: Allow scripts to add
+ files to the commit.
+ ([@jamestalmage](https://github.com/jamestalmage))
+
+#### ALL YOUR FILE DESCRIPTORS ARE BELONG TO US
+
+We've had problems in the past with things like `EMFILE` errors popping up when
+trying to install packages with a bunch of dependencies. Isaac patched up
+[`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to handle this case
+better, so we should be seeing fewer of those.
+
+* [`022691a`](https://github.com/npm/npm/commit/022691a)
+ `graceful-fs@4.1.2`: Updated so we can monkey patch globally.
+ ([@isaacs](https://github.com/isaacs))
+* [`c9fb0fd`](https://github.com/npm/npm/commit/c9fb0fd)
+ Globally monkey-patch graceful-fs. This should fix some errors when installing
+ packages with lots of dependencies.
+ ([@isaacs](https://github.com/isaacs))
+
+#### READ THE FINE DOCS. THEY'VE IMPROVED
+
+* [`5587d0d`](https://github.com/npm/npm/commit/5587d0d)
+ Nice clarification for `directories.bin`
+ ([@ujane](https://github.com/ujane))
+* [`20673c7`](https://github.com/npm/npm/commit/20673c7)
+ Hey, Windows folks! Check out
+ [`nvm-windows`](https://github.com/coreybutler/nvm-windows)
+ ([@ArtskydJ](https://github.com/ArtskydJ))
+
+#### MORE NUMBERS! MORE VALUE!
+
+* [`5afa2d5`](https://github.com/npm/npm/commit/5afa2d5)
+ `validate-npm-package-name@2.2.2`: Documented package name rules in README
+ ([@zeusdeux](https://github.com/zeusdeux))
+* [`021f4d9`](https://github.com/npm/npm/commit/021f4d9)
+ `rimraf@2.4.1`: [#74](https://github.com/isaacs/rimraf/issues/74) Use async
+ function for bin (to better handle Window's `EBUSY`)
+ ([@isaacs](https://github.com/isaacs))
+* [`5223432`](https://github.com/npm/npm/commit/5223432)
+ `osenv@0.1.3`: Use `os.homedir()` polyfill for more reliable output. io.js
+ added the function and the polyfill does a better job than the prior solution.
+ ([@sindresorhus](https://github.com/sindresorhus))
+* [`8ebbc90`](https://github.com/npm/npm/commit/8ebbc90)
+ `npm-cache-filename@1.0.2`: Make sure different git references get different
+ cache folders. This should prevent `foo/bar#v1.0` and `foo/bar#master` from
+ sharing the same cache folder.
+ ([@tomekwi](https://github.com/tomekwi))
+* [`367b854`](https://github.com/npm/npm/commit/367b854)
+ `lru-cache@2.6.5`: Minor test/typo changes
+ ([@isaacs](https://github.com/isaacs))
+* [`9fcae61`](https://github.com/npm/npm/commit/9fcae61)
+ `glob@5.0.13`: Tiny doc change + stop firing 'match' events for ignored items.
+ ([@isaacs](https://github.com/isaacs))
+
+#### OH AND ONE MORE THING
+
+* [`7827249`](https://github.com/npm/npm/commit/7827249)
+ `PeerDependencies` errors now include the package version.
+ ([@NickHeiner](https://github.com/NickHeiner))
+
+### v2.12.1 (2015-06-25):
+
+#### HEY WHERE DID EVERYBODY GO
+
+I keep [hearing some commotion](https://github.com/npm/npm/releases/tag/v3.0.0).
+Is there something going on? Like, a party or something? Anyway, here's a small
+release with at least two significant bug fixes, at least one of which some of
+you have been waiting for for quite a while.
+
+#### REMEMBER WHEN I SAID "REMEMBER WHEN I SAID THAT THING ABOUT PERMISSIONS?"?
+
+`npm@2.12.0` has a change that introduces a fix for a permissions problem
+whereby the `_locks` directory in the cache directory can end up being owned by
+root. The fix in 2.12.0 takes care of that problem, but introduces a new
+problem for Windows users where npm tries to call `process.getuid()`, which
+doesn't exist on Windows. It was easy enough to fix (but more or less
+impossible to test, thanks to all the external dependencies involved with
+permissions and platforms and whatnot), but as a result, Windows users might
+want to skip `npm@2.12.0` and go straight to `npm@2.12.1`. Sorry about that!
+
+* [`7e5da23`](https://github.com/npm/npm/commit/7e5da238ee869201fdb9027c27b79b0f76b440a8)
+ When using the new, "fixed" cache directory creator, be extra-careful to not
+ call `process.getuid()` on platforms that lack it.
+ ([@othiym23](https://github.com/othiym23))
+
+#### WHEW! ALL DONE FIXING GIT FOREVER!
+
+New npm CLI team hero [@zkat](https://github.com/zkat) has finally (FINALLY)
+fixed the regression somebody (hi!) introduced a couple months ago whereby git
+URLs of the format `git+ssh://user@githost.com:org/repo.git` suddenly stopped
+working, and also started being saved (and cached) incorrectly. I am 100% sure
+there are absolutely no more bugs in the git caching code at all ever. Mm hm.
+Yep. Pretty sure. Maybe. Hmm... I hope.
+
+*Sighs audibly.*
+
+[Let us know](http://github.com/npm/npm/issues/new) if we broke something else
+with this fix.
+
+* [`94ca4a7`](https://github.com/npm/npm/commit/94ca4a711619ba8e40ce3d20bc42b13cdb7611b7)
+ [#8031](https://github.com/npm/npm/issues/8031) Even though
+ `git+ssh://user@githost.com:org/repo.git` isn't a URL, treat it like one for
+ the purposes of npm. ([@zkat](https://github.com/zkat))
+* [`e7f56e5`](https://github.com/npm/npm/commit/e7f56e5a97fcf1c52d5c5bee71303b0126129815)
+ [#8031](https://github.com/npm/npm/issues/8031) `normalize-git-url@2.0.0`:
+ Handle git URLs (and URL-like remote refs) in a manner consistent with npm's
+ docs. ([@zkat](https://github.com/zkat))
+
+#### YEP, THERE ARE STILL DEPENDENCY UPGRADES
+
+* [`679bf47`](https://github.com/npm/npm/commit/679bf4745ac2cfbb01c9ce273e189807fd04fa33)
+ [#40](http://github.com/npm/read-installed/issues/40) `read-installed@4.0.1`:
+ Handle prerelease versions in top-level dependencies not in `package.json`
+ without marking those packages as invalid.
+ ([@benjamn](https://github.com/benjamn))
+* [`3a67410`](https://github.com/npm/npm/commit/3a6741068c9119174c920496778aeee870ebdac0)
+ `tap@1.3.1` ([@isaacs](https://github.com/isaacs))
+* [`151904a`](https://github.com/npm/npm/commit/151904af39dc24567f8c98529a2a64a4dbcc960a)
+ `nopt@3.0.3` ([@isaacs](https://github.com/isaacs))
+
+### v2.12.0 (2015-06-18):
+
+#### REMEMBER WHEN I SAID THAT THING ABOUT PERMISSIONS?
+
+About [a million people](https://github.com/npm/npm/issues?utf8=%E2%9C%93&q=is%3Aissue+EACCES+_locks)
+have filed issues related to having a tough time using npm after they've run
+npm once or twice with sudo. "Don't worry about it!" I said. "We've fixed all
+those permissions problems ages ago! Use this one weird trick and you'll never
+have to deal with this again!"
+
+Well, uh, if you run npm with root the first time you run npm on a machine, it
+turns out that the directory npm uses to store lockfiles ends up being owned by
+the wrong user (almost always root), and that can, well, it can cause problems
+sometimes. By which I mean every time you run npm without being root it'll barf
+with `EACCES` errors. Whoops!
+
+This is an obnoxious regression, and to prevent it from recurring, we've made
+it so that the cache, cached git remotes, and the lockfile directories are all
+created and maintained using the same utility module, which not only creates the
+relevant paths with the correct permissions, but will fix the permissions on
+those directories (if it can) when it notices that they're broken. An `npm
+install` run as root ought to be sufficient to fix things up (and if that
+doesn't work, first tell us about it, and then run `sudo chown -R $(whoami)
+$HOME/.npm`)
+
+Also, I apologize for inadvertently gaslighting any of you by claiming this bug
+wasn't actually a bug. I do think we've got this permanently dealt with now,
+but I'll be paying extra-close attention to permissions issues related to the
+cache for a while.
+
+* [`85d1a53`](https://github.com/npm/npm/commit/85d1a53d7b5e0fc04823187e522ae3711ede61fa)
+ Set permissions on lock directory to the owner of the process.
+ ([@othiym23](https://github.com/othiym23))
+
+#### I WENT TO NODECONF AND ALL I GOT WAS THIS LOUSY SPDX T-SHIRT
+
+That's not literally true. We spent very little time discussing SPDX,
+[@kemitchell](https://github.com/kemitchell) is a champ, and I had a lot of fun
+playing drum & bass to a mostly empty Boogie Barn and only ended up with one
+moderately severe cold for my pains. Another winner of a NodeConf! (I would
+probably wear a SPDX T-shirt if somebody gave me one, though.)
+
+A bunch of us did have a spirited discussion of the basics of open-source
+intellectual property, and the convergence of me,
+[@kemitchell](https://github.com/kemitchell), and
+[@jandrieu](https://github.com/jandrieu) in one place allowed us to hammer out
+a small but significant issue that had been bedeviling early adopters of the
+new SPDX expression syntax in `package.json` license fields: how to deal with
+packages that are left without a license on purpose.
+
+Refer to [the docs](https://github.com/npm/npm/blob/16a3dd545b10f8a2464e2037506ce39124739b41/doc/files/package.json.md#license)
+for the specifics, but the short version is that instead of using
+`LicenseRef-LICENSE` for proprietary licenses, you can now use either
+`UNLICENSED` if you want to make it clear that you don't _want_ your software
+to be licensed (and want npm to stop warning you about this), or `SEE LICENSE
+IN <filename>` if there's a license with custom text you want to use. At some
+point in the near term, we'll be updating npm to verify that the mentioned
+file actually exists, but for now you're all on the honor system.
+
+* [`4827fc7`](https://github.com/npm/npm/commit/4827fc784117c17f35dd9b51b21d1eff6094f661)
+ [#8557](https://github.com/npm/npm/issues/8557)
+ `normalize-package-data@2.2.1`: Allow `UNLICENSED` and `SEE LICENSE IN
+ <filename>` in "license" field of `package.json`.
+ ([@kemitchell](https://github.com/kemitchell))
+* [`16a3dd5`](https://github.com/npm/npm/commit/16a3dd545b10f8a2464e2037506ce39124739b41)
+ [#8557](https://github.com/npm/npm/issues/8557) Document the new accepted
+ values for the "license" field.
+ ([@kemitchell](https://github.com/kemitchell))
+* [`8155311`](https://github.com/npm/npm/commit/81553119350deaf199e79e38e35b52a5c8ad206c)
+ [#8557](https://github.com/npm/npm/issues/8557) `init-package-json@1.7.0`:
+ Support new "license" field values at init time.
+ ([@kemitchell](https://github.com/kemitchell))
+
+#### SMALLISH BUG FIXES
+
+* [`9d8cac9`](https://github.com/npm/npm/commit/9d8cac94a258db648a2b1069b1c8c6529c79d013)
+ [#8548](https://github.com/npm/npm/issues/8548) Remove extraneous newline
+ from `npm view` output, making it easier to use in shell scripts.
+ ([@eush77](https://github.com/eush77))
+* [`765fd4b`](https://github.com/npm/npm/commit/765fd4bfca8ea3e2a4a399765b17eec40a3d893d)
+ [#8521](https://github.com/npm/npm/issues/8521) When checking for outdated
+ packages, or updating packages, raise an error when the registry is
+ unreachable instead of silently "succeeding".
+ ([@ryantemple](https://github.com/ryantemple))
+
+#### SMALLERISH DOCUMENTATION TWEAKS
+
+* [`5018335`](https://github.com/npm/npm/commit/5018335ce1754a9f771954ecbc1a93acde9b8c0a)
+ [#8365](https://github.com/npm/npm/issues/8365) Add details about which git
+ environment variables are whitelisted by npm.
+ ([@nmalaguti](https://github.com/nmalaguti))
+* [`bed9edd`](https://github.com/npm/npm/commit/bed9edddfdcc6d22a80feab33b53e4ef9172ec72)
+ [#8554](https://github.com/npm/npm/issues/8554) Fix typo in version docs.
+ ([@rainyday](https://github.com/rainyday))
+
+#### WELL, I GUESS THERE ARE MORE DEPENDENCY UPGRADES
+
+* [`7ce2f06`](https://github.com/npm/npm/commit/7ce2f06f6f34d469b1d2e248084d4f3fef10c05e)
+ `request@2.58.0`: Refactor tunneling logic, and use `extend` instead of
+ abusing `util._extend`. ([@simov](https://github.com/simov))
+* [`e6c6195`](https://github.com/npm/npm/commit/e6c61954aad42e20eec49745615c7640b2026a6c)
+ `nock@2.6.0`: Refined interception behavior.
+ ([@pgte](https://github.com/pgte))
+* [`9583cc3`](https://github.com/npm/npm/commit/9583cc3cb192c2fced006927cfba7cd37b588605)
+ `fstream-npm@1.0.3`: Ensure that `main` entry in `package.json` is always
+ included in the bundled package tarball.
+ ([@coderhaoxin](https://github.com/coderhaoxin))
+* [`df89493`](https://github.com/npm/npm/commit/df894930f2716adac28740b29b2e863170919990)
+ `fstream@1.0.7` ([@isaacs](https://github.com/isaacs))
+* [`9744049`](https://github.com/npm/npm/commit/974404934758124aa8ae5b54f7d5257c3bd6b588)
+ `dezalgo@1.0.3`: `dezalgo` should be usable in the browser, and can be now
+ that `asap` has been upgraded to be browserifiable.
+ ([@mvayngrib](https://github.com/mvayngrib))
+
### v2.11.3 (2015-06-11):
This was a very quiet week. This release was done by
@@ -15,19 +542,19 @@ NodeConf Adventure!
* [`9f439da`](https://github.com/npm/npm/commit/9f439da)
`spdx@0.4.1`: License range updates
- (@kemitchell)[https://github.com/kemitchell]
+ ([@kemitchell](https://github.com/kemitchell))
* [`2dd055b`](https://github.com/npm/npm/commit/2dd055b)
`normalize-package-data@2.2.1`: Fixes a crashing bug when the package.json
`scripts` property is not an object.
- (@iarna)[https://github.com/iarna]
+ ([@iarna](https://github.com/iarna))
* [`e02e85d`](https://github.com/npm/npm/commit/e02e85d)
`osenv@0.1.2`: Switches to using the `os-tmpdir` module instead of
   `os.tmpdir()` for greater consistency in behavior between node versions.
- (@iarna)[https://github.com/iarna]
+ ([@iarna](https://github.com/iarna))
* [`a6f0265`](https://github.com/npm/npm/commit/a6f0265)
- `ini@1.3.4` (@isaacs)[https://github.com/isaacs]
+ `ini@1.3.4` ([@isaacs](https://github.com/isaacs))
* [`7395977`](https://github.com/npm/npm/commit/7395977)
- `rimraf@2.4.0` (@isaacs)[https://github.com/isaacs]
+ `rimraf@2.4.0` ([@isaacs](https://github.com/isaacs))
### v2.11.2 (2015-06-04):
diff --git a/deps/npm/doc/api/npm-ping.md b/deps/npm/doc/api/npm-ping.md
new file mode 100644
index 000000000..4357fe2ba
--- /dev/null
+++ b/deps/npm/doc/api/npm-ping.md
@@ -0,0 +1,14 @@
+npm-ping(3) -- Ping npm registry
+================================
+
+## SYNOPSIS
+
+ npm.registry.ping(registry, options, function (er, pong))
+
+## DESCRIPTION
+
+Attempts to connect to the given registry, returning a `pong`
+object with various metadata if it succeeds.
+
+This function is primarily useful for debugging connection issues
+to npm registries.
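
A minimal usage sketch of the API documented above, assuming npm@2.x's programmatic interface loaded via `npm.load()`; the registry URL is read from the loaded config and error handling is deliberately simple:

    // Sketch only: ping whatever registry the loaded npm config points at.
    var npm = require('npm')

    npm.load({}, function (er) {
      if (er) throw er
      var registry = npm.config.get('registry')
      npm.registry.ping(registry, {}, function (er, pong) {
        if (er) throw er
        // `pong` is whatever metadata the registry sent back.
        console.log('ping ok:', JSON.stringify(pong))
      })
    })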
diff --git a/deps/npm/doc/cli/npm-install.md b/deps/npm/doc/cli/npm-install.md
index b2f4e077b..be32f7b29 100644
--- a/deps/npm/doc/cli/npm-install.md
+++ b/deps/npm/doc/cli/npm-install.md
@@ -166,11 +166,24 @@ after packing it up into a tarball (b).
`git+https`. If no `<commit-ish>` is specified, then `master` is
used.
+ The following git environment variables are recognized by npm and will be added
+ to the environment when running git:
+
+ * `GIT_ASKPASS`
+ * `GIT_PROXY_COMMAND`
+ * `GIT_SSH`
+ * `GIT_SSH_COMMAND`
+ * `GIT_SSL_CAINFO`
+ * `GIT_SSL_NO_VERIFY`
+
+ See the git man page for details.
+
Examples:
- git+ssh://git@github.com:npm/npm.git#v1.0.27
- git+https://isaacs@github.com/npm/npm.git
- git://github.com/npm/npm.git#v1.0.27
+ npm install git+ssh://git@github.com:npm/npm.git#v1.0.27
+ npm install git+https://isaacs@github.com/npm/npm.git
+ npm install git://github.com/npm/npm.git#v1.0.27
+ GIT_SSH_COMMAND='ssh -i ~/.ssh/custom_ident' npm install git+ssh://git@github.com:npm/npm.git
* `npm install <githubname>/<githubrepo>[#<commit-ish>]`:
* `npm install github:<githubname>/<githubrepo>[#<commit-ish>]`:
diff --git a/deps/npm/doc/cli/npm-ping.md b/deps/npm/doc/cli/npm-ping.md
new file mode 100644
index 000000000..f0e628915
--- /dev/null
+++ b/deps/npm/doc/cli/npm-ping.md
@@ -0,0 +1,16 @@
+npm-ping(1) -- Ping npm registry
+================================
+
+## SYNOPSIS
+
+ npm ping [--registry <registry>]
+
+## DESCRIPTION
+
+Ping the configured or given npm registry and verify authentication.
+
+## SEE ALSO
+
+* npm-config(1)
+* npm-config(7)
+* npmrc(5)
diff --git a/deps/npm/doc/cli/npm-run-script.md b/deps/npm/doc/cli/npm-run-script.md
index 487bd5942..895e382f2 100644
--- a/deps/npm/doc/cli/npm-run-script.md
+++ b/deps/npm/doc/cli/npm-run-script.md
@@ -33,8 +33,11 @@ In addition to the shell's pre-existing `PATH`, `npm run` adds
`node_modules/.bin` to the `PATH` provided to scripts. Any binaries provided by
locally-installed dependencies can be used without the `node_modules/.bin`
prefix. For example, if there is a `devDependency` on `tap` in your package,
-you should write `"scripts": {"test": "tap test/\*.js"}` instead of `"scripts":
-{"test": "node_modules/.bin/tap test/\*.js"}` to run your tests.
+you should write:
+
+ "scripts": {"test": "tap test/\*.js"}
+
+instead of `"scripts": {"test": "node_modules/.bin/tap test/\*.js"}` to run your tests.
## SEE ALSO
diff --git a/deps/npm/doc/cli/npm-version.md b/deps/npm/doc/cli/npm-version.md
index 63a5e9512..0a00e78cd 100644
--- a/deps/npm/doc/cli/npm-version.md
+++ b/deps/npm/doc/cli/npm-version.md
@@ -11,14 +11,15 @@ Run this in a package directory to bump the version and write the new
data back to `package.json` and, if present, `npm-shrinkwrap.json`.
The `newversion` argument should be a valid semver string, *or* a
-valid second argument to semver.inc (one of "patch", "minor", "major",
-"prepatch", "preminor", "premajor", "prerelease"). In the second case,
+valid second argument to semver.inc (one of `patch`, `minor`, `major`,
+`prepatch`, `preminor`, `premajor`, `prerelease`). In the second case,
the existing version will be incremented by 1 in the specified field.
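
As an illustrative aside (not part of the documented text), the increment keywords listed above are exactly the ones accepted by `semver.inc()` from the bundled `semver` package, which makes the behavior easy to check from Node:

    // Sketch: how the `newversion` keywords map onto semver.inc().
    var semver = require('semver')

    console.log(semver.inc('1.2.3', 'patch'))      // '1.2.4'
    console.log(semver.inc('1.2.3', 'minor'))      // '1.3.0'
    console.log(semver.inc('1.2.3', 'premajor'))   // '2.0.0-0'
    console.log(semver.inc('1.2.3', 'prerelease')) // '1.2.4-0'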
-If run in a git repo, it will also create a version commit and tag, and fail if
-the repo is not clean. This behavior is controlled by `git-tag-version` (see
-below), and can be disabled on the command line by running `npm
---no-git-tag-version version`
+If run in a git repo, it will also create a version commit and tag.
+This behavior is controlled by `git-tag-version` (see below), and can
+be disabled on the command line by running `npm --no-git-tag-version version`.
+It will fail if the working directory is not clean, unless the `--force`
+flag is set.
If supplied with `--message` (shorthand: `-m`) config option, npm will
use it as a commit message when creating a version commit. If the
@@ -40,13 +41,35 @@ in your git config for this to work properly. For example:
Enter passphrase:
-If "preversion", "version", "postversion" in the "scripts" property of
-the package.json, it will execute by running `npm version`. preversion
-and version ware executed before bump the package version, postversion
-was executed after bump the package version. For example to run `npm version`
-after passed all test:
-
- "scripts": { "preversion": "npm test" }
+If `preversion`, `version`, or `postversion` are in the `scripts` property of
+the package.json, they will be executed as part of running `npm version`.
+
+The exact order of execution is as follows:
+ 1. Check to make sure the git working directory is clean before we get started.
+ Your scripts may add files to the commit in future steps.
+ This step is skipped if the `--force` flag is set.
+ 2. Run the `preversion` script. These scripts have access to the old `version` in package.json.
+ A typical use would be running your full test suite before deploying.
+ Any files you want added to the commit should be explicitly added using `git add`.
+ 3. Bump `version` in `package.json` as requested (`patch`, `minor`, `major`, etc).
+ 4. Run the `version` script. These scripts have access to the new `version` in package.json
+ (so they can incorporate it into file headers in generated files for example).
+ Again, scripts should explicitly add generated files to the commit using `git add`.
+ 5. Commit and tag.
+ 6. Run the `postversion` script. Use it to clean up the file system or automatically push
+ the commit and/or tag.
+
+Take the following example:
+
+ "scripts": {
+ "preversion": "npm test",
+ "version": "npm run build && git add -A dist",
+ "postversion": "git push && git push --tags && rm -rf build/temp"
+ }
+
+This runs all your tests, and proceeds only if they pass. It then runs your `build` script and
+adds everything in the `dist` directory to the commit. After the commit, it pushes the new commit
+and tag up to the server, and deletes the `build/temp` directory.
## CONFIGURATION
diff --git a/deps/npm/doc/files/npmrc.md b/deps/npm/doc/files/npmrc.md
index 4d068efc8..8aab60aff 100644
--- a/deps/npm/doc/files/npmrc.md
+++ b/deps/npm/doc/files/npmrc.md
@@ -52,6 +52,9 @@ running npm in. It has no effect when your module is published. For
example, you can't publish a module that forces itself to install
globally, or in a different location.
+Additionally, this file is not read in global mode, such as when running
+`npm install -g`.
+
### Per-user config file
`$HOME/.npmrc` (or the `userconfig` param, if set in the environment
diff --git a/deps/npm/doc/files/package.json.md b/deps/npm/doc/files/package.json.md
index ad2b6623a..b0d86d949 100644
--- a/deps/npm/doc/files/package.json.md
+++ b/deps/npm/doc/files/package.json.md
@@ -115,9 +115,9 @@ expression syntax version 2.0 string](http://npmjs.com/package/spdx), like this:
If you are using a license that hasn't been assigned an SPDX identifier, or if
you are using a custom license, use the following valid SPDX expression:
- { "license" : "LicenseRef-LICENSE" }
+ { "license" : "SEE LICENSE IN <filename>" }
-Then include a LICENSE file at the top level of the package.
+Then include a file named `<filename>` at the top level of the package.
Some old packages used license objects or a "licenses" property containing an
array of license objects:
@@ -147,6 +147,13 @@ Those styles are now deprecated. Instead, use SPDX expressions, like this:
{ "license": "(MIT OR Apache-2.0)" }
+Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:
+
+ { "license": "UNLICENSED"}
+
+Consider also setting `"private": true` to prevent accidental publication.
+
## people fields: author, contributors
The "author" is one person. "contributors" is an array of people. A "person"
@@ -176,6 +183,26 @@ which will keep files from being included, even if they would be picked
up by the files array. The ".npmignore" file works just like a
".gitignore".
+Certain files are always included, regardless of settings:
+
+* `package.json`
+* `README` (and its variants)
+* `CHANGELOG` (and its variants)
+* `LICENSE` / `LICENCE`
+
+Conversely, some files are always ignored:
+
+* `.git`
+* `CVS`
+* `.svn`
+* `.hg`
+* `.lock-wscript`
+* `.wafpickle-N`
+* `*.swp`
+* `.DS_Store`
+* `._*`
+* `npm-debug.log`
+
## main
The main field is a module ID that is the primary entry point to your program.
@@ -276,10 +303,13 @@ with the lib folder in any way, but it's useful meta info.
### directories.bin
-If you specify a `bin` directory, then all the files in that folder will
-be added as children of the `bin` path.
+If you specify a `bin` directory in `directories.bin`, all the files in
+that folder will be added.
-If you have a `bin` path already, then this has no effect.
+Because of the way the `bin` directive works, specifying both a
+`bin` path and setting `directories.bin` is an error. If you want to
+specify individual files, use `bin`, and for all the files in an
+existing `bin` directory, use `directories.bin`.
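
For illustration only (the package name and layout are hypothetical), a package that ships every executable in its `bin/` folder could declare:

    {
      "name": "my-tools",
      "version": "1.0.0",
      "directories": {
        "bin": "./bin"
      }
    }

Listing individual files under a top-level `bin` field is the alternative; per the text above, combining the two is an error.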
### directories.man
@@ -660,13 +690,13 @@ param at publish-time.
## publishConfig
-This is a set of config values that will be used at publish-time. It's
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with "latest" or published to
-the global public registry by default.
+This is a set of config values that will be used at publish-time. It's
+especially handy if you want to set the tag, registry, or access level, so that
+you can ensure that a given package is not tagged with "latest", is not published
+to the global public registry, and that a scoped module is private by default.
-Any config values can be overridden, but of course only "tag" and
-"registry" probably matter for the purposes of publishing.
+Any config values can be overridden, but of course only "tag", "registry" and
+"access" probably matter for the purposes of publishing.
See `npm-config(7)` to see the list of config options that can be
overridden.
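
As a sketch of what the passage above describes (the package name is a placeholder), a scoped package meant to be public and published under a non-default dist-tag might carry:

    {
      "name": "@myorg/widget",
      "version": "1.0.0",
      "publishConfig": {
        "access": "public",
        "tag": "next",
        "registry": "https://registry.npmjs.org/"
      }
    }

With that in place, a plain `npm publish` should pick up the access, tag, and registry settings without extra command-line flags.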
diff --git a/deps/npm/doc/misc/npm-faq.md b/deps/npm/doc/misc/npm-faq.md
index b9e837f76..557ec1a9c 100644
--- a/deps/npm/doc/misc/npm-faq.md
+++ b/deps/npm/doc/misc/npm-faq.md
@@ -279,6 +279,7 @@ Unix:
Windows:
* <http://github.com/marcelklehr/nodist>
+* <https://github.com/coreybutler/nvm-windows>
* <https://github.com/hakobera/nvmw>
* <https://github.com/nanjingboy/nvmw>
diff --git a/deps/npm/doc/misc/npm-index.md b/deps/npm/doc/misc/npm-index.md
index 8b9b69a48..837ef26ba 100644
--- a/deps/npm/doc/misc/npm-index.md
+++ b/deps/npm/doc/misc/npm-index.md
@@ -113,6 +113,10 @@ Manage package owners
Create a tarball from a package
+### npm-ping(1)
+
+Ping npm registry
+
### npm-prefix(1)
Display prefix
@@ -285,6 +289,10 @@ Manage package owners
Create a tarball from a package
+### npm-ping(3)
+
+Ping npm registry
+
### npm-prefix(3)
Display prefix
diff --git a/deps/npm/html/doc/README.html b/deps/npm/html/doc/README.html
index 58a7ad43f..e3793baeb 100644
--- a/deps/npm/html/doc/README.html
+++ b/deps/npm/html/doc/README.html
@@ -140,7 +140,7 @@ specific purpose, or lack of malice in any given npm package.</p>
<p>If you have a complaint about a package in the public npm registry,
and cannot <a href="https://docs.npmjs.com/misc/disputes">resolve it with the package
owner</a>, please email
-<a href="&#109;&#97;&#x69;&#x6c;&#116;&#111;&#58;&#115;&#x75;&#112;&#x70;&#x6f;&#114;&#116;&#64;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;">&#115;&#x75;&#112;&#x70;&#x6f;&#114;&#116;&#64;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;</a> and explain the situation.</p>
+<a href="&#x6d;&#97;&#x69;&#108;&#x74;&#x6f;&#x3a;&#115;&#117;&#x70;&#112;&#x6f;&#x72;&#116;&#x40;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#x63;&#111;&#109;">&#115;&#117;&#x70;&#112;&#x6f;&#x72;&#116;&#x40;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#x63;&#111;&#109;</a> and explain the situation.</p>
<p>Any data published to The npm Registry (including user account
information) may be removed or modified at the sole discretion of the
npm server administrators.</p>
@@ -183,5 +183,5 @@ will no doubt tell you to put the output in a gist or email.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer"><a href="../doc/README.html">README</a> &mdash; npm@2.11.3</p>
+<p id="footer"><a href="../doc/README.html">README</a> &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-bin.html b/deps/npm/html/doc/api/npm-bin.html
index dcc8dbdee..d4d5b5f6f 100644
--- a/deps/npm/html/doc/api/npm-bin.html
+++ b/deps/npm/html/doc/api/npm-bin.html
@@ -28,5 +28,5 @@ to the <code>npm.bin</code> property.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-bin &mdash; npm@2.11.3</p>
+<p id="footer">npm-bin &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-bugs.html b/deps/npm/html/doc/api/npm-bugs.html
index 9abf98e95..be307612d 100644
--- a/deps/npm/html/doc/api/npm-bugs.html
+++ b/deps/npm/html/doc/api/npm-bugs.html
@@ -33,5 +33,5 @@ friendly for programmatic use.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-bugs &mdash; npm@2.11.3</p>
+<p id="footer">npm-bugs &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-cache.html b/deps/npm/html/doc/api/npm-cache.html
index 218f93099..e05d3a4cc 100644
--- a/deps/npm/html/doc/api/npm-cache.html
+++ b/deps/npm/html/doc/api/npm-cache.html
@@ -42,5 +42,5 @@ incrementation.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-cache &mdash; npm@2.11.3</p>
+<p id="footer">npm-cache &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-commands.html b/deps/npm/html/doc/api/npm-commands.html
index b7074c74f..ccc10e3ff 100644
--- a/deps/npm/html/doc/api/npm-commands.html
+++ b/deps/npm/html/doc/api/npm-commands.html
@@ -36,5 +36,5 @@ usage, or <code>man 3 npm-&lt;command&gt;</code> for programmatic usage.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-commands &mdash; npm@2.11.3</p>
+<p id="footer">npm-commands &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-config.html b/deps/npm/html/doc/api/npm-config.html
index 8f9c9904f..411d4643d 100644
--- a/deps/npm/html/doc/api/npm-config.html
+++ b/deps/npm/html/doc/api/npm-config.html
@@ -57,5 +57,5 @@ functions instead.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-config &mdash; npm@2.11.3</p>
+<p id="footer">npm-config &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-deprecate.html b/deps/npm/html/doc/api/npm-deprecate.html
index 123c1905f..3ce4287aa 100644
--- a/deps/npm/html/doc/api/npm-deprecate.html
+++ b/deps/npm/html/doc/api/npm-deprecate.html
@@ -47,5 +47,5 @@ a deprecation warning to all who attempt to install it.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-deprecate &mdash; npm@2.11.3</p>
+<p id="footer">npm-deprecate &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-docs.html b/deps/npm/html/doc/api/npm-docs.html
index 0c1b8509c..a06e8bead 100644
--- a/deps/npm/html/doc/api/npm-docs.html
+++ b/deps/npm/html/doc/api/npm-docs.html
@@ -33,5 +33,5 @@ friendly for programmatic use.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-docs &mdash; npm@2.11.3</p>
+<p id="footer">npm-docs &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-edit.html b/deps/npm/html/doc/api/npm-edit.html
index 1c3ba0f7d..8c7471939 100644
--- a/deps/npm/html/doc/api/npm-edit.html
+++ b/deps/npm/html/doc/api/npm-edit.html
@@ -36,5 +36,5 @@ and how this is used.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-edit &mdash; npm@2.11.3</p>
+<p id="footer">npm-edit &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-explore.html b/deps/npm/html/doc/api/npm-explore.html
index 3f4534ad7..e3468bfc9 100644
--- a/deps/npm/html/doc/api/npm-explore.html
+++ b/deps/npm/html/doc/api/npm-explore.html
@@ -31,5 +31,5 @@ sure to use <code>npm rebuild &lt;pkg&gt;</code> if you make any changes.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-explore &mdash; npm@2.11.3</p>
+<p id="footer">npm-explore &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-help-search.html b/deps/npm/html/doc/api/npm-help-search.html
index eb7701580..b6d53b8d1 100644
--- a/deps/npm/html/doc/api/npm-help-search.html
+++ b/deps/npm/html/doc/api/npm-help-search.html
@@ -44,5 +44,5 @@ Name of the file that matched</li>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-help-search &mdash; npm@2.11.3</p>
+<p id="footer">npm-help-search &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-init.html b/deps/npm/html/doc/api/npm-init.html
index 55e914a20..b403b17d1 100644
--- a/deps/npm/html/doc/api/npm-init.html
+++ b/deps/npm/html/doc/api/npm-init.html
@@ -39,5 +39,5 @@ then go ahead and use this programmatically.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-init &mdash; npm@2.11.3</p>
+<p id="footer">npm-init &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-install.html b/deps/npm/html/doc/api/npm-install.html
index 84aed0de0..ad049be29 100644
--- a/deps/npm/html/doc/api/npm-install.html
+++ b/deps/npm/html/doc/api/npm-install.html
@@ -32,5 +32,5 @@ installed or when an error has been encountered.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-install &mdash; npm@2.11.3</p>
+<p id="footer">npm-install &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-link.html b/deps/npm/html/doc/api/npm-link.html
index ac8ff4f6c..a2bd29edc 100644
--- a/deps/npm/html/doc/api/npm-link.html
+++ b/deps/npm/html/doc/api/npm-link.html
@@ -42,5 +42,5 @@ the package in the current working directory</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-link &mdash; npm@2.11.3</p>
+<p id="footer">npm-link &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-load.html b/deps/npm/html/doc/api/npm-load.html
index 7f3348778..137a129d9 100644
--- a/deps/npm/html/doc/api/npm-load.html
+++ b/deps/npm/html/doc/api/npm-load.html
@@ -37,5 +37,5 @@ config object.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-load &mdash; npm@2.11.3</p>
+<p id="footer">npm-load &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-ls.html b/deps/npm/html/doc/api/npm-ls.html
index 07d913461..5c6abb144 100644
--- a/deps/npm/html/doc/api/npm-ls.html
+++ b/deps/npm/html/doc/api/npm-ls.html
@@ -63,5 +63,5 @@ dependency will only be output once.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-ls &mdash; npm@2.11.3</p>
+<p id="footer">npm-ls &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-outdated.html b/deps/npm/html/doc/api/npm-outdated.html
index a02620306..624a9eeb6 100644
--- a/deps/npm/html/doc/api/npm-outdated.html
+++ b/deps/npm/html/doc/api/npm-outdated.html
@@ -28,5 +28,5 @@ currently outdated.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-outdated &mdash; npm@2.11.3</p>
+<p id="footer">npm-outdated &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-owner.html b/deps/npm/html/doc/api/npm-owner.html
index 185c373df..65e54deca 100644
--- a/deps/npm/html/doc/api/npm-owner.html
+++ b/deps/npm/html/doc/api/npm-owner.html
@@ -47,5 +47,5 @@ that is not implemented at this time.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-owner &mdash; npm@2.11.3</p>
+<p id="footer">npm-owner &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-pack.html b/deps/npm/html/doc/api/npm-pack.html
index 6ea6ed1dd..2a453c9de 100644
--- a/deps/npm/html/doc/api/npm-pack.html
+++ b/deps/npm/html/doc/api/npm-pack.html
@@ -33,5 +33,5 @@ overwritten the second time.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-pack &mdash; npm@2.11.3</p>
+<p id="footer">npm-pack &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-submodule.html b/deps/npm/html/doc/api/npm-ping.html
index f4b5c85c6..b2339fc6a 100644
--- a/deps/npm/html/doc/api/npm-submodule.html
+++ b/deps/npm/html/doc/api/npm-ping.html
@@ -1,35 +1,22 @@
<!doctype html>
<html>
- <title>npm-submodule</title>
+ <title>npm-ping</title>
<meta http-equiv="content-type" value="text/html;utf-8">
<link rel="stylesheet" type="text/css" href="../../static/style.css">
- <link rel="canonical" href="https://www.npmjs.org/doc/api/npm-submodule.html">
+ <link rel="canonical" href="https://www.npmjs.org/doc/api/npm-ping.html">
<script async=true src="../../static/toc.js"></script>
<body>
<div id="wrapper">
-<h1><a href="../api/npm-submodule.html">npm-submodule</a></h1> <p>Add a package as a git submodule</p>
+<h1><a href="../api/npm-ping.html">npm-ping</a></h1> <p>Ping npm registry</p>
<h2 id="synopsis">SYNOPSIS</h2>
-<pre><code>npm.commands.submodule(packages, callback)
+<pre><code>npm.registry.ping(registry, options, function (er, pong))
</code></pre><h2 id="description">DESCRIPTION</h2>
-<p>For each package specified, npm will check if it has a git repository url
-in its package.json description then add it as a git submodule at
-<code>node_modules/&lt;pkg name&gt;</code>.</p>
-<p>This is a convenience only. From then on, it&#39;s up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a <code>.git</code> subfolder
-in it.</p>
-<p>This command also does not install missing dependencies, if the package
-does not include them in its git repository. If <code>npm ls</code> reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do <code>npm explore &lt;pkgname&gt; -- npm install</code> to install the
-dependencies into the submodule folder.</p>
-<h2 id="see-also">SEE ALSO</h2>
-<ul>
-<li>npm help json</li>
-<li>git help submodule</li>
-</ul>
+<p>Attempts to connect to the given registry, returning a <code>pong</code>
+object with various metadata if it succeeds.</p>
+<p>This function is primarily useful for debugging connection issues
+to npm registries.</p>
</div>
@@ -42,4 +29,4 @@ dependencies into the submodule folder.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-submodule &mdash; npm@1.4.28</p>
+<p id="footer">npm-ping &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-prefix.html b/deps/npm/html/doc/api/npm-prefix.html
index beb56a37f..c99917bd9 100644
--- a/deps/npm/html/doc/api/npm-prefix.html
+++ b/deps/npm/html/doc/api/npm-prefix.html
@@ -29,5 +29,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-prefix &mdash; npm@2.11.3</p>
+<p id="footer">npm-prefix &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-prune.html b/deps/npm/html/doc/api/npm-prune.html
index 01ab1055a..c9d07c131 100644
--- a/deps/npm/html/doc/api/npm-prune.html
+++ b/deps/npm/html/doc/api/npm-prune.html
@@ -30,5 +30,5 @@ package&#39;s dependencies list.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-prune &mdash; npm@2.11.3</p>
+<p id="footer">npm-prune &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-publish.html b/deps/npm/html/doc/api/npm-publish.html
index c870b3d36..0a399d928 100644
--- a/deps/npm/html/doc/api/npm-publish.html
+++ b/deps/npm/html/doc/api/npm-publish.html
@@ -46,5 +46,5 @@ the registry. Overwrites when the &quot;force&quot; environment variable is set
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-publish &mdash; npm@2.11.3</p>
+<p id="footer">npm-publish &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-rebuild.html b/deps/npm/html/doc/api/npm-rebuild.html
index 612acf114..fea1c4be8 100644
--- a/deps/npm/html/doc/api/npm-rebuild.html
+++ b/deps/npm/html/doc/api/npm-rebuild.html
@@ -30,5 +30,5 @@ the new binary. If no &#39;packages&#39; parameter is specify, every package wil
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-rebuild &mdash; npm@2.11.3</p>
+<p id="footer">npm-rebuild &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-repo.html b/deps/npm/html/doc/api/npm-repo.html
index e426c2ad6..e3e21e1ea 100644
--- a/deps/npm/html/doc/api/npm-repo.html
+++ b/deps/npm/html/doc/api/npm-repo.html
@@ -33,5 +33,5 @@ friendly for programmatic use.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-repo &mdash; npm@2.11.3</p>
+<p id="footer">npm-repo &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-restart.html b/deps/npm/html/doc/api/npm-restart.html
index ae60b9b90..3ac4e6808 100644
--- a/deps/npm/html/doc/api/npm-restart.html
+++ b/deps/npm/html/doc/api/npm-restart.html
@@ -52,5 +52,5 @@ behavior will be accompanied by an increase in major version number</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-restart &mdash; npm@2.11.3</p>
+<p id="footer">npm-restart &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-root.html b/deps/npm/html/doc/api/npm-root.html
index c3d418d3a..33aa59806 100644
--- a/deps/npm/html/doc/api/npm-root.html
+++ b/deps/npm/html/doc/api/npm-root.html
@@ -29,5 +29,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-root &mdash; npm@2.11.3</p>
+<p id="footer">npm-root &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-run-script.html b/deps/npm/html/doc/api/npm-run-script.html
index 1c4aa15dc..9ddeaadac 100644
--- a/deps/npm/html/doc/api/npm-run-script.html
+++ b/deps/npm/html/doc/api/npm-run-script.html
@@ -41,5 +41,5 @@ assumed to be the command to run. All other elements are ignored.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-run-script &mdash; npm@2.11.3</p>
+<p id="footer">npm-run-script &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-search.html b/deps/npm/html/doc/api/npm-search.html
index 1638a63e0..f4d947079 100644
--- a/deps/npm/html/doc/api/npm-search.html
+++ b/deps/npm/html/doc/api/npm-search.html
@@ -53,5 +53,5 @@ like).</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-search &mdash; npm@2.11.3</p>
+<p id="footer">npm-search &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-shrinkwrap.html b/deps/npm/html/doc/api/npm-shrinkwrap.html
index 9dfff6c71..d8fff65cc 100644
--- a/deps/npm/html/doc/api/npm-shrinkwrap.html
+++ b/deps/npm/html/doc/api/npm-shrinkwrap.html
@@ -33,5 +33,5 @@ been saved.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-shrinkwrap &mdash; npm@2.11.3</p>
+<p id="footer">npm-shrinkwrap &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-start.html b/deps/npm/html/doc/api/npm-start.html
index d1f5ff7a8..e2543098e 100644
--- a/deps/npm/html/doc/api/npm-start.html
+++ b/deps/npm/html/doc/api/npm-start.html
@@ -28,5 +28,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-start &mdash; npm@2.11.3</p>
+<p id="footer">npm-start &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-stop.html b/deps/npm/html/doc/api/npm-stop.html
index cb3b83a4a..4b8e84558 100644
--- a/deps/npm/html/doc/api/npm-stop.html
+++ b/deps/npm/html/doc/api/npm-stop.html
@@ -28,5 +28,5 @@ in the <code>packages</code> parameter.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-stop &mdash; npm@2.11.3</p>
+<p id="footer">npm-stop &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-tag.html b/deps/npm/html/doc/api/npm-tag.html
index 51d7af306..2152ad8c2 100644
--- a/deps/npm/html/doc/api/npm-tag.html
+++ b/deps/npm/html/doc/api/npm-tag.html
@@ -36,5 +36,5 @@ used. For more information about how to set this config, check
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-tag &mdash; npm@2.11.3</p>
+<p id="footer">npm-tag &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-test.html b/deps/npm/html/doc/api/npm-test.html
index 10303fead..721cbdf8c 100644
--- a/deps/npm/html/doc/api/npm-test.html
+++ b/deps/npm/html/doc/api/npm-test.html
@@ -30,5 +30,5 @@ in the <code>packages</code> parameter.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-test &mdash; npm@2.11.3</p>
+<p id="footer">npm-test &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-uninstall.html b/deps/npm/html/doc/api/npm-uninstall.html
index 4cc010db4..918b4d6fa 100644
--- a/deps/npm/html/doc/api/npm-uninstall.html
+++ b/deps/npm/html/doc/api/npm-uninstall.html
@@ -30,5 +30,5 @@ uninstalled or when an error has been encountered.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-uninstall &mdash; npm@2.11.3</p>
+<p id="footer">npm-uninstall &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-unpublish.html b/deps/npm/html/doc/api/npm-unpublish.html
index 60f9e3b2b..7f3c35215 100644
--- a/deps/npm/html/doc/api/npm-unpublish.html
+++ b/deps/npm/html/doc/api/npm-unpublish.html
@@ -33,5 +33,5 @@ the root package entry is removed from the registry entirely.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-unpublish &mdash; npm@2.11.3</p>
+<p id="footer">npm-unpublish &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-update.html b/deps/npm/html/doc/api/npm-update.html
index c31fcd059..8decece1d 100644
--- a/deps/npm/html/doc/api/npm-update.html
+++ b/deps/npm/html/doc/api/npm-update.html
@@ -33,5 +33,5 @@ parameter will be called when done or when an error occurs.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-update &mdash; npm@2.11.3</p>
+<p id="footer">npm-update &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-version.html b/deps/npm/html/doc/api/npm-version.html
index 02ac457e2..78751e2b8 100644
--- a/deps/npm/html/doc/api/npm-version.html
+++ b/deps/npm/html/doc/api/npm-version.html
@@ -32,5 +32,5 @@ not have exactly one element. The only element should be a version number.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-version &mdash; npm@2.11.3</p>
+<p id="footer">npm-version &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-view.html b/deps/npm/html/doc/api/npm-view.html
index 59b0f4bda..32f915754 100644
--- a/deps/npm/html/doc/api/npm-view.html
+++ b/deps/npm/html/doc/api/npm-view.html
@@ -81,5 +81,5 @@ the field name.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-view &mdash; npm@2.11.3</p>
+<p id="footer">npm-view &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm-whoami.html b/deps/npm/html/doc/api/npm-whoami.html
index 6f63d93da..499142365 100644
--- a/deps/npm/html/doc/api/npm-whoami.html
+++ b/deps/npm/html/doc/api/npm-whoami.html
@@ -29,5 +29,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-whoami &mdash; npm@2.11.3</p>
+<p id="footer">npm-whoami &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/api/npm.html b/deps/npm/html/doc/api/npm.html
index 2f75c6794..ccdf3e1d8 100644
--- a/deps/npm/html/doc/api/npm.html
+++ b/deps/npm/html/doc/api/npm.html
@@ -23,7 +23,7 @@ npm.load([configObject, ]function (er, npm) {
npm.commands.install([&quot;package&quot;], cb)
})
</code></pre><h2 id="version">VERSION</h2>
-<p>2.11.3</p>
+<p>2.13.4</p>
<h2 id="description">DESCRIPTION</h2>
<p>This is the API documentation for npm.
To find documentation of the command line
@@ -109,5 +109,5 @@ method names. Use the <code>npm.deref</code> method to find the real name.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm &mdash; npm@2.11.3</p>
+<p id="footer">npm &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-access.html b/deps/npm/html/doc/cli/npm-access.html
index ef5fda3e6..a80586b0e 100644
--- a/deps/npm/html/doc/cli/npm-access.html
+++ b/deps/npm/html/doc/cli/npm-access.html
@@ -75,5 +75,5 @@ with an HTTP 402 status code (logically enough), unless you use
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-access &mdash; npm@2.11.3</p>
+<p id="footer">npm-access &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-adduser.html b/deps/npm/html/doc/cli/npm-adduser.html
index 5eda2bf5c..bbf8b056a 100644
--- a/deps/npm/html/doc/cli/npm-adduser.html
+++ b/deps/npm/html/doc/cli/npm-adduser.html
@@ -68,5 +68,5 @@ precedence over any global configuration.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-adduser &mdash; npm@2.11.3</p>
+<p id="footer">npm-adduser &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-bin.html b/deps/npm/html/doc/cli/npm-bin.html
index 000641e2e..9143460bc 100644
--- a/deps/npm/html/doc/cli/npm-bin.html
+++ b/deps/npm/html/doc/cli/npm-bin.html
@@ -35,5 +35,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-bin &mdash; npm@2.11.3</p>
+<p id="footer">npm-bin &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-bugs.html b/deps/npm/html/doc/cli/npm-bugs.html
index 62539c0dd..d830cfe27 100644
--- a/deps/npm/html/doc/cli/npm-bugs.html
+++ b/deps/npm/html/doc/cli/npm-bugs.html
@@ -54,5 +54,5 @@ a <code>package.json</code> in the current folder and use the <code>name</code>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-bugs &mdash; npm@2.11.3</p>
+<p id="footer">npm-bugs &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-build.html b/deps/npm/html/doc/cli/npm-build.html
index b6b1b9c3b..f518b7213 100644
--- a/deps/npm/html/doc/cli/npm-build.html
+++ b/deps/npm/html/doc/cli/npm-build.html
@@ -40,5 +40,5 @@ directly, run:</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-build &mdash; npm@2.11.3</p>
+<p id="footer">npm-build &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-bundle.html b/deps/npm/html/doc/cli/npm-bundle.html
index f9b1deea9..ad63952ab 100644
--- a/deps/npm/html/doc/cli/npm-bundle.html
+++ b/deps/npm/html/doc/cli/npm-bundle.html
@@ -31,5 +31,5 @@ install packages into the local space.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-bundle &mdash; npm@2.11.3</p>
+<p id="footer">npm-bundle &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-cache.html b/deps/npm/html/doc/cli/npm-cache.html
index 4f174bbc4..3d700b8d0 100644
--- a/deps/npm/html/doc/cli/npm-cache.html
+++ b/deps/npm/html/doc/cli/npm-cache.html
@@ -81,5 +81,5 @@ they do not make an HTTP request to the registry.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-cache &mdash; npm@2.11.3</p>
+<p id="footer">npm-cache &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-completion.html b/deps/npm/html/doc/cli/npm-completion.html
index 2632938af..edaceb316 100644
--- a/deps/npm/html/doc/cli/npm-completion.html
+++ b/deps/npm/html/doc/cli/npm-completion.html
@@ -42,5 +42,5 @@ completions based on the arguments.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-completion &mdash; npm@2.11.3</p>
+<p id="footer">npm-completion &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-config.html b/deps/npm/html/doc/cli/npm-config.html
index 376bbd480..27e133e7d 100644
--- a/deps/npm/html/doc/cli/npm-config.html
+++ b/deps/npm/html/doc/cli/npm-config.html
@@ -66,5 +66,5 @@ global config.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-config &mdash; npm@2.11.3</p>
+<p id="footer">npm-config &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-dedupe.html b/deps/npm/html/doc/cli/npm-dedupe.html
index b93f3fcb7..17063f778 100644
--- a/deps/npm/html/doc/cli/npm-dedupe.html
+++ b/deps/npm/html/doc/cli/npm-dedupe.html
@@ -63,5 +63,5 @@ versions.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-dedupe &mdash; npm@2.11.3</p>
+<p id="footer">npm-dedupe &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-deprecate.html b/deps/npm/html/doc/cli/npm-deprecate.html
index d916b791c..47b5f97c6 100644
--- a/deps/npm/html/doc/cli/npm-deprecate.html
+++ b/deps/npm/html/doc/cli/npm-deprecate.html
@@ -38,5 +38,5 @@ something like this:</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-deprecate &mdash; npm@2.11.3</p>
+<p id="footer">npm-deprecate &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-dist-tag.html b/deps/npm/html/doc/cli/npm-dist-tag.html
index 1fdf4b97a..3389bdc65 100644
--- a/deps/npm/html/doc/cli/npm-dist-tag.html
+++ b/deps/npm/html/doc/cli/npm-dist-tag.html
@@ -77,5 +77,5 @@ begin with a number or the letter <code>v</code>.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-dist-tag &mdash; npm@2.11.3</p>
+<p id="footer">npm-dist-tag &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-docs.html b/deps/npm/html/doc/cli/npm-docs.html
index 8dfd9f0a6..4a63982a0 100644
--- a/deps/npm/html/doc/cli/npm-docs.html
+++ b/deps/npm/html/doc/cli/npm-docs.html
@@ -56,5 +56,5 @@ the current folder and use the <code>name</code> property.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-docs &mdash; npm@2.11.3</p>
+<p id="footer">npm-docs &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-edit.html b/deps/npm/html/doc/cli/npm-edit.html
index 44581b1e8..eeca7b287 100644
--- a/deps/npm/html/doc/cli/npm-edit.html
+++ b/deps/npm/html/doc/cli/npm-edit.html
@@ -49,5 +49,5 @@ or <code>&quot;notepad&quot;</code> on Windows.</li>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-edit &mdash; npm@2.11.3</p>
+<p id="footer">npm-edit &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-explore.html b/deps/npm/html/doc/cli/npm-explore.html
index d6b2c363f..357d7b6dc 100644
--- a/deps/npm/html/doc/cli/npm-explore.html
+++ b/deps/npm/html/doc/cli/npm-explore.html
@@ -49,5 +49,5 @@ Windows</li>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-explore &mdash; npm@2.11.3</p>
+<p id="footer">npm-explore &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-help-search.html b/deps/npm/html/doc/cli/npm-help-search.html
index a39aad665..221d45619 100644
--- a/deps/npm/html/doc/cli/npm-help-search.html
+++ b/deps/npm/html/doc/cli/npm-help-search.html
@@ -46,5 +46,5 @@ where the terms were found in the documentation.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-help-search &mdash; npm@2.11.3</p>
+<p id="footer">npm-help-search &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-help.html b/deps/npm/html/doc/cli/npm-help.html
index a9ac805bb..c815150a8 100644
--- a/deps/npm/html/doc/cli/npm-help.html
+++ b/deps/npm/html/doc/cli/npm-help.html
@@ -52,5 +52,5 @@ matches are equivalent to specifying a topic name.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-help &mdash; npm@2.11.3</p>
+<p id="footer">npm-help &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-init.html b/deps/npm/html/doc/cli/npm-init.html
index 5791edd4a..357f29dbd 100644
--- a/deps/npm/html/doc/cli/npm-init.html
+++ b/deps/npm/html/doc/cli/npm-init.html
@@ -48,5 +48,5 @@ defaults and not prompt you for any options.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-init &mdash; npm@2.11.3</p>
+<p id="footer">npm-init &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-install.html b/deps/npm/html/doc/cli/npm-install.html
index 35162c501..13d00cac3 100644
--- a/deps/npm/html/doc/cli/npm-install.html
+++ b/deps/npm/html/doc/cli/npm-install.html
@@ -138,11 +138,24 @@ fetch the package by name if it is not valid.
</code></pre><p> <code>&lt;protocol&gt;</code> is one of <code>git</code>, <code>git+ssh</code>, <code>git+http</code>, or
<code>git+https</code>. If no <code>&lt;commit-ish&gt;</code> is specified, then <code>master</code> is
used.</p>
-<p> Examples:</p>
-<pre><code> git+ssh://git@github.com:npm/npm.git#v1.0.27
- git+https://isaacs@github.com/npm/npm.git
- git://github.com/npm/npm.git#v1.0.27
+<p> The following git environment variables are recognized by npm and will be added
+ to the environment when running git:</p>
+<ul>
+<li><code>GIT_ASKPASS</code></li>
+<li><code>GIT_PROXY_COMMAND</code></li>
+<li><code>GIT_SSH</code></li>
+<li><code>GIT_SSH_COMMAND</code></li>
+<li><code>GIT_SSL_CAINFO</code></li>
+<li><p><code>GIT_SSL_NO_VERIFY</code></p>
+<p>See the git man page for details.</p>
+<p>Examples:</p>
+<pre><code>npm install git+ssh://git@github.com:npm/npm.git#v1.0.27
+npm install git+https://isaacs@github.com/npm/npm.git
+npm install git://github.com/npm/npm.git#v1.0.27
+GIT_SSH_COMMAND=&#39;ssh -i ~/.ssh/custom_ident&#39; npm install git+ssh://git@github.com:npm/npm.git
</code></pre></li>
+</ul>
+</li>
<li><p><code>npm install &lt;githubname&gt;/&lt;githubrepo&gt;[#&lt;commit-ish&gt;]</code>:</p>
</li>
<li><p><code>npm install github:&lt;githubname&gt;/&lt;githubrepo&gt;[#&lt;commit-ish&gt;]</code>:</p>
@@ -264,5 +277,5 @@ affects a real use-case, it will be investigated.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-install &mdash; npm@2.11.3</p>
+<p id="footer">npm-install &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-link.html b/deps/npm/html/doc/cli/npm-link.html
index fa2d33cc3..398b0fe1d 100644
--- a/deps/npm/html/doc/cli/npm-link.html
+++ b/deps/npm/html/doc/cli/npm-link.html
@@ -72,5 +72,5 @@ include that scope, e.g.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-link &mdash; npm@2.11.3</p>
+<p id="footer">npm-link &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-logout.html b/deps/npm/html/doc/cli/npm-logout.html
index add0967b2..6262d13a7 100644
--- a/deps/npm/html/doc/cli/npm-logout.html
+++ b/deps/npm/html/doc/cli/npm-logout.html
@@ -55,5 +55,5 @@ that registry at the same time.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-logout &mdash; npm@2.11.3</p>
+<p id="footer">npm-logout &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-ls.html b/deps/npm/html/doc/cli/npm-ls.html
index 90a2aa750..4de901b68 100644
--- a/deps/npm/html/doc/cli/npm-ls.html
+++ b/deps/npm/html/doc/cli/npm-ls.html
@@ -22,7 +22,7 @@ installed, as well as their dependencies, in a tree-structure.</p>
limit the results to only the paths to the packages named. Note that
nested packages will <em>also</em> show the paths to the specified packages.
For example, running <code>npm ls promzard</code> in npm&#39;s source tree will show:</p>
-<pre><code>npm@2.11.3 /path/to/npm
+<pre><code>npm@2.13.4 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
</code></pre><p>It will print out extraneous, missing, and invalid packages.</p>
@@ -97,5 +97,5 @@ project.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-ls &mdash; npm@2.11.3</p>
+<p id="footer">npm-ls &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-outdated.html b/deps/npm/html/doc/cli/npm-outdated.html
index e0ecfd26c..130680af3 100644
--- a/deps/npm/html/doc/cli/npm-outdated.html
+++ b/deps/npm/html/doc/cli/npm-outdated.html
@@ -67,5 +67,5 @@ project.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-outdated &mdash; npm@2.11.3</p>
+<p id="footer">npm-outdated &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-owner.html b/deps/npm/html/doc/cli/npm-owner.html
index 9b83f7e67..382943327 100644
--- a/deps/npm/html/doc/cli/npm-owner.html
+++ b/deps/npm/html/doc/cli/npm-owner.html
@@ -49,5 +49,5 @@ that is not implemented at this time.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-owner &mdash; npm@2.11.3</p>
+<p id="footer">npm-owner &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-pack.html b/deps/npm/html/doc/cli/npm-pack.html
index ccebbe323..68d85d08d 100644
--- a/deps/npm/html/doc/cli/npm-pack.html
+++ b/deps/npm/html/doc/cli/npm-pack.html
@@ -41,5 +41,5 @@ overwritten the second time.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-pack &mdash; npm@2.11.3</p>
+<p id="footer">npm-pack &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-submodule.html b/deps/npm/html/doc/cli/npm-ping.html
index 30bde01b3..a441ebdbf 100644
--- a/deps/npm/html/doc/cli/npm-submodule.html
+++ b/deps/npm/html/doc/cli/npm-ping.html
@@ -1,34 +1,24 @@
<!doctype html>
<html>
- <title>npm-submodule</title>
+ <title>npm-ping</title>
<meta http-equiv="content-type" value="text/html;utf-8">
<link rel="stylesheet" type="text/css" href="../../static/style.css">
- <link rel="canonical" href="https://www.npmjs.org/doc/cli/npm-submodule.html">
+ <link rel="canonical" href="https://www.npmjs.org/doc/cli/npm-ping.html">
<script async=true src="../../static/toc.js"></script>
<body>
<div id="wrapper">
-<h1><a href="../cli/npm-submodule.html">npm-submodule</a></h1> <p>Add a package as a git submodule</p>
+<h1><a href="../cli/npm-ping.html">npm-ping</a></h1> <p>Ping npm registry</p>
<h2 id="synopsis">SYNOPSIS</h2>
-<pre><code>npm submodule &lt;pkg&gt;
+<pre><code>npm ping [--registry &lt;registry&gt;]
</code></pre><h2 id="description">DESCRIPTION</h2>
-<p>If the specified package has a git repository url in its package.json
-description, then this command will add it as a git submodule at
-<code>node_modules/&lt;pkg name&gt;</code>.</p>
-<p>This is a convenience only. From then on, it&#39;s up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a <code>.git</code> subfolder
-in it.</p>
-<p>This command also does not install missing dependencies, if the package
-does not include them in its git repository. If <code>npm ls</code> reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do <code>npm explore &lt;pkgname&gt; -- npm install</code> to install the
-dependencies into the submodule folder.</p>
+<p>Ping the configured or given npm registry and verify authentication.</p>
<h2 id="see-also">SEE ALSO</h2>
<ul>
-<li><a href="../files/package.json.html">package.json(5)</a></li>
-<li>git help submodule</li>
+<li><a href="../cli/npm-config.html"><a href="../cli/npm-config.html">npm-config(1)</a></a></li>
+<li><a href="../misc/npm-config.html"><a href="../misc/npm-config.html">npm-config(7)</a></a></li>
+<li><a href="../files/npmrc.html"><a href="../files/npmrc.html">npmrc(5)</a></a></li>
</ul>
</div>
@@ -42,4 +32,4 @@ dependencies into the submodule folder.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-submodule &mdash; npm@1.4.28</p>
+<p id="footer">npm-ping &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-prefix.html b/deps/npm/html/doc/cli/npm-prefix.html
index f9af61318..7e10292cd 100644
--- a/deps/npm/html/doc/cli/npm-prefix.html
+++ b/deps/npm/html/doc/cli/npm-prefix.html
@@ -38,5 +38,5 @@ to contain a package.json file unless <code>-g</code> is also specified.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-prefix &mdash; npm@2.11.3</p>
+<p id="footer">npm-prefix &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-prune.html b/deps/npm/html/doc/cli/npm-prune.html
index 58125c416..96bae39a1 100644
--- a/deps/npm/html/doc/cli/npm-prune.html
+++ b/deps/npm/html/doc/cli/npm-prune.html
@@ -41,5 +41,5 @@ negate <code>NODE_ENV</code> being set to <code>production</code>.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-prune &mdash; npm@2.11.3</p>
+<p id="footer">npm-prune &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-publish.html b/deps/npm/html/doc/cli/npm-publish.html
index ba1bad9e2..a29b247dd 100644
--- a/deps/npm/html/doc/cli/npm-publish.html
+++ b/deps/npm/html/doc/cli/npm-publish.html
@@ -66,5 +66,5 @@ it is removed with <a href="../cli/npm-unpublish.html"><a href="../cli/npm-unpub
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-publish &mdash; npm@2.11.3</p>
+<p id="footer">npm-publish &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-rebuild.html b/deps/npm/html/doc/cli/npm-rebuild.html
index 38bc94532..c6176df61 100644
--- a/deps/npm/html/doc/cli/npm-rebuild.html
+++ b/deps/npm/html/doc/cli/npm-rebuild.html
@@ -38,5 +38,5 @@ the new binary.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-rebuild &mdash; npm@2.11.3</p>
+<p id="footer">npm-rebuild &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-repo.html b/deps/npm/html/doc/cli/npm-repo.html
index 63b850a61..372bb7170 100644
--- a/deps/npm/html/doc/cli/npm-repo.html
+++ b/deps/npm/html/doc/cli/npm-repo.html
@@ -42,5 +42,5 @@ a <code>package.json</code> in the current folder and use the <code>name</code>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-repo &mdash; npm@2.11.3</p>
+<p id="footer">npm-repo &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-restart.html b/deps/npm/html/doc/cli/npm-restart.html
index faafb1d00..1ce1cbe6b 100644
--- a/deps/npm/html/doc/cli/npm-restart.html
+++ b/deps/npm/html/doc/cli/npm-restart.html
@@ -53,5 +53,5 @@ behavior will be accompanied by an increase in major version number</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-restart &mdash; npm@2.11.3</p>
+<p id="footer">npm-restart &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-rm.html b/deps/npm/html/doc/cli/npm-rm.html
index 09ece656a..018aa4d3f 100644
--- a/deps/npm/html/doc/cli/npm-rm.html
+++ b/deps/npm/html/doc/cli/npm-rm.html
@@ -39,5 +39,5 @@ on its behalf.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-rm &mdash; npm@2.11.3</p>
+<p id="footer">npm-rm &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-root.html b/deps/npm/html/doc/cli/npm-root.html
index c3092e686..0ae95fb9b 100644
--- a/deps/npm/html/doc/cli/npm-root.html
+++ b/deps/npm/html/doc/cli/npm-root.html
@@ -35,5 +35,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-root &mdash; npm@2.11.3</p>
+<p id="footer">npm-root &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-run-script.html b/deps/npm/html/doc/cli/npm-run-script.html
index 03f1aa201..1635c415b 100644
--- a/deps/npm/html/doc/cli/npm-run-script.html
+++ b/deps/npm/html/doc/cli/npm-run-script.html
@@ -34,8 +34,9 @@ built-in.</p>
<code>node_modules/.bin</code> to the <code>PATH</code> provided to scripts. Any binaries provided by
locally-installed dependencies can be used without the <code>node_modules/.bin</code>
prefix. For example, if there is a <code>devDependency</code> on <code>tap</code> in your package,
-you should write <code>&quot;scripts&quot;: {&quot;test&quot;: &quot;tap test/\*.js&quot;}</code> instead of <code>&quot;scripts&quot;:
-{&quot;test&quot;: &quot;node_modules/.bin/tap test/\*.js&quot;}</code> to run your tests.</p>
+you should write:</p>
+<pre><code>&quot;scripts&quot;: {&quot;test&quot;: &quot;tap test/\*.js&quot;}
+</code></pre><p>instead of <code>&quot;scripts&quot;: {&quot;test&quot;: &quot;node_modules/.bin/tap test/\*.js&quot;}</code> to run your tests.</p>
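+<p>With that script defined, a minimal invocation from the package root looks like:</p>
+<pre><code>npm run-script test
+</code></pre>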
<h2 id="see-also">SEE ALSO</h2>
<ul>
<li><a href="../misc/npm-scripts.html"><a href="../misc/npm-scripts.html">npm-scripts(7)</a></a></li>
@@ -56,5 +57,5 @@ you should write <code>&quot;scripts&quot;: {&quot;test&quot;: &quot;tap test/\*
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-run-script &mdash; npm@2.11.3</p>
+<p id="footer">npm-run-script &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-search.html b/deps/npm/html/doc/cli/npm-search.html
index fc065f999..8c7a2d5a7 100644
--- a/deps/npm/html/doc/cli/npm-search.html
+++ b/deps/npm/html/doc/cli/npm-search.html
@@ -49,5 +49,5 @@ fall on multiple lines.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-search &mdash; npm@2.11.3</p>
+<p id="footer">npm-search &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-shrinkwrap.html b/deps/npm/html/doc/cli/npm-shrinkwrap.html
index b6f8ddc6d..fe2e91b76 100644
--- a/deps/npm/html/doc/cli/npm-shrinkwrap.html
+++ b/deps/npm/html/doc/cli/npm-shrinkwrap.html
@@ -164,5 +164,5 @@ contents rather than versions.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-shrinkwrap &mdash; npm@2.11.3</p>
+<p id="footer">npm-shrinkwrap &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-star.html b/deps/npm/html/doc/cli/npm-star.html
index 273ba2406..39245e05c 100644
--- a/deps/npm/html/doc/cli/npm-star.html
+++ b/deps/npm/html/doc/cli/npm-star.html
@@ -36,5 +36,5 @@ a vaguely positive way to show that you care.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-star &mdash; npm@2.11.3</p>
+<p id="footer">npm-star &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-stars.html b/deps/npm/html/doc/cli/npm-stars.html
index 1899557ad..10207445f 100644
--- a/deps/npm/html/doc/cli/npm-stars.html
+++ b/deps/npm/html/doc/cli/npm-stars.html
@@ -37,5 +37,5 @@ you will most certainly enjoy this command.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-stars &mdash; npm@2.11.3</p>
+<p id="footer">npm-stars &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-start.html b/deps/npm/html/doc/cli/npm-start.html
index eb36bd5b2..68779a395 100644
--- a/deps/npm/html/doc/cli/npm-start.html
+++ b/deps/npm/html/doc/cli/npm-start.html
@@ -34,5 +34,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-start &mdash; npm@2.11.3</p>
+<p id="footer">npm-start &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-stop.html b/deps/npm/html/doc/cli/npm-stop.html
index c772f8946..122c0bce8 100644
--- a/deps/npm/html/doc/cli/npm-stop.html
+++ b/deps/npm/html/doc/cli/npm-stop.html
@@ -34,5 +34,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-stop &mdash; npm@2.11.3</p>
+<p id="footer">npm-stop &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-tag.html b/deps/npm/html/doc/cli/npm-tag.html
index 414e84a0d..a2b3c68ac 100644
--- a/deps/npm/html/doc/cli/npm-tag.html
+++ b/deps/npm/html/doc/cli/npm-tag.html
@@ -62,5 +62,5 @@ that do not begin with a number or the letter <code>v</code>.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-tag &mdash; npm@2.11.3</p>
+<p id="footer">npm-tag &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-test.html b/deps/npm/html/doc/cli/npm-test.html
index 1adc88977..8b50bb790 100644
--- a/deps/npm/html/doc/cli/npm-test.html
+++ b/deps/npm/html/doc/cli/npm-test.html
@@ -37,5 +37,5 @@ true.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-test &mdash; npm@2.11.3</p>
+<p id="footer">npm-test &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-uninstall.html b/deps/npm/html/doc/cli/npm-uninstall.html
index aae5ca9b4..467244144 100644
--- a/deps/npm/html/doc/cli/npm-uninstall.html
+++ b/deps/npm/html/doc/cli/npm-uninstall.html
@@ -57,5 +57,5 @@ npm uninstall dtrace-provider --save-optional
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-uninstall &mdash; npm@2.11.3</p>
+<p id="footer">npm-uninstall &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-unpublish.html b/deps/npm/html/doc/cli/npm-unpublish.html
index 1f042fc55..6cc3f2d3f 100644
--- a/deps/npm/html/doc/cli/npm-unpublish.html
+++ b/deps/npm/html/doc/cli/npm-unpublish.html
@@ -47,5 +47,5 @@ package again, a new version number must be used.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-unpublish &mdash; npm@2.11.3</p>
+<p id="footer">npm-unpublish &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-update.html b/deps/npm/html/doc/cli/npm-update.html
index ae6685d44..4989cf5d9 100644
--- a/deps/npm/html/doc/cli/npm-update.html
+++ b/deps/npm/html/doc/cli/npm-update.html
@@ -119,5 +119,5 @@ be <em>downgraded</em>.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-update &mdash; npm@2.11.3</p>
+<p id="footer">npm-update &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-version.html b/deps/npm/html/doc/cli/npm-version.html
index 941df1720..172eb65f6 100644
--- a/deps/npm/html/doc/cli/npm-version.html
+++ b/deps/npm/html/doc/cli/npm-version.html
@@ -16,13 +16,14 @@
<p>Run this in a package directory to bump the version and write the new
data back to <code>package.json</code> and, if present, <code>npm-shrinkwrap.json</code>.</p>
<p>The <code>newversion</code> argument should be a valid semver string, <em>or</em> a
-valid second argument to semver.inc (one of &quot;patch&quot;, &quot;minor&quot;, &quot;major&quot;,
-&quot;prepatch&quot;, &quot;preminor&quot;, &quot;premajor&quot;, &quot;prerelease&quot;). In the second case,
+valid second argument to semver.inc (one of <code>patch</code>, <code>minor</code>, <code>major</code>,
+<code>prepatch</code>, <code>preminor</code>, <code>premajor</code>, <code>prerelease</code>). In the second case,
the existing version will be incremented by 1 in the specified field.</p>
-<p>If run in a git repo, it will also create a version commit and tag, and fail if
-the repo is not clean. This behavior is controlled by <code>git-tag-version</code> (see
-below), and can be disabled on the command line by running <code>npm
---no-git-tag-version version</code></p>
+<p>If run in a git repo, it will also create a version commit and tag.
+This behavior is controlled by <code>git-tag-version</code> (see below), and can
+be disabled on the command line by running <code>npm --no-git-tag-version version</code>.
+It will fail if the working directory is not clean, unless the <code>--force</code>
+flag is set.</p>
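+<p>For example, each of the following is a valid invocation; the last one skips the
+git commit and tag described above:</p>
+<pre><code>npm version 2.1.3
+npm version patch
+npm --no-git-tag-version version minor
+</code></pre>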
<p>If supplied with <code>--message</code> (shorthand: <code>-m</code>) config option, npm will
use it as a commit message when creating a version commit. If the
<code>message</code> config contains <code>%s</code> then that will be replaced with the
@@ -39,13 +40,34 @@ user: &quot;isaacs (http://blog.izs.me/) &lt;i@izs.me&gt;&quot;
2048-bit RSA key, ID 6C481CF6, created 2010-08-31
Enter passphrase:
-</code></pre><p>If &quot;preversion&quot;, &quot;version&quot;, &quot;postversion&quot; in the &quot;scripts&quot; property of
-the package.json, it will execute by running <code>npm version</code>. preversion
-and version ware executed before bump the package version, postversion
-was executed after bump the package version. For example to run <code>npm version</code>
-after passed all test:</p>
-<pre><code>&quot;scripts&quot;: { &quot;preversion&quot;: &quot;npm test&quot; }
-</code></pre><h2 id="configuration">CONFIGURATION</h2>
+</code></pre><p>If <code>preversion</code>, <code>version</code>, or <code>postversion</code> are in the <code>scripts</code> property of
+the package.json, they will be executed as part of running <code>npm version</code>.</p>
+<p>The exact order of execution is as follows:</p>
+<ol>
+<li>Check to make sure the git working directory is clean before we get started.
+Your scripts may add files to the commit in future steps.
+This step is skipped if the <code>--force</code> flag is set.</li>
+<li>Run the <code>preversion</code> script. These scripts have access to the old <code>version</code> in package.json.
+A typical use would be running your full test suite before deploying.
+Any files you want added to the commit should be explicitly added using <code>git add</code>.</li>
+<li>Bump <code>version</code> in <code>package.json</code> as requested (<code>patch</code>, <code>minor</code>, <code>major</code>, etc.).</li>
+<li>Run the <code>version</code> script. These scripts have access to the new <code>version</code> in package.json
+(so they can incorporate it into file headers in generated files for example).
+Again, scripts should explicitly add generated files to the commit using <code>git add</code>.</li>
+<li>Commit and tag.</li>
+<li>Run the <code>postversion</code> script. Use it to clean up the file system or automatically push
+the commit and/or tag.</li>
+</ol>
+<p>Take the following example:</p>
+<pre><code>&quot;scripts&quot;: {
+ &quot;preversion&quot;: &quot;npm test&quot;,
+ &quot;version&quot;: &quot;npm run build &amp;&amp; git add -A dist&quot;,
+ &quot;postversion&quot;: &quot;git push &amp;&amp; git push --tags &amp;&amp; rm -rf build/temp&quot;
+}
+</code></pre><p>This runs all your tests and proceeds only if they pass. It then runs your <code>build</code> script
+and adds everything in the <code>dist</code> directory to the commit. After the commit, it pushes the new
+commit and tag up to the server and deletes the <code>build/temp</code> directory.</p>
+<h2 id="configuration">CONFIGURATION</h2>
<h3 id="git-tag-version">git-tag-version</h3>
<ul>
<li>Default: true</li>
@@ -73,5 +95,5 @@ after passed all test:</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-version &mdash; npm@2.11.3</p>
+<p id="footer">npm-version &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-view.html b/deps/npm/html/doc/cli/npm-view.html
index e0f48b675..e3eb6ec8c 100644
--- a/deps/npm/html/doc/cli/npm-view.html
+++ b/deps/npm/html/doc/cli/npm-view.html
@@ -82,5 +82,5 @@ the field name.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-view &mdash; npm@2.11.3</p>
+<p id="footer">npm-view &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm-whoami.html b/deps/npm/html/doc/cli/npm-whoami.html
index 9b451f148..fe003c533 100644
--- a/deps/npm/html/doc/cli/npm-whoami.html
+++ b/deps/npm/html/doc/cli/npm-whoami.html
@@ -33,5 +33,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-whoami &mdash; npm@2.11.3</p>
+<p id="footer">npm-whoami &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/cli/npm.html b/deps/npm/html/doc/cli/npm.html
index 83fe67d57..8251fccc5 100644
--- a/deps/npm/html/doc/cli/npm.html
+++ b/deps/npm/html/doc/cli/npm.html
@@ -13,7 +13,7 @@
<h2 id="synopsis">SYNOPSIS</h2>
<pre><code>npm &lt;command&gt; [args]
</code></pre><h2 id="version">VERSION</h2>
-<p>2.11.3</p>
+<p>2.13.4</p>
<h2 id="description">DESCRIPTION</h2>
<p>npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
@@ -110,7 +110,7 @@ easily by doing <code>npm view npm contributors</code>.</p>
the issues list or ask on the mailing list.</p>
<ul>
<li><a href="http://github.com/npm/npm/issues">http://github.com/npm/npm/issues</a></li>
-<li><a href="&#x6d;&#x61;&#x69;&#x6c;&#116;&#x6f;&#x3a;&#110;&#112;&#x6d;&#45;&#64;&#103;&#111;&#111;&#103;&#108;&#101;&#x67;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;">&#110;&#112;&#x6d;&#45;&#64;&#103;&#111;&#111;&#103;&#108;&#101;&#x67;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;</a></li>
+<li><a href="&#109;&#x61;&#x69;&#x6c;&#116;&#x6f;&#58;&#x6e;&#112;&#x6d;&#x2d;&#64;&#103;&#111;&#x6f;&#x67;&#x6c;&#x65;&#x67;&#x72;&#x6f;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#x6d;">&#x6e;&#112;&#x6d;&#x2d;&#64;&#103;&#111;&#x6f;&#x67;&#x6c;&#x65;&#x67;&#x72;&#x6f;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#x6d;</a></li>
</ul>
<h2 id="bugs">BUGS</h2>
<p>When you find issues, please report them:</p>
@@ -118,7 +118,7 @@ the issues list or ask on the mailing list.</p>
<li>web:
<a href="http://github.com/npm/npm/issues">http://github.com/npm/npm/issues</a></li>
<li>email:
-<a href="&#109;&#x61;&#x69;&#x6c;&#116;&#111;&#x3a;&#x6e;&#112;&#109;&#45;&#x40;&#103;&#111;&#111;&#x67;&#108;&#101;&#103;&#x72;&#111;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#109;">&#x6e;&#112;&#109;&#45;&#x40;&#103;&#111;&#111;&#x67;&#108;&#101;&#103;&#x72;&#111;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#109;</a></li>
+<a href="&#x6d;&#97;&#105;&#x6c;&#116;&#111;&#58;&#x6e;&#112;&#x6d;&#x2d;&#x40;&#x67;&#x6f;&#111;&#103;&#108;&#101;&#103;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;">&#x6e;&#112;&#x6d;&#x2d;&#x40;&#x67;&#x6f;&#111;&#103;&#108;&#101;&#103;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;</a></li>
</ul>
<p>Be sure to include <em>all</em> of the output from the npm command that didn&#39;t work
as expected. The <code>npm-debug.log</code> file is also helpful to provide.</p>
@@ -128,7 +128,7 @@ will no doubt tell you to put the output in a gist or email.</p>
<p><a href="http://blog.izs.me/">Isaac Z. Schlueter</a> ::
<a href="https://github.com/isaacs/">isaacs</a> ::
<a href="http://twitter.com/izs">@izs</a> ::
-<a href="&#109;&#97;&#105;&#x6c;&#x74;&#111;&#x3a;&#105;&#x40;&#105;&#x7a;&#115;&#46;&#x6d;&#x65;">&#105;&#x40;&#105;&#x7a;&#115;&#46;&#x6d;&#x65;</a></p>
+<a href="&#109;&#x61;&#x69;&#x6c;&#116;&#111;&#58;&#105;&#64;&#105;&#x7a;&#115;&#x2e;&#109;&#101;">&#105;&#64;&#105;&#x7a;&#115;&#x2e;&#109;&#101;</a></p>
<h2 id="see-also">SEE ALSO</h2>
<ul>
<li><a href="../cli/npm-help.html"><a href="../cli/npm-help.html">npm-help(1)</a></a></li>
@@ -154,5 +154,5 @@ will no doubt tell you to put the output in a gist or email.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm &mdash; npm@2.11.3</p>
+<p id="footer">npm &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/files/npm-folders.html b/deps/npm/html/doc/files/npm-folders.html
index db8b09211..6a2d1c212 100644
--- a/deps/npm/html/doc/files/npm-folders.html
+++ b/deps/npm/html/doc/files/npm-folders.html
@@ -184,5 +184,5 @@ cannot be found elsewhere. See <code><a href="../files/package.json.html"><a hr
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-folders &mdash; npm@2.11.3</p>
+<p id="footer">npm-folders &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/files/npm-global.html b/deps/npm/html/doc/files/npm-global.html
index ddc765201..cf0bce767 100644
--- a/deps/npm/html/doc/files/npm-global.html
+++ b/deps/npm/html/doc/files/npm-global.html
@@ -184,5 +184,5 @@ cannot be found elsewhere. See <code><a href="../files/package.json.html"><a hr
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-global &mdash; npm@2.11.3</p>
+<p id="footer">npm-global &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/files/npm-json.html b/deps/npm/html/doc/files/npm-json.html
index faf036a98..8c128ef34 100644
--- a/deps/npm/html/doc/files/npm-json.html
+++ b/deps/npm/html/doc/files/npm-json.html
@@ -91,8 +91,8 @@ expression syntax version 2.0 string</a>, like this:</p>
<pre><code>{ &quot;license&quot; : &quot;(ISC OR GPL-3.0)&quot; }
</code></pre><p>If you are using a license that hasn&#39;t been assigned an SPDX identifier, or if
you are using a custom license, use the following valid SPDX expression:</p>
-<pre><code>{ &quot;license&quot; : &quot;LicenseRef-LICENSE&quot; }
-</code></pre><p>Then include a LICENSE file at the top level of the package.</p>
+<pre><code>{ &quot;license&quot; : &quot;SEE LICENSE IN &lt;filename&gt;&quot; }
+</code></pre><p>Then include a file named <code>&lt;filename&gt;</code> at the top level of the package.</p>
<p>Some old packages used license objects or a &quot;licenses&quot; property containing an
array of license objects:</p>
<pre><code>// Not valid metadata
@@ -117,7 +117,11 @@ array of license objects:</p>
<pre><code>{ &quot;license&quot;: &quot;ISC&quot; }
{ &quot;license&quot;: &quot;(MIT OR Apache-2.0)&quot; }
-</code></pre><h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
+</code></pre><p>Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:</p>
+<pre><code>{ &quot;license&quot;: &quot;UNLICENSED&quot; }
+</code></pre><p>Consider also setting <code>&quot;private&quot;: true</code> to prevent accidental publication.</p>
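+<p>A minimal sketch combining the two fields:</p>
+<pre><code>{ &quot;license&quot;: &quot;UNLICENSED&quot;,
+  &quot;private&quot;: true }
+</code></pre>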
+<h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
<p>The &quot;author&quot; is one person. &quot;contributors&quot; is an array of people. A &quot;person&quot;
is an object with a &quot;name&quot; field and optionally &quot;url&quot; and &quot;email&quot;, like this:</p>
<pre><code>{ &quot;name&quot; : &quot;Barney Rubble&quot;
@@ -136,6 +140,26 @@ inside that folder. (Unless they would be ignored by another rule.)</p>
which will keep files from being included, even if they would be picked
up by the files array. The &quot;.npmignore&quot; file works just like a
&quot;.gitignore&quot;.</p>
+<p>Certain files are always included, regardless of settings:</p>
+<ul>
+<li><code>package.json</code></li>
+<li><code><a href="../../doc/README.html"><a href="../../doc/README.html">README</a></a></code> (and its variants)</li>
+<li><code>CHANGELOG</code> (and its variants)</li>
+<li><code>LICENSE</code> / <code>LICENCE</code></li>
+</ul>
+<p>Conversely, some files are always ignored:</p>
+<ul>
+<li><code>.git</code></li>
+<li><code>CVS</code></li>
+<li><code>.svn</code></li>
+<li><code>.hg</code></li>
+<li><code>.lock-wscript</code></li>
+<li><code>.wafpickle-N</code></li>
+<li><code>*.swp</code></li>
+<li><code>.DS_Store</code></li>
+<li><code>._*</code></li>
+<li><code>npm-debug.log</code></li>
+</ul>
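+<p>For instance, a package that ships only its <code>lib</code> folder (the folder name is just
+an example) could declare the following; the files listed above are still included and
+excluded automatically:</p>
+<pre><code>&quot;files&quot;: [ &quot;lib&quot; ]
+</code></pre>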
<h2 id="main">main</h2>
<p>The main field is a module ID that is the primary entry point to your program.
That is, if your package is named <code>foo</code>, and a user installs it, and then does
@@ -204,9 +228,12 @@ you&#39;ll see that it has directories for doc, lib, and man.</p>
<p>Tell people where the bulk of your library is. Nothing special is done
with the lib folder in any way, but it&#39;s useful meta info.</p>
<h3 id="directories-bin">directories.bin</h3>
-<p>If you specify a <code>bin</code> directory, then all the files in that folder will
-be added as children of the <code>bin</code> path.</p>
-<p>If you have a <code>bin</code> path already, then this has no effect.</p>
+<p>If you specify a <code>bin</code> directory in <code>directories.bin</code>, all the files in
+that folder will be added.</p>
+<p>Because of the way the <code>bin</code> directive works, specifying both a
+<code>bin</code> path and setting <code>directories.bin</code> is an error. If you want to
+specify individual files, use <code>bin</code>, and for all the files in an
+existing <code>bin</code> directory, use <code>directories.bin</code>.</p>
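+<p>A small sketch of the two mutually exclusive forms (the command name and paths are
+illustrative); pick one or the other:</p>
+<pre><code>{ &quot;bin&quot;: { &quot;my-cli&quot;: &quot;./cli.js&quot; } }
+{ &quot;directories&quot;: { &quot;bin&quot;: &quot;./bin&quot; } }
+</code></pre>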
<h3 id="directories-man">directories.man</h3>
<p>A folder that is full of man pages. Sugar to generate a &quot;man&quot; array by
walking the folder.</p>
@@ -487,12 +514,12 @@ specific registry (for example, an internal registry), then use the
<code>publishConfig</code> dictionary described below to override the <code>registry</code> config
param at publish-time.</p>
<h2 id="publishconfig">publishConfig</h2>
-<p>This is a set of config values that will be used at publish-time. It&#39;s
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with &quot;latest&quot; or published to
-the global public registry by default.</p>
-<p>Any config values can be overridden, but of course only &quot;tag&quot; and
-&quot;registry&quot; probably matter for the purposes of publishing.</p>
+<p>This is a set of config values that will be used at publish-time. It&#39;s
+especially handy if you want to set the tag, registry, or access, so that
+you can ensure that a given package is not tagged with &quot;latest&quot;, is not
+published to the global public registry, or that a scoped module is private by default.</p>
+<p>Any config values can be overridden, but of course only &quot;tag&quot;, &quot;registry&quot; and
+&quot;access&quot; probably matter for the purposes of publishing.</p>
<p>See <code><a href="../misc/npm-config.html"><a href="../misc/npm-config.html">npm-config(7)</a></a></code> to see the list of config options that can be
overridden.</p>
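+<p>For example (the values are illustrative), a scoped package might carry:</p>
+<pre><code>&quot;publishConfig&quot;: { &quot;tag&quot;: &quot;next&quot;,
+                   &quot;registry&quot;: &quot;https://registry.example.com/&quot;,
+                   &quot;access&quot;: &quot;restricted&quot; }
+</code></pre>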
<h2 id="default-values">DEFAULT VALUES</h2>
@@ -538,5 +565,5 @@ ignored.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-json &mdash; npm@2.11.3</p>
+<p id="footer">npm-json &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/files/npmrc.html b/deps/npm/html/doc/files/npmrc.html
index 38f5c86e3..28edc36c2 100644
--- a/deps/npm/html/doc/files/npmrc.html
+++ b/deps/npm/html/doc/files/npmrc.html
@@ -47,6 +47,8 @@ config values specific to this project.</p>
running npm in. It has no effect when your module is published. For
example, you can&#39;t publish a module that forces itself to install
globally, or in a different location.</p>
+<p>Additionally, this file is not read in global mode, such as when running
+<code>npm install -g</code>.</p>
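+<p>As a small illustration, a per-project <code>.npmrc</code> might pin the registry used for
+that project (the URL is an example):</p>
+<pre><code>registry=https://registry.example.com/
+</code></pre>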
<h3 id="per-user-config-file">Per-user config file</h3>
<p><code>$HOME/.npmrc</code> (or the <code>userconfig</code> param, if set in the environment
or on the command line)</p>
@@ -81,5 +83,5 @@ manner.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npmrc &mdash; npm@2.11.3</p>
+<p id="footer">npmrc &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/files/package.json.html b/deps/npm/html/doc/files/package.json.html
index 66651bd4e..cec160bd4 100644
--- a/deps/npm/html/doc/files/package.json.html
+++ b/deps/npm/html/doc/files/package.json.html
@@ -91,8 +91,8 @@ expression syntax version 2.0 string</a>, like this:</p>
<pre><code>{ &quot;license&quot; : &quot;(ISC OR GPL-3.0)&quot; }
</code></pre><p>If you are using a license that hasn&#39;t been assigned an SPDX identifier, or if
you are using a custom license, use the following valid SPDX expression:</p>
-<pre><code>{ &quot;license&quot; : &quot;LicenseRef-LICENSE&quot; }
-</code></pre><p>Then include a LICENSE file at the top level of the package.</p>
+<pre><code>{ &quot;license&quot; : &quot;SEE LICENSE IN &lt;filename&gt;&quot; }
+</code></pre><p>Then include a file named <code>&lt;filename&gt;</code> at the top level of the package.</p>
<p>Some old packages used license objects or a &quot;licenses&quot; property containing an
array of license objects:</p>
<pre><code>// Not valid metadata
@@ -117,7 +117,11 @@ array of license objects:</p>
<pre><code>{ &quot;license&quot;: &quot;ISC&quot; }
{ &quot;license&quot;: &quot;(MIT OR Apache-2.0)&quot; }
-</code></pre><h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
+</code></pre><p>Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:</p>
+<pre><code>{ &quot;license&quot;: &quot;UNLICENSED&quot; }
+</code></pre><p>Consider also setting <code>&quot;private&quot;: true</code> to prevent accidental publication.</p>
+<h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
<p>The &quot;author&quot; is one person. &quot;contributors&quot; is an array of people. A &quot;person&quot;
is an object with a &quot;name&quot; field and optionally &quot;url&quot; and &quot;email&quot;, like this:</p>
<pre><code>{ &quot;name&quot; : &quot;Barney Rubble&quot;
@@ -136,6 +140,26 @@ inside that folder. (Unless they would be ignored by another rule.)</p>
which will keep files from being included, even if they would be picked
up by the files array. The &quot;.npmignore&quot; file works just like a
&quot;.gitignore&quot;.</p>
+<p>Certain files are always included, regardless of settings:</p>
+<ul>
+<li><code>package.json</code></li>
+<li><code><a href="../../doc/README.html"><a href="../../doc/README.html">README</a></a></code> (and its variants)</li>
+<li><code>CHANGELOG</code> (and its variants)</li>
+<li><code>LICENSE</code> / <code>LICENCE</code></li>
+</ul>
+<p>Conversely, some files are always ignored:</p>
+<ul>
+<li><code>.git</code></li>
+<li><code>CVS</code></li>
+<li><code>.svn</code></li>
+<li><code>.hg</code></li>
+<li><code>.lock-wscript</code></li>
+<li><code>.wafpickle-N</code></li>
+<li><code>*.swp</code></li>
+<li><code>.DS_Store</code></li>
+<li><code>._*</code></li>
+<li><code>npm-debug.log</code></li>
+</ul>
<h2 id="main">main</h2>
<p>The main field is a module ID that is the primary entry point to your program.
That is, if your package is named <code>foo</code>, and a user installs it, and then does
@@ -204,9 +228,12 @@ you&#39;ll see that it has directories for doc, lib, and man.</p>
<p>Tell people where the bulk of your library is. Nothing special is done
with the lib folder in any way, but it&#39;s useful meta info.</p>
<h3 id="directories-bin">directories.bin</h3>
-<p>If you specify a <code>bin</code> directory, then all the files in that folder will
-be added as children of the <code>bin</code> path.</p>
-<p>If you have a <code>bin</code> path already, then this has no effect.</p>
+<p>If you specify a <code>bin</code> directory in <code>directories.bin</code>, all the files in
+that folder will be added.</p>
+<p>Because of the way the <code>bin</code> directive works, specifying both a
+<code>bin</code> path and setting <code>directories.bin</code> is an error. If you want to
+specify individual files, use <code>bin</code>, and for all the files in an
+existing <code>bin</code> directory, use <code>directories.bin</code>.</p>
<h3 id="directories-man">directories.man</h3>
<p>A folder that is full of man pages. Sugar to generate a &quot;man&quot; array by
walking the folder.</p>
@@ -487,12 +514,12 @@ specific registry (for example, an internal registry), then use the
<code>publishConfig</code> dictionary described below to override the <code>registry</code> config
param at publish-time.</p>
<h2 id="publishconfig">publishConfig</h2>
-<p>This is a set of config values that will be used at publish-time. It&#39;s
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with &quot;latest&quot; or published to
-the global public registry by default.</p>
-<p>Any config values can be overridden, but of course only &quot;tag&quot; and
-&quot;registry&quot; probably matter for the purposes of publishing.</p>
+<p>This is a set of config values that will be used at publish-time. It&#39;s
+especially handy if you want to set the tag, registry, or access, so that
+you can ensure that a given package is not tagged with &quot;latest&quot;, is not
+published to the global public registry, or that a scoped module is private by default.</p>
+<p>Any config values can be overridden, but of course only &quot;tag&quot;, &quot;registry&quot; and
+&quot;access&quot; probably matter for the purposes of publishing.</p>
<p>See <code><a href="../misc/npm-config.html"><a href="../misc/npm-config.html">npm-config(7)</a></a></code> to see the list of config options that can be
overridden.</p>
<h2 id="default-values">DEFAULT VALUES</h2>
@@ -538,5 +565,5 @@ ignored.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">package.json &mdash; npm@2.11.3</p>
+<p id="footer">package.json &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/index.html b/deps/npm/html/doc/index.html
index e45b852b5..ea04d091f 100644
--- a/deps/npm/html/doc/index.html
+++ b/deps/npm/html/doc/index.html
@@ -66,6 +66,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-1-"><a href="cli/npm-pack.html"><a href="cli/npm-pack.html">npm-pack(1)</a></a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-1-"><a href="cli/npm-ping.html"><a href="cli/npm-ping.html">npm-ping(1)</a></a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-1-"><a href="cli/npm-prefix.html"><a href="cli/npm-prefix.html">npm-prefix(1)</a></a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-1-"><a href="cli/npm-prune.html"><a href="cli/npm-prune.html">npm-prune(1)</a></a></h3>
@@ -152,6 +154,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-3-"><a href="api/npm-pack.html"><a href="api/npm-pack.html">npm-pack(3)</a></a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-3-"><a href="api/npm-ping.html"><a href="api/npm-ping.html">npm-ping(3)</a></a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-3-"><a href="api/npm-prefix.html"><a href="api/npm-prefix.html">npm-prefix(3)</a></a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-3-"><a href="api/npm-prune.html"><a href="api/npm-prune.html">npm-prune(3)</a></a></h3>
@@ -236,5 +240,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">index &mdash; npm@2.11.3</p>
+<p id="footer">index &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-coding-style.html b/deps/npm/html/doc/misc/npm-coding-style.html
index c336977c5..db41c5b98 100644
--- a/deps/npm/html/doc/misc/npm-coding-style.html
+++ b/deps/npm/html/doc/misc/npm-coding-style.html
@@ -147,5 +147,5 @@ set to anything.&quot;</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-coding-style &mdash; npm@2.11.3</p>
+<p id="footer">npm-coding-style &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-config.html b/deps/npm/html/doc/misc/npm-config.html
index baa0762f1..4ed4156cf 100644
--- a/deps/npm/html/doc/misc/npm-config.html
+++ b/deps/npm/html/doc/misc/npm-config.html
@@ -799,5 +799,5 @@ exit successfully.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-config &mdash; npm@2.11.3</p>
+<p id="footer">npm-config &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-developers.html b/deps/npm/html/doc/misc/npm-developers.html
index c3b26357b..d2208291b 100644
--- a/deps/npm/html/doc/misc/npm-developers.html
+++ b/deps/npm/html/doc/misc/npm-developers.html
@@ -189,5 +189,5 @@ from a fresh checkout.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-developers &mdash; npm@2.11.3</p>
+<p id="footer">npm-developers &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-disputes.html b/deps/npm/html/doc/misc/npm-disputes.html
index 797d3e6bc..33850cc37 100644
--- a/deps/npm/html/doc/misc/npm-disputes.html
+++ b/deps/npm/html/doc/misc/npm-disputes.html
@@ -13,7 +13,7 @@
<h2 id="synopsis">SYNOPSIS</h2>
<ol>
<li>Get the author email with <code>npm owner ls &lt;pkgname&gt;</code></li>
-<li>Email the author, CC <a href="&#x6d;&#x61;&#x69;&#x6c;&#x74;&#x6f;&#58;&#115;&#117;&#x70;&#112;&#111;&#x72;&#116;&#64;&#x6e;&#x70;&#109;&#x6a;&#x73;&#x2e;&#x63;&#x6f;&#109;">&#115;&#117;&#x70;&#112;&#111;&#x72;&#116;&#64;&#x6e;&#x70;&#109;&#x6a;&#x73;&#x2e;&#x63;&#x6f;&#109;</a></li>
+<li>Email the author, CC <a href="&#109;&#x61;&#x69;&#108;&#116;&#x6f;&#x3a;&#x73;&#117;&#112;&#112;&#111;&#114;&#x74;&#64;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#99;&#111;&#109;">&#x73;&#117;&#112;&#112;&#111;&#114;&#x74;&#64;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#99;&#111;&#109;</a></li>
<li>After a few weeks, if there&#39;s no resolution, we&#39;ll sort it out.</li>
</ol>
<p>Don&#39;t squat on package names. Publish code or move out of the way.</p>
@@ -51,12 +51,12 @@ Joe&#39;s appropriate course of action in each case is the same.</p>
owner (Bob).</li>
<li>Joe emails Bob, explaining the situation <strong>as respectfully as
possible</strong>, and what he would like to do with the module name. He
-adds the npm support staff <a href="&#x6d;&#x61;&#105;&#108;&#116;&#x6f;&#58;&#x73;&#117;&#112;&#x70;&#111;&#114;&#116;&#x40;&#x6e;&#112;&#x6d;&#106;&#x73;&#x2e;&#99;&#111;&#109;">&#x73;&#117;&#112;&#x70;&#111;&#114;&#116;&#x40;&#x6e;&#112;&#x6d;&#106;&#x73;&#x2e;&#99;&#111;&#109;</a> to the CC list of
+adds the npm support staff <a href="&#x6d;&#x61;&#x69;&#108;&#116;&#111;&#x3a;&#115;&#117;&#x70;&#112;&#x6f;&#114;&#116;&#x40;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;">&#115;&#117;&#x70;&#112;&#x6f;&#114;&#116;&#x40;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;</a> to the CC list of
the email. Mention in the email that Bob can run <code>npm owner add
joe foo</code> to add Joe as an owner of the <code>foo</code> package.</li>
<li>After a reasonable amount of time, if Bob has not responded, or if
Bob and Joe can&#39;t come to any sort of resolution, email support
-<a href="&#109;&#x61;&#105;&#108;&#116;&#x6f;&#x3a;&#x73;&#x75;&#x70;&#x70;&#x6f;&#x72;&#116;&#64;&#110;&#112;&#109;&#x6a;&#x73;&#x2e;&#x63;&#111;&#x6d;">&#x73;&#x75;&#x70;&#x70;&#x6f;&#x72;&#116;&#64;&#110;&#112;&#109;&#x6a;&#x73;&#x2e;&#x63;&#111;&#x6d;</a> and we&#39;ll sort it out. (&quot;Reasonable&quot; is
+<a href="&#x6d;&#97;&#105;&#108;&#x74;&#111;&#x3a;&#x73;&#117;&#x70;&#x70;&#111;&#114;&#x74;&#64;&#x6e;&#x70;&#109;&#106;&#x73;&#46;&#x63;&#x6f;&#x6d;">&#x73;&#117;&#x70;&#x70;&#111;&#114;&#x74;&#64;&#x6e;&#x70;&#109;&#106;&#x73;&#46;&#x63;&#x6f;&#x6d;</a> and we&#39;ll sort it out. (&quot;Reasonable&quot; is
usually at least 4 weeks, but extra time is allowed around common
holidays.)</li>
</ol>
@@ -112,5 +112,5 @@ things into it.</li>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-disputes &mdash; npm@2.11.3</p>
+<p id="footer">npm-disputes &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-faq.html b/deps/npm/html/doc/misc/npm-faq.html
index 01ef2c2d4..a9cb75eb2 100644
--- a/deps/npm/html/doc/misc/npm-faq.html
+++ b/deps/npm/html/doc/misc/npm-faq.html
@@ -213,6 +213,7 @@ of Node 0.3.</p>
<p>Windows:</p>
<ul>
<li><a href="http://github.com/marcelklehr/nodist">http://github.com/marcelklehr/nodist</a></li>
+<li><a href="https://github.com/coreybutler/nvm-windows">https://github.com/coreybutler/nvm-windows</a></li>
<li><a href="https://github.com/hakobera/nvmw">https://github.com/hakobera/nvmw</a></li>
<li><a href="https://github.com/nanjingboy/nvmw">https://github.com/nanjingboy/nvmw</a></li>
</ul>
@@ -236,7 +237,7 @@ that has a package.json in its root, or a git url.
<p>To check if the registry is down, open up
<a href="https://registry.npmjs.org/">https://registry.npmjs.org/</a> in a web browser. This will also tell
you if you are just unable to access the internet for some reason.</p>
-<p>If the registry IS down, let us know by emailing <a href="&#109;&#97;&#x69;&#x6c;&#x74;&#x6f;&#x3a;&#x73;&#x75;&#112;&#112;&#111;&#114;&#116;&#64;&#x6e;&#112;&#x6d;&#x6a;&#x73;&#46;&#99;&#111;&#x6d;">&#x73;&#x75;&#112;&#112;&#111;&#114;&#116;&#64;&#x6e;&#112;&#x6d;&#x6a;&#x73;&#46;&#99;&#111;&#x6d;</a>
+<p>If the registry IS down, let us know by emailing <a href="&#109;&#x61;&#105;&#x6c;&#116;&#x6f;&#58;&#115;&#x75;&#x70;&#112;&#x6f;&#114;&#x74;&#x40;&#110;&#112;&#109;&#106;&#115;&#x2e;&#x63;&#x6f;&#x6d;">&#115;&#x75;&#x70;&#112;&#x6f;&#114;&#x74;&#x40;&#110;&#112;&#109;&#106;&#115;&#x2e;&#x63;&#x6f;&#x6d;</a>
or posting an issue at <a href="https://github.com/npm/npm/issues">https://github.com/npm/npm/issues</a>. If it&#39;s
down for the world (and not just on your local network) then we&#39;re
probably already being pinged about it.</p>
@@ -307,5 +308,5 @@ good folks at <a href="http://www.npmjs.com">npm, Inc.</a></p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-faq &mdash; npm@2.11.3</p>
+<p id="footer">npm-faq &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-index.html b/deps/npm/html/doc/misc/npm-index.html
index 18d73b0ab..062f3e7d0 100644
--- a/deps/npm/html/doc/misc/npm-index.html
+++ b/deps/npm/html/doc/misc/npm-index.html
@@ -66,6 +66,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-1-"><a href="../cli/npm-pack.html"><a href="../cli/npm-pack.html">npm-pack(1)</a></a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-1-"><a href="../cli/npm-ping.html"><a href="../cli/npm-ping.html">npm-ping(1)</a></a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-1-"><a href="../cli/npm-prefix.html"><a href="../cli/npm-prefix.html">npm-prefix(1)</a></a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-1-"><a href="../cli/npm-prune.html"><a href="../cli/npm-prune.html">npm-prune(1)</a></a></h3>
@@ -152,6 +154,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-3-"><a href="../api/npm-pack.html"><a href="../api/npm-pack.html">npm-pack(3)</a></a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-3-"><a href="../api/npm-ping.html"><a href="../api/npm-ping.html">npm-ping(3)</a></a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-3-"><a href="../api/npm-prefix.html"><a href="../api/npm-prefix.html">npm-prefix(3)</a></a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-3-"><a href="../api/npm-prune.html"><a href="../api/npm-prune.html">npm-prune(3)</a></a></h3>
@@ -236,5 +240,5 @@
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-index &mdash; npm@2.11.3</p>
+<p id="footer">npm-index &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-registry.html b/deps/npm/html/doc/misc/npm-registry.html
index 23c36db63..51118b905 100644
--- a/deps/npm/html/doc/misc/npm-registry.html
+++ b/deps/npm/html/doc/misc/npm-registry.html
@@ -70,5 +70,5 @@ effectively implement the entire CouchDB API anyway.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-registry &mdash; npm@2.11.3</p>
+<p id="footer">npm-registry &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-scope.html b/deps/npm/html/doc/misc/npm-scope.html
index 027e64ed9..59155bbe5 100644
--- a/deps/npm/html/doc/misc/npm-scope.html
+++ b/deps/npm/html/doc/misc/npm-scope.html
@@ -91,5 +91,5 @@ that registry instead.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-scope &mdash; npm@2.11.3</p>
+<p id="footer">npm-scope &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/npm-scripts.html b/deps/npm/html/doc/misc/npm-scripts.html
index 982c219fe..34202641a 100644
--- a/deps/npm/html/doc/misc/npm-scripts.html
+++ b/deps/npm/html/doc/misc/npm-scripts.html
@@ -207,5 +207,5 @@ scripts is for compilation which must be done on the target architecture.</li>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">npm-scripts &mdash; npm@2.11.3</p>
+<p id="footer">npm-scripts &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/removing-npm.html b/deps/npm/html/doc/misc/removing-npm.html
index 37253651b..c1f8b8600 100644
--- a/deps/npm/html/doc/misc/removing-npm.html
+++ b/deps/npm/html/doc/misc/removing-npm.html
@@ -57,5 +57,5 @@ modules. To track those down, you can do the following:</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">removing-npm &mdash; npm@2.11.3</p>
+<p id="footer">removing-npm &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/doc/misc/semver.html b/deps/npm/html/doc/misc/semver.html
index bf227e483..a2fd948d5 100644
--- a/deps/npm/html/doc/misc/semver.html
+++ b/deps/npm/html/doc/misc/semver.html
@@ -282,5 +282,5 @@ range, use the <code>satisfies(version, range)</code> function.</p>
<tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr>
<tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr>
</table>
-<p id="footer">semver &mdash; npm@2.11.3</p>
+<p id="footer">semver &mdash; npm@2.13.4</p>
diff --git a/deps/npm/html/partial/doc/README.html b/deps/npm/html/partial/doc/README.html
index 7db20d896..98896e628 100644
--- a/deps/npm/html/partial/doc/README.html
+++ b/deps/npm/html/partial/doc/README.html
@@ -129,7 +129,7 @@ specific purpose, or lack of malice in any given npm package.</p>
<p>If you have a complaint about a package in the public npm registry,
and cannot <a href="https://docs.npmjs.com/misc/disputes">resolve it with the package
owner</a>, please email
-<a href="&#109;&#97;&#x69;&#x6c;&#116;&#111;&#58;&#115;&#x75;&#112;&#x70;&#x6f;&#114;&#116;&#64;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;">&#115;&#x75;&#112;&#x70;&#x6f;&#114;&#116;&#64;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;</a> and explain the situation.</p>
+<a href="&#x6d;&#97;&#x69;&#108;&#x74;&#x6f;&#x3a;&#115;&#117;&#x70;&#112;&#x6f;&#x72;&#116;&#x40;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#x63;&#111;&#109;">&#115;&#117;&#x70;&#112;&#x6f;&#x72;&#116;&#x40;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#x63;&#111;&#109;</a> and explain the situation.</p>
<p>Any data published to The npm Registry (including user account
information) may be removed or modified at the sole discretion of the
npm server administrators.</p>
diff --git a/deps/npm/html/partial/doc/api/npm-ping.html b/deps/npm/html/partial/doc/api/npm-ping.html
new file mode 100644
index 000000000..ac8a29e18
--- /dev/null
+++ b/deps/npm/html/partial/doc/api/npm-ping.html
@@ -0,0 +1,8 @@
+<h1><a href="../api/npm-ping.html">npm-ping</a></h1> <p>Ping npm registry</p>
+<h2 id="synopsis">SYNOPSIS</h2>
+<pre><code>npm.registry.ping(registry, options, function (er, pong))
+</code></pre><h2 id="description">DESCRIPTION</h2>
+<p>Attempts to connect to the given registry, returning a <code>pong</code>
+object with various metadata if it succeeds.</p>
+<p>This function is primarily useful for debugging connection issues
+to npm registries.</p>
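A minimal usage sketch for the API documented above, assuming npm has been loaded first as the npm(3) docs show; the error handling is illustrative:

    var npm = require('npm')

    npm.load({}, function (er) {
      if (er) throw er
      var registry = npm.config.get('registry')
      var auth = npm.config.getCredentialsByURI(registry)
      // same call that lib/ping.js (added later in this patch) makes
      npm.registry.ping(registry, { auth: auth }, function (er, pong) {
        if (er) return console.error('registry unreachable:', er.message)
        console.log('pong:', JSON.stringify(pong))
      })
    })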
diff --git a/deps/npm/html/partial/doc/api/npm-submodule.html b/deps/npm/html/partial/doc/api/npm-submodule.html
deleted file mode 100644
index cc7dd822a..000000000
--- a/deps/npm/html/partial/doc/api/npm-submodule.html
+++ /dev/null
@@ -1,21 +0,0 @@
-<h1><a href="../api/npm-submodule.html">npm-submodule</a></h1> <p>Add a package as a git submodule</p>
-<h2 id="synopsis">SYNOPSIS</h2>
-<pre><code>npm.commands.submodule(packages, callback)
-</code></pre><h2 id="description">DESCRIPTION</h2>
-<p>For each package specified, npm will check if it has a git repository url
-in its package.json description then add it as a git submodule at
-<code>node_modules/&lt;pkg name&gt;</code>.</p>
-<p>This is a convenience only. From then on, it&#39;s up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a <code>.git</code> subfolder
-in it.</p>
-<p>This command also does not install missing dependencies, if the package
-does not include them in its git repository. If <code>npm ls</code> reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do <code>npm explore &lt;pkgname&gt; -- npm install</code> to install the
-dependencies into the submodule folder.</p>
-<h2 id="see-also">SEE ALSO</h2>
-<ul>
-<li>npm help json</li>
-<li>git help submodule</li>
-</ul>
diff --git a/deps/npm/html/partial/doc/api/npm.html b/deps/npm/html/partial/doc/api/npm.html
index 8e9de746a..330af5533 100644
--- a/deps/npm/html/partial/doc/api/npm.html
+++ b/deps/npm/html/partial/doc/api/npm.html
@@ -12,7 +12,7 @@ npm.load([configObject, ]function (er, npm) {
npm.commands.install([&quot;package&quot;], cb)
})
</code></pre><h2 id="version">VERSION</h2>
-<p>2.11.3</p>
+<p>2.13.4</p>
<h2 id="description">DESCRIPTION</h2>
<p>This is the API documentation for npm.
To find documentation of the command line
diff --git a/deps/npm/html/partial/doc/cli/npm-install.html b/deps/npm/html/partial/doc/cli/npm-install.html
index 1f6aef824..98dadcc91 100644
--- a/deps/npm/html/partial/doc/cli/npm-install.html
+++ b/deps/npm/html/partial/doc/cli/npm-install.html
@@ -127,11 +127,24 @@ fetch the package by name if it is not valid.
</code></pre><p> <code>&lt;protocol&gt;</code> is one of <code>git</code>, <code>git+ssh</code>, <code>git+http</code>, or
<code>git+https</code>. If no <code>&lt;commit-ish&gt;</code> is specified, then <code>master</code> is
used.</p>
-<p> Examples:</p>
-<pre><code> git+ssh://git@github.com:npm/npm.git#v1.0.27
- git+https://isaacs@github.com/npm/npm.git
- git://github.com/npm/npm.git#v1.0.27
+<p> The following git environment variables are recognized by npm and will be added
+ to the environment when running git:</p>
+<ul>
+<li><code>GIT_ASKPASS</code></li>
+<li><code>GIT_PROXY_COMMAND</code></li>
+<li><code>GIT_SSH</code></li>
+<li><code>GIT_SSH_COMMAND</code></li>
+<li><code>GIT_SSL_CAINFO</code></li>
+<li><p><code>GIT_SSL_NO_VERIFY</code></p>
+<p>See the git man page for details.</p>
+<p>Examples:</p>
+<pre><code>npm install git+ssh://git@github.com:npm/npm.git#v1.0.27
+npm install git+https://isaacs@github.com/npm/npm.git
+npm install git://github.com/npm/npm.git#v1.0.27
+GIT_SSH_COMMAND=&#39;ssh -i ~/.ssh/custom_ident&#39; npm install git+ssh://git@github.com:npm/npm.git
</code></pre></li>
+</ul>
+</li>
<li><p><code>npm install &lt;githubname&gt;/&lt;githubrepo&gt;[#&lt;commit-ish&gt;]</code>:</p>
</li>
<li><p><code>npm install github:&lt;githubname&gt;/&lt;githubrepo&gt;[#&lt;commit-ish&gt;]</code>:</p>
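The hunk above documents which git environment variables npm passes through when it runs git. As an illustration only (this is not npm's actual utils/git.js), a whitelist like that can be applied when spawning git as follows; the clone URL is just an example:

    var spawn = require('child_process').spawn

    var GIT_ENV_WHITELIST = [
      'GIT_ASKPASS', 'GIT_PROXY_COMMAND', 'GIT_SSH',
      'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO', 'GIT_SSL_NO_VERIFY'
    ]

    function gitEnv () {
      // start from a minimal environment and copy through only the
      // whitelisted GIT_* settings plus what git needs to run at all
      var env = { PATH: process.env.PATH, HOME: process.env.HOME }
      GIT_ENV_WHITELIST.forEach(function (name) {
        if (process.env[name] !== undefined) env[name] = process.env[name]
      })
      return env
    }

    spawn('git', ['clone', 'https://github.com/npm/npm.git'],
          { env: gitEnv(), stdio: 'inherit' })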
diff --git a/deps/npm/html/partial/doc/cli/npm-ls.html b/deps/npm/html/partial/doc/cli/npm-ls.html
index f240024f2..f97fb2224 100644
--- a/deps/npm/html/partial/doc/cli/npm-ls.html
+++ b/deps/npm/html/partial/doc/cli/npm-ls.html
@@ -11,7 +11,7 @@ installed, as well as their dependencies, in a tree-structure.</p>
limit the results to only the paths to the packages named. Note that
nested packages will <em>also</em> show the paths to the specified packages.
For example, running <code>npm ls promzard</code> in npm&#39;s source tree will show:</p>
-<pre><code>npm@2.11.3 /path/to/npm
+<pre><code>npm@2.13.4 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
</code></pre><p>It will print out extraneous, missing, and invalid packages.</p>
diff --git a/deps/npm/html/partial/doc/cli/npm-ping.html b/deps/npm/html/partial/doc/cli/npm-ping.html
new file mode 100644
index 000000000..c71423d5e
--- /dev/null
+++ b/deps/npm/html/partial/doc/cli/npm-ping.html
@@ -0,0 +1,11 @@
+<h1><a href="../cli/npm-ping.html">npm-ping</a></h1> <p>Ping npm registry</p>
+<h2 id="synopsis">SYNOPSIS</h2>
+<pre><code>npm ping [--registry &lt;registry&gt;]
+</code></pre><h2 id="description">DESCRIPTION</h2>
+<p>Ping the configured or given npm registry and verify authentication.</p>
+<h2 id="see-also">SEE ALSO</h2>
+<ul>
+<li><a href="../cli/npm-config.html">npm-config(1)</a></li>
+<li><a href="../misc/npm-config.html">npm-config(7)</a></li>
+<li><a href="../files/npmrc.html">npmrc(5)</a></li>
+</ul>
diff --git a/deps/npm/html/partial/doc/cli/npm-run-script.html b/deps/npm/html/partial/doc/cli/npm-run-script.html
index 4d3b6722f..e79376870 100644
--- a/deps/npm/html/partial/doc/cli/npm-run-script.html
+++ b/deps/npm/html/partial/doc/cli/npm-run-script.html
@@ -23,8 +23,9 @@ built-in.</p>
<code>node_modules/.bin</code> to the <code>PATH</code> provided to scripts. Any binaries provided by
locally-installed dependencies can be used without the <code>node_modules/.bin</code>
prefix. For example, if there is a <code>devDependency</code> on <code>tap</code> in your package,
-you should write <code>&quot;scripts&quot;: {&quot;test&quot;: &quot;tap test/\*.js&quot;}</code> instead of <code>&quot;scripts&quot;:
-{&quot;test&quot;: &quot;node_modules/.bin/tap test/\*.js&quot;}</code> to run your tests.</p>
+you should write:</p>
+<pre><code>&quot;scripts&quot;: {&quot;test&quot;: &quot;tap test/\*.js&quot;}
+</code></pre><p>instead of <code>&quot;scripts&quot;: {&quot;test&quot;: &quot;node_modules/.bin/tap test/\*.js&quot;}</code> to run your tests.</p>
<h2 id="see-also">SEE ALSO</h2>
<ul>
<li><a href="../misc/npm-scripts.html">npm-scripts(7)</a></li>
diff --git a/deps/npm/html/partial/doc/cli/npm-submodule.html b/deps/npm/html/partial/doc/cli/npm-submodule.html
deleted file mode 100644
index dd7c7e887..000000000
--- a/deps/npm/html/partial/doc/cli/npm-submodule.html
+++ /dev/null
@@ -1,21 +0,0 @@
-<h1><a href="../cli/npm-submodule.html">npm-submodule</a></h1> <p>Add a package as a git submodule</p>
-<h2 id="synopsis">SYNOPSIS</h2>
-<pre><code>npm submodule &lt;pkg&gt;
-</code></pre><h2 id="description">DESCRIPTION</h2>
-<p>If the specified package has a git repository url in its package.json
-description, then this command will add it as a git submodule at
-<code>node_modules/&lt;pkg name&gt;</code>.</p>
-<p>This is a convenience only. From then on, it&#39;s up to you to manage
-updates by using the appropriate git commands. npm will stubbornly
-refuse to update, modify, or remove anything with a <code>.git</code> subfolder
-in it.</p>
-<p>This command also does not install missing dependencies, if the package
-does not include them in its git repository. If <code>npm ls</code> reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do <code>npm explore &lt;pkgname&gt; -- npm install</code> to install the
-dependencies into the submodule folder.</p>
-<h2 id="see-also">SEE ALSO</h2>
-<ul>
-<li><a href="../files/package.json.html">package.json(5)</a></li>
-<li>git help submodule</li>
-</ul>
diff --git a/deps/npm/html/partial/doc/cli/npm-version.html b/deps/npm/html/partial/doc/cli/npm-version.html
index 515f19a6d..1beda3d28 100644
--- a/deps/npm/html/partial/doc/cli/npm-version.html
+++ b/deps/npm/html/partial/doc/cli/npm-version.html
@@ -5,13 +5,14 @@
<p>Run this in a package directory to bump the version and write the new
data back to <code>package.json</code> and, if present, <code>npm-shrinkwrap.json</code>.</p>
<p>The <code>newversion</code> argument should be a valid semver string, <em>or</em> a
-valid second argument to semver.inc (one of &quot;patch&quot;, &quot;minor&quot;, &quot;major&quot;,
-&quot;prepatch&quot;, &quot;preminor&quot;, &quot;premajor&quot;, &quot;prerelease&quot;). In the second case,
+valid second argument to semver.inc (one of <code>patch</code>, <code>minor</code>, <code>major</code>,
+<code>prepatch</code>, <code>preminor</code>, <code>premajor</code>, <code>prerelease</code>). In the second case,
the existing version will be incremented by 1 in the specified field.</p>
-<p>If run in a git repo, it will also create a version commit and tag, and fail if
-the repo is not clean. This behavior is controlled by <code>git-tag-version</code> (see
-below), and can be disabled on the command line by running <code>npm
---no-git-tag-version version</code></p>
+<p>If run in a git repo, it will also create a version commit and tag.
+This behavior is controlled by <code>git-tag-version</code> (see below), and can
+be disabled on the command line by running <code>npm --no-git-tag-version version</code>.
+It will fail if the working directory is not clean, unless the <code>--force</code>
+flag is set.</p>
<p>If supplied with <code>--message</code> (shorthand: <code>-m</code>) config option, npm will
use it as a commit message when creating a version commit. If the
<code>message</code> config contains <code>%s</code> then that will be replaced with the
@@ -28,13 +29,34 @@ user: &quot;isaacs (http://blog.izs.me/) &lt;i@izs.me&gt;&quot;
2048-bit RSA key, ID 6C481CF6, created 2010-08-31
Enter passphrase:
-</code></pre><p>If &quot;preversion&quot;, &quot;version&quot;, &quot;postversion&quot; in the &quot;scripts&quot; property of
-the package.json, it will execute by running <code>npm version</code>. preversion
-and version ware executed before bump the package version, postversion
-was executed after bump the package version. For example to run <code>npm version</code>
-after passed all test:</p>
-<pre><code>&quot;scripts&quot;: { &quot;preversion&quot;: &quot;npm test&quot; }
-</code></pre><h2 id="configuration">CONFIGURATION</h2>
+</code></pre><p>If <code>preversion</code>, <code>version</code>, or <code>postversion</code> are in the <code>scripts</code> property of
+the package.json, they will be executed as part of running <code>npm version</code>.</p>
+<p>The exact order of execution is as follows:</p>
+<ol>
+<li>Check to make sure the git working directory is clean before we get started.
+Your scripts may add files to the commit in future steps.
+This step is skipped if the <code>--force</code> flag is set.</li>
+<li>Run the <code>preversion</code> script. These scripts have access to the old <code>version</code> in package.json.
+A typical use would be running your full test suite before deploying.
+Any files you want added to the commit should be explicitly added using <code>git add</code>.</li>
+<li>Bump <code>version</code> in <code>package.json</code> as requested (<code>patch</code>, <code>minor</code>, <code>major</code>, etc). </li>
+<li>Run the <code>version</code> script. These scripts have access to the new <code>version</code> in package.json
+(so they can incorporate it into file headers in generated files for example).
+Again, scripts should explicitly add generated files to the commit using <code>git add</code>.</li>
+<li>Commit and tag.</li>
+<li>Run the <code>postversion</code> script. Use it to clean up the file system or automatically push
+the commit and/or tag.</li>
+</ol>
+<p>Take the following example:</p>
+<pre><code>&quot;scripts&quot;: {
+ &quot;preversion&quot;: &quot;npm test&quot;,
+ &quot;version&quot;: &quot;npm run build &amp;&amp; git add -A dist&quot;,
+ &quot;postversion&quot;: &quot;git push &amp;&amp; git push --tags &amp;&amp; rm -rf build/temp&quot;
+}
+</code></pre><p>This runs all your tests, and proceeds only if they pass. Then runs your <code>build</code> script, and
+adds everything in the <code>dist</code> directory to the commit. After the commit, it pushes the new commit
+and tag up to the server, and deletes the <code>build/temp</code> directory.</p>
+<h2 id="configuration">CONFIGURATION</h2>
<h3 id="git-tag-version">git-tag-version</h3>
<ul>
<li>Default: true</li>
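The same preversion/version/postversion flow can be driven programmatically; a minimal sketch, assuming a package.json in the current directory and a clean git working tree (or the force config set):

    var npm = require('npm')

    npm.load({}, function (er) {
      if (er) throw er
      // runs the git check, preversion, the version bump, version,
      // commit/tag, and postversion in the order described above
      npm.commands.version(['patch'], function (er) {
        if (er) return console.error('version bump failed:', er.message)
        console.log('version bump complete')
      })
    })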
diff --git a/deps/npm/html/partial/doc/cli/npm.html b/deps/npm/html/partial/doc/cli/npm.html
index b09a98467..e147a18e8 100644
--- a/deps/npm/html/partial/doc/cli/npm.html
+++ b/deps/npm/html/partial/doc/cli/npm.html
@@ -2,7 +2,7 @@
<h2 id="synopsis">SYNOPSIS</h2>
<pre><code>npm &lt;command&gt; [args]
</code></pre><h2 id="version">VERSION</h2>
-<p>2.11.3</p>
+<p>2.13.4</p>
<h2 id="description">DESCRIPTION</h2>
<p>npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
@@ -99,7 +99,7 @@ easily by doing <code>npm view npm contributors</code>.</p>
the issues list or ask on the mailing list.</p>
<ul>
<li><a href="http://github.com/npm/npm/issues">http://github.com/npm/npm/issues</a></li>
-<li><a href="&#x6d;&#x61;&#x69;&#x6c;&#116;&#x6f;&#x3a;&#110;&#112;&#x6d;&#45;&#64;&#103;&#111;&#111;&#103;&#108;&#101;&#x67;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;">&#110;&#112;&#x6d;&#45;&#64;&#103;&#111;&#111;&#103;&#108;&#101;&#x67;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;</a></li>
+<li><a href="&#109;&#x61;&#x69;&#x6c;&#116;&#x6f;&#58;&#x6e;&#112;&#x6d;&#x2d;&#64;&#103;&#111;&#x6f;&#x67;&#x6c;&#x65;&#x67;&#x72;&#x6f;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#x6d;">&#x6e;&#112;&#x6d;&#x2d;&#64;&#103;&#111;&#x6f;&#x67;&#x6c;&#x65;&#x67;&#x72;&#x6f;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#x6d;</a></li>
</ul>
<h2 id="bugs">BUGS</h2>
<p>When you find issues, please report them:</p>
@@ -107,7 +107,7 @@ the issues list or ask on the mailing list.</p>
<li>web:
<a href="http://github.com/npm/npm/issues">http://github.com/npm/npm/issues</a></li>
<li>email:
-<a href="&#109;&#x61;&#x69;&#x6c;&#116;&#111;&#x3a;&#x6e;&#112;&#109;&#45;&#x40;&#103;&#111;&#111;&#x67;&#108;&#101;&#103;&#x72;&#111;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#109;">&#x6e;&#112;&#109;&#45;&#x40;&#103;&#111;&#111;&#x67;&#108;&#101;&#103;&#x72;&#111;&#x75;&#112;&#x73;&#46;&#x63;&#111;&#109;</a></li>
+<a href="&#x6d;&#97;&#105;&#x6c;&#116;&#111;&#58;&#x6e;&#112;&#x6d;&#x2d;&#x40;&#x67;&#x6f;&#111;&#103;&#108;&#101;&#103;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;">&#x6e;&#112;&#x6d;&#x2d;&#x40;&#x67;&#x6f;&#111;&#103;&#108;&#101;&#103;&#x72;&#x6f;&#117;&#112;&#x73;&#46;&#x63;&#111;&#x6d;</a></li>
</ul>
<p>Be sure to include <em>all</em> of the output from the npm command that didn&#39;t work
as expected. The <code>npm-debug.log</code> file is also helpful to provide.</p>
@@ -117,7 +117,7 @@ will no doubt tell you to put the output in a gist or email.</p>
<p><a href="http://blog.izs.me/">Isaac Z. Schlueter</a> ::
<a href="https://github.com/isaacs/">isaacs</a> ::
<a href="http://twitter.com/izs">@izs</a> ::
-<a href="&#109;&#97;&#105;&#x6c;&#x74;&#111;&#x3a;&#105;&#x40;&#105;&#x7a;&#115;&#46;&#x6d;&#x65;">&#105;&#x40;&#105;&#x7a;&#115;&#46;&#x6d;&#x65;</a></p>
+<a href="&#109;&#x61;&#x69;&#x6c;&#116;&#111;&#58;&#105;&#64;&#105;&#x7a;&#115;&#x2e;&#109;&#101;">&#105;&#64;&#105;&#x7a;&#115;&#x2e;&#109;&#101;</a></p>
<h2 id="see-also">SEE ALSO</h2>
<ul>
<li><a href="../cli/npm-help.html">npm-help(1)</a></li>
diff --git a/deps/npm/html/partial/doc/files/npm-json.html b/deps/npm/html/partial/doc/files/npm-json.html
index 752a10cbe..b2e54ba74 100644
--- a/deps/npm/html/partial/doc/files/npm-json.html
+++ b/deps/npm/html/partial/doc/files/npm-json.html
@@ -80,8 +80,8 @@ expression syntax version 2.0 string</a>, like this:</p>
<pre><code>{ &quot;license&quot; : &quot;(ISC OR GPL-3.0)&quot; }
</code></pre><p>If you are using a license that hasn&#39;t been assigned an SPDX identifier, or if
you are using a custom license, use the following valid SPDX expression:</p>
-<pre><code>{ &quot;license&quot; : &quot;LicenseRef-LICENSE&quot; }
-</code></pre><p>Then include a LICENSE file at the top level of the package.</p>
+<pre><code>{ &quot;license&quot; : &quot;SEE LICENSE IN &lt;filename&gt;&quot; }
+</code></pre><p>Then include a file named <code>&lt;filename&gt;</code> at the top level of the package.</p>
<p>Some old packages used license objects or a &quot;licenses&quot; property containing an
array of license objects:</p>
<pre><code>// Not valid metadata
@@ -106,7 +106,11 @@ array of license objects:</p>
<pre><code>{ &quot;license&quot;: &quot;ISC&quot; }
{ &quot;license&quot;: &quot;(MIT OR Apache-2.0)&quot; }
-</code></pre><h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
+</code></pre><p>Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:</p>
+<pre><code>{ &quot;license&quot;: &quot;UNLICENSED&quot;}
+</code></pre><p>Consider also setting <code>&quot;private&quot;: true</code> to prevent accidental publication.</p>
+<h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
<p>The &quot;author&quot; is one person. &quot;contributors&quot; is an array of people. A &quot;person&quot;
is an object with a &quot;name&quot; field and optionally &quot;url&quot; and &quot;email&quot;, like this:</p>
<pre><code>{ &quot;name&quot; : &quot;Barney Rubble&quot;
@@ -125,6 +129,26 @@ inside that folder. (Unless they would be ignored by another rule.)</p>
which will keep files from being included, even if they would be picked
up by the files array. The &quot;.npmignore&quot; file works just like a
&quot;.gitignore&quot;.</p>
+<p>Certain files are always included, regardless of settings:</p>
+<ul>
+<li><code>package.json</code></li>
+<li><code><a href="../../doc/README.html">README</a></code> (and its variants)</li>
+<li><code>CHANGELOG</code> (and its variants)</li>
+<li><code>LICENSE</code> / <code>LICENCE</code></li>
+</ul>
+<p>Conversely, some files are always ignored:</p>
+<ul>
+<li><code>.git</code></li>
+<li><code>CVS</code></li>
+<li><code>.svn</code></li>
+<li><code>.hg</code></li>
+<li><code>.lock-wscript</code></li>
+<li><code>.wafpickle-N</code></li>
+<li><code>*.swp</code></li>
+<li><code>.DS_Store</code></li>
+<li><code>._*</code></li>
+<li><code>npm-debug.log</code></li>
+</ul>
<h2 id="main">main</h2>
<p>The main field is a module ID that is the primary entry point to your program.
That is, if your package is named <code>foo</code>, and a user installs it, and then does
@@ -193,9 +217,12 @@ you&#39;ll see that it has directories for doc, lib, and man.</p>
<p>Tell people where the bulk of your library is. Nothing special is done
with the lib folder in any way, but it&#39;s useful meta info.</p>
<h3 id="directories-bin">directories.bin</h3>
-<p>If you specify a <code>bin</code> directory, then all the files in that folder will
-be added as children of the <code>bin</code> path.</p>
-<p>If you have a <code>bin</code> path already, then this has no effect.</p>
+<p>If you specify a <code>bin</code> directory in <code>directories.bin</code>, all the files in
+that folder will be added.</p>
+<p>Because of the way the <code>bin</code> directive works, specifying both a
+<code>bin</code> path and setting <code>directories.bin</code> is an error. If you want to
+specify individual files, use <code>bin</code>, and for all the files in an
+existing <code>bin</code> directory, use <code>directories.bin</code>.</p>
<h3 id="directories-man">directories.man</h3>
<p>A folder that is full of man pages. Sugar to generate a &quot;man&quot; array by
walking the folder.</p>
@@ -476,12 +503,12 @@ specific registry (for example, an internal registry), then use the
<code>publishConfig</code> dictionary described below to override the <code>registry</code> config
param at publish-time.</p>
<h2 id="publishconfig">publishConfig</h2>
-<p>This is a set of config values that will be used at publish-time. It&#39;s
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with &quot;latest&quot; or published to
-the global public registry by default.</p>
-<p>Any config values can be overridden, but of course only &quot;tag&quot; and
-&quot;registry&quot; probably matter for the purposes of publishing.</p>
+<p>This is a set of config values that will be used at publish-time. It&#39;s
+especially handy if you want to set the tag, registry or access, so that
+you can ensure that a given package is not tagged with &quot;latest&quot;, published
+to the global public registry or that a scoped module is private by default.</p>
+<p>Any config values can be overridden, but of course only &quot;tag&quot;, &quot;registry&quot; and
+&quot;access&quot; probably matter for the purposes of publishing.</p>
<p>See <code><a href="../misc/npm-config.html">npm-config(7)</a></code> to see the list of config options that can be
overridden.</p>
<h2 id="default-values">DEFAULT VALUES</h2>
diff --git a/deps/npm/html/partial/doc/files/npmrc.html b/deps/npm/html/partial/doc/files/npmrc.html
index f4106bebf..ed1eb0295 100644
--- a/deps/npm/html/partial/doc/files/npmrc.html
+++ b/deps/npm/html/partial/doc/files/npmrc.html
@@ -36,6 +36,8 @@ config values specific to this project.</p>
running npm in. It has no effect when your module is published. For
example, you can&#39;t publish a module that forces itself to install
globally, or in a different location.</p>
+<p>Additionally, this file is not read in global mode, such as when running
+<code>npm install -g</code>.</p>
<h3 id="per-user-config-file">Per-user config file</h3>
<p><code>$HOME/.npmrc</code> (or the <code>userconfig</code> param, if set in the environment
or on the command line)</p>
diff --git a/deps/npm/html/partial/doc/files/package.json.html b/deps/npm/html/partial/doc/files/package.json.html
index 752a10cbe..b2e54ba74 100644
--- a/deps/npm/html/partial/doc/files/package.json.html
+++ b/deps/npm/html/partial/doc/files/package.json.html
@@ -80,8 +80,8 @@ expression syntax version 2.0 string</a>, like this:</p>
<pre><code>{ &quot;license&quot; : &quot;(ISC OR GPL-3.0)&quot; }
</code></pre><p>If you are using a license that hasn&#39;t been assigned an SPDX identifier, or if
you are using a custom license, use the following valid SPDX expression:</p>
-<pre><code>{ &quot;license&quot; : &quot;LicenseRef-LICENSE&quot; }
-</code></pre><p>Then include a LICENSE file at the top level of the package.</p>
+<pre><code>{ &quot;license&quot; : &quot;SEE LICENSE IN &lt;filename&gt;&quot; }
+</code></pre><p>Then include a file named <code>&lt;filename&gt;</code> at the top level of the package.</p>
<p>Some old packages used license objects or a &quot;licenses&quot; property containing an
array of license objects:</p>
<pre><code>// Not valid metadata
@@ -106,7 +106,11 @@ array of license objects:</p>
<pre><code>{ &quot;license&quot;: &quot;ISC&quot; }
{ &quot;license&quot;: &quot;(MIT OR Apache-2.0)&quot; }
-</code></pre><h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
+</code></pre><p>Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:</p>
+<pre><code>{ &quot;license&quot;: &quot;UNLICENSED&quot;}
+</code></pre><p>Consider also setting <code>&quot;private&quot;: true</code> to prevent accidental publication.</p>
+<h2 id="people-fields-author-contributors">people fields: author, contributors</h2>
<p>The &quot;author&quot; is one person. &quot;contributors&quot; is an array of people. A &quot;person&quot;
is an object with a &quot;name&quot; field and optionally &quot;url&quot; and &quot;email&quot;, like this:</p>
<pre><code>{ &quot;name&quot; : &quot;Barney Rubble&quot;
@@ -125,6 +129,26 @@ inside that folder. (Unless they would be ignored by another rule.)</p>
which will keep files from being included, even if they would be picked
up by the files array. The &quot;.npmignore&quot; file works just like a
&quot;.gitignore&quot;.</p>
+<p>Certain files are always included, regardless of settings:</p>
+<ul>
+<li><code>package.json</code></li>
+<li><code><a href="../../doc/README.html">README</a></code> (and its variants)</li>
+<li><code>CHANGELOG</code> (and its variants)</li>
+<li><code>LICENSE</code> / <code>LICENCE</code></li>
+</ul>
+<p>Conversely, some files are always ignored:</p>
+<ul>
+<li><code>.git</code></li>
+<li><code>CVS</code></li>
+<li><code>.svn</code></li>
+<li><code>.hg</code></li>
+<li><code>.lock-wscript</code></li>
+<li><code>.wafpickle-N</code></li>
+<li><code>*.swp</code></li>
+<li><code>.DS_Store</code></li>
+<li><code>._*</code></li>
+<li><code>npm-debug.log</code></li>
+</ul>
<h2 id="main">main</h2>
<p>The main field is a module ID that is the primary entry point to your program.
That is, if your package is named <code>foo</code>, and a user installs it, and then does
@@ -193,9 +217,12 @@ you&#39;ll see that it has directories for doc, lib, and man.</p>
<p>Tell people where the bulk of your library is. Nothing special is done
with the lib folder in any way, but it&#39;s useful meta info.</p>
<h3 id="directories-bin">directories.bin</h3>
-<p>If you specify a <code>bin</code> directory, then all the files in that folder will
-be added as children of the <code>bin</code> path.</p>
-<p>If you have a <code>bin</code> path already, then this has no effect.</p>
+<p>If you specify a <code>bin</code> directory in <code>directories.bin</code>, all the files in
+that folder will be added.</p>
+<p>Because of the way the <code>bin</code> directive works, specifying both a
+<code>bin</code> path and setting <code>directories.bin</code> is an error. If you want to
+specify individual files, use <code>bin</code>, and for all the files in an
+existing <code>bin</code> directory, use <code>directories.bin</code>.</p>
<h3 id="directories-man">directories.man</h3>
<p>A folder that is full of man pages. Sugar to generate a &quot;man&quot; array by
walking the folder.</p>
@@ -476,12 +503,12 @@ specific registry (for example, an internal registry), then use the
<code>publishConfig</code> dictionary described below to override the <code>registry</code> config
param at publish-time.</p>
<h2 id="publishconfig">publishConfig</h2>
-<p>This is a set of config values that will be used at publish-time. It&#39;s
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with &quot;latest&quot; or published to
-the global public registry by default.</p>
-<p>Any config values can be overridden, but of course only &quot;tag&quot; and
-&quot;registry&quot; probably matter for the purposes of publishing.</p>
+<p>This is a set of config values that will be used at publish-time. It&#39;s
+especially handy if you want to set the tag, registry or access, so that
+you can ensure that a given package is not tagged with &quot;latest&quot;, published
+to the global public registry or that a scoped module is private by default.</p>
+<p>Any config values can be overridden, but of course only &quot;tag&quot;, &quot;registry&quot; and
+&quot;access&quot; probably matter for the purposes of publishing.</p>
<p>See <code><a href="../misc/npm-config.html">npm-config(7)</a></code> to see the list of config options that can be
overridden.</p>
<h2 id="default-values">DEFAULT VALUES</h2>
diff --git a/deps/npm/html/partial/doc/index.html b/deps/npm/html/partial/doc/index.html
index 2048487ee..d074d9127 100644
--- a/deps/npm/html/partial/doc/index.html
+++ b/deps/npm/html/partial/doc/index.html
@@ -55,6 +55,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-1-"><a href="cli/npm-pack.html">npm-pack(1)</a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-1-"><a href="cli/npm-ping.html">npm-ping(1)</a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-1-"><a href="cli/npm-prefix.html">npm-prefix(1)</a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-1-"><a href="cli/npm-prune.html">npm-prune(1)</a></h3>
@@ -141,6 +143,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-3-"><a href="api/npm-pack.html">npm-pack(3)</a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-3-"><a href="api/npm-ping.html">npm-ping(3)</a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-3-"><a href="api/npm-prefix.html">npm-prefix(3)</a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-3-"><a href="api/npm-prune.html">npm-prune(3)</a></h3>
diff --git a/deps/npm/html/partial/doc/misc/npm-disputes.html b/deps/npm/html/partial/doc/misc/npm-disputes.html
index ee9992f77..ab49d1e3d 100644
--- a/deps/npm/html/partial/doc/misc/npm-disputes.html
+++ b/deps/npm/html/partial/doc/misc/npm-disputes.html
@@ -2,7 +2,7 @@
<h2 id="synopsis">SYNOPSIS</h2>
<ol>
<li>Get the author email with <code>npm owner ls &lt;pkgname&gt;</code></li>
-<li>Email the author, CC <a href="&#x6d;&#x61;&#x69;&#x6c;&#x74;&#x6f;&#58;&#115;&#117;&#x70;&#112;&#111;&#x72;&#116;&#64;&#x6e;&#x70;&#109;&#x6a;&#x73;&#x2e;&#x63;&#x6f;&#109;">&#115;&#117;&#x70;&#112;&#111;&#x72;&#116;&#64;&#x6e;&#x70;&#109;&#x6a;&#x73;&#x2e;&#x63;&#x6f;&#109;</a></li>
+<li>Email the author, CC <a href="&#109;&#x61;&#x69;&#108;&#116;&#x6f;&#x3a;&#x73;&#117;&#112;&#112;&#111;&#114;&#x74;&#64;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#99;&#111;&#109;">&#x73;&#117;&#112;&#112;&#111;&#114;&#x74;&#64;&#110;&#x70;&#x6d;&#106;&#115;&#x2e;&#99;&#111;&#109;</a></li>
<li>After a few weeks, if there&#39;s no resolution, we&#39;ll sort it out.</li>
</ol>
<p>Don&#39;t squat on package names. Publish code or move out of the way.</p>
@@ -40,12 +40,12 @@ Joe&#39;s appropriate course of action in each case is the same.</p>
owner (Bob).</li>
<li>Joe emails Bob, explaining the situation <strong>as respectfully as
possible</strong>, and what he would like to do with the module name. He
-adds the npm support staff <a href="&#x6d;&#x61;&#105;&#108;&#116;&#x6f;&#58;&#x73;&#117;&#112;&#x70;&#111;&#114;&#116;&#x40;&#x6e;&#112;&#x6d;&#106;&#x73;&#x2e;&#99;&#111;&#109;">&#x73;&#117;&#112;&#x70;&#111;&#114;&#116;&#x40;&#x6e;&#112;&#x6d;&#106;&#x73;&#x2e;&#99;&#111;&#109;</a> to the CC list of
+adds the npm support staff <a href="&#x6d;&#x61;&#x69;&#108;&#116;&#111;&#x3a;&#115;&#117;&#x70;&#112;&#x6f;&#114;&#116;&#x40;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;">&#115;&#117;&#x70;&#112;&#x6f;&#114;&#116;&#x40;&#110;&#x70;&#109;&#106;&#115;&#x2e;&#x63;&#111;&#x6d;</a> to the CC list of
the email. Mention in the email that Bob can run <code>npm owner add
joe foo</code> to add Joe as an owner of the <code>foo</code> package.</li>
<li>After a reasonable amount of time, if Bob has not responded, or if
Bob and Joe can&#39;t come to any sort of resolution, email support
-<a href="&#109;&#x61;&#105;&#108;&#116;&#x6f;&#x3a;&#x73;&#x75;&#x70;&#x70;&#x6f;&#x72;&#116;&#64;&#110;&#112;&#109;&#x6a;&#x73;&#x2e;&#x63;&#111;&#x6d;">&#x73;&#x75;&#x70;&#x70;&#x6f;&#x72;&#116;&#64;&#110;&#112;&#109;&#x6a;&#x73;&#x2e;&#x63;&#111;&#x6d;</a> and we&#39;ll sort it out. (&quot;Reasonable&quot; is
+<a href="&#x6d;&#97;&#105;&#108;&#x74;&#111;&#x3a;&#x73;&#117;&#x70;&#x70;&#111;&#114;&#x74;&#64;&#x6e;&#x70;&#109;&#106;&#x73;&#46;&#x63;&#x6f;&#x6d;">&#x73;&#117;&#x70;&#x70;&#111;&#114;&#x74;&#64;&#x6e;&#x70;&#109;&#106;&#x73;&#46;&#x63;&#x6f;&#x6d;</a> and we&#39;ll sort it out. (&quot;Reasonable&quot; is
usually at least 4 weeks, but extra time is allowed around common
holidays.)</li>
</ol>
diff --git a/deps/npm/html/partial/doc/misc/npm-faq.html b/deps/npm/html/partial/doc/misc/npm-faq.html
index 448ef4694..435d43838 100644
--- a/deps/npm/html/partial/doc/misc/npm-faq.html
+++ b/deps/npm/html/partial/doc/misc/npm-faq.html
@@ -202,6 +202,7 @@ of Node 0.3.</p>
<p>Windows:</p>
<ul>
<li><a href="http://github.com/marcelklehr/nodist">http://github.com/marcelklehr/nodist</a></li>
+<li><a href="https://github.com/coreybutler/nvm-windows">https://github.com/coreybutler/nvm-windows</a></li>
<li><a href="https://github.com/hakobera/nvmw">https://github.com/hakobera/nvmw</a></li>
<li><a href="https://github.com/nanjingboy/nvmw">https://github.com/nanjingboy/nvmw</a></li>
</ul>
@@ -225,7 +226,7 @@ that has a package.json in its root, or a git url.
<p>To check if the registry is down, open up
<a href="https://registry.npmjs.org/">https://registry.npmjs.org/</a> in a web browser. This will also tell
you if you are just unable to access the internet for some reason.</p>
-<p>If the registry IS down, let us know by emailing <a href="&#109;&#97;&#x69;&#x6c;&#x74;&#x6f;&#x3a;&#x73;&#x75;&#112;&#112;&#111;&#114;&#116;&#64;&#x6e;&#112;&#x6d;&#x6a;&#x73;&#46;&#99;&#111;&#x6d;">&#x73;&#x75;&#112;&#112;&#111;&#114;&#116;&#64;&#x6e;&#112;&#x6d;&#x6a;&#x73;&#46;&#99;&#111;&#x6d;</a>
+<p>If the registry IS down, let us know by emailing <a href="&#109;&#x61;&#105;&#x6c;&#116;&#x6f;&#58;&#115;&#x75;&#x70;&#112;&#x6f;&#114;&#x74;&#x40;&#110;&#112;&#109;&#106;&#115;&#x2e;&#x63;&#x6f;&#x6d;">&#115;&#x75;&#x70;&#112;&#x6f;&#114;&#x74;&#x40;&#110;&#112;&#109;&#106;&#115;&#x2e;&#x63;&#x6f;&#x6d;</a>
or posting an issue at <a href="https://github.com/npm/npm/issues">https://github.com/npm/npm/issues</a>. If it&#39;s
down for the world (and not just on your local network) then we&#39;re
probably already being pinged about it.</p>
diff --git a/deps/npm/html/partial/doc/misc/npm-index.html b/deps/npm/html/partial/doc/misc/npm-index.html
index feeb5792c..a75c02577 100644
--- a/deps/npm/html/partial/doc/misc/npm-index.html
+++ b/deps/npm/html/partial/doc/misc/npm-index.html
@@ -55,6 +55,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-1-"><a href="../cli/npm-pack.html">npm-pack(1)</a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-1-"><a href="../cli/npm-ping.html">npm-ping(1)</a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-1-"><a href="../cli/npm-prefix.html">npm-prefix(1)</a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-1-"><a href="../cli/npm-prune.html">npm-prune(1)</a></h3>
@@ -141,6 +143,8 @@
<p>Manage package owners</p>
<h3 id="npm-pack-3-"><a href="../api/npm-pack.html">npm-pack(3)</a></h3>
<p>Create a tarball from a package</p>
+<h3 id="npm-ping-3-"><a href="../api/npm-ping.html">npm-ping(3)</a></h3>
+<p>Ping npm registry</p>
<h3 id="npm-prefix-3-"><a href="../api/npm-prefix.html">npm-prefix(3)</a></h3>
<p>Display prefix</p>
<h3 id="npm-prune-3-"><a href="../api/npm-prune.html">npm-prune(3)</a></h3>
diff --git a/deps/npm/lib/cache.js b/deps/npm/lib/cache.js
index cae09b181..2e11be32f 100644
--- a/deps/npm/lib/cache.js
+++ b/deps/npm/lib/cache.js
@@ -10,9 +10,9 @@
fetching a URL:
1. Check for URL in inflight URLs. If present, add cb, and return.
2. Acquire lock at {cache}/{sha(url)}.lock
- retries = {cache-lock-retries, def=3}
- stale = {cache-lock-stale, def=30000}
- wait = {cache-lock-wait, def=100}
+ retries = {cache-lock-retries, def=10}
+ stale = {cache-lock-stale, def=60000}
+ wait = {cache-lock-wait, def=10000}
3. if lock can't be acquired, then fail
4. fetch url, clear lock, call cbs
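Step 2 above, taken on its own with the new defaults, looks roughly like this using the same lockfile module npm depends on; the cache root and URL are illustrative:

    var crypto = require('crypto')
    var os = require('os')
    var path = require('path')
    var lockfile = require('lockfile')

    var cache = os.tmpdir()                       // stand-in for the npm cache root
    var u = 'https://registry.npmjs.org/npm'
    var lockPath = path.join(cache,
      crypto.createHash('sha1').update(u).digest('hex') + '.lock')

    // defaults after this change: retries=10, stale=60000ms, wait=10000ms
    lockfile.lock(lockPath, { retries: 10, stale: 60000, wait: 10000 }, function (er) {
      if (er) return console.error('could not acquire lock:', er.message)
      // ... fetch the URL here, then release the lock
      lockfile.unlock(lockPath, function () {})
    })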
diff --git a/deps/npm/lib/cache/add-named.js b/deps/npm/lib/cache/add-named.js
index cd06aa288..64a4670b0 100644
--- a/deps/npm/lib/cache/add-named.js
+++ b/deps/npm/lib/cache/add-named.js
@@ -250,7 +250,11 @@ function addNameRange (name, range, data, cb) {
var versions = Object.keys(data.versions || {})
var ms = semver.maxSatisfying(versions, range, true)
if (!ms) {
- return cb(installTargetsError(range, data))
+ if (range === "*" && versions.length) {
+ return addNameTag(name, "latest", data, cb)
+ } else {
+ return cb(installTargetsError(range, data))
+ }
}
// if we don't have a registry connection, try to see if
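In isolation, the new fallback amounts to the following; the version list is an illustrative case of a package that has only published prereleases:

    var semver = require('semver')

    var versions = ['1.0.0-alpha.1', '1.0.0-beta.2']
    var range = '*'

    // '*' does not match prerelease-only packages, so maxSatisfying is null here
    var ms = semver.maxSatisfying(versions, range, true)
    if (!ms && range === '*' && versions.length) {
      // mirror of the branch above: resolve via the registry's "latest"
      // dist-tag instead of failing with installTargetsError
      console.log('falling back to the latest dist-tag')
    }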
diff --git a/deps/npm/lib/cache/add-remote-git.js b/deps/npm/lib/cache/add-remote-git.js
index dc40cb3b6..d3ecccdce 100644
--- a/deps/npm/lib/cache/add-remote-git.js
+++ b/deps/npm/lib/cache/add-remote-git.js
@@ -15,7 +15,7 @@ var npa = require('npm-package-arg')
var realizePackageSpecifier = require('realize-package-specifier')
var addLocal = require('./add-local.js')
-var getCacheStat = require('./get-stat.js')
+var correctMkdir = require('../utils/correct-mkdir.js')
var git = require('../utils/git.js')
var npm = require('../npm.js')
var rm = require('../utils/gently-rm.js')
@@ -380,7 +380,7 @@ function checkoutTreeish (from, resolvedURL, resolvedTreeish, tmpdir, cb) {
}
function getGitDir (cb) {
- getCacheStat(function (er, stats) {
+ correctMkdir(remotes, function (er, stats) {
if (er) return cb(er)
// We don't need global templates when cloning. Use an empty directory for
@@ -391,11 +391,7 @@ function getGitDir (cb) {
// Ensure that both the template and remotes directories have the correct
// permissions.
fs.chown(templates, stats.uid, stats.gid, function (er) {
- if (er) return cb(er)
-
- fs.chown(remotes, stats.uid, stats.gid, function (er) {
- cb(er, stats)
- })
+ cb(er, stats)
})
})
})
@@ -429,25 +425,19 @@ function getResolved (uri, treeish) {
var parsed = url.parse(uri)
- // non-hosted SSH strings that are not URLs (git@whatever.com:foo.git) are
- // no bueno
- // https://github.com/npm/npm/issues/7961
- if (!parsed.protocol) return
-
- parsed.hash = treeish
- if (!/^git[+:]/.test(parsed.protocol)) {
- parsed.protocol = 'git+' + parsed.protocol
+ // Checks for known protocols:
+ // http:, https:, ssh:, and git:, with optional git+ prefix.
+ if (!parsed.protocol ||
+ !parsed.protocol.match(/^(((git\+)?(https?|ssh))|git|file):$/)) {
+ uri = 'git+ssh://' + uri
}
- // node incorrectly sticks a / at the start of the path We know that the host
- // won't change, so split and detect this
- // https://github.com/npm/npm/issues/3224
- var spo = uri.split(parsed.host)
- var spr = url.format(parsed).split(parsed.host)
- if (spo[1] && spo[1].charAt(0) === ':' && spr[1] && spr[1].charAt(0) === '/') {
- spr[1] = spr[1].slice(1)
+ if (!/^git[+:]/.test(uri)) {
+ uri = 'git+' + uri
}
- return spr.join(parsed.host)
+
+ // Not all URIs are actually URIs, so use regex for the treeish.
+ return uri.replace(/(?:#.*)?$/, '#' + treeish)
}
// similar to chmodr except it add permissions rather than overwriting them
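The URL normalization above can be exercised standalone; this is an illustrative restatement of getResolved, not an export from add-remote-git.js:

    var url = require('url')

    function normalizeGitUrl (uri, treeish) {
      var parsed = url.parse(uri)
      // scp-style "git@host:path" strings have no parseable protocol,
      // so they get an explicit git+ssh:// scheme
      if (!parsed.protocol ||
          !parsed.protocol.match(/^(((git\+)?(https?|ssh))|git|file):$/)) {
        uri = 'git+ssh://' + uri
      }
      if (!/^git[+:]/.test(uri)) uri = 'git+' + uri
      // replace any existing fragment with the requested treeish
      return uri.replace(/(?:#.*)?$/, '#' + treeish)
    }

    console.log(normalizeGitUrl('git@github.com:npm/npm.git', 'v2.13.4'))
    // -> git+ssh://git@github.com:npm/npm.git#v2.13.4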
diff --git a/deps/npm/lib/cache/get-stat.js b/deps/npm/lib/cache/get-stat.js
index 98f95ad6a..6ea797a4d 100644
--- a/deps/npm/lib/cache/get-stat.js
+++ b/deps/npm/lib/cache/get-stat.js
@@ -1,74 +1,6 @@
-var mkdir = require("mkdirp")
- , fs = require("graceful-fs")
- , log = require("npmlog")
- , chownr = require("chownr")
- , npm = require("../npm.js")
- , inflight = require("inflight")
+var npm = require('../npm.js')
+var correctMkdir = require('../utils/correct-mkdir.js')
-// to maintain the cache dir's permissions consistently.
-var cacheStat = null
module.exports = function getCacheStat (cb) {
- if (cacheStat) return cb(null, cacheStat)
-
- fs.stat(npm.cache, function (er, st) {
- if (er) return makeCacheDir(cb)
- if (!st.isDirectory()) {
- log.error("getCacheStat", "invalid cache dir %j", npm.cache)
- return cb(er)
- }
- return cb(null, cacheStat = st)
- })
-}
-
-function makeCacheDir (cb) {
- cb = inflight("makeCacheDir", cb)
- if (!cb) {
- return log.verbose(
- "getCacheStat",
- "cache creation already in flight; waiting"
- )
- }
- log.verbose("getCacheStat", "cache creation not in flight; initializing")
-
- if (!process.getuid) return mkdir(npm.cache, function (er) {
- log.verbose("makeCacheDir", "UID & GID are irrelevant on", process.platform)
- cacheStat = { uid : 0, gid : 0 }
- return cb(er, cacheStat)
- })
-
- var uid = +process.getuid()
- , gid = +process.getgid()
-
- if (uid === 0) {
- if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
- if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
- }
-
- if (uid !== 0 || !process.env.HOME) {
- cacheStat = { uid : uid, gid : gid }
- return mkdir(npm.cache, afterMkdir)
- }
-
- fs.stat(process.env.HOME, function (er, st) {
- if (er) {
- log.error("makeCacheDir", "homeless?")
- return cb(er)
- }
- cacheStat = st
- log.silly("makeCacheDir", "cache dir uid, gid", [st.uid, st.gid])
- return mkdir(npm.cache, afterMkdir)
- })
-
- function afterMkdir (er, made) {
- if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) {
- return cb(er, cacheStat)
- }
-
- if (!made) return cb(er, cacheStat)
-
- // ensure that the ownership is correct.
- chownr(made, cacheStat.uid, cacheStat.gid, function (er) {
- return cb(er, cacheStat)
- })
- }
+ correctMkdir(npm.cache, cb)
}
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 4ab248a49..9cc6a46fc 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -122,10 +122,11 @@ function install (args, cb_) {
if (er) return cb_(er)
if (problem) {
- var peerInvalidError = new Error("The package " + problem.name +
+ var peerInvalidError = new Error("The package " + problem._id +
" does not satisfy its siblings' peerDependencies requirements!")
peerInvalidError.code = "EPEERINVALID"
peerInvalidError.packageName = problem.name
+ peerInvalidError.packageVersion = problem.version
peerInvalidError.peersDepending = problem.peersDepending
return cb(peerInvalidError)
}
@@ -287,7 +288,7 @@ function findPeerInvalid_ (packageMap, fpiList) {
peer.peerDependencies[packageName]
}
}
- return { name: pkg.name, peersDepending: peersDepending }
+ return { name: pkg.name, peersDepending: peersDepending, version: pkg.version, _id: pkg._id }
}
if (pkg.dependencies) {
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index a2756ed7d..97cdad4de 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -15,7 +15,8 @@ var EventEmitter = require("events").EventEmitter
, npm = module.exports = new EventEmitter()
, npmconf = require("./config/core.js")
, log = require("npmlog")
- , fs = require("graceful-fs")
+ , gfs = require('graceful-fs')
+ , fs = gfs.gracefulify(require('fs'))
, path = require("path")
, abbrev = require("abbrev")
, which = require("which")
@@ -133,6 +134,7 @@ var commandCache = {}
, "bin"
, "whoami"
, "dist-tag"
+ , "ping"
, "test"
, "stop"
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index fa27dfc80..ab49d1096 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -344,7 +344,7 @@ function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
function updateDeps (er, d) {
if (er) {
- if (parsed.type !== 'local') return cb()
+ if (parsed.type !== 'local') return cb(er)
return updateLocalDeps()
}
diff --git a/deps/npm/lib/ping.js b/deps/npm/lib/ping.js
new file mode 100644
index 000000000..23b18bfba
--- /dev/null
+++ b/deps/npm/lib/ping.js
@@ -0,0 +1,20 @@
+var npm = require('./npm.js')
+
+module.exports = ping
+
+ping.usage = 'npm ping\nping registry'
+
+function ping (args, silent, cb) {
+ if (typeof cb !== 'function') {
+ cb = silent
+ silent = false
+ }
+ var registry = npm.config.get('registry')
+ if (!registry) return cb(new Error('no default registry set'))
+ var auth = npm.config.getCredentialsByURI(registry)
+
+ npm.registry.ping(registry, {auth: auth}, function (er, pong) {
+ if (!silent) console.log(JSON.stringify(pong))
+ cb(er, er ? null : pong)
+ })
+}
diff --git a/deps/npm/lib/utils/correct-mkdir.js b/deps/npm/lib/utils/correct-mkdir.js
new file mode 100644
index 000000000..650c56fb1
--- /dev/null
+++ b/deps/npm/lib/utils/correct-mkdir.js
@@ -0,0 +1,110 @@
+var chownr = require('chownr')
+var dezalgo = require('dezalgo')
+var fs = require('graceful-fs')
+var inflight = require('inflight')
+var log = require('npmlog')
+var mkdirp = require('mkdirp')
+
+// memoize the directories created by this step
+var stats = {}
+var effectiveOwner
+module.exports = function correctMkdir (path, cb) {
+ cb = dezalgo(cb)
+ if (stats[path]) return cb(null, stats[path])
+
+ fs.stat(path, function (er, st) {
+ if (er) return makeDirectory(path, cb)
+
+ if (!st.isDirectory()) {
+ log.error('correctMkdir', 'invalid dir %s', path)
+ return cb(er)
+ }
+
+ var ownerStats = calculateOwner()
+ // there's always a chance the permissions could have been frobbed, so fix
+ if (st.uid !== ownerStats.uid) {
+ stats[path] = ownerStats
+ setPermissions(path, ownerStats, cb)
+ } else {
+ stats[path] = st
+ cb(null, stats[path])
+ }
+ })
+}
+
+function calculateOwner () {
+ if (!effectiveOwner) {
+ effectiveOwner = { uid: 0, gid: 0 }
+
+ if (process.getuid) effectiveOwner.uid = +process.getuid()
+ if (process.getgid) effectiveOwner.gid = +process.getgid()
+
+ if (effectiveOwner.uid === 0) {
+ if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID
+ if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID
+ }
+ }
+
+ return effectiveOwner
+}
+
+function makeDirectory (path, cb) {
+ cb = inflight('makeDirectory:' + path, cb)
+ if (!cb) {
+ return log.verbose('makeDirectory', path, 'creation already in flight; waiting')
+ } else {
+ log.verbose('makeDirectory', path, 'creation not in flight; initializing')
+ }
+
+ var owner = calculateOwner()
+
+ if (!process.getuid) {
+ return mkdirp(path, function (er) {
+ log.verbose('makeCacheDir', 'UID & GID are irrelevant on', process.platform)
+
+ stats[path] = owner
+ return cb(er, stats[path])
+ })
+ }
+
+ if (owner.uid !== 0 || !process.env.HOME) {
+ log.silly(
+ 'makeDirectory', path,
+ 'uid:', owner.uid,
+ 'gid:', owner.gid
+ )
+ stats[path] = owner
+ mkdirp(path, afterMkdir)
+ } else {
+ fs.stat(process.env.HOME, function (er, st) {
+ if (er) {
+ log.error('makeDirectory', 'homeless?')
+ return cb(er)
+ }
+
+ log.silly(
+ 'makeDirectory', path,
+ 'uid:', st.uid,
+ 'gid:', st.gid
+ )
+ stats[path] = st
+ mkdirp(path, afterMkdir)
+ })
+ }
+
+ function afterMkdir (er, made) {
+ if (er || !stats[path] || isNaN(stats[path].uid) || isNaN(stats[path].gid)) {
+ return cb(er, stats[path])
+ }
+
+ if (!made) return cb(er, stats[path])
+
+ setPermissions(made, stats[path], cb)
+ }
+}
+
+function setPermissions (path, st, cb) {
+ chownr(path, st.uid, st.gid, function (er) {
+ return cb(er, st)
+ })
+}
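A minimal usage sketch for the new helper; the target path is illustrative and the require path assumes the caller sits next to correct-mkdir.js:

    var path = require('path')
    var os = require('os')
    var correctMkdir = require('./correct-mkdir.js')

    var target = path.join(os.tmpdir(), 'npm-cache-example')

    // creates the directory if needed, fixes ownership when running under
    // sudo, and memoizes the resulting stats for subsequent callers
    correctMkdir(target, function (er, st) {
      if (er) return console.error('could not prepare directory:', er.message)
      console.log('directory ready, owned by uid %d / gid %d', st.uid, st.gid)
    })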
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
index 299fa5698..a6f2b98e8 100644
--- a/deps/npm/lib/utils/lifecycle.js
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -39,8 +39,9 @@ function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
unsafe = unsafe || npm.config.get("unsafe-perm")
- if ((wd.indexOf(npm.dir) !== 0 || path.basename(wd) !== pkg.name)
- && !unsafe && pkg.scripts[stage]) {
+ if ((wd.indexOf(npm.dir) !== 0 ||
+ wd.indexOf(pkg.name) !== wd.length - pkg.name.length) &&
+ !unsafe && pkg.scripts[stage]) {
log.warn( "cannot run in wd", "%s %s (wd=%s)"
, pkg._id, pkg.scripts[stage], wd)
return cb()
diff --git a/deps/npm/lib/utils/locker.js b/deps/npm/lib/utils/locker.js
index 4479f241d..293d2da05 100644
--- a/deps/npm/lib/utils/locker.js
+++ b/deps/npm/lib/utils/locker.js
@@ -6,7 +6,7 @@ var log = require("npmlog")
var mkdirp = require("mkdirp")
var npm = require("../npm.js")
-var getStat = require("../cache/get-stat.js")
+var correctMkdir = require('../utils/correct-mkdir.js')
var installLocks = {}
@@ -20,25 +20,23 @@ function lockFileName (base, name) {
}
function lock (base, name, cb) {
- getStat(function (er) {
- var lockDir = resolve(npm.cache, "_locks")
- mkdirp(lockDir, function () {
- if (er) return cb(er)
+ var lockDir = resolve(npm.cache, "_locks")
+ correctMkdir(lockDir, function (er) {
+ if (er) return cb(er)
- var opts = { stale: npm.config.get("cache-lock-stale")
- , retries: npm.config.get("cache-lock-retries")
- , wait: npm.config.get("cache-lock-wait") }
- var lf = lockFileName(base, name)
- lockfile.lock(lf, opts, function (er) {
- if (er) log.warn("locking", lf, "failed", er)
+ var opts = { stale: npm.config.get("cache-lock-stale")
+ , retries: npm.config.get("cache-lock-retries")
+ , wait: npm.config.get("cache-lock-wait") }
+ var lf = lockFileName(base, name)
+ lockfile.lock(lf, opts, function (er) {
+ if (er) log.warn("locking", lf, "failed", er)
- if (!er) {
- log.verbose("lock", "using", lf, "for", resolve(base, name))
- installLocks[lf] = true
- }
+ if (!er) {
+ log.verbose("lock", "using", lf, "for", resolve(base, name))
+ installLocks[lf] = true
+ }
- cb(er)
- })
+ cb(er)
})
})
}
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
index f763aad0a..dbd48a014 100644
--- a/deps/npm/lib/version.js
+++ b/deps/npm/lib/version.js
@@ -2,36 +2,38 @@
module.exports = version
-var semver = require("semver")
- , path = require("path")
- , fs = require("graceful-fs")
- , writeFileAtomic = require("write-file-atomic")
- , chain = require("slide").chain
- , log = require("npmlog")
- , npm = require("./npm.js")
- , git = require("./utils/git.js")
- , assert = require("assert")
- , lifecycle = require("./utils/lifecycle.js")
-
-version.usage = "npm version [<newversion> | major | minor | patch | prerelease | preminor | premajor ]\n"
- + "\n(run in package dir)\n"
- + "'npm -v' or 'npm --version' to print npm version "
- + "("+npm.version+")\n"
- + "'npm view <pkg> version' to view a package's "
- + "published version\n"
- + "'npm ls' to inspect current package/dependency versions"
+var semver = require('semver')
+var path = require('path')
+var fs = require('graceful-fs')
+var writeFileAtomic = require('write-file-atomic')
+var chain = require('slide').chain
+var log = require('npmlog')
+var npm = require('./npm.js')
+var git = require('./utils/git.js')
+var assert = require('assert')
+var lifecycle = require('./utils/lifecycle.js')
+
+version.usage = 'npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease]' +
+ '\n(run in package dir)\n' +
+ "'npm -v' or 'npm --version' to print npm version " +
+ '(' + npm.version + ')\n' +
+ "'npm view <pkg> version' to view a package's " +
+ 'published version\n' +
+ "'npm ls' to inspect current package/dependency versions"
function version (args, silent, cb_) {
- if (typeof cb_ !== "function") cb_ = silent, silent = false
+ if (typeof cb_ !== 'function') {
+ cb_ = silent
+ silent = false
+ }
if (args.length > 1) return cb_(version.usage)
- var packagePath = path.join(npm.localPrefix, "package.json")
+ var packagePath = path.join(npm.localPrefix, 'package.json')
fs.readFile(packagePath, function (er, data) {
if (data) data = data.toString()
try {
data = JSON.parse(data)
- }
- catch (e) {
+ } catch (e) {
er = e
data = null
}
@@ -39,65 +41,82 @@ function version (args, silent, cb_) {
if (!args.length) return dump(data, cb_)
if (er) {
- log.error("version", "No valid package.json found")
+ log.error('version', 'No valid package.json found')
return cb_(er)
}
var newVersion = semver.valid(args[0])
if (!newVersion) newVersion = semver.inc(data.version, args[0])
if (!newVersion) return cb_(version.usage)
- if (data.version === newVersion) return cb_(new Error("Version not changed"))
+ if (data.version === newVersion) return cb_(new Error('Version not changed'))
data.version = newVersion
var lifecycleData = Object.create(data)
- lifecycleData._id = data.name + "@" + newVersion
+ lifecycleData._id = data.name + '@' + newVersion
+ var localData = {}
var where = npm.prefix
chain([
- [lifecycle, lifecycleData, "preversion", where]
- , [version_, data, silent]
- , [lifecycle, lifecycleData, "version", where]
- , [lifecycle, lifecycleData, "postversion", where] ]
- , cb_)
+ [checkGit, localData],
+ [lifecycle, lifecycleData, 'preversion', where],
+ [updatePackage, newVersion, silent],
+ [lifecycle, lifecycleData, 'version', where],
+ [commit, localData, newVersion],
+ [lifecycle, lifecycleData, 'postversion', where] ],
+ cb_)
})
}
-function version_ (data, silent, cb_) {
+function readPackage (cb) {
+ var packagePath = path.join(npm.localPrefix, 'package.json')
+ fs.readFile(packagePath, function (er, data) {
+ if (er) return cb(new Error(er))
+ if (data) data = data.toString()
+ try {
+ data = JSON.parse(data)
+ } catch (e) {
+ er = e
+ data = null
+ }
+ cb(er, data)
+ })
+}
+
+function updatePackage (newVersion, silent, cb_) {
function cb (er) {
- if (!er && !silent) console.log("v" + data.version)
+ if (!er && !silent) console.log('v' + newVersion)
cb_(er)
}
- checkGit(function (er, hasGit) {
+ readPackage(function (er, data) {
if (er) return cb(new Error(er))
+ data.version = newVersion
+ write(data, 'package.json', cb)
+ })
+}
- write(data, "package.json", function (er) {
- if (er) return cb(new Error(er))
-
- updateShrinkwrap(data.version, function (er, hasShrinkwrap) {
- if (er || !hasGit) return cb(er)
- commit(data.version, hasShrinkwrap, cb)
- })
- })
+function commit (localData, newVersion, cb) {
+ updateShrinkwrap(newVersion, function (er, hasShrinkwrap) {
+ if (er || !localData.hasGit) return cb(er)
+ _commit(newVersion, hasShrinkwrap, cb)
})
}
function updateShrinkwrap (newVersion, cb) {
- fs.readFile(path.join(npm.localPrefix, "npm-shrinkwrap.json"), function (er, data) {
- if (er && er.code === "ENOENT") return cb(null, false)
+ fs.readFile(path.join(npm.localPrefix, 'npm-shrinkwrap.json'), function (er, data) {
+ if (er && er.code === 'ENOENT') return cb(null, false)
try {
data = data.toString()
data = JSON.parse(data)
- }
- catch (er) {
- log.error("version", "Bad npm-shrinkwrap.json data")
+ } catch (er) {
+ log.error('version', 'Bad npm-shrinkwrap.json data')
return cb(er)
}
data.version = newVersion
- write(data, "npm-shrinkwrap.json", function (er) {
+ write(data, 'npm-shrinkwrap.json', function (er) {
if (er) {
- log.error("version", "Bad npm-shrinkwrap.json data")
+ log.error('version', 'Bad npm-shrinkwrap.json data')
return cb(er)
}
cb(null, true)
@@ -114,74 +133,76 @@ function dump (data, cb) {
v[k] = process.versions[k]
})
- if (npm.config.get("json")) v = JSON.stringify(v, null, 2)
+ if (npm.config.get('json')) v = JSON.stringify(v, null, 2)
console.log(v)
cb()
}
-function checkGit (cb) {
- fs.stat(path.join(npm.localPrefix, ".git"), function (er, s) {
- var doGit = !er && s.isDirectory() && npm.config.get("git-tag-version")
+function checkGit (localData, cb) {
+ fs.stat(path.join(npm.localPrefix, '.git'), function (er, s) {
+ var doGit = !er && npm.config.get('git-tag-version')
if (!doGit) {
- if (er) log.verbose("version", "error checking for .git", er)
- log.verbose("version", "not tagging in git")
+ if (er) log.verbose('version', 'error checking for .git', er)
+ log.verbose('version', 'not tagging in git')
return cb(null, false)
}
// check for git
git.whichAndExec(
- [ "status", "--porcelain" ],
- { env : process.env },
+ [ 'status', '--porcelain' ],
+ { env: process.env },
function (er, stdout) {
- if (er && er.code === "ENOGIT") {
+ if (er && er.code === 'ENOGIT') {
log.warn(
- "version",
- "This is a Git checkout, but the git command was not found.",
- "npm could not create a Git tag for this release!"
+ 'version',
+ 'This is a Git checkout, but the git command was not found.',
+ 'npm could not create a Git tag for this release!'
)
return cb(null, false)
}
- var lines = stdout.trim().split("\n").filter(function (line) {
+ var lines = stdout.trim().split('\n').filter(function (line) {
return line.trim() && !line.match(/^\?\? /)
}).map(function (line) {
return line.trim()
})
- if (lines.length) return cb(new Error(
- "Git working directory not clean.\n"+lines.join("\n")
- ))
-
+ if (lines.length && !npm.config.get('force')) {
+ return cb(new Error(
+ 'Git working directory not clean.\n' + lines.join('\n')
+ ))
+ }
+ localData.hasGit = true
cb(null, true)
}
)
})
}
-function commit (version, hasShrinkwrap, cb) {
- var options = { env : process.env }
- var message = npm.config.get("message").replace(/%s/g, version)
- var sign = npm.config.get("sign-git-tag")
- var flag = sign ? "-sm" : "-am"
+function _commit (version, hasShrinkwrap, cb) {
+ var options = { env: process.env }
+ var message = npm.config.get('message').replace(/%s/g, version)
+ var sign = npm.config.get('sign-git-tag')
+ var flag = sign ? '-sm' : '-am'
chain(
[
- git.chainableExec([ "add", "package.json" ], options),
- hasShrinkwrap && git.chainableExec([ "add", "npm-shrinkwrap.json" ] , options),
- git.chainableExec([ "commit", "-m", message ], options),
- git.chainableExec([ "tag", npm.config.get("tag-version-prefix") + version, flag, message ], options)
+ git.chainableExec([ 'add', 'package.json' ], options),
+ hasShrinkwrap && git.chainableExec([ 'add', 'npm-shrinkwrap.json' ], options),
+ git.chainableExec([ 'commit', '-m', message ], options),
+ git.chainableExec([ 'tag', npm.config.get('tag-version-prefix') + version, flag, message ], options)
],
cb
)
}
function write (data, file, cb) {
- assert(data && typeof data === "object", "must pass data to version write")
- assert(typeof file === "string", "must pass filename to write to version write")
+ assert(data && typeof data === 'object', 'must pass data to version write')
+ assert(typeof file === 'string', 'must pass filename to write to version write')
- log.verbose("version.write", "data", data, "to", file)
+ log.verbose('version.write', 'data', data, 'to', file)
writeFileAtomic(
path.join(npm.localPrefix, file),
- new Buffer(JSON.stringify(data, null, 2) + "\n"),
+ new Buffer(JSON.stringify(data, null, 2) + '\n'),
cb
)
}
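
The version.js hunks above replace the old nested version_ callback with a flat chain of steps, threading git state through a shared localData object instead of a hasGit callback argument. A minimal sketch of that pattern, assuming npm's internals (npm, git, log) are already loaded and we are inside a package directory; the names come from the hunks above, everything else is illustrative:

    // checkGit records its findings on a shared object rather than passing a
    // hasGit flag down the callback chain; commit() later reads the same object.
    var localData = {}
    checkGit(localData, function (er) {
      if (er) return console.error(er.message) // e.g. "Git working directory not clean."
      // hasGit is set only when a usable git checkout was found
      // (and the working tree is clean, unless --force is given).
      console.log('hasGit:', !!localData.hasGit)
      // Later in the chain, commit(localData, newVersion, cb) skips the
      // git add/commit/tag step entirely when localData.hasGit was never set.
    })
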
diff --git a/deps/npm/lib/view.js b/deps/npm/lib/view.js
index 47da39b64..9199d352a 100644
--- a/deps/npm/lib/view.js
+++ b/deps/npm/lib/view.js
@@ -254,7 +254,13 @@ function printData (data, name, cb) {
})
})
- console.log(msg)
+ // preserve output symmetry by adding a whitespace-only line at the end if
+ // there's one at the beginning
+ if (/^\s*\n/.test(msg)) msg += "\n"
+
+ // print directly to stdout to not unnecessarily add blank lines
+ process.stdout.write(msg)
+
cb(null, data)
}
function cleanup (data) {
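
The view.js hunk above does two things: it writes the formatted message straight to stdout so npm view no longer gets console.log's extra newline, and it appends a trailing blank line only when the message already opens with a whitespace-only line. A standalone illustration of that symmetry rule, with msg as a hypothetical formatted result:

    var msg = '\n  some-package@1.0.0\n  license: ISC\n'
    // mirror a leading blank line with a trailing one
    if (/^\s*\n/.test(msg)) msg += '\n'
    // write directly so no extra blank line is added unconditionally
    process.stdout.write(msg)
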
diff --git a/deps/npm/man/man1/npm-README.1 b/deps/npm/man/man1/npm-README.1
index 7a755a96b..0f9a49903 100644
--- a/deps/npm/man/man1/npm-README.1
+++ b/deps/npm/man/man1/npm-README.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "June 2015" "" ""
+.TH "NPM" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm\fR \- a JavaScript package manager
.P
@@ -7,7 +7,7 @@ Build Status \fIhttps://img\.shields\.io/travis/npm/npm/master\.svg\fR \fIhttps:
.P
This is just enough info to get you up and running\.
.P
-Much more info available via \fBnpm help\fR once it's installed\.
+Much more info available via \fBnpm help\fP once it's installed\.
.SH IMPORTANT
.P
\fBYou need node v0\.8 or higher to run this program\.\fR
@@ -25,7 +25,7 @@ Get the MSI \fIhttp://nodejs\.org/download/\fR\|\. npm is in it\.
Get the pkg \fIhttp://nodejs\.org/download/\fR\|\. npm is in it\.
.SS Other Sorts of Unices
.P
-Run \fBmake install\fR\|\. npm will be installed with node\.
+Run \fBmake install\fP\|\. npm will be installed with node\.
.P
If you want a more fancy pants install (a different version, customized
paths, etc\.) then read on\.
@@ -60,17 +60,17 @@ npm_debug=1 sh install\.sh
.RE
.SS Even Fancier
.P
-Get the code with git\. Use \fBmake\fR to build the docs and do other stuff\.
-If you plan on hacking on npm, \fBmake link\fR is your friend\.
+Get the code with git\. Use \fBmake\fP to build the docs and do other stuff\.
+If you plan on hacking on npm, \fBmake link\fP is your friend\.
.P
If you've got the npm source code, you can also semi\-permanently set
-arbitrary config keys using the \fB\|\./configure \-\-key=val \.\.\.\fR, and then
-run npm commands by doing \fBnode cli\.js <cmd> <args>\fR\|\. (This is helpful
+arbitrary config keys using the \fB\|\./configure \-\-key=val \.\.\.\fP, and then
+run npm commands by doing \fBnode cli\.js <cmd> <args>\fP\|\. (This is helpful
for testing, or running stuff without actually installing npm itself\.)
.SH Windows Install or Upgrade
.P
You can download a zip file from https://github\.com/npm/npm/releases, and
-unpack it in the \fBnode_modules\\npm\\\fR folder inside node's installation folder\.
+unpack it in the \fBnode_modules\\npm\\\fP folder inside node's installation folder\.
.P
To upgrade to npm 2, follow the Windows upgrade instructions in
the npm Troubleshooting Guide:
@@ -105,11 +105,11 @@ Usually, the above instructions are sufficient\. That will remove
npm, but leave behind anything you've installed\.
.P
If you would like to remove all the packages that you have installed,
-then you can use the \fBnpm ls\fR command to find them, and then \fBnpm rm\fR to
+then you can use the \fBnpm ls\fP command to find them, and then \fBnpm rm\fP to
remove them\.
.P
To remove cruft left behind by npm 0\.x, you can use the included
-\fBclean\-old\.sh\fR script file\. You can run it conveniently like this:
+\fBclean\-old\.sh\fP script file\. You can run it conveniently like this:
.P
.RS 2
.nf
@@ -136,7 +136,7 @@ you have chosen\.
Although npm can be used programmatically, its API is meant for use by the CLI
\fIonly\fR, and no guarantees are made regarding its fitness for any other purpose\.
If you want to use npm to reliably perform some task, the safest thing to do is
-to invoke the desired \fBnpm\fR command with appropriate arguments\.
+to invoke the desired \fBnpm\fP command with appropriate arguments\.
.P
The semantic version of npm refers to the CLI itself, rather than the
underlying API\. \fIThe internal API is not guaranteed to remain stable even when
@@ -165,25 +165,25 @@ npm\.load(myConfigObject, function (er) {
.fi
.RE
.P
-The \fBload\fR function takes an object hash of the command\-line configs\.
-The various \fBnpm\.commands\.<cmd>\fR functions take an \fBarray\fR of
+The \fBload\fP function takes an object hash of the command\-line configs\.
+The various \fBnpm\.commands\.<cmd>\fP functions take an \fBarray\fR of
positional argument \fBstrings\fR\|\. The last argument to any
-\fBnpm\.commands\.<cmd>\fR function is a callback\. Some commands take other
+\fBnpm\.commands\.<cmd>\fP function is a callback\. Some commands take other
optional arguments\. Read the source\.
.P
You cannot set configs individually for any single npm function at this
-time\. Since \fBnpm\fR is a singleton, any call to \fBnpm\.config\.set\fR will
+time\. Since \fBnpm\fP is a singleton, any call to \fBnpm\.config\.set\fP will
change the value for \fIall\fR npm commands in that process\.
.P
-See \fB\|\./bin/npm\-cli\.js\fR for an example of pulling config values off of the
+See \fB\|\./bin/npm\-cli\.js\fP for an example of pulling config values off of the
command line arguments using nopt\. You may also want to check out \fBnpm
-help config\fR to learn about all the options you can set there\.
+help config\fP to learn about all the options you can set there\.
.SH More Docs
.P
Check out the docs \fIhttps://docs\.npmjs\.com/\fR,
especially the faq \fIhttps://docs\.npmjs\.com/misc/faq\fR\|\.
.P
-You can use the \fBnpm help\fR command to read any of them\.
+You can use the \fBnpm help\fP command to read any of them\.
.P
If you're a developer, and you want to use npm to publish your program,
you should read this \fIhttps://docs\.npmjs\.com/misc/developers\fR
@@ -237,7 +237,7 @@ https://github\.com/npm/npm/issues
.RE
.P
Be sure to include \fIall\fR of the output from the npm command that didn't work
-as expected\. The \fBnpm\-debug\.log\fR file is also helpful to provide\.
+as expected\. The \fBnpm\-debug\.log\fP file is also helpful to provide\.
.P
You can also look for isaacs in #node\.js on irc://irc\.freenode\.net\. He
will no doubt tell you to put the output in a gist or email\.
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 6e0caff83..c0e4f0352 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ACCESS" "1" "June 2015" "" ""
+.TH "NPM\-ACCESS" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-access\fR \- Set access level on published packages
.SH SYNOPSIS
@@ -19,7 +19,7 @@ npm access edit [<package>]
.P
Used to set access controls on private packages\.
.P
-For all of the subcommands, \fBnpm access\fR will perform actions on the packages
+For all of the subcommands, \fBnpm access\fP will perform actions on the packages
in the current working directory if no package name is passed to the
subcommand\.
.RS 0
@@ -36,19 +36,19 @@ Show all of the access privileges for a package\. Will only show permissions
for packages to which you have at least read access\.
.IP \(bu 2
edit:
-Set the access privileges for a package at once using \fB$EDITOR\fR\|\.
+Set the access privileges for a package at once using \fB$EDITOR\fP\|\.
.RE
.SH DETAILS
.P
-\fBnpm access\fR always operates directly on the current registry, configurable
-from the command line using \fB\-\-registry=<registry url>\fR\|\.
+\fBnpm access\fP always operates directly on the current registry, configurable
+from the command line using \fB\-\-registry=<registry url>\fP\|\.
.P
Unscoped packages are \fIalways public\fR\|\.
.P
Scoped packages \fIdefault to restricted\fR, but you can either publish them as
-public using \fBnpm publish \-\-access=public\fR, or set their access as public using
-\fBnpm access public\fR after the initial publish\.
+public using \fBnpm publish \-\-access=public\fP, or set their access as public using
+\fBnpm access public\fP after the initial publish\.
.P
You must have privileges to set the access of a package:
.RS 0
@@ -64,7 +64,7 @@ of a team or directly as an owner\.
.P
If your account is not paid, then attempts to publish scoped packages will fail
with an HTTP 402 status code (logically enough), unless you use
-\fB\-\-access=public\fR\|\.
+\fB\-\-access=public\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index 4c8015eff..ca4155402 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ADDUSER" "1" "June 2015" "" ""
+.TH "NPM\-ADDUSER" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-adduser\fR \- Add a registry user account
.SH SYNOPSIS
@@ -10,9 +10,9 @@ npm adduser [\-\-registry=url] [\-\-scope=@orgname] [\-\-always\-auth]
.RE
.SH DESCRIPTION
.P
-Create or verify a user named \fB<username>\fR in the specified registry, and
-save the credentials to the \fB\|\.npmrc\fR file\. If no registry is specified,
-the default registry will be used (see npm help 7 \fBnpm\-config\fR)\.
+Create or verify a user named \fB<username>\fP in the specified registry, and
+save the credentials to the \fB\|\.npmrc\fP file\. If no registry is specified,
+the default registry will be used (see npm help 7 \fBnpm\-config\fP)\.
.P
The username, password, and email are read in from prompts\.
.P
@@ -25,20 +25,20 @@ authorize on a new machine\. When authenticating on a new machine,
the username, password and email address must all match with
your existing record\.
.P
-\fBnpm login\fR is an alias to \fBadduser\fR and behaves exactly the same way\.
+\fBnpm login\fP is an alias to \fBadduser\fP and behaves exactly the same way\.
.SH CONFIGURATION
.SS registry
.P
Default: http://registry\.npmjs\.org/
.P
-The base URL of the npm package registry\. If \fBscope\fR is also specified,
-this registry will only be used for packages with that scope\. See npm help 7 \fBnpm\-scope\fR\|\.
+The base URL of the npm package registry\. If \fBscope\fP is also specified,
+this registry will only be used for packages with that scope\. See npm help 7 \fBnpm\-scope\fP\|\.
.SS scope
.P
Default: none
.P
If specified, the user and login credentials given will be associated
-with the specified scope\. See npm help 7 \fBnpm\-scope\fR\|\. You can use both at the same time,
+with the specified scope\. See npm help 7 \fBnpm\-scope\fP\|\. You can use both at the same time,
e\.g\.
.P
.RS 2
@@ -55,7 +55,7 @@ Default: false
.P
If specified, save configuration indicating that all requests to the given
registry should include authorization information\. Useful for private
-registries\. Can be used with \fB\-\-registry\fR and / or \fB\-\-scope\fR, e\.g\.
+registries\. Can be used with \fB\-\-registry\fP and / or \fB\-\-scope\fP, e\.g\.
.P
.RS 2
.nf
@@ -64,8 +64,8 @@ npm adduser \-\-registry=http://private\-registry\.example\.com \-\-always\-auth
.RE
.P
This will ensure that all requests to that registry (including for tarballs)
-include an authorization header\. See \fBalways\-auth\fR in npm help 7 \fBnpm\-config\fR for more
-details on always\-auth\. Registry\-specific configuration of \fBalways\-auth\fR takes
+include an authorization header\. See \fBalways\-auth\fP in npm help 7 \fBnpm\-config\fP for more
+details on always\-auth\. Registry\-specific configuration of \fBalways\-auth\fP takes
precedence over any global configuration\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man1/npm-bin.1 b/deps/npm/man/man1/npm-bin.1
index 2ddb41bd1..56ee96280 100644
--- a/deps/npm/man/man1/npm-bin.1
+++ b/deps/npm/man/man1/npm-bin.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BIN" "1" "June 2015" "" ""
+.TH "NPM\-BIN" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-bin\fR \- Display npm bin folder
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index d1aa52202..c8ed16b78 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUGS" "1" "June 2015" "" ""
+.TH "NPM\-BUGS" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-bugs\fR \- Bugs for a package in a web browser maybe
.SH SYNOPSIS
@@ -12,20 +12,20 @@ npm bugs (with no args in a package dir)
.SH DESCRIPTION
.P
This command tries to guess at the likely location of a package's
-bug tracker URL, and then tries to open it using the \fB\-\-browser\fR
+bug tracker URL, and then tries to open it using the \fB\-\-browser\fP
config param\. If no package name is provided, it will search for
-a \fBpackage\.json\fR in the current folder and use the \fBname\fR property\.
+a \fBpackage\.json\fP in the current folder and use the \fBname\fP property\.
.SH CONFIGURATION
.SS browser
.RS 0
.IP \(bu 2
-Default: OS X: \fB"open"\fR, Windows: \fB"start"\fR, Others: \fB"xdg\-open"\fR
+Default: OS X: \fB"open"\fP, Windows: \fB"start"\fP, Others: \fB"xdg\-open"\fP
.IP \(bu 2
Type: String
.RE
.P
-The browser that is called by the \fBnpm bugs\fR command to open websites\.
+The browser that is called by the \fBnpm bugs\fP command to open websites\.
.SS registry
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-build.1 b/deps/npm/man/man1/npm-build.1
index ed46425c5..1d348eacf 100644
--- a/deps/npm/man/man1/npm-build.1
+++ b/deps/npm/man/man1/npm-build.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUILD" "1" "June 2015" "" ""
+.TH "NPM\-BUILD" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-build\fR \- Build a package
.SH SYNOPSIS
@@ -10,13 +10,13 @@ npm build <package\-folder>
.RE
.RS 0
.IP \(bu 2
-\fB<package\-folder>\fR:
-A folder containing a \fBpackage\.json\fR file in its root\.
+\fB<package\-folder>\fP:
+A folder containing a \fBpackage\.json\fP file in its root\.
.RE
.SH DESCRIPTION
.P
-This is the plumbing command called by \fBnpm link\fR and \fBnpm install\fR\|\.
+This is the plumbing command called by \fBnpm link\fP and \fBnpm install\fP\|\.
.P
It should generally be called during installation, but if you need to run it
directly, run:
diff --git a/deps/npm/man/man1/npm-bundle.1 b/deps/npm/man/man1/npm-bundle.1
index 68ae0ae9a..ccd6d821f 100644
--- a/deps/npm/man/man1/npm-bundle.1
+++ b/deps/npm/man/man1/npm-bundle.1
@@ -1,13 +1,13 @@
-.TH "NPM\-BUNDLE" "1" "June 2015" "" ""
+.TH "NPM\-BUNDLE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-bundle\fR \- REMOVED
.SH DESCRIPTION
.P
-The \fBnpm bundle\fR command has been removed in 1\.0, for the simple reason
+The \fBnpm bundle\fP command has been removed in 1\.0, for the simple reason
that it is no longer necessary, as the default behavior is now to
install packages into the local space\.
.P
-Just use \fBnpm install\fR now to do what \fBnpm bundle\fR used to do\.
+Just use \fBnpm install\fP now to do what \fBnpm bundle\fP used to do\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index effe1b30b..40e124e5e 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM\-CACHE" "1" "June 2015" "" ""
+.TH "NPM\-CACHE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-cache\fR \- Manipulates packages cache
.SH SYNOPSIS
@@ -27,8 +27,8 @@ add data to the local installation cache explicitly\.
.IP \(bu 2
ls:
Show the data in the cache\. Argument is a path to show in the cache
-folder\. Works a bit like the \fBfind\fR program, but limited by the
-\fBdepth\fR config\.
+folder\. Works a bit like the \fBfind\fP program, but limited by the
+\fBdepth\fP config\.
.IP \(bu 2
clean:
Delete data out of the cache folder\. If an argument is provided, then
@@ -38,9 +38,9 @@ the entire cache is cleared\.
.RE
.SH DETAILS
.P
-npm stores cache data in the directory specified in \fBnpm config get cache\fR\|\.
+npm stores cache data in the directory specified in \fBnpm config get cache\fP\|\.
For each package that is added to the cache, three pieces of information are
-stored in \fB{cache}/{name}/{version}\fR:
+stored in \fB{cache}/{name}/{version}\fP:
.RS 0
.IP \(bu 2
\|\.\.\./package/package\.json:
@@ -51,18 +51,18 @@ The tarball for that version\.
.RE
.P
-Additionally, whenever a registry request is made, a \fB\|\.cache\.json\fR file
+Additionally, whenever a registry request is made, a \fB\|\.cache\.json\fP file
is placed at the corresponding URI, to store the ETag and the requested
-data\. This is stored in \fB{cache}/{hostname}/{path}/\.cache\.json\fR\|\.
+data\. This is stored in \fB{cache}/{hostname}/{path}/\.cache\.json\fP\|\.
.P
-Commands that make non\-essential registry requests (such as \fBsearch\fR and
-\fBview\fR, or the completion scripts) generally specify a minimum timeout\.
-If the \fB\|\.cache\.json\fR file is younger than the specified timeout, then
+Commands that make non\-essential registry requests (such as \fBsearch\fP and
+\fBview\fP, or the completion scripts) generally specify a minimum timeout\.
+If the \fB\|\.cache\.json\fP file is younger than the specified timeout, then
they do not make an HTTP request to the registry\.
.SH CONFIGURATION
.SS cache
.P
-Default: \fB~/\.npm\fR on Posix, or \fB%AppData%/npm\-cache\fR on Windows\.
+Default: \fB~/\.npm\fP on Posix, or \fB%AppData%/npm\-cache\fP on Windows\.
.P
The root cache folder\.
.SH SEE ALSO
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index 5a5d24eaf..feb5e79d5 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM\-COMPLETION" "1" "June 2015" "" ""
+.TH "NPM\-COMPLETION" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-completion\fR \- Tab Completion for npm
.SH SYNOPSIS
@@ -18,11 +18,11 @@ your ~/\.bashrc or ~/\.zshrc will make the completions available
everywhere\.
.P
You may of course also pipe the output of npm completion to a file
-such as \fB/usr/local/etc/bash_completion\.d/npm\fR if you have a system
+such as \fB/usr/local/etc/bash_completion\.d/npm\fP if you have a system
that will read that file for you\.
.P
-When \fBCOMP_CWORD\fR, \fBCOMP_LINE\fR, and \fBCOMP_POINT\fR are defined in the
-environment, \fBnpm completion\fR acts in "plumbing mode", and outputs
+When \fBCOMP_CWORD\fP, \fBCOMP_LINE\fP, and \fBCOMP_POINT\fP are defined in the
+environment, \fBnpm completion\fP acts in "plumbing mode", and outputs
completions based on the arguments\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index 17d74488a..97a290c1b 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "1" "June 2015" "" ""
+.TH "NPM\-CONFIG" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-config\fR \- Manage the npm configuration files
.SH SYNOPSIS
@@ -18,14 +18,14 @@ npm set <key> <value> [\-\-global]
.SH DESCRIPTION
.P
npm gets its config settings from the command line, environment
-variables, \fBnpmrc\fR files, and in some cases, the \fBpackage\.json\fR file\.
+variables, \fBnpmrc\fP files, and in some cases, the \fBpackage\.json\fP file\.
.P
See npm help 5 npmrc for more information about the npmrc files\.
.P
-See npm help 7 \fBnpm\-config\fR for a more thorough discussion of the mechanisms
+See npm help 7 \fBnpm\-config\fP for a more thorough discussion of the mechanisms
involved\.
.P
-The \fBnpm config\fR command can be used to update and edit the contents
+The \fBnpm config\fP command can be used to update and edit the contents
of the user and global npmrc files\.
.SH Sub\-commands
.P
@@ -76,7 +76,7 @@ npm config edit
.fi
.RE
.P
-Opens the config file in an editor\. Use the \fB\-\-global\fR flag to edit the
+Opens the config file in an editor\. Use the \fB\-\-global\fP flag to edit the
global config\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index 89a3302f6..e567831f6 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DEDUPE" "1" "June 2015" "" ""
+.TH "NPM\-DEDUPE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-dedupe\fR \- Reduce duplication
.SH SYNOPSIS
@@ -27,7 +27,7 @@ a
.fi
.RE
.P
-In this case, npm help \fBnpm\-dedupe\fR will transform the tree to:
+In this case, npm help \fBnpm\-dedupe\fP will transform the tree to:
.P
.RS 2
.nf
@@ -58,7 +58,7 @@ registry\.
.P
This feature is experimental, and may change in future versions\.
.P
-The \fB\-\-tag\fR argument will apply to all of the affected dependencies\. If a
+The \fB\-\-tag\fP argument will apply to all of the affected dependencies\. If a
tag with the given name exists, the tagged version is preferred over newer
versions\.
.SH SEE ALSO
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index c50e78410..be91456a3 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DEPRECATE" "1" "June 2015" "" ""
+.TH "NPM\-DEPRECATE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-deprecate\fR \- Deprecate a version of a package
.SH SYNOPSIS
@@ -23,9 +23,9 @@ npm deprecate my\-thing@"< 0\.2\.3" "critical bug fixed in v0\.2\.3"
.RE
.P
Note that you must be the package owner to deprecate something\. See the
-\fBowner\fR and \fBadduser\fR help topics\.
+\fBowner\fP and \fBadduser\fP help topics\.
.P
-To un\-deprecate a package, specify an empty string (\fB""\fR) for the \fBmessage\fR argument\.
+To un\-deprecate a package, specify an empty string (\fB""\fP) for the \fBmessage\fP argument\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 4c8e977ac..b6abdb44b 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DIST\-TAG" "1" "June 2015" "" ""
+.TH "NPM\-DIST\-TAG" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-dist-tag\fR \- Modify package distribution tags
.SH SYNOPSIS
@@ -17,7 +17,7 @@ Add, remove, and enumerate distribution tags on a package:
.IP \(bu 2
add:
Tags the specified version of the package with the specified tag, or the
-\fB\-\-tag\fR config if not specified\.
+\fB\-\-tag\fP config if not specified\.
.IP \(bu 2
rm:
Clear a tag that is no longer in use from the package\.
@@ -45,32 +45,32 @@ npm install \-\-tag <tag>
.fi
.RE
.P
-This also applies to \fBnpm dedupe\fR\|\.
+This also applies to \fBnpm dedupe\fP\|\.
.P
Publishing a package sets the "latest" tag to the published version unless the
-\fB\-\-tag\fR option is used\. For example, \fBnpm publish \-\-tag=beta\fR\|\.
+\fB\-\-tag\fP option is used\. For example, \fBnpm publish \-\-tag=beta\fP\|\.
.SH PURPOSE
.P
Tags can be used to provide an alias instead of version numbers\. For
-example, \fBnpm\fR currently uses the tag "next" to identify the upcoming
+example, \fBnpm\fP currently uses the tag "next" to identify the upcoming
version, and the tag "latest" to identify the current version\.
.P
A project might choose to have multiple streams of development, e\.g\.,
"stable", "canary"\.
.SH CAVEATS
.P
-This command used to be known as \fBnpm tag\fR, which only created new tags, and so
+This command used to be known as \fBnpm tag\fP, which only created new tags, and so
had a different syntax\.
.P
Tags must share a namespace with version numbers, because they are specified in
-the same slot: \fBnpm install <pkg>@<version>\fR vs \fBnpm install <pkg>@<tag>\fR\|\.
+the same slot: \fBnpm install <pkg>@<version>\fP vs \fBnpm install <pkg>@<tag>\fP\|\.
.P
Tags that can be interpreted as valid semver ranges will be rejected\. For
-example, \fBv1\.4\fR cannot be used as a tag, because it is interpreted by semver as
-\fB>=1\.4\.0 <1\.5\.0\fR\|\. See https://github\.com/npm/npm/issues/6082\|\.
+example, \fBv1\.4\fP cannot be used as a tag, because it is interpreted by semver as
+\fB>=1\.4\.0 <1\.5\.0\fP\|\. See https://github\.com/npm/npm/issues/6082\|\.
.P
The simplest way to avoid semver problems with tags is to use tags that do not
-begin with a number or the letter \fBv\fR\|\.
+begin with a number or the letter \fBv\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index 8e9867067..2a221c644 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DOCS" "1" "June 2015" "" ""
+.TH "NPM\-DOCS" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-docs\fR \- Docs for a package in a web browser maybe
.SH SYNOPSIS
@@ -14,21 +14,21 @@ npm home (with no args in a package dir)
.SH DESCRIPTION
.P
This command tries to guess at the likely location of a package's
-documentation URL, and then tries to open it using the \fB\-\-browser\fR
+documentation URL, and then tries to open it using the \fB\-\-browser\fP
config param\. You can pass multiple package names at once\. If no
-package name is provided, it will search for a \fBpackage\.json\fR in
-the current folder and use the \fBname\fR property\.
+package name is provided, it will search for a \fBpackage\.json\fP in
+the current folder and use the \fBname\fP property\.
.SH CONFIGURATION
.SS browser
.RS 0
.IP \(bu 2
-Default: OS X: \fB"open"\fR, Windows: \fB"start"\fR, Others: \fB"xdg\-open"\fR
+Default: OS X: \fB"open"\fP, Windows: \fB"start"\fP, Others: \fB"xdg\-open"\fP
.IP \(bu 2
Type: String
.RE
.P
-The browser that is called by the \fBnpm docs\fR command to open websites\.
+The browser that is called by the \fBnpm docs\fP command to open websites\.
.SS registry
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index 14787f507..1668bba24 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM\-EDIT" "1" "June 2015" "" ""
+.TH "NPM\-EDIT" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-edit\fR \- Edit an installed package
.SH SYNOPSIS
@@ -11,26 +11,26 @@ npm edit <name>[@<version>]
.SH DESCRIPTION
.P
Opens the package folder in the default editor (or whatever you've
-configured as the npm \fBeditor\fR config \-\- see npm help 7 \fBnpm\-config\fR\|\.)
+configured as the npm \fBeditor\fP config \-\- see npm help 7 \fBnpm\-config\fP\|\.)
.P
After it has been edited, the package is rebuilt so as to pick up any
changes in compiled packages\.
.P
-For instance, you can do \fBnpm install connect\fR to install connect
-into your package, and then \fBnpm edit connect\fR to make a few
+For instance, you can do \fBnpm install connect\fP to install connect
+into your package, and then \fBnpm edit connect\fP to make a few
changes to your locally installed copy\.
.SH CONFIGURATION
.SS editor
.RS 0
.IP \(bu 2
-Default: \fBEDITOR\fR environment variable if set, or \fB"vi"\fR on Posix,
-or \fB"notepad"\fR on Windows\.
+Default: \fBEDITOR\fP environment variable if set, or \fB"vi"\fP on Posix,
+or \fB"notepad"\fP on Windows\.
.IP \(bu 2
Type: path
.RE
.P
-The command to run for \fBnpm edit\fR or \fBnpm config edit\fR\|\.
+The command to run for \fBnpm edit\fP or \fBnpm config edit\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index a90a66ac9..73ca3b5ac 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM\-EXPLORE" "1" "June 2015" "" ""
+.TH "NPM\-EXPLORE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-explore\fR \- Browse an installed package
.SH SYNOPSIS
@@ -16,7 +16,7 @@ If a command is specified, then it is run in the subshell, which then
immediately terminates\.
.P
This is particularly handy in the case of git submodules in the
-\fBnode_modules\fR folder:
+\fBnode_modules\fP folder:
.P
.RS 2
.nf
@@ -25,7 +25,7 @@ npm explore some\-dependency \-\- git pull origin master
.RE
.P
Note that the package is \fInot\fR automatically rebuilt afterwards, so be
-sure to use \fBnpm rebuild <pkg>\fR if you make any changes\.
+sure to use \fBnpm rebuild <pkg>\fP if you make any changes\.
.SH CONFIGURATION
.SS shell
.RS 0
@@ -37,7 +37,7 @@ Type: path
.RE
.P
-The shell to run for the \fBnpm explore\fR command\.
+The shell to run for the \fBnpm explore\fP command\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index 1dae614a2..4f2ed84de 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP\-SEARCH" "1" "June 2015" "" ""
+.TH "NPM\-HELP\-SEARCH" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-help-search\fR \- Search npm help documentation
.SH SYNOPSIS
@@ -15,8 +15,8 @@ terms provided, and then list the results, sorted by relevance\.
.P
If only one result is found, then it will show that help topic\.
.P
-If the argument to \fBnpm help\fR is not a known help topic, then it will
-call \fBhelp\-search\fR\|\. It is rarely if ever necessary to call this
+If the argument to \fBnpm help\fP is not a known help topic, then it will
+call \fBhelp\-search\fP\|\. It is rarely if ever necessary to call this
command directly\.
.SH CONFIGURATION
.SS long
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index e8cd06d8c..c735cb747 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP" "1" "June 2015" "" ""
+.TH "NPM\-HELP" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-help\fR \- Get help on npm
.SH SYNOPSIS
@@ -14,8 +14,8 @@ npm help some search terms
If supplied a topic, then show the appropriate documentation page\.
.P
If the topic does not exist, or if multiple terms are provided, then run
-the \fBhelp\-search\fR command to find a match\. Note that, if \fBhelp\-search\fR
-finds a single subject, then it will run \fBhelp\fR on that topic, so unique
+the \fBhelp\-search\fP command to find a match\. Note that, if \fBhelp\-search\fP
+finds a single subject, then it will run \fBhelp\fP on that topic, so unique
matches are equivalent to specifying a topic name\.
.SH CONFIGURATION
.SS viewer
@@ -29,7 +29,7 @@ Type: path
.P
The program to use to view help content\.
.P
-Set to \fB"browser"\fR to view html help content in the default web browser\.
+Set to \fB"browser"\fP to view html help content in the default web browser\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index e91ef4acf..712fd25fa 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM\-INIT" "1" "June 2015" "" ""
+.TH "NPM\-INIT" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-init\fR \- Interactively create a package\.json file
.SH SYNOPSIS
@@ -21,7 +21,7 @@ the options in there\.
It is strictly additive, so it does not delete options from your package\.json
without a really good reason to do so\.
.P
-If you invoke it with \fB\-f\fR, \fB\-\-force\fR, \fB\-y\fR, or \fB\-\-yes\fR, it will use only
+If you invoke it with \fB\-f\fP, \fB\-\-force\fP, \fB\-y\fP, or \fB\-\-yes\fP, it will use only
defaults and not prompt you for any options\.
.SH CONFIGURATION
.SS scope
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index aae70eb8b..e4831263e 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM\-INSTALL" "1" "June 2015" "" ""
+.TH "NPM\-INSTALL" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-install\fR \- Install a package
.SH SYNOPSIS
@@ -22,7 +22,7 @@ This command installs a package, and any packages that it depends on\. If the
package has a shrinkwrap file, the installation of dependencies will be driven
by that\. See npm help shrinkwrap\.
.P
-A \fBpackage\fR is:
+A \fBpackage\fP is:
.RS 0
.IP \(bu 2
a) a folder containing a program described by a package\.json file
@@ -31,13 +31,13 @@ b) a gzipped tarball containing (a)
.IP \(bu 2
c) a url that resolves to (b)
.IP \(bu 2
-d) a \fB<name>@<version>\fR that is published on the registry (see npm help 7 \fBnpm\-registry\fR) with (c)
+d) a \fB<name>@<version>\fP that is published on the registry (see npm help 7 \fBnpm\-registry\fP) with (c)
.IP \(bu 2
-e) a \fB<name>@<tag>\fR that points to (d)
+e) a \fB<name>@<tag>\fP that points to (d)
.IP \(bu 2
-f) a \fB<name>\fR that has a "latest" tag satisfying (e)
+f) a \fB<name>\fP that has a "latest" tag satisfying (e)
.IP \(bu 2
-g) a \fB<git remote url>\fR that resolves to (b)
+g) a \fB<git remote url>\fP that resolves to (b)
.RE
.P
@@ -47,23 +47,23 @@ perhaps if you also want to be able to easily install it elsewhere
after packing it up into a tarball (b)\.
.RS 0
.IP \(bu 2
-\fBnpm install\fR (in package directory, no arguments):
+\fBnpm install\fP (in package directory, no arguments):
Install the dependencies in the local node_modules folder\.
- In global mode (ie, with \fB\-g\fR or \fB\-\-global\fR appended to the command),
+ In global mode (ie, with \fB\-g\fP or \fB\-\-global\fP appended to the command),
it installs the current package context (ie, the current working
directory) as a global package\.
- By default, \fBnpm install\fR will install all modules listed as dependencies\.
- With the \fB\-\-production\fR flag (or when the \fBNODE_ENV\fR environment variable
- is set to \fBproduction\fR), npm will not install modules listed in
- \fBdevDependencies\fR\|\.
+ By default, \fBnpm install\fP will install all modules listed as dependencies\.
+ With the \fB\-\-production\fP flag (or when the \fBNODE_ENV\fP environment variable
+ is set to \fBproduction\fP), npm will not install modules listed in
+ \fBdevDependencies\fP\|\.
.IP \(bu 2
-\fBnpm install <folder>\fR:
+\fBnpm install <folder>\fP:
Install a package that is sitting in a folder on the filesystem\.
.IP \(bu 2
-\fBnpm install <tarball file>\fR:
+\fBnpm install <tarball file>\fP:
Install a package that is sitting on the filesystem\. Note: if you just want
to link a dev directory into your npm root, you can do this more easily by
- using \fBnpm link\fR\|\.
+ using \fBnpm link\fP\|\.
Example:
.P
.RS 2
@@ -72,7 +72,7 @@ after packing it up into a tarball (b)\.
.fi
.RE
.IP \(bu 2
-\fBnpm install <tarball url>\fR:
+\fBnpm install <tarball url>\fP:
Fetch the tarball url, and then install it\. In order to distinguish between
this and other options, the argument must start with "http://" or "https://"
Example:
@@ -83,9 +83,9 @@ after packing it up into a tarball (b)\.
.fi
.RE
.IP \(bu 2
-\fBnpm install [@<scope>/]<name> [\-\-save|\-\-save\-dev|\-\-save\-optional]\fR:
- Do a \fB<name>@<tag>\fR install, where \fB<tag>\fR is the "tag" config\. (See
- npm help 7 \fBnpm\-config\fR\|\.)
+\fBnpm install [@<scope>/]<name> [\-\-save|\-\-save\-dev|\-\-save\-optional]\fP:
+ Do a \fB<name>@<tag>\fP install, where \fB<tag>\fP is the "tag" config\. (See
+ npm help 7 \fBnpm\-config\fP\|\.)
In most cases, this will install the latest version
of the module published on npm\.
Example:
@@ -95,24 +95,24 @@ after packing it up into a tarball (b)\.
npm install sax
.fi
.RE
- \fBnpm install\fR takes 3 exclusive, optional flags which save or update
+ \fBnpm install\fP takes 3 exclusive, optional flags which save or update
the package version in your main package\.json:
.RS 0
.IP \(bu 2
-\fB\-\-save\fR: Package will appear in your \fBdependencies\fR\|\.
+\fB\-\-save\fP: Package will appear in your \fBdependencies\fP\|\.
.IP \(bu 2
-\fB\-\-save\-dev\fR: Package will appear in your \fBdevDependencies\fR\|\.
+\fB\-\-save\-dev\fP: Package will appear in your \fBdevDependencies\fP\|\.
.IP \(bu 2
-\fB\-\-save\-optional\fR: Package will appear in your \fBoptionalDependencies\fR\|\.
+\fB\-\-save\-optional\fP: Package will appear in your \fBoptionalDependencies\fP\|\.
When using any of the above options to save dependencies to your
package\.json, there is an additional, optional flag:
.IP \(bu 2
-\fB\-\-save\-exact\fR: Saved dependencies will be configured with an
+\fB\-\-save\-exact\fP: Saved dependencies will be configured with an
exact version rather than using npm's default semver range
operator\.
-\fB<scope>\fR is optional\. The package will be downloaded from the registry
+\fB<scope>\fP is optional\. The package will be downloaded from the registry
associated with the specified scope\. If no registry is associated with
-the given scope the default registry is assumed\. See npm help 7 \fBnpm\-scope\fR\|\.
+the given scope the default registry is assumed\. See npm help 7 \fBnpm\-scope\fP\|\.
Note: if you do not include the @\-symbol on your scope name, npm will
interpret this as a GitHub repository instead, see below\. Scopes names
must also be followed by a slash\.
@@ -142,7 +142,7 @@ fetch the package by name if it is not valid\.
.RE
.RS 0
.IP \(bu 2
-\fBnpm install [@<scope>/]<name>@<tag>\fR:
+\fBnpm install [@<scope>/]<name>@<tag>\fP:
Install the version of the package that is referenced by the specified tag\.
If the tag does not exist in the registry data for that package, then this
will fail\.
@@ -155,7 +155,7 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install [@<scope>/]<name>@<version>\fR:
+\fBnpm install [@<scope>/]<name>@<version>\fP:
Install the specified version of the package\. This will fail if the
version has not been published to the registry\.
Example:
@@ -167,9 +167,9 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install [@<scope>/]<name>@<version range>\fR:
+\fBnpm install [@<scope>/]<name>@<version range>\fP:
Install a version of the package matching the specified version range\. This
- will follow the same rules for resolving dependencies described in npm help 5 \fBpackage\.json\fR\|\.
+ will follow the same rules for resolving dependencies described in npm help 5 \fBpackage\.json\fP\|\.
Note that most version ranges must be put in quotes so that your shell will
treat it as a single argument\.
Example:
@@ -181,7 +181,7 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install <git remote url>\fR:
+\fBnpm install <git remote url>\fP:
Install a package by cloning a git remote url\. The format of the git
url is:
.P
@@ -190,25 +190,44 @@ fetch the package by name if it is not valid\.
<protocol>://[<user>[:<password>]@]<hostname>[:<port>][:/]<path>[#<commit\-ish>]
.fi
.RE
- \fB<protocol>\fR is one of \fBgit\fR, \fBgit+ssh\fR, \fBgit+http\fR, or
- \fBgit+https\fR\|\. If no \fB<commit\-ish>\fR is specified, then \fBmaster\fR is
+ \fB<protocol>\fP is one of \fBgit\fP, \fBgit+ssh\fP, \fBgit+http\fP, or
+ \fBgit+https\fP\|\. If no \fB<commit\-ish>\fP is specified, then \fBmaster\fP is
used\.
- Examples:
+ The following git environment variables are recognized by npm and will be added
+ to the environment when running git:
+.RS 0
+.IP \(bu 2
+\fBGIT_ASKPASS\fP
+.IP \(bu 2
+\fBGIT_PROXY_COMMAND\fP
+.IP \(bu 2
+\fBGIT_SSH\fP
+.IP \(bu 2
+\fBGIT_SSH_COMMAND\fP
+.IP \(bu 2
+\fBGIT_SSL_CAINFO\fP
+.IP \(bu 2
+\fBGIT_SSL_NO_VERIFY\fP
+See the git man page for details\.
+Examples:
.P
.RS 2
.nf
- git+ssh://git@github\.com:npm/npm\.git#v1\.0\.27
- git+https://isaacs@github\.com/npm/npm\.git
- git://github\.com/npm/npm\.git#v1\.0\.27
+npm install git+ssh://git@github\.com:npm/npm\.git#v1\.0\.27
+npm install git+https://isaacs@github\.com/npm/npm\.git
+npm install git://github\.com/npm/npm\.git#v1\.0\.27
+GIT_SSH_COMMAND='ssh \-i ~/\.ssh/custom_ident' npm install git+ssh://git@github\.com:npm/npm\.git
.fi
.RE
+
+.RE
.IP \(bu 2
-\fBnpm install <githubname>/<githubrepo>[#<commit\-ish>]\fR:
+\fBnpm install <githubname>/<githubrepo>[#<commit\-ish>]\fP:
.IP \(bu 2
-\fBnpm install github:<githubname>/<githubrepo>[#<commit\-ish>]\fR:
- Install the package at \fBhttps://github\.com/githubname/githubrepo\fR by
- attempting to clone it using \fBgit\fR\|\.
- If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
+\fBnpm install github:<githubname>/<githubrepo>[#<commit\-ish>]\fP:
+ Install the package at \fBhttps://github\.com/githubname/githubrepo\fP by
+ attempting to clone it using \fBgit\fP\|\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fP will be used\.
Examples:
.P
.RS 2
@@ -218,11 +237,11 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install gist:[<githubname>/]<gistID>[#<commit\-ish>]\fR:
- Install the package at \fBhttps://gist\.github\.com/gistID\fR by attempting to
- clone it using \fBgit\fR\|\. The GitHub username associated with the gist is
- optional and will not be saved in \fBpackage\.json\fR if \fB\-\-save\fR is used\.
- If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
+\fBnpm install gist:[<githubname>/]<gistID>[#<commit\-ish>]\fP:
+ Install the package at \fBhttps://gist\.github\.com/gistID\fP by attempting to
+ clone it using \fBgit\fP\|\. The GitHub username associated with the gist is
+ optional and will not be saved in \fBpackage\.json\fP if \fB\-\-save\fP is used\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fP will be used\.
Example:
.P
.RS 2
@@ -231,10 +250,10 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install bitbucket:<bitbucketname>/<bitbucketrepo>[#<commit\-ish>]\fR:
- Install the package at \fBhttps://bitbucket\.org/bitbucketname/bitbucketrepo\fR
- by attempting to clone it using \fBgit\fR\|\.
- If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
+\fBnpm install bitbucket:<bitbucketname>/<bitbucketrepo>[#<commit\-ish>]\fP:
+ Install the package at \fBhttps://bitbucket\.org/bitbucketname/bitbucketrepo\fP
+ by attempting to clone it using \fBgit\fP\|\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fP will be used\.
Example:
.P
.RS 2
@@ -243,10 +262,10 @@ fetch the package by name if it is not valid\.
.fi
.RE
.IP \(bu 2
-\fBnpm install gitlab:<gitlabname>/<gitlabrepo>[#<commit\-ish>]\fR:
- Install the package at \fBhttps://gitlab\.com/gitlabname/gitlabrepo\fR
- by attempting to clone it using \fBgit\fR\|\.
- If you don't specify a \fIcommit\-ish\fR then \fBmaster\fR will be used\.
+\fBnpm install gitlab:<gitlabname>/<gitlabrepo>[#<commit\-ish>]\fP:
+ Install the package at \fBhttps://gitlab\.com/gitlabname/gitlabrepo\fP
+ by attempting to clone it using \fBgit\fP\|\.
+ If you don't specify a \fIcommit\-ish\fR then \fBmaster\fP will be used\.
Example:
.P
.RS 2
@@ -266,11 +285,11 @@ npm install sax@">=0\.1\.0 <0\.2\.0" bench supervisor
.fi
.RE
.P
-The \fB\-\-tag\fR argument will apply to all of the specified install targets\. If a
+The \fB\-\-tag\fP argument will apply to all of the specified install targets\. If a
tag with the given name exists, the tagged version is preferred over newer
versions\.
.P
-The \fB\-\-force\fR argument will force npm to fetch remote resources even if a
+The \fB\-\-force\fP argument will force npm to fetch remote resources even if a
local copy exists on disk\.
.P
.RS 2
@@ -279,25 +298,25 @@ npm install sax \-\-force
.fi
.RE
.P
-The \fB\-\-global\fR argument will cause npm to install the package globally
-rather than locally\. See npm help 5 \fBnpm\-folders\fR\|\.
+The \fB\-\-global\fP argument will cause npm to install the package globally
+rather than locally\. See npm help 5 \fBnpm\-folders\fP\|\.
.P
-The \fB\-\-link\fR argument will cause npm to link global installs into the
+The \fB\-\-link\fP argument will cause npm to link global installs into the
local space in some cases\.
.P
-The \fB\-\-no\-bin\-links\fR argument will prevent npm from creating symlinks for
+The \fB\-\-no\-bin\-links\fP argument will prevent npm from creating symlinks for
any binaries the package might contain\.
.P
-The \fB\-\-no\-optional\fR argument will prevent optional dependencies from
+The \fB\-\-no\-optional\fP argument will prevent optional dependencies from
being installed\.
.P
-The \fB\-\-no\-shrinkwrap\fR argument, which will ignore an available
+The \fB\-\-no\-shrinkwrap\fP argument, which will ignore an available
shrinkwrap file and use the package\.json instead\.
.P
-The \fB\-\-nodedir=/path/to/node/source\fR argument will allow npm to find the
+The \fB\-\-nodedir=/path/to/node/source\fP argument will allow npm to find the
node source code so that npm can compile native modules\.
.P
-See npm help 7 \fBnpm\-config\fR\|\. Many of the configuration params have some
+See npm help 7 \fBnpm\-config\fP\|\. Many of the configuration params have some
effect on installation, since that's most of what npm does\.
.SH ALGORITHM
.P
@@ -317,7 +336,7 @@ for each dep@version in what\.dependencies
.fi
.RE
.P
-For this \fBpackage{dep}\fR structure: \fBA{B,C}, B{C}, C{D}\fR,
+For this \fBpackage{dep}\fP structure: \fBA{B,C}, B{C}, C{D}\fP,
this algorithm produces:
.P
.RS 2
@@ -346,15 +365,15 @@ A \-> B \-> A' \-> B' \-> A \-> B \-> A' \-> B' \-> A \-> \.\.\.
.fi
.RE
.P
-where \fBA\fR is some version of a package, and \fBA'\fR is a different version
-of the same package\. Because \fBB\fR depends on a different version of \fBA\fR
+where \fBA\fP is some version of a package, and \fBA'\fP is a different version
+of the same package\. Because \fBB\fP depends on a different version of \fBA\fP
than the one that is already in the tree, it must install a separate
-copy\. The same is true of \fBA'\fR, which must install \fBB'\fR\|\. Because \fBB'\fR
-depends on the original version of \fBA\fR, which has been overridden, the
+copy\. The same is true of \fBA'\fP, which must install \fBB'\fP\|\. Because \fBB'\fP
+depends on the original version of \fBA\fP, which has been overridden, the
cycle falls into infinite regress\.
.P
To avoid this situation, npm flat\-out refuses to install any
-\fBname@version\fR that is already present anywhere in the tree of package
+\fBname@version\fP that is already present anywhere in the tree of package
folder ancestors\. A more correct, but more complex, solution would be
to symlink the existing version into the new location\. If this ever
affects a real use\-case, it will be investigated\.
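
The npm-install.1 changes earlier in this file document a whitelist of git environment variables that npm forwards when it shells out to git. A rough sketch of the idea, not npm's actual implementation (npm drives git through its own git.whichAndExec/chainableExec helpers seen in version.js above), just showing how such a whitelist could be applied when spawning git:

    var spawn = require('child_process').spawn
    var gitEnvWhitelist = ['GIT_ASKPASS', 'GIT_PROXY_COMMAND', 'GIT_SSH',
                           'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO', 'GIT_SSL_NO_VERIFY']
    var env = { PATH: process.env.PATH }        // start from a minimal environment
    gitEnvWhitelist.forEach(function (name) {   // copy over only the documented vars
      if (process.env[name]) env[name] = process.env[name]
    })
    spawn('git', ['clone', 'git://github.com/npm/npm.git'], { env: env, stdio: 'inherit' })
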
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index 9aa054dea..8063ef930 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LINK" "1" "June 2015" "" ""
+.TH "NPM\-LINK" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-link\fR \- Symlink a package folder
.SH SYNOPSIS
@@ -14,20 +14,20 @@ npm ln (with any of the previous argument usage)
.P
Package linking is a two\-step process\.
.P
-First, \fBnpm link\fR in a package folder will create a globally\-installed
-symbolic link from \fBprefix/package\-name\fR to the current folder (see
-npm help 7 \fBnpm\-config\fR for the value of \fBprefix\fR)\.
+First, \fBnpm link\fP in a package folder will create a globally\-installed
+symbolic link from \fBprefix/package\-name\fP to the current folder (see
+npm help 7 \fBnpm\-config\fP for the value of \fBprefix\fP)\.
.P
-Next, in some other location, \fBnpm link package\-name\fR will create a
-symlink from the local \fBnode_modules\fR folder to the global symlink\.
+Next, in some other location, \fBnpm link package\-name\fP will create a
+symlink from the local \fBnode_modules\fP folder to the global symlink\.
.P
-Note that \fBpackage\-name\fR is taken from \fBpackage\.json\fR,
+Note that \fBpackage\-name\fP is taken from \fBpackage\.json\fP,
not from directory name\.
.P
-The package name can be optionally prefixed with a scope\. See npm help 7 \fBnpm\-scope\fR\|\.
+The package name can be optionally prefixed with a scope\. See npm help 7 \fBnpm\-scope\fP\|\.
The scope must be preceded by an @\-symbol and followed by a slash\.
.P
-When creating tarballs for \fBnpm publish\fR, the linked packages are
+When creating tarballs for \fBnpm publish\fP, the linked packages are
"snapshotted" to their current state by resolving the symbolic links\.
.P
This is handy for installing your own stuff, so that you can work on it and
@@ -68,9 +68,9 @@ npm link node\-redis
.RE
.P
That is, it first creates a global link, and then links the global
-installation target into your project's \fBnode_modules\fR folder\.
+installation target into your project's \fBnode_modules\fP folder\.
.P
-If your linked package is scoped (see npm help 7 \fBnpm\-scope\fR) your link command must
+If your linked package is scoped (see npm help 7 \fBnpm\-scope\fP) your link command must
include that scope, e\.g\.
.P
.RS 2
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index a4c21651e..6847fbf59 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LOGOUT" "1" "June 2015" "" ""
+.TH "NPM\-LOGOUT" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-logout\fR \- Log out of the registry
.SH SYNOPSIS
@@ -18,21 +18,21 @@ When logged into a legacy registry that uses username and password authenticatio
clear the credentials in your user configuration\. In this case, it will \fIonly\fR affect
the current environment\.
.P
-If \fB\-\-scope\fR is provided, this will find the credentials for the registry
+If \fB\-\-scope\fP is provided, this will find the credentials for the registry
connected to that scope, if set\.
.SH CONFIGURATION
.SS registry
.P
Default: http://registry\.npmjs\.org/
.P
-The base URL of the npm package registry\. If \fBscope\fR is also specified,
+The base URL of the npm package registry\. If \fBscope\fP is also specified,
it takes precedence\.
.SS scope
.P
Default: none
.P
If specified, the user and login credentials given will be associated
-with the specified scope\. See npm help 7 \fBnpm\-scope\fR\|\. You can use both at the same time,
+with the specified scope\. See npm help 7 \fBnpm\-scope\fP\|\. You can use both at the same time,
e\.g\.
.P
.RS 2
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 31d93d17b..b24cc75fa 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LS" "1" "June 2015" "" ""
+.TH "NPM\-LS" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-ls\fR \- List installed packages
.SH SYNOPSIS
@@ -16,14 +16,14 @@ npm ll [[@<scope>/]<pkg> \.\.\.]
This command will print to stdout all the versions of packages that are
installed, as well as their dependencies, in a tree\-structure\.
.P
-Positional arguments are \fBname@version\-range\fR identifiers, which will
+Positional arguments are \fBname@version\-range\fP identifiers, which will
limit the results to only the paths to the packages named\. Note that
nested packages will \fIalso\fR show the paths to the specified packages\.
-For example, running \fBnpm ls promzard\fR in npm's source tree will show:
+For example, running \fBnpm ls promzard\fP in npm's source tree will show:
.P
.RS 2
.nf
-npm@2.11.3 /path/to/npm
+npm@2.13.4 /path/to/npm
└─┬ init\-package\-json@0\.0\.4
└── promzard@0\.1\.5
.fi
@@ -35,7 +35,7 @@ If a project specifies git urls for dependencies these are shown
in parentheses after the name@version to make it easier for users to
recognize potential forks of a project\.
.P
-When run as \fBll\fR or \fBla\fR, it shows extended information by default\.
+When run as \fBll\fP or \fBla\fP, it shows extended information by default\.
.SH CONFIGURATION
.SS json
.RS 0
@@ -95,7 +95,7 @@ Default: false
.RE
.P
-Display only the dependency tree for packages in \fBdependencies\fR\|\.
+Display only the dependency tree for packages in \fBdependencies\fP\|\.
.SS dev
.RS 0
.IP \(bu 2
@@ -105,7 +105,7 @@ Default: false
.RE
.P
-Display only the dependency tree for packages in \fBdevDependencies\fR\|\.
+Display only the dependency tree for packages in \fBdevDependencies\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index e9998f4a7..6967f0a98 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM\-OUTDATED" "1" "June 2015" "" ""
+.TH "NPM\-OUTDATED" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-outdated\fR \- Check for outdated packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index ee291a46b..216e832c4 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM\-OWNER" "1" "June 2015" "" ""
+.TH "NPM\-OWNER" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-owner\fR \- Manage package owners
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 16d56c949..380ebac86 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PACK" "1" "June 2015" "" ""
+.TH "NPM\-PACK" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-pack\fR \- Create a tarball from a package
.SH SYNOPSIS
@@ -13,7 +13,7 @@ npm pack [<pkg> [<pkg> \.\.\.]]
For anything that's installable (that is, a package folder, tarball,
tarball url, name@tag, name@version, or name), this command will fetch
it to the cache, and then copy the tarball to the current working
-directory as \fB<name>\-<version>\.tgz\fR, and then write the filenames out to
+directory as \fB<name>\-<version>\.tgz\fP, and then write the filenames out to
stdout\.
.P
If the same package is specified multiple times, then the file will be
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
new file mode 100644
index 000000000..8eb3faeca
--- /dev/null
+++ b/deps/npm/man/man1/npm-ping.1
@@ -0,0 +1,23 @@
+.TH "NPM\-PING" "1" "August 2015" "" ""
+.SH "NAME"
+\fBnpm-ping\fR \- Ping npm registry
+.SH SYNOPSIS
+.P
+.RS 2
+.nf
+npm ping [\-\-registry <registry>]
+.fi
+.RE
+.SH DESCRIPTION
+.P
+Ping the configured or given npm registry and verify authentication\.
+.SH SEE ALSO
+.RS 0
+.IP \(bu 2
+npm help config
+.IP \(bu 2
+npm help 7 config
+.IP \(bu 2
+npm help 5 npmrc
+
+.RE
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index 308b8be08..edfb6c1f2 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PREFIX" "1" "June 2015" "" ""
+.TH "NPM\-PREFIX" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-prefix\fR \- Display prefix
.SH SYNOPSIS
@@ -11,10 +11,10 @@ npm prefix [\-g]
.SH DESCRIPTION
.P
Print the local prefix to standard out\. This is the closest parent directory
-to contain a package\.json file unless \fB\-g\fR is also specified\.
+to contain a package\.json file unless \fB\-g\fP is also specified\.
.P
-If \fB\-g\fR is specified, this will be the value of the global prefix\. See
-npm help 7 \fBnpm\-config\fR for more detail\.
+If \fB\-g\fP is specified, this will be the value of the global prefix\. See
+npm help 7 \fBnpm\-config\fP for more detail\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index a6178226e..cdada7e07 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PRUNE" "1" "June 2015" "" ""
+.TH "NPM\-PRUNE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-prune\fR \- Remove extraneous packages
.SH SYNOPSIS
@@ -18,10 +18,10 @@ removed\.
Extraneous packages are packages that are not listed on the parent
package's dependencies list\.
.P
-If the \fB\-\-production\fR flag is specified or the \fBNODE_ENV\fR environment
-variable is set to \fBproduction\fR, this command will remove the packages
-specified in your \fBdevDependencies\fR\|\. Setting \fB\-\-production=false\fR will
-negate \fBNODE_ENV\fR being set to \fBproduction\fR\|\.
+If the \fB\-\-production\fP flag is specified or the \fBNODE_ENV\fP environment
+variable is set to \fBproduction\fP, this command will remove the packages
+specified in your \fBdevDependencies\fP\|\. Setting \fB\-\-production=false\fP will
+negate \fBNODE_ENV\fP being set to \fBproduction\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 5a092c16d..b02ff7d44 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PUBLISH" "1" "June 2015" "" ""
+.TH "NPM\-PUBLISH" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-publish\fR \- Publish a package
.SH SYNOPSIS
@@ -12,30 +12,30 @@ npm publish <folder> [\-\-tag <tag>] [\-\-access <public|restricted>]
.SH DESCRIPTION
.P
Publishes a package to the registry so that it can be installed by name\. See
-npm help 7 \fBnpm\-developers\fR for details on what's included in the published package, as
+npm help 7 \fBnpm\-developers\fP for details on what's included in the published package, as
well as details on how the package is built\.
.P
By default npm will publish to the public registry\. This can be overridden by
-specifying a different default registry or using a npm help 7 \fBnpm\-scope\fR in the name
-(see npm help 5 \fBpackage\.json\fR)\.
+specifying a different default registry or using a npm help 7 \fBnpm\-scope\fP in the name
+(see npm help 5 \fBpackage\.json\fP)\.
.RS 0
.IP \(bu 2
-\fB<folder>\fR:
+\fB<folder>\fP:
A folder containing a package\.json file
.IP \(bu 2
-\fB<tarball>\fR:
+\fB<tarball>\fP:
A url or file path to a gzipped tar archive containing a single folder
with a package\.json file inside\.
.IP \(bu 2
-\fB[\-\-tag <tag>]\fR
+\fB[\-\-tag <tag>]\fP
Registers the published package with the given tag, such that \fBnpm install
-<name>@<tag>\fR will install this version\. By default, \fBnpm publish\fR updates
-and \fBnpm install\fR installs the \fBlatest\fR tag\.
+<name>@<tag>\fP will install this version\. By default, \fBnpm publish\fP updates
+and \fBnpm install\fP installs the \fBlatest\fP tag\.
.IP \(bu 2
-\fB[\-\-access <public|restricted>]\fR
+\fB[\-\-access <public|restricted>]\fP
Tells the registry whether this package should be published as public or
-restricted\. Only applies to scoped packages, which default to \fBrestricted\fR\|\.
-If you don't have a paid account, you must publish with \fB\-\-access public\fR
+restricted\. Only applies to scoped packages, which default to \fBrestricted\fP\|\.
+If you don't have a paid account, you must publish with \fB\-\-access public\fP
to publish scoped packages\.
.RE
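A short sketch of the options listed above; the folder path and tag name are placeholders:

    npm publish                       # publish the package in the current directory
    npm publish ./my-pkg --tag beta   # publish a folder and register it under the "beta" tag
    npm publish --access public       # needed for scoped packages on a free account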
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index e8347ea55..5ad90138d 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM\-REBUILD" "1" "June 2015" "" ""
+.TH "NPM\-REBUILD" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-rebuild\fR \- Rebuild a package
.SH SYNOPSIS
@@ -11,13 +11,13 @@ npm rb [<name> [<name> \.\.\.]]
.RE
.RS 0
.IP \(bu 2
-\fB<name>\fR:
+\fB<name>\fP:
The package to rebuild
.RE
.SH DESCRIPTION
.P
-This command runs the \fBnpm build\fR command on the matched folders\. This is useful
+This command runs the \fBnpm build\fP command on the matched folders\. This is useful
when you install a new version of node, and must recompile all your C++ addons with
the new binary\.
.SH SEE ALSO
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index bbbc8b128..9051863aa 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM\-REPO" "1" "June 2015" "" ""
+.TH "NPM\-REPO" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-repo\fR \- Open package repository page in the browser
.SH SYNOPSIS
@@ -12,20 +12,20 @@ npm repo (with no args in a package dir)
.SH DESCRIPTION
.P
This command tries to guess at the likely location of a package's
-repository URL, and then tries to open it using the \fB\-\-browser\fR
+repository URL, and then tries to open it using the \fB\-\-browser\fP
config param\. If no package name is provided, it will search for
-a \fBpackage\.json\fR in the current folder and use the \fBname\fR property\.
+a \fBpackage\.json\fP in the current folder and use the \fBname\fP property\.
.SH CONFIGURATION
.SS browser
.RS 0
.IP \(bu 2
-Default: OS X: \fB"open"\fR, Windows: \fB"start"\fR, Others: \fB"xdg\-open"\fR
+Default: OS X: \fB"open"\fP, Windows: \fB"start"\fP, Others: \fB"xdg\-open"\fP
.IP \(bu 2
Type: String
.RE
.P
-The browser that is called by the \fBnpm repo\fR command to open websites\.
+The browser that is called by the \fBnpm repo\fP command to open websites\.
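For instance (the package name and browser choice are arbitrary examples):

    npm repo express                     # open the repository page for the express package
    npm repo                             # inside a package dir, use the local package.json name
    npm repo express --browser=firefox   # override the browser config for one invocation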
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index f45e64114..203fb24c4 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RESTART" "1" "June 2015" "" ""
+.TH "NPM\-RESTART" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-restart\fR \- Restart a package
.SH SYNOPSIS
@@ -40,7 +40,7 @@ postrestart
Note that the "restart" script is run \fBin addition to\fR the "stop"
and "start" scripts, not instead of them\.
.P
-This is the behavior as of \fBnpm\fR major version 2\. A change in this
+This is the behavior as of \fBnpm\fP major version 2\. A change in this
behavior will be accompanied by an increase in major version number
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man1/npm-rm.1 b/deps/npm/man/man1/npm-rm.1
index 548a870f2..cd6b6b3ac 100644
--- a/deps/npm/man/man1/npm-rm.1
+++ b/deps/npm/man/man1/npm-rm.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RM" "1" "June 2015" "" ""
+.TH "NPM\-RM" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-rm\fR \- Remove a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 7e0eeacb3..71666d0d1 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ROOT" "1" "June 2015" "" ""
+.TH "NPM\-ROOT" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-root\fR \- Display npm root
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm root
.RE
.SH DESCRIPTION
.P
-Print the effective \fBnode_modules\fR folder to standard out\.
+Print the effective \fBnode_modules\fP folder to standard out\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index 351800eb5..259aceb92 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RUN\-SCRIPT" "1" "June 2015" "" ""
+.TH "NPM\-RUN\-SCRIPT" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-run-script\fR \- Run arbitrary package scripts
.SH SYNOPSIS
@@ -11,16 +11,16 @@ npm run [command] [\-\- <args>]
.RE
.SH DESCRIPTION
.P
-This runs an arbitrary command from a package's \fB"scripts"\fR object\. If no
-\fB"command"\fR is provided, it will list the available scripts\. \fBrun[\-script]\fR is
+This runs an arbitrary command from a package's \fB"scripts"\fP object\. If no
+\fB"command"\fP is provided, it will list the available scripts\. \fBrun[\-script]\fP is
used by the test, start, restart, and stop commands, but can be called
directly, as well\. When the scripts in the package are printed out, they're
separated into lifecycle (test, start, restart) and directly\-run scripts\.
.P
-As of \fBnpm@2\.0\.0\fR \fIhttp://blog\.npmjs\.org/post/98131109725/npm\-2\-0\-0\fR, you can
-use custom arguments when executing scripts\. The special option \fB\-\-\fR is used by
+As of \fBnpm@2\.0\.0\fP \fIhttp://blog\.npmjs\.org/post/98131109725/npm\-2\-0\-0\fR, you can
+use custom arguments when executing scripts\. The special option \fB\-\-\fP is used by
getopt \fIhttp://goo\.gl/KxMmtG\fR to delimit the end of the options\. npm will pass
-all the arguments after the \fB\-\-\fR directly to your script:
+all the arguments after the \fB\-\-\fP directly to your script:
.P
.RS 2
.nf
@@ -28,20 +28,27 @@ npm run test \-\- \-\-grep="pattern"
.fi
.RE
.P
-The arguments will only be passed to the script specified after \fBnpm run\fR
+The arguments will only be passed to the script specified after \fBnpm run\fP
and not to any pre or post script\.
.P
-The \fBenv\fR script is a special built\-in command that can be used to list
+The \fBenv\fP script is a special built\-in command that can be used to list
environment variables that will be available to the script at runtime\. If an
"env" command is defined in your package it will take precedence over the
built\-in\.
.P
-In addition to the shell's pre\-existing \fBPATH\fR, \fBnpm run\fR adds
-\fBnode_modules/\.bin\fR to the \fBPATH\fR provided to scripts\. Any binaries provided by
-locally\-installed dependencies can be used without the \fBnode_modules/\.bin\fR
-prefix\. For example, if there is a \fBdevDependency\fR on \fBtap\fR in your package,
-you should write \fB"scripts": {"test": "tap test/\\*\.js"}\fR instead of \fB"scripts":
-{"test": "node_modules/\.bin/tap test/\\*\.js"}\fR to run your tests\.
+In addition to the shell's pre\-existing \fBPATH\fP, \fBnpm run\fP adds
+\fBnode_modules/\.bin\fP to the \fBPATH\fP provided to scripts\. Any binaries provided by
+locally\-installed dependencies can be used without the \fBnode_modules/\.bin\fP
+prefix\. For example, if there is a \fBdevDependency\fP on \fBtap\fP in your package,
+you should write:
+.P
+.RS 2
+.nf
+"scripts": {"test": "tap test/\\*\.js"}
+.fi
+.RE
+.P
+instead of \fB"scripts": {"test": "node_modules/\.bin/tap test/\\*\.js"}\fP to run your tests\.
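A small sketch tying the PATH behaviour above together; the tap version is a placeholder:

    # package.json fragment assumed:
    #   "devDependencies": { "tap": "^1.3.0" },
    #   "scripts":         { "test": "tap test/*.js" }
    npm install
    npm test                       # runs the locally installed tap found via node_modules/.bin
    npm run env | grep '^PATH='    # inspect the PATH your scripts will actually see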
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index b617bbbe7..eef7c072e 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM\-SEARCH" "1" "June 2015" "" ""
+.TH "NPM\-SEARCH" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-search\fR \- Search for packages
.SH SYNOPSIS
@@ -14,8 +14,8 @@ npm se [search terms \.\.\.]
.P
Search the registry for packages matching the search terms\.
.P
-If a term starts with \fB/\fR, then it's interpreted as a regular expression\.
-A trailing \fB/\fR will be ignored in this case\. (Note that many regular
+If a term starts with \fB/\fP, then it's interpreted as a regular expression\.
+A trailing \fB/\fP will be ignored in this case\. (Note that many regular
expression characters must be escaped or quoted in most shells\.)
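For example (the search terms are arbitrary; quote the regular expression so the shell leaves it alone):

    npm search mime parser     # plain keyword search
    npm search '/^grunt-/'     # regular-expression search for packages starting with grunt-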
.SH CONFIGURATION
.SS long
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index 521634d13..39d910b54 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM\-SHRINKWRAP" "1" "June 2015" "" ""
+.TH "NPM\-SHRINKWRAP" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR \- Lock down dependency versions
.SH SYNOPSIS
@@ -12,18 +12,18 @@ npm shrinkwrap
.P
This command locks down the versions of a package's dependencies so
that you can control exactly which versions of each dependency will be
-used when your package is installed\. The \fBpackage\.json\fR file is still
-required if you want to use \fBnpm install\fR\|\.
+used when your package is installed\. The \fBpackage\.json\fP file is still
+required if you want to use \fBnpm install\fP\|\.
.P
-By default, \fBnpm install\fR recursively installs the target's
-dependencies (as specified in \fBpackage\.json\fR), choosing the latest
+By default, \fBnpm install\fP recursively installs the target's
+dependencies (as specified in \fBpackage\.json\fP), choosing the latest
available version that satisfies the dependency's semver pattern\. In
some situations, particularly when shipping software where each change
is tightly managed, it's desirable to fully specify each version of
each dependency recursively so that subsequent builds and deploys do
not inadvertently pick up newer versions of a dependency that satisfy
the semver pattern\. Specifying specific semver patterns in each
-dependency's \fBpackage\.json\fR would facilitate this, but that's not always
+dependency's \fBpackage\.json\fP would facilitate this, but that's not always
possible or desirable, as when another author owns the npm package\.
It's also possible to check dependencies directly into source control,
but that may be undesirable for other reasons\.
@@ -68,7 +68,7 @@ and package C:
.RE
.P
If these are the only versions of A, B, and C available in the
-registry, then a normal \fBnpm install A\fR will install:
+registry, then a normal \fBnpm install A\fP will install:
.P
.RS 2
.nf
@@ -78,7 +78,7 @@ A@0\.1\.0
.fi
.RE
.P
-However, if B@0\.0\.2 is published, then a fresh \fBnpm install A\fR will
+However, if B@0\.0\.2 is published, then a fresh \fBnpm install A\fP will
install:
.P
.RS 2
@@ -105,7 +105,7 @@ npm shrinkwrap
.fi
.RE
.P
-This generates \fBnpm\-shrinkwrap\.json\fR, which will look something like this:
+This generates \fBnpm\-shrinkwrap\.json\fP, which will look something like this:
.P
.RS 2
.nf
@@ -127,30 +127,30 @@ This generates \fBnpm\-shrinkwrap\.json\fR, which will look something like this:
.RE
.P
The shrinkwrap command has locked down the dependencies based on
-what's currently installed in node_modules\. When \fBnpm install\fR
-installs a package with an \fBnpm\-shrinkwrap\.json\fR in the package
-root, the shrinkwrap file (rather than \fBpackage\.json\fR files) completely
+what's currently installed in node_modules\. When \fBnpm install\fP
+installs a package with an \fBnpm\-shrinkwrap\.json\fP in the package
+root, the shrinkwrap file (rather than \fBpackage\.json\fP files) completely
drives the installation of that package and all of its dependencies
(recursively)\. So now the author publishes A@0\.1\.0, and subsequent
installs of this package will use B@0\.0\.1 and C@0\.0\.1, regardless the
-dependencies and versions listed in A's, B's, and C's \fBpackage\.json\fR
+dependencies and versions listed in A's, B's, and C's \fBpackage\.json\fP
files\.
.SS Using shrinkwrapped packages
.P
Using a shrinkwrapped package is no different than using any other
-package: you can \fBnpm install\fR it by hand, or add a dependency to your
-\fBpackage\.json\fR file and \fBnpm install\fR it\.
+package: you can \fBnpm install\fP it by hand, or add a dependency to your
+\fBpackage\.json\fP file and \fBnpm install\fP it\.
.SS Building shrinkwrapped packages
.P
To shrinkwrap an existing package:
.RS 0
.IP 1. 3
-Run \fBnpm install\fR in the package root to install the current
+Run \fBnpm install\fP in the package root to install the current
versions of all dependencies\.
.IP 2. 3
Validate that the package works as expected with these versions\.
.IP 3. 3
-Run \fBnpm shrinkwrap\fR, add \fBnpm\-shrinkwrap\.json\fR to git, and publish
+Run \fBnpm shrinkwrap\fP, add \fBnpm\-shrinkwrap\.json\fP to git, and publish
your package\.
.RE
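The three steps above, written out as a shell session (the commit message is only an example):

    npm install                       # 1. install current versions of all dependencies
    npm test                          # 2. validate the package with those versions
    npm shrinkwrap                    # 3. generate npm-shrinkwrap.json
    git add npm-shrinkwrap.json
    git commit -m "Lock down dependency versions"
    npm publish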
@@ -158,19 +158,19 @@ your package\.
To add or update a dependency in a shrinkwrapped package:
.RS 0
.IP 1. 3
-Run \fBnpm install\fR in the package root to install the current
+Run \fBnpm install\fP in the package root to install the current
versions of all dependencies\.
.IP 2. 3
-Add or update dependencies\. \fBnpm install\fR each new or updated
-package individually and then update \fBpackage\.json\fR\|\. Note that they
+Add or update dependencies\. \fBnpm install\fP each new or updated
+package individually and then update \fBpackage\.json\fP\|\. Note that they
must be explicitly named in order to be installed: running \fBnpm
-install\fR with no arguments will merely reproduce the existing
+install\fP with no arguments will merely reproduce the existing
shrinkwrap\.
.IP 3. 3
Validate that the package works as expected with the new
dependencies\.
.IP 4. 3
-Run \fBnpm shrinkwrap\fR, commit the new \fBnpm\-shrinkwrap\.json\fR, and
+Run \fBnpm shrinkwrap\fP, commit the new \fBnpm\-shrinkwrap\.json\fP, and
publish your package\.
.RE
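And the add/update flow above as a sketch; dep1@1.2.2 stands in for whichever package and version you are adding, and --save is one way to update package.json at the same time:

    npm install                     # reinstall current versions first
    npm install dep1@1.2.2 --save   # explicitly name the new or updated package
    npm test
    npm shrinkwrap                  # refresh npm-shrinkwrap.json
    git commit -am "Update dep1, refresh shrinkwrap"
    npm publish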
@@ -179,19 +179,19 @@ You can use npm help outdated to view dependencies with newer versions
available\.
.SS Other Notes
.P
-A shrinkwrap file must be consistent with the package's \fBpackage\.json\fR
-file\. \fBnpm shrinkwrap\fR will fail if required dependencies are not
+A shrinkwrap file must be consistent with the package's \fBpackage\.json\fP
+file\. \fBnpm shrinkwrap\fP will fail if required dependencies are not
already installed, since that would result in a shrinkwrap that
wouldn't actually work\. Similarly, the command will fail if there are
-extraneous packages (not referenced by \fBpackage\.json\fR), since that would
-indicate that \fBpackage\.json\fR is not correct\.
+extraneous packages (not referenced by \fBpackage\.json\fP), since that would
+indicate that \fBpackage\.json\fP is not correct\.
.P
-Since \fBnpm shrinkwrap\fR is intended to lock down your dependencies for
-production use, \fBdevDependencies\fR will not be included unless you
-explicitly set the \fB\-\-dev\fR flag when you run \fBnpm shrinkwrap\fR\|\. If
-installed \fBdevDependencies\fR are excluded, then npm will print a
+Since \fBnpm shrinkwrap\fP is intended to lock down your dependencies for
+production use, \fBdevDependencies\fP will not be included unless you
+explicitly set the \fB\-\-dev\fP flag when you run \fBnpm shrinkwrap\fP\|\. If
+installed \fBdevDependencies\fP are excluded, then npm will print a
warning\. If you want them to be installed with your module by
-default, please consider adding them to \fBdependencies\fR instead\.
+default, please consider adding them to \fBdependencies\fP instead\.
.P
If shrinkwrapped package A depends on shrinkwrapped package B, B's
shrinkwrap will not be used as part of the installation of A\. However,
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index 0cbc9ed2e..f776e22b7 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STAR" "1" "June 2015" "" ""
+.TH "NPM\-STAR" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-star\fR \- Mark your favorite packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index 88208b575..070beaef6 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STARS" "1" "June 2015" "" ""
+.TH "NPM\-STARS" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-stars\fR \- View packages marked as favorites
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index f8b70dacf..ced564097 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM\-START" "1" "June 2015" "" ""
+.TH "NPM\-START" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-start\fR \- Start a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index b4d808404..2a16fd59a 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STOP" "1" "June 2015" "" ""
+.TH "NPM\-STOP" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-stop\fR \- Stop a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-submodule.1 b/deps/npm/man/man1/npm-submodule.1
deleted file mode 100644
index 771f0c0a2..000000000
--- a/deps/npm/man/man1/npm-submodule.1
+++ /dev/null
@@ -1,41 +0,0 @@
-.\" Generated with Ronnjs 0.3.8
-.\" http://github.com/kapouer/ronnjs/
-.
-.TH "NPM\-SUBMODULE" "1" "September 2014" "" ""
-.
-.SH "NAME"
-\fBnpm-submodule\fR \-\- Add a package as a git submodule
-.
-.SH "SYNOPSIS"
-.
-.nf
-npm submodule <pkg>
-.
-.fi
-.
-.SH "DESCRIPTION"
-If the specified package has a git repository url in its package\.json
-description, then this command will add it as a git submodule at \fBnode_modules/<pkg name>\fR\|\.
-.
-.P
-This is a convenience only\. From then on, it\'s up to you to manage
-updates by using the appropriate git commands\. npm will stubbornly
-refuse to update, modify, or remove anything with a \fB\|\.git\fR subfolder
-in it\.
-.
-.P
-This command also does not install missing dependencies, if the package
-does not include them in its git repository\. If \fBnpm ls\fR reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do \fBnpm explore <pkgname> \-\- npm install\fR to install the
-dependencies into the submodule folder\.
-.
-.SH "SEE ALSO"
-.
-.IP "\(bu" 4
-npm help 5 package\.json
-.
-.IP "\(bu" 4
-git help submodule
-.
-.IP "" 0
diff --git a/deps/npm/man/man1/npm-tag.1 b/deps/npm/man/man1/npm-tag.1
index c0336b2e6..d20ef4088 100644
--- a/deps/npm/man/man1/npm-tag.1
+++ b/deps/npm/man/man1/npm-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM\-TAG" "1" "June 2015" "" ""
+.TH "NPM\-TAG" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-tag\fR \- Tag a published version
.SH SYNOPSIS
@@ -13,7 +13,7 @@ npm tag <name>@<version> [<tag>]
THIS COMMAND IS DEPRECATED\. See npm help dist\-tag for details\.
.P
Tags the specified version of the package with the specified tag, or the
-\fB\-\-tag\fR config if not specified\.
+\fB\-\-tag\fP config if not specified\.
.P
A tag can be used when installing packages as a reference to a version instead
of using a specific version number:
@@ -32,13 +32,13 @@ npm install \-\-tag <tag>
.fi
.RE
.P
-This also applies to \fBnpm dedupe\fR\|\.
+This also applies to \fBnpm dedupe\fP\|\.
.P
Publishing a package always sets the "latest" tag to the published version\.
.SH PURPOSE
.P
Tags can be used to provide an alias instead of version numbers\. For
-example, \fBnpm\fR currently uses the tag "next" to identify the upcoming
+example, \fBnpm\fP currently uses the tag "next" to identify the upcoming
version, and the tag "latest" to identify the current version\.
.P
A project might choose to have multiple streams of development, e\.g\.,
@@ -46,16 +46,16 @@ A project might choose to have multiple streams of development, e\.g\.,
.SH CAVEATS
.P
Tags must share a namespace with version numbers, because they are
-specified in the same slot: \fBnpm install <pkg>@<version>\fR vs \fBnpm
-install <pkg>@<tag>\fR\|\.
+specified in the same slot: \fBnpm install <pkg>@<version>\fP vs \fBnpm
+install <pkg>@<tag>\fP\|\.
.P
Tags that can be interpreted as valid semver ranges will be
-rejected\. For example, \fBv1\.4\fR cannot be used as a tag, because it is
-interpreted by semver as \fB>=1\.4\.0 <1\.5\.0\fR\|\. See
+rejected\. For example, \fBv1\.4\fP cannot be used as a tag, because it is
+interpreted by semver as \fB>=1\.4\.0 <1\.5\.0\fP\|\. See
https://github\.com/npm/npm/issues/6082\|\.
.P
The simplest way to avoid semver problems with tags is to use tags
-that do not begin with a number or the letter \fBv\fR\|\.
+that do not begin with a number or the letter \fBv\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index f5f409a64..f6da31d6e 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM\-TEST" "1" "June 2015" "" ""
+.TH "NPM\-TEST" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-test\fR \- Test a package
.SH SYNOPSIS
@@ -13,7 +13,7 @@
.P
This runs a package's "test" script, if one was provided\.
.P
-To run tests as a condition of installation, set the \fBnpat\fR config to
+To run tests as a condition of installation, set the \fBnpat\fP config to
true\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index d59dcb14b..42aabe0ae 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RM" "1" "June 2015" "" ""
+.TH "NPM\-RM" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-rm\fR \- Remove a package
.SH SYNOPSIS
@@ -22,22 +22,22 @@ npm uninstall sax
.fi
.RE
.P
-In global mode (ie, with \fB\-g\fR or \fB\-\-global\fR appended to the command),
+In global mode (ie, with \fB\-g\fP or \fB\-\-global\fP appended to the command),
it uninstalls the current package context as a global package\.
.P
-\fBnpm uninstall\fR takes 3 exclusive, optional flags which save or update
+\fBnpm uninstall\fP takes 3 exclusive, optional flags which save or update
the package version in your main package\.json:
.RS 0
.IP \(bu 2
-\fB\-\-save\fR: Package will be removed from your \fBdependencies\fR\|\.
+\fB\-\-save\fP: Package will be removed from your \fBdependencies\fP\|\.
.IP \(bu 2
-\fB\-\-save\-dev\fR: Package will be removed from your \fBdevDependencies\fR\|\.
+\fB\-\-save\-dev\fP: Package will be removed from your \fBdevDependencies\fP\|\.
.IP \(bu 2
-\fB\-\-save\-optional\fR: Package will be removed from your \fBoptionalDependencies\fR\|\.
+\fB\-\-save\-optional\fP: Package will be removed from your \fBoptionalDependencies\fP\|\.
.RE
.P
-Scope is optional and follows the usual rules for npm help 7 \fBnpm\-scope\fR\|\.
+Scope is optional and follows the usual rules for npm help 7 \fBnpm\-scope\fP\|\.
.P
Examples:
.P
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index 8094933ab..d773cde02 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UNPUBLISH" "1" "June 2015" "" ""
+.TH "NPM\-UNPUBLISH" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-unpublish\fR \- Remove a package from the registry
.SH SYNOPSIS
@@ -13,7 +13,7 @@ npm unpublish [@<scope>/]<name>[@<version>]
\fBIt is generally considered bad behavior to remove versions of a library
that others are depending on!\fR
.P
-Consider using the \fBdeprecate\fR command
+Consider using the \fBdeprecate\fP command
instead, if your intent is to encourage users to upgrade\.
.P
There is plenty of room on the registry\.
@@ -29,7 +29,7 @@ Even if a package version is unpublished, that specific name and
version combination can never be reused\. In order to publish the
package again, a new version number must be used\.
.P
-The scope is optional and follows the usual rules for npm help 7 \fBnpm\-scope\fR\|\.
+The scope is optional and follows the usual rules for npm help 7 \fBnpm\-scope\fP\|\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index 7b548a6e1..5b41dce6b 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UPDATE" "1" "June 2015" "" ""
+.TH "NPM\-UPDATE" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-update\fR \- Update a package
.SH SYNOPSIS
@@ -11,26 +11,26 @@ npm update [\-g] [<name> [<name> \.\.\.]]
.SH DESCRIPTION
.P
This command will update all the packages listed to the latest version
-(specified by the \fBtag\fR config), respecting semver\.
+(specified by the \fBtag\fP config), respecting semver\.
.P
It will also install missing packages\. As with all commands that install
-packages, the \fB\-\-dev\fR flag will cause \fBdevDependencies\fR to be processed
+packages, the \fB\-\-dev\fP flag will cause \fBdevDependencies\fP to be processed
as well\.
.P
-If the \fB\-g\fR flag is specified, this command will update globally installed
+If the \fB\-g\fP flag is specified, this command will update globally installed
packages\.
.P
If no package name is specified, all packages in the specified location (global
or local) will be updated\.
.P
+As of \fBnpm@2\.6\.1\fP, \fBnpm update\fP will only inspect top\-level packages\.
-Prior versions of \fBnpm\fR would also recursively inspect all dependencies\.
-To get the old behavior, use \fBnpm \-\-depth 9999 update\fR, but be warned that
-simultaneous asynchronous update of all packages, including \fBnpm\fR itself
-and packages that \fBnpm\fR depends on, often causes problems up to and including
-the uninstallation of \fBnpm\fR itself\.
+As of \fBnpm@2\.6\.1\fP, the \fBnpm update\fP will only inspect top\-level packages\.
+Prior versions of \fBnpm\fP would also recursively inspect all dependencies\.
+To get the old behavior, use \fBnpm \-\-depth 9999 update\fP, but be warned that
+simultaneous asynchronous update of all packages, including \fBnpm\fP itself
+and packages that \fBnpm\fP depends on, often causes problems up to and including
+the uninstallation of \fBnpm\fP itself\.
.P
-To restore a missing \fBnpm\fR, use the command:
+To restore a missing \fBnpm\fP, use the command:
.P
.RS 2
.nf
@@ -39,12 +39,12 @@ curl \-L https://npmjs\.com/install\.sh | sh
.RE
.SH EXAMPLES
.P
-IMPORTANT VERSION NOTE: these examples assume \fBnpm@2\.6\.1\fR or later\. For
-older versions of \fBnpm\fR, you must specify \fB\-\-depth 0\fR to get the behavior
+IMPORTANT VERSION NOTE: these examples assume \fBnpm@2\.6\.1\fP or later\. For
+older versions of \fBnpm\fP, you must specify \fB\-\-depth 0\fP to get the behavior
described below\.
.P
-For the examples below, assume that the current package is \fBapp\fR and it depends
-on dependencies, \fBdep1\fR (\fBdep2\fR, \.\. etc\.)\. The published versions of \fBdep1\fR are:
+For the examples below, assume that the current package is \fBapp\fP and it depends
+on dependencies, \fBdep1\fP (\fBdep2\fP, \.\. etc\.)\. The published versions of \fBdep1\fP are:
.P
.RS 2
.nf
@@ -65,7 +65,7 @@ on dependencies, \fBdep1\fR (\fBdep2\fR, \.\. etc\.)\. The published versions o
.RE
.SS Caret Dependencies
.P
-If \fBapp\fR\|'s \fBpackage\.json\fR contains:
+If \fBapp\fP\|'s \fBpackage\.json\fP contains:
.P
.RS 2
.nf
@@ -75,11 +75,11 @@ dependencies: {
.fi
.RE
.P
-Then \fBnpm update\fR will install \fBdep1@1\.2\.2\fR, because \fB1\.2\.2\fR is \fBlatest\fR and
-\fB1\.2\.2\fR satisfies \fB^1\.1\.1\fR\|\.
+Then \fBnpm update\fP will install \fBdep1@1\.2\.2\fP, because \fB1\.2\.2\fP is \fBlatest\fP and
+\fB1\.2\.2\fP satisfies \fB^1\.1\.1\fP\|\.
.SS Tilde Dependencies
.P
-However, if \fBapp\fR\|'s \fBpackage\.json\fR contains:
+However, if \fBapp\fP\|'s \fBpackage\.json\fP contains:
.P
.RS 2
.nf
@@ -89,13 +89,13 @@ dependencies: {
.fi
.RE
.P
-In this case, running \fBnpm update\fR will install \fBdep1@1\.1\.2\fR\|\. Even though the \fBlatest\fR
-tag points to \fB1\.2\.2\fR, this version does not satisfy \fB~1\.1\.1\fR, which is equivalent
-to \fB>=1\.1\.1 <1\.2\.0\fR\|\. So the highest\-sorting version that satisfies \fB~1\.1\.1\fR is used,
-which is \fB1\.1\.2\fR\|\.
+In this case, running \fBnpm update\fP will install \fBdep1@1\.1\.2\fP\|\. Even though the \fBlatest\fP
+tag points to \fB1\.2\.2\fP, this version does not satisfy \fB~1\.1\.1\fP, which is equivalent
+to \fB>=1\.1\.1 <1\.2\.0\fP\|\. So the highest\-sorting version that satisfies \fB~1\.1\.1\fP is used,
+which is \fB1\.1\.2\fP\|\.
.SS Caret Dependencies below 1\.0\.0
.P
-Suppose \fBapp\fR has a caret dependency on a version below \fB1\.0\.0\fR, for example:
+Suppose \fBapp\fP has a caret dependency on a version below \fB1\.0\.0\fP, for example:
.P
.RS 2
.nf
@@ -105,10 +105,10 @@ dependencies: {
.fi
.RE
.P
-\fBnpm update\fR will install \fBdep1@0\.2\.0\fR, because there are no other
-versions which satisfy \fB^0\.2\.0\fR\|\.
+\fBnpm update\fP will install \fBdep1@0\.2\.0\fP, because there are no other
+versions which satisfy \fB^0\.2\.0\fP\|\.
.P
-If the dependence were on \fB^0\.4\.0\fR:
+If the dependency were on \fB^0\.4\.0\fP:
.P
.RS 2
.nf
@@ -118,13 +118,13 @@ dependencies: {
.fi
.RE
.P
-Then \fBnpm update\fR will install \fBdep1@0\.4\.1\fR, because that is the highest\-sorting
-version that satisfies \fB^0\.4\.0\fR (\fB>= 0\.4\.0 <0\.5\.0\fR)
-.SS Recording Updates with \fB\-\-save\fR
+Then \fBnpm update\fP will install \fBdep1@0\.4\.1\fP, because that is the highest\-sorting
+version that satisfies \fB^0\.4\.0\fP (\fB>= 0\.4\.0 <0\.5\.0\fP)
+.SS Recording Updates with \fB\-\-save\fP
.P
When you want to update a package and save the new version as
-the minimum required dependency in \fBpackage\.json\fR, you can use
-\fBnpm update \-\-save\fR\|\. For example if \fBpackage\.json\fR contains
+the minimum required dependency in \fBpackage\.json\fP, you can use
+\fBnpm update \-\-save\fP\|\. For example if \fBpackage\.json\fP contains
.P
.RS 2
.nf
@@ -134,8 +134,8 @@ dependencies: {
.fi
.RE
.P
-Then \fBnpm update \-\-save\fR will install \fBdep1@1\.2\.2\fR (i\.e\., \fBlatest\fR),
-and \fBpackage\.json\fR will be modified:
+Then \fBnpm update \-\-save\fP will install \fBdep1@1\.2\.2\fP (i\.e\., \fBlatest\fP),
+and \fBpackage\.json\fP will be modified:
.P
.RS 2
.nf
@@ -145,15 +145,15 @@ dependencies: {
.fi
.RE
.P
-Note that \fBnpm\fR will only write an updated version to \fBpackage\.json\fR
+Note that \fBnpm\fP will only write an updated version to \fBpackage\.json\fP
if it installs a new package\.
.SS Updating Globally\-Installed Packages
.P
-\fBnpm update \-g\fR will apply the \fBupdate\fR action to each globally\- installed
-package that is \fBoutdated\fR \-\- that is, has a version that is different from
-\fBlatest\fR\|\.
+\fBnpm update \-g\fP will apply the \fBupdate\fP action to each globally\-installed
+package that is \fBoutdated\fP \-\- that is, has a version that is different from
+\fBlatest\fP\|\.
.P
-NOTE: If a package has been upgraded to a version newer than \fBlatest\fR, it will
+NOTE: If a package has been upgraded to a version newer than \fBlatest\fP, it will
be \fIdowngraded\fR\|\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index 6c9444ff7..1cfe82618 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM\-VERSION" "1" "June 2015" "" ""
+.TH "NPM\-VERSION" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-version\fR \- Bump a package version
.SH SYNOPSIS
@@ -11,21 +11,22 @@ npm version [<newversion> | major | minor | patch | premajor | preminor | prepat
.SH DESCRIPTION
.P
Run this in a package directory to bump the version and write the new
-data back to \fBpackage\.json\fR and, if present, \fBnpm\-shrinkwrap\.json\fR\|\.
+data back to \fBpackage\.json\fP and, if present, \fBnpm\-shrinkwrap\.json\fP\|\.
.P
-The \fBnewversion\fR argument should be a valid semver string, \fIor\fR a
-valid second argument to semver\.inc (one of "patch", "minor", "major",
-"prepatch", "preminor", "premajor", "prerelease")\. In the second case,
+The \fBnewversion\fP argument should be a valid semver string, \fIor\fR a
+valid second argument to semver\.inc (one of \fBpatch\fP, \fBminor\fP, \fBmajor\fP,
+\fBprepatch\fP, \fBpreminor\fP, \fBpremajor\fP, \fBprerelease\fP)\. In the second case,
the existing version will be incremented by 1 in the specified field\.
.P
-If run in a git repo, it will also create a version commit and tag, and fail if
-the repo is not clean\. This behavior is controlled by \fBgit\-tag\-version\fR (see
-below), and can be disabled on the command line by running \fBnpm
-\-\-no\-git\-tag\-version version\fR
+If run in a git repo, it will also create a version commit and tag\.
+This behavior is controlled by \fBgit\-tag\-version\fP (see below), and can
+be disabled on the command line by running \fBnpm \-\-no\-git\-tag\-version version\fP\|\.
+It will fail if the working directory is not clean, unless the \fB\-\-force\fP
+flag is set\.
.P
-If supplied with \fB\-\-message\fR (shorthand: \fB\-m\fR) config option, npm will
+If supplied with \fB\-\-message\fP (shorthand: \fB\-m\fP) config option, npm will
use it as a commit message when creating a version commit\. If the
-\fBmessage\fR config contains \fB%s\fR then that will be replaced with the
+\fBmessage\fP config contains \fB%s\fP then that will be replaced with the
resulting version number\. For example:
.P
.RS 2
@@ -34,8 +35,8 @@ npm version patch \-m "Upgrade to %s for reasons"
.fi
.RE
.P
-If the \fBsign\-git\-tag\fR config is set, then the tag will be signed using
-the \fB\-s\fR flag to git\. Note that you must have a default GPG key set up
+If the \fBsign\-git\-tag\fP config is set, then the tag will be signed using
+the \fB\-s\fP flag to git\. Note that you must have a default GPG key set up
in your git config for this to work properly\. For example:
.P
.RS 2
@@ -51,17 +52,48 @@ Enter passphrase:
.fi
.RE
.P
-If "preversion", "version", "postversion" in the "scripts" property of
-the package\.json, it will execute by running \fBnpm version\fR\|\. preversion
-and version ware executed before bump the package version, postversion
-was executed after bump the package version\. For example to run \fBnpm version\fR
-after passed all test:
+If \fBpreversion\fP, \fBversion\fP, or \fBpostversion\fP are in the \fBscripts\fP property of
+the package\.json, they will be executed as part of running \fBnpm version\fP\|\.
+.P
+The exact order of execution is as follows:
+.RS 0
+.IP 1. 3
+Check to make sure the git working directory is clean before we get started\.
+Your scripts may add files to the commit in future steps\.
+This step is skipped if the \fB\-\-force\fP flag is set\.
+.IP 2. 3
+Run the \fBpreversion\fP script\. These scripts have access to the old \fBversion\fP in package\.json\.
+A typical use would be running your full test suite before deploying\.
+Any files you want added to the commit should be explicitly added using \fBgit add\fP\|\.
+.IP 3. 3
+Bump \fBversion\fP in \fBpackage\.json\fP as requested (\fBpatch\fP, \fBminor\fP, \fBmajor\fP, etc)\.
+.IP 4. 3
+Run the \fBversion\fP script\. These scripts have access to the new \fBversion\fP in package\.json
+(so they can incorporate it into file headers in generated files for example)\.
+Again, scripts should explicitly add generated files to the commit using \fBgit add\fP\|\.
+.IP 5. 3
+Commit and tag\.
+.IP 6. 3
+Run the \fBpostversion\fP script\. Use it to clean up the file system or automatically push
+the commit and/or tag\.
+
+.RE
+.P
+Take the following example:
.P
.RS 2
.nf
-"scripts": { "preversion": "npm test" }
+"scripts": {
+ "preversion": "npm test",
+ "version": "npm run build && git add \-A dist",
+ "postversion": "git push && git push \-\-tags && rm \-rf build/temp"
+}
.fi
.RE
+.P
+This runs all your tests, and proceeds only if they pass\. It then runs your \fBbuild\fP script, and
+adds everything in the \fBdist\fP directory to the commit\. After the commit, it pushes the new commit
+and tag up to the server, and deletes the \fBbuild/temp\fP directory\.
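A few invocations illustrating the flags discussed in this page:

    npm version patch                        # bump the patch field, commit, and tag (clean working dir required)
    npm --no-git-tag-version version minor   # bump package.json (and npm-shrinkwrap.json) without a commit or tag
    npm version prerelease --force           # proceed even though the working directory is not clean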
.SH CONFIGURATION
.SS git\-tag\-version
.RS 0
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index 8960067d0..ef5daf4b5 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM\-VIEW" "1" "June 2015" "" ""
+.TH "NPM\-VIEW" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-view\fR \- View registry info
.SH SYNOPSIS
@@ -12,9 +12,9 @@ npm v [@<scope>/]<name>[@<version>] [<field>[\.<subfield>]\.\.\.]
.SH DESCRIPTION
.P
This command shows data about a package and prints it to the stream
-referenced by the \fBoutfd\fR config, which defaults to stdout\.
+referenced by the \fBoutfd\fP config, which defaults to stdout\.
.P
-To show the package registry entry for the \fBconnect\fR package, you can do
+To show the package registry entry for the \fBconnect\fP package, you can do
this:
.P
.RS 2
@@ -26,7 +26,7 @@ npm view connect
The default version is "latest" if unspecified\.
.P
Field names can be specified after the package descriptor\.
-For example, to show the dependencies of the \fBronn\fR package at version
+For example, to show the dependencies of the \fBronn\fP package at version
0\.3\.5, you could do the following:
.P
.RS 2
@@ -87,7 +87,7 @@ npm view express contributors\.name contributors\.email
.P
"Person" fields are shown as a string if they would be shown as an
object\. So, for example, this will show the list of npm contributors in
-the shortened string format\. (See npm help 5 \fBpackage\.json\fR for more on this\.)
+the shortened string format\. (See npm help 5 \fBpackage\.json\fP for more on this\.)
.P
.RS 2
.nf
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index 812bbf87a..a8ab8dc74 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM\-WHOAMI" "1" "June 2015" "" ""
+.TH "NPM\-WHOAMI" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-whoami\fR \- Display npm username
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm whoami
.RE
.SH DESCRIPTION
.P
-Print the \fBusername\fR config to standard output\.
+Print the \fBusername\fP config to standard output\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index b38102a3f..69f64f16f 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "June 2015" "" ""
+.TH "NPM" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm\fR \- javascript package manager
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm <command> [args]
.RE
.SH VERSION
.P
-2.11.3
+2.13.4
.SH DESCRIPTION
.P
npm is the package manager for the Node JavaScript platform\. It puts
@@ -21,16 +21,16 @@ It is extremely configurable to support a wide variety of use cases\.
Most commonly, it is used to publish, discover, install, and develop node
programs\.
.P
-Run \fBnpm help\fR to get a list of available commands\.
+Run \fBnpm help\fP to get a list of available commands\.
.SH INTRODUCTION
.P
You probably got npm because you want to install stuff\.
.P
-Use \fBnpm install blerg\fR to install the latest version of "blerg"\. Check out
-npm help \fBnpm\-install\fR for more info\. It can do a lot of stuff\.
+Use \fBnpm install blerg\fP to install the latest version of "blerg"\. Check out
+npm help \fBnpm\-install\fP for more info\. It can do a lot of stuff\.
.P
-Use the \fBnpm search\fR command to show everything that's available\.
-Use \fBnpm ls\fR to show everything you've installed\.
+Use the \fBnpm search\fP command to show everything that's available\.
+Use \fBnpm ls\fP to show everything you've installed\.
.SH DEPENDENCIES
.P
If a package references to another package with a git URL, npm depends
@@ -48,7 +48,7 @@ the node\-gyp repository \fIhttps://github\.com/TooTallNate/node\-gyp\fR and
the node\-gyp Wiki \fIhttps://github\.com/TooTallNate/node\-gyp/wiki\fR\|\.
.SH DIRECTORIES
.P
-See npm help 5 \fBnpm\-folders\fR to learn about where npm puts stuff\.
+See npm help 5 \fBnpm\-folders\fP to learn about where npm puts stuff\.
.P
In particular, npm has two modes of operation:
.RS 0
@@ -56,17 +56,17 @@ In particular, npm has two modes of operation:
global mode:
.br
npm installs packages into the install prefix at
-\fBprefix/lib/node_modules\fR and bins are installed in \fBprefix/bin\fR\|\.
+\fBprefix/lib/node_modules\fP and bins are installed in \fBprefix/bin\fP\|\.
.IP \(bu 2
local mode:
.br
npm installs packages into the current project directory, which
defaults to the current working directory\. Packages are installed to
-\fB\|\./node_modules\fR, and bins are installed to \fB\|\./node_modules/\.bin\fR\|\.
+\fB\|\./node_modules\fP, and bins are installed to \fB\|\./node_modules/\.bin\fP\|\.
.RE
.P
-Local mode is the default\. Use \fB\-\-global\fR or \fB\-g\fR on any command to
+Local mode is the default\. Use \fB\-\-global\fP or \fB\-g\fP on any command to
operate in global mode instead\.
.SH DEVELOPER USAGE
.P
@@ -75,24 +75,24 @@ following help topics:
.RS 0
.IP \(bu 2
json:
-Make a package\.json file\. See npm help 5 \fBpackage\.json\fR\|\.
+Make a package\.json file\. See npm help 5 \fBpackage\.json\fP\|\.
.IP \(bu 2
link:
For linking your current working code into Node's path, so that you
don't have to reinstall every time you make a change\. Use
-\fBnpm link\fR to do this\.
+\fBnpm link\fP to do this\.
.IP \(bu 2
install:
It's a good idea to install things if you don't need the symbolic link\.
Especially, installing other peoples code from the registry is done via
-\fBnpm install\fR
+\fBnpm install\fP
.IP \(bu 2
adduser:
Create an account or log in\. Credentials are stored in the
user config file\.
.IP \(bu 2
publish:
-Use the \fBnpm publish\fR command to upload your code to the registry\.
+Use the \fBnpm publish\fP command to upload your code to the registry\.
.RE
.SH CONFIGURATION
@@ -103,27 +103,27 @@ npm is extremely configurable\. It reads its configuration options from
.IP \(bu 2
Command line switches:
.br
-Set a config with \fB\-\-key val\fR\|\. All keys take a value, even if they
+Set a config with \fB\-\-key val\fP\|\. All keys take a value, even if they
are booleans (the config parser doesn't know what the options are at
the time of parsing\.) If no value is provided, then the option is set
-to boolean \fBtrue\fR\|\.
+to boolean \fBtrue\fP\|\.
.IP \(bu 2
Environment Variables:
.br
Set any config by prefixing the name in an environment variable with
-\fBnpm_config_\fR\|\. For example, \fBexport npm_config_key=val\fR\|\.
+\fBnpm_config_\fP\|\. For example, \fBexport npm_config_key=val\fP\|\.
.IP \(bu 2
User Configs:
.br
The file at $HOME/\.npmrc is an ini\-formatted list of configs\. If
-present, it is parsed\. If the \fBuserconfig\fR option is set in the cli
+present, it is parsed\. If the \fBuserconfig\fP option is set in the cli
or env, then that will be used instead\.
.IP \(bu 2
Global Configs:
.br
The file found at \.\./etc/npmrc (from the node executable, by default
this resolves to /usr/local/etc/npmrc) will be parsed if it is found\.
-If the \fBglobalconfig\fR option is set in the cli, env, or user config,
+If the \fBglobalconfig\fP option is set in the cli, env, or user config,
then that file is parsed instead\.
.IP \(bu 2
Defaults:
@@ -133,14 +133,14 @@ lib/utils/config\-defs\.js\. These must not be changed\.
.RE
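Two of the configuration sources listed above, made concrete (the loglevel value is only illustrative):

    npm --loglevel verbose install        # command-line switch, applies to this run only
    export npm_config_loglevel=verbose    # environment variable, applies to every following npm call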
.P
-See npm help 7 \fBnpm\-config\fR for much much more information\.
+See npm help 7 \fBnpm\-config\fP for much much more information\.
.SH CONTRIBUTIONS
.P
Patches welcome!
.RS 0
.IP \(bu 2
code:
-Read through npm help 7 \fBnpm\-coding\-style\fR if you plan to submit code\.
+Read through npm help 7 \fBnpm\-coding\-style\fP if you plan to submit code\.
You don't have to agree with it, but you do have to follow it\.
.IP \(bu 2
docs:
@@ -149,8 +149,8 @@ file in the "doc" folder\. (Don't worry about generating the man page\.)
.RE
.P
-Contributors are listed in npm's \fBpackage\.json\fR file\. You can view them
-easily by doing \fBnpm view npm contributors\fR\|\.
+Contributors are listed in npm's \fBpackage\.json\fP file\. You can view them
+easily by doing \fBnpm view npm contributors\fP\|\.
.P
If you would like to contribute, but don't know what to work on, check
the issues list or ask on the mailing list\.
@@ -175,7 +175,7 @@ npm\-@googlegroups\.com
.RE
.P
Be sure to include \fIall\fR of the output from the npm command that didn't work
-as expected\. The \fBnpm\-debug\.log\fR file is also helpful to provide\.
+as expected\. The \fBnpm\-debug\.log\fP file is also helpful to provide\.
.P
You can also look for isaacs in #node\.js on irc://irc\.freenode\.net\. He
will no doubt tell you to put the output in a gist or email\.
diff --git a/deps/npm/man/man3/npm-bin.3 b/deps/npm/man/man3/npm-bin.3
index 12f03b262..bfb93455d 100644
--- a/deps/npm/man/man3/npm-bin.3
+++ b/deps/npm/man/man3/npm-bin.3
@@ -1,4 +1,4 @@
-.TH "NPM\-BIN" "3" "June 2015" "" ""
+.TH "NPM\-BIN" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-bin\fR \- Display npm bin folder
.SH SYNOPSIS
@@ -13,5 +13,5 @@ npm\.commands\.bin(args, cb)
Print the folder where npm will install executables\.
.P
This function should not be used programmatically\. Instead, just refer
-to the \fBnpm\.bin\fR property\.
+to the \fBnpm\.bin\fP property\.
diff --git a/deps/npm/man/man3/npm-bugs.3 b/deps/npm/man/man3/npm-bugs.3
index f04fdf2af..2740034a5 100644
--- a/deps/npm/man/man3/npm-bugs.3
+++ b/deps/npm/man/man3/npm-bugs.3
@@ -1,4 +1,4 @@
-.TH "NPM\-BUGS" "3" "June 2015" "" ""
+.TH "NPM\-BUGS" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-bugs\fR \- Bugs for a package in a web browser maybe
.SH SYNOPSIS
@@ -11,7 +11,7 @@ npm\.commands\.bugs(package, callback)
.SH DESCRIPTION
.P
This command tries to guess at the likely location of a package's
-bug tracker URL, and then tries to open it using the \fB\-\-browser\fR
+bug tracker URL, and then tries to open it using the \fB\-\-browser\fP
config param\.
.P
Like other commands, the first parameter is an array\. This command only
diff --git a/deps/npm/man/man3/npm-cache.3 b/deps/npm/man/man3/npm-cache.3
index bcfb6138b..50a6b7a38 100644
--- a/deps/npm/man/man3/npm-cache.3
+++ b/deps/npm/man/man3/npm-cache.3
@@ -1,4 +1,4 @@
-.TH "NPM\-CACHE" "3" "June 2015" "" ""
+.TH "NPM\-CACHE" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-cache\fR \- manage the npm cache programmatically
.SH SYNOPSIS
@@ -21,8 +21,8 @@ functionality\.
The callback is called with the package\.json data of the thing that is
eventually added to or read from the cache\.
.P
-The top level \fBnpm\.commands\.cache(\.\.\.)\fR functionality is a public
-interface, and like all commands on the \fBnpm\.commands\fR object, it will
+The top level \fBnpm\.commands\.cache(\.\.\.)\fP functionality is a public
+interface, and like all commands on the \fBnpm\.commands\fP object, it will
match the command line behavior exactly\.
.P
However, the cache folder structure and the cache helper functions are
diff --git a/deps/npm/man/man3/npm-commands.3 b/deps/npm/man/man3/npm-commands.3
index 0dca71c8c..03d0dc40b 100644
--- a/deps/npm/man/man3/npm-commands.3
+++ b/deps/npm/man/man3/npm-commands.3
@@ -1,4 +1,4 @@
-.TH "NPM\-COMMANDS" "3" "June 2015" "" ""
+.TH "NPM\-COMMANDS" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-commands\fR \- npm commands
.SH SYNOPSIS
@@ -17,8 +17,8 @@ In general, all commands on the command object take an \fBarray\fR of positional
argument \fBstrings\fR\|\. The last argument to any function is a callback\. Some
commands are special and take other optional arguments\.
.P
-All commands have their own man page\. See \fBman npm\-<command>\fR for command\-line
-usage, or \fBman 3 npm\-<command>\fR for programmatic usage\.
+All commands have their own man page\. See \fBman npm\-<command>\fP for command\-line
+usage, or \fBman 3 npm\-<command>\fP for programmatic usage\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man3/npm-config.3 b/deps/npm/man/man3/npm-config.3
index 0c92c3a28..d2e57c26b 100644
--- a/deps/npm/man/man3/npm-config.3
+++ b/deps/npm/man/man3/npm-config.3
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "3" "June 2015" "" ""
+.TH "NPM\-CONFIG" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-config\fR \- Manage the npm configuration files
.SH SYNOPSIS
@@ -16,29 +16,29 @@ This function acts much the same way as the command\-line version\. The first
element in the array tells config what to do\. Possible values are:
.RS 0
.IP \(bu 2
-\fBset\fR
- Sets a config parameter\. The second element in \fBargs\fR is interpreted as the
+\fBset\fP
+ Sets a config parameter\. The second element in \fBargs\fP is interpreted as the
key, and the third element is interpreted as the value\.
.IP \(bu 2
-\fBget\fR
- Gets the value of a config parameter\. The second element in \fBargs\fR is the
+\fBget\fP
+ Gets the value of a config parameter\. The second element in \fBargs\fP is the
key to get the value of\.
.IP \(bu 2
-\fBdelete\fR (\fBrm\fR or \fBdel\fR)
- Deletes a parameter from the config\. The second element in \fBargs\fR is the
+\fBdelete\fP (\fBrm\fP or \fBdel\fP)
+ Deletes a parameter from the config\. The second element in \fBargs\fP is the
key to delete\.
.IP \(bu 2
-\fBlist\fR (\fBls\fR)
+\fBlist\fP (\fBls\fP)
Show all configs that aren't secret\. No parameters necessary\.
.IP \(bu 2
-\fBedit\fR:
+\fBedit\fP:
Opens the config file in the default editor\. This command isn't very useful
programmatically, but it is made available\.
.RE
.P
To programmatically access npm configuration settings, or set them for
-the duration of a program, use the \fBnpm\.config\.set\fR and \fBnpm\.config\.get\fR
+the duration of a program, use the \fBnpm\.config\.set\fP and \fBnpm\.config\.get\fP
functions instead\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man3/npm-deprecate.3 b/deps/npm/man/man3/npm-deprecate.3
index 54237fce6..f0e35ab9d 100644
--- a/deps/npm/man/man3/npm-deprecate.3
+++ b/deps/npm/man/man3/npm-deprecate.3
@@ -1,4 +1,4 @@
-.TH "NPM\-DEPRECATE" "3" "June 2015" "" ""
+.TH "NPM\-DEPRECATE" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-deprecate\fR \- Deprecate a version of a package
.SH SYNOPSIS
@@ -16,20 +16,20 @@ a deprecation warning to all who attempt to install it\.
The 'args' parameter must have exactly two elements:
.RS 0
.IP \(bu 2
-\fBpackage[@version]\fR
- The \fBversion\fR portion is optional, and may be either a range, or a
+\fBpackage[@version]\fP
+ The \fBversion\fP portion is optional, and may be either a range, or a
specific version, or a tag\.
.IP \(bu 2
-\fBmessage\fR
+\fBmessage\fP
The warning message that will be printed whenever a user attempts to
install the package\.
.RE
.P
Note that you must be the package owner to deprecate something\. See the
-\fBowner\fR and \fBadduser\fR help topics\.
+\fBowner\fP and \fBadduser\fP help topics\.
.P
-To un\-deprecate a package, specify an empty string (\fB""\fR) for the \fBmessage\fR argument\.
+To un\-deprecate a package, specify an empty string (\fB""\fP) for the \fBmessage\fP argument\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man3/npm-docs.3 b/deps/npm/man/man3/npm-docs.3
index e23f1dcad..4305dae16 100644
--- a/deps/npm/man/man3/npm-docs.3
+++ b/deps/npm/man/man3/npm-docs.3
@@ -1,4 +1,4 @@
-.TH "NPM\-DOCS" "3" "June 2015" "" ""
+.TH "NPM\-DOCS" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-docs\fR \- Docs for a package in a web browser maybe
.SH SYNOPSIS
@@ -11,7 +11,7 @@ npm\.commands\.docs(package, callback)
.SH DESCRIPTION
.P
This command tries to guess at the likely location of a package's
-documentation URL, and then tries to open it using the \fB\-\-browser\fR
+documentation URL, and then tries to open it using the \fB\-\-browser\fP
config param\.
.P
Like other commands, the first parameter is an array\. This command only
diff --git a/deps/npm/man/man3/npm-edit.3 b/deps/npm/man/man3/npm-edit.3
index ceb99ec01..27ccd879f 100644
--- a/deps/npm/man/man3/npm-edit.3
+++ b/deps/npm/man/man3/npm-edit.3
@@ -1,4 +1,4 @@
-.TH "NPM\-EDIT" "3" "June 2015" "" ""
+.TH "NPM\-EDIT" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-edit\fR \- Edit an installed package
.SH SYNOPSIS
@@ -11,13 +11,13 @@ npm\.commands\.edit(package, callback)
.SH DESCRIPTION
.P
Opens the package folder in the default editor (or whatever you've
-configured as the npm \fBeditor\fR config \-\- see \fBnpm help config\fR\|\.)
+configured as the npm \fBeditor\fP config \-\- see \fBnpm help config\fP\|\.)
.P
After it has been edited, the package is rebuilt so as to pick up any
changes in compiled packages\.
.P
-For instance, you can do \fBnpm install connect\fR to install connect
-into your package, and then \fBnpm\.commands\.edit(["connect"], callback)\fR
+For instance, you can do \fBnpm install connect\fP to install connect
+into your package, and then \fBnpm\.commands\.edit(["connect"], callback)\fP
to make a few changes to your locally installed copy\.
.P
The first parameter is a string array with a single element, the package
diff --git a/deps/npm/man/man3/npm-explore.3 b/deps/npm/man/man3/npm-explore.3
index d9887378d..9fb6e008a 100644
--- a/deps/npm/man/man3/npm-explore.3
+++ b/deps/npm/man/man3/npm-explore.3
@@ -1,4 +1,4 @@
-.TH "NPM\-EXPLORE" "3" "June 2015" "" ""
+.TH "NPM\-EXPLORE" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-explore\fR \- Browse an installed package
.SH SYNOPSIS
@@ -16,7 +16,7 @@ If a command is specified, then it is run in the subshell, which then
immediately terminates\.
.P
Note that the package is \fInot\fR automatically rebuilt afterwards, so be
-sure to use \fBnpm rebuild <pkg>\fR if you make any changes\.
+sure to use \fBnpm rebuild <pkg>\fP if you make any changes\.
.P
The first element in the 'args' parameter must be a package name\. After that is the optional command, which can be any number of strings\. All of the strings will be combined into one, space\-delimited command\.
diff --git a/deps/npm/man/man3/npm-help-search.3 b/deps/npm/man/man3/npm-help-search.3
index ae547a529..243e5d2a2 100644
--- a/deps/npm/man/man3/npm-help-search.3
+++ b/deps/npm/man/man3/npm-help-search.3
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP\-SEARCH" "3" "June 2015" "" ""
+.TH "NPM\-HELP\-SEARCH" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-help-search\fR \- Search the help pages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-init.3 b/deps/npm/man/man3/npm-init.3
index 325e21728..947398d7a 100644
--- a/deps/npm/man/man3/npm-init.3
+++ b/deps/npm/man/man3/npm-init.3
@@ -1,4 +1,4 @@
-.TH "NPM" "" "June 2015" "" ""
+.TH "NPM" "" "August 2015" "" ""
.SH "NAME"
\fBnpm\fR
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-install.3 b/deps/npm/man/man3/npm-install.3
index 1d9497e39..d2951dd0d 100644
--- a/deps/npm/man/man3/npm-install.3
+++ b/deps/npm/man/man3/npm-install.3
@@ -1,4 +1,4 @@
-.TH "NPM\-INSTALL" "3" "June 2015" "" ""
+.TH "NPM\-INSTALL" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-install\fR \- install a package programmatically
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-link.3 b/deps/npm/man/man3/npm-link.3
index ab67f3933..5877e03fb 100644
--- a/deps/npm/man/man3/npm-link.3
+++ b/deps/npm/man/man3/npm-link.3
@@ -1,4 +1,4 @@
-.TH "NPM\-LINK" "3" "June 2015" "" ""
+.TH "NPM\-LINK" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-link\fR \- Symlink a package folder
.SH SYNOPSIS
@@ -14,12 +14,12 @@ npm\.commands\.link(packages, callback)
Package linking is a two\-step process\.
.P
Without parameters, link will create a globally\-installed
-symbolic link from \fBprefix/package\-name\fR to the current folder\.
+symbolic link from \fBprefix/package\-name\fP to the current folder\.
.P
-With a parameters, link will create a symlink from the local \fBnode_modules\fR
+With a parameter, link will create a symlink from the local \fBnode_modules\fP
folder to the global symlink\.
.P
-When creating tarballs for \fBnpm publish\fR, the linked packages are
+When creating tarballs for \fBnpm publish\fP, the linked packages are
"snapshotted" to their current state by resolving the symbolic links\.
.P
This is
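A short sketch of the two-step link flow described above, assuming npm 2.x is available as a local dependency; the package name my-lib is a placeholder.

    var npm = require("npm");

    npm.load({}, function (er, npm) {
      if (er) throw er;
      // Step 1, run from the package's own folder: npm.commands.link([], cb)
      // creates the global prefix/package-name symlink.
      // Step 2, run from a consuming project, links the global symlink into
      // ./node_modules by name:
      npm.commands.link(["my-lib"], function (er) {
        if (er) throw er;
        console.log("my-lib linked into ./node_modules");
      });
    });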
diff --git a/deps/npm/man/man3/npm-load.3 b/deps/npm/man/man3/npm-load.3
index dd92ce684..ce409b6a5 100644
--- a/deps/npm/man/man3/npm-load.3
+++ b/deps/npm/man/man3/npm-load.3
@@ -1,4 +1,4 @@
-.TH "NPM\-LOAD" "3" "June 2015" "" ""
+.TH "NPM\-LOAD" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-load\fR \- Load config settings
.SH SYNOPSIS
@@ -30,5 +30,5 @@ For example, to emulate the \-\-dev flag, pass an object that looks like this:
.fi
.RE
.P
-For a list of all the available command\-line configs, see \fBnpm help config\fR
+For a list of all the available command\-line configs, see \fBnpm help config\fP
diff --git a/deps/npm/man/man3/npm-ls.3 b/deps/npm/man/man3/npm-ls.3
index 82d3dbd94..4fc976f2e 100644
--- a/deps/npm/man/man3/npm-ls.3
+++ b/deps/npm/man/man3/npm-ls.3
@@ -1,4 +1,4 @@
-.TH "NPM\-LS" "3" "June 2015" "" ""
+.TH "NPM\-LS" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-ls\fR \- List installed packages
.SH SYNOPSIS
@@ -17,7 +17,7 @@ return that data using the callback\.
This command does not take any arguments, but args must be defined\.
Beyond that, if any arguments are passed in, npm will politely warn that it
does not take positional arguments, though you may set config flags
-like with any other command, such as \fBglobal\fR to list global packages\.
+like with any other command, such as \fBglobal\fP to list global packages\.
.P
It will print out extraneous, missing, and invalid packages\.
.P
diff --git a/deps/npm/man/man3/npm-outdated.3 b/deps/npm/man/man3/npm-outdated.3
index 01cdf0c68..f2fd1f73b 100644
--- a/deps/npm/man/man3/npm-outdated.3
+++ b/deps/npm/man/man3/npm-outdated.3
@@ -1,4 +1,4 @@
-.TH "NPM\-OUTDATED" "3" "June 2015" "" ""
+.TH "NPM\-OUTDATED" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-outdated\fR \- Check for outdated packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-owner.3 b/deps/npm/man/man3/npm-owner.3
index 5a82efdae..16e2c5401 100644
--- a/deps/npm/man/man3/npm-owner.3
+++ b/deps/npm/man/man3/npm-owner.3
@@ -1,4 +1,4 @@
-.TH "NPM\-OWNER" "3" "June 2015" "" ""
+.TH "NPM\-OWNER" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-owner\fR \- Manage package owners
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-pack.3 b/deps/npm/man/man3/npm-pack.3
index f1d65b2d3..037ec3c34 100644
--- a/deps/npm/man/man3/npm-pack.3
+++ b/deps/npm/man/man3/npm-pack.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PACK" "3" "June 2015" "" ""
+.TH "NPM\-PACK" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-pack\fR \- Create a tarball from a package
.SH SYNOPSIS
@@ -13,7 +13,7 @@ npm\.commands\.pack([packages,] callback)
For anything that's installable (that is, a package folder, tarball,
tarball url, name@tag, name@version, or name), this command will fetch
it to the cache, and then copy the tarball to the current working
-directory as \fB<name>\-<version>\.tgz\fR, and then write the filenames out to
+directory as \fB<name>\-<version>\.tgz\fP, and then write the filenames out to
stdout\.
.P
If the same package is specified multiple times, then the file will be
diff --git a/deps/npm/man/man3/npm-ping.3 b/deps/npm/man/man3/npm-ping.3
new file mode 100644
index 000000000..607fc8423
--- /dev/null
+++ b/deps/npm/man/man3/npm-ping.3
@@ -0,0 +1,17 @@
+.TH "NPM\-PING" "3" "August 2015" "" ""
+.SH "NAME"
+\fBnpm-ping\fR \- Ping npm registry
+.SH SYNOPSIS
+.P
+.RS 2
+.nf
+npm\.registry\.ping(registry, options, function (er, pong))
+.fi
+.RE
+.SH DESCRIPTION
+.P
+Attempts to connect to the given registry, returning a \fBpong\fP
+object with various metadata if it succeeds\.
+.P
+This function is primarily useful for debugging connection issues
+to npm registries\.
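The new page above gives only the synopsis, so here is one plausible way to call it, as a sketch: it assumes npm.registry is exposed on the loaded npm object (as the synopsis implies) and that the registry URL comes from the registry config; the pong object carries whatever metadata the registry returns.

    var npm = require("npm");

    npm.load({}, function (er, npm) {
      if (er) throw er;
      var registry = npm.config.get("registry"); // the configured registry URL
      npm.registry.ping(registry, {}, function (er, pong) {
        if (er) return console.error("registry unreachable:", er.message);
        console.log("pong:", pong); // metadata returned by the registry, if any
      });
    });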
diff --git a/deps/npm/man/man3/npm-prefix.3 b/deps/npm/man/man3/npm-prefix.3
index 9bf285db1..5ca38b5ec 100644
--- a/deps/npm/man/man3/npm-prefix.3
+++ b/deps/npm/man/man3/npm-prefix.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PREFIX" "3" "June 2015" "" ""
+.TH "NPM\-PREFIX" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-prefix\fR \- Display prefix
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-prune.3 b/deps/npm/man/man3/npm-prune.3
index 4ea946400..4bf3f2803 100644
--- a/deps/npm/man/man3/npm-prune.3
+++ b/deps/npm/man/man3/npm-prune.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PRUNE" "3" "June 2015" "" ""
+.TH "NPM\-PRUNE" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-prune\fR \- Remove extraneous packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-publish.3 b/deps/npm/man/man3/npm-publish.3
index fe3ce639c..4a303370b 100644
--- a/deps/npm/man/man3/npm-publish.3
+++ b/deps/npm/man/man3/npm-publish.3
@@ -1,4 +1,4 @@
-.TH "NPM\-PUBLISH" "3" "June 2015" "" ""
+.TH "NPM\-PUBLISH" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-publish\fR \- Publish a package
.SH SYNOPSIS
@@ -14,10 +14,10 @@ Publishes a package to the registry so that it can be installed by name\.
Possible values in the 'packages' array are:
.RS 0
.IP \(bu 2
-\fB<folder>\fR:
+\fB<folder>\fP:
A folder containing a package\.json file
.IP \(bu 2
-\fB<tarball>\fR:
+\fB<tarball>\fP:
A url or file path to a gzipped tar archive containing a single folder
with a package\.json file inside\.
diff --git a/deps/npm/man/man3/npm-rebuild.3 b/deps/npm/man/man3/npm-rebuild.3
index 668e2c99e..b59df5400 100644
--- a/deps/npm/man/man3/npm-rebuild.3
+++ b/deps/npm/man/man3/npm-rebuild.3
@@ -1,4 +1,4 @@
-.TH "NPM\-REBUILD" "3" "June 2015" "" ""
+.TH "NPM\-REBUILD" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-rebuild\fR \- Rebuild a package
.SH SYNOPSIS
@@ -10,10 +10,10 @@ npm\.commands\.rebuild([packages,] callback)
.RE
.SH DESCRIPTION
.P
-This command runs the \fBnpm build\fR command on each of the matched packages\. This is useful
+This command runs the \fBnpm build\fP command on each of the matched packages\. This is useful
when you install a new version of node, and must recompile all your C++ addons with
the new binary\. If no 'packages' parameter is specify, every package will be rebuilt\.
.SH CONFIGURATION
.P
-See \fBnpm help build\fR
+See \fBnpm help build\fP
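A sketch of the rebuild API described above, useful after switching node versions; it assumes npm 2.x as a local dependency and that an empty packages array matches the "no 'packages' parameter" case in the text.

    var npm = require("npm");

    npm.load({}, function (er, npm) {
      if (er) throw er;
      // With no specific packages, every installed package is rebuilt,
      // recompiling any C++ addons against the current node binary.
      npm.commands.rebuild([], function (er) {
        if (er) throw er;
        console.log("rebuild complete");
      });
    });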
diff --git a/deps/npm/man/man3/npm-repo.3 b/deps/npm/man/man3/npm-repo.3
index c88f5d22a..53983bca9 100644
--- a/deps/npm/man/man3/npm-repo.3
+++ b/deps/npm/man/man3/npm-repo.3
@@ -1,4 +1,4 @@
-.TH "NPM\-REPO" "3" "June 2015" "" ""
+.TH "NPM\-REPO" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-repo\fR \- Open package repository page in the browser
.SH SYNOPSIS
@@ -11,7 +11,7 @@ npm\.commands\.repo(package, callback)
.SH DESCRIPTION
.P
This command tries to guess at the likely location of a package's
-repository URL, and then tries to open it using the \fB\-\-browser\fR
+repository URL, and then tries to open it using the \fB\-\-browser\fP
config param\.
.P
Like other commands, the first parameter is an array\. This command only
diff --git a/deps/npm/man/man3/npm-restart.3 b/deps/npm/man/man3/npm-restart.3
index f792e5004..478c2f5f3 100644
--- a/deps/npm/man/man3/npm-restart.3
+++ b/deps/npm/man/man3/npm-restart.3
@@ -1,4 +1,4 @@
-.TH "NPM\-RESTART" "3" "June 2015" "" ""
+.TH "NPM\-RESTART" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-restart\fR \- Restart a package
.SH SYNOPSIS
@@ -39,13 +39,13 @@ postrestart
If no version is specified, then it restarts the "active" version\.
.P
npm can restart multiple packages\. Just specify multiple packages in
-the \fBpackages\fR parameter\.
+the \fBpackages\fP parameter\.
.SH NOTE
.P
Note that the "restart" script is run \fBin addition to\fR the "stop"
and "start" scripts, not instead of them\.
.P
-This is the behavior as of \fBnpm\fR major version 2\. A change in this
+This is the behavior as of \fBnpm\fP major version 2\. A change in this
behavior will be accompanied by an increase in major version number
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man3/npm-root.3 b/deps/npm/man/man3/npm-root.3
index 1bcf8913c..8ade1a682 100644
--- a/deps/npm/man/man3/npm-root.3
+++ b/deps/npm/man/man3/npm-root.3
@@ -1,4 +1,4 @@
-.TH "NPM\-ROOT" "3" "June 2015" "" ""
+.TH "NPM\-ROOT" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-root\fR \- Display npm root
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm\.commands\.root(args, callback)
.RE
.SH DESCRIPTION
.P
-Print the effective \fBnode_modules\fR folder to standard out\.
+Print the effective \fBnode_modules\fP folder to standard out\.
.P
\|'args' is never used and callback is never called with data\.
\|'args' must be present or things will break\.
diff --git a/deps/npm/man/man3/npm-run-script.3 b/deps/npm/man/man3/npm-run-script.3
index 2ec6ff50c..416f1ff79 100644
--- a/deps/npm/man/man3/npm-run-script.3
+++ b/deps/npm/man/man3/npm-run-script.3
@@ -1,4 +1,4 @@
-.TH "NPM\-RUN\-SCRIPT" "3" "June 2015" "" ""
+.TH "NPM\-RUN\-SCRIPT" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-run-script\fR \- Run arbitrary package scripts
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-search.3 b/deps/npm/man/man3/npm-search.3
index 7186d9d31..823eff13b 100644
--- a/deps/npm/man/man3/npm-search.3
+++ b/deps/npm/man/man3/npm-search.3
@@ -1,4 +1,4 @@
-.TH "NPM\-SEARCH" "3" "June 2015" "" ""
+.TH "NPM\-SEARCH" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-search\fR \- Search for packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-shrinkwrap.3 b/deps/npm/man/man3/npm-shrinkwrap.3
index e4047cde3..fa89d927d 100644
--- a/deps/npm/man/man3/npm-shrinkwrap.3
+++ b/deps/npm/man/man3/npm-shrinkwrap.3
@@ -1,4 +1,4 @@
-.TH "NPM\-SHRINKWRAP" "3" "June 2015" "" ""
+.TH "NPM\-SHRINKWRAP" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR \- programmatically generate package shrinkwrap file
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-start.3 b/deps/npm/man/man3/npm-start.3
index ef2c760c3..050c108e8 100644
--- a/deps/npm/man/man3/npm-start.3
+++ b/deps/npm/man/man3/npm-start.3
@@ -1,4 +1,4 @@
-.TH "NPM\-START" "3" "June 2015" "" ""
+.TH "NPM\-START" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-start\fR \- Start a package
.SH SYNOPSIS
@@ -13,5 +13,5 @@ npm\.commands\.start(packages, callback)
This runs a package's "start" script, if one was provided\.
.P
npm can start multiple packages\. Just specify multiple packages in the
-\fBpackages\fR parameter\.
+\fBpackages\fP parameter\.
diff --git a/deps/npm/man/man3/npm-stop.3 b/deps/npm/man/man3/npm-stop.3
index ed4b4c64c..9df3b4c69 100644
--- a/deps/npm/man/man3/npm-stop.3
+++ b/deps/npm/man/man3/npm-stop.3
@@ -1,4 +1,4 @@
-.TH "NPM\-STOP" "3" "June 2015" "" ""
+.TH "NPM\-STOP" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-stop\fR \- Stop a package
.SH SYNOPSIS
@@ -13,5 +13,5 @@ npm\.commands\.stop(packages, callback)
This runs a package's "stop" script, if one was provided\.
.P
npm can run stop on multiple packages\. Just specify multiple packages
-in the \fBpackages\fR parameter\.
+in the \fBpackages\fP parameter\.
diff --git a/deps/npm/man/man3/npm-submodule.3 b/deps/npm/man/man3/npm-submodule.3
deleted file mode 100644
index d2e23d7e8..000000000
--- a/deps/npm/man/man3/npm-submodule.3
+++ /dev/null
@@ -1,41 +0,0 @@
-.\" Generated with Ronnjs 0.3.8
-.\" http://github.com/kapouer/ronnjs/
-.
-.TH "NPM\-SUBMODULE" "3" "September 2014" "" ""
-.
-.SH "NAME"
-\fBnpm-submodule\fR \-\- Add a package as a git submodule
-.
-.SH "SYNOPSIS"
-.
-.nf
-npm\.commands\.submodule(packages, callback)
-.
-.fi
-.
-.SH "DESCRIPTION"
-For each package specified, npm will check if it has a git repository url
-in its package\.json description then add it as a git submodule at \fBnode_modules/<pkg name>\fR\|\.
-.
-.P
-This is a convenience only\. From then on, it\'s up to you to manage
-updates by using the appropriate git commands\. npm will stubbornly
-refuse to update, modify, or remove anything with a \fB\|\.git\fR subfolder
-in it\.
-.
-.P
-This command also does not install missing dependencies, if the package
-does not include them in its git repository\. If \fBnpm ls\fR reports that
-things are missing, you can either install, link, or submodule them yourself,
-or you can do \fBnpm explore <pkgname> \-\- npm install\fR to install the
-dependencies into the submodule folder\.
-.
-.SH "SEE ALSO"
-.
-.IP "\(bu" 4
-npm help json
-.
-.IP "\(bu" 4
-git help submodule
-.
-.IP "" 0
diff --git a/deps/npm/man/man3/npm-tag.3 b/deps/npm/man/man3/npm-tag.3
index c60b49fc8..5c3f35583 100644
--- a/deps/npm/man/man3/npm-tag.3
+++ b/deps/npm/man/man3/npm-tag.3
@@ -1,4 +1,4 @@
-.TH "NPM\-TAG" "3" "June 2015" "" ""
+.TH "NPM\-TAG" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-tag\fR \- Tag a published version
.SH SYNOPSIS
@@ -11,7 +11,7 @@ npm\.commands\.tag(package@version, tag, callback)
.SH DESCRIPTION
.P
Tags the specified version of the package with the specified tag, or the
-\fB\-\-tag\fR config if not specified\.
+\fB\-\-tag\fP config if not specified\.
.P
The 'package@version' is an array of strings, but only the first two elements are
currently used\.
@@ -23,5 +23,5 @@ specific version)\.
The second element is the name of the tag to tag this version with\. If this
parameter is missing or falsey (empty), the default from the config will be
used\. For more information about how to set this config, check
-\fBman 3 npm\-config\fR for programmatic usage or \fBman npm\-config\fR for cli usage\.
+\fBman 3 npm\-config\fP for programmatic usage or \fBman npm\-config\fP for cli usage\.
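A sketch of the tag API described above (npm 2.x as a local dependency; mypackage@1.2.3 and the beta tag are placeholders). The first array element names the package and version, the second names the tag; if the tag is missing, the --tag config value is used.

    var npm = require("npm");

    npm.load({}, function (er, npm) {
      if (er) throw er;
      // ["<package>@<version>", "<tag>"]; only the first two elements are used.
      npm.commands.tag(["mypackage@1.2.3", "beta"], function (er) {
        if (er) throw er;
        console.log("mypackage@1.2.3 tagged as beta");
      });
    });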
diff --git a/deps/npm/man/man3/npm-test.3 b/deps/npm/man/man3/npm-test.3
index 1cde98cb3..f0a67f124 100644
--- a/deps/npm/man/man3/npm-test.3
+++ b/deps/npm/man/man3/npm-test.3
@@ -1,4 +1,4 @@
-.TH "NPM\-TEST" "3" "June 2015" "" ""
+.TH "NPM\-TEST" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-test\fR \- Test a package
.SH SYNOPSIS
@@ -12,9 +12,9 @@
.P
This runs a package's "test" script, if one was provided\.
.P
-To run tests as a condition of installation, set the \fBnpat\fR config to
+To run tests as a condition of installation, set the \fBnpat\fP config to
true\.
.P
npm can run tests on multiple packages\. Just specify multiple packages
-in the \fBpackages\fR parameter\.
+in the \fBpackages\fP parameter\.
diff --git a/deps/npm/man/man3/npm-uninstall.3 b/deps/npm/man/man3/npm-uninstall.3
index 0bd8ec302..fe9ebaaf1 100644
--- a/deps/npm/man/man3/npm-uninstall.3
+++ b/deps/npm/man/man3/npm-uninstall.3
@@ -1,4 +1,4 @@
-.TH "NPM\-UNINSTALL" "3" "June 2015" "" ""
+.TH "NPM\-UNINSTALL" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-uninstall\fR \- uninstall a package programmatically
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-unpublish.3 b/deps/npm/man/man3/npm-unpublish.3
index 5cbc92dc3..721bf206b 100644
--- a/deps/npm/man/man3/npm-unpublish.3
+++ b/deps/npm/man/man3/npm-unpublish.3
@@ -1,4 +1,4 @@
-.TH "NPM\-UNPUBLISH" "3" "June 2015" "" ""
+.TH "NPM\-UNPUBLISH" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-unpublish\fR \- Remove a package from the registry
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-update.3 b/deps/npm/man/man3/npm-update.3
index 4ac31fe15..da780d598 100644
--- a/deps/npm/man/man3/npm-update.3
+++ b/deps/npm/man/man3/npm-update.3
@@ -1,4 +1,4 @@
-.TH "NPM\-UPDATE" "3" "June 2015" "" ""
+.TH "NPM\-UPDATE" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-update\fR \- Update a package
.SH SYNOPSIS
@@ -8,14 +8,14 @@
npm\.commands\.update(packages, callback)
.fi
.RE
-.TH "DESCRIPTION" "" "June 2015" "" ""
+.TH "DESCRIPTION" "" "August 2015" "" ""
.SH "NAME"
\fBDESCRIPTION\fR
.P
Updates a package, upgrading it to the latest version\. It also installs any
missing packages\.
.P
-The \fBpackages\fR argument is an array of packages to update\. The \fBcallback\fR
+The \fBpackages\fP argument is an array of packages to update\. The \fBcallback\fP
parameter will be called when done or when an error occurs\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man3/npm-version.3 b/deps/npm/man/man3/npm-version.3
index 61ed5d174..7a13475c4 100644
--- a/deps/npm/man/man3/npm-version.3
+++ b/deps/npm/man/man3/npm-version.3
@@ -1,4 +1,4 @@
-.TH "NPM\-VERSION" "3" "June 2015" "" ""
+.TH "NPM\-VERSION" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-version\fR \- Bump a package version
.SH SYNOPSIS
diff --git a/deps/npm/man/man3/npm-view.3 b/deps/npm/man/man3/npm-view.3
index 2b8645ee8..6921675a4 100644
--- a/deps/npm/man/man3/npm-view.3
+++ b/deps/npm/man/man3/npm-view.3
@@ -1,4 +1,4 @@
-.TH "NPM\-VIEW" "3" "June 2015" "" ""
+.TH "NPM\-VIEW" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-view\fR \- View registry info
.SH SYNOPSIS
@@ -11,7 +11,7 @@ npm\.commands\.view(args, [silent,] callback)
.SH DESCRIPTION
.P
This command shows data about a package and prints it to the stream
-referenced by the \fBoutfd\fR config, which defaults to stdout\.
+referenced by the \fBoutfd\fP config, which defaults to stdout\.
.P
The "args" parameter is an ordered list that closely resembles the command\-line
usage\. The elements should be ordered such that the first element is
@@ -21,7 +21,7 @@ which can be used to get only the information desired from the registry\.
.P
The callback will be passed all of the data returned by the query\.
.P
-For example, to get the package registry entry for the \fBconnect\fR package,
+For example, to get the package registry entry for the \fBconnect\fP package,
you can do this:
.P
.RS 2
@@ -33,7 +33,7 @@ npm\.commands\.view(["connect"], callback)
If no version is specified, "latest" is assumed\.
.P
Field names can be specified after the package descriptor\.
-For example, to show the dependencies of the \fBronn\fR package at version
+For example, to show the dependencies of the \fBronn\fP package at version
0\.3\.5, you could do the following:
.P
.RS 2
@@ -84,7 +84,7 @@ npm\.commands\.view(["express", "contributors\.name", "contributors\.email"], ca
.P
"Person" fields are shown as a string if they would be shown as an
object\. So, for example, this will show the list of npm contributors in
-the shortened string format\. (See \fBnpm help json\fR for more on this\.)
+the shortened string format\. (See \fBnpm help json\fP for more on this\.)
.P
.RS 2
.nf
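To make the field-selection behaviour above concrete, a small sketch (npm 2.x as a local dependency; the optional silent flag is assumed here to suppress printing to outfd so the data only reaches the callback):

    var npm = require("npm");

    npm.load({}, function (er, npm) {
      if (er) throw er;
      // Same query as the man page example: dependencies of ronn@0.3.5.
      npm.commands.view(["ronn@0.3.5", "dependencies"], true, function (er, data) {
        if (er) throw er;
        console.log(JSON.stringify(data, null, 2));
      });
    });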
diff --git a/deps/npm/man/man3/npm-whoami.3 b/deps/npm/man/man3/npm-whoami.3
index 6f0fc323f..0c29cf748 100644
--- a/deps/npm/man/man3/npm-whoami.3
+++ b/deps/npm/man/man3/npm-whoami.3
@@ -1,4 +1,4 @@
-.TH "NPM\-WHOAMI" "3" "June 2015" "" ""
+.TH "NPM\-WHOAMI" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm-whoami\fR \- Display npm username
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm\.commands\.whoami(args, callback)
.RE
.SH DESCRIPTION
.P
-Print the \fBusername\fR config to standard output\.
+Print the \fBusername\fP config to standard output\.
.P
\|'args' is never used and callback is never called with data\.
\|'args' must be present or things will break\.
diff --git a/deps/npm/man/man3/npm.3 b/deps/npm/man/man3/npm.3
index a27078deb..790675dd7 100644
--- a/deps/npm/man/man3/npm.3
+++ b/deps/npm/man/man3/npm.3
@@ -1,4 +1,4 @@
-.TH "NPM" "3" "June 2015" "" ""
+.TH "NPM" "3" "August 2015" "" ""
.SH "NAME"
\fBnpm\fR \- javascript package manager
.SH SYNOPSIS
@@ -20,22 +20,22 @@ npm\.load([configObject, ]function (er, npm) {
.RE
.SH VERSION
.P
-2.11.3
+2.13.4
.SH DESCRIPTION
.P
This is the API documentation for npm\.
To find documentation of the command line
-client, see npm help \fBnpm\fR\|\.
+client, see npm help \fBnpm\fP\|\.
.P
-Prior to using npm's commands, \fBnpm\.load()\fR must be called\. If you provide
-\fBconfigObject\fR as an object map of top\-level configs, they override the values
+Prior to using npm's commands, \fBnpm\.load()\fP must be called\. If you provide
+\fBconfigObject\fP as an object map of top\-level configs, they override the values
stored in the various config locations\. In the npm command line client, this
set of configs is parsed from the command line options\. Additional
configuration params are loaded from two configuration files\. See
-npm help \fBnpm\-config\fR, npm help 7 \fBnpm\-config\fR, and npm help 5 \fBnpmrc\fR for more information\.
+npm help \fBnpm\-config\fP, npm help 7 \fBnpm\-config\fP, and npm help 5 \fBnpmrc\fP for more information\.
.P
After that, each of the functions are accessible in the
-commands object: \fBnpm\.commands\.<cmd>\fR\|\. See npm help 7 \fBnpm\-index\fR for a list of
+commands object: \fBnpm\.commands\.<cmd>\fP\|\. See npm help 7 \fBnpm\-index\fP for a list of
all possible commands\.
.P
All commands on the command object take an \fBarray\fR of positional argument
@@ -45,50 +45,50 @@ commands take other optional arguments\.
Configs cannot currently be set on a per function basis, as each call to
npm\.config\.set will change the value for \fIall\fR npm commands in that process\.
.P
-To find API documentation for a specific command, run the \fBnpm apihelp\fR
+To find API documentation for a specific command, run the \fBnpm apihelp\fP
command\.
.SH METHODS AND PROPERTIES
.RS 0
.IP \(bu 2
-\fBnpm\.load(configs, cb)\fR
- Load the configuration params, and call the \fBcb\fR function once the
+\fBnpm\.load(configs, cb)\fP
+ Load the configuration params, and call the \fBcb\fP function once the
globalconfig and userconfig files have been loaded as well, or on
nextTick if they've already been loaded\.
.IP \(bu 2
-\fBnpm\.config\fR
+\fBnpm\.config\fP
An object for accessing npm configuration parameters\.
.RS 0
.IP \(bu 2
-\fBnpm\.config\.get(key)\fR
+\fBnpm\.config\.get(key)\fP
.IP \(bu 2
-\fBnpm\.config\.set(key, val)\fR
+\fBnpm\.config\.set(key, val)\fP
.IP \(bu 2
-\fBnpm\.config\.del(key)\fR
+\fBnpm\.config\.del(key)\fP
.RE
.IP \(bu 2
-\fBnpm\.dir\fR or \fBnpm\.root\fR
- The \fBnode_modules\fR directory where npm will operate\.
+\fBnpm\.dir\fP or \fBnpm\.root\fP
+ The \fBnode_modules\fP directory where npm will operate\.
.IP \(bu 2
-\fBnpm\.prefix\fR
+\fBnpm\.prefix\fP
The prefix where npm is operating\. (Most often the current working
directory\.)
.IP \(bu 2
-\fBnpm\.cache\fR
+\fBnpm\.cache\fP
The place where npm keeps JSON and tarballs it fetches from the
registry (or uploads to the registry)\.
.IP \(bu 2
-\fBnpm\.tmp\fR
+\fBnpm\.tmp\fP
npm's temporary working directory\.
.IP \(bu 2
-\fBnpm\.deref\fR
+\fBnpm\.deref\fP
Get the "real" name for a command that has either an alias or
abbreviation\.
.RE
.SH MAGIC
.P
-For each of the methods in the \fBnpm\.commands\fR object, a method is added to the
+For each of the methods in the \fBnpm\.commands\fP object, a method is added to the
npm object, which takes a set of positional string arguments rather than an
array and a callback\.
.P
@@ -106,13 +106,13 @@ For example, this would work in a node repl:
.fi
.RE
.P
-Note that that \fIwon't\fR work in a node program, since the \fBinstall\fR
+Note that that \fIwon't\fR work in a node program, since the \fBinstall\fP
method will get called before the configuration load is completed\.
.SH ABBREVS
.P
-In order to support \fBnpm ins foo\fR instead of \fBnpm install foo\fR, the
-\fBnpm\.commands\fR object has a set of abbreviations as well as the full
-method names\. Use the \fBnpm\.deref\fR method to find the real name\.
+In order to support \fBnpm ins foo\fP instead of \fBnpm install foo\fP, the
+\fBnpm\.commands\fP object has a set of abbreviations as well as the full
+method names\. Use the \fBnpm\.deref\fP method to find the real name\.
.P
For example:
.P
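A compact sketch tying together the load/config/commands flow and the abbreviation handling described above. It assumes npm 2.x as a local dependency; the loglevel override is illustrative, and the ls call's silent flag is assumed to behave like view's.

    var npm = require("npm");

    // Top-level configs passed to load() override values from the config files.
    npm.load({ loglevel: "silent" }, function (er, npm) {
      if (er) throw er;

      console.log(npm.deref("ins"));          // "install": abbreviation lookup
      console.log(npm.config.get("prefix"));  // effective prefix for this process

      // Per the caveat above, commands are only safe to call after load().
      npm.commands.ls([], true, function (er, tree) {
        if (er) throw er;
        console.log("top-level deps:", Object.keys(tree.dependencies || {}));
      });
    });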
diff --git a/deps/npm/man/man5/npm-folders.5 b/deps/npm/man/man5/npm-folders.5
index 71816580c..71a3c1574 100644
--- a/deps/npm/man/man5/npm-folders.5
+++ b/deps/npm/man/man5/npm-folders.5
@@ -1,4 +1,4 @@
-.TH "NPM\-FOLDERS" "5" "June 2015" "" ""
+.TH "NPM\-FOLDERS" "5" "August 2015" "" ""
.SH "NAME"
\fBnpm-folders\fR \- Folder Structures Used by npm
.SH DESCRIPTION
@@ -9,89 +9,89 @@ This document will tell you what it puts where\.
.SS tl;dr
.RS 0
.IP \(bu 2
-Local install (default): puts stuff in \fB\|\./node_modules\fR of the current
+Local install (default): puts stuff in \fB\|\./node_modules\fP of the current
package root\.
.IP \(bu 2
-Global install (with \fB\-g\fR): puts stuff in /usr/local or wherever node
+Global install (with \fB\-g\fP): puts stuff in /usr/local or wherever node
is installed\.
.IP \(bu 2
-Install it \fBlocally\fR if you're going to \fBrequire()\fR it\.
+Install it \fBlocally\fR if you're going to \fBrequire()\fP it\.
.IP \(bu 2
Install it \fBglobally\fR if you're going to run it on the command line\.
.IP \(bu 2
-If you need both, then install it in both places, or use \fBnpm link\fR\|\.
+If you need both, then install it in both places, or use \fBnpm link\fP\|\.
.RE
.SS prefix Configuration
.P
-The \fBprefix\fR config defaults to the location where node is installed\.
-On most systems, this is \fB/usr/local\fR, and most of the time is the same
-as node's \fBprocess\.installPrefix\fR\|\.
+The \fBprefix\fP config defaults to the location where node is installed\.
+On most systems, this is \fB/usr/local\fP, and most of the time is the same
+as node's \fBprocess\.installPrefix\fP\|\.
.P
On windows, this is the exact location of the node\.exe binary\. On Unix
systems, it's one level up, since node is typically installed at
-\fB{prefix}/bin/node\fR rather than \fB{prefix}/node\.exe\fR\|\.
+\fB{prefix}/bin/node\fP rather than \fB{prefix}/node\.exe\fP\|\.
.P
-When the \fBglobal\fR flag is set, npm installs things into this prefix\.
+When the \fBglobal\fP flag is set, npm installs things into this prefix\.
When it is not set, it uses the root of the current package, or the
current working directory if not in a package already\.
.SS Node Modules
.P
-Packages are dropped into the \fBnode_modules\fR folder under the \fBprefix\fR\|\.
+Packages are dropped into the \fBnode_modules\fP folder under the \fBprefix\fP\|\.
When installing locally, this means that you can
-\fBrequire("packagename")\fR to load its main module, or
-\fBrequire("packagename/lib/path/to/sub/module")\fR to load other modules\.
+\fBrequire("packagename")\fP to load its main module, or
+\fBrequire("packagename/lib/path/to/sub/module")\fP to load other modules\.
.P
-Global installs on Unix systems go to \fB{prefix}/lib/node_modules\fR\|\.
-Global installs on Windows go to \fB{prefix}/node_modules\fR (that is, no
-\fBlib\fR folder\.)
+Global installs on Unix systems go to \fB{prefix}/lib/node_modules\fP\|\.
+Global installs on Windows go to \fB{prefix}/node_modules\fP (that is, no
+\fBlib\fP folder\.)
.P
Scoped packages are installed the same way, except they are grouped together
-in a sub\-folder of the relevant \fBnode_modules\fR folder with the name of that
-scope prefix by the @ symbol, e\.g\. \fBnpm install @myorg/package\fR would place
-the package in \fB{prefix}/node_modules/@myorg/package\fR\|\. See npm help 7 \fBscopes\fR for
+in a sub\-folder of the relevant \fBnode_modules\fP folder with the name of that
+scope prefixed by the @ symbol, e\.g\. \fBnpm install @myorg/package\fP would place
+the package in \fB{prefix}/node_modules/@myorg/package\fP\|\. See npm help 7 \fBscopes\fP for
more details\.
.P
-If you wish to \fBrequire()\fR a package, then install it locally\.
+If you wish to \fBrequire()\fP a package, then install it locally\.
.SS Executables
.P
-When in global mode, executables are linked into \fB{prefix}/bin\fR on Unix,
-or directly into \fB{prefix}\fR on Windows\.
+When in global mode, executables are linked into \fB{prefix}/bin\fP on Unix,
+or directly into \fB{prefix}\fP on Windows\.
.P
When in local mode, executables are linked into
-\fB\|\./node_modules/\.bin\fR so that they can be made available to scripts run
+\fB\|\./node_modules/\.bin\fP so that they can be made available to scripts run
through npm\. (For example, so that a test runner will be in the path
-when you run \fBnpm test\fR\|\.)
+when you run \fBnpm test\fP\|\.)
.SS Man Pages
.P
-When in global mode, man pages are linked into \fB{prefix}/share/man\fR\|\.
+When in global mode, man pages are linked into \fB{prefix}/share/man\fP\|\.
.P
When in local mode, man pages are not installed\.
.P
Man pages are not installed on Windows systems\.
.SS Cache
.P
-See npm help \fBnpm\-cache\fR\|\. Cache files are stored in \fB~/\.npm\fR on Posix, or
-\fB~/npm\-cache\fR on Windows\.
+See npm help \fBnpm\-cache\fP\|\. Cache files are stored in \fB~/\.npm\fP on Posix, or
+\fB~/npm\-cache\fP on Windows\.
.P
-This is controlled by the \fBcache\fR configuration param\.
+This is controlled by the \fBcache\fP configuration param\.
.SS Temp Files
.P
Temporary files are stored by default in the folder specified by the
-\fBtmp\fR config, which defaults to the TMPDIR, TMP, or TEMP environment
-variables, or \fB/tmp\fR on Unix and \fBc:\\windows\\temp\fR on Windows\.
+\fBtmp\fP config, which defaults to the TMPDIR, TMP, or TEMP environment
+variables, or \fB/tmp\fP on Unix and \fBc:\\windows\\temp\fP on Windows\.
.P
Temp files are given a unique folder under this root for each run of the
program, and are deleted upon successful exit\.
.SH More Information
.P
When installing locally, npm first tries to find an appropriate
-\fBprefix\fR folder\. This is so that \fBnpm install foo@1\.2\.3\fR will install
-to the sensible root of your package, even if you happen to have \fBcd\fRed
+\fBprefix\fP folder\. This is so that \fBnpm install foo@1\.2\.3\fP will install
+to the sensible root of your package, even if you happen to have \fBcd\fPed
into some other folder\.
.P
Starting at the $PWD, npm will walk up the folder tree checking for a
-folder that contains either a \fBpackage\.json\fR file, or a \fBnode_modules\fR
+folder that contains either a \fBpackage\.json\fP file, or a \fBnode_modules\fP
folder\. If such a thing is found, then that is treated as the effective
"current directory" for the purpose of running npm commands\. (This
behavior is inspired by and similar to git's \.git\-folder seeking
@@ -99,16 +99,16 @@ logic when running git commands in a working dir\.)
.P
If no package root is found, then the current folder is used\.
.P
-When you run \fBnpm install foo@1\.2\.3\fR, then the package is loaded into
-the cache, and then unpacked into \fB\|\./node_modules/foo\fR\|\. Then, any of
+When you run \fBnpm install foo@1\.2\.3\fP, then the package is loaded into
+the cache, and then unpacked into \fB\|\./node_modules/foo\fP\|\. Then, any of
foo's dependencies are similarly unpacked into
-\fB\|\./node_modules/foo/node_modules/\.\.\.\fR\|\.
+\fB\|\./node_modules/foo/node_modules/\.\.\.\fP\|\.
.P
-Any bin files are symlinked to \fB\|\./node_modules/\.bin/\fR, so that they may
+Any bin files are symlinked to \fB\|\./node_modules/\.bin/\fP, so that they may
be found by npm scripts when necessary\.
.SS Global Installation
.P
-If the \fBglobal\fR configuration is set to true, then npm will
+If the \fBglobal\fP configuration is set to true, then npm will
install packages "globally"\.
.P
For global installation, packages are installed roughly the same way,
@@ -116,21 +116,21 @@ but using the folders described above\.
.SS Cycles, Conflicts, and Folder Parsimony
.P
Cycles are handled using the property of node's module system that it
-walks up the directories looking for \fBnode_modules\fR folders\. So, at every
-stage, if a package is already installed in an ancestor \fBnode_modules\fR
+walks up the directories looking for \fBnode_modules\fP folders\. So, at every
+stage, if a package is already installed in an ancestor \fBnode_modules\fP
folder, then it is not installed at the current location\.
.P
-Consider the case above, where \fBfoo \-> bar \-> baz\fR\|\. Imagine if, in
+Consider the case above, where \fBfoo \-> bar \-> baz\fP\|\. Imagine if, in
addition to that, baz depended on bar, so you'd have:
-\fBfoo \-> bar \-> baz \-> bar \-> baz \.\.\.\fR\|\. However, since the folder
-structure is: \fBfoo/node_modules/bar/node_modules/baz\fR, there's no need to
-put another copy of bar into \fB\|\.\.\./baz/node_modules\fR, since when it calls
+\fBfoo \-> bar \-> baz \-> bar \-> baz \.\.\.\fP\|\. However, since the folder
+structure is: \fBfoo/node_modules/bar/node_modules/baz\fP, there's no need to
+put another copy of bar into \fB\|\.\.\./baz/node_modules\fP, since when it calls
require("bar"), it will get the copy that is installed in
-\fBfoo/node_modules/bar\fR\|\.
+\fBfoo/node_modules/bar\fP\|\.
.P
This shortcut is only used if the exact same
-version would be installed in multiple nested \fBnode_modules\fR folders\. It
-is still possible to have \fBa/node_modules/b/node_modules/a\fR if the two
+version would be installed in multiple nested \fBnode_modules\fP folders\. It
+is still possible to have \fBa/node_modules/b/node_modules/a\fP if the two
"a" packages are different versions\. However, without repeating the
exact same package multiple times, an infinite regress will always be
prevented\.
@@ -176,36 +176,36 @@ foo
.fi
.RE
.P
-Since foo depends directly on \fBbar@1\.2\.3\fR and \fBbaz@1\.2\.3\fR, those are
-installed in foo's \fBnode_modules\fR folder\.
+Since foo depends directly on \fBbar@1\.2\.3\fP and \fBbaz@1\.2\.3\fP, those are
+installed in foo's \fBnode_modules\fP folder\.
.P
Even though the latest copy of blerg is 1\.3\.7, foo has a specific
dependency on version 1\.2\.5\. So, that gets installed at [A]\. Since the
-parent installation of blerg satisfies bar's dependency on \fBblerg@1\.x\fR,
+parent installation of blerg satisfies bar's dependency on \fBblerg@1\.x\fP,
it does not install another copy under [B]\.
.P
Bar [B] also has dependencies on baz and asdf, so those are installed in
-bar's \fBnode_modules\fR folder\. Because it depends on \fBbaz@2\.x\fR, it cannot
-re\-use the \fBbaz@1\.2\.3\fR installed in the parent \fBnode_modules\fR folder [D],
+bar's \fBnode_modules\fP folder\. Because it depends on \fBbaz@2\.x\fP, it cannot
+re\-use the \fBbaz@1\.2\.3\fP installed in the parent \fBnode_modules\fP folder [D],
and must install its own copy [C]\.
.P
-Underneath bar, the \fBbaz \-> quux \-> bar\fR dependency creates a cycle\.
+Underneath bar, the \fBbaz \-> quux \-> bar\fP dependency creates a cycle\.
However, because bar is already in quux's ancestry [B], it does not
unpack another copy of bar into that folder\.
.P
-Underneath \fBfoo \-> baz\fR [D], quux's [E] folder tree is empty, because its
+Underneath \fBfoo \-> baz\fP [D], quux's [E] folder tree is empty, because its
dependency on bar is satisfied by the parent folder copy installed at [B]\.
.P
-For a graphical breakdown of what is installed where, use \fBnpm ls\fR\|\.
+For a graphical breakdown of what is installed where, use \fBnpm ls\fP\|\.
.SS Publishing
.P
-Upon publishing, npm will look in the \fBnode_modules\fR folder\. If any of
-the items there are not in the \fBbundledDependencies\fR array, then they will
+Upon publishing, npm will look in the \fBnode_modules\fP folder\. If any of
+the items there are not in the \fBbundledDependencies\fP array, then they will
not be included in the package tarball\.
.P
This allows a package maintainer to install all of their dependencies
(and dev dependencies) locally, but only re\-publish those items that
-cannot be found elsewhere\. See npm help 5 \fBpackage\.json\fR for more information\.
+cannot be found elsewhere\. See npm help 5 \fBpackage\.json\fP for more information\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
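The prefix-finding walk described under "More Information" can be illustrated with plain node core modules; this is only a sketch of the described behaviour, not npm's actual implementation.

    var fs = require("fs");
    var path = require("path");

    // Walk up from dir until a folder containing package.json or node_modules
    // is found; if none is found, fall back to the starting folder.
    function findEffectiveRoot(dir) {
      var current = path.resolve(dir);
      while (true) {
        if (fs.existsSync(path.join(current, "package.json")) ||
            fs.existsSync(path.join(current, "node_modules"))) {
          return current;
        }
        var parent = path.dirname(current);
        if (parent === current) return path.resolve(dir); // reached filesystem root
        current = parent;
      }
    }

    console.log(findEffectiveRoot(process.cwd()));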
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 71816580c..71a3c1574 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "NPM\-FOLDERS" "5" "June 2015" "" ""
+.TH "NPM\-FOLDERS" "5" "August 2015" "" ""
.SH "NAME"
\fBnpm-folders\fR \- Folder Structures Used by npm
.SH DESCRIPTION
@@ -9,89 +9,89 @@ This document will tell you what it puts where\.
.SS tl;dr
.RS 0
.IP \(bu 2
-Local install (default): puts stuff in \fB\|\./node_modules\fR of the current
+Local install (default): puts stuff in \fB\|\./node_modules\fP of the current
package root\.
.IP \(bu 2
-Global install (with \fB\-g\fR): puts stuff in /usr/local or wherever node
+Global install (with \fB\-g\fP): puts stuff in /usr/local or wherever node
is installed\.
.IP \(bu 2
-Install it \fBlocally\fR if you're going to \fBrequire()\fR it\.
+Install it \fBlocally\fR if you're going to \fBrequire()\fP it\.
.IP \(bu 2
Install it \fBglobally\fR if you're going to run it on the command line\.
.IP \(bu 2
-If you need both, then install it in both places, or use \fBnpm link\fR\|\.
+If you need both, then install it in both places, or use \fBnpm link\fP\|\.
.RE
.SS prefix Configuration
.P
-The \fBprefix\fR config defaults to the location where node is installed\.
-On most systems, this is \fB/usr/local\fR, and most of the time is the same
-as node's \fBprocess\.installPrefix\fR\|\.
+The \fBprefix\fP config defaults to the location where node is installed\.
+On most systems, this is \fB/usr/local\fP, and most of the time is the same
+as node's \fBprocess\.installPrefix\fP\|\.
.P
On windows, this is the exact location of the node\.exe binary\. On Unix
systems, it's one level up, since node is typically installed at
-\fB{prefix}/bin/node\fR rather than \fB{prefix}/node\.exe\fR\|\.
+\fB{prefix}/bin/node\fP rather than \fB{prefix}/node\.exe\fP\|\.
.P
-When the \fBglobal\fR flag is set, npm installs things into this prefix\.
+When the \fBglobal\fP flag is set, npm installs things into this prefix\.
When it is not set, it uses the root of the current package, or the
current working directory if not in a package already\.
.SS Node Modules
.P
-Packages are dropped into the \fBnode_modules\fR folder under the \fBprefix\fR\|\.
+Packages are dropped into the \fBnode_modules\fP folder under the \fBprefix\fP\|\.
When installing locally, this means that you can
-\fBrequire("packagename")\fR to load its main module, or
-\fBrequire("packagename/lib/path/to/sub/module")\fR to load other modules\.
+\fBrequire("packagename")\fP to load its main module, or
+\fBrequire("packagename/lib/path/to/sub/module")\fP to load other modules\.
.P
-Global installs on Unix systems go to \fB{prefix}/lib/node_modules\fR\|\.
-Global installs on Windows go to \fB{prefix}/node_modules\fR (that is, no
-\fBlib\fR folder\.)
+Global installs on Unix systems go to \fB{prefix}/lib/node_modules\fP\|\.
+Global installs on Windows go to \fB{prefix}/node_modules\fP (that is, no
+\fBlib\fP folder\.)
.P
Scoped packages are installed the same way, except they are grouped together
-in a sub\-folder of the relevant \fBnode_modules\fR folder with the name of that
-scope prefix by the @ symbol, e\.g\. \fBnpm install @myorg/package\fR would place
-the package in \fB{prefix}/node_modules/@myorg/package\fR\|\. See npm help 7 \fBscopes\fR for
+in a sub\-folder of the relevant \fBnode_modules\fP folder with the name of that
+scope prefixed by the @ symbol, e\.g\. \fBnpm install @myorg/package\fP would place
+the package in \fB{prefix}/node_modules/@myorg/package\fP\|\. See npm help 7 \fBscopes\fP for
more details\.
.P
-If you wish to \fBrequire()\fR a package, then install it locally\.
+If you wish to \fBrequire()\fP a package, then install it locally\.
.SS Executables
.P
-When in global mode, executables are linked into \fB{prefix}/bin\fR on Unix,
-or directly into \fB{prefix}\fR on Windows\.
+When in global mode, executables are linked into \fB{prefix}/bin\fP on Unix,
+or directly into \fB{prefix}\fP on Windows\.
.P
When in local mode, executables are linked into
-\fB\|\./node_modules/\.bin\fR so that they can be made available to scripts run
+\fB\|\./node_modules/\.bin\fP so that they can be made available to scripts run
through npm\. (For example, so that a test runner will be in the path
-when you run \fBnpm test\fR\|\.)
+when you run \fBnpm test\fP\|\.)
.SS Man Pages
.P
-When in global mode, man pages are linked into \fB{prefix}/share/man\fR\|\.
+When in global mode, man pages are linked into \fB{prefix}/share/man\fP\|\.
.P
When in local mode, man pages are not installed\.
.P
Man pages are not installed on Windows systems\.
.SS Cache
.P
-See npm help \fBnpm\-cache\fR\|\. Cache files are stored in \fB~/\.npm\fR on Posix, or
-\fB~/npm\-cache\fR on Windows\.
+See npm help \fBnpm\-cache\fP\|\. Cache files are stored in \fB~/\.npm\fP on Posix, or
+\fB~/npm\-cache\fP on Windows\.
.P
-This is controlled by the \fBcache\fR configuration param\.
+This is controlled by the \fBcache\fP configuration param\.
.SS Temp Files
.P
Temporary files are stored by default in the folder specified by the
-\fBtmp\fR config, which defaults to the TMPDIR, TMP, or TEMP environment
-variables, or \fB/tmp\fR on Unix and \fBc:\\windows\\temp\fR on Windows\.
+\fBtmp\fP config, which defaults to the TMPDIR, TMP, or TEMP environment
+variables, or \fB/tmp\fP on Unix and \fBc:\\windows\\temp\fP on Windows\.
.P
Temp files are given a unique folder under this root for each run of the
program, and are deleted upon successful exit\.
.SH More Information
.P
When installing locally, npm first tries to find an appropriate
-\fBprefix\fR folder\. This is so that \fBnpm install foo@1\.2\.3\fR will install
-to the sensible root of your package, even if you happen to have \fBcd\fRed
+\fBprefix\fP folder\. This is so that \fBnpm install foo@1\.2\.3\fP will install
+to the sensible root of your package, even if you happen to have \fBcd\fPed
into some other folder\.
.P
Starting at the $PWD, npm will walk up the folder tree checking for a
-folder that contains either a \fBpackage\.json\fR file, or a \fBnode_modules\fR
+folder that contains either a \fBpackage\.json\fP file, or a \fBnode_modules\fP
folder\. If such a thing is found, then that is treated as the effective
"current directory" for the purpose of running npm commands\. (This
behavior is inspired by and similar to git's \.git\-folder seeking
@@ -99,16 +99,16 @@ logic when running git commands in a working dir\.)
.P
If no package root is found, then the current folder is used\.
.P
-When you run \fBnpm install foo@1\.2\.3\fR, then the package is loaded into
-the cache, and then unpacked into \fB\|\./node_modules/foo\fR\|\. Then, any of
+When you run \fBnpm install foo@1\.2\.3\fP, then the package is loaded into
+the cache, and then unpacked into \fB\|\./node_modules/foo\fP\|\. Then, any of
foo's dependencies are similarly unpacked into
-\fB\|\./node_modules/foo/node_modules/\.\.\.\fR\|\.
+\fB\|\./node_modules/foo/node_modules/\.\.\.\fP\|\.
.P
-Any bin files are symlinked to \fB\|\./node_modules/\.bin/\fR, so that they may
+Any bin files are symlinked to \fB\|\./node_modules/\.bin/\fP, so that they may
be found by npm scripts when necessary\.
.SS Global Installation
.P
-If the \fBglobal\fR configuration is set to true, then npm will
+If the \fBglobal\fP configuration is set to true, then npm will
install packages "globally"\.
.P
For global installation, packages are installed roughly the same way,
@@ -116,21 +116,21 @@ but using the folders described above\.
.SS Cycles, Conflicts, and Folder Parsimony
.P
Cycles are handled using the property of node's module system that it
-walks up the directories looking for \fBnode_modules\fR folders\. So, at every
-stage, if a package is already installed in an ancestor \fBnode_modules\fR
+walks up the directories looking for \fBnode_modules\fP folders\. So, at every
+stage, if a package is already installed in an ancestor \fBnode_modules\fP
folder, then it is not installed at the current location\.
.P
-Consider the case above, where \fBfoo \-> bar \-> baz\fR\|\. Imagine if, in
+Consider the case above, where \fBfoo \-> bar \-> baz\fP\|\. Imagine if, in
addition to that, baz depended on bar, so you'd have:
-\fBfoo \-> bar \-> baz \-> bar \-> baz \.\.\.\fR\|\. However, since the folder
-structure is: \fBfoo/node_modules/bar/node_modules/baz\fR, there's no need to
-put another copy of bar into \fB\|\.\.\./baz/node_modules\fR, since when it calls
+\fBfoo \-> bar \-> baz \-> bar \-> baz \.\.\.\fP\|\. However, since the folder
+structure is: \fBfoo/node_modules/bar/node_modules/baz\fP, there's no need to
+put another copy of bar into \fB\|\.\.\./baz/node_modules\fP, since when it calls
require("bar"), it will get the copy that is installed in
-\fBfoo/node_modules/bar\fR\|\.
+\fBfoo/node_modules/bar\fP\|\.
.P
This shortcut is only used if the exact same
-version would be installed in multiple nested \fBnode_modules\fR folders\. It
-is still possible to have \fBa/node_modules/b/node_modules/a\fR if the two
+version would be installed in multiple nested \fBnode_modules\fP folders\. It
+is still possible to have \fBa/node_modules/b/node_modules/a\fP if the two
"a" packages are different versions\. However, without repeating the
exact same package multiple times, an infinite regress will always be
prevented\.
@@ -176,36 +176,36 @@ foo
.fi
.RE
.P
-Since foo depends directly on \fBbar@1\.2\.3\fR and \fBbaz@1\.2\.3\fR, those are
-installed in foo's \fBnode_modules\fR folder\.
+Since foo depends directly on \fBbar@1\.2\.3\fP and \fBbaz@1\.2\.3\fP, those are
+installed in foo's \fBnode_modules\fP folder\.
.P
Even though the latest copy of blerg is 1\.3\.7, foo has a specific
dependency on version 1\.2\.5\. So, that gets installed at [A]\. Since the
-parent installation of blerg satisfies bar's dependency on \fBblerg@1\.x\fR,
+parent installation of blerg satisfies bar's dependency on \fBblerg@1\.x\fP,
it does not install another copy under [B]\.
.P
Bar [B] also has dependencies on baz and asdf, so those are installed in
-bar's \fBnode_modules\fR folder\. Because it depends on \fBbaz@2\.x\fR, it cannot
-re\-use the \fBbaz@1\.2\.3\fR installed in the parent \fBnode_modules\fR folder [D],
+bar's \fBnode_modules\fP folder\. Because it depends on \fBbaz@2\.x\fP, it cannot
+re\-use the \fBbaz@1\.2\.3\fP installed in the parent \fBnode_modules\fP folder [D],
and must install its own copy [C]\.
.P
-Underneath bar, the \fBbaz \-> quux \-> bar\fR dependency creates a cycle\.
+Underneath bar, the \fBbaz \-> quux \-> bar\fP dependency creates a cycle\.
However, because bar is already in quux's ancestry [B], it does not
unpack another copy of bar into that folder\.
.P
-Underneath \fBfoo \-> baz\fR [D], quux's [E] folder tree is empty, because its
+Underneath \fBfoo \-> baz\fP [D], quux's [E] folder tree is empty, because its
dependency on bar is satisfied by the parent folder copy installed at [B]\.
.P
-For a graphical breakdown of what is installed where, use \fBnpm ls\fR\|\.
+For a graphical breakdown of what is installed where, use \fBnpm ls\fP\|\.
.SS Publishing
.P
-Upon publishing, npm will look in the \fBnode_modules\fR folder\. If any of
-the items there are not in the \fBbundledDependencies\fR array, then they will
+Upon publishing, npm will look in the \fBnode_modules\fP folder\. If any of
+the items there are not in the \fBbundledDependencies\fP array, then they will
not be included in the package tarball\.
.P
This allows a package maintainer to install all of their dependencies
(and dev dependencies) locally, but only re\-publish those items that
-cannot be found elsewhere\. See npm help 5 \fBpackage\.json\fR for more information\.
+cannot be found elsewhere\. See npm help 5 \fBpackage\.json\fP for more information\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index ed3ee7c24..f309144b5 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\.JSON" "5" "June 2015" "" ""
+.TH "PACKAGE\.JSON" "5" "August 2015" "" ""
.SH "NAME"
\fBpackage.json\fR \- Specifics of npm's package\.json handling
.SH DESCRIPTION
@@ -7,7 +7,7 @@ This document is all you need to know about what's required in your package\.jso
file\. It must be actual JSON, not just a JavaScript object literal\.
.P
A lot of the behavior described in this document is affected by the config
-settings described in npm help 7 \fBnpm\-config\fR\|\.
+settings described in npm help 7 \fBnpm\-config\fP\|\.
.SH name
.P
The \fImost\fR important things in your package\.json are the name and version fields\.
@@ -50,8 +50,8 @@ already, before you get too attached to it\. https://www\.npmjs\.com/
.RE
.P
-A name can be optionally prefixed by a scope, e\.g\. \fB@myorg/mypackage\fR\|\. See
-npm help 7 \fBnpm\-scope\fR for more detail\.
+A name can be optionally prefixed by a scope, e\.g\. \fB@myorg/mypackage\fP\|\. See
+npm help 7 \fBnpm\-scope\fP for more detail\.
.SH version
.P
The \fImost\fR important things in your package\.json are the name and version fields\.
@@ -62,17 +62,17 @@ changes to the version\.
.P
Version must be parseable by
node\-semver \fIhttps://github\.com/isaacs/node\-semver\fR, which is bundled
-with npm as a dependency\. (\fBnpm install semver\fR to use it yourself\.)
+with npm as a dependency\. (\fBnpm install semver\fP to use it yourself\.)
.P
More on version numbers and ranges at npm help 7 semver\.
.SH description
.P
Put a description in it\. It's a string\. This helps people discover your
-package, as it's listed in \fBnpm search\fR\|\.
+package, as it's listed in \fBnpm search\fP\|\.
.SH keywords
.P
Put keywords in it\. It's an array of strings\. This helps people
-discover your package as it's listed in \fBnpm search\fR\|\.
+discover your package as it's listed in \fBnpm search\fP\|\.
.SH homepage
.P
The url to the project homepage\.
@@ -101,7 +101,7 @@ It should look like this:
You can specify either one or both values\. If you want to provide only a url,
you can specify the value for "bugs" as a simple string instead of an object\.
.P
-If a url is provided, it will be used by the \fBnpm bugs\fR command\.
+If a url is provided, it will be used by the \fBnpm bugs\fP command\.
.SH license
.P
You should specify a license for your package so that people know how they are
@@ -134,11 +134,11 @@ you are using a custom license, use the following valid SPDX expression:
.P
.RS 2
.nf
-{ "license" : "LicenseRef\-LICENSE" }
+{ "license" : "SEE LICENSE IN <filename>" }
.fi
.RE
.P
-Then include a LICENSE file at the top level of the package\.
+Then include a file named \fB<filename>\fP at the top level of the package\.
.P
Some old packages used license objects or a "licenses" property containing an
array of license objects:
@@ -175,6 +175,17 @@ Those styles are now deprecated\. Instead, use SPDX expressions, like this:
{ "license": "(MIT OR Apache\-2\.0)" }
.fi
.RE
+.P
+Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:
+.P
+.RS 2
+.nf
+{ "license": "UNLICENSED"}
+.fi
+.RE
+.P
+Consider also setting \fB"private": true\fP to prevent accidental publication\.
.SH people fields: author, contributors
.P
The "author" is one person\. "contributors" is an array of people\. A "person"
@@ -210,11 +221,49 @@ You can also provide a "\.npmignore" file in the root of your package,
which will keep files from being included, even if they would be picked
up by the files array\. The "\.npmignore" file works just like a
"\.gitignore"\.
+.P
+Certain files are always included, regardless of settings:
+.RS 0
+.IP \(bu 2
+\fBpackage\.json\fP
+.IP \(bu 2
+\fBREADME\fP (and its variants)
+.IP \(bu 2
+\fBCHANGELOG\fP (and its variants)
+.IP \(bu 2
+\fBLICENSE\fP / \fBLICENCE\fP
+
+.RE
+.P
+Conversely, some files are always ignored:
+.RS 0
+.IP \(bu 2
+\fB\|\.git\fP
+.IP \(bu 2
+\fBCVS\fP
+.IP \(bu 2
+\fB\|\.svn\fP
+.IP \(bu 2
+\fB\|\.hg\fP
+.IP \(bu 2
+\fB\|\.lock\-wscript\fP
+.IP \(bu 2
+\fB\|\.wafpickle\-N\fP
+.IP \(bu 2
+\fB*\.swp\fP
+.IP \(bu 2
+\fB\|\.DS_Store\fP
+.IP \(bu 2
+\fB\|\._*\fP
+.IP \(bu 2
+\fBnpm\-debug\.log\fP
+
+.RE
.SH main
.P
The main field is a module ID that is the primary entry point to your program\.
-That is, if your package is named \fBfoo\fR, and a user installs it, and then does
-\fBrequire("foo")\fR, then your main module's exports object will be returned\.
+That is, if your package is named \fBfoo\fP, and a user installs it, and then does
+\fBrequire("foo")\fP, then your main module's exports object will be returned\.
.P
This should be a module ID relative to the root of your package folder\.
.P
@@ -226,9 +275,9 @@ A lot of packages have one or more executable files that they'd like to
install into the PATH\. npm makes this pretty easy (in fact, it uses this
feature to install the "npm" executable\.)
.P
-To use this, supply a \fBbin\fR field in your package\.json which is a map of
+To use this, supply a \fBbin\fP field in your package\.json which is a map of
command name to local file name\. On install, npm will symlink that file into
-\fBprefix/bin\fR for global installs, or \fB\|\./node_modules/\.bin/\fR for local
+\fBprefix/bin\fP for global installs, or \fB\|\./node_modules/\.bin/\fP for local
installs\.
.P
For example, myapp could have this:
@@ -239,8 +288,8 @@ For example, myapp could have this:
.fi
.RE
.P
-So, when you install myapp, it'll create a symlink from the \fBcli\.js\fR script to
-\fB/usr/local/bin/myapp\fR\|\.
+So, when you install myapp, it'll create a symlink from the \fBcli\.js\fP script to
+\fB/usr/local/bin/myapp\fP\|\.
.P
If you have a single executable, and its name should be the name
of the package, then you can just supply it as a string\. For example:
@@ -265,10 +314,10 @@ would be the same as this:
.SH man
.P
Specify either a single file or an array of filenames to put in place for the
-\fBman\fR program to find\.
+\fBman\fP program to find\.
.P
If only a single file is provided, then it's installed such that it is the
-result from \fBman <pkgname>\fR, regardless of its actual filename\. For example:
+result from \fBman <pkgname>\fP, regardless of its actual filename\. For example:
.P
.RS 2
.nf
@@ -281,7 +330,7 @@ result from \fBman <pkgname>\fR, regardless of its actual filename\. For exampl
.fi
.RE
.P
-would link the \fB\|\./man/doc\.1\fR file in such that it is the target for \fBman foo\fR
+would link the \fB\|\./man/doc\.1\fP file in such that it is the target for \fBman foo\fP
.P
If the filename doesn't start with the package name, then it's prefixed\.
So, this:
@@ -297,9 +346,9 @@ So, this:
.fi
.RE
.P
-will create files to do \fBman foo\fR and \fBman foo\-bar\fR\|\.
+will create files to do \fBman foo\fP and \fBman foo\-bar\fP\|\.
.P
-Man files must end with a number, and optionally a \fB\|\.gz\fR suffix if they are
+Man files must end with a number, and optionally a \fB\|\.gz\fP suffix if they are
compressed\. The number dictates which man section the file is installed into\.
.P
.RS 2
@@ -313,11 +362,11 @@ compressed\. The number dictates which man section the file is installed into\.
.fi
.RE
.P
-will create entries for \fBman foo\fR and \fBman 2 foo\fR
+will create entries for \fBman foo\fP and \fBman 2 foo\fP
.SH directories
.P
The CommonJS Packages \fIhttp://wiki\.commonjs\.org/wiki/Packages/1\.0\fR spec details a
-few ways that you can indicate the structure of your package using a \fBdirectories\fR
+few ways that you can indicate the structure of your package using a \fBdirectories\fP
object\. If you look at npm's package\.json \fIhttps://registry\.npmjs\.org/npm/latest\fR,
you'll see that it has directories for doc, lib, and man\.
.P
@@ -328,10 +377,13 @@ Tell people where the bulk of your library is\. Nothing special is done
with the lib folder in any way, but it's useful meta info\.
.SS directories\.bin
.P
-If you specify a \fBbin\fR directory, then all the files in that folder will
-be added as children of the \fBbin\fR path\.
+If you specify a \fBbin\fP directory in \fBdirectories\.bin\fP, all the files in
+that folder will be added\.
.P
-If you have a \fBbin\fR path already, then this has no effect\.
+Because of the way the \fBbin\fP directive works, specifying both a
+\fBbin\fP path and setting \fBdirectories\.bin\fP is an error\. If you want to
+specify individual files, use \fBbin\fP, and for all the files in an
+existing \fBbin\fP directory, use \fBdirectories\.bin\fP\|\.
.SS directories\.man
.P
A folder that is full of man pages\. Sugar to generate a "man" array by
@@ -346,7 +398,7 @@ Put example scripts in here\. Someday, it might be exposed in some clever way\.
.SH repository
.P
Specify the place where your code lives\. This is helpful for people who
-want to contribute\. If the git repo is on GitHub, then the \fBnpm docs\fR
+want to contribute\. If the git repo is on GitHub, then the \fBnpm docs\fP
command will be able to find you\.
.P
Do it like this:
@@ -370,7 +422,7 @@ directly to a VCS program without any modification\. It should not be a url to
html project page that you put in your browser\. It's for computers\.
.P
For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same
-shortcut syntax you use for \fBnpm install\fR:
+shortcut syntax you use for \fBnpm install\fP:
.P
.RS 2
.nf
@@ -389,7 +441,7 @@ The "scripts" property is a dictionary containing script commands that are run
at various times in the lifecycle of your package\. The key is the lifecycle
event, and the value is the command to run at that point\.
.P
-See npm help 7 \fBnpm\-scripts\fR to find out more about writing package scripts\.
+See npm help 7 \fBnpm\-scripts\fP to find out more about writing package scripts\.
.SH config
.P
A "config" object can be used to set configuration parameters used in package
@@ -404,10 +456,10 @@ following:
.RE
.P
and then had a "start" command that then referenced the
-\fBnpm_package_config_port\fR environment variable, then the user could
-override that by doing \fBnpm config set foo:port 8001\fR\|\.
+\fBnpm_package_config_port\fP environment variable, then the user could
+override that by doing \fBnpm config set foo:port 8001\fP\|\.
.P
-See npm help 7 \fBnpm\-config\fR and npm help 7 \fBnpm\-scripts\fR for more on package
+See npm help 7 \fBnpm\-config\fP and npm help 7 \fBnpm\-scripts\fP for more on package
configs\.
.SH dependencies
.P
@@ -417,44 +469,44 @@ space\-separated descriptors\. Dependencies can also be identified with a
tarball or git URL\.
.P
\fBPlease do not put test harnesses or transpilers in your
-\fBdependencies\fR object\.\fR See \fBdevDependencies\fR, below\.
+\fBdependencies\fP object\.\fR See \fBdevDependencies\fP, below\.
.P
See npm help 7 semver for more details about specifying version ranges\.
.RS 0
.IP \(bu 2
-\fBversion\fR Must match \fBversion\fR exactly
+\fBversion\fP Must match \fBversion\fP exactly
.IP \(bu 2
-\fB>version\fR Must be greater than \fBversion\fR
+\fB>version\fP Must be greater than \fBversion\fP
.IP \(bu 2
-\fB>=version\fR etc
+\fB>=version\fP etc
.IP \(bu 2
-\fB<version\fR
+\fB<version\fP
.IP \(bu 2
-\fB<=version\fR
+\fB<=version\fP
.IP \(bu 2
-\fB~version\fR "Approximately equivalent to version" See npm help 7 semver
+\fB~version\fP "Approximately equivalent to version" See npm help 7 semver
.IP \(bu 2
-\fB^version\fR "Compatible with version" See npm help 7 semver
+\fB^version\fP "Compatible with version" See npm help 7 semver
.IP \(bu 2
-\fB1\.2\.x\fR 1\.2\.0, 1\.2\.1, etc\., but not 1\.3\.0
+\fB1\.2\.x\fP 1\.2\.0, 1\.2\.1, etc\., but not 1\.3\.0
.IP \(bu 2
-\fBhttp://\.\.\.\fR See 'URLs as Dependencies' below
+\fBhttp://\.\.\.\fP See 'URLs as Dependencies' below
.IP \(bu 2
-\fB*\fR Matches any version
+\fB*\fP Matches any version
.IP \(bu 2
-\fB""\fR (just an empty string) Same as \fB*\fR
+\fB""\fP (just an empty string) Same as \fB*\fP
.IP \(bu 2
-\fBversion1 \- version2\fR Same as \fB>=version1 <=version2\fR\|\.
+\fBversion1 \- version2\fP Same as \fB>=version1 <=version2\fP\|\.
.IP \(bu 2
-\fBrange1 || range2\fR Passes if either range1 or range2 are satisfied\.
+\fBrange1 || range2\fP Passes if either range1 or range2 are satisfied\.
.IP \(bu 2
-\fBgit\.\.\.\fR See 'Git URLs as Dependencies' below
+\fBgit\.\.\.\fP See 'Git URLs as Dependencies' below
.IP \(bu 2
-\fBuser/repo\fR See 'GitHub URLs' below
+\fBuser/repo\fP See 'GitHub URLs' below
.IP \(bu 2
-\fBtag\fR A specific version tagged and published as \fBtag\fR See npm help \fBnpm\-tag\fR
+\fBtag\fP A specific version tagged and published as \fBtag\fP See npm help \fBnpm\-tag\fP
.IP \(bu 2
-\fBpath/path/path\fR See Local Paths below
+\fBpath/path/path\fP See Local Paths below
.RE
.P
@@ -499,12 +551,12 @@ git+https://user@hostname/project/blah\.git#commit\-ish
.fi
.RE
.P
-The \fBcommit\-ish\fR can be any tag, sha, or branch which can be supplied as
-an argument to \fBgit checkout\fR\|\. The default is \fBmaster\fR\|\.
+The \fBcommit\-ish\fP can be any tag, sha, or branch which can be supplied as
+an argument to \fBgit checkout\fP\|\. The default is \fBmaster\fP\|\.
.SH GitHub URLs
.P
As of version 1\.1\.65, you can refer to GitHub urls as just "foo":
-"user/foo\-project"\. Just as with git URLs, a \fBcommit\-ish\fR suffix can be
+"user/foo\-project"\. Just as with git URLs, a \fBcommit\-ish\fP suffix can be
included\. For example:
.P
.RS 2
@@ -522,7 +574,7 @@ included\. For example:
.SH Local Paths
.P
As of version 2\.0\.0 you can provide a path to a local directory that contains a
-package\. Local paths can be saved using \fBnpm install \-\-save\fR, using any of
+package\. Local paths can be saved using \fBnpm install \-\-save\fP, using any of
these forms:
.P
.RS 2
@@ -535,7 +587,7 @@ these forms:
.RE
.P
in which case they will be normalized to a relative path and added to your
-\fBpackage\.json\fR\|\. For example:
+\fBpackage\.json\fP\|\. For example:
.P
.RS 2
.nf
@@ -558,15 +610,15 @@ If someone is planning on downloading and using your module in their
program, then they probably don't want or need to download and build
the external test or documentation framework that you use\.
.P
-In this case, it's best to map these additional items in a \fBdevDependencies\fR
+In this case, it's best to map these additional items in a \fBdevDependencies\fP
object\.
.P
-These things will be installed when doing \fBnpm link\fR or \fBnpm install\fR
+These things will be installed when doing \fBnpm link\fP or \fBnpm install\fP
from the root of a package, and can be managed like any other npm
-configuration param\. See npm help 7 \fBnpm\-config\fR for more on the topic\.
+configuration param\. See npm help 7 \fBnpm\-config\fP for more on the topic\.
.P
For build steps that are not platform\-specific, such as compiling
-CoffeeScript or other languages to JavaScript, use the \fBprepublish\fR
+CoffeeScript or other languages to JavaScript, use the \fBprepublish\fP
script to do this, and make the required package a devDependency\.
.P
For example:
@@ -587,14 +639,14 @@ For example:
.fi
.RE
.P
-The \fBprepublish\fR script will be run before publishing, so that users
+The \fBprepublish\fP script will be run before publishing, so that users
can consume the functionality without requiring them to compile it
-themselves\. In dev mode (ie, locally running \fBnpm install\fR), it'll
+themselves\. In dev mode (ie, locally running \fBnpm install\fP), it'll
run this script as well, so that you can test it easily\.
.SH peerDependencies
.P
In some cases, you want to express the compatibility of your package with a
-host tool or library, while not necessarily doing a \fBrequire\fR of this host\.
+host tool or library, while not necessarily doing a \fBrequire\fP of this host\.
This is usually referred to as a \fIplugin\fR\|\. Notably, your module may be exposing
a specific interface, expected and specified by the host documentation\.
.P
@@ -612,8 +664,8 @@ For example:
.fi
.RE
.P
-This ensures your package \fBtea\-latte\fR can be installed \fIalong\fR with the second
-major version of the host package \fBtea\fR only\. \fBnpm install tea\-latte\fR could
+This ensures your package \fBtea\-latte\fP can be installed \fIalong\fR with the second
+major version of the host package \fBtea\fP only\. \fBnpm install tea\-latte\fP could
possibly yield the following dependency graph:
.P
.RS 2
@@ -623,7 +675,7 @@ possibly yield the following dependency graph:
.fi
.RE
.P
-\fBNOTE: npm versions 1 and 2 will automatically install \fBpeerDependencies\fR if
+\fBNOTE: npm versions 1 and 2 will automatically install \fBpeerDependencies\fP if
they are not explicitly depended upon higher in the dependency tree\. In the
next major version of npm (npm@3), this will no longer be the case\. You will
receive a warning that the peerDependency is not installed instead\.\fR The
@@ -636,19 +688,19 @@ possible, and not to lock it down to specific patch versions\.
.P
Assuming the host complies with semver \fIhttp://semver\.org/\fR, only changes in
the host package's major version will break your plugin\. Thus, if you've worked
-with every 1\.x version of the host package, use \fB"^1\.0"\fR or \fB"1\.x"\fR to express
-this\. If you depend on features introduced in 1\.5\.2, use \fB">= 1\.5\.2 < 2"\fR\|\.
+with every 1\.x version of the host package, use \fB"^1\.0"\fP or \fB"1\.x"\fP to express
+this\. If you depend on features introduced in 1\.5\.2, use \fB">= 1\.5\.2 < 2"\fP\|\.
.SH bundledDependencies
.P
Array of package names that will be bundled when publishing the package\.
.P
-If this is spelled \fB"bundleDependencies"\fR, then that is also honorable\.
+If this is spelled \fB"bundleDependencies"\fP, then that is also honorable\.
.SH optionalDependencies
.P
If a dependency can be used, but you would like npm to proceed if it cannot be
-found or fails to install, then you may put it in the \fBoptionalDependencies\fR
+found or fails to install, then you may put it in the \fBoptionalDependencies\fP
object\. This is a map of package name to version or url, just like the
-\fBdependencies\fR object\. The difference is that build failures do not cause
+\fBdependencies\fP object\. The difference is that build failures do not cause
installation to fail\.
.P
It is still your program's responsibility to handle the lack of the
@@ -674,8 +726,8 @@ if (foo) {
.fi
.RE
.P
-Entries in \fBoptionalDependencies\fR will override entries of the same name in
-\fBdependencies\fR, so it's usually best to only put in one place\.
+Entries in \fBoptionalDependencies\fP will override entries of the same name in
+\fBdependencies\fP, so it's usually best to only put it in one place\.
.SH engines
.P
You can specify the version of node that your stuff works on:
@@ -702,16 +754,16 @@ are capable of properly installing your program\. For example:
.fi
.RE
.P
-Note that, unless the user has set the \fBengine\-strict\fR config flag, this
+Note that, unless the user has set the \fBengine\-strict\fP config flag, this
field is advisory only\.
.SH engineStrict
.P
\fBNOTE: This feature is deprecated and will be removed in npm 3\.0\.0\.\fR
.P
If you are sure that your module will \fIdefinitely not\fR run properly on
-versions of Node/npm other than those specified in the \fBengines\fR object,
-then you can set \fB"engineStrict": true\fR in your package\.json file\.
-This will override the user's \fBengine\-strict\fR config setting\.
+versions of Node/npm other than those specified in the \fBengines\fP object,
+then you can set \fB"engineStrict": true\fP in your package\.json file\.
+This will override the user's \fBengine\-strict\fP config setting\.
.P
Please do not do this unless you are really very very sure\. If your
engines object is something overly restrictive, you can quite easily and
@@ -737,7 +789,7 @@ just prepend the blacklisted os with a '!':
.fi
.RE
.P
-The host operating system is determined by \fBprocess\.platform\fR
+The host operating system is determined by \fBprocess\.platform\fP
.P
It is allowed to both blacklist and whitelist, although there isn't any
good reason to do this\.
@@ -752,7 +804,7 @@ you can specify which ones\.
.fi
.RE
.P
-Like the \fBos\fR option, you can also blacklist architectures:
+Like the \fBos\fP option, you can also blacklist architectures:
.P
.RS 2
.nf
@@ -760,54 +812,54 @@ Like the \fBos\fR option, you can also blacklist architectures:
.fi
.RE
.P
-The host architecture is determined by \fBprocess\.arch\fR
+The host architecture is determined by \fBprocess\.arch\fP
.SH preferGlobal
.P
If your package is primarily a command\-line application that should be
-installed globally, then set this value to \fBtrue\fR to provide a warning
+installed globally, then set this value to \fBtrue\fP to provide a warning
if it is installed locally\.
.P
It doesn't actually prevent users from installing it locally, but it
does help prevent some confusion if it doesn't work as expected\.
.SH private
.P
-If you set \fB"private": true\fR in your package\.json, then npm will refuse
+If you set \fB"private": true\fP in your package\.json, then npm will refuse
to publish it\.
.P
This is a way to prevent accidental publication of private repositories\. If
you would like to ensure that a given package is only ever published to a
specific registry (for example, an internal registry), then use the
-\fBpublishConfig\fR dictionary described below to override the \fBregistry\fR config
+\fBpublishConfig\fP dictionary described below to override the \fBregistry\fP config
param at publish\-time\.
.SH publishConfig
.P
-This is a set of config values that will be used at publish\-time\. It's
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with "latest" or published to
-the global public registry by default\.
+This is a set of config values that will be used at publish\-time\. It's
+especially handy if you want to set the tag, registry or access, so that
+you can ensure that a given package is not tagged with "latest", published
+to the global public registry or that a scoped module is private by default\.
.P
-Any config values can be overridden, but of course only "tag" and
-"registry" probably matter for the purposes of publishing\.
+Any config values can be overridden, but of course only "tag", "registry" and
+"access" probably matter for the purposes of publishing\.
.P
-See npm help 7 \fBnpm\-config\fR to see the list of config options that can be
+See npm help 7 \fBnpm\-config\fP to see the list of config options that can be
overridden\.
.SH DEFAULT VALUES
.P
npm will default some values based on package contents\.
.RS 0
.IP \(bu 2
-\fB"scripts": {"start": "node server\.js"}\fR
-If there is a \fBserver\.js\fR file in the root of your package, then npm
-will default the \fBstart\fR command to \fBnode server\.js\fR\|\.
+\fB"scripts": {"start": "node server\.js"}\fP
+If there is a \fBserver\.js\fP file in the root of your package, then npm
+will default the \fBstart\fP command to \fBnode server\.js\fP\|\.
.IP \(bu 2
-\fB"scripts":{"preinstall": "node\-gyp rebuild"}\fR
-If there is a \fBbinding\.gyp\fR file in the root of your package, npm will
-default the \fBpreinstall\fR command to compile using node\-gyp\.
+\fB"scripts":{"preinstall": "node\-gyp rebuild"}\fP
+If there is a \fBbinding\.gyp\fP file in the root of your package, npm will
+default the \fBpreinstall\fP command to compile using node\-gyp\.
.IP \(bu 2
-\fB"contributors": [\.\.\.]\fR
-If there is an \fBAUTHORS\fR file in the root of your package, npm will
-treat each line as a \fBName <email> (url)\fR format, where email and url
-are optional\. Lines which start with a \fB#\fR or are blank, will be
+\fB"contributors": [\.\.\.]\fP
+If there is an \fBAUTHORS\fP file in the root of your package, npm will
+treat each line as a \fBName <email> (url)\fP format, where email and url
+are optional\. Lines which start with a \fB#\fP or are blank, will be
ignored\.
.RE
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 172b909b8..5088566d8 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,12 +1,12 @@
-.TH "NPMRC" "5" "June 2015" "" ""
+.TH "NPMRC" "5" "August 2015" "" ""
.SH "NAME"
\fBnpmrc\fR \- The npm config files
.SH DESCRIPTION
.P
npm gets its config settings from the command line, environment
-variables, and \fBnpmrc\fR files\.
+variables, and \fBnpmrc\fP files\.
.P
-The \fBnpm config\fR command can be used to update and edit the contents
+The \fBnpm config\fP command can be used to update and edit the contents
of the user and global npmrc files\.
.P
For a list of available configuration options, see npm help 7 config\.
@@ -25,9 +25,9 @@ npm builtin config file (/path/to/npm/npmrc)
.RE
.P
-All npm config files are an ini\-formatted list of \fBkey = value\fR
+All npm config files are an ini\-formatted list of \fBkey = value\fP
parameters\. Environment variables can be replaced using
-\fB${VARIABLE_NAME}\fR\|\. For example:
+\fB${VARIABLE_NAME}\fP\|\. For example:
.P
.RS 2
.nf
@@ -49,35 +49,38 @@ key[] = "second value"
.fi
.RE
.P
-\fBNOTE:\fR Because local (per\-project or per\-user) \fB\|\.npmrc\fR files can contain
+\fBNOTE:\fR Because local (per\-project or per\-user) \fB\|\.npmrc\fP files can contain
sensitive credentials, they must be readable and writable \fIonly\fR by your user
-account (i\.e\. must have a mode of \fB0600\fR), otherwise they \fIwill be ignored by
+account (i\.e\. must have a mode of \fB0600\fP), otherwise they \fIwill be ignored by
npm!\fR
.SS Per\-project config file
.P
-When working locally in a project, a \fB\|\.npmrc\fR file in the root of the
-project (ie, a sibling of \fBnode_modules\fR and \fBpackage\.json\fR) will set
+When working locally in a project, a \fB\|\.npmrc\fP file in the root of the
+project (ie, a sibling of \fBnode_modules\fP and \fBpackage\.json\fP) will set
config values specific to this project\.
.P
Note that this only applies to the root of the project that you're
running npm in\. It has no effect when your module is published\. For
example, you can't publish a module that forces itself to install
globally, or in a different location\.
+.P
+Additionally, this file is not read in global mode, such as when running
+\fBnpm install \-g\fP\|\.
.SS Per\-user config file
.P
-\fB$HOME/\.npmrc\fR (or the \fBuserconfig\fR param, if set in the environment
+\fB$HOME/\.npmrc\fP (or the \fBuserconfig\fP param, if set in the environment
or on the command line)
.SS Global config file
.P
-\fB$PREFIX/etc/npmrc\fR (or the \fBglobalconfig\fR param, if set above):
-This file is an ini\-file formatted list of \fBkey = value\fR parameters\.
+\fB$PREFIX/etc/npmrc\fP (or the \fBglobalconfig\fP param, if set above):
+This file is an ini\-file formatted list of \fBkey = value\fP parameters\.
Environment variables can be replaced as above\.
.SS Built\-in config file
.P
-\fBpath/to/npm/itself/npmrc\fR
+\fBpath/to/npm/itself/npmrc\fP
.P
This is an unchangeable "builtin" configuration file that npm keeps
-consistent across updates\. Set fields in here using the \fB\|\./configure\fR
+consistent across updates\. Set fields in here using the \fB\|\./configure\fP
script that comes with npm\. This is primarily for distribution
maintainers to override default configs in a standard and consistent
manner\.
diff --git a/deps/npm/man/man5/package.json.5 b/deps/npm/man/man5/package.json.5
index ed3ee7c24..f309144b5 100644
--- a/deps/npm/man/man5/package.json.5
+++ b/deps/npm/man/man5/package.json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\.JSON" "5" "June 2015" "" ""
+.TH "PACKAGE\.JSON" "5" "August 2015" "" ""
.SH "NAME"
\fBpackage.json\fR \- Specifics of npm's package\.json handling
.SH DESCRIPTION
@@ -7,7 +7,7 @@ This document is all you need to know about what's required in your package\.jso
file\. It must be actual JSON, not just a JavaScript object literal\.
.P
A lot of the behavior described in this document is affected by the config
-settings described in npm help 7 \fBnpm\-config\fR\|\.
+settings described in npm help 7 \fBnpm\-config\fP\|\.
.SH name
.P
The \fImost\fR important things in your package\.json are the name and version fields\.
@@ -50,8 +50,8 @@ already, before you get too attached to it\. https://www\.npmjs\.com/
.RE
.P
-A name can be optionally prefixed by a scope, e\.g\. \fB@myorg/mypackage\fR\|\. See
-npm help 7 \fBnpm\-scope\fR for more detail\.
+A name can be optionally prefixed by a scope, e\.g\. \fB@myorg/mypackage\fP\|\. See
+npm help 7 \fBnpm\-scope\fP for more detail\.
.SH version
.P
The \fImost\fR important things in your package\.json are the name and version fields\.
@@ -62,17 +62,17 @@ changes to the version\.
.P
Version must be parseable by
node\-semver \fIhttps://github\.com/isaacs/node\-semver\fR, which is bundled
-with npm as a dependency\. (\fBnpm install semver\fR to use it yourself\.)
+with npm as a dependency\. (\fBnpm install semver\fP to use it yourself\.)
.P
More on version numbers and ranges at npm help 7 semver\.
.SH description
.P
Put a description in it\. It's a string\. This helps people discover your
-package, as it's listed in \fBnpm search\fR\|\.
+package, as it's listed in \fBnpm search\fP\|\.
.SH keywords
.P
Put keywords in it\. It's an array of strings\. This helps people
-discover your package as it's listed in \fBnpm search\fR\|\.
+discover your package as it's listed in \fBnpm search\fP\|\.
.SH homepage
.P
The url to the project homepage\.
@@ -101,7 +101,7 @@ It should look like this:
You can specify either one or both values\. If you want to provide only a url,
you can specify the value for "bugs" as a simple string instead of an object\.
.P
-If a url is provided, it will be used by the \fBnpm bugs\fR command\.
+If a url is provided, it will be used by the \fBnpm bugs\fP command\.
.SH license
.P
You should specify a license for your package so that people know how they are
@@ -134,11 +134,11 @@ you are using a custom license, use the following valid SPDX expression:
.P
.RS 2
.nf
-{ "license" : "LicenseRef\-LICENSE" }
+{ "license" : "SEE LICENSE IN <filename>" }
.fi
.RE
.P
-Then include a LICENSE file at the top level of the package\.
+Then include a file named \fB<filename>\fP at the top level of the package\.
.P
Some old packages used license objects or a "licenses" property containing an
array of license objects:
@@ -175,6 +175,17 @@ Those styles are now deprecated\. Instead, use SPDX expressions, like this:
{ "license": "(MIT OR Apache\-2\.0)" }
.fi
.RE
+.P
+Finally, if you do not wish to grant others the right to use a private or
+unpublished package under any terms:
+.P
+.RS 2
+.nf
+{ "license": "UNLICENSED"}
+.fi
+.RE
+.P
+Consider also setting \fB"private": true\fP to prevent accidental publication\.
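As a minimal sketch combining the two settings just described (the package name is hypothetical), an unpublishable in-house package could declare:

    {
      "name": "my-internal-tool",
      "version": "1.0.0",
      "license": "UNLICENSED",
      "private": true
    }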
.SH people fields: author, contributors
.P
The "author" is one person\. "contributors" is an array of people\. A "person"
@@ -210,11 +221,49 @@ You can also provide a "\.npmignore" file in the root of your package,
which will keep files from being included, even if they would be picked
up by the files array\. The "\.npmignore" file works just like a
"\.gitignore"\.
+.P
+Certain files are always included, regardless of settings:
+.RS 0
+.IP \(bu 2
+\fBpackage\.json\fP
+.IP \(bu 2
+\fBREADME\fP (and its variants)
+.IP \(bu 2
+\fBCHANGELOG\fP (and its variants)
+.IP \(bu 2
+\fBLICENSE\fP / \fBLICENCE\fP
+
+.RE
+.P
+Conversely, some files are always ignored:
+.RS 0
+.IP \(bu 2
+\fB\|\.git\fP
+.IP \(bu 2
+\fBCVS\fP
+.IP \(bu 2
+\fB\|\.svn\fP
+.IP \(bu 2
+\fB\|\.hg\fP
+.IP \(bu 2
+\fB\|\.lock\-wscript\fP
+.IP \(bu 2
+\fB\|\.wafpickle\-N\fP
+.IP \(bu 2
+\fB*\.swp\fP
+.IP \(bu 2
+\fB\|\.DS_Store\fP
+.IP \(bu 2
+\fB\|\._*\fP
+.IP \(bu 2
+\fBnpm\-debug\.log\fP
+
+.RE
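For instance, a package that only wants to ship its built output might whitelist it with a "files" array along these lines (directory names are illustrative); package.json, README, CHANGELOG and LICENSE are still included automatically, and the always-ignored entries above stay out regardless:

    {
      "files": [
        "lib/",
        "bin/cli.js"
      ]
    }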
.SH main
.P
The main field is a module ID that is the primary entry point to your program\.
-That is, if your package is named \fBfoo\fR, and a user installs it, and then does
-\fBrequire("foo")\fR, then your main module's exports object will be returned\.
+That is, if your package is named \fBfoo\fP, and a user installs it, and then does
+\fBrequire("foo")\fP, then your main module's exports object will be returned\.
.P
This should be a module ID relative to the root of your package folder\.
.P
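A short sketch of the main field (name and path are hypothetical): with the entry below, require("foo") resolves to ./lib/foo.js and returns its exports object.

    {
      "name": "foo",
      "main": "./lib/foo.js"
    }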
@@ -226,9 +275,9 @@ A lot of packages have one or more executable files that they'd like to
install into the PATH\. npm makes this pretty easy (in fact, it uses this
feature to install the "npm" executable\.)
.P
-To use this, supply a \fBbin\fR field in your package\.json which is a map of
+To use this, supply a \fBbin\fP field in your package\.json which is a map of
command name to local file name\. On install, npm will symlink that file into
-\fBprefix/bin\fR for global installs, or \fB\|\./node_modules/\.bin/\fR for local
+\fBprefix/bin\fP for global installs, or \fB\|\./node_modules/\.bin/\fP for local
installs\.
.P
For example, myapp could have this:
@@ -239,8 +288,8 @@ For example, myapp could have this:
.fi
.RE
.P
-So, when you install myapp, it'll create a symlink from the \fBcli\.js\fR script to
-\fB/usr/local/bin/myapp\fR\|\.
+So, when you install myapp, it'll create a symlink from the \fBcli\.js\fP script to
+\fB/usr/local/bin/myapp\fP\|\.
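The mapping that produces that symlink could look roughly like this, assuming the script lives at ./cli.js as in the sentence above:

    {
      "name": "myapp",
      "bin": { "myapp": "./cli.js" }
    }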
.P
If you have a single executable, and its name should be the name
of the package, then you can just supply it as a string\. For example:
@@ -265,10 +314,10 @@ would be the same as this:
.SH man
.P
Specify either a single file or an array of filenames to put in place for the
-\fBman\fR program to find\.
+\fBman\fP program to find\.
.P
If only a single file is provided, then it's installed such that it is the
-result from \fBman <pkgname>\fR, regardless of its actual filename\. For example:
+result from \fBman <pkgname>\fP, regardless of its actual filename\. For example:
.P
.RS 2
.nf
@@ -281,7 +330,7 @@ result from \fBman <pkgname>\fR, regardless of its actual filename\. For exampl
.fi
.RE
.P
-would link the \fB\|\./man/doc\.1\fR file in such that it is the target for \fBman foo\fR
+would link the \fB\|\./man/doc\.1\fP file in such that it is the target for \fBman foo\fP
.P
If the filename doesn't start with the package name, then it's prefixed\.
So, this:
@@ -297,9 +346,9 @@ So, this:
.fi
.RE
.P
-will create files to do \fBman foo\fR and \fBman foo\-bar\fR\|\.
+will create files to do \fBman foo\fP and \fBman foo\-bar\fP\|\.
.P
-Man files must end with a number, and optionally a \fB\|\.gz\fR suffix if they are
+Man files must end with a number, and optionally a \fB\|\.gz\fP suffix if they are
compressed\. The number dictates which man section the file is installed into\.
.P
.RS 2
@@ -313,11 +362,11 @@ compressed\. The number dictates which man section the file is installed into\.
.fi
.RE
.P
-will create entries for \fBman foo\fR and \fBman 2 foo\fR
+will create entries for \fBman foo\fP and \fBman 2 foo\fP
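Putting the two rules together, a man array along these lines (filenames assumed) yields both man foo and man 2 foo:

    {
      "name": "foo",
      "man": [
        "./man/foo.1",
        "./man/foo.2"
      ]
    }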
.SH directories
.P
The CommonJS Packages \fIhttp://wiki\.commonjs\.org/wiki/Packages/1\.0\fR spec details a
-few ways that you can indicate the structure of your package using a \fBdirectories\fR
+few ways that you can indicate the structure of your package using a \fBdirectories\fP
object\. If you look at npm's package\.json \fIhttps://registry\.npmjs\.org/npm/latest\fR,
you'll see that it has directories for doc, lib, and man\.
.P
@@ -328,10 +377,13 @@ Tell people where the bulk of your library is\. Nothing special is done
with the lib folder in any way, but it's useful meta info\.
.SS directories\.bin
.P
-If you specify a \fBbin\fR directory, then all the files in that folder will
-be added as children of the \fBbin\fR path\.
+If you specify a \fBbin\fP directory in \fBdirectories\.bin\fP, all the files in
+that folder will be added\.
.P
-If you have a \fBbin\fR path already, then this has no effect\.
+Because of the way the \fBbin\fP directive works, specifying both a
+\fBbin\fP path and setting \fBdirectories\.bin\fP is an error\. If you want to
+specify individual files, use \fBbin\fP, and for all the files in an
+existing \fBbin\fP directory, use \fBdirectories\.bin\fP\|\.
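For example (folder name assumed), pointing directories.bin at a folder is shorthand for listing each file in that folder under bin:

    {
      "directories": { "bin": "./bin" }
    }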
.SS directories\.man
.P
A folder that is full of man pages\. Sugar to generate a "man" array by
@@ -346,7 +398,7 @@ Put example scripts in here\. Someday, it might be exposed in some clever way\.
.SH repository
.P
Specify the place where your code lives\. This is helpful for people who
-want to contribute\. If the git repo is on GitHub, then the \fBnpm docs\fR
+want to contribute\. If the git repo is on GitHub, then the \fBnpm docs\fP
command will be able to find you\.
.P
Do it like this:
@@ -370,7 +422,7 @@ directly to a VCS program without any modification\. It should not be a url to
html project page that you put in your browser\. It's for computers\.
.P
For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same
-shortcut syntax you use for \fBnpm install\fR:
+shortcut syntax you use for \fBnpm install\fP:
.P
.RS 2
.nf
@@ -389,7 +441,7 @@ The "scripts" property is a dictionary containing script commands that are run
at various times in the lifecycle of your package\. The key is the lifecycle
event, and the value is the command to run at that point\.
.P
-See npm help 7 \fBnpm\-scripts\fR to find out more about writing package scripts\.
+See npm help 7 \fBnpm\-scripts\fP to find out more about writing package scripts\.
.SH config
.P
A "config" object can be used to set configuration parameters used in package
@@ -404,10 +456,10 @@ following:
.RE
.P
and then had a "start" command that then referenced the
-\fBnpm_package_config_port\fR environment variable, then the user could
-override that by doing \fBnpm config set foo:port 8001\fR\|\.
+\fBnpm_package_config_port\fP environment variable, then the user could
+override that by doing \fBnpm config set foo:port 8001\fP\|\.
.P
-See npm help 7 \fBnpm\-config\fR and npm help 7 \fBnpm\-scripts\fR for more on package
+See npm help 7 \fBnpm\-config\fP and npm help 7 \fBnpm\-scripts\fP for more on package
configs\.
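A small sketch tying this together (values illustrative): with the snippet below, scripts see npm_package_config_port=8080 until a user overrides it via npm config set foo:port 8001.

    {
      "name": "foo",
      "config": { "port": "8080" },
      "scripts": { "start": "node server.js" }
    }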
.SH dependencies
.P
@@ -417,44 +469,44 @@ space\-separated descriptors\. Dependencies can also be identified with a
tarball or git URL\.
.P
\fBPlease do not put test harnesses or transpilers in your
-\fBdependencies\fR object\.\fR See \fBdevDependencies\fR, below\.
+\fBdependencies\fP object\.\fR See \fBdevDependencies\fP, below\.
.P
See npm help 7 semver for more details about specifying version ranges\.
.RS 0
.IP \(bu 2
-\fBversion\fR Must match \fBversion\fR exactly
+\fBversion\fP Must match \fBversion\fP exactly
.IP \(bu 2
-\fB>version\fR Must be greater than \fBversion\fR
+\fB>version\fP Must be greater than \fBversion\fP
.IP \(bu 2
-\fB>=version\fR etc
+\fB>=version\fP etc
.IP \(bu 2
-\fB<version\fR
+\fB<version\fP
.IP \(bu 2
-\fB<=version\fR
+\fB<=version\fP
.IP \(bu 2
-\fB~version\fR "Approximately equivalent to version" See npm help 7 semver
+\fB~version\fP "Approximately equivalent to version" See npm help 7 semver
.IP \(bu 2
-\fB^version\fR "Compatible with version" See npm help 7 semver
+\fB^version\fP "Compatible with version" See npm help 7 semver
.IP \(bu 2
-\fB1\.2\.x\fR 1\.2\.0, 1\.2\.1, etc\., but not 1\.3\.0
+\fB1\.2\.x\fP 1\.2\.0, 1\.2\.1, etc\., but not 1\.3\.0
.IP \(bu 2
-\fBhttp://\.\.\.\fR See 'URLs as Dependencies' below
+\fBhttp://\.\.\.\fP See 'URLs as Dependencies' below
.IP \(bu 2
-\fB*\fR Matches any version
+\fB*\fP Matches any version
.IP \(bu 2
-\fB""\fR (just an empty string) Same as \fB*\fR
+\fB""\fP (just an empty string) Same as \fB*\fP
.IP \(bu 2
-\fBversion1 \- version2\fR Same as \fB>=version1 <=version2\fR\|\.
+\fBversion1 \- version2\fP Same as \fB>=version1 <=version2\fP\|\.
.IP \(bu 2
-\fBrange1 || range2\fR Passes if either range1 or range2 are satisfied\.
+\fBrange1 || range2\fP Passes if either range1 or range2 are satisfied\.
.IP \(bu 2
-\fBgit\.\.\.\fR See 'Git URLs as Dependencies' below
+\fBgit\.\.\.\fP See 'Git URLs as Dependencies' below
.IP \(bu 2
-\fBuser/repo\fR See 'GitHub URLs' below
+\fBuser/repo\fP See 'GitHub URLs' below
.IP \(bu 2
-\fBtag\fR A specific version tagged and published as \fBtag\fR See npm help \fBnpm\-tag\fR
+\fBtag\fP A specific version tagged and published as \fBtag\fP See npm help \fBnpm\-tag\fP
.IP \(bu 2
-\fBpath/path/path\fR See Local Paths below
+\fBpath/path/path\fP See Local Paths below
.RE
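A combined sketch of several of the range styles listed above (package names are made up):

    {
      "dependencies": {
        "foo": "1.0.0 - 2.9999.9999",
        "bar": ">=1.0.2 <2.1.2",
        "baz": "~1.2.3",
        "boo": "2.x",
        "asd": "http://asdf.com/asdf.tar.gz",
        "dyl": "file:../dyl"
      }
    }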
.P
@@ -499,12 +551,12 @@ git+https://user@hostname/project/blah\.git#commit\-ish
.fi
.RE
.P
-The \fBcommit\-ish\fR can be any tag, sha, or branch which can be supplied as
-an argument to \fBgit checkout\fR\|\. The default is \fBmaster\fR\|\.
+The \fBcommit\-ish\fP can be any tag, sha, or branch which can be supplied as
+an argument to \fBgit checkout\fP\|\. The default is \fBmaster\fP\|\.
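Such a URL goes directly into the dependencies map, e.g. (host, project and tag are placeholders):

    {
      "dependencies": {
        "blah": "git+https://user@hostname/project/blah.git#v1.0.0"
      }
    }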
.SH GitHub URLs
.P
As of version 1\.1\.65, you can refer to GitHub urls as just "foo":
-"user/foo\-project"\. Just as with git URLs, a \fBcommit\-ish\fR suffix can be
+"user/foo\-project"\. Just as with git URLs, a \fBcommit\-ish\fP suffix can be
included\. For example:
.P
.RS 2
@@ -522,7 +574,7 @@ included\. For example:
.SH Local Paths
.P
As of version 2\.0\.0 you can provide a path to a local directory that contains a
-package\. Local paths can be saved using \fBnpm install \-\-save\fR, using any of
+package\. Local paths can be saved using \fBnpm install \-\-save\fP, using any of
these forms:
.P
.RS 2
@@ -535,7 +587,7 @@ these forms:
.RE
.P
in which case they will be normalized to a relative path and added to your
-\fBpackage\.json\fR\|\. For example:
+\fBpackage\.json\fP\|\. For example:
.P
.RS 2
.nf
@@ -558,15 +610,15 @@ If someone is planning on downloading and using your module in their
program, then they probably don't want or need to download and build
the external test or documentation framework that you use\.
.P
-In this case, it's best to map these additional items in a \fBdevDependencies\fR
+In this case, it's best to map these additional items in a \fBdevDependencies\fP
object\.
.P
-These things will be installed when doing \fBnpm link\fR or \fBnpm install\fR
+These things will be installed when doing \fBnpm link\fP or \fBnpm install\fP
from the root of a package, and can be managed like any other npm
-configuration param\. See npm help 7 \fBnpm\-config\fR for more on the topic\.
+configuration param\. See npm help 7 \fBnpm\-config\fP for more on the topic\.
.P
For build steps that are not platform\-specific, such as compiling
-CoffeeScript or other languages to JavaScript, use the \fBprepublish\fR
+CoffeeScript or other languages to JavaScript, use the \fBprepublish\fP
script to do this, and make the required package a devDependency\.
.P
For example:
@@ -587,14 +639,14 @@ For example:
.fi
.RE
.P
-The \fBprepublish\fR script will be run before publishing, so that users
+The \fBprepublish\fP script will be run before publishing, so that users
can consume the functionality without requiring them to compile it
-themselves\. In dev mode (ie, locally running \fBnpm install\fR), it'll
+themselves\. In dev mode (ie, locally running \fBnpm install\fP), it'll
run this script as well, so that you can test it easily\.
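A sketch of that pattern (package and file names assumed): the compiler is only a devDependency, and prepublish invokes it so published consumers get the compiled output.

    {
      "devDependencies": { "coffee-script": "~1.6.3" },
      "scripts": {
        "prepublish": "coffee -o lib/ -c src/waza.coffee"
      }
    }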
.SH peerDependencies
.P
In some cases, you want to express the compatibility of your package with a
-host tool or library, while not necessarily doing a \fBrequire\fR of this host\.
+host tool or library, while not necessarily doing a \fBrequire\fP of this host\.
This is usually referred to as a \fIplugin\fR\|\. Notably, your module may be exposing
a specific interface, expected and specified by the host documentation\.
.P
@@ -612,8 +664,8 @@ For example:
.fi
.RE
.P
-This ensures your package \fBtea\-latte\fR can be installed \fIalong\fR with the second
-major version of the host package \fBtea\fR only\. \fBnpm install tea\-latte\fR could
+This ensures your package \fBtea\-latte\fP can be installed \fIalong\fR with the second
+major version of the host package \fBtea\fP only\. \fBnpm install tea\-latte\fP could
possibly yield the following dependency graph:
.P
.RS 2
@@ -623,7 +675,7 @@ possibly yield the following dependency graph:
.fi
.RE
.P
-\fBNOTE: npm versions 1 and 2 will automatically install \fBpeerDependencies\fR if
+\fBNOTE: npm versions 1 and 2 will automatically install \fBpeerDependencies\fP if
they are not explicitly depended upon higher in the dependency tree\. In the
next major version of npm (npm@3), this will no longer be the case\. You will
receive a warning that the peerDependency is not installed instead\.\fR The
@@ -636,19 +688,19 @@ possible, and not to lock it down to specific patch versions\.
.P
Assuming the host complies with semver \fIhttp://semver\.org/\fR, only changes in
the host package's major version will break your plugin\. Thus, if you've worked
-with every 1\.x version of the host package, use \fB"^1\.0"\fR or \fB"1\.x"\fR to express
-this\. If you depend on features introduced in 1\.5\.2, use \fB">= 1\.5\.2 < 2"\fR\|\.
+with every 1\.x version of the host package, use \fB"^1\.0"\fP or \fB"1\.x"\fP to express
+this\. If you depend on features introduced in 1\.5\.2, use \fB">= 1\.5\.2 < 2"\fP\|\.
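The tea-latte case mentioned above would be expressed roughly like this (version numbers illustrative):

    {
      "name": "tea-latte",
      "version": "1.3.5",
      "peerDependencies": { "tea": "2.x" }
    }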
.SH bundledDependencies
.P
Array of package names that will be bundled when publishing the package\.
.P
-If this is spelled \fB"bundleDependencies"\fR, then that is also honorable\.
+If this is spelled \fB"bundleDependencies"\fP, then that is also honorable\.
.SH optionalDependencies
.P
If a dependency can be used, but you would like npm to proceed if it cannot be
-found or fails to install, then you may put it in the \fBoptionalDependencies\fR
+found or fails to install, then you may put it in the \fBoptionalDependencies\fP
object\. This is a map of package name to version or url, just like the
-\fBdependencies\fR object\. The difference is that build failures do not cause
+\fBdependencies\fP object\. The difference is that build failures do not cause
installation to fail\.
.P
It is still your program's responsibility to handle the lack of the
@@ -674,8 +726,8 @@ if (foo) {
.fi
.RE
.P
-Entries in \fBoptionalDependencies\fR will override entries of the same name in
-\fBdependencies\fR, so it's usually best to only put in one place\.
+Entries in \fBoptionalDependencies\fP will override entries of the same name in
+\fBdependencies\fP, so it's usually best to only put it in one place\.
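A minimal sketch (name and range assumed); the consuming code still has to cope when the module turns out to be missing:

    {
      "optionalDependencies": { "foo": "1.x" }
    }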
.SH engines
.P
You can specify the version of node that your stuff works on:
@@ -702,16 +754,16 @@ are capable of properly installing your program\. For example:
.fi
.RE
.P
-Note that, unless the user has set the \fBengine\-strict\fR config flag, this
+Note that, unless the user has set the \fBengine\-strict\fP config flag, this
field is advisory only\.
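For example (ranges illustrative), both node and npm requirements use the same version-range syntax as dependencies:

    {
      "engines": {
        "node": ">=0.10.3 <0.12",
        "npm": "~1.0.20"
      }
    }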
.SH engineStrict
.P
\fBNOTE: This feature is deprecated and will be removed in npm 3\.0\.0\.\fR
.P
If you are sure that your module will \fIdefinitely not\fR run properly on
-versions of Node/npm other than those specified in the \fBengines\fR object,
-then you can set \fB"engineStrict": true\fR in your package\.json file\.
-This will override the user's \fBengine\-strict\fR config setting\.
+versions of Node/npm other than those specified in the \fBengines\fP object,
+then you can set \fB"engineStrict": true\fP in your package\.json file\.
+This will override the user's \fBengine\-strict\fP config setting\.
.P
Please do not do this unless you are really very very sure\. If your
engines object is something overly restrictive, you can quite easily and
@@ -737,7 +789,7 @@ just prepend the blacklisted os with a '!':
.fi
.RE
.P
-The host operating system is determined by \fBprocess\.platform\fR
+The host operating system is determined by \fBprocess\.platform\fP
.P
It is allowed to both blacklist and whitelist, although there isn't any
good reason to do this\.
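For instance, a package limited to the platforms it was tested on (values are matched against process.platform) might declare the whitelist below; a blacklist entry would be written as "!win32" instead.

    {
      "os": [ "darwin", "linux" ]
    }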
@@ -752,7 +804,7 @@ you can specify which ones\.
.fi
.RE
.P
-Like the \fBos\fR option, you can also blacklist architectures:
+Like the \fBos\fP option, you can also blacklist architectures:
.P
.RS 2
.nf
@@ -760,54 +812,54 @@ Like the \fBos\fR option, you can also blacklist architectures:
.fi
.RE
.P
-The host architecture is determined by \fBprocess\.arch\fR
+The host architecture is determined by \fBprocess\.arch\fP
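Similarly for architectures (values are matched against process.arch), with "!arm"-style entries for a blacklist:

    {
      "cpu": [ "x64", "ia32" ]
    }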
.SH preferGlobal
.P
If your package is primarily a command\-line application that should be
-installed globally, then set this value to \fBtrue\fR to provide a warning
+installed globally, then set this value to \fBtrue\fP to provide a warning
if it is installed locally\.
.P
It doesn't actually prevent users from installing it locally, but it
does help prevent some confusion if it doesn't work as expected\.
.SH private
.P
-If you set \fB"private": true\fR in your package\.json, then npm will refuse
+If you set \fB"private": true\fP in your package\.json, then npm will refuse
to publish it\.
.P
This is a way to prevent accidental publication of private repositories\. If
you would like to ensure that a given package is only ever published to a
specific registry (for example, an internal registry), then use the
-\fBpublishConfig\fR dictionary described below to override the \fBregistry\fR config
+\fBpublishConfig\fP dictionary described below to override the \fBregistry\fP config
param at publish\-time\.
.SH publishConfig
.P
-This is a set of config values that will be used at publish\-time\. It's
-especially handy if you want to set the tag or registry, so that you can
-ensure that a given package is not tagged with "latest" or published to
-the global public registry by default\.
+This is a set of config values that will be used at publish\-time\. It's
+especially handy if you want to set the tag, registry or access, so that
+you can ensure that a given package is not tagged with "latest", published
+to the global public registry or that a scoped module is private by default\.
.P
-Any config values can be overridden, but of course only "tag" and
-"registry" probably matter for the purposes of publishing\.
+Any config values can be overridden, but of course only "tag", "registry" and
+"access" probably matter for the purposes of publishing\.
.P
-See npm help 7 \fBnpm\-config\fR to see the list of config options that can be
+See npm help 7 \fBnpm\-config\fP to see the list of config options that can be
overridden\.
.SH DEFAULT VALUES
.P
npm will default some values based on package contents\.
.RS 0
.IP \(bu 2
-\fB"scripts": {"start": "node server\.js"}\fR
-If there is a \fBserver\.js\fR file in the root of your package, then npm
-will default the \fBstart\fR command to \fBnode server\.js\fR\|\.
+\fB"scripts": {"start": "node server\.js"}\fP
+If there is a \fBserver\.js\fP file in the root of your package, then npm
+will default the \fBstart\fP command to \fBnode server\.js\fP\|\.
.IP \(bu 2
-\fB"scripts":{"preinstall": "node\-gyp rebuild"}\fR
-If there is a \fBbinding\.gyp\fR file in the root of your package, npm will
-default the \fBpreinstall\fR command to compile using node\-gyp\.
+\fB"scripts":{"preinstall": "node\-gyp rebuild"}\fP
+If there is a \fBbinding\.gyp\fP file in the root of your package, npm will
+default the \fBpreinstall\fP command to compile using node\-gyp\.
.IP \(bu 2
-\fB"contributors": [\.\.\.]\fR
-If there is an \fBAUTHORS\fR file in the root of your package, npm will
-treat each line as a \fBName <email> (url)\fR format, where email and url
-are optional\. Lines which start with a \fB#\fR or are blank, will be
+\fB"contributors": [\.\.\.]\fP
+If there is an \fBAUTHORS\fP file in the root of your package, npm will
+treat each line as a \fBName <email> (url)\fP format, where email and url
+are optional\. Lines which start with a \fB#\fP or are blank, will be
ignored\.
.RE
diff --git a/deps/npm/man/man7/npm-coding-style.7 b/deps/npm/man/man7/npm-coding-style.7
index ce9923214..e8487a91f 100644
--- a/deps/npm/man/man7/npm-coding-style.7
+++ b/deps/npm/man/man7/npm-coding-style.7
@@ -1,4 +1,4 @@
-.TH "NPM\-CODING\-STYLE" "7" "June 2015" "" ""
+.TH "NPM\-CODING\-STYLE" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-coding-style\fR \- npm's "funny" coding style
.SH DESCRIPTION
@@ -71,14 +71,14 @@ while (foo) {
Don't use them except in four situations:
.RS 0
.IP \(bu 2
-\fBfor (;;)\fR loops\. They're actually required\.
+\fBfor (;;)\fP loops\. They're actually required\.
.IP \(bu 2
-null loops like: \fBwhile (something) ;\fR (But you'd better have a good
+null loops like: \fBwhile (something) ;\fP (But you'd better have a good
reason for doing that\.)
.IP \(bu 2
-\fBcase "foo": doSomething(); break\fR
+\fBcase "foo": doSomething(); break\fP
.IP \(bu 2
-In front of a leading \fB(\fR or \fB[\fR at the start of the line\.
+In front of a leading \fB(\fP or \fB[\fP at the start of the line\.
This prevents the expression from being interpreted
as a function call or property access, respectively\.
@@ -101,7 +101,7 @@ for (var i = 0; i < 10; i ++) {
.fi
.RE
.P
-Note that starting lines with \fB\-\fR and \fB+\fR also should be prefixed
+Note that starting lines with \fB\-\fP and \fB+\fP also should be prefixed
with a semicolon, but this is much less common\.
.SH Comma First
.P
@@ -161,20 +161,20 @@ logging the same object over and over again is not helpful\. Logs should
report what's happening so that it's easier to track down where a fault
occurs\.
.P
-Use appropriate log levels\. See npm help 7 \fBnpm\-config\fR and search for
+Use appropriate log levels\. See npm help 7 \fBnpm\-config\fP and search for
"loglevel"\.
.SH Case, naming, etc\.
.P
-Use \fBlowerCamelCase\fR for multiword identifiers when they refer to objects,
+Use \fBlowerCamelCase\fP for multiword identifiers when they refer to objects,
functions, methods, properties, or anything not specified in this section\.
.P
-Use \fBUpperCamelCase\fR for class names (things that you'd pass to "new")\.
+Use \fBUpperCamelCase\fP for class names (things that you'd pass to "new")\.
.P
-Use \fBall\-lower\-hyphen\-css\-case\fR for multiword filenames and config keys\.
+Use \fBall\-lower\-hyphen\-css\-case\fP for multiword filenames and config keys\.
.P
Use named functions\. They make stack traces easier to follow\.
.P
-Use \fBCAPS_SNAKE_CASE\fR for constants, things that should never change
+Use \fBCAPS_SNAKE_CASE\fP for constants, things that should never change
and are rarely used\.
.P
Use a single uppercase letter for function names where the function
@@ -182,12 +182,12 @@ would normally be anonymous, but needs to call itself recursively\. It
makes it clear that it's a "throwaway" function\.
.SH null, undefined, false, 0
.P
-Boolean variables and functions should always be either \fBtrue\fR or
-\fBfalse\fR\|\. Don't set it to 0 unless it's supposed to be a number\.
+Boolean variables and functions should always be either \fBtrue\fP or
+\fBfalse\fP\|\. Don't set it to 0 unless it's supposed to be a number\.
.P
-When something is intentionally missing or removed, set it to \fBnull\fR\|\.
+When something is intentionally missing or removed, set it to \fBnull\fP\|\.
.P
-Don't set things to \fBundefined\fR\|\. Reserve that value to mean "not yet
+Don't set things to \fBundefined\fP\|\. Reserve that value to mean "not yet
set to anything\."
.P
Boolean objects are verboten\.
diff --git a/deps/npm/man/man7/npm-config.7 b/deps/npm/man/man7/npm-config.7
index 5e4a9a2af..b99b699c5 100644
--- a/deps/npm/man/man7/npm-config.7
+++ b/deps/npm/man/man7/npm-config.7
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "7" "June 2015" "" ""
+.TH "NPM\-CONFIG" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-config\fR \- More than you probably want to know about npm configuration
.SH DESCRIPTION
@@ -6,18 +6,18 @@
npm gets its configuration values from the following sources, sorted by priority:
.SS Command Line Flags
.P
-Putting \fB\-\-foo bar\fR on the command line sets the \fBfoo\fR configuration
-parameter to \fB"bar"\fR\|\. A \fB\-\-\fR argument tells the cli parser to stop
-reading flags\. A \fB\-\-flag\fR parameter that is at the \fIend\fR of the
-command will be given the value of \fBtrue\fR\|\.
+Putting \fB\-\-foo bar\fP on the command line sets the \fBfoo\fP configuration
+parameter to \fB"bar"\fP\|\. A \fB\-\-\fP argument tells the cli parser to stop
+reading flags\. A \fB\-\-flag\fP parameter that is at the \fIend\fR of the
+command will be given the value of \fBtrue\fP\|\.
.SS Environment Variables
.P
-Any environment variables that start with \fBnpm_config_\fR will be
+Any environment variables that start with \fBnpm_config_\fP will be
interpreted as a configuration parameter\. For example, putting
-\fBnpm_config_foo=bar\fR in your environment will set the \fBfoo\fR
-configuration parameter to \fBbar\fR\|\. Any environment configurations that
-are not given a value will be given the value of \fBtrue\fR\|\. Config
-values are case\-insensitive, so \fBNPM_CONFIG_FOO=bar\fR will work the
+\fBnpm_config_foo=bar\fP in your environment will set the \fBfoo\fP
+configuration parameter to \fBbar\fP\|\. Any environment configurations that
+are not given a value will be given the value of \fBtrue\fP\|\. Config
+values are case\-insensitive, so \fBNPM_CONFIG_FOO=bar\fP will work the
same\.
.SS npmrc Files
.P
@@ -44,51 +44,51 @@ defaults if nothing else is specified\.
The following shorthands are parsed on the command\-line:
.RS 0
.IP \(bu 2
-\fB\-v\fR: \fB\-\-version\fR
+\fB\-v\fP: \fB\-\-version\fP
.IP \(bu 2
-\fB\-h\fR, \fB\-?\fR, \fB\-\-help\fR, \fB\-H\fR: \fB\-\-usage\fR
+\fB\-h\fP, \fB\-?\fP, \fB\-\-help\fP, \fB\-H\fP: \fB\-\-usage\fP
.IP \(bu 2
-\fB\-s\fR, \fB\-\-silent\fR: \fB\-\-loglevel silent\fR
+\fB\-s\fP, \fB\-\-silent\fP: \fB\-\-loglevel silent\fP
.IP \(bu 2
-\fB\-q\fR, \fB\-\-quiet\fR: \fB\-\-loglevel warn\fR
+\fB\-q\fP, \fB\-\-quiet\fP: \fB\-\-loglevel warn\fP
.IP \(bu 2
-\fB\-d\fR: \fB\-\-loglevel info\fR
+\fB\-d\fP: \fB\-\-loglevel info\fP
.IP \(bu 2
-\fB\-dd\fR, \fB\-\-verbose\fR: \fB\-\-loglevel verbose\fR
+\fB\-dd\fP, \fB\-\-verbose\fP: \fB\-\-loglevel verbose\fP
.IP \(bu 2
-\fB\-ddd\fR: \fB\-\-loglevel silly\fR
+\fB\-ddd\fP: \fB\-\-loglevel silly\fP
.IP \(bu 2
-\fB\-g\fR: \fB\-\-global\fR
+\fB\-g\fP: \fB\-\-global\fP
.IP \(bu 2
-\fB\-C\fR: \fB\-\-prefix\fR
+\fB\-C\fP: \fB\-\-prefix\fP
.IP \(bu 2
-\fB\-l\fR: \fB\-\-long\fR
+\fB\-l\fP: \fB\-\-long\fP
.IP \(bu 2
-\fB\-m\fR: \fB\-\-message\fR
+\fB\-m\fP: \fB\-\-message\fP
.IP \(bu 2
-\fB\-p\fR, \fB\-\-porcelain\fR: \fB\-\-parseable\fR
+\fB\-p\fP, \fB\-\-porcelain\fP: \fB\-\-parseable\fP
.IP \(bu 2
-\fB\-reg\fR: \fB\-\-registry\fR
+\fB\-reg\fP: \fB\-\-registry\fP
.IP \(bu 2
-\fB\-f\fR: \fB\-\-force\fR
+\fB\-f\fP: \fB\-\-force\fP
.IP \(bu 2
-\fB\-desc\fR: \fB\-\-description\fR
+\fB\-desc\fP: \fB\-\-description\fP
.IP \(bu 2
-\fB\-S\fR: \fB\-\-save\fR
+\fB\-S\fP: \fB\-\-save\fP
.IP \(bu 2
-\fB\-D\fR: \fB\-\-save\-dev\fR
+\fB\-D\fP: \fB\-\-save\-dev\fP
.IP \(bu 2
-\fB\-O\fR: \fB\-\-save\-optional\fR
+\fB\-O\fP: \fB\-\-save\-optional\fP
.IP \(bu 2
-\fB\-B\fR: \fB\-\-save\-bundle\fR
+\fB\-B\fP: \fB\-\-save\-bundle\fP
.IP \(bu 2
-\fB\-E\fR: \fB\-\-save\-exact\fR
+\fB\-E\fP: \fB\-\-save\-exact\fP
.IP \(bu 2
-\fB\-y\fR: \fB\-\-yes\fR
+\fB\-y\fP: \fB\-\-yes\fP
.IP \(bu 2
-\fB\-n\fR: \fB\-\-yes false\fR
+\fB\-n\fP: \fB\-\-yes false\fP
.IP \(bu 2
-\fBll\fR and \fBla\fR commands: \fBls \-\-long\fR
+\fBll\fP and \fBla\fP commands: \fBls \-\-long\fP
.RE
.P
@@ -118,9 +118,9 @@ npm ls \-\-global \-\-parseable \-\-long \-\-loglevel info
.RE
.SH Per\-Package Config Settings
.P
-When running scripts (see npm help 7 \fBnpm\-scripts\fR) the package\.json "config"
+When running scripts (see npm help 7 \fBnpm\-scripts\fP) the package\.json "config"
keys are overwritten in the environment if there is a config param of
-\fB<name>[@<version>]:<key>\fR\|\. For example, if the package\.json has
+\fB<name>[@<version>]:<key>\fP\|\. For example, if the package\.json has
this:
.P
.RS 2
@@ -152,16 +152,16 @@ See npm help 5 package\.json for more information\.
.SS access
.RS 0
.IP \(bu 2
-Default: \fBrestricted\fR
+Default: \fBrestricted\fP
.IP \(bu 2
Type: Access
.RE
.P
-When publishing scoped packages, the access level defaults to \fBrestricted\fR\|\. If
+When publishing scoped packages, the access level defaults to \fBrestricted\fP\|\. If
you want your scoped package to be publicly viewable (and installable) set
-\fB\-\-access=public\fR\|\. The only valid values for \fBaccess\fR are \fBpublic\fR and
-\fBrestricted\fR\|\. Unscoped packages \fIalways\fR have an access level of \fBpublic\fR\|\.
+\fB\-\-access=public\fP\|\. The only valid values for \fBaccess\fP are \fBpublic\fP and
+\fBrestricted\fP\|\. Unscoped packages \fIalways\fR have an access level of \fBpublic\fP\|\.
.SS always\-auth
.RS 0
.IP \(bu 2
@@ -172,17 +172,17 @@ Type: Boolean
.RE
.P
Force npm to always require authentication when accessing the registry,
-even for \fBGET\fR requests\.
+even for \fBGET\fP requests\.
.SS bin\-links
.RS 0
.IP \(bu 2
-Default: \fBtrue\fR
+Default: \fBtrue\fP
.IP \(bu 2
Type: Boolean
.RE
.P
-Tells npm to create symlinks (or \fB\|\.cmd\fR shims on Windows) for package
+Tells npm to create symlinks (or \fB\|\.cmd\fP shims on Windows) for package
executables\.
.P
Set to false to have it not do this\. This can be used to work around
@@ -191,13 +191,13 @@ ostensibly Unix systems\.
.SS browser
.RS 0
.IP \(bu 2
-Default: OS X: \fB"open"\fR, Windows: \fB"start"\fR, Others: \fB"xdg\-open"\fR
+Default: OS X: \fB"open"\fP, Windows: \fB"start"\fP, Others: \fB"xdg\-open"\fP
.IP \(bu 2
Type: String
.RE
.P
-The browser that is called by the \fBnpm docs\fR command to open websites\.
+The browser that is called by the \fBnpm docs\fP command to open websites\.
.SS ca
.RS 0
.IP \(bu 2
@@ -217,7 +217,7 @@ ca="\-\-\-\-\-BEGIN CERTIFICATE\-\-\-\-\-\\nXXXX\\nXXXX\\n\-\-\-\-\-END CERTIFIC
.fi
.RE
.P
-Set to \fBnull\fR to only allow "known" registrars, or to a specific CA cert
+Set to \fBnull\fP to only allow "known" registrars, or to a specific CA cert
to trust only that specific signing authority\.
.P
Multiple CAs can be trusted by specifying an array of certificates:
@@ -229,29 +229,29 @@ ca[]="\.\.\."
.fi
.RE
.P
-See also the \fBstrict\-ssl\fR config\.
+See also the \fBstrict\-ssl\fP config\.
.SS cafile
.RS 0
.IP \(bu 2
-Default: \fBnull\fR
+Default: \fBnull\fP
.IP \(bu 2
Type: path
.RE
.P
A path to a file containing one or multiple Certificate Authority signing
-certificates\. Similar to the \fBca\fR setting, but allows for multiple CA's, as
+certificates\. Similar to the \fBca\fP setting, but allows for multiple CAs, as
well as for the CA information to be stored in a file on disk\.
.SS cache
.RS 0
.IP \(bu 2
-Default: Windows: \fB%AppData%\\npm\-cache\fR, Posix: \fB~/\.npm\fR
+Default: Windows: \fB%AppData%\\npm\-cache\fP, Posix: \fB~/\.npm\fP
.IP \(bu 2
Type: path
.RE
.P
-The location of npm's cache directory\. See npm help \fBnpm\-cache\fR
+The location of npm's cache directory\. See npm help \fBnpm\-cache\fP
.SS cache\-lock\-stale
.RS 0
.IP \(bu 2
@@ -294,7 +294,7 @@ Type: Number
The maximum time (in seconds) to keep items in the registry cache before
re\-checking against the registry\.
.P
-Note that no purging is done unless the \fBnpm cache clean\fR command is
+Note that no purging is done unless the \fBnpm cache clean\fP command is
explicitly used, and that only GET requests use the cache\.
.SS cache\-min
.RS 0
@@ -308,12 +308,12 @@ Type: Number
The minimum time (in seconds) to keep items in the registry cache before
re\-checking against the registry\.
.P
-Note that no purging is done unless the \fBnpm cache clean\fR command is
+Note that no purging is done unless the \fBnpm cache clean\fP command is
explicitly used, and that only GET requests use the cache\.
.SS cert
.RS 0
.IP \(bu 2
-Default: \fBnull\fR
+Default: \fBnull\fP
.IP \(bu 2
Type: String
@@ -325,11 +325,11 @@ A client certificate to pass when accessing the registry\.
.IP \(bu 2
Default: true on Posix, false on Windows
.IP \(bu 2
-Type: Boolean or \fB"always"\fR
+Type: Boolean or \fB"always"\fP
.RE
.P
-If false, never shows colors\. If \fB"always"\fR then always shows colors\.
+If false, never shows colors\. If \fB"always"\fP then always shows colors\.
If true, then only prints color codes for tty file descriptors\.
.SS depth
.RS 0
@@ -340,13 +340,13 @@ Type: Number
.RE
.P
-The depth to go when recursing directories for \fBnpm ls\fR,
-\fBnpm cache ls\fR, and \fBnpm outdated\fR\|\.
+The depth to go when recursing directories for \fBnpm ls\fP,
+\fBnpm cache ls\fP, and \fBnpm outdated\fP\|\.
.P
-For \fBnpm outdated\fR, a setting of \fBInfinity\fR will be treated as \fB0\fR
+For \fBnpm outdated\fP, a setting of \fBInfinity\fP will be treated as \fB0\fP
since that gives more useful information\. To show the outdated status
of all packages and dependents, use a large integer value,
-e\.g\., \fBnpm outdated \-\-depth 9999\fR
+e\.g\., \fBnpm outdated \-\-depth 9999\fP
.SS description
.RS 0
.IP \(bu 2
@@ -356,7 +356,7 @@ Type: Boolean
.RE
.P
-Show the description in \fBnpm search\fR
+Show the description in \fBnpm search\fP
.SS dev
.RS 0
.IP \(bu 2
@@ -366,21 +366,21 @@ Type: Boolean
.RE
.P
-Install \fBdev\-dependencies\fR along with packages\.
+Install \fBdev\-dependencies\fP along with packages\.
.P
-Note that \fBdev\-dependencies\fR are also installed if the \fBnpat\fR flag is
+Note that \fBdev\-dependencies\fP are also installed if the \fBnpat\fP flag is
set\.
.SS editor
.RS 0
.IP \(bu 2
-Default: \fBEDITOR\fR environment variable if set, or \fB"vi"\fR on Posix,
-or \fB"notepad"\fR on Windows\.
+Default: \fBEDITOR\fP environment variable if set, or \fB"vi"\fP on Posix,
+or \fB"notepad"\fP on Windows\.
.IP \(bu 2
Type: path
.RE
.P
-The command to run for \fBnpm edit\fR or \fBnpm config edit\fR\|\.
+The command to run for \fBnpm edit\fP or \fBnpm config edit\fP\|\.
.SS engine\-strict
.RS 0
.IP \(bu 2
@@ -423,7 +423,7 @@ Type: Number
.RE
.P
-The "retries" config for the \fBretry\fR module to use when fetching
+The "retries" config for the \fBretry\fP module to use when fetching
packages from the registry\.
.SS fetch\-retry\-factor
.RS 0
@@ -434,7 +434,7 @@ Type: Number
.RE
.P
-The "factor" config for the \fBretry\fR module to use when fetching
+The "factor" config for the \fBretry\fP module to use when fetching
packages\.
.SS fetch\-retry\-mintimeout
.RS 0
@@ -445,7 +445,7 @@ Type: Number
.RE
.P
-The "minTimeout" config for the \fBretry\fR module to use when fetching
+The "minTimeout" config for the \fBretry\fP module to use when fetching
packages\.
.SS fetch\-retry\-maxtimeout
.RS 0
@@ -456,30 +456,30 @@ Type: Number
.RE
.P
-The "maxTimeout" config for the \fBretry\fR module to use when fetching
+The "maxTimeout" config for the \fBretry\fP module to use when fetching
packages\.
.SS git
.RS 0
.IP \(bu 2
-Default: \fB"git"\fR
+Default: \fB"git"\fP
.IP \(bu 2
Type: String
.RE
.P
The command to use for git commands\. If git is installed on the
-computer, but is not in the \fBPATH\fR, then set this to the full path to
+computer, but is not in the \fBPATH\fP, then set this to the full path to
the git binary\.
.SS git\-tag\-version
.RS 0
.IP \(bu 2
-Default: \fBtrue\fR
+Default: \fBtrue\fP
.IP \(bu 2
Type: Boolean
.RE
.P
-Tag the commit when using the \fBnpm version\fR command\.
+Tag the commit when using the \fBnpm version\fP command\.
.SS global
.RS 0
.IP \(bu 2
@@ -490,16 +490,16 @@ Type: Boolean
.RE
.P
Operates in "global" mode, so that packages are installed into the
-\fBprefix\fR folder instead of the current working directory\. See
-npm help 5 \fBnpm\-folders\fR for more on the differences in behavior\.
+\fBprefix\fP folder instead of the current working directory\. See
+npm help 5 \fBnpm\-folders\fP for more on the differences in behavior\.
.RS 0
.IP \(bu 2
-packages are installed into the \fB{prefix}/lib/node_modules\fR folder, instead of the
+packages are installed into the \fB{prefix}/lib/node_modules\fP folder, instead of the
current working directory\.
.IP \(bu 2
-bin files are linked to \fB{prefix}/bin\fR
+bin files are linked to \fB{prefix}/bin\fP
.IP \(bu 2
-man pages are linked to \fB{prefix}/share/man\fR
+man pages are linked to \fB{prefix}/share/man\fP
.RE
.SS globalconfig
@@ -526,7 +526,7 @@ user\.
.SS heading
.RS 0
.IP \(bu 2
-Default: \fB"npm"\fR
+Default: \fB"npm"\fP
.IP \(bu 2
Type: String
@@ -542,9 +542,9 @@ Type: url
.RE
.P
-A proxy to use for outgoing https requests\. If the \fBHTTPS_PROXY\fR or
-\fBhttps_proxy\fR or \fBHTTP_PROXY\fR or \fBhttp_proxy\fR environment variables are set,
-proxy settings will be honored by the underlying \fBrequest\fR library\.
+A proxy to use for outgoing https requests\. If the \fBHTTPS_PROXY\fP or
+\fBhttps_proxy\fP or \fBHTTP_PROXY\fP or \fBhttp_proxy\fP environment variables are set,
+proxy settings will be honored by the underlying \fBrequest\fP library\.
.SS if\-present
.RS 0
.IP \(bu 2
@@ -554,8 +554,8 @@ Type: Boolean
.RE
.P
-If true, npm will not exit with an error code when \fBrun\-script\fR is invoked for
-a script that isn't defined in the \fBscripts\fR section of \fBpackage\.json\fR\|\. This
+If true, npm will not exit with an error code when \fBrun\-script\fP is invoked for
+a script that isn't defined in the \fBscripts\fP section of \fBpackage\.json\fP\|\. This
option can be used when it's desirable to optionally run a script when it's
present and fail if the script fails\. This is useful, for example, when running
scripts that may only apply for some builds in an otherwise generic CI setup\.
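.P
A hedged sketch of the CI use case described above, assuming a hypothetical "lint" script that only some packages define:
.RS 2
.nf
# exits 0 whether or not "lint" exists in the scripts section,
# but still fails if a defined "lint" script fails
npm run\-script lint \-\-if\-present
.fi
.RE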
@@ -578,7 +578,7 @@ Type: path
.RE
.P
-A module that will be loaded by the \fBnpm init\fR command\. See the
+A module that will be loaded by the \fBnpm init\fP command\. See the
documentation for the
init\-package\-json \fIhttps://github\.com/isaacs/init\-package\-json\fR module
for more information, or npm help init\.
@@ -591,7 +591,7 @@ Type: String
.RE
.P
-The value \fBnpm init\fR should use by default for the package author's name\.
+The value \fBnpm init\fP should use by default for the package author's name\.
.SS init\-author\-email
.RS 0
.IP \(bu 2
@@ -601,7 +601,7 @@ Type: String
.RE
.P
-The value \fBnpm init\fR should use by default for the package author's email\.
+The value \fBnpm init\fP should use by default for the package author's email\.
.SS init\-author\-url
.RS 0
.IP \(bu 2
@@ -611,7 +611,7 @@ Type: String
.RE
.P
-The value \fBnpm init\fR should use by default for the package author's homepage\.
+The value \fBnpm init\fP should use by default for the package author's homepage\.
.SS init\-license
.RS 0
.IP \(bu 2
@@ -621,7 +621,7 @@ Type: String
.RE
.P
-The value \fBnpm init\fR should use by default for the package license\.
+The value \fBnpm init\fP should use by default for the package license\.
.SS init\-version
.RS 0
.IP \(bu 2
@@ -631,7 +631,7 @@ Type: semver
.RE
.P
-The value that \fBnpm init\fR should use by default for the package
+The value that \fBnpm init\fP should use by default for the package
version number, if not already set in package\.json\.
.SS json
.RS 0
@@ -646,11 +646,11 @@ Whether or not to output JSON data, rather than the normal output\.
.P
This feature is currently experimental, and the output data structures
for many commands is either not implemented in JSON yet, or subject to
-change\. Only the output from \fBnpm ls \-\-json\fR is currently valid\.
+change\. Only the output from \fBnpm ls \-\-json\fP is currently valid\.
.SS key
.RS 0
.IP \(bu 2
-Default: \fBnull\fR
+Default: \fBnull\fP
.IP \(bu 2
Type: String
@@ -703,7 +703,7 @@ Values: "silent", "error", "warn", "http", "info", "verbose", "silly"
.RE
.P
What level of logs to report\. On failure, \fIall\fR logs are written to
-\fBnpm\-debug\.log\fR in the current working directory\.
+\fBnpm\-debug\.log\fP in the current working directory\.
.P
Any logs of a higher level than the setting are shown\.
The default is "warn", which shows warn and error output\.
@@ -723,7 +723,7 @@ It cannot be set from the command line, but if you are using npm
programmatically, you may wish to send logs to somewhere other than
stderr\.
.P
-If the \fBcolor\fR config is set to true, then this stream will receive
+If the \fBcolor\fP config is set to true, then this stream will receive
colored output if it is a TTY\.
.SS long
.RS 0
@@ -734,7 +734,7 @@ Type: Boolean
.RE
.P
-Show extended information in \fBnpm ls\fR and \fBnpm search\fR\|\.
+Show extended information in \fBnpm ls\fP and \fBnpm search\fP\|\.
.SS message
.RS 0
.IP \(bu 2
@@ -744,7 +744,7 @@ Type: String
.RE
.P
-Commit message which is used by \fBnpm version\fR when creating version commit\.
+Commit message which is used by \fBnpm version\fP when creating version commit\.
.P
Any "%s" in the message will be replaced with the version number\.
.SS node\-version
@@ -756,7 +756,7 @@ Type: semver or false
.RE
.P
-The node version to use when checking a package's \fBengines\fR map\.
+The node version to use when checking a package's \fBengines\fP map\.
.SS npat
.RS 0
.IP \(bu 2
@@ -776,7 +776,7 @@ Type: path
.RE
.P
-A node module to \fBrequire()\fR when npm loads\. Useful for programmatic
+A node module to \fBrequire()\fP when npm loads\. Useful for programmatic
usage\.
.SS optional
.RS 0
@@ -787,7 +787,7 @@ Type: Boolean
.RE
.P
-Attempt to install packages in the \fBoptionalDependencies\fR object\. Note
+Attempt to install packages in the \fBoptionalDependencies\fP object\. Note
that if these packages fail to install, the overall installation
process is not aborted\.
.SS parseable
@@ -825,7 +825,7 @@ Set to true to run in "production" mode\.
.RS 0
.IP 1. 3
devDependencies are not installed at the topmost level when running
-local \fBnpm install\fR without any arguments\.
+local \fBnpm install\fP without any arguments\.
.IP 2. 3
Set the NODE_ENV="production" for lifecycle scripts\.
@@ -854,9 +854,9 @@ Type: url
.RE
.P
-A proxy to use for outgoing http requests\. If the \fBHTTP_PROXY\fR or
-\fBhttp_proxy\fR environment variables are set, proxy settings will be
-honored by the underlying \fBrequest\fR library\.
+A proxy to use for outgoing http requests\. If the \fBHTTP_PROXY\fP or
+\fBhttp_proxy\fP environment variables are set, proxy settings will be
+honored by the underlying \fBrequest\fP library\.
.SS rebuild\-bundle
.RS 0
.IP \(bu 2
@@ -898,7 +898,7 @@ Type: Boolean
.P
Save installed packages to a package\.json file as dependencies\.
.P
-When used with the \fBnpm rm\fR command, it removes it from the \fBdependencies\fR
+When used with the \fBnpm rm\fP command, it removes it from the \fBdependencies\fP
object\.
.P
Only works if there is already a package\.json file present\.
@@ -911,11 +911,11 @@ Type: Boolean
.RE
.P
-If a package would be saved at install time by the use of \fB\-\-save\fR,
-\fB\-\-save\-dev\fR, or \fB\-\-save\-optional\fR, then also put it in the
-\fBbundleDependencies\fR list\.
+If a package would be saved at install time by the use of \fB\-\-save\fP,
+\fB\-\-save\-dev\fP, or \fB\-\-save\-optional\fP, then also put it in the
+\fBbundleDependencies\fP list\.
.P
-When used with the \fBnpm rm\fR command, it removes it from the
+When used with the \fBnpm rm\fP command, it removes it from the
bundledDependencies list\.
.SS save\-dev
.RS 0
@@ -926,10 +926,10 @@ Type: Boolean
.RE
.P
-Save installed packages to a package\.json file as \fBdevDependencies\fR\|\.
+Save installed packages to a package\.json file as \fBdevDependencies\fP\|\.
.P
-When used with the \fBnpm rm\fR command, it removes it from the
-\fBdevDependencies\fR object\.
+When used with the \fBnpm rm\fP command, it removes it from the
+\fBdevDependencies\fP object\.
.P
Only works if there is already a package\.json file present\.
.SS save\-exact
@@ -941,8 +941,8 @@ Type: Boolean
.RE
.P
-Dependencies saved to package\.json using \fB\-\-save\fR, \fB\-\-save\-dev\fR or
-\fB\-\-save\-optional\fR will be configured with an exact version rather than
+Dependencies saved to package\.json using \fB\-\-save\fP, \fB\-\-save\-dev\fP or
+\fB\-\-save\-optional\fP will be configured with an exact version rather than
using npm's default semver range operator\.
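.P
A minimal sketch of the save flags described above, assuming a hypothetical package named example\-pkg and an existing package\.json:
.RS 2
.nf
npm install example\-pkg \-\-save                # recorded under dependencies
npm install example\-pkg \-\-save\-dev            # recorded under devDependencies
npm install example\-pkg \-\-save \-\-save\-exact   # recorded as an exact version
npm rm example\-pkg \-\-save                     # removed from dependencies again
.fi
.RE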
.SS save\-optional
.RS 0
@@ -956,8 +956,8 @@ Type: Boolean
Save installed packages to a package\.json file as
optionalDependencies\.
.P
-When used with the \fBnpm rm\fR command, it removes it from the
-\fBdevDependencies\fR object\.
+When used with the \fBnpm rm\fP command, it removes it from the
+\fBdevDependencies\fP object\.
.P
Only works if there is already a package\.json file present\.
.SS save\-prefix
@@ -970,11 +970,11 @@ Type: String
.RE
.P
Configure how versions of packages installed to a package\.json file via
-\fB\-\-save\fR or \fB\-\-save\-dev\fR get prefixed\.
+\fB\-\-save\fP or \fB\-\-save\-dev\fP get prefixed\.
.P
-For example if a package has version \fB1\.2\.3\fR, by default its version is
-set to \fB^1\.2\.3\fR which allows minor upgrades for that package, but after
-\fBnpm config set save\-prefix='~'\fR it would be set to \fB~1\.2\.3\fR which only allows
+For example if a package has version \fB1\.2\.3\fP, by default its version is
+set to \fB^1\.2\.3\fP which allows minor upgrades for that package, but after
+\fBnpm config set save\-prefix='~'\fP it would be set to \fB~1\.2\.3\fP which only allows
patch upgrades\.
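.P
A short sketch of the behaviour described above, assuming a hypothetical package example\-pkg currently at version 1\.2\.3:
.RS 2
.nf
npm config set save\-prefix='~'
npm install example\-pkg \-\-save   # package\.json now records "~1\.2\.3"
.fi
.RE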
.SS scope
.RS 0
@@ -987,9 +987,9 @@ Type: String
.P
Associate an operation with a scope for a scoped registry\. Useful when logging
in to a private registry for the first time:
-\fBnpm login \-\-scope=@organization \-\-registry=registry\.organization\.com\fR, which
-will cause \fB@organization\fR to be mapped to the registry for future installation
-of packages specified according to the pattern \fB@organization/package\fR\|\.
+\fBnpm login \-\-scope=@organization \-\-registry=registry\.organization\.com\fP, which
+will cause \fB@organization\fP to be mapped to the registry for future installation
+of packages specified according to the pattern \fB@organization/package\fP\|\.
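.P
Putting the login example above together with a subsequent install, using the same illustrative @organization scope and registry as the text:
.RS 2
.nf
npm login \-\-scope=@organization \-\-registry=registry\.organization\.com
npm install @organization/package   # resolved against the scoped registry
.fi
.RE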
.SS searchopts
.RS 0
.IP \(bu 2
@@ -1022,7 +1022,7 @@ Values: "name", "\-name", "date", "\-date", "description",
.RE
.P
-Indication of which field to sort search results by\. Prefix with a \fB\-\fR
+Indication of which field to sort search results by\. Prefix with a \fB\-\fP
character to indicate reverse sort\.
.SS shell
.RS 0
@@ -1034,7 +1034,7 @@ Type: path
.RE
.P
-The shell to run for the \fBnpm explore\fR command\.
+The shell to run for the \fBnpm explore\fP command\.
.SS shrinkwrap
.RS 0
.IP \(bu 2
@@ -1044,7 +1044,7 @@ Type: Boolean
.RE
.P
-If set to false, then ignore \fBnpm\-shrinkwrap\.json\fR files when
+If set to false, then ignore \fBnpm\-shrinkwrap\.json\fP files when
installing\.
.SS sign\-git\-tag
.RS 0
@@ -1055,8 +1055,8 @@ Type: Boolean
.RE
.P
-If set to true, then the \fBnpm version\fR command will tag the version
-using \fB\-s\fR to add a signature\.
+If set to true, then the \fBnpm version\fP command will tag the version
+using \fB\-s\fP to add a signature\.
.P
Note that git requires you to have set up GPG keys in your git configs
for this to work properly\.
@@ -1065,14 +1065,14 @@ for this to work properly\.
.IP \(bu 2
Default: true
.IP \(bu 2
-Type: Boolean or \fB"always"\fR
+Type: Boolean or \fB"always"\fP
.RE
.P
-When set to \fBtrue\fR, npm will display an ascii spinner while it is doing
-things, if \fBprocess\.stderr\fR is a TTY\.
+When set to \fBtrue\fP, npm will display an ascii spinner while it is doing
+things, if \fBprocess\.stderr\fP is a TTY\.
.P
-Set to \fBfalse\fR to suppress the spinner, or set to \fBalways\fR to output
+Set to \fBfalse\fP to suppress the spinner, or set to \fBalways\fP to output
the spinner even for non\-TTY outputs\.
.SS strict\-ssl
.RS 0
@@ -1086,7 +1086,7 @@ Type: Boolean
Whether or not to do SSL key validation when making requests to the
registry via https\.
.P
-See also the \fBca\fR config\.
+See also the \fBca\fP config\.
.SS tag
.RS 0
.IP \(bu 2
@@ -1100,22 +1100,22 @@ If you ask npm to install a package and don't tell it a specific version, then
it will install the specified tag\.
.P
Also the tag that is added to the package@version specified by the \fBnpm
-tag\fR command, if no explicit tag is given\.
+tag\fP command, if no explicit tag is given\.
.SS tag\-version\-prefix
.RS 0
.IP \(bu 2
-Default: \fB"v"\fR
+Default: \fB"v"\fP
.IP \(bu 2
Type: String
.RE
.P
If set, alters the prefix used when tagging a new version when performing a
-version increment using \fBnpm\-version\fR\|\. To remove the prefix altogether, set it
-to the empty string: \fB""\fR\|\.
+version increment using \fBnpm\-version\fP\|\. To remove the prefix altogether, set it
+to the empty string: \fB""\fP\|\.
.P
Because other tools may rely on the convention that npm version tags look like
-\fBv1\.0\.0\fR, \fIonly use this property if it is absolutely necessary\fR\|\. In
+\fBv1\.0\.0\fP, \fIonly use this property if it is absolutely necessary\fR\|\. In
particular, use care when overriding this setting for public packages\.
.SS tmp
.RS 0
@@ -1161,7 +1161,7 @@ Type: Boolean
.RE
.P
Set to show short usage output (like the \-H output)
-instead of complete help when doing npm help \fBnpm\-help\fR\|\.
+instead of complete help when doing npm help \fBnpm\-help\fP\|\.
.SS user
.RS 0
.IP \(bu 2
@@ -1194,9 +1194,9 @@ Type: Octal numeric string in range 0000\.\.0777 (0\.\.511)
The "umask" value to use when setting the file creation mode on files
and folders\.
.P
-Folders and executables are given a mode which is \fB0777\fR masked against
-this value\. Other files are given a mode which is \fB0666\fR masked against
-this value\. Thus, the defaults are \fB0755\fR and \fB0644\fR respectively\.
+Folders and executables are given a mode which is \fB0777\fP masked against
+this value\. Other files are given a mode which is \fB0666\fP masked against
+this value\. Thus, the defaults are \fB0755\fP and \fB0644\fP respectively\.
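.P
A worked example of the masking described above, assuming the common default umask of 0022:
.RS 2
.nf
npm config set umask 0022
# folders and executables: 0777 masked with 0022 yields 0755
# other files:             0666 masked with 0022 yields 0644
.fi
.RE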
.SS user\-agent
.RS 0
.IP \(bu 2
@@ -1228,7 +1228,7 @@ Type: boolean
.RE
.P
-If true, output the npm version as well as node's \fBprocess\.versions\fR map, and
+If true, output the npm version as well as node's \fBprocess\.versions\fP map, and
exit successfully\.
.P
Only relevant when specified explicitly on the command line\.
@@ -1243,7 +1243,7 @@ Type: path
.P
The program to use to view help content\.
.P
-Set to \fB"browser"\fR to view html help content in the default web browser\.
+Set to \fB"browser"\fP to view html help content in the default web browser\.
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man7/npm-developers.7 b/deps/npm/man/man7/npm-developers.7
index 3709946de..eb31fc10e 100644
--- a/deps/npm/man/man7/npm-developers.7
+++ b/deps/npm/man/man7/npm-developers.7
@@ -1,4 +1,4 @@
-.TH "NPM\-DEVELOPERS" "7" "June 2015" "" ""
+.TH "NPM\-DEVELOPERS" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-developers\fR \- Developer Guide
.SH DESCRIPTION
@@ -13,9 +13,9 @@ that your users will do to install your program\.
.SH About These Documents
.P
These are man pages\. If you install npm, you should be able to
-then do \fBman npm\-thing\fR to get the documentation on a particular
-topic, or \fBnpm help thing\fR to see the same information\.
-.SH What is a \fBpackage\fR
+then do \fBman npm\-thing\fP to get the documentation on a particular
+topic, or \fBnpm help thing\fP to see the same information\.
+.SH What is a \fBpackage\fP
.P
A package is:
.RS 0
@@ -26,13 +26,13 @@ b) a gzipped tarball containing (a)
.IP \(bu 2
c) a url that resolves to (b)
.IP \(bu 2
-d) a \fB<name>@<version>\fR that is published on the registry with (c)
+d) a \fB<name>@<version>\fP that is published on the registry with (c)
.IP \(bu 2
-e) a \fB<name>@<tag>\fR that points to (d)
+e) a \fB<name>@<tag>\fP that points to (d)
.IP \(bu 2
-f) a \fB<name>\fR that has a "latest" tag satisfying (e)
+f) a \fB<name>\fP that has a "latest" tag satisfying (e)
.IP \(bu 2
-g) a \fBgit\fR url that, when cloned, results in (a)\.
+g) a \fBgit\fP url that, when cloned, results in (a)\.
.RE
.P
@@ -52,14 +52,14 @@ git+https://user@hostname/project/blah\.git#commit\-ish
.fi
.RE
.P
-The \fBcommit\-ish\fR can be any tag, sha, or branch which can be supplied as
-an argument to \fBgit checkout\fR\|\. The default is \fBmaster\fR\|\.
+The \fBcommit\-ish\fP can be any tag, sha, or branch which can be supplied as
+an argument to \fBgit checkout\fP\|\. The default is \fBmaster\fP\|\.
.SH The package\.json File
.P
-You need to have a \fBpackage\.json\fR file in the root of your project to do
+You need to have a \fBpackage\.json\fP file in the root of your project to do
much of anything with npm\. That is basically the whole interface\.
.P
-See npm help 5 \fBpackage\.json\fR for details about what goes in that file\. At the very
+See npm help 5 \fBpackage\.json\fP for details about what goes in that file\. At the very
least, you need:
.RS 0
.IP \(bu 2
@@ -70,7 +70,7 @@ You can use the "engines" field to explicitly state the versions of
node (or whatever else) that your program requires, and it's pretty
well assumed that it's javascript\.
It does not necessarily need to match your github repository name\.
-So, \fBnode\-foo\fR and \fBbar\-js\fR are bad names\. \fBfoo\fR or \fBbar\fR are better\.
+So, \fBnode\-foo\fP and \fBbar\-js\fP are bad names\. \fBfoo\fP or \fBbar\fP are better\.
.IP \(bu 2
version:
A semver\-compatible version\.
@@ -85,7 +85,7 @@ Take some credit\.
.IP \(bu 2
scripts:
If you have a special compilation or installation script, then you
-should put it in the \fBscripts\fR object\. You should definitely have at
+should put it in the \fBscripts\fP object\. You should definitely have at
least a basic smoke\-test command as the "scripts\.test" field\.
See npm help 7 scripts\.
.IP \(bu 2
@@ -101,78 +101,78 @@ they'll get installed just like these ones\.
.RE
.P
-You can use \fBnpm init\fR in the root of your package in order to get you
-started with a pretty basic package\.json file\. See npm help \fBnpm\-init\fR for
+You can use \fBnpm init\fP in the root of your package in order to get you
+started with a pretty basic package\.json file\. See npm help \fBnpm\-init\fP for
more info\.
.SH Keeping files \fIout\fR of your package
.P
-Use a \fB\|\.npmignore\fR file to keep stuff out of your package\. If there's
-no \fB\|\.npmignore\fR file, but there \fIis\fR a \fB\|\.gitignore\fR file, then npm will
-ignore the stuff matched by the \fB\|\.gitignore\fR file\. If you \fIwant\fR to
-include something that is excluded by your \fB\|\.gitignore\fR file, you can
-create an empty \fB\|\.npmignore\fR file to override it\.
+Use a \fB\|\.npmignore\fP file to keep stuff out of your package\. If there's
+no \fB\|\.npmignore\fP file, but there \fIis\fR a \fB\|\.gitignore\fP file, then npm will
+ignore the stuff matched by the \fB\|\.gitignore\fP file\. If you \fIwant\fR to
+include something that is excluded by your \fB\|\.gitignore\fP file, you can
+create an empty \fB\|\.npmignore\fP file to override it\.
.P
-\fB\|\.npmignore\fR files follow the same pattern rules \fIhttp://git\-scm\.com/book/en/v2/Git\-Basics\-Recording\-Changes\-to\-the\-Repository#Ignoring\-Files\fR
-as \fB\|\.gitignore\fR files:
+\fB\|\.npmignore\fP files follow the same pattern rules \fIhttp://git\-scm\.com/book/en/v2/Git\-Basics\-Recording\-Changes\-to\-the\-Repository#Ignoring\-Files\fR
+as \fB\|\.gitignore\fP files:
.RS 0
.IP \(bu 2
-Blank lines or lines starting with \fB#\fR are ignored\.
+Blank lines or lines starting with \fB#\fP are ignored\.
.IP \(bu 2
Standard glob patterns work\.
.IP \(bu 2
-You can end patterns with a forward slash \fB/\fR to specify a directory\.
+You can end patterns with a forward slash \fB/\fP to specify a directory\.
.IP \(bu 2
-You can negate a pattern by starting it with an exclamation point \fB!\fR\|\.
+You can negate a pattern by starting it with an exclamation point \fB!\fP\|\.
.RE
.P
By default, the following paths and files are ignored, so there's no
-need to add them to \fB\|\.npmignore\fR explicitly:
+need to add them to \fB\|\.npmignore\fP explicitly:
.RS 0
.IP \(bu 2
-\fB\|\.*\.swp\fR
+\fB\|\.*\.swp\fP
.IP \(bu 2
-\fB\|\._*\fR
+\fB\|\._*\fP
.IP \(bu 2
-\fB\|\.DS_Store\fR
+\fB\|\.DS_Store\fP
.IP \(bu 2
-\fB\|\.git\fR
+\fB\|\.git\fP
.IP \(bu 2
-\fB\|\.hg\fR
+\fB\|\.hg\fP
.IP \(bu 2
-\fB\|\.lock\-wscript\fR
+\fB\|\.lock\-wscript\fP
.IP \(bu 2
-\fB\|\.svn\fR
+\fB\|\.svn\fP
.IP \(bu 2
-\fB\|\.wafpickle\-*\fR
+\fB\|\.wafpickle\-*\fP
.IP \(bu 2
-\fBCVS\fR
+\fBCVS\fP
.IP \(bu 2
-\fBnpm\-debug\.log\fR
+\fBnpm\-debug\.log\fP
.RE
.P
-Additionally, everything in \fBnode_modules\fR is ignored, except for
+Additionally, everything in \fBnode_modules\fP is ignored, except for
bundled dependencies\. npm automatically handles this for you, so don't
-bother adding \fBnode_modules\fR to \fB\|\.npmignore\fR\|\.
+bother adding \fBnode_modules\fP to \fB\|\.npmignore\fP\|\.
.P
The following paths and files are never ignored, so adding them to
-\fB\|\.npmignore\fR is pointless:
+\fB\|\.npmignore\fP is pointless:
.RS 0
.IP \(bu 2
-\fBpackage\.json\fR
+\fBpackage\.json\fP
.IP \(bu 2
-\fBREADME\.*\fR
+\fBREADME\.*\fP
.RE
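.P
A small \fB\|\.npmignore\fP sketch exercising each of the pattern rules listed above (the file names are illustrative only):
.RS 2
.nf
# lines starting with # (and blank lines) are ignored

# standard glob patterns work
*\.log
# a trailing slash marks a directory
docs/
# a leading ! negates a pattern
!important\.log
.fi
.RE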
.SH Link Packages
.P
-\fBnpm link\fR is designed to install a development package and see the
+\fBnpm link\fP is designed to install a development package and see the
changes in real time without having to keep re\-installing it\. (You do
-need to either re\-link or \fBnpm rebuild \-g\fR to update compiled packages,
+need to either re\-link or \fBnpm rebuild \-g\fP to update compiled packages,
of course\.)
.P
-More info at npm help \fBnpm\-link\fR\|\.
+More info at npm help \fBnpm\-link\fP\|\.
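.P
A minimal two\-step sketch of the workflow described above, assuming a hypothetical package directory my\-pkg and a separate project that consumes it:
.RS 2
.nf
cd ~/work/my\-pkg
npm link                 # global symlink to this working copy

cd ~/work/consumer\-app
npm link my\-pkg          # use the linked copy instead of a registry install
.fi
.RE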
.SH Before Publishing: Make Sure Your Package Installs and Works
.P
\fBThis is important\.\fR
@@ -199,7 +199,7 @@ npm link
.fi
.RE
.P
-Use \fBnpm ls \-g\fR to see if it's there\.
+Use \fBnpm ls \-g\fP to see if it's there\.
.P
To test a local install, go into some other folder, and then do:
.P
@@ -242,7 +242,7 @@ or a path to a folder\.
.P
Note that pretty much \fBeverything in that folder will be exposed\fR
by default\. So, if you have secret stuff in there, use a
-\fB\|\.npmignore\fR file to list out the globs to ignore, or publish
+\fB\|\.npmignore\fP file to list out the globs to ignore, or publish
from a fresh checkout\.
.SH Brag about it
.P
diff --git a/deps/npm/man/man7/npm-disputes.7 b/deps/npm/man/man7/npm-disputes.7
index a740f66d5..77350cdc5 100644
--- a/deps/npm/man/man7/npm-disputes.7
+++ b/deps/npm/man/man7/npm-disputes.7
@@ -1,10 +1,10 @@
-.TH "NPM\-DISPUTES" "7" "June 2015" "" ""
+.TH "NPM\-DISPUTES" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-disputes\fR \- Handling Module Name Disputes
.SH SYNOPSIS
.RS 0
.IP 1. 3
-Get the author email with \fBnpm owner ls <pkgname>\fR
+Get the author email with \fBnpm owner ls <pkgname>\fP
.IP 2. 3
Email the author, CC support@npmjs\.com
.IP 3. 3
@@ -20,30 +20,30 @@ later, some other user wants to use that name\. Here are some common
ways that happens (each of these is based on actual events\.)
.RS 0
.IP 1. 3
-Joe writes a JavaScript module \fBfoo\fR, which is not node\-specific\.
-Joe doesn't use node at all\. Bob wants to use \fBfoo\fR in node, so he
+Joe writes a JavaScript module \fBfoo\fP, which is not node\-specific\.
+Joe doesn't use node at all\. Bob wants to use \fBfoo\fP in node, so he
wraps it in an npm module\. Some time later, Joe starts using node,
and wants to take over management of his program\.
.IP 2. 3
-Bob writes an npm module \fBfoo\fR, and publishes it\. Perhaps much
-later, Joe finds a bug in \fBfoo\fR, and fixes it\. He sends a pull
+Bob writes an npm module \fBfoo\fP, and publishes it\. Perhaps much
+later, Joe finds a bug in \fBfoo\fP, and fixes it\. He sends a pull
request to Bob, but Bob doesn't have the time to deal with it,
because he has a new job and a new baby and is focused on his new
erlang project, and kind of not involved with node any more\. Joe
-would like to publish a new \fBfoo\fR, but can't, because the name is
+would like to publish a new \fBfoo\fP, but can't, because the name is
taken\.
.IP 3. 3
-Bob writes a 10\-line flow\-control library, and calls it \fBfoo\fR, and
+Bob writes a 10\-line flow\-control library, and calls it \fBfoo\fP, and
publishes it to the npm registry\. Being a simple little thing, it
never really has to be updated\. Joe works for Foo Inc, the makers
-of the critically acclaimed and widely\-marketed \fBfoo\fR JavaScript
-toolkit framework\. They publish it to npm as \fBfoojs\fR, but people are
-routinely confused when \fBnpm install foo\fR is some different thing\.
+of the critically acclaimed and widely\-marketed \fBfoo\fP JavaScript
+toolkit framework\. They publish it to npm as \fBfoojs\fP, but people are
+routinely confused when \fBnpm install foo\fP is some different thing\.
.IP 4. 3
-Bob writes a parser for the widely\-known \fBfoo\fR file format, because
+Bob writes a parser for the widely\-known \fBfoo\fP file format, because
he needs it for work\. Then, he gets a new job, and never updates the
-prototype\. Later on, Joe writes a much more complete \fBfoo\fR parser,
-but can't publish, because Bob's \fBfoo\fR is in the way\.
+prototype\. Later on, Joe writes a much more complete \fBfoo\fP parser,
+but can't publish, because Bob's \fBfoo\fP is in the way\.
.RE
.P
@@ -51,14 +51,14 @@ The validity of Joe's claim in each situation can be debated\. However,
Joe's appropriate course of action in each case is the same\.
.RS 0
.IP 1. 3
-\fBnpm owner ls foo\fR\|\. This will tell Joe the email address of the
+\fBnpm owner ls foo\fP\|\. This will tell Joe the email address of the
owner (Bob)\.
.IP 2. 3
Joe emails Bob, explaining the situation \fBas respectfully as
possible\fR, and what he would like to do with the module name\. He
adds the npm support staff support@npmjs\.com to the CC list of
the email\. Mention in the email that Bob can run \fBnpm owner add
-joe foo\fR to add Joe as an owner of the \fBfoo\fR package\.
+joe foo\fP to add Joe as an owner of the \fBfoo\fP package\.
.IP 3. 3
After a reasonable amount of time, if Bob has not responded, or if
Bob and Joe can't come to any sort of resolution, email support
diff --git a/deps/npm/man/man7/npm-faq.7 b/deps/npm/man/man7/npm-faq.7
index 1ee30f516..651597a2b 100644
--- a/deps/npm/man/man7/npm-faq.7
+++ b/deps/npm/man/man7/npm-faq.7
@@ -1,4 +1,4 @@
-.TH "NPM\-FAQ" "7" "June 2015" "" ""
+.TH "NPM\-FAQ" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-faq\fR \- Frequently Asked Questions
.SH Where can I find these docs in HTML?
@@ -11,7 +11,7 @@ npm config set viewer browser
.fi
.RE
.P
-to open these documents in your default web browser rather than \fBman\fR\|\.
+to open these documents in your default web browser rather than \fBman\fP\|\.
.SH It didn't work\.
.P
That's not really a question\.
@@ -23,32 +23,32 @@ Read the error output, and if you can't figure out what it means,
do what it says and post a bug with all the information it asks for\.
.SH Where does npm put stuff?
.P
-See npm help 5 \fBnpm\-folders\fR
+See npm help 5 \fBnpm\-folders\fP
.P
tl;dr:
.RS 0
.IP \(bu 2
-Use the \fBnpm root\fR command to see where modules go, and the \fBnpm bin\fR
+Use the \fBnpm root\fP command to see where modules go, and the \fBnpm bin\fP
command to see where executables go
.IP \(bu 2
Global installs are different from local installs\. If you install
-something with the \fB\-g\fR flag, then its executables go in \fBnpm bin \-g\fR
-and its modules go in \fBnpm root \-g\fR\|\.
+something with the \fB\-g\fP flag, then its executables go in \fBnpm bin \-g\fP
+and its modules go in \fBnpm root \-g\fP\|\.
.RE
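.P
The four commands mentioned above, side by side, make the local/global split easy to see:
.RS 2
.nf
npm root        # where local modules go
npm bin         # where local executables go
npm root \-g     # where global modules go
npm bin \-g      # where global executables go
.fi
.RE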
.SH How do I install something on my computer in a central location?
.P
-Install it globally by tacking \fB\-g\fR or \fB\-\-global\fR to the command\. (This
+Install it globally by tacking \fB\-g\fP or \fB\-\-global\fP to the command\. (This
is especially important for command line utilities that need to add
-their bins to the global system \fBPATH\fR\|\.)
-.SH I installed something globally, but I can't \fBrequire()\fR it
+their bins to the global system \fBPATH\fP\|\.)
+.SH I installed something globally, but I can't \fBrequire()\fP it
.P
Install it locally\.
.P
The global install location is a place for command\-line utilities
-to put their bins in the system \fBPATH\fR\|\. It's not for use with \fBrequire()\fR\|\.
+to put their bins in the system \fBPATH\fP\|\. It's not for use with \fBrequire()\fP\|\.
.P
-If you \fBrequire()\fR a module in your code, then that means it's a
+If you \fBrequire()\fP a module in your code, then that means it's a
dependency, and a part of your program\. You need to install it locally
in your program\.
.SH Why can't npm just put everything in one place, like other package managers?
@@ -60,20 +60,20 @@ problems than it solves\.
.P
It is much harder to avoid dependency conflicts without nesting
dependencies\. This is fundamental to the way that npm works, and has
-proven to be an extremely successful approach\. See npm help 5 \fBnpm\-folders\fR for
+proven to be an extremely successful approach\. See npm help 5 \fBnpm\-folders\fP for
more details\.
.P
If you want a package to be installed in one place, and have all your
-programs reference the same copy of it, then use the \fBnpm link\fR command\.
+programs reference the same copy of it, then use the \fBnpm link\fP command\.
That's what it's for\. Install it globally, then link it into each
program that uses it\.
.SH Whatever, I really want the old style 'everything global' style\.
.P
-Write your own package manager\. You could probably even wrap up \fBnpm\fR
+Write your own package manager\. You could probably even wrap up \fBnpm\fP
in a shell script if you really wanted to\.
.P
npm will not help you do something that is known to be a bad idea\.
-.SH Should I check my \fBnode_modules\fR folder into git?
+.SH Should I check my \fBnode_modules\fP folder into git?
.P
Usually, no\. Allow npm to resolve dependencies for your packages\.
.P
@@ -111,23 +111,23 @@ about the (capitalized) NPM program at http://www\.cabiatl\.com/mricro/npm/\|\.
The first seed that eventually grew into this flower was a bash utility
named "pm", which was a shortened descendent of "pkgmakeinst", a
bash function that was used to install various different things on different
-platforms, most often using Yahoo's \fByinst\fR\|\. If \fBnpm\fR was ever an
-acronym for anything, it was \fBnode pm\fR or maybe \fBnew pm\fR\|\.
+platforms, most often using Yahoo's \fByinst\fP\|\. If \fBnpm\fP was ever an
+acronym for anything, it was \fBnode pm\fP or maybe \fBnew pm\fP\|\.
.P
So, in all seriousness, the "npm" project is named after its command\-line
utility, which was organically selected to be easily typed by a right\-handed
programmer using a US QWERTY keyboard layout, ending with the
-right\-ring\-finger in a postition to type the \fB\-\fR key for flags and
+right\-ring\-finger in a position to type the \fB\-\fP key for flags and
other command\-line arguments\. That command\-line utility is always
lower\-case, though it starts most sentences it is a part of\.
.SH How do I list installed packages?
.P
-\fBnpm ls\fR
+\fBnpm ls\fP
.SH How do I search for packages?
.P
-\fBnpm search\fR
+\fBnpm search\fP
.P
-Arguments are greps\. \fBnpm search jsdom\fR shows jsdom packages\.
+Arguments are greps\. \fBnpm search jsdom\fP shows jsdom packages\.
.SH How do I update npm?
.P
.RS 2
@@ -136,12 +136,12 @@ npm install npm \-g
.fi
.RE
.P
-You can also update all outdated local packages by doing \fBnpm update\fR without
-any arguments, or global packages by doing \fBnpm update \-g\fR\|\.
+You can also update all outdated local packages by doing \fBnpm update\fP without
+any arguments, or global packages by doing \fBnpm update \-g\fP\|\.
.P
Occasionally, the version of npm will progress such that the current
version cannot be properly installed with the version that you have
-installed already\. (Consider, if there is ever a bug in the \fBupdate\fR
+installed already\. (Consider, if there is ever a bug in the \fBupdate\fP
command\.)
.P
In those cases, you can do this:
@@ -151,7 +151,7 @@ In those cases, you can do this:
curl https://www\.npmjs\.com/install\.sh | sh
.fi
.RE
-.SH What is a \fBpackage\fR?
+.SH What is a \fBpackage\fP?
.P
A package is:
.RS 0
@@ -162,13 +162,13 @@ b) a gzipped tarball containing (a)
.IP \(bu 2
c) a url that resolves to (b)
.IP \(bu 2
-d) a \fB<name>@<version>\fR that is published on the registry with (c)
+d) a \fB<name>@<version>\fP that is published on the registry with (c)
.IP \(bu 2
-e) a \fB<name>@<tag>\fR that points to (d)
+e) a \fB<name>@<tag>\fP that points to (d)
.IP \(bu 2
-f) a \fB<name>\fR that has a "latest" tag satisfying (e)
+f) a \fB<name>\fP that has a "latest" tag satisfying (e)
.IP \(bu 2
-g) a \fBgit\fR url that, when cloned, results in (a)\.
+g) a \fBgit\fP url that, when cloned, results in (a)\.
.RE
.P
@@ -188,33 +188,33 @@ git+https://user@hostname/project/blah\.git#commit\-ish
.fi
.RE
.P
-The \fBcommit\-ish\fR can be any tag, sha, or branch which can be supplied as
-an argument to \fBgit checkout\fR\|\. The default is \fBmaster\fR\|\.
-.SH What is a \fBmodule\fR?
+The \fBcommit\-ish\fP can be any tag, sha, or branch which can be supplied as
+an argument to \fBgit checkout\fP\|\. The default is \fBmaster\fP\|\.
+.SH What is a \fBmodule\fP?
.P
-A module is anything that can be loaded with \fBrequire()\fR in a Node\.js
+A module is anything that can be loaded with \fBrequire()\fP in a Node\.js
program\. The following things are all examples of things that can be
loaded as modules:
.RS 0
.IP \(bu 2
-A folder with a \fBpackage\.json\fR file containing a \fBmain\fR field\.
+A folder with a \fBpackage\.json\fP file containing a \fBmain\fP field\.
.IP \(bu 2
-A folder with an \fBindex\.js\fR file in it\.
+A folder with an \fBindex\.js\fP file in it\.
.IP \(bu 2
A JavaScript file\.
.RE
.P
Most npm packages are modules, because they are libraries that you
-load with \fBrequire\fR\|\. However, there's no requirement that an npm
+load with \fBrequire\fP\|\. However, there's no requirement that an npm
package be a module! Some only contain an executable command\-line
-interface, and don't provide a \fBmain\fR field for use in Node programs\.
+interface, and don't provide a \fBmain\fP field for use in Node programs\.
.P
Almost all npm packages (at least, those that are Node programs)
\fIcontain\fR many modules within them (because every file they load with
-\fBrequire()\fR is a module)\.
+\fBrequire()\fP is a module)\.
.P
-In the context of a Node program, the \fBmodule\fR is also the thing that
+In the context of a Node program, the \fBmodule\fP is also the thing that
was loaded \fIfrom\fR a file\. For example, in the following program:
.P
.RS 2
@@ -223,25 +223,25 @@ var req = require('request')
.fi
.RE
.P
-we might say that "The variable \fBreq\fR refers to the \fBrequest\fR module"\.
-.SH So, why is it the "\fBnode_modules\fR" folder, but "\fBpackage\.json\fR" file? Why not \fBnode_packages\fR or \fBmodule\.json\fR?
+we might say that "The variable \fBreq\fP refers to the \fBrequest\fP module"\.
+.SH So, why is it the "\fBnode_modules\fP" folder, but "\fBpackage\.json\fP" file? Why not \fBnode_packages\fP or \fBmodule\.json\fP?
.P
-The \fBpackage\.json\fR file defines the package\. (See "What is a
+The \fBpackage\.json\fP file defines the package\. (See "What is a
package?" above\.)
.P
-The \fBnode_modules\fR folder is the place Node\.js looks for modules\.
+The \fBnode_modules\fP folder is the place Node\.js looks for modules\.
(See "What is a module?" above\.)
.P
-For example, if you create a file at \fBnode_modules/foo\.js\fR and then
-had a program that did \fBvar f = require('foo\.js')\fR then it would load
-the module\. However, \fBfoo\.js\fR is not a "package" in this case,
+For example, if you create a file at \fBnode_modules/foo\.js\fP and then
+had a program that did \fBvar f = require('foo\.js')\fP then it would load
+the module\. However, \fBfoo\.js\fP is not a "package" in this case,
because it does not have a package\.json\.
.P
Alternatively, if you create a package which does not have an
-\fBindex\.js\fR or a \fB"main"\fR field in the \fBpackage\.json\fR file, then it is
-not a module\. Even if it's installed in \fBnode_modules\fR, it can't be
-an argument to \fBrequire()\fR\|\.
-.SH \fB"node_modules"\fR is the name of my deity's arch\-rival, and a Forbidden Word in my religion\. Can I configure npm to use a different folder?
+\fBindex\.js\fP or a \fB"main"\fP field in the \fBpackage\.json\fP file, then it is
+not a module\. Even if it's installed in \fBnode_modules\fP, it can't be
+an argument to \fBrequire()\fP\|\.
+.SH \fB"node_modules"\fP is the name of my deity's arch\-rival, and a Forbidden Word in my religion\. Can I configure npm to use a different folder?
.P
No\. This will never happen\. This question comes up sometimes,
because it seems silly from the outside that npm couldn't just be
@@ -249,7 +249,7 @@ configured to put stuff somewhere else, and then npm could load them
from there\. It's an arbitrary spelling choice, right? What's the big
deal?
.P
-At the time of this writing, the string \fB\|'node_modules'\fR appears 151
+At the time of this writing, the string \fB\|'node_modules'\fP appears 151
times in 53 separate files in npm and node core (excluding tests and
documentation)\.
.P
@@ -263,27 +263,27 @@ deference to your deity's delicate feelings regarding spelling\.
Many of the others are in dependencies that npm uses, which are not
necessarily tightly coupled to npm (in the sense that they do not read
npm's configuration files, etc\.) Each of these would have to be
-configured to take the name of the \fBnode_modules\fR folder as a
+configured to take the name of the \fBnode_modules\fP folder as a
parameter\. Complexity hurdle #2\.
.P
Furthermore, npm has the ability to "bundle" dependencies by adding
-the dep names to the \fB"bundledDependencies"\fR list in package\.json,
+the dep names to the \fB"bundledDependencies"\fP list in package\.json,
which causes the folder to be included in the package tarball\. What
if the author of a module bundles its dependencies, and they use a
-different spelling for \fBnode_modules\fR? npm would have to rename the
+different spelling for \fBnode_modules\fP? npm would have to rename the
folder at publish time, and then be smart enough to unpack it using
your locally configured name\. Complexity hurdle #3\.
.P
Furthermore, what happens when you \fIchange\fR this name? Fine, it's
-easy enough the first time, just rename the \fBnode_modules\fR folders to
-\fB\|\./blergyblerp/\fR or whatever name you choose\. But what about when you
+easy enough the first time, just rename the \fBnode_modules\fP folders to
+\fB\|\./blergyblerp/\fP or whatever name you choose\. But what about when you
change it again? npm doesn't currently track any state about past
configuration settings, so this would be rather difficult to do
properly\. It would have to track every previous value for this
config, and always accept any of them, or else yesterday's install may
be broken tomorrow\. Complexity hurdle #4\.
.P
-Never going to happen\. The folder is named \fBnode_modules\fR\|\. It is
+Never going to happen\. The folder is named \fBnode_modules\fP\|\. It is
written indelibly in the Node Way, handed down from the ancient times
of Node 0\.3\.
.SH How do I install node with npm?
@@ -306,6 +306,8 @@ Windows:
.IP \(bu 2
http://github\.com/marcelklehr/nodist
.IP \(bu 2
+https://github\.com/coreybutler/nvm\-windows
+.IP \(bu 2
https://github\.com/hakobera/nvmw
.IP \(bu 2
https://github\.com/nanjingboy/nvmw
@@ -313,12 +315,12 @@ https://github\.com/nanjingboy/nvmw
.RE
.SH How can I use npm for development?
.P
-See npm help 7 \fBnpm\-developers\fR and npm help 5 \fBpackage\.json\fR\|\.
+See npm help 7 \fBnpm\-developers\fP and npm help 5 \fBpackage\.json\fP\|\.
.P
-You'll most likely want to \fBnpm link\fR your development folder\. That's
+You'll most likely want to \fBnpm link\fP your development folder\. That's
awesomely handy\.
.P
-To set up your own private registry, check out npm help 7 \fBnpm\-registry\fR\|\.
+To set up your own private registry, check out npm help 7 \fBnpm\-registry\fP\|\.
.SH Can I list a url as a dependency?
.P
Yes\. It should be a url to a gzipped tarball containing a single folder
@@ -326,10 +328,10 @@ that has a package\.json in its root, or a git url\.
(See "what is a package?" above\.)
.SH How do I symlink to a dev folder so I don't have to keep re\-installing?
.P
-See npm help \fBnpm\-link\fR
+See npm help \fBnpm\-link\fP
.SH The package registry website\. What is that exactly?
.P
-See npm help 7 \fBnpm\-registry\fR\|\.
+See npm help 7 \fBnpm\-registry\fP\|\.
.SH I forgot my password, and can't publish\. How do I reset it?
.P
Go to https://npmjs\.com/forgot\|\.
@@ -351,7 +353,7 @@ on Freenode IRC\.
.SH Why no namespaces?
.P
npm has only one global namespace\. If you want to namespace your own packages,
-you may: simply use the \fB\-\fR character to separate the names or use scoped
+you may: simply use the \fB\-\fP character to separate the names or use scoped
packages\. npm is a mostly anarchic system\. There is not sufficient need to
impose namespace rules on everyone\.
.P
@@ -359,9 +361,9 @@ As of 2\.0, npm supports scoped packages, which allow you to publish a group of
related modules without worrying about name collisions\.
.P
Every npm user owns the scope associated with their username\. For example, the
-user named \fBnpm\fR owns the scope \fB@npm\fR\|\. Scoped packages are published inside a
+user named \fBnpm\fP owns the scope \fB@npm\fP\|\. Scoped packages are published inside a
scope by naming them as if they were files under the scope directory, e\.g\., by
-setting \fBname\fR in \fBpackage\.json\fR to \fB@npm/npm\fR\|\.
+setting \fBname\fP in \fBpackage\.json\fP to \fB@npm/npm\fP\|\.
.P
Scoped packages are supported by the public npm registry\. The npm client is
backwards\-compatible with un\-scoped registries, so it can be used to work with
diff --git a/deps/npm/man/man7/npm-index.7 b/deps/npm/man/man7/npm-index.7
index 413e94b66..b67a68a46 100644
--- a/deps/npm/man/man7/npm-index.7
+++ b/deps/npm/man/man7/npm-index.7
@@ -1,4 +1,4 @@
-.TH "NPM\-INDEX" "7" "June 2015" "" ""
+.TH "NPM\-INDEX" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-index\fR \- Index of all npm documentation
.SS npm help README
@@ -85,6 +85,9 @@ Manage package owners
.SS npm help pack
.P
Create a tarball from a package
+.SS npm help ping
+.P
+Ping npm registry
.SS npm help prefix
.P
Display prefix
@@ -214,6 +217,9 @@ Manage package owners
.SS npm apihelp pack
.P
Create a tarball from a package
+.SS npm apihelp ping
+.P
+Ping npm registry
.SS npm apihelp prefix
.P
Display prefix
diff --git a/deps/npm/man/man7/npm-registry.7 b/deps/npm/man/man7/npm-registry.7
index 7a52caced..1cb4db20a 100644
--- a/deps/npm/man/man7/npm-registry.7
+++ b/deps/npm/man/man7/npm-registry.7
@@ -1,4 +1,4 @@
-.TH "NPM\-REGISTRY" "7" "June 2015" "" ""
+.TH "NPM\-REGISTRY" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-registry\fR \- The JavaScript Package Registry
.SH DESCRIPTION
@@ -17,9 +17,9 @@ http://skimdb\.npmjs\.com/registry\|\. The code for the couchapp is
available at http://github\.com/npm/npm\-registry\-couchapp\|\.
.P
The registry URL used is determined by the scope of the package (see
-npm help 7 \fBnpm\-scope\fR)\. If no scope is specified, the default registry is used, which is
-supplied by the \fBregistry\fR config parameter\. See npm help \fBnpm\-config\fR,
-npm help 5 \fBnpmrc\fR, and npm help 7 \fBnpm\-config\fR for more on managing npm's configuration\.
+npm help 7 \fBnpm\-scope\fP)\. If no scope is specified, the default registry is used, which is
+supplied by the \fBregistry\fP config parameter\. See npm help \fBnpm\-config\fP,
+npm help 5 \fBnpmrc\fP, and npm help 7 \fBnpm\-config\fP for more on managing npm's configuration\.
.SH Can I run my own private registry?
.P
Yes!
@@ -33,15 +33,15 @@ to read any published packages, in addition to your private ones, and by
default will only publish internally\.
.P
If you then want to publish a package for the whole world to see, you can
-simply override the \fB\-\-registry\fR option for that \fBpublish\fR command\.
+simply override the \fB\-\-registry\fP option for that \fBpublish\fP command\.
.SH I don't want my package published in the official registry\. It's private\.
.P
-Set \fB"private": true\fR in your package\.json to prevent it from being
+Set \fB"private": true\fP in your package\.json to prevent it from being
published at all, or
-\fB"publishConfig":{"registry":"http://my\-internal\-registry\.local"}\fR
+\fB"publishConfig":{"registry":"http://my\-internal\-registry\.local"}\fP
to force it to be published only to your internal registry\.
.P
-See npm help 5 \fBpackage\.json\fR for more info on what goes in the package\.json file\.
+See npm help 5 \fBpackage\.json\fP for more info on what goes in the package\.json file\.
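.P
A hedged sketch of the override described above, assuming a private registry is already configured as the default and using the well\-known public registry URL:
.RS 2
.nf
# routine publishes go to the configured internal registry
npm publish

# one\-off public release, overriding the registry for this command only
npm publish \-\-registry=https://registry\.npmjs\.org/
.fi
.RE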
.SH Will you replicate from my registry into the public one?
.P
No\. If you want things to be public, then publish them into the public
diff --git a/deps/npm/man/man7/npm-scope.7 b/deps/npm/man/man7/npm-scope.7
index 29586c7a7..4bfe58c7f 100644
--- a/deps/npm/man/man7/npm-scope.7
+++ b/deps/npm/man/man7/npm-scope.7
@@ -1,4 +1,4 @@
-.TH "NPM\-SCOPE" "7" "June 2015" "" ""
+.TH "NPM\-SCOPE" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-scope\fR \- Scoped packages
.SH DESCRIPTION
@@ -23,13 +23,13 @@ used to work with scoped and un\-scoped registries at the same time\.
.SH Installing scoped packages
.P
Scoped packages are installed to a sub\-folder of the regular installation
-folder, e\.g\. if your other packages are installed in \fBnode_modules/packagename\fR,
-scoped modules will be in \fBnode_modules/@myorg/packagename\fR\|\. The scope folder
-(\fB@myorg\fR) is simply the name of the scope preceded by an @\-symbol, and can
+folder, e\.g\. if your other packages are installed in \fBnode_modules/packagename\fP,
+scoped modules will be in \fBnode_modules/@myorg/packagename\fP\|\. The scope folder
+(\fB@myorg\fP) is simply the name of the scope preceded by an @\-symbol, and can
contain any number of scoped packages\.
.P
A scoped package is installed by referencing it by name, preceded by an
-@\-symbol, in \fBnpm install\fR:
+@\-symbol, in \fBnpm install\fP:
.P
.RS 2
.nf
@@ -37,7 +37,7 @@ npm install @myorg/mypackage
.fi
.RE
.P
-Or in \fBpackage\.json\fR:
+Or in \fBpackage\.json\fP:
.P
.RS 2
.nf
@@ -48,7 +48,7 @@ Or in \fBpackage\.json\fR:
.RE
.P
Note that if the @\-symbol is omitted in either case npm will instead attempt to
-install from GitHub; see npm help \fBnpm\-install\fR\|\.
+install from GitHub; see npm help \fBnpm\-install\fP\|\.
.SH Requiring scoped packages
.P
Because scoped packages are installed into a scope folder, you have to
@@ -61,7 +61,7 @@ require('@myorg/mypackage')
.RE
.P
There is nothing special about the way Node treats scope folders, this is
-just specifying to require the module \fBmypackage\fR in the folder called \fB@myorg\fR\|\.
+just specifying to require the module \fBmypackage\fP in the folder called \fB@myorg\fP\|\.
.SH Publishing scoped packages
.P
Scoped packages can be published to any registry that supports them, including
@@ -72,19 +72,19 @@ the public npm registry\.
If you wish, you may associate a scope with a registry; see below\.
.SS Publishing public scoped packages to the public npm registry
.P
-To publish a public scoped package, you must specify \fB\-\-access public\fR with
+To publish a public scoped package, you must specify \fB\-\-access public\fP with
the initial publication\. This will publish the package and set access
-to \fBpublic\fR as if you had run \fBnpm access public\fR after publishing\.
+to \fBpublic\fP as if you had run \fBnpm access public\fP after publishing\.
.SS Publishing private scoped packages to the npm registry
.P
To publish a private scoped package to the npm registry, you must have
an npm Private Modules \fIhttps://www\.npmjs\.com/private\-modules\fR
account\.
.P
-You can then publish the module with \fBnpm publish\fR or \fBnpm publish
-\-\-access restricted\fR, and it will be present in the npm registry, with
+You can then publish the module with \fBnpm publish\fP or \fBnpm publish
+\-\-access restricted\fP, and it will be present in the npm registry, with
restricted access\. You can then change the access permissions, if
-desired, with \fBnpm access\fR or on the npmjs\.com website\.
+desired, with \fBnpm access\fP or on the npmjs\.com website\.
.SH Associating a scope with a registry
.P
Scopes can be associated with a separate registry\. This allows you to
@@ -102,7 +102,7 @@ npm login \-\-registry=http://reg\.example\.com \-\-scope=@myco
Scopes have a many\-to\-one relationship with registries: one registry can
host multiple scopes, but a scope only ever points to one registry\.
.P
-You can also associate a scope with a registry using \fBnpm config\fR:
+You can also associate a scope with a registry using \fBnpm config\fP:
.P
.RS 2
.nf
@@ -110,9 +110,9 @@ npm config set @myco:registry http://reg\.example\.com
.fi
.RE
.P
-Once a scope is associated with a registry, any \fBnpm install\fR for a package
+Once a scope is associated with a registry, any \fBnpm install\fP for a package
with that scope will request packages from that registry instead\. Any
-\fBnpm publish\fR for a package name that contains the scope will be published to
+\fBnpm publish\fP for a package name that contains the scope will be published to
that registry instead\.
.SH SEE ALSO
.RS 0
diff --git a/deps/npm/man/man7/npm-scripts.7 b/deps/npm/man/man7/npm-scripts.7
index cdd740396..dd58324d8 100644
--- a/deps/npm/man/man7/npm-scripts.7
+++ b/deps/npm/man/man7/npm-scripts.7
@@ -1,4 +1,4 @@
-.TH "NPM\-SCRIPTS" "7" "June 2015" "" ""
+.TH "NPM\-SCRIPTS" "7" "August 2015" "" ""
.SH "NAME"
\fBnpm-scripts\fR \- How npm handles the "scripts" field
.SH DESCRIPTION
@@ -9,7 +9,7 @@ following scripts:
.IP \(bu 2
prepublish:
Run BEFORE the package is published\. (Also run on local \fBnpm
-install\fR without any arguments\.)
+install\fP without any arguments\.)
.IP \(bu 2
publish, postpublish:
Run AFTER the package is published\.
@@ -33,29 +33,29 @@ postversion:
Run AFTER bump the package version\.
.IP \(bu 2
pretest, test, posttest:
-Run by the \fBnpm test\fR command\.
+Run by the \fBnpm test\fP command\.
.IP \(bu 2
prestop, stop, poststop:
-Run by the \fBnpm stop\fR command\.
+Run by the \fBnpm stop\fP command\.
.IP \(bu 2
prestart, start, poststart:
-Run by the \fBnpm start\fR command\.
+Run by the \fBnpm start\fP command\.
.IP \(bu 2
prerestart, restart, postrestart:
-Run by the \fBnpm restart\fR command\. Note: \fBnpm restart\fR will run the
-stop and start scripts if no \fBrestart\fR script is provided\.
+Run by the \fBnpm restart\fP command\. Note: \fBnpm restart\fP will run the
+stop and start scripts if no \fBrestart\fP script is provided\.
.RE
.P
Additionally, arbitrary scripts can be executed by running \fBnpm
-run\-script <pkg> <stage>\fR\|\. \fIPre\fR and \fIpost\fR commands with matching
-names will be run for those as well (e\.g\. \fBpremyscript\fR, \fBmyscript\fR,
-\fBpostmyscript\fR)\.
+run\-script <pkg> <stage>\fP\|\. \fIPre\fR and \fIpost\fR commands with matching
+names will be run for those as well (e\.g\. \fBpremyscript\fP, \fBmyscript\fP,
+\fBpostmyscript\fP)\.
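.P
A short sketch, assuming a hypothetical script named "myscript" (with matching pre/post entries) defined in the scripts section of package\.json:
.RS 2
.nf
npm run\-script myscript
# runs premyscript, then myscript, then postmyscript, in that order
.fi
.RE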
.SH COMMON USES
.P
If you need to perform operations on your package before it is used, in a way
that is not dependent on the operating system or architecture of the
-target system, use a \fBprepublish\fR script\. This includes
+target system, use a \fBprepublish\fP script\. This includes
tasks such as:
.RS 0
.IP \(bu 2
@@ -67,18 +67,18 @@ Fetching remote resources that your package will use\.
.RE
.P
-The advantage of doing these things at \fBprepublish\fR time is that they can be done once, in a
+The advantage of doing these things at \fBprepublish\fP time is that they can be done once, in a
single place, thus reducing complexity and variability\.
Additionally, this means that:
.RS 0
.IP \(bu 2
-You can depend on \fBcoffee\-script\fR as a \fBdevDependency\fR, and thus
+You can depend on \fBcoffee\-script\fP as a \fBdevDependency\fP, and thus
your users don't need to have it installed\.
.IP \(bu 2
You don't need to include minifiers in your package, reducing
the size for your users\.
.IP \(bu 2
-You don't need to rely on your users having \fBcurl\fR or \fBwget\fR or
+You don't need to rely on your users having \fBcurl\fP or \fBwget\fP or
other system tools on the target machines\.
.RE
@@ -87,20 +87,20 @@ other system tools on the target machines\.
npm will default some script values based on package contents\.
.RS 0
.IP \(bu 2
-\fB"start": "node server\.js"\fR:
-If there is a \fBserver\.js\fR file in the root of your package, then npm
-will default the \fBstart\fR command to \fBnode server\.js\fR\|\.
+\fB"start": "node server\.js"\fP:
+If there is a \fBserver\.js\fP file in the root of your package, then npm
+will default the \fBstart\fP command to \fBnode server\.js\fP\|\.
.IP \(bu 2
-\fB"preinstall": "node\-waf clean || true; node\-waf configure build"\fR:
-If there is a \fBwscript\fR file in the root of your package, npm will
-default the \fBpreinstall\fR command to compile using node\-waf\.
+\fB"preinstall": "node\-waf clean || true; node\-waf configure build"\fP:
+If there is a \fBwscript\fP file in the root of your package, npm will
+default the \fBpreinstall\fP command to compile using node\-waf\.
.RE
.SH USER
.P
If npm was invoked with root privileges, then it will change the uid
-to the user account or uid specified by the \fBuser\fR config, which
-defaults to \fBnobody\fR\|\. Set the \fBunsafe\-perm\fR flag to run scripts with
+to the user account or uid specified by the \fBuser\fP config, which
+defaults to \fBnobody\fP\|\. Set the \fBunsafe\-perm\fP flag to run scripts with
root privileges\.
.SH ENVIRONMENT
.P
@@ -110,7 +110,7 @@ the process\.
.SS path
.P
If you depend on modules that define executable scripts, like test
-suites, then those executables will be added to the \fBPATH\fR for
+suites, then those executables will be added to the \fBPATH\fP for
executing the scripts\. So, if your package\.json has this:
.P
.RS 2
@@ -121,24 +121,24 @@ executing the scripts\. So, if your package\.json has this:
.fi
.RE
.P
-then you could run \fBnpm start\fR to execute the \fBbar\fR script, which is
-exported into the \fBnode_modules/\.bin\fR directory on \fBnpm install\fR\|\.
+then you could run \fBnpm start\fP to execute the \fBbar\fP script, which is
+exported into the \fBnode_modules/\.bin\fP directory on \fBnpm install\fP\|\.
.SS package\.json vars
.P
-The package\.json fields are tacked onto the \fBnpm_package_\fR prefix\. So,
-for instance, if you had \fB{"name":"foo", "version":"1\.2\.5"}\fR in your
+The package\.json fields are tacked onto the \fBnpm_package_\fP prefix\. So,
+for instance, if you had \fB{"name":"foo", "version":"1\.2\.5"}\fP in your
package\.json file, then your package scripts would have the
-\fBnpm_package_name\fR environment variable set to "foo", and the
-\fBnpm_package_version\fR set to "1\.2\.5"
+\fBnpm_package_name\fP environment variable set to "foo", and the
+\fBnpm_package_version\fP set to "1\.2\.5"
.SS configuration
.P
Configuration parameters are put in the environment with the
-\fBnpm_config_\fR prefix\. For instance, you can view the effective \fBroot\fR
-config by checking the \fBnpm_config_root\fR environment variable\.
+\fBnpm_config_\fP prefix\. For instance, you can view the effective \fBroot\fP
+config by checking the \fBnpm_config_root\fP environment variable\.
.SS Special: package\.json "config" object
.P
The package\.json "config" keys are overwritten in the environment if
-there is a config param of \fB<name>[@<version>]:<key>\fR\|\. For example,
+there is a config param of \fB<name>[@<version>]:<key>\fP\|\. For example,
if the package\.json has this:
.P
.RS 2
@@ -166,13 +166,13 @@ npm config set foo:port 80
.RE
.SS current lifecycle event
.P
-Lastly, the \fBnpm_lifecycle_event\fR environment variable is set to
+Lastly, the \fBnpm_lifecycle_event\fP environment variable is set to
whichever stage of the cycle is being executed\. So, you could have a
single script used for different parts of the process which switches
based on what's currently happening\.
.P
Objects are flattened following this format, so if you had
-\fB{"scripts":{"install":"foo\.js"}}\fR in your package\.json, then you'd
+\fB{"scripts":{"install":"foo\.js"}}\fP in your package\.json, then you'd
see this in the script:
.P
.RS 2
@@ -195,11 +195,11 @@ For example, if your package\.json contains this:
.fi
.RE
.P
-then the \fBscripts/install\.js\fR will be called for the install,
-post\-install, stages of the lifecycle, and the \fBscripts/uninstall\.js\fR
+then the \fBscripts/install\.js\fP will be called for the install,
+post\-install, stages of the lifecycle, and the \fBscripts/uninstall\.js\fP
would be called when the package is uninstalled\. Since
-\fBscripts/install\.js\fR is running for three different phases, it would
-be wise in this case to look at the \fBnpm_lifecycle_event\fR environment
+\fBscripts/install\.js\fP is running for three different phases, it would
+be wise in this case to look at the \fBnpm_lifecycle_event\fP environment
variable\.
.P
If you want to run a make command, you can do so\. This works just
@@ -217,7 +217,7 @@ fine:
.RE
.SH EXITING
.P
-Scripts are run by passing the line as a script argument to \fBsh\fR\|\.
+Scripts are run by passing the line as a script argument to \fBsh\fP\|\.
.P
If the script exits with a code other than 0, then this will abort the
process\.
@@ -230,7 +230,7 @@ file\.
If you want to run a specific script at a specific lifecycle event for
ALL packages, then you can use a hook script\.
.P
-Place an executable file at \fBnode_modules/\.hooks/{eventname}\fR, and
+Place an executable file at \fBnode_modules/\.hooks/{eventname}\fP, and
it'll get run for all packages when they are going through that point
in the package lifecycle for any packages installed in that root\.
.P
@@ -247,23 +247,23 @@ only will prevent some optional features, then it's better to just
print a warning and exit successfully\.
.IP \(bu 2
Try not to use scripts to do what npm can do for you\. Read through
-npm help 5 \fBpackage\.json\fR to see all the things that you can specify and enable
+npm help 5 \fBpackage\.json\fP to see all the things that you can specify and enable
by simply describing your package appropriately\. In general, this
will lead to a more robust and consistent state\.
.IP \(bu 2
Inspect the env to determine where to put things\. For instance, if
-the \fBnpm_config_binroot\fR environ is set to \fB/home/user/bin\fR, then
-don't try to install executables into \fB/usr/local/bin\fR\|\. The user
+the \fBnpm_config_binroot\fP environ is set to \fB/home/user/bin\fP, then
+don't try to install executables into \fB/usr/local/bin\fP\|\. The user
probably set it up that way for a reason\.
.IP \(bu 2
Don't prefix your script commands with "sudo"\. If root permissions
are required for some reason, then it'll fail with that error, and
the user will sudo the npm command in question\.
.IP \(bu 2
-Don't use \fBinstall\fR\|\. Use a \fB\|\.gyp\fR file for compilation, and \fBprepublish\fR
+Don't use \fBinstall\fP\|\. Use a \fB\|\.gyp\fP file for compilation, and \fBprepublish\fP
for anything else\. You should almost never have to explicitly set a
preinstall or install script\. If you are doing this, please consider if
-there is another option\. The only valid use of \fBinstall\fR or \fBpreinstall\fR
+there is another option\. The only valid use of \fBinstall\fP or \fBpreinstall\fP
scripts is for compilation which must be done on the target architecture\.
.RE
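
The scripts man page above describes how npm exposes package fields, config values, and the current lifecycle stage to scripts through `npm_package_*`, `npm_config_*`, and `npm_lifecycle_event`. As a rough illustration only (not part of the patch), a single lifecycle script shared by several stages might branch on those variables like this; the file name `scripts/install.js` simply mirrors the example in the man page:

```javascript
// Hypothetical scripts/install.js, shared by several lifecycle stages.
// npm sets these variables before running the script through sh.
var stage = process.env.npm_lifecycle_event    // e.g. "preinstall", "install", "postinstall"
var name = process.env.npm_package_name        // from the package.json "name" field
var version = process.env.npm_package_version  // from the package.json "version" field

console.log('running %s for %s@%s', stage, name, version)

if (stage === 'postinstall') {
  // work that should only happen after the package is installed
} else {
  // work shared by the other stages this script is wired up to
}
```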
diff --git a/deps/npm/man/man7/removing-npm.7 b/deps/npm/man/man7/removing-npm.7
index 61426f8b9..ff6e7507d 100644
--- a/deps/npm/man/man7/removing-npm.7
+++ b/deps/npm/man/man7/removing-npm.7
@@ -1,4 +1,4 @@
-.TH "NPM\-REMOVAL" "1" "June 2015" "" ""
+.TH "NPM\-REMOVAL" "1" "August 2015" "" ""
.SH "NAME"
\fBnpm-removal\fR \- Cleaning the Slate
.SH SYNOPSIS
@@ -27,14 +27,14 @@ If that doesn't work, or if you require more drastic measures,
continue reading\.
.P
Note that this is only necessary for globally\-installed packages\. Local
-installs are completely contained within a project's \fBnode_modules\fR
+installs are completely contained within a project's \fBnode_modules\fP
folder\. Delete that folder, and everything is gone (unless a package's
install script is particularly ill\-behaved)\.
.P
This assumes that you installed node and npm in the default place\. If
-you configured node with a different \fB\-\-prefix\fR, or installed npm with a
+you configured node with a different \fB\-\-prefix\fP, or installed npm with a
different prefix setting, then adjust the paths accordingly, replacing
-\fB/usr/local\fR with your install prefix\.
+\fB/usr/local\fP with your install prefix\.
.P
To remove everything npm\-related manually:
.P
diff --git a/deps/npm/man/man7/semver.7 b/deps/npm/man/man7/semver.7
index cac3ad617..24ce48b8f 100644
--- a/deps/npm/man/man7/semver.7
+++ b/deps/npm/man/man7/semver.7
@@ -1,4 +1,4 @@
-.TH "SEMVER" "7" "June 2015" "" ""
+.TH "SEMVER" "7" "August 2015" "" ""
.SH "NAME"
\fBsemver\fR \- The semantic versioner for npm
.SH Usage
@@ -41,64 +41,64 @@ multiple versions to the utility will just sort them\.
.RE
.SH Versions
.P
-A "version" is described by the \fBv2\.0\.0\fR specification found at
+A "version" is described by the \fBv2\.0\.0\fP specification found at
http://semver\.org/\|\.
.P
-A leading \fB"="\fR or \fB"v"\fR character is stripped off and ignored\.
+A leading \fB"="\fP or \fB"v"\fP character is stripped off and ignored\.
.SH Ranges
.P
-A \fBversion range\fR is a set of \fBcomparators\fR which specify versions
+A \fBversion range\fP is a set of \fBcomparators\fP which specify versions
that satisfy the range\.
.P
-A \fBcomparator\fR is composed of an \fBoperator\fR and a \fBversion\fR\|\. The set
-of primitive \fBoperators\fR is:
+A \fBcomparator\fP is composed of an \fBoperator\fP and a \fBversion\fP\|\. The set
+of primitive \fBoperators\fP is:
.RS 0
.IP \(bu 2
-\fB<\fR Less than
+\fB<\fP Less than
.IP \(bu 2
-\fB<=\fR Less than or equal to
+\fB<=\fP Less than or equal to
.IP \(bu 2
-\fB>\fR Greater than
+\fB>\fP Greater than
.IP \(bu 2
-\fB>=\fR Greater than or equal to
+\fB>=\fP Greater than or equal to
.IP \(bu 2
-\fB=\fR Equal\. If no operator is specified, then equality is assumed,
+\fB=\fP Equal\. If no operator is specified, then equality is assumed,
so this operator is optional, but MAY be included\.
.RE
.P
-For example, the comparator \fB>=1\.2\.7\fR would match the versions
-\fB1\.2\.7\fR, \fB1\.2\.8\fR, \fB2\.5\.3\fR, and \fB1\.3\.9\fR, but not the versions \fB1\.2\.6\fR
-or \fB1\.1\.0\fR\|\.
+For example, the comparator \fB>=1\.2\.7\fP would match the versions
+\fB1\.2\.7\fP, \fB1\.2\.8\fP, \fB2\.5\.3\fP, and \fB1\.3\.9\fP, but not the versions \fB1\.2\.6\fP
+or \fB1\.1\.0\fP\|\.
.P
-Comparators can be joined by whitespace to form a \fBcomparator set\fR,
+Comparators can be joined by whitespace to form a \fBcomparator set\fP,
which is satisfied by the \fBintersection\fR of all of the comparators
it includes\.
.P
-A range is composed of one or more comparator sets, joined by \fB||\fR\|\. A
+A range is composed of one or more comparator sets, joined by \fB||\fP\|\. A
version matches a range if and only if every comparator in at least
-one of the \fB||\fR\-separated comparator sets is satisfied by the version\.
+one of the \fB||\fP\-separated comparator sets is satisfied by the version\.
.P
-For example, the range \fB>=1\.2\.7 <1\.3\.0\fR would match the versions
-\fB1\.2\.7\fR, \fB1\.2\.8\fR, and \fB1\.2\.99\fR, but not the versions \fB1\.2\.6\fR, \fB1\.3\.0\fR,
-or \fB1\.1\.0\fR\|\.
+For example, the range \fB>=1\.2\.7 <1\.3\.0\fP would match the versions
+\fB1\.2\.7\fP, \fB1\.2\.8\fP, and \fB1\.2\.99\fP, but not the versions \fB1\.2\.6\fP, \fB1\.3\.0\fP,
+or \fB1\.1\.0\fP\|\.
.P
-The range \fB1\.2\.7 || >=1\.2\.9 <2\.0\.0\fR would match the versions \fB1\.2\.7\fR,
-\fB1\.2\.9\fR, and \fB1\.4\.6\fR, but not the versions \fB1\.2\.8\fR or \fB2\.0\.0\fR\|\.
+The range \fB1\.2\.7 || >=1\.2\.9 <2\.0\.0\fP would match the versions \fB1\.2\.7\fP,
+\fB1\.2\.9\fP, and \fB1\.4\.6\fP, but not the versions \fB1\.2\.8\fP or \fB2\.0\.0\fP\|\.
.SS Prerelease Tags
.P
-If a version has a prerelease tag (for example, \fB1\.2\.3\-alpha\.3\fR) then
+If a version has a prerelease tag (for example, \fB1\.2\.3\-alpha\.3\fP) then
it will only be allowed to satisfy comparator sets if at least one
-comparator with the same \fB[major, minor, patch]\fR tuple also has a
+comparator with the same \fB[major, minor, patch]\fP tuple also has a
prerelease tag\.
.P
-For example, the range \fB>1\.2\.3\-alpha\.3\fR would be allowed to match the
-version \fB1\.2\.3\-alpha\.7\fR, but it would \fInot\fR be satisfied by
-\fB3\.4\.5\-alpha\.9\fR, even though \fB3\.4\.5\-alpha\.9\fR is technically "greater
-than" \fB1\.2\.3\-alpha\.3\fR according to the SemVer sort rules\. The version
-range only accepts prerelease tags on the \fB1\.2\.3\fR version\. The
-version \fB3\.4\.5\fR \fIwould\fR satisfy the range, because it does not have a
-prerelease flag, and \fB3\.4\.5\fR is greater than \fB1\.2\.3\-alpha\.7\fR\|\.
+For example, the range \fB>1\.2\.3\-alpha\.3\fP would be allowed to match the
+version \fB1\.2\.3\-alpha\.7\fP, but it would \fInot\fR be satisfied by
+\fB3\.4\.5\-alpha\.9\fP, even though \fB3\.4\.5\-alpha\.9\fP is technically "greater
+than" \fB1\.2\.3\-alpha\.3\fP according to the SemVer sort rules\. The version
+range only accepts prerelease tags on the \fB1\.2\.3\fP version\. The
+version \fB3\.4\.5\fP \fIwould\fR satisfy the range, because it does not have a
+prerelease flag, and \fB3\.4\.5\fP is greater than \fB1\.2\.3\-alpha\.7\fP\|\.
.P
The purpose for this behavior is twofold\. First, prerelease versions
frequently are updated very quickly, and contain many breaking changes
@@ -114,7 +114,7 @@ is still not appropriate to assume that they have opted into taking a
similar risk on the \fInext\fR set of prerelease versions\.
.SS Prerelease Identifiers
.P
-The method \fB\|\.inc\fR takes an additional \fBidentifier\fR string argument that
+The method \fB\|\.inc\fP takes an additional \fBidentifier\fP string argument that
will append the value of the string as a prerelease identifier:
.P
.RS 2
@@ -147,13 +147,13 @@ Advanced range syntax desugars to primitive comparators in
deterministic ways\.
.P
Advanced ranges may be combined in the same way as primitive
-comparators using white space or \fB||\fR\|\.
-.SS Hyphen Ranges \fBX\.Y\.Z \- A\.B\.C\fR
+comparators using white space or \fB||\fP\|\.
+.SS Hyphen Ranges \fBX\.Y\.Z \- A\.B\.C\fP
.P
Specifies an inclusive set\.
.RS 0
.IP \(bu 2
-\fB1\.2\.3 \- 2\.3\.4\fR := \fB>=1\.2\.3 <=2\.3\.4\fR
+\fB1\.2\.3 \- 2\.3\.4\fP := \fB>=1\.2\.3 <=2\.3\.4\fP
.RE
.P
@@ -161,7 +161,7 @@ If a partial version is provided as the first version in the inclusive
range, then the missing pieces are replaced with zeroes\.
.RS 0
.IP \(bu 2
-\fB1\.2 \- 2\.3\.4\fR := \fB>=1\.2\.0 <=2\.3\.4\fR
+\fB1\.2 \- 2\.3\.4\fP := \fB>=1\.2\.0 <=2\.3\.4\fP
.RE
.P
@@ -171,22 +171,22 @@ of the tuple are accepted, but nothing that would be greater than the
provided tuple parts\.
.RS 0
.IP \(bu 2
-\fB1\.2\.3 \- 2\.3\fR := \fB>=1\.2\.3 <2\.4\.0\fR
+\fB1\.2\.3 \- 2\.3\fP := \fB>=1\.2\.3 <2\.4\.0\fP
.IP \(bu 2
-\fB1\.2\.3 \- 2\fR := \fB>=1\.2\.3 <3\.0\.0\fR
+\fB1\.2\.3 \- 2\fP := \fB>=1\.2\.3 <3\.0\.0\fP
.RE
-.SS X\-Ranges \fB1\.2\.x\fR \fB1\.X\fR \fB1\.2\.*\fR \fB*\fR
+.SS X\-Ranges \fB1\.2\.x\fP \fB1\.X\fP \fB1\.2\.*\fP \fB*\fP
.P
-Any of \fBX\fR, \fBx\fR, or \fB*\fR may be used to "stand in" for one of the
-numeric values in the \fB[major, minor, patch]\fR tuple\.
+Any of \fBX\fP, \fBx\fP, or \fB*\fP may be used to "stand in" for one of the
+numeric values in the \fB[major, minor, patch]\fP tuple\.
.RS 0
.IP \(bu 2
-\fB*\fR := \fB>=0\.0\.0\fR (Any version satisfies)
+\fB*\fP := \fB>=0\.0\.0\fP (Any version satisfies)
.IP \(bu 2
-\fB1\.x\fR := \fB>=1\.0\.0 <2\.0\.0\fR (Matching major version)
+\fB1\.x\fP := \fB>=1\.0\.0 <2\.0\.0\fP (Matching major version)
.IP \(bu 2
-\fB1\.2\.x\fR := \fB>=1\.2\.0 <1\.3\.0\fR (Matching major and minor versions)
+\fB1\.2\.x\fP := \fB>=1\.2\.0 <1\.3\.0\fP (Matching major and minor versions)
.RE
.P
@@ -194,99 +194,99 @@ A partial version range is treated as an X\-Range, so the special
character is in fact optional\.
.RS 0
.IP \(bu 2
-\fB""\fR (empty string) := \fB*\fR := \fB>=0\.0\.0\fR
+\fB""\fP (empty string) := \fB*\fP := \fB>=0\.0\.0\fP
.IP \(bu 2
-\fB1\fR := \fB1\.x\.x\fR := \fB>=1\.0\.0 <2\.0\.0\fR
+\fB1\fP := \fB1\.x\.x\fP := \fB>=1\.0\.0 <2\.0\.0\fP
.IP \(bu 2
-\fB1\.2\fR := \fB1\.2\.x\fR := \fB>=1\.2\.0 <1\.3\.0\fR
+\fB1\.2\fP := \fB1\.2\.x\fP := \fB>=1\.2\.0 <1\.3\.0\fP
.RE
-.SS Tilde Ranges \fB~1\.2\.3\fR \fB~1\.2\fR \fB~1\fR
+.SS Tilde Ranges \fB~1\.2\.3\fP \fB~1\.2\fP \fB~1\fP
.P
Allows patch\-level changes if a minor version is specified on the
comparator\. Allows minor\-level changes if not\.
.RS 0
.IP \(bu 2
-\fB~1\.2\.3\fR := \fB>=1\.2\.3 <1\.(2+1)\.0\fR := \fB>=1\.2\.3 <1\.3\.0\fR
+\fB~1\.2\.3\fP := \fB>=1\.2\.3 <1\.(2+1)\.0\fP := \fB>=1\.2\.3 <1\.3\.0\fP
.IP \(bu 2
-\fB~1\.2\fR := \fB>=1\.2\.0 <1\.(2+1)\.0\fR := \fB>=1\.2\.0 <1\.3\.0\fR (Same as \fB1\.2\.x\fR)
+\fB~1\.2\fP := \fB>=1\.2\.0 <1\.(2+1)\.0\fP := \fB>=1\.2\.0 <1\.3\.0\fP (Same as \fB1\.2\.x\fP)
.IP \(bu 2
-\fB~1\fR := \fB>=1\.0\.0 <(1+1)\.0\.0\fR := \fB>=1\.0\.0 <2\.0\.0\fR (Same as \fB1\.x\fR)
+\fB~1\fP := \fB>=1\.0\.0 <(1+1)\.0\.0\fP := \fB>=1\.0\.0 <2\.0\.0\fP (Same as \fB1\.x\fP)
.IP \(bu 2
-\fB~0\.2\.3\fR := \fB>=0\.2\.3 <0\.(2+1)\.0\fR := \fB>=0\.2\.3 <0\.3\.0\fR
+\fB~0\.2\.3\fP := \fB>=0\.2\.3 <0\.(2+1)\.0\fP := \fB>=0\.2\.3 <0\.3\.0\fP
.IP \(bu 2
-\fB~0\.2\fR := \fB>=0\.2\.0 <0\.(2+1)\.0\fR := \fB>=0\.2\.0 <0\.3\.0\fR (Same as \fB0\.2\.x\fR)
+\fB~0\.2\fP := \fB>=0\.2\.0 <0\.(2+1)\.0\fP := \fB>=0\.2\.0 <0\.3\.0\fP (Same as \fB0\.2\.x\fP)
.IP \(bu 2
-\fB~0\fR := \fB>=0\.0\.0 <(0+1)\.0\.0\fR := \fB>=0\.0\.0 <1\.0\.0\fR (Same as \fB0\.x\fR)
+\fB~0\fP := \fB>=0\.0\.0 <(0+1)\.0\.0\fP := \fB>=0\.0\.0 <1\.0\.0\fP (Same as \fB0\.x\fP)
.IP \(bu 2
-\fB~1\.2\.3\-beta\.2\fR := \fB>=1\.2\.3\-beta\.2 <1\.3\.0\fR Note that prereleases in
-the \fB1\.2\.3\fR version will be allowed, if they are greater than or
-equal to \fBbeta\.2\fR\|\. So, \fB1\.2\.3\-beta\.4\fR would be allowed, but
-\fB1\.2\.4\-beta\.2\fR would not, because it is a prerelease of a
-different \fB[major, minor, patch]\fR tuple\.
+\fB~1\.2\.3\-beta\.2\fP := \fB>=1\.2\.3\-beta\.2 <1\.3\.0\fP Note that prereleases in
+the \fB1\.2\.3\fP version will be allowed, if they are greater than or
+equal to \fBbeta\.2\fP\|\. So, \fB1\.2\.3\-beta\.4\fP would be allowed, but
+\fB1\.2\.4\-beta\.2\fP would not, because it is a prerelease of a
+different \fB[major, minor, patch]\fP tuple\.
.RE
-.SS Caret Ranges \fB^1\.2\.3\fR \fB^0\.2\.5\fR \fB^0\.0\.4\fR
+.SS Caret Ranges \fB^1\.2\.3\fP \fB^0\.2\.5\fP \fB^0\.0\.4\fP
.P
Allows changes that do not modify the left\-most non\-zero digit in the
-\fB[major, minor, patch]\fR tuple\. In other words, this allows patch and
-minor updates for versions \fB1\.0\.0\fR and above, patch updates for
-versions \fB0\.X >=0\.1\.0\fR, and \fIno\fR updates for versions \fB0\.0\.X\fR\|\.
+\fB[major, minor, patch]\fP tuple\. In other words, this allows patch and
+minor updates for versions \fB1\.0\.0\fP and above, patch updates for
+versions \fB0\.X >=0\.1\.0\fP, and \fIno\fR updates for versions \fB0\.0\.X\fP\|\.
.P
-Many authors treat a \fB0\.x\fR version as if the \fBx\fR were the major
+Many authors treat a \fB0\.x\fP version as if the \fBx\fP were the major
"breaking\-change" indicator\.
.P
Caret ranges are ideal when an author may make breaking changes
-between \fB0\.2\.4\fR and \fB0\.3\.0\fR releases, which is a common practice\.
+between \fB0\.2\.4\fP and \fB0\.3\.0\fP releases, which is a common practice\.
However, it presumes that there will \fInot\fR be breaking changes between
-\fB0\.2\.4\fR and \fB0\.2\.5\fR\|\. It allows for changes that are presumed to be
+\fB0\.2\.4\fP and \fB0\.2\.5\fP\|\. It allows for changes that are presumed to be
additive (but non\-breaking), according to commonly observed practices\.
.RS 0
.IP \(bu 2
-\fB^1\.2\.3\fR := \fB>=1\.2\.3 <2\.0\.0\fR
+\fB^1\.2\.3\fP := \fB>=1\.2\.3 <2\.0\.0\fP
.IP \(bu 2
-\fB^0\.2\.3\fR := \fB>=0\.2\.3 <0\.3\.0\fR
+\fB^0\.2\.3\fP := \fB>=0\.2\.3 <0\.3\.0\fP
.IP \(bu 2
-\fB^0\.0\.3\fR := \fB>=0\.0\.3 <0\.0\.4\fR
+\fB^0\.0\.3\fP := \fB>=0\.0\.3 <0\.0\.4\fP
.IP \(bu 2
-\fB^1\.2\.3\-beta\.2\fR := \fB>=1\.2\.3\-beta\.2 <2\.0\.0\fR Note that prereleases in
-the \fB1\.2\.3\fR version will be allowed, if they are greater than or
-equal to \fBbeta\.2\fR\|\. So, \fB1\.2\.3\-beta\.4\fR would be allowed, but
-\fB1\.2\.4\-beta\.2\fR would not, because it is a prerelease of a
-different \fB[major, minor, patch]\fR tuple\.
+\fB^1\.2\.3\-beta\.2\fP := \fB>=1\.2\.3\-beta\.2 <2\.0\.0\fP Note that prereleases in
+the \fB1\.2\.3\fP version will be allowed, if they are greater than or
+equal to \fBbeta\.2\fP\|\. So, \fB1\.2\.3\-beta\.4\fP would be allowed, but
+\fB1\.2\.4\-beta\.2\fP would not, because it is a prerelease of a
+different \fB[major, minor, patch]\fP tuple\.
.IP \(bu 2
-\fB^0\.0\.3\-beta\fR := \fB>=0\.0\.3\-beta <0\.0\.4\fR Note that prereleases in the
-\fB0\.0\.3\fR version \fIonly\fR will be allowed, if they are greater than or
-equal to \fBbeta\fR\|\. So, \fB0\.0\.3\-pr\.2\fR would be allowed\.
+\fB^0\.0\.3\-beta\fP := \fB>=0\.0\.3\-beta <0\.0\.4\fP Note that prereleases in the
+\fB0\.0\.3\fP version \fIonly\fR will be allowed, if they are greater than or
+equal to \fBbeta\fP\|\. So, \fB0\.0\.3\-pr\.2\fP would be allowed\.
.RE
.P
-When parsing caret ranges, a missing \fBpatch\fR value desugars to the
-number \fB0\fR, but will allow flexibility within that value, even if the
-major and minor versions are both \fB0\fR\|\.
+When parsing caret ranges, a missing \fBpatch\fP value desugars to the
+number \fB0\fP, but will allow flexibility within that value, even if the
+major and minor versions are both \fB0\fP\|\.
.RS 0
.IP \(bu 2
-\fB^1\.2\.x\fR := \fB>=1\.2\.0 <2\.0\.0\fR
+\fB^1\.2\.x\fP := \fB>=1\.2\.0 <2\.0\.0\fP
.IP \(bu 2
-\fB^0\.0\.x\fR := \fB>=0\.0\.0 <0\.1\.0\fR
+\fB^0\.0\.x\fP := \fB>=0\.0\.0 <0\.1\.0\fP
.IP \(bu 2
-\fB^0\.0\fR := \fB>=0\.0\.0 <0\.1\.0\fR
+\fB^0\.0\fP := \fB>=0\.0\.0 <0\.1\.0\fP
.RE
.P
-A missing \fBminor\fR and \fBpatch\fR values will desugar to zero, but also
+A missing \fBminor\fP and \fBpatch\fP values will desugar to zero, but also
allow flexibility within those values, even if the major version is
zero\.
.RS 0
.IP \(bu 2
-\fB^1\.x\fR := \fB>=1\.0\.0 <2\.0\.0\fR
+\fB^1\.x\fP := \fB>=1\.0\.0 <2\.0\.0\fP
.IP \(bu 2
-\fB^0\.x\fR := \fB>=0\.0\.0 <1\.0\.0\fR
+\fB^0\.x\fP := \fB>=0\.0\.0 <1\.0\.0\fP
.RE
.SH Functions
.P
-All methods and classes take a final \fBloose\fR boolean argument that, if
+All methods and classes take a final \fBloose\fP boolean argument that, if
true, will be more forgiving about not\-quite\-valid semver strings\.
The resulting output will always be 100% strict, of course\.
.P
@@ -294,96 +294,96 @@ Strict\-mode Comparators and Ranges will be strict about the SemVer
strings that they parse\.
.RS 0
.IP \(bu 2
-\fBvalid(v)\fR: Return the parsed version, or null if it's not valid\.
+\fBvalid(v)\fP: Return the parsed version, or null if it's not valid\.
.IP \(bu 2
-\fBinc(v, release)\fR: Return the version incremented by the release
-type (\fBmajor\fR, \fBpremajor\fR, \fBminor\fR, \fBpreminor\fR, \fBpatch\fR,
-\fBprepatch\fR, or \fBprerelease\fR), or null if it's not valid
+\fBinc(v, release)\fP: Return the version incremented by the release
+type (\fBmajor\fP, \fBpremajor\fP, \fBminor\fP, \fBpreminor\fP, \fBpatch\fP,
+\fBprepatch\fP, or \fBprerelease\fP), or null if it's not valid
.RS 0
.IP \(bu 2
-\fBpremajor\fR in one call will bump the version up to the next major
+\fBpremajor\fP in one call will bump the version up to the next major
version and down to a prerelease of that major version\.
-\fBpreminor\fR, and \fBprepatch\fR work the same way\.
+\fBpreminor\fP, and \fBprepatch\fP work the same way\.
.IP \(bu 2
-If called from a non\-prerelease version, the \fBprerelease\fR will work the
-same as \fBprepatch\fR\|\. It increments the patch version, then makes a
+If called from a non\-prerelease version, the \fBprerelease\fP will work the
+same as \fBprepatch\fP\|\. It increments the patch version, then makes a
prerelease\. If the input version is already a prerelease it simply
increments it\.
.RE
.IP \(bu 2
-\fBmajor(v)\fR: Return the major version number\.
+\fBmajor(v)\fP: Return the major version number\.
.IP \(bu 2
-\fBminor(v)\fR: Return the minor version number\.
+\fBminor(v)\fP: Return the minor version number\.
.IP \(bu 2
-\fBpatch(v)\fR: Return the patch version number\.
+\fBpatch(v)\fP: Return the patch version number\.
.RE
.SS Comparison
.RS 0
.IP \(bu 2
-\fBgt(v1, v2)\fR: \fBv1 > v2\fR
+\fBgt(v1, v2)\fP: \fBv1 > v2\fP
.IP \(bu 2
-\fBgte(v1, v2)\fR: \fBv1 >= v2\fR
+\fBgte(v1, v2)\fP: \fBv1 >= v2\fP
.IP \(bu 2
-\fBlt(v1, v2)\fR: \fBv1 < v2\fR
+\fBlt(v1, v2)\fP: \fBv1 < v2\fP
.IP \(bu 2
-\fBlte(v1, v2)\fR: \fBv1 <= v2\fR
+\fBlte(v1, v2)\fP: \fBv1 <= v2\fP
.IP \(bu 2
-\fBeq(v1, v2)\fR: \fBv1 == v2\fR This is true if they're logically equivalent,
+\fBeq(v1, v2)\fP: \fBv1 == v2\fP This is true if they're logically equivalent,
even if they're not the exact same string\. You already know how to
compare strings\.
.IP \(bu 2
-\fBneq(v1, v2)\fR: \fBv1 != v2\fR The opposite of \fBeq\fR\|\.
+\fBneq(v1, v2)\fP: \fBv1 != v2\fP The opposite of \fBeq\fP\|\.
.IP \(bu 2
-\fBcmp(v1, comparator, v2)\fR: Pass in a comparison string, and it'll call
-the corresponding function above\. \fB"==="\fR and \fB"!=="\fR do simple
+\fBcmp(v1, comparator, v2)\fP: Pass in a comparison string, and it'll call
+the corresponding function above\. \fB"==="\fP and \fB"!=="\fP do simple
string comparison, but are included for completeness\. Throws if an
invalid comparison string is provided\.
.IP \(bu 2
-\fBcompare(v1, v2)\fR: Return \fB0\fR if \fBv1 == v2\fR, or \fB1\fR if \fBv1\fR is greater, or \fB\-1\fR if
-\fBv2\fR is greater\. Sorts in ascending order if passed to \fBArray\.sort()\fR\|\.
+\fBcompare(v1, v2)\fP: Return \fB0\fP if \fBv1 == v2\fP, or \fB1\fP if \fBv1\fP is greater, or \fB\-1\fP if
+\fBv2\fP is greater\. Sorts in ascending order if passed to \fBArray\.sort()\fP\|\.
.IP \(bu 2
-\fBrcompare(v1, v2)\fR: The reverse of compare\. Sorts an array of versions
-in descending order when passed to \fBArray\.sort()\fR\|\.
+\fBrcompare(v1, v2)\fP: The reverse of compare\. Sorts an array of versions
+in descending order when passed to \fBArray\.sort()\fP\|\.
.IP \(bu 2
-\fBdiff(v1, v2)\fR: Returns difference between two versions by the release type
-(\fBmajor\fR, \fBpremajor\fR, \fBminor\fR, \fBpreminor\fR, \fBpatch\fR, \fBprepatch\fR, or \fBprerelease\fR),
+\fBdiff(v1, v2)\fP: Returns difference between two versions by the release type
+(\fBmajor\fP, \fBpremajor\fP, \fBminor\fP, \fBpreminor\fP, \fBpatch\fP, \fBprepatch\fP, or \fBprerelease\fP),
or null if the versions are the same\.
.RE
.SS Ranges
.RS 0
.IP \(bu 2
-\fBvalidRange(range)\fR: Return the valid range or null if it's not valid
+\fBvalidRange(range)\fP: Return the valid range or null if it's not valid
.IP \(bu 2
-\fBsatisfies(version, range)\fR: Return true if the version satisfies the
+\fBsatisfies(version, range)\fP: Return true if the version satisfies the
range\.
.IP \(bu 2
-\fBmaxSatisfying(versions, range)\fR: Return the highest version in the list
-that satisfies the range, or \fBnull\fR if none of them do\.
+\fBmaxSatisfying(versions, range)\fP: Return the highest version in the list
+that satisfies the range, or \fBnull\fP if none of them do\.
.IP \(bu 2
-\fBgtr(version, range)\fR: Return \fBtrue\fR if version is greater than all the
+\fBgtr(version, range)\fP: Return \fBtrue\fP if version is greater than all the
versions possible in the range\.
.IP \(bu 2
-\fBltr(version, range)\fR: Return \fBtrue\fR if version is less than all the
+\fBltr(version, range)\fP: Return \fBtrue\fP if version is less than all the
versions possible in the range\.
.IP \(bu 2
-\fBoutside(version, range, hilo)\fR: Return true if the version is outside
+\fBoutside(version, range, hilo)\fP: Return true if the version is outside
the bounds of the range in either the high or low direction\. The
-\fBhilo\fR argument must be either the string \fB\|'>'\fR or \fB\|'<'\fR\|\. (This is
-the function called by \fBgtr\fR and \fBltr\fR\|\.)
+\fBhilo\fP argument must be either the string \fB\|'>'\fP or \fB\|'<'\fP\|\. (This is
+the function called by \fBgtr\fP and \fBltr\fP\|\.)
.RE
.P
Note that, since ranges may be non\-contiguous, a version might not be
greater than a range, less than a range, \fIor\fR satisfy a range! For
-example, the range \fB1\.2 <1\.2\.9 || >2\.0\.0\fR would have a hole from \fB1\.2\.9\fR
-until \fB2\.0\.0\fR, so the version \fB1\.2\.10\fR would not be greater than the
-range (because \fB2\.0\.1\fR satisfies, which is higher), nor less than the
-range (since \fB1\.2\.8\fR satisfies, which is lower), and it also does not
+example, the range \fB1\.2 <1\.2\.9 || >2\.0\.0\fP would have a hole from \fB1\.2\.9\fP
+until \fB2\.0\.0\fP, so the version \fB1\.2\.10\fP would not be greater than the
+range (because \fB2\.0\.1\fP satisfies, which is higher), nor less than the
+range (since \fB1\.2\.8\fP satisfies, which is lower), and it also does not
satisfy the range\.
.P
If you want to know if a version satisfies or does not satisfy a
-range, use the \fBsatisfies(version, range)\fR function\.
+range, use the \fBsatisfies(version, range)\fP function\.
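
The semver man page above documents both the range grammar and the module's functions. A brief usage sketch (illustrative only, not part of the patch) exercising the documented functions against the ranges discussed:

```javascript
var semver = require('semver')

// Caret and tilde ranges desugar as described above.
semver.satisfies('1.2.8', '^1.2.3')                  // true  (>=1.2.3 <2.0.0)
semver.satisfies('0.2.5', '~0.2.3')                  // true  (>=0.2.3 <0.3.0)
semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3')  // true: same [major, minor, patch] tuple
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3')  // false: prerelease of a different tuple

// Picking the best match from a list, and checking range bounds.
semver.maxSatisfying(['1.2.7', '1.2.9', '1.4.6'], '1.2.7 || >=1.2.9 <2.0.0') // '1.4.6'
semver.gtr('2.0.1', '>=1.2.7 <1.3.0')                // true: above every version in the range
semver.valid('v1.2.3')                               // '1.2.3' (leading "v" stripped)
```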
diff --git a/deps/npm/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/.npmignore
index c2658d7d1..c2658d7d1 100644
--- a/deps/npm/node_modules/graceful-fs/.npmignore
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/.npmignore
diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/LICENSE b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/LICENSE
index 19129e315..19129e315 100644
--- a/deps/npm/node_modules/rimraf/node_modules/glob/LICENSE
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/LICENSE
diff --git a/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/README.md b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if `EACCESS`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
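
To make the EMFILE behaviour described in this README concrete, here is a small sketch (not part of the patch) that opens far more files than a typical descriptor limit allows. With plain `fs` this tends to fail with EMFILE; graceful-fs instead queues the excess `open` calls and drains them as earlier descriptors close:

```javascript
var fs = require('graceful-fs')   // drop-in replacement for require('fs')

// Open the same file many times concurrently. The exact limit depends on
// `ulimit -n`; the point is only that graceful-fs queues whatever won't fit.
var remaining = 10000
for (var i = 0; i < 10000; i++) {
  fs.open(__filename, 'r', function (er, fd) {
    if (er) throw er              // EMFILE would typically surface here with plain fs
    fs.close(fd, function () {
      if (--remaining === 0) console.log('all opens completed')
    })
  })
}
```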
diff --git a/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
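
The `graceful-fs.js` shim added above wraps `open` and `readdir` in request objects that are queued on `EMFILE`/`ENFILE` and retried from `onclose()` once a descriptor is released. A stripped-down sketch of that retry pattern, detached from `fs` (the names here are illustrative, not from the patch):

```javascript
// Minimal sketch of the retry pattern in graceful-fs.js: attempt an
// operation, park it on EMFILE/ENFILE, and retry one parked request
// whenever a descriptor is released.
var queue = []

function attempt(op, cb) {
  op(function (er, result) {
    if (er && (er.code === 'EMFILE' || er.code === 'ENFILE')) {
      queue.push({ op: op, cb: cb })   // like enqueue(new OpenReq(...))
    } else {
      cb(er, result)
    }
  })
}

function released() {                  // analogous to onclose()
  var req = queue.shift()
  if (req) attempt(req.op, req.cb)
}
```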
diff --git a/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/package.json b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..dc3ce5501
--- /dev/null
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@>3.0.1 <4.0.0-0",
+ "/Users/isaacs/dev/npm/npm/node_modules/cmd-shim"
+ ]
+ ],
+ "_from": "graceful-fs@>3.0.1 <4.0.0-0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/cmd-shim/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@>3.0.1 <4.0.0-0",
+ "rawSpec": ">3.0.1 <4.0.0-0",
+ "scope": null,
+ "spec": ">3.0.1 <4.0.0-0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/cmd-shim"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@>3.0.1 <4.0.0-0",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/cmd-shim",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
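
`polyfills.js` above patches `fs.read`/`fs.readSync` to retry up to ten times on `EAGAIN` and relaxes `chown`-family errors for non-root users. The bounded-retry wrapper used on the sync path boils down to something like the following sketch (not an API from the patch):

```javascript
// Generic bounded-retry wrapper for a synchronous call that can fail
// transiently, mirroring the EAGAIN loop around fs.readSync above.
function retrySync(fn, isTransient, maxTries) {
  var tries = 0
  while (true) {
    try {
      return fn()
    } catch (er) {
      if (isTransient(er) && ++tries < maxTries) continue
      throw er
    }
  }
}

// Usage in the spirit of the patched readSync:
// retrySync(function () { return fs.readSync(fd, buf, 0, buf.length, null) },
//           function (er) { return er.code === 'EAGAIN' }, 10)
```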
diff --git a/deps/npm/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/max-open.js
index a6b9ba43d..a6b9ba43d 100644
--- a/deps/npm/node_modules/graceful-fs/test/max-open.js
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/max-open.js
diff --git a/deps/npm/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/open.js
index 85732f236..85732f236 100644
--- a/deps/npm/node_modules/graceful-fs/test/open.js
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/open.js
diff --git a/deps/npm/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/readdir-sort.js
index cb63a6846..cb63a6846 100644
--- a/deps/npm/node_modules/graceful-fs/test/readdir-sort.js
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/readdir-sort.js
diff --git a/deps/npm/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/write-then-read.js
index 21e4c26bf..21e4c26bf 100644
--- a/deps/npm/node_modules/graceful-fs/test/write-then-read.js
+++ b/deps/npm/node_modules/cmd-shim/node_modules/graceful-fs/test/write-then-read.js
diff --git a/deps/npm/node_modules/strip-ansi/cli.js b/deps/npm/node_modules/columnify/node_modules/strip-ansi/cli.js
index b83f63b90..b83f63b90 100755
--- a/deps/npm/node_modules/strip-ansi/cli.js
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/cli.js
diff --git a/deps/npm/node_modules/strip-ansi/index.js b/deps/npm/node_modules/columnify/node_modules/strip-ansi/index.js
index 099480fbf..099480fbf 100644
--- a/deps/npm/node_modules/strip-ansi/index.js
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/index.js
diff --git a/deps/npm/node_modules/ansi-regex/index.js b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/index.js
index 2fcdd1e47..2fcdd1e47 100644
--- a/deps/npm/node_modules/ansi-regex/index.js
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/index.js
diff --git a/deps/npm/node_modules/ansi-regex/license b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/license
index 654d0bfe9..654d0bfe9 100644
--- a/deps/npm/node_modules/ansi-regex/license
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/license
diff --git a/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/package.json b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/package.json
new file mode 100644
index 000000000..da92c5d12
--- /dev/null
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/package.json
@@ -0,0 +1,77 @@
+{
+ "name": "ansi-regex",
+ "version": "1.1.1",
+ "description": "Regular expression for matching ANSI escape codes",
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/ansi-regex.git"
+ },
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "http://sindresorhus.com"
+ },
+ "maintainers": [
+ {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "http://sindresorhus.com"
+ },
+ {
+ "name": "Joshua Appelman",
+ "email": "jappelman@xebia.com",
+ "url": "http://jbnicolai.com"
+ }
+ ],
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "mocha test/test.js",
+ "view-supported": "node test/viewCodes.js"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "ansi",
+ "styles",
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "string",
+ "tty",
+ "escape",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "command-line",
+ "text",
+ "regex",
+ "regexp",
+ "re",
+ "match",
+ "test",
+ "find",
+ "pattern"
+ ],
+ "devDependencies": {
+ "mocha": "*"
+ },
+ "readme": "# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)\n\n> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)\n\n\n## Install\n\n```sh\n$ npm install --save ansi-regex\n```\n\n\n## Usage\n\n```js\nvar ansiRegex = require('ansi-regex');\n\nansiRegex().test('\\u001b[4mcake\\u001b[0m');\n//=> true\n\nansiRegex().test('cake');\n//=> false\n\n'\\u001b[4mcake\\u001b[0m'.match(ansiRegex());\n//=> ['\\u001b[4m', '\\u001b[0m']\n```\n\n*It's a function so you can create multiple instances. Regexes with the global flag will have the `.lastIndex` property changed for each call to methods on the instance. Therefore reusing the instance with multiple calls will not work as expected for `.test()`.*\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
+ "readmeFilename": "readme.md",
+ "bugs": {
+ "url": "https://github.com/sindresorhus/ansi-regex/issues"
+ },
+ "homepage": "https://github.com/sindresorhus/ansi-regex#readme",
+ "_id": "ansi-regex@1.1.1",
+ "_shasum": "41c847194646375e6a1a5d10c3ca054ef9fc980d",
+ "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-1.1.1.tgz",
+ "_from": "ansi-regex@>=1.0.0 <2.0.0"
+}
diff --git a/deps/npm/node_modules/ansi-regex/readme.md b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/readme.md
index ae876e729..ae876e729 100644
--- a/deps/npm/node_modules/ansi-regex/readme.md
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/node_modules/ansi-regex/readme.md
diff --git a/deps/npm/node_modules/columnify/node_modules/strip-ansi/package.json b/deps/npm/node_modules/columnify/node_modules/strip-ansi/package.json
new file mode 100644
index 000000000..e43cfb5a5
--- /dev/null
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/package.json
@@ -0,0 +1,69 @@
+{
+ "name": "strip-ansi",
+ "version": "2.0.1",
+ "description": "Strip ANSI escape codes",
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/strip-ansi.git"
+ },
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "http://sindresorhus.com"
+ },
+ "bin": {
+ "strip-ansi": "cli.js"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "mocha"
+ },
+ "files": [
+ "index.js",
+ "cli.js"
+ ],
+ "keywords": [
+ "strip",
+ "trim",
+ "remove",
+ "ansi",
+ "styles",
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "string",
+ "tty",
+ "escape",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "log",
+ "logging",
+ "command-line",
+ "text"
+ ],
+ "dependencies": {
+ "ansi-regex": "^1.0.0"
+ },
+ "devDependencies": {
+ "mocha": "*"
+ },
+ "readme": "# strip-ansi [![Build Status](https://travis-ci.org/sindresorhus/strip-ansi.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-ansi)\n\n> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)\n\n\n## Install\n\n```sh\n$ npm install --save strip-ansi\n```\n\n\n## Usage\n\n```js\nvar stripAnsi = require('strip-ansi');\n\nstripAnsi('\\u001b[4mcake\\u001b[0m');\n//=> 'cake'\n```\n\n\n## CLI\n\n```sh\n$ npm install --global strip-ansi\n```\n\n```sh\n$ strip-ansi --help\n\n Usage\n strip-ansi <input-file> > <output-file>\n cat <input-file> | strip-ansi > <output-file>\n\n Example\n strip-ansi unicorn.txt > unicorn-stripped.txt\n```\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
+ "readmeFilename": "readme.md",
+ "bugs": {
+ "url": "https://github.com/sindresorhus/strip-ansi/issues"
+ },
+ "homepage": "https://github.com/sindresorhus/strip-ansi#readme",
+ "_id": "strip-ansi@2.0.1",
+ "_shasum": "df62c1aa94ed2f114e1d0f21fd1d50482b79a60e",
+ "_resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-2.0.1.tgz",
+ "_from": "strip-ansi@>=2.0.1 <3.0.0"
+}
diff --git a/deps/npm/node_modules/strip-ansi/readme.md b/deps/npm/node_modules/columnify/node_modules/strip-ansi/readme.md
index 53ec26436..53ec26436 100644
--- a/deps/npm/node_modules/strip-ansi/readme.md
+++ b/deps/npm/node_modules/columnify/node_modules/strip-ansi/readme.md
diff --git a/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/node_modules/clone/package.json b/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/node_modules/clone/package.json
index bc8e878a5..dc56f3f19 100644
--- a/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/node_modules/clone/package.json
+++ b/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/node_modules/clone/package.json
@@ -122,5 +122,6 @@
"tarball": "http://registry.npmjs.org/clone/-/clone-0.1.19.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/clone/-/clone-0.1.19.tgz"
+ "_resolved": "https://registry.npmjs.org/clone/-/clone-0.1.19.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/package.json b/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/package.json
index fdd074d0f..eea2e9b0f 100644
--- a/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/package.json
+++ b/deps/npm/node_modules/columnify/node_modules/wcwidth/node_modules/defaults/package.json
@@ -49,5 +49,6 @@
"tarball": "http://registry.npmjs.org/defaults/-/defaults-1.0.2.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.2.tgz"
+ "_resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/columnify/node_modules/wcwidth/package.json b/deps/npm/node_modules/columnify/node_modules/wcwidth/package.json
index 4744d9dc3..49fc6f040 100644
--- a/deps/npm/node_modules/columnify/node_modules/wcwidth/package.json
+++ b/deps/npm/node_modules/columnify/node_modules/wcwidth/package.json
@@ -56,5 +56,6 @@
"shasum": "02d059ff7a8fc741e0f6b5da1e69b2b40daeca6f",
"tarball": "http://registry.npmjs.org/wcwidth/-/wcwidth-1.0.0.tgz"
},
- "_resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.0.tgz"
+ "_resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/columnify/package.json b/deps/npm/node_modules/columnify/package.json
index 195a7f092..eaef8abcb 100644
--- a/deps/npm/node_modules/columnify/package.json
+++ b/deps/npm/node_modules/columnify/package.json
@@ -63,5 +63,6 @@
"shasum": "15fdda803a3875f87f9d302b3bc828932d664003",
"tarball": "http://registry.npmjs.org/columnify/-/columnify-1.5.1.tgz"
},
- "_resolved": "https://registry.npmjs.org/columnify/-/columnify-1.5.1.tgz"
+ "_resolved": "https://registry.npmjs.org/columnify/-/columnify-1.5.1.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/dezalgo/.travis.yml b/deps/npm/node_modules/dezalgo/.travis.yml
new file mode 100644
index 000000000..e1bcee1ac
--- /dev/null
+++ b/deps/npm/node_modules/dezalgo/.travis.yml
@@ -0,0 +1,7 @@
+language: node_js
+before_script: npm install -g npm@latest
+node_js:
+ - '0.8'
+ - '0.10'
+ - '0.12'
+ - 'iojs'
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/CHANGES.md b/deps/npm/node_modules/dezalgo/node_modules/asap/CHANGES.md
new file mode 100644
index 000000000..8e6202305
--- /dev/null
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/CHANGES.md
@@ -0,0 +1,63 @@
+
+## 2.0.3
+
+Version 2.0.3 fixes a bug when adjusting the capacity of the task queue.
+
+## 2.0.1-2.02
+
+Version 2.0.1 fixes a bug in the way redirects were expressed that affected the
+function of Browserify, but which Mr would tolerate.
+
+## 2.0.0
+
+Version 2 of ASAP is a full rewrite with a few salient changes.
+First, the ASAP source is CommonJS only and designed with [Browserify][] and
+[Browserify-compatible][Mr] module loaders in mind.
+
+[Browserify]: https://github.com/substack/node-browserify
+[Mr]: https://github.com/montagejs/mr
+
+The new version has been refactored in two dimensions.
+Support for Node.js and browsers have been separated, using Browserify
+redirects and ASAP has been divided into two modules.
+The "raw" layer depends on the tasks to catch thrown exceptions and unravel
+Node.js domains.
+
+The full implementation of ASAP is loadable as `require("asap")` in both Node.js
+and browsers.
+
+The raw layer that lacks exception handling overhead is loadable as
+`require("asap/raw")`.
+The interface is the same for both layers.
+
+Tasks are no longer required to be functions, but can rather be any object that
+implements `task.call()`.
+With this feature you can recycle task objects to avoid garbage collector churn
+and avoid closures in general.
+
+The implementation has been rigorously documented so that our successors can
+understand the scope of the problem that this module solves and all of its
+nuances, ensuring that the next generation of implementations know what details
+are essential.
+
+- [asap.js](https://github.com/kriskowal/asap/blob/master/asap.js)
+- [raw.js](https://github.com/kriskowal/asap/blob/master/raw.js)
+- [browser-asap.js](https://github.com/kriskowal/asap/blob/master/browser-asap.js)
+- [browser-raw.js](https://github.com/kriskowal/asap/blob/master/browser-raw.js)
+
+The new version has also been rigorously tested across a broad spectrum of
+browsers, in both the window and worker context.
+The following charts capture the browser test results for the most recent
+release.
+The first chart shows test results for ASAP running in the main window context.
+The second chart shows test results for ASAP running in a web worker context.
+Test results are inconclusive (grey) on browsers that do not support web
+workers.
+These data are captured automatically by [Continuous
+Integration][].
+
+![Browser Compatibility](http://kriskowal-asap.s3-website-us-west-2.amazonaws.com/train/integration-2/saucelabs-results-matrix.svg)
+
+![Compatibility in Web Workers](http://kriskowal-asap.s3-website-us-west-2.amazonaws.com/train/integration-2/saucelabs-worker-results-matrix.svg)
+
+[Continuous Integration]: https://github.com/kriskowal/asap/blob/master/CONTRIBUTING.md
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/LICENSE.md b/deps/npm/node_modules/dezalgo/node_modules/asap/LICENSE.md
index 5d98ad8fe..ba18c6139 100644
--- a/deps/npm/node_modules/dezalgo/node_modules/asap/LICENSE.md
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/LICENSE.md
@@ -1,5 +1,6 @@
-Copyright 2009–2013 Contributors. All rights reserved.
+Copyright 2009–2014 Contributors. All rights reserved.
+
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/README.md b/deps/npm/node_modules/dezalgo/node_modules/asap/README.md
index 9a4275976..452fd8c20 100644
--- a/deps/npm/node_modules/dezalgo/node_modules/asap/README.md
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/README.md
@@ -1,9 +1,14 @@
-
# ASAP
-This `asap` CommonJS package contains a single `asap` module that
-exports a single `asap` function that executes a function **as soon as
-possible**.
+[![Build Status](https://travis-ci.org/kriskowal/asap.png?branch=master)](https://travis-ci.org/kriskowal/asap)
+
+Promise and asynchronous observer libraries, as well as hand-rolled callback
+programs and libraries, often need a mechanism to postpone the execution of a
+callback until the next available event.
+(See [Designing APIs for Asynchrony][Zalgo].)
+The `asap` function executes a task **as soon as possible** but not before it
+returns, waiting only for the completion of the current event and previously
+scheduled tasks.
```javascript
asap(function () {
@@ -11,27 +16,54 @@ asap(function () {
});
```
-More formally, ASAP provides a fast event queue that will execute tasks
-until it is empty before yielding to the JavaScript engine's underlying
-event-loop. When the event queue becomes non-empty, ASAP schedules a
-flush event, preferring for that event to occur before the JavaScript
-engine has an opportunity to perform IO tasks or rendering, thus making
-the first task and subsequent tasks semantically indistinguishable.
-ASAP uses a variety of techniques to preserve this invariant on
-different versions of browsers and NodeJS.
-
-By design, ASAP can starve the event loop on the theory that, if there
-is enough work to be done synchronously, albeit in separate events, long
-enough to starve input or output, it is a strong indicator that the
-program needs to push back on scheduling more work.
-
-Take care. ASAP can sustain infinite recursive calls indefinitely
-without warning. This is behaviorally equivalent to an infinite loop.
-It will not halt from a stack overflow, but it *will* chew through
-memory (which is an oddity I cannot explain at this time). Just as with
-infinite loops, you can monitor a Node process for this behavior with a
-heart-beat signal. As with infinite loops, a very small amount of
-caution goes a long way to avoiding problems.
+[Zalgo]: http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony
+
+This CommonJS package provides an `asap` module that exports a function that
+executes a task function *as soon as possible*.
+
+ASAP strives to schedule events to occur before yielding for IO, reflow,
+or redrawing.
+Each event receives an independent stack, with only platform code in parent
+frames, and the events run in the order they are scheduled.
+
+ASAP provides a fast event queue that will execute tasks until it is
+empty before yielding to the JavaScript engine's underlying event-loop.
+When a task gets added to a previously empty event queue, ASAP schedules a flush
+event, preferring for that event to occur before the JavaScript engine has an
+opportunity to perform IO tasks or rendering, thus making the first task and
+subsequent tasks semantically indistinguishable.
+ASAP uses a variety of techniques to preserve this invariant on different
+versions of browsers and Node.js.
+
+By design, ASAP prevents input events from being handled until the task
+queue is empty.
+If the process is busy enough, this may cause incoming connection requests to be
+dropped, and may cause existing connections to inform the sender to reduce the
+transmission rate or stall.
+ASAP allows this on the theory that, if there is enough work to do, there is no
+sense in looking for trouble.
+As a consequence, ASAP can interfere with smooth animation.
+If your task should be tied to the rendering loop, consider using
+`requestAnimationFrame` instead.
+A long sequence of tasks can also trigger the long-running-script dialog.
+If this is a problem, you may be able to use ASAP’s cousin `setImmediate` to
+break long processes into shorter intervals and periodically allow the browser
+to breathe.
+`setImmediate` will yield for IO, reflow, and repaint events.
+It also returns a handle, so a scheduled call can be canceled.
+For a `setImmediate` shim, consider [YuzuJS setImmediate][setImmediate].
+
+[setImmediate]: https://github.com/YuzuJS/setImmediate
+
+Take care.
+ASAP can sustain infinite recursive calls without warning.
+It will not halt from a stack overflow, and it will not consume unbounded
+memory.
+This is behaviorally equivalent to an infinite loop.
+Just as with infinite loops, you can monitor a Node.js process for this behavior
+with a heart-beat signal.
+As with infinite loops, a very small amount of caution goes a long way to
+avoiding problems.
```javascript
function loop() {
@@ -40,16 +72,113 @@ function loop() {
loop();
```
-ASAP is distinct from `setImmediate` in that it does not suffer the
-overhead of returning a handle and being possible to cancel. For a
-`setImmediate` shim, consider [setImmediate][].
+In browsers, if a task throws an exception, it will not interrupt the flushing
+of high-priority tasks.
+The exception will be postponed to a later, low-priority event to avoid
+slow-downs.
+In Node.js, if a task throws an exception, ASAP will resume flushing only if—and
+only after—the error is handled by `domain.on("error")` or
+`process.on("uncaughtException")`.
+
+## Raw ASAP
+
+Checking for exceptions comes at a cost.
+The package also provides an `asap/raw` module that exports the underlying
+implementation, which is faster but stalls if a task throws an exception.
+This internal version of the ASAP function does not check for errors.
+If a task does throw an error, it will stall the event queue unless you manually
+call `rawAsap.requestFlush()` before throwing the error, or any time after.
+
+In Node.js, `asap/raw` also runs all tasks outside any domain.
+If you need a task to be bound to your domain, you will have to do it manually.
+
+```js
+if (process.domain) {
+ task = process.domain.bind(task);
+}
+rawAsap(task);
+```
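+
+Similarly, a minimal sketch of guarding a task that might throw when using the
+raw layer directly (`mightThrow` is a stand-in for your own task body):
+
+```js
+var rawAsap = require("asap/raw");
+
+rawAsap(function () {
+    try {
+        mightThrow(); // stand-in for your own task body
+    } catch (error) {
+        // Request another flush before rethrowing so that tasks scheduled
+        // after this one are not stalled by the exception.
+        rawAsap.requestFlush();
+        throw error;
+    }
+});
+```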
+
+## Tasks
+
+A task may be any object that implements `call()`.
+A function will suffice, but closures tend not to be reusable and can cause
+garbage collector churn.
+Both `asap` and `rawAsap` accept task objects to give you the option of
+recycling task objects or using higher-level callable object abstractions.
+See the `asap` source for an illustration.
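+
+For example, a minimal sketch (an illustration only, not the package's internal
+`RawTask`) of a reusable task object that implements `call()`:
+
+```js
+var asap = require("asap");
+
+// `asap` only requires that the task implement `call()`.
+function PrintTask(message) {
+    this.message = message;
+}
+PrintTask.prototype.call = function () {
+    console.log(this.message);
+};
+
+var task = new PrintTask("hello");
+asap(task);
+// The same object can be updated and scheduled again later,
+// avoiding a fresh closure allocation for every scheduling.
+```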
+
+
+## Compatibility
+
+ASAP is tested on Node.js v0.10 and in a broad spectrum of web browsers.
+The following charts capture the browser test results for the most recent
+release.
+The first chart shows test results for ASAP running in the main window context.
+The second chart shows test results for ASAP running in a web worker context.
+Test results are inconclusive (grey) on browsers that do not support web
+workers.
+These data are captured automatically by [Continuous
+Integration][].
+
+[Continuous Integration]: https://github.com/kriskowal/asap/blob/master/CONTRIBUTING.md
+
+![Browser Compatibility](http://kriskowal-asap.s3-website-us-west-2.amazonaws.com/train/integration-2/saucelabs-results-matrix.svg)
-[setImmediate]: https://github.com/noblejs/setimmediate
+![Compatibility in Web Workers](http://kriskowal-asap.s3-website-us-west-2.amazonaws.com/train/integration-2/saucelabs-worker-results-matrix.svg)
+
+## Caveats
+
+When a task is added to an empty event queue, it is not always possible to
+guarantee that the task queue will begin flushing immediately after the current
+event.
+However, once the task queue begins flushing, it will not yield until the queue
+is empty, even if the queue grows while executing tasks.
+
+The following browsers allow the use of [DOM mutation observers][] to access
+the HTML [microtask queue][], and thus begin flushing ASAP's task queue
+immediately at the end of the current event loop turn, before any rendering or
+IO:
+
+[microtask queue]: http://www.whatwg.org/specs/web-apps/current-work/multipage/webappapis.html#microtask-queue
+[DOM mutation observers]: http://dom.spec.whatwg.org/#mutation-observers
+
+- Android 4–4.3
+- Chrome 26–34
+- Firefox 14–29
+- Internet Explorer 11
+- iPad Safari 6–7.1
+- iPhone Safari 7–7.1
+- Safari 6–7
+
+In the absence of mutation observers, there are a few browsers, and situations
+like web workers in some of the above browsers, where [message channels][]
+would be a useful way to avoid falling back to timers.
+Message channels give direct access to the HTML [task queue][], so the ASAP
+task queue would flush after any already queued rendering and IO tasks, but
+without having the minimum delay imposed by timers.
+However, among these browsers, Internet Explorer 10 and Safari do not reliably
+dispatch messages, so they are not worth the trouble to implement.
+
+[message channels]: http://www.whatwg.org/specs/web-apps/current-work/multipage/web-messaging.html#message-channels
+[task queue]: http://www.whatwg.org/specs/web-apps/current-work/multipage/webappapis.html#concept-task
+
+- Internet Explorer 10
+- Safari 5.0-1
+- Opera 11-12
+
+In the absence of mutation observers, these browsers and the following browsers
+all fall back to using `setTimeout` and `setInterval` to ensure that a `flush`
+occurs.
+The implementation uses both and cancels whatever handler loses the race, since
+`setTimeout` tends to occasionally skip tasks in unisolated circumstances.
+Timers generally delay the flushing of ASAP's task queue for four milliseconds.
+
+- Firefox 3–13
+- Internet Explorer 6–10
+- iPad Safari 4.3
+- Lynx 2.8.7
-If a task throws an exception, it will not interrupt the flushing of
-high-priority tasks. The exception will be postponed to a later,
-low-priority event to avoid slow-downs, when the underlying JavaScript
-engine will treat it as it does any unhandled exception.
## Heritage
@@ -58,13 +187,13 @@ It originally had a naïve implementation in terms of `setTimeout`, but
[Malte Ubl][NonBlocking] provided an insight that `postMessage` might be
useful for creating a high-priority, no-delay event dispatch hack.
Since then, Internet Explorer proposed and implemented `setImmediate`.
-Robert Kratić began contributing to Q by measuring the performance of
+Robert Katić began contributing to Q by measuring the performance of
the internal implementation of `asap`, paying particular attention to
-error recovery. Domenic, Robert, and I collectively settled on the
-current strategy of unrolling the high-priority event queue internally
-regardless of what strategy we used to dispatch the potentially
-lower-priority flush event. Domenic went on to make ASAP cooperate with
-NodeJS domains.
+error recovery.
+Domenic, Robert, and Kris Kowal collectively settled on the current strategy of
+unrolling the high-priority event queue internally regardless of what strategy
+we used to dispatch the potentially lower-priority flush event.
+Domenic went on to make ASAP cooperate with Node.js domains.
[Q]: https://github.com/kriskowal/q
[NonBlocking]: http://www.nonblocking.io/2011/06/windownexttick.html
@@ -74,8 +203,35 @@ Case for setImmediate][NCZ].
[NCZ]: http://www.nczonline.net/blog/2013/07/09/the-case-for-setimmediate/
+Ember’s RSVP promise implementation later [adopted][RSVP ASAP] the name ASAP but
+further developed the implementation.
+In particular, the `MessagePort` implementation was abandoned due to interaction
+[problems with Mobile Internet Explorer][IE Problems] in favor of an
+implementation backed by the newer and more reliable DOM `MutationObserver`
+interface.
+These changes were back-ported into this library.
+
+[IE Problems]: https://github.com/cujojs/when/issues/197
+[RSVP ASAP]: https://github.com/tildeio/rsvp.js/blob/cddf7232546a9cf858524b75cde6f9edf72620a7/lib/rsvp/asap.js
+
+In addition, ASAP was factored into `asap` and `asap/raw`, such that `asap` remained
+exception-safe, but `asap/raw` provided a tight kernel that could be used for
+tasks that guaranteed that they would not throw exceptions.
+This core is useful for promise implementations that capture thrown errors in
+rejected promises and do not need a second safety net.
+At the same time, the exception handling in `asap` was factored into separate
+implementations for Node.js and browsers, using the [Browserify][Browser
+Config] `browser` property in `package.json` to instruct browser module loaders
+and bundlers, including [Browserify][], [Mr][], and [Mop][], to use the
+browser-only implementation.
+
+[Browser Config]: https://gist.github.com/defunctzombie/4339901
+[Browserify]: https://github.com/substack/node-browserify
+[Mr]: https://github.com/montagejs/mr
+[Mop]: https://github.com/montagejs/mop
+
## License
-Copyright 2009-2013 by Contributors
+Copyright 2009-2014 by Contributors
MIT License (enclosed)
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/asap.js b/deps/npm/node_modules/dezalgo/node_modules/asap/asap.js
index 2f85516cd..f04fcd58f 100644
--- a/deps/npm/node_modules/dezalgo/node_modules/asap/asap.js
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/asap.js
@@ -1,113 +1,65 @@
-
-// Use the fastest possible means to execute a task in a future turn
-// of the event loop.
-
-// linked list of tasks (single, with head node)
-var head = {task: void 0, next: null};
-var tail = head;
-var flushing = false;
-var requestFlush = void 0;
-var isNodeJS = false;
-
-function flush() {
- /* jshint loopfunc: true */
-
- while (head.next) {
- head = head.next;
- var task = head.task;
- head.task = void 0;
- var domain = head.domain;
-
- if (domain) {
- head.domain = void 0;
- domain.enter();
- }
-
- try {
- task();
-
- } catch (e) {
- if (isNodeJS) {
- // In node, uncaught exceptions are considered fatal errors.
- // Re-throw them synchronously to interrupt flushing!
-
- // Ensure continuation if the uncaught exception is suppressed
- // listening "uncaughtException" events (as domains does).
- // Continue in next event to avoid tick recursion.
- if (domain) {
- domain.exit();
- }
- setTimeout(flush, 0);
- if (domain) {
- domain.enter();
- }
-
- throw e;
-
- } else {
- // In browsers, uncaught exceptions are not fatal.
- // Re-throw them asynchronously to avoid slow-downs.
- setTimeout(function() {
- throw e;
- }, 0);
- }
- }
-
- if (domain) {
- domain.exit();
- }
- }
-
- flushing = false;
-}
-
-if (typeof process !== "undefined" && process.nextTick) {
- // Node.js before 0.9. Note that some fake-Node environments, like the
- // Mocha test runner, introduce a `process` global without a `nextTick`.
- isNodeJS = true;
-
- requestFlush = function () {
- process.nextTick(flush);
- };
-
-} else if (typeof setImmediate === "function") {
- // In IE10, Node.js 0.9+, or https://github.com/NobleJS/setImmediate
- if (typeof window !== "undefined") {
- requestFlush = setImmediate.bind(window, flush);
+"use strict";
+
+var rawAsap = require("./raw");
+var freeTasks = [];
+
+/**
+ * Calls a task as soon as possible after returning, in its own event, with
+ * priority over IO events. An exception thrown in a task can be handled by
+ * `process.on("uncaughtException")` or `domain.on("error")`, but will otherwise
+ * crash the process. If the error is handled, all subsequent tasks will
+ * resume.
+ *
+ * @param {{call}} task A callable object, typically a function that takes no
+ * arguments.
+ */
+module.exports = asap;
+function asap(task) {
+ var rawTask;
+ if (freeTasks.length) {
+ rawTask = freeTasks.pop();
} else {
- requestFlush = function () {
- setImmediate(flush);
- };
+ rawTask = new RawTask();
}
-
-} else if (typeof MessageChannel !== "undefined") {
- // modern browsers
- // http://www.nonblocking.io/2011/06/windownexttick.html
- var channel = new MessageChannel();
- channel.port1.onmessage = flush;
- requestFlush = function () {
- channel.port2.postMessage(0);
- };
-
-} else {
- // old browsers
- requestFlush = function () {
- setTimeout(flush, 0);
- };
+ rawTask.task = task;
+ rawTask.domain = process.domain;
+ rawAsap(rawTask);
}
-function asap(task) {
- tail = tail.next = {
- task: task,
- domain: isNodeJS && process.domain,
- next: null
- };
+function RawTask() {
+ this.task = null;
+ this.domain = null;
+}
- if (!flushing) {
- flushing = true;
- requestFlush();
+RawTask.prototype.call = function () {
+ if (this.domain) {
+ this.domain.enter();
+ }
+ var threw = true;
+ try {
+ this.task.call();
+ threw = false;
+ // If the task throws an exception (presumably) Node.js restores the
+ // domain stack for the next event.
+ if (this.domain) {
+ this.domain.exit();
+ }
+ } finally {
+ // We use try/finally and a threw flag to avoid messing up stack traces
+ // when we catch and release errors.
+ if (threw) {
+ // In Node.js, uncaught exceptions are considered fatal errors.
+ // Re-throw them to interrupt flushing!
+ // Ensure that flushing continues if an uncaught exception is
+ // suppressed listening process.on("uncaughtException") or
+ // domain.on("error").
+ rawAsap.requestFlush();
+ }
+ // If the task threw an error, we do not want to exit the domain here.
+ // Exiting the domain would prevent the domain from catching the error.
+ this.task = null;
+ this.domain = null;
+ freeTasks.push(this);
}
};
-module.exports = asap;
-
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/browser-asap.js b/deps/npm/node_modules/dezalgo/node_modules/asap/browser-asap.js
new file mode 100644
index 000000000..805c98246
--- /dev/null
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/browser-asap.js
@@ -0,0 +1,66 @@
+"use strict";
+
+// rawAsap provides everything we need except exception management.
+var rawAsap = require("./raw");
+// RawTasks are recycled to reduce GC churn.
+var freeTasks = [];
+// We queue errors to ensure they are thrown in right order (FIFO).
+// Array-as-queue is good enough here, since we are just dealing with exceptions.
+var pendingErrors = [];
+var requestErrorThrow = rawAsap.makeRequestCallFromTimer(throwFirstError);
+
+function throwFirstError() {
+ if (pendingErrors.length) {
+ throw pendingErrors.shift();
+ }
+}
+
+/**
+ * Calls a task as soon as possible after returning, in its own event, with priority
+ * over other events like animation, reflow, and repaint. An error thrown from an
+ * event will not interrupt, nor even substantially slow down the processing of
+ * other events, but will rather be postponed to a lower-priority event.
+ * @param {{call}} task A callable object, typically a function that takes no
+ * arguments.
+ */
+module.exports = asap;
+function asap(task) {
+ var rawTask;
+ if (freeTasks.length) {
+ rawTask = freeTasks.pop();
+ } else {
+ rawTask = new RawTask();
+ }
+ rawTask.task = task;
+ rawAsap(rawTask);
+}
+
+// We wrap tasks with recyclable task objects. A task object implements
+// `call`, just like a function.
+function RawTask() {
+ this.task = null;
+}
+
+// The sole purpose of wrapping the task is to catch the exception and recycle
+// the task object after its single use.
+RawTask.prototype.call = function () {
+ try {
+ this.task.call();
+ } catch (error) {
+ if (asap.onerror) {
+ // This hook exists purely for testing purposes.
+ // Its name will be periodically randomized to break any code that
+ // depends on its existence.
+ asap.onerror(error);
+ } else {
+ // In a web browser, exceptions are not fatal. However, to avoid
+ // slowing down the queue of pending tasks, we rethrow the error in a
+ // lower priority turn.
+ pendingErrors.push(error);
+ requestErrorThrow();
+ }
+ } finally {
+ this.task = null;
+ freeTasks[freeTasks.length] = this;
+ }
+};
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/browser-raw.js b/deps/npm/node_modules/dezalgo/node_modules/asap/browser-raw.js
new file mode 100644
index 000000000..1cfd77293
--- /dev/null
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/browser-raw.js
@@ -0,0 +1,220 @@
+"use strict";
+
+// Use the fastest means possible to execute a task in its own turn, with
+// priority over other events including IO, animation, reflow, and redraw
+// events in browsers.
+//
+// An exception thrown by a task will permanently interrupt the processing of
+// subsequent tasks. The higher level `asap` function ensures that, if an
+// exception is thrown by a task, the task queue will continue flushing as
+// soon as possible, but if you use `rawAsap` directly, you are responsible for
+// either ensuring that no exceptions are thrown from your task, or for manually
+// calling `rawAsap.requestFlush` if an exception is thrown.
+module.exports = rawAsap;
+function rawAsap(task) {
+ if (!queue.length) {
+ requestFlush();
+ flushing = true;
+ }
+ // Equivalent to push, but avoids a function call.
+ queue[queue.length] = task;
+}
+
+var queue = [];
+// Once a flush has been requested, no further calls to `requestFlush` are
+// necessary until the next `flush` completes.
+var flushing = false;
+// `requestFlush` is an implementation-specific method that attempts to kick
+// off a `flush` event as quickly as possible. `flush` will attempt to exhaust
+// the event queue before yielding to the browser's own event loop.
+var requestFlush;
+// The position of the next task to execute in the task queue. This is
+// preserved between calls to `flush` so that it can be resumed if
+// a task throws an exception.
+var index = 0;
+// If a task schedules additional tasks recursively, the task queue can grow
+// unbounded. To prevent memory exhaustion, the task queue will periodically
+// truncate already-completed tasks.
+var capacity = 1024;
+
+// The flush function processes all tasks that have been scheduled with
+// `rawAsap` unless and until one of those tasks throws an exception.
+// If a task throws an exception, `flush` ensures that its state will remain
+// consistent and will resume where it left off when called again.
+// However, `flush` does not make any arrangements to be called again if an
+// exception is thrown.
+function flush() {
+ while (index < queue.length) {
+ var currentIndex = index;
+ // Advance the index before calling the task. This ensures that we will
+        // begin flushing on the next task if this task throws an error.
+ index = index + 1;
+ queue[currentIndex].call();
+ // Prevent leaking memory for long chains of recursive calls to `asap`.
+ // If we call `asap` within tasks scheduled by `asap`, the queue will
+ // grow, but to avoid an O(n) walk for every task we execute, we don't
+ // shift tasks off the queue after they have been executed.
+ // Instead, we periodically shift 1024 tasks off the queue.
+ if (index > capacity) {
+ // Manually shift all values starting at the index back to the
+ // beginning of the queue.
+ for (var scan = 0, newLength = queue.length - index; scan < newLength; scan++) {
+ queue[scan] = queue[scan + index];
+ }
+ queue.length -= index;
+ index = 0;
+ }
+ }
+ queue.length = 0;
+ index = 0;
+ flushing = false;
+}
+
+// `requestFlush` is implemented using a strategy based on data collected from
+// every available SauceLabs Selenium web driver worker at time of writing.
+// https://docs.google.com/spreadsheets/d/1mG-5UYGup5qxGdEMWkhP6BWCz053NUb2E1QoUTU16uA/edit#gid=783724593
+
+// Safari 6 and 6.1 for desktop, iPad, and iPhone are the only browsers that
+// have WebKitMutationObserver but not un-prefixed MutationObserver.
+// Must use `global` instead of `window` to work in both frames and web
+// workers. `global` is a provision of Browserify, Mr, Mrs, or Mop.
+var BrowserMutationObserver = global.MutationObserver || global.WebKitMutationObserver;
+
+// MutationObservers are desirable because they have high priority and work
+// reliably everywhere they are implemented.
+// They are implemented in all modern browsers.
+//
+// - Android 4-4.3
+// - Chrome 26-34
+// - Firefox 14-29
+// - Internet Explorer 11
+// - iPad Safari 6-7.1
+// - iPhone Safari 7-7.1
+// - Safari 6-7
+if (typeof BrowserMutationObserver === "function") {
+ requestFlush = makeRequestCallFromMutationObserver(flush);
+
+// MessageChannels are desirable because they give direct access to the HTML
+// task queue, are implemented in Internet Explorer 10, Safari 5.0-1, and Opera
+// 11-12, and in web workers in many engines.
+// Although message channels yield to any queued rendering and IO tasks, they
+// would be better than imposing the 4ms delay of timers.
+// However, they do not work reliably in Internet Explorer or Safari.
+
+// Internet Explorer 10 is the only browser that has setImmediate but does
+// not have MutationObservers.
+// Although setImmediate yields to the browser's renderer, it would be
+// preferable to falling back to setTimeout since it does not have
+// the minimum 4ms penalty.
+// Unfortunately there appears to be a bug in Internet Explorer 10 Mobile (and
+// Desktop to a lesser extent) that renders both setImmediate and
+// MessageChannel useless for the purposes of ASAP.
+// https://github.com/kriskowal/q/issues/396
+
+// Timers are implemented universally.
+// We fall back to timers in workers in most engines, and in foreground
+// contexts in the following browsers.
+// However, note that even this simple case requires nuances to operate in a
+// broad spectrum of browsers.
+//
+// - Firefox 3-13
+// - Internet Explorer 6-9
+// - iPad Safari 4.3
+// - Lynx 2.8.7
+} else {
+ requestFlush = makeRequestCallFromTimer(flush);
+}
+
+// `requestFlush` requests that the high priority event queue be flushed as
+// soon as possible.
+// This is useful to prevent an error thrown in a task from stalling the event
+// queue if the exception is handled by Node.js’s
+// `process.on("uncaughtException")` or by a domain.
+rawAsap.requestFlush = requestFlush;
+
+// To request a high priority event, we induce a mutation observer by toggling
+// the text of a text node between "1" and "-1".
+function makeRequestCallFromMutationObserver(callback) {
+ var toggle = 1;
+ var observer = new BrowserMutationObserver(callback);
+ var node = document.createTextNode("");
+ observer.observe(node, {characterData: true});
+ return function requestCall() {
+ toggle = -toggle;
+ node.data = toggle;
+ };
+}
+
+// The message channel technique was discovered by Malte Ubl and was the
+// original foundation for this library.
+// http://www.nonblocking.io/2011/06/windownexttick.html
+
+// Safari 6.0.5 (at least) intermittently fails to create message ports on a
+// page's first load. Thankfully, this version of Safari supports
+// MutationObservers, so we don't need to fall back in that case.
+
+// function makeRequestCallFromMessageChannel(callback) {
+// var channel = new MessageChannel();
+// channel.port1.onmessage = callback;
+// return function requestCall() {
+// channel.port2.postMessage(0);
+// };
+// }
+
+// For reasons explained above, we are also unable to use `setImmediate`
+// under any circumstances.
+// Even if we were, there is another bug in Internet Explorer 10.
+// It is not sufficient to assign `setImmediate` to `requestFlush` because
+// `setImmediate` must be called *by name* and therefore must be wrapped in a
+// closure.
+// Never forget.
+
+// function makeRequestCallFromSetImmediate(callback) {
+// return function requestCall() {
+// setImmediate(callback);
+// };
+// }
+
+// Safari 6.0 has a problem where timers will get lost while the user is
+// scrolling. This problem does not impact ASAP because Safari 6.0 supports
+// mutation observers, so that implementation is used instead.
+// However, if we ever elect to use timers in Safari, the prevalent work-around
+// is to add a scroll event listener that calls for a flush.
+
+// `setTimeout` does not call the passed callback if the delay is less than
+// approximately 7 milliseconds in web workers in Firefox 8 through 18, and
+// sometimes not even then.
+
+function makeRequestCallFromTimer(callback) {
+ return function requestCall() {
+ // We dispatch a timeout with a specified delay of 0 for engines that
+ // can reliably accommodate that request. This will usually be snapped
+        // to a 4 millisecond delay, but once we're flushing, there's no delay
+ // between events.
+ var timeoutHandle = setTimeout(handleTimer, 0);
+ // However, since this timer gets frequently dropped in Firefox
+ // workers, we enlist an interval handle that will try to fire
+ // an event 20 times per second until it succeeds.
+ var intervalHandle = setInterval(handleTimer, 50);
+
+ function handleTimer() {
+ // Whichever timer succeeds will cancel both timers and
+ // execute the callback.
+ clearTimeout(timeoutHandle);
+ clearInterval(intervalHandle);
+ callback();
+ }
+ };
+}
+
+// This is for `asap.js` only.
+// Its name will be periodically randomized to break any code that depends on
+// its existence.
+rawAsap.makeRequestCallFromTimer = makeRequestCallFromTimer;
+
+// ASAP was originally a nextTick shim included in Q. This was factored out
+// into this ASAP package. It was later adapted to RSVP which made further
+// amendments. These decisions, particularly to marginalize MessageChannel and
+// to capture the MutationObserver implementation in a closure, were integrated
+// back into ASAP proper.
+// https://github.com/tildeio/rsvp.js/blob/cddf7232546a9cf858524b75cde6f9edf72620a7/lib/rsvp/asap.js
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/package.json b/deps/npm/node_modules/dezalgo/node_modules/asap/package.json
index 25ffeeb94..e01b3f06d 100644
--- a/deps/npm/node_modules/dezalgo/node_modules/asap/package.json
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/package.json
@@ -1,26 +1,66 @@
{
"name": "asap",
- "version": "1.0.0",
+ "version": "2.0.3",
"description": "High-priority task queue for Node.js and browsers",
"keywords": [
"event",
"task",
"queue"
],
- "licenses": [
- {
- "type": "MIT",
- "url": "https://github.com/kriskowal/asap/raw/master/LICENSE.md"
- }
+ "license": {
+ "type": "MIT",
+ "url": "https://github.com/kriskowal/asap/raw/master/LICENSE.md"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/kriskowal/asap.git"
+ },
+ "main": "./asap.js",
+ "browser": {
+ "./asap.js": "./browser-asap.js",
+ "./raw.js": "./browser-raw.js",
+ "./test/domain.js": "./test/browser-domain.js"
+ },
+ "files": [
+ "raw.js",
+ "asap.js",
+ "browser-raw.js",
+ "browser-asap.js"
],
- "main": "asap",
- "_id": "asap@1.0.0",
- "dist": {
- "shasum": "b2a45da5fdfa20b0496fc3768cc27c12fa916a7d",
- "tarball": "http://registry.npmjs.org/asap/-/asap-1.0.0.tgz"
+ "scripts": {
+ "test": "npm run lint && npm run test-node",
+ "test-travis": "npm run lint && npm run test-node && npm run test-saucelabs && npm run test-saucelabs-worker",
+ "test-node": "node test/asap-test.js",
+ "test-publish": "node scripts/publish-bundle.js test/asap-test.js | pbcopy",
+ "test-browser": "node scripts/publish-bundle.js test/asap-test.js | xargs opener",
+ "test-saucelabs": "node scripts/saucelabs.js test/asap-test.js scripts/saucelabs-spot-configurations.json",
+ "test-saucelabs-all": "node scripts/saucelabs.js test/asap-test.js scripts/saucelabs-all-configurations.json",
+ "test-saucelabs-worker": "node scripts/saucelabs-worker-test.js scripts/saucelabs-spot-configurations.json",
+ "test-saucelabs-worker-all": "node scripts/saucelabs-worker-test.js scripts/saucelabs-all-configurations.json",
+ "lint": "jshint raw.js asap.js browser-raw.js browser-asap.js $(find scripts -name '*.js' | grep -v gauntlet)"
},
- "_from": "asap@>=1.0.0 <2.0.0",
- "_npmVersion": "1.2.15",
+ "devDependencies": {
+ "events": "^1.0.1",
+ "jshint": "^2.5.1",
+ "knox": "^0.8.10",
+ "mr": "^2.0.5",
+ "opener": "^1.3.0",
+ "q": "^2.0.3",
+ "q-io": "^2.0.3",
+ "saucelabs": "^0.1.1",
+ "wd": "^0.2.21",
+ "weak-map": "^1.0.5"
+ },
+ "gitHead": "ccbf94d4e4a0c3afc2df13331044020a46a74ab6",
+ "bugs": {
+ "url": "https://github.com/kriskowal/asap/issues"
+ },
+ "homepage": "https://github.com/kriskowal/asap#readme",
+ "_id": "asap@2.0.3",
+ "_shasum": "1fc1d1564ee11620dfca6d67029850913f9f4679",
+ "_from": "asap@>=2.0.0 <3.0.0",
+ "_npmVersion": "2.8.3",
+ "_nodeVersion": "1.8.1",
"_npmUser": {
"name": "kriskowal",
"email": "kris.kowal@cixar.com"
@@ -29,10 +69,17 @@
{
"name": "kriskowal",
"email": "kris.kowal@cixar.com"
+ },
+ {
+ "name": "forbeslindesay",
+ "email": "forbes@lindesay.co.uk"
}
],
+ "dist": {
+ "shasum": "1fc1d1564ee11620dfca6d67029850913f9f4679",
+ "tarball": "http://registry.npmjs.org/asap/-/asap-2.0.3.tgz"
+ },
"directories": {},
- "_shasum": "b2a45da5fdfa20b0496fc3768cc27c12fa916a7d",
- "_resolved": "https://registry.npmjs.org/asap/-/asap-1.0.0.tgz",
+ "_resolved": "https://registry.npmjs.org/asap/-/asap-2.0.3.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/dezalgo/node_modules/asap/raw.js b/deps/npm/node_modules/dezalgo/node_modules/asap/raw.js
new file mode 100644
index 000000000..ae3b89231
--- /dev/null
+++ b/deps/npm/node_modules/dezalgo/node_modules/asap/raw.js
@@ -0,0 +1,101 @@
+"use strict";
+
+var domain; // The domain module is executed on demand
+var hasSetImmediate = typeof setImmediate === "function";
+
+// Use the fastest means possible to execute a task in its own turn, with
+// priority over other events including network IO events in Node.js.
+//
+// An exception thrown by a task will permanently interrupt the processing of
+// subsequent tasks. The higher level `asap` function ensures that, if an
+// exception is thrown by a task, the task queue will continue flushing as
+// soon as possible, but if you use `rawAsap` directly, you are responsible for
+// either ensuring that no exceptions are thrown from your task, or for manually
+// calling `rawAsap.requestFlush` if an exception is thrown.
+module.exports = rawAsap;
+function rawAsap(task) {
+ if (!queue.length) {
+ requestFlush();
+ flushing = true;
+ }
+ // Avoids a function call
+ queue[queue.length] = task;
+}
+
+var queue = [];
+// Once a flush has been requested, no further calls to `requestFlush` are
+// necessary until the next `flush` completes.
+var flushing = false;
+// The position of the next task to execute in the task queue. This is
+// preserved between calls to `flush` so that it can be resumed if
+// a task throws an exception.
+var index = 0;
+// If a task schedules additional tasks recursively, the task queue can grow
+// unbounded. To prevent memory exhaustion, the task queue will periodically
+// truncate already-completed tasks.
+var capacity = 1024;
+
+// The flush function processes all tasks that have been scheduled with
+// `rawAsap` unless and until one of those tasks throws an exception.
+// If a task throws an exception, `flush` ensures that its state will remain
+// consistent and will resume where it left off when called again.
+// However, `flush` does not make any arrangements to be called again if an
+// exception is thrown.
+function flush() {
+ while (index < queue.length) {
+ var currentIndex = index;
+ // Advance the index before calling the task. This ensures that we will
+        // begin flushing on the next task if this task throws an error.
+ index = index + 1;
+ queue[currentIndex].call();
+ // Prevent leaking memory for long chains of recursive calls to `asap`.
+ // If we call `asap` within tasks scheduled by `asap`, the queue will
+ // grow, but to avoid an O(n) walk for every task we execute, we don't
+ // shift tasks off the queue after they have been executed.
+ // Instead, we periodically shift 1024 tasks off the queue.
+ if (index > capacity) {
+ // Manually shift all values starting at the index back to the
+ // beginning of the queue.
+ for (var scan = 0, newLength = queue.length - index; scan < newLength; scan++) {
+ queue[scan] = queue[scan + index];
+ }
+ queue.length -= index;
+ index = 0;
+ }
+ }
+ queue.length = 0;
+ index = 0;
+ flushing = false;
+}
+
+rawAsap.requestFlush = requestFlush;
+function requestFlush() {
+ // Ensure flushing is not bound to any domain.
+ // It is not sufficient to exit the domain, because domains exist on a stack.
+ // To execute code outside of any domain, the following dance is necessary.
+ var parentDomain = process.domain;
+ if (parentDomain) {
+ if (!domain) {
+ // Lazy execute the domain module.
+ // Only employed if the user elects to use domains.
+ domain = require("domain");
+ }
+ domain.active = process.domain = null;
+ }
+
+    // `setImmediate` is slower than `process.nextTick`, but `process.nextTick`
+ // cannot handle recursion.
+ // `requestFlush` will only be called recursively from `asap.js`, to resume
+ // flushing after an error is thrown into a domain.
+ // Conveniently, `setImmediate` was introduced in the same version
+ // `process.nextTick` started throwing recursion errors.
+ if (flushing && hasSetImmediate) {
+ setImmediate(flush);
+ } else {
+ process.nextTick(flush);
+ }
+
+ if (parentDomain) {
+ domain.active = process.domain = parentDomain;
+ }
+}
diff --git a/deps/npm/node_modules/dezalgo/package.json b/deps/npm/node_modules/dezalgo/package.json
index 656dd0c5f..ea2b1a6d3 100644
--- a/deps/npm/node_modules/dezalgo/package.json
+++ b/deps/npm/node_modules/dezalgo/package.json
@@ -1,17 +1,17 @@
{
"name": "dezalgo",
- "version": "1.0.2",
+ "version": "1.0.3",
"description": "Contain async insanity so that the dark pony lord doesn't eat souls",
"main": "dezalgo.js",
"directories": {
"test": "test"
},
"dependencies": {
- "asap": "^1.0.0",
+ "asap": "^2.0.0",
"wrappy": "1"
},
"devDependencies": {
- "tap": "^0.4.11"
+ "tap": "^1.2.0"
},
"scripts": {
"test": "tap test/*.js"
@@ -43,29 +43,10 @@
"url": "https://github.com/npm/dezalgo/issues"
},
"homepage": "https://github.com/npm/dezalgo",
- "gitHead": "fabfd09a9a4ad458d0c801a1dbfff2338b9bc001",
- "_id": "dezalgo@1.0.2",
- "_shasum": "2bc8b5a1683131764a98def7e4aa22105a688a5a",
- "_from": "dezalgo@>=1.0.1 <1.1.0",
- "_npmVersion": "2.10.1",
- "_nodeVersion": "2.0.2",
- "_npmUser": {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- },
- "dist": {
- "shasum": "2bc8b5a1683131764a98def7e4aa22105a688a5a",
- "tarball": "http://registry.npmjs.org/dezalgo/-/dezalgo-1.0.2.tgz"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "isaacs@npmjs.com"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- }
- ],
- "_resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.2.tgz"
+ "readme": "# dezalgo\n\nContain async insanity so that the dark pony lord doesn't eat souls\n\nSee [this blog\npost](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony).\n\n## USAGE\n\nPass a callback to `dezalgo` and it will ensure that it is *always*\ncalled in a future tick, and never in this tick.\n\n```javascript\nvar dz = require('dezalgo')\n\nvar cache = {}\nfunction maybeSync(arg, cb) {\n cb = dz(cb)\n\n // this will actually defer to nextTick\n if (cache[arg]) cb(null, cache[arg])\n\n fs.readFile(arg, function (er, data) {\n // since this is *already* defered, it will call immediately\n if (er) cb(er)\n cb(null, cache[arg] = data)\n })\n}\n```\n",
+ "readmeFilename": "README.md",
+ "gitHead": "d4d3f3f6f47b1a326194d5281349c83dde258458",
+ "_id": "dezalgo@1.0.3",
+ "_shasum": "7f742de066fc748bc8db820569dddce49bf0d456",
+ "_from": "dezalgo@>=1.0.3 <1.1.0"
}
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/README.md b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* Fixes `lchmod` for Node versions prior to 0.6.2.
+* Implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* Ignores `EINVAL` and `EPERM` errors in `chown`, `fchown`, or
+  `lchown` if the user isn't root.
+* Makes `lchmod` and `lchown` become noops, if not available.
+* Retries reading a file if `read` results in an EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCESS`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
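+
+As a rough sketch of the `EMFILE` queueing behavior (the loop count is
+arbitrary), many concurrent reads are queued and retried rather than failing
+once the file-descriptor limit is reached:
+
+```javascript
+var fs = require('graceful-fs')
+
+// Kick off far more reads than a typical descriptor limit allows;
+// graceful-fs queues the excess opens and retries them as others close.
+for (var i = 0; i < 10000; i++) {
+  fs.readFile(__filename, function (er, data) {
+    if (er) throw er
+  })
+}
+```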
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/package.json b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..6aa482b61
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@^3.0.2",
+ "/Users/isaacs/dev/npm/npm/node_modules/fs-vacuum"
+ ]
+ ],
+ "_from": "graceful-fs@>=3.0.2 <4.0.0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/fs-vacuum/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@^3.0.2",
+ "rawSpec": "^3.0.2",
+ "scope": null,
+ "spec": ">=3.0.2 <4.0.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/fs-vacuum"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@^3.0.2",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/fs-vacuum",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/fs-vacuum/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/README.md b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCES`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
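
The README above describes how graceful-fs queues `open` and `readdir` calls and retries them after a close when the process runs out of file descriptors (EMFILE/ENFILE). A minimal sketch of that behaviour, not part of the patch itself — the file count is illustrative and assumes graceful-fs is installed:

```javascript
// Sketch: graceful-fs queues open() calls that fail with EMFILE/ENFILE
// and retries them once another descriptor is closed, so a burst of
// parallel opens completes instead of throwing.
var fs = require('graceful-fs')

var remaining = 10000
for (var i = 0; i < 10000; i++) {
  fs.open(__filename, 'r', function (er, fd) {
    if (er) throw er          // with plain `fs` this can fail with EMFILE
    fs.close(fd, function (er) {
      if (er) throw er
      if (--remaining === 0) console.log('all opens completed')
    })
  })
}
```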
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/package.json b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..221f8b781
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@^3.0.2",
+ "/Users/isaacs/dev/npm/npm/node_modules/fs-write-stream-atomic"
+ ]
+ ],
+ "_from": "graceful-fs@>=3.0.2 <4.0.0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/fs-write-stream-atomic/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@^3.0.2",
+ "rawSpec": "^3.0.2",
+ "scope": null,
+ "spec": ">=3.0.2 <4.0.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/fs-write-stream-atomic"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@^3.0.2",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/fs-write-stream-atomic",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/fs-write-stream-atomic/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/fstream-npm/fstream-npm.js b/deps/npm/node_modules/fstream-npm/fstream-npm.js
index 7e44072bc..ab528952c 100644
--- a/deps/npm/node_modules/fstream-npm/fstream-npm.js
+++ b/deps/npm/node_modules/fstream-npm/fstream-npm.js
@@ -104,6 +104,10 @@ Packer.prototype.applyIgnores = function (entry, partial, entryObj) {
// special rules. see below.
if (entry === 'node_modules' && this.packageRoot) return true
+ // package.json main file should never be ignored.
+ var mainFile = this.package && this.package.main
+ if (mainFile && path.resolve(this.path, entry) === path.resolve(this.path, mainFile)) return true
+
// some files are *never* allowed under any circumstances
if (entry === '.git' ||
entry === '.lock-wscript' ||
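
The hunk above changes fstream-npm's ignore logic so that the file named by `package.json`'s `main` field is always packed, even if an ignore pattern would otherwise exclude it. A hedged illustration of that rule in isolation (paths and package contents are hypothetical, not from the patch):

```javascript
// Sketch: the whitelist check added above, extracted on its own.
// With package.json { "main": "server.js" } and an .npmignore entry that
// would otherwise drop server.js, the entry is still kept because it is
// the package's main file.
var path = require('path')

function isMainFile (packerPath, pkg, entry) {
  var mainFile = pkg && pkg.main
  return Boolean(mainFile) &&
    path.resolve(packerPath, entry) === path.resolve(packerPath, mainFile)
}

console.log(isMainFile('/tmp/demo', { main: 'server.js' }, 'server.js')) // true
console.log(isMainFile('/tmp/demo', { main: 'server.js' }, 'index.js'))  // false
```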
diff --git a/deps/npm/node_modules/fstream-npm/package.json b/deps/npm/node_modules/fstream-npm/package.json
index f3c5d3cfc..e44a6dd92 100644
--- a/deps/npm/node_modules/fstream-npm/package.json
+++ b/deps/npm/node_modules/fstream-npm/package.json
@@ -6,7 +6,7 @@
},
"name": "fstream-npm",
"description": "fstream class for creating npm packages",
- "version": "1.0.2",
+ "version": "1.0.4",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/fstream-npm.git"
@@ -20,17 +20,17 @@
"inherits": "2"
},
"devDependencies": {
- "standard": "^2.7.3"
+ "standard": "^4.3.1"
},
"license": "ISC",
"readme": "# fstream-npm\n\nThis is an fstream DirReader class that will read a directory and filter\nthings according to the semantics of what goes in an npm package.\n\nFor example:\n\n```javascript\n// This will print out all the files that would be included\n// by 'npm publish' or 'npm install' of this directory.\n\nvar FN = require(\"fstream-npm\")\nFN({ path: \"./\" })\n .on(\"child\", function (e) {\n console.error(e.path.substr(e.root.path.length + 1))\n })\n```\n\n",
"readmeFilename": "README.md",
- "gitHead": "d5e26643135522925effa2c112258f1feeec2ba5",
+ "gitHead": "d4bec1d24e777b8cef64857d26af828cff1bba68",
"bugs": {
"url": "https://github.com/isaacs/fstream-npm/issues"
},
- "homepage": "https://github.com/isaacs/fstream-npm",
- "_id": "fstream-npm@1.0.2",
- "_shasum": "a1d2a4ce6ac2db731f0f66a85b4dddfea9565d77",
- "_from": "fstream-npm@>=1.0.2 <1.1.0"
+ "homepage": "https://github.com/isaacs/fstream-npm#readme",
+ "_id": "fstream-npm@1.0.4",
+ "_shasum": "22196318b8fc2ab5ce15fd330fff931165e0305a",
+ "_from": "fstream-npm@>=1.0.3 <1.1.0"
}
diff --git a/deps/npm/node_modules/fstream/examples/filter-pipe.js b/deps/npm/node_modules/fstream/examples/filter-pipe.js
index fc59cfc7a..83dadef8a 100644
--- a/deps/npm/node_modules/fstream/examples/filter-pipe.js
+++ b/deps/npm/node_modules/fstream/examples/filter-pipe.js
@@ -81,9 +81,9 @@ function missile (entry) {
return function (c) {
var e = Math.random() < 0.5
console.error(indent + '%s %s for %d damage!',
- entry.basename,
- e ? 'is struck' : 'fires a chunk',
- c.length)
+ entry.basename,
+ e ? 'is struck' : 'fires a chunk',
+ c.length)
}
}
@@ -91,8 +91,8 @@ function runaway (entry) {
return function () {
var e = Math.random() < 0.5
console.error(indent + '%s %s',
- entry.basename,
- e ? 'turns to flee' : 'is vanquished!')
+ entry.basename,
+ e ? 'turns to flee' : 'is vanquished!')
indent = indent.slice(0, -1)
}
}
@@ -101,7 +101,7 @@ w.on('entry', attacks)
// w.on('ready', function () { attacks(w) })
function attacks (entry) {
console.error(indent + '%s %s!', entry.basename,
- entry.type === 'Directory' ? 'calls for backup' : 'attacks')
+ entry.type === 'Directory' ? 'calls for backup' : 'attacks')
entry.on('entry', attacks)
}
@@ -121,13 +121,14 @@ r.on('end', function () {
checker.on('child', function (e) {
var ok = e.type === 'Directory'
console.log((ok ? '' : 'not ') + 'ok ' + (i++) +
- ' should be a dir: ' +
- e.path.substr(checker.path.length + 1))
+ ' should be a dir: ' +
+ e.path.substr(checker.path.length + 1))
})
})
process.on('exit', function () {
- console.log((ended ? '' : 'not ') + 'ok ' + (i++) + ' ended')
+ console.log((ended ? '' : 'not ') + 'ok ' + (i) + ' ended')
+ console.log('1..' + i)
})
r.pipe(w)
diff --git a/deps/npm/node_modules/fstream/examples/pipe.js b/deps/npm/node_modules/fstream/examples/pipe.js
index c611dd5c4..3de42ef32 100644
--- a/deps/npm/node_modules/fstream/examples/pipe.js
+++ b/deps/npm/node_modules/fstream/examples/pipe.js
@@ -5,8 +5,8 @@ var r = fstream.Reader({
path: path.dirname(__dirname),
filter: function () {
return !this.basename.match(/^\./) &&
- !this.basename.match(/^node_modules$/) &&
- !this.basename.match(/^deep-copy$/)
+ !this.basename.match(/^node_modules$/) &&
+ !this.basename.match(/^deep-copy$/)
}
})
@@ -76,9 +76,9 @@ function missile (entry) {
return function (c) {
var e = Math.random() < 0.5
console.error(indent + '%s %s for %d damage!',
- entry.basename,
- e ? 'is struck' : 'fires a chunk',
- c.length)
+ entry.basename,
+ e ? 'is struck' : 'fires a chunk',
+ c.length)
}
}
@@ -86,8 +86,8 @@ function runaway (entry) {
return function () {
var e = Math.random() < 0.5
console.error(indent + '%s %s',
- entry.basename,
- e ? 'turns to flee' : 'is vanquished!')
+ entry.basename,
+ e ? 'turns to flee' : 'is vanquished!')
indent = indent.slice(0, -1)
}
}
@@ -96,7 +96,7 @@ w.on('entry', attacks)
// w.on('ready', function () { attacks(w) })
function attacks (entry) {
console.error(indent + '%s %s!', entry.basename,
- entry.type === 'Directory' ? 'calls for backup' : 'attacks')
+ entry.type === 'Directory' ? 'calls for backup' : 'attacks')
entry.on('entry', attacks)
}
@@ -112,6 +112,7 @@ r.on('end', function () {
process.on('exit', function () {
console.log((ended ? '' : 'not ') + 'ok 2 ended')
+ console.log('1..2')
})
r.pipe(w)
diff --git a/deps/npm/node_modules/fstream/examples/symlink-write.js b/deps/npm/node_modules/fstream/examples/symlink-write.js
index f6f51099b..19e81eea9 100644
--- a/deps/npm/node_modules/fstream/examples/symlink-write.js
+++ b/deps/npm/node_modules/fstream/examples/symlink-write.js
@@ -1,5 +1,6 @@
var fstream = require('../fstream.js')
var notOpen = false
+process.chdir(__dirname)
fstream
.Writer({
@@ -22,4 +23,5 @@ fstream
process.on('exit', function () {
console.log((notOpen ? '' : 'not ') + 'ok 3 should be closed')
+ console.log('1..3')
})
diff --git a/deps/npm/node_modules/fstream/lib/abstract.js b/deps/npm/node_modules/fstream/lib/abstract.js
index 94af1ae08..97c120e1d 100644
--- a/deps/npm/node_modules/fstream/lib/abstract.js
+++ b/deps/npm/node_modules/fstream/lib/abstract.js
@@ -32,22 +32,22 @@ Abstract.prototype.warn = function (msg, code) {
var er = decorate(msg, code, self)
if (!self.listeners('warn')) {
console.error('%s %s\n' +
- 'path = %s\n' +
- 'syscall = %s\n' +
- 'fstream_type = %s\n' +
- 'fstream_path = %s\n' +
- 'fstream_unc_path = %s\n' +
- 'fstream_class = %s\n' +
- 'fstream_stack =\n%s\n',
- code || 'UNKNOWN',
- er.stack,
- er.path,
- er.syscall,
- er.fstream_type,
- er.fstream_path,
- er.fstream_unc_path,
- er.fstream_class,
- er.fstream_stack.join('\n'))
+ 'path = %s\n' +
+ 'syscall = %s\n' +
+ 'fstream_type = %s\n' +
+ 'fstream_path = %s\n' +
+ 'fstream_unc_path = %s\n' +
+ 'fstream_class = %s\n' +
+ 'fstream_stack =\n%s\n',
+ code || 'UNKNOWN',
+ er.stack,
+ er.path,
+ er.syscall,
+ er.fstream_type,
+ er.fstream_path,
+ er.fstream_unc_path,
+ er.fstream_class,
+ er.fstream_stack.join('\n'))
} else {
self.emit('warn', er)
}
diff --git a/deps/npm/node_modules/fstream/lib/dir-writer.js b/deps/npm/node_modules/fstream/lib/dir-writer.js
index aed9e4db1..ec50dca90 100644
--- a/deps/npm/node_modules/fstream/lib/dir-writer.js
+++ b/deps/npm/node_modules/fstream/lib/dir-writer.js
@@ -23,7 +23,7 @@ function DirWriter (props) {
// should already be established as a Directory type
if (props.type !== 'Directory' || !props.Directory) {
self.error('Non-directory type ' + props.type + ' ' +
- JSON.stringify(props), null, true)
+ JSON.stringify(props), null, true)
}
Writer.call(this, props)
@@ -102,7 +102,7 @@ DirWriter.prototype._process = function () {
do {
pp = p._path || p.path
if (pp === self.root._path || pp === self._path ||
- (pp && pp.indexOf(self._path) === 0)) {
+ (pp && pp.indexOf(self._path) === 0)) {
// console.error('DW Exit (recursive)', entry.basename, self._path)
self._processing = false
if (entry._collected) entry.pipe()
diff --git a/deps/npm/node_modules/fstream/lib/file-reader.js b/deps/npm/node_modules/fstream/lib/file-reader.js
index 0757b286b..baa01f4b3 100644
--- a/deps/npm/node_modules/fstream/lib/file-reader.js
+++ b/deps/npm/node_modules/fstream/lib/file-reader.js
@@ -21,7 +21,7 @@ function FileReader (props) {
// XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
// with a HardLinkReader class.
if (!((props.type === 'Link' && props.Link) ||
- (props.type === 'File' && props.File))) {
+ (props.type === 'File' && props.File))) {
throw new Error('Non-file type ' + props.type)
}
@@ -63,8 +63,8 @@ FileReader.prototype._getStream = function () {
if (self._bytesEmitted !== self.props.size) {
self.error("Didn't get expected byte count\n" +
- 'expect: ' + self.props.size + '\n' +
- 'actual: ' + self._bytesEmitted)
+ 'expect: ' + self.props.size + '\n' +
+ 'actual: ' + self._bytesEmitted)
}
})
@@ -124,8 +124,8 @@ FileReader.prototype._read = function () {
}
self._buffer.length = 0
}
- // console.error("FR _read done")
- // that's about all there is to it.
+// console.error("FR _read done")
+// that's about all there is to it.
}
FileReader.prototype.pause = function (who) {
diff --git a/deps/npm/node_modules/fstream/lib/link-reader.js b/deps/npm/node_modules/fstream/lib/link-reader.js
index a44dd39d7..fb4cc67a9 100644
--- a/deps/npm/node_modules/fstream/lib/link-reader.js
+++ b/deps/npm/node_modules/fstream/lib/link-reader.js
@@ -19,7 +19,7 @@ function LinkReader (props) {
}
if (!((props.type === 'Link' && props.Link) ||
- (props.type === 'SymbolicLink' && props.SymbolicLink))) {
+ (props.type === 'SymbolicLink' && props.SymbolicLink))) {
throw new Error('Non-link type ' + props.type)
}
diff --git a/deps/npm/node_modules/fstream/lib/link-writer.js b/deps/npm/node_modules/fstream/lib/link-writer.js
index 07a9abf7e..af5428400 100644
--- a/deps/npm/node_modules/fstream/lib/link-writer.js
+++ b/deps/npm/node_modules/fstream/lib/link-writer.js
@@ -16,7 +16,7 @@ function LinkWriter (props) {
// should already be established as a Link type
if (!((props.type === 'Link' && props.Link) ||
- (props.type === 'SymbolicLink' && props.SymbolicLink))) {
+ (props.type === 'SymbolicLink' && props.SymbolicLink))) {
throw new Error('Non-link type ' + props.type)
}
@@ -66,8 +66,8 @@ function create (self, lp, link) {
// windows in some nice fashion.
if (er) {
if ((er.code === 'ENOENT' ||
- er.code === 'EACCES' ||
- er.code === 'EPERM') && process.platform === 'win32') {
+ er.code === 'EACCES' ||
+ er.code === 'EPERM') && process.platform === 'win32') {
self.ready = true
self.emit('ready')
self.emit('end')
diff --git a/deps/npm/node_modules/fstream/lib/reader.js b/deps/npm/node_modules/fstream/lib/reader.js
index 1d007ee21..876021f92 100644
--- a/deps/npm/node_modules/fstream/lib/reader.js
+++ b/deps/npm/node_modules/fstream/lib/reader.js
@@ -54,13 +54,13 @@ function Reader (props, currentStat) {
break
case 'Link':
- // XXX hard links are just files.
- // However, it would be good to keep track of files' dev+inode
- // and nlink values, and create a HardLinkReader that emits
- // a linkpath value of the original copy, so that the tar
- // writer can preserve them.
- // ClassType = HardLinkReader
- // break
+ // XXX hard links are just files.
+ // However, it would be good to keep track of files' dev+inode
+ // and nlink values, and create a HardLinkReader that emits
+ // a linkpath value of the original copy, so that the tar
+ // writer can preserve them.
+ // ClassType = HardLinkReader
+ // break
case 'File':
ClassType = require('./file-reader.js')
@@ -103,7 +103,7 @@ function Reader (props, currentStat) {
self._swallowErrors = true
// if (self._path.indexOf(" ") === -1) {
self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
- // }
+ // }
}
}
self.basename = props.basename = path.basename(self.path)
@@ -126,10 +126,10 @@ function Reader (props, currentStat) {
function alphasort (a, b) {
return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
+ : a.toLowerCase() > b.toLowerCase() ? 1
+ : a.toLowerCase() < b.toLowerCase() ? -1
+ : a > b ? 1
+ : -1
}
Reader.prototype._stat = function (currentStat) {
diff --git a/deps/npm/node_modules/fstream/lib/writer.js b/deps/npm/node_modules/fstream/lib/writer.js
index 25a608def..ca3396b5d 100644
--- a/deps/npm/node_modules/fstream/lib/writer.js
+++ b/deps/npm/node_modules/fstream/lib/writer.js
@@ -113,7 +113,7 @@ Writer.prototype._create = function () {
fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) {
if (er) {
return self.warn('Cannot create ' + self._path + '\n' +
- 'Unsupported type: ' + self.type, 'ENOTSUP')
+ 'Unsupported type: ' + self.type, 'ENOTSUP')
}
self._finish()
})
@@ -179,7 +179,7 @@ function create (self) {
function endChmod (self, want, current, path, cb) {
var wantMode = want.mode
var chmod = want.follow || self.type !== 'SymbolicLink'
- ? 'chmod' : 'lchmod'
+ ? 'chmod' : 'lchmod'
if (!fs[chmod]) return cb()
if (typeof wantMode !== 'number') return cb()
@@ -196,13 +196,13 @@ function endChown (self, want, current, path, cb) {
if (process.platform === 'win32') return cb()
if (!process.getuid || process.getuid() !== 0) return cb()
if (typeof want.uid !== 'number' &&
- typeof want.gid !== 'number') return cb()
+ typeof want.gid !== 'number') return cb()
if (current.uid === want.uid &&
- current.gid === want.gid) return cb()
+ current.gid === want.gid) return cb()
var chown = (self.props.follow || self.type !== 'SymbolicLink')
- ? 'chown' : 'lchown'
+ ? 'chown' : 'lchown'
if (!fs[chown]) return cb()
if (typeof want.uid !== 'number') want.uid = current.uid
@@ -215,7 +215,7 @@ function endUtimes (self, want, current, path, cb) {
if (!fs.utimes || process.platform === 'win32') return cb()
var utimes = (want.follow || self.type !== 'SymbolicLink')
- ? 'utimes' : 'lutimes'
+ ? 'utimes' : 'lutimes'
if (utimes === 'lutimes' && !fs[utimes]) {
utimes = 'utimes'
@@ -235,7 +235,7 @@ function endUtimes (self, want, current, path, cb) {
if (!isDate(meM)) meA = new Date(meM)
if (meA.getTime() === curA.getTime() &&
- meM.getTime() === curM.getTime()) return cb()
+ meM.getTime() === curM.getTime()) return cb()
fs[utimes](path, meA, meM, cb)
}
@@ -274,8 +274,8 @@ Writer.prototype._finish = function () {
// doesn't exist yet (especially if it was intended as a symlink),
// so swallow ENOENT errors here and just soldier on.
if (er.code === 'ENOENT' &&
- (self.type === 'Link' || self.type === 'SymbolicLink') &&
- process.platform === 'win32') {
+ (self.type === 'Link' || self.type === 'SymbolicLink') &&
+ process.platform === 'win32') {
self.ready = true
self.emit('ready')
self.emit('end')
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/fstream/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/fstream/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/README.md b/deps/npm/node_modules/fstream/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCES`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
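
The README above also notes that on Windows a rename is retried for up to one second when antivirus software holds a lock on the directory. A minimal sketch of that retry-until-deadline pattern, not part of the patch — the real polyfill applies it only on `win32` and only for `EACCES`/`EPERM`:

```javascript
// Sketch: retry fs.rename while the error is EACCES/EPERM and less than
// one second has elapsed, then hand the last error to the caller.
var fs = require('fs')

function renameWithRetry (from, to, cb) {
  var start = Date.now()
  fs.rename(from, to, function onRename (er) {
    if (er &&
        (er.code === 'EACCES' || er.code === 'EPERM') &&
        Date.now() - start < 1000) {
      return fs.rename(from, to, onRename)
    }
    cb(er)
  })
}
```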
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/package.json b/deps/npm/node_modules/fstream/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..458bb9538
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@3",
+ "/Users/isaacs/dev/npm/npm/node_modules/fstream"
+ ]
+ ],
+ "_from": "graceful-fs@>=3.0.0 <4.0.0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/fstream/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@3",
+ "rawSpec": "3",
+ "scope": null,
+ "spec": ">=3.0.0 <4.0.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/fstream"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@3",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/fstream",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
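The test stubs `fs.readdir` to return entries out of order and checks that the wrapper hands them back sorted; older graceful-fs sorted directory listings, so the rewrite keeps doing so for backwards compatibility (see the `files.sort()` call in graceful-fs.js further down). A minimal illustration of the difference, assuming graceful-fs is installed:

```javascript
// Sketch: graceful-fs sorts readdir results; plain fs returns them in
// whatever order the underlying filesystem provides.
var fs = require('fs')
var gfs = require('graceful-fs')

fs.readdir(__dirname, function (er, files) {
  if (er) throw er
  console.log('fs order      :', files)   // filesystem-dependent order
})

gfs.readdir(__dirname, function (er, files) {
  if (er) throw er
  console.log('graceful order:', files)   // always sorted
})
```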
diff --git a/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/fstream/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/fstream/package.json b/deps/npm/node_modules/fstream/package.json
index aa6bc1cf3..0109d940c 100644
--- a/deps/npm/node_modules/fstream/package.json
+++ b/deps/npm/node_modules/fstream/package.json
@@ -6,7 +6,7 @@
},
"name": "fstream",
"description": "Advanced file system stream things",
- "version": "1.0.6",
+ "version": "1.0.7",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/fstream.git"
@@ -22,45 +22,21 @@
"rimraf": "2"
},
"devDependencies": {
- "tap": "0",
- "standard": "^2.3.2"
+ "standard": "^4.0.0",
+ "tap": "^1.2.0"
},
"scripts": {
"test": "standard && tap examples/*.js"
},
"license": "ISC",
- "gitHead": "e0c0024379c5a94ca228d232e2794b6ffb0d3caf",
+ "readme": "Like FS streams, but with stat on them, and supporting directories and\nsymbolic links, as well as normal files. Also, you can use this to set\nthe stats on a file, even if you don't change its contents, or to create\na symlink, etc.\n\nSo, for example, you can \"write\" a directory, and it'll call `mkdir`. You\ncan specify a uid and gid, and it'll call `chown`. You can specify a\n`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink\nand provide a `linkpath` and it'll call `symlink`.\n\nNote that it won't automatically resolve symbolic links. So, if you\ncall `fstream.Reader('/some/symlink')` then you'll get an object\nthat stats and then ends immediately (since it has no data). To follow\nsymbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:\ntrue })`.\n\nThere are various checks to make sure that the bytes emitted are the\nsame as the intended size, if the size is set.\n\n## Examples\n\n```javascript\nfstream\n .Writer({ path: \"path/to/file\"\n , mode: 0755\n , size: 6\n })\n .write(\"hello\\n\")\n .end()\n```\n\nThis will create the directories if they're missing, and then write\n`hello\\n` into the file, chmod it to 0755, and assert that 6 bytes have\nbeen written when it's done.\n\n```javascript\nfstream\n .Writer({ path: \"path/to/file\"\n , mode: 0755\n , size: 6\n , flags: \"a\"\n })\n .write(\"hello\\n\")\n .end()\n```\n\nYou can pass flags in, if you want to append to a file.\n\n```javascript\nfstream\n .Writer({ path: \"path/to/symlink\"\n , linkpath: \"./file\"\n , SymbolicLink: true\n , mode: \"0755\" // octal strings supported\n })\n .end()\n```\n\nIf isSymbolicLink is a function, it'll be called, and if it returns\ntrue, then it'll treat it as a symlink. If it's not a function, then\nany truish value will make a symlink, or you can set `type:\n'SymbolicLink'`, which does the same thing.\n\nNote that the linkpath is relative to the symbolic link location, not\nthe parent dir or cwd.\n\n```javascript\nfstream\n .Reader(\"path/to/dir\")\n .pipe(fstream.Writer(\"path/to/other/dir\"))\n```\n\nThis will do like `cp -Rp path/to/dir path/to/other/dir`. If the other\ndir exists and isn't a directory, then it'll emit an error. It'll also\nset the uid, gid, mode, etc. to be identical. In this way, it's more\nlike `rsync -a` than simply a copy.\n",
+ "readmeFilename": "README.md",
+ "gitHead": "586e8efc1cf77883f6c22fc32a7cb38f0eb88911",
"bugs": {
"url": "https://github.com/isaacs/fstream/issues"
},
"homepage": "https://github.com/isaacs/fstream#readme",
- "_id": "fstream@1.0.6",
- "_shasum": "817e50312fb4ed90da865c8eb5ecd1d1d7aed0ec",
- "_from": "fstream@>=1.0.6 <1.1.0",
- "_npmVersion": "2.9.0",
- "_nodeVersion": "2.0.0",
- "_npmUser": {
- "name": "iarna",
- "email": "me@re-becca.org"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "isaacs@npmjs.com"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- },
- {
- "name": "iarna",
- "email": "me@re-becca.org"
- }
- ],
- "dist": {
- "shasum": "817e50312fb4ed90da865c8eb5ecd1d1d7aed0ec",
- "tarball": "http://registry.npmjs.org/fstream/-/fstream-1.0.6.tgz"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.6.tgz"
+ "_id": "fstream@1.0.7",
+ "_shasum": "455a1aa1d46077668d95b6d27838e1b1daa78c78",
+ "_from": "fstream@>=1.0.7 <1.1.0"
}
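For context on what fstream 1.0.7 provides, the README text embedded above boils down to stat-aware streams that can copy directory trees with their metadata. A minimal sketch of the directory-copy use it describes; the paths are placeholders and fstream is assumed to be installed:

```javascript
// Sketch of the `cp -Rp`-style copy described in the embedded README above.
// Paths are placeholders; assumes fstream is installed.
var fstream = require('fstream')

// Copies files, directories and symlinks, preserving mode/uid/gid/mtimes,
// which is why the README compares it to `rsync -a` rather than a plain copy.
fstream
  .Reader('path/to/dir')
  .pipe(fstream.Writer('path/to/other/dir'))
```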
diff --git a/deps/npm/node_modules/glob/README.md b/deps/npm/node_modules/glob/README.md
index fa993dcb6..063cf950a 100644
--- a/deps/npm/node_modules/glob/README.md
+++ b/deps/npm/node_modules/glob/README.md
@@ -262,7 +262,6 @@ the filesystem.
* `matchBase` Perform a basename-only match if the pattern does not
contain any slash characters. That is, `*.js` would be treated as
equivalent to `**/*.js`, matching all js files in all directories.
-* `nonull` Return the pattern when no matches are found.
* `nodir` Do not match directories, only files. (Note: to match
*only* directories, simply put a `/` at the end of the pattern.)
* `ignore` Add a pattern or an array of patterns to exclude matches.
diff --git a/deps/npm/node_modules/glob/glob.js b/deps/npm/node_modules/glob/glob.js
index d8ac4eff6..022d2ac8c 100644
--- a/deps/npm/node_modules/glob/glob.js
+++ b/deps/npm/node_modules/glob/glob.js
@@ -57,6 +57,7 @@ var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
var once = require('once')
@@ -432,6 +433,9 @@ Glob.prototype._emitMatch = function (index, e) {
if (this.matches[index][e])
return
+ if (isIgnored(this, e))
+ return
+
if (this.paused) {
this._emitQueue.push([index, e])
return
@@ -552,6 +556,7 @@ Glob.prototype._readdirError = function (f, er, cb) {
// handle errors, and cache the information
switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
this.cache[this._makeAbs(f)] = 'FILE'
break
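The two glob.js changes above are defensive: ENOTSUP from readdir is now cached the same way as ENOTDIR, and matches that hit an `ignore` pattern are filtered in `_emitMatch` as well. A small usage sketch of the `ignore` option those checks serve, assuming glob is installed:

```javascript
// Sketch: the `ignore` option excludes matches; the isIgnored() check added
// above is one of the places that filter is enforced. Assumes glob installed.
var glob = require('glob')

glob('**/*.js', { ignore: ['node_modules/**', '**/*.min.js'] }, function (er, files) {
  if (er) throw er
  console.log('matched %d js files outside node_modules', files.length)
})
```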
diff --git a/deps/npm/node_modules/glob/package.json b/deps/npm/node_modules/glob/package.json
index a0538e955..e4c9fb002 100644
--- a/deps/npm/node_modules/glob/package.json
+++ b/deps/npm/node_modules/glob/package.json
@@ -6,7 +6,7 @@
},
"name": "glob",
"description": "a little globber",
- "version": "5.0.10",
+ "version": "5.0.14",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-glob.git"
@@ -40,26 +40,26 @@
"test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js",
"bench": "bash benchmark.sh",
"prof": "bash prof.sh && cat profile.txt",
- "benchclean": "bash benchclean.sh"
+ "benchclean": "node benchclean.js"
},
"license": "ISC",
- "gitHead": "e3cdccc0e295c2e1d5f40cf74c73ea17a8319c5c",
+ "gitHead": "c47d4514f8f93f23b589afa18947306116bfe40f",
"bugs": {
"url": "https://github.com/isaacs/node-glob/issues"
},
"homepage": "https://github.com/isaacs/node-glob#readme",
- "_id": "glob@5.0.10",
- "_shasum": "3ee350319f31f352cef6899a48f6b6b7834c6899",
- "_from": "glob@>=5.0.10 <5.1.0",
- "_npmVersion": "2.10.1",
- "_nodeVersion": "2.0.1",
+ "_id": "glob@5.0.14",
+ "_shasum": "a811d507acb605441edd6cd2622a3c6f06cc00e1",
+ "_from": "glob@latest",
+ "_npmVersion": "3.1.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
"dist": {
- "shasum": "3ee350319f31f352cef6899a48f6b6b7834c6899",
- "tarball": "http://registry.npmjs.org/glob/-/glob-5.0.10.tgz"
+ "shasum": "a811d507acb605441edd6cd2622a3c6f06cc00e1",
+ "tarball": "http://registry.npmjs.org/glob/-/glob-5.0.14.tgz"
},
"maintainers": [
{
@@ -68,6 +68,5 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/glob/-/glob-5.0.10.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/glob/-/glob-5.0.14.tgz"
}
diff --git a/deps/npm/node_modules/glob/sync.js b/deps/npm/node_modules/glob/sync.js
index 78fada2c8..09883d2ce 100644
--- a/deps/npm/node_modules/glob/sync.js
+++ b/deps/npm/node_modules/glob/sync.js
@@ -57,7 +57,7 @@ GlobSync.prototype._finish = function () {
for (var p in matchset) {
try {
p = self._makeAbs(p)
- var real = fs.realpathSync(p, this.realpathCache)
+ var real = fs.realpathSync(p, self.realpathCache)
set[real] = true
} catch (er) {
if (er.syscall === 'stat')
@@ -303,6 +303,7 @@ GlobSync.prototype._readdirEntries = function (abs, entries) {
GlobSync.prototype._readdirError = function (f, er) {
// handle errors, and cache the information
switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
this.cache[this._makeAbs(f)] = 'FILE'
break
diff --git a/deps/npm/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/graceful-fs/LICENSE
index 19129e315..9d2c80369 100644
--- a/deps/npm/node_modules/graceful-fs/LICENSE
+++ b/deps/npm/node_modules/graceful-fs/LICENSE
@@ -1,6 +1,6 @@
The ISC License
-Copyright (c) Isaac Z. Schlueter and Contributors
+Copyright (c) Isaac Z. Schlueter, Ben Noordhuis, and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/deps/npm/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/graceful-fs/fs.js
index 64ad98023..8ad4a3839 100644
--- a/deps/npm/node_modules/graceful-fs/fs.js
+++ b/deps/npm/node_modules/graceful-fs/fs.js
@@ -1,11 +1,21 @@
-// eeeeeevvvvviiiiiiillllll
-// more evil than monkey-patching the native builtin?
-// Not sure.
-
-var mod = require("module")
-var pre = '(function (exports, require, module, __filename, __dirname) { '
-var post = '});'
-var src = pre + process.binding('natives').fs + post
-var vm = require('vm')
-var fn = vm.runInThisContext(src)
-fn(exports, require, module, __filename, __dirname)
+'use strict'
+
+var fs = require('fs')
+
+module.exports = clone(fs)
+
+function clone (obj) {
+ if (obj === null || typeof obj !== 'object')
+ return obj
+
+ if (obj instanceof Object)
+ var copy = { __proto__: obj.__proto__ }
+ else
+ var copy = Object.create(null)
+
+ Object.getOwnPropertyNames(obj).forEach(function (key) {
+ Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
+ })
+
+ return copy
+}
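The rewritten fs.js no longer re-evaluates the fs source in a fresh context; it clones the real module via property descriptors, so graceful-fs can patch the copy without mutating `require('fs')` for everyone else. A quick check of what that buys in the version shown here (4.1.2), assuming it is installed:

```javascript
// Sketch: graceful-fs 4 patches a descriptor-level clone of fs, so the real
// module is left untouched (unless gracefulify is called explicitly).
var fs = require('fs')
var gfs = require('graceful-fs')

console.log(gfs === fs)              // false: the export is a patched clone
console.log(typeof fs.gracefulify)   // 'undefined': real fs not mutated
console.log(typeof gfs.gracefulify)  // 'function': patch applied to the clone
```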
diff --git a/deps/npm/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/graceful-fs/graceful-fs.js
index fb206b838..fe3b17cb6 100644
--- a/deps/npm/node_modules/graceful-fs/graceful-fs.js
+++ b/deps/npm/node_modules/graceful-fs/graceful-fs.js
@@ -1,11 +1,7 @@
-// Monkey-patching the fs module.
-// It's ugly, but there is simply no other way to do this.
-var fs = module.exports = require('./fs.js')
-
-var assert = require('assert')
-
-// fix up some busted stuff, mostly on windows and old nodes
-require('./polyfills.js')
+var fs = require('fs')
+var polyfills = require('./polyfills.js')
+var legacy = require('./legacy-streams.js')
+var queue = []
var util = require('util')
@@ -13,146 +9,243 @@ function noop () {}
var debug = noop
if (util.debuglog)
- debug = util.debuglog('gfs')
-else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = util.debuglog('gfs4')
+else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
debug = function() {
var m = util.format.apply(util, arguments)
- m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
console.error(m)
}
-if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
process.on('exit', function() {
- debug('fds', fds)
debug(queue)
- assert.equal(queue.length, 0)
+ require('assert').equal(queue.length, 0)
})
}
-
-var originalOpen = fs.open
-fs.open = open
-
-function open(path, flags, mode, cb) {
- if (typeof mode === "function") cb = mode, mode = null
- if (typeof cb !== "function") cb = noop
- new OpenReq(path, flags, mode, cb)
+module.exports = patch(require('./fs.js'))
+if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH) {
+ module.exports = patch(fs)
}
-function OpenReq(path, flags, mode, cb) {
- this.path = path
- this.flags = flags
- this.mode = mode
- this.cb = cb
- Req.call(this)
-}
-
-util.inherits(OpenReq, Req)
-
-OpenReq.prototype.process = function() {
- originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
-}
-
-var fds = {}
-OpenReq.prototype.done = function(er, fd) {
- debug('open done', er, fd)
- if (fd)
- fds['fd' + fd] = this.path
- Req.prototype.done.call(this, er, fd)
-}
-
-
-var originalReaddir = fs.readdir
-fs.readdir = readdir
+// Always patch fs.close/closeSync, because we want to
+// retry() whenever a close happens *anywhere* in the program.
+// This is essential when multiple graceful-fs instances are
+// in play at the same time.
+fs.close = (function (fs$close) { return function (fd, cb) {
+ return fs$close.call(fs, fd, function (err) {
+ if (!err)
+ retry()
+
+ if (typeof cb === 'function')
+ cb.apply(this, arguments)
+ })
+}})(fs.close)
+
+fs.closeSync = (function (fs$closeSync) { return function (fd) {
+ // Note that graceful-fs also retries when fs.closeSync() fails.
+ // Looks like a bug to me, although it's probably a harmless one.
+ var rval = fs$closeSync.apply(fs, arguments)
+ retry()
+ return rval
+}})(fs.closeSync)
+
+function patch (fs) {
+ // Everything that references the open() function needs to be in here
+ polyfills(fs)
+ fs.gracefulify = patch
+ fs.FileReadStream = ReadStream; // Legacy name.
+ fs.FileWriteStream = WriteStream; // Legacy name.
+ fs.createReadStream = createReadStream
+ fs.createWriteStream = createWriteStream
+ var fs$readFile = fs.readFile
+ fs.readFile = readFile
+ function readFile (path, options, cb) {
+ if (typeof options === 'function')
+ cb = options, options = null
+
+ return go$readFile(path, options, cb)
+
+ function go$readFile (path, options, cb) {
+ return fs$readFile(path, options, function (err) {
+ if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+ enqueue([go$readFile, [path, options, cb]])
+ else {
+ if (typeof cb === 'function')
+ cb.apply(this, arguments)
+ retry()
+ }
+ })
+ }
+ }
-function readdir(path, cb) {
- if (typeof cb !== "function") cb = noop
- new ReaddirReq(path, cb)
-}
+ var fs$writeFile = fs.writeFile
+ fs.writeFile = writeFile
+ function writeFile (path, data, options, cb) {
+ if (typeof options === 'function')
+ cb = options, options = null
+
+ return go$writeFile(path, data, options, cb)
+
+ function go$writeFile (path, data, options, cb) {
+ return fs$writeFile(path, data, options, function (err) {
+ if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+ enqueue([go$writeFile, [path, data, options, cb]])
+ else {
+ if (typeof cb === 'function')
+ cb.apply(this, arguments)
+ retry()
+ }
+ })
+ }
+ }
-function ReaddirReq(path, cb) {
- this.path = path
- this.cb = cb
- Req.call(this)
-}
+ var fs$appendFile = fs.appendFile
+ if (fs$appendFile)
+ fs.appendFile = appendFile
+ function appendFile (path, data, options, cb) {
+ if (typeof options === 'function')
+ cb = options, options = null
+
+ return go$appendFile(path, data, options, cb)
+
+ function go$appendFile (path, data, options, cb) {
+ return fs$appendFile(path, data, options, function (err) {
+ if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+ enqueue([go$appendFile, [path, data, options, cb]])
+ else {
+ if (typeof cb === 'function')
+ cb.apply(this, arguments)
+ retry()
+ }
+ })
+ }
+ }
-util.inherits(ReaddirReq, Req)
+ var fs$readdir = fs.readdir
+ fs.readdir = readdir
+ function readdir (path, cb) {
+ return go$readdir(path, cb)
+
+ function go$readdir () {
+ return fs$readdir(path, function (err, files) {
+ if (files && files.sort)
+ files.sort(); // Backwards compatibility with graceful-fs.
+
+ if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+ enqueue([go$readdir, [path, cb]])
+ else {
+ if (typeof cb === 'function')
+ cb.apply(this, arguments)
+ retry()
+ }
+ })
+ }
+ }
-ReaddirReq.prototype.process = function() {
- originalReaddir.call(fs, this.path, this.done)
-}
-ReaddirReq.prototype.done = function(er, files) {
- if (files && files.sort)
- files = files.sort()
- Req.prototype.done.call(this, er, files)
- onclose()
-}
+ if (process.version.substr(0, 4) === 'v0.8') {
+ var legStreams = legacy(fs)
+ ReadStream = legStreams.ReadStream
+ WriteStream = legStreams.WriteStream
+ }
+ var fs$ReadStream = fs.ReadStream
+ ReadStream.prototype = Object.create(fs$ReadStream.prototype)
+ ReadStream.prototype.open = ReadStream$open
-var originalClose = fs.close
-fs.close = close
+ var fs$WriteStream = fs.WriteStream
+ WriteStream.prototype = Object.create(fs$WriteStream.prototype)
+ WriteStream.prototype.open = WriteStream$open
-function close (fd, cb) {
- debug('close', fd)
- if (typeof cb !== "function") cb = noop
- delete fds['fd' + fd]
- originalClose.call(fs, fd, function(er) {
- onclose()
- cb(er)
- })
-}
+ fs.ReadStream = ReadStream
+ fs.WriteStream = WriteStream
+ function ReadStream (path, options) {
+ if (this instanceof ReadStream)
+ return fs$ReadStream.apply(this, arguments), this
+ else
+ return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
+ }
-var originalCloseSync = fs.closeSync
-fs.closeSync = closeSync
+ function ReadStream$open () {
+ var that = this
+ open(that.path, that.flags, that.mode, function (err, fd) {
+ if (err) {
+ if (that.autoClose)
+ that.destroy()
+
+ that.emit('error', err)
+ } else {
+ that.fd = fd
+ that.emit('open', fd)
+ that.read()
+ }
+ })
+ }
-function closeSync (fd) {
- try {
- return originalCloseSync(fd)
- } finally {
- onclose()
+ function WriteStream (path, options) {
+ if (this instanceof WriteStream)
+ return fs$WriteStream.apply(this, arguments), this
+ else
+ return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
}
-}
+ function WriteStream$open () {
+ var that = this
+ open(that.path, that.flags, that.mode, function (err, fd) {
+ if (err) {
+ that.destroy()
+ that.emit('error', err)
+ } else {
+ that.fd = fd
+ that.emit('open', fd)
+ }
+ })
+ }
-// Req class
-function Req () {
- // start processing
- this.done = this.done.bind(this)
- this.failures = 0
- this.process()
-}
+ function createReadStream (path, options) {
+ return new ReadStream(path, options)
+ }
-Req.prototype.done = function (er, result) {
- var tryAgain = false
- if (er) {
- var code = er.code
- var tryAgain = code === "EMFILE" || code === "ENFILE"
- if (process.platform === "win32")
- tryAgain = tryAgain || code === "OK"
+ function createWriteStream (path, options) {
+ return new WriteStream(path, options)
}
- if (tryAgain) {
- this.failures ++
- enqueue(this)
- } else {
- var cb = this.cb
- cb(er, result)
+ var fs$open = fs.open
+ fs.open = open
+ function open (path, flags, mode, cb) {
+ if (typeof mode === 'function')
+ cb = mode, mode = null
+
+ return go$open(path, flags, mode, cb)
+
+ function go$open (path, flags, mode, cb) {
+ return fs$open(path, flags, mode, function (err, fd) {
+ if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+ enqueue([go$open, [path, flags, mode, cb]])
+ else {
+ if (typeof cb === 'function')
+ cb.apply(this, arguments)
+ retry()
+ }
+ })
+ }
}
-}
-var queue = []
+ return fs
+}
-function enqueue(req) {
- queue.push(req)
- debug('enqueue %d %s', queue.length, req.constructor.name, req)
+function enqueue (elem) {
+ debug('ENQUEUE', elem[0].name, elem[1])
+ queue.push(elem)
}
-function onclose() {
- var req = queue.shift()
- if (req) {
- debug('process', req.constructor.name, req)
- req.process()
+function retry () {
+ var elem = queue.shift()
+ if (elem) {
+ debug('RETRY', elem[0].name, elem[1])
+ elem[0].apply(null, elem[1])
}
}
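The new design replaces the Req class hierarchy with a plain queue of `[function, args]` pairs: any EMFILE/ENFILE failure is enqueued, and every close (patched globally above) calls `retry()` to shift one entry off and re-run it. The same mechanism in miniature, detached from fs, with hypothetical names standing in for open()/readFile():

```javascript
// Miniature of the enqueue/retry mechanism above, detached from fs.
// `limitedResource` is a hypothetical operation that fails while `capacity`
// is exhausted, standing in for open() hitting EMFILE.
var queue = []
var capacity = 2

function enqueue (elem) { queue.push(elem) }

function retry () {
  var elem = queue.shift()
  if (elem) elem[0].apply(null, elem[1])     // re-run the stored call as-is
}

function limitedResource (id, cb) {
  if (capacity === 0) {
    // "EMFILE": park the exact call and try again after the next release
    return enqueue([limitedResource, [id, cb]])
  }
  capacity--
  setTimeout(function () {                   // simulate the resource being used
    capacity++
    cb(null, id)
    retry()                                  // like the patched fs.close()
  }, 10)
}

for (var i = 0; i < 6; i++) {
  limitedResource(i, function (er, id) {
    console.log('completed request %d', id)
  })
}
```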
diff --git a/deps/npm/node_modules/graceful-fs/legacy-streams.js b/deps/npm/node_modules/graceful-fs/legacy-streams.js
new file mode 100644
index 000000000..d617b50fc
--- /dev/null
+++ b/deps/npm/node_modules/graceful-fs/legacy-streams.js
@@ -0,0 +1,118 @@
+var Stream = require('stream').Stream
+
+module.exports = legacy
+
+function legacy (fs) {
+ return {
+ ReadStream: ReadStream,
+ WriteStream: WriteStream
+ }
+
+ function ReadStream (path, options) {
+ if (!(this instanceof ReadStream)) return new ReadStream(path, options);
+
+ Stream.call(this);
+
+ var self = this;
+
+ this.path = path;
+ this.fd = null;
+ this.readable = true;
+ this.paused = false;
+
+ this.flags = 'r';
+ this.mode = 438; /*=0666*/
+ this.bufferSize = 64 * 1024;
+
+ options = options || {};
+
+ // Mixin options into this
+ var keys = Object.keys(options);
+ for (var index = 0, length = keys.length; index < length; index++) {
+ var key = keys[index];
+ this[key] = options[key];
+ }
+
+ if (this.encoding) this.setEncoding(this.encoding);
+
+ if (this.start !== undefined) {
+ if ('number' !== typeof this.start) {
+ throw TypeError('start must be a Number');
+ }
+ if (this.end === undefined) {
+ this.end = Infinity;
+ } else if ('number' !== typeof this.end) {
+ throw TypeError('end must be a Number');
+ }
+
+ if (this.start > this.end) {
+ throw new Error('start must be <= end');
+ }
+
+ this.pos = this.start;
+ }
+
+ if (this.fd !== null) {
+ process.nextTick(function() {
+ self._read();
+ });
+ return;
+ }
+
+ fs.open(this.path, this.flags, this.mode, function (err, fd) {
+ if (err) {
+ self.emit('error', err);
+ self.readable = false;
+ return;
+ }
+
+ self.fd = fd;
+ self.emit('open', fd);
+ self._read();
+ })
+ }
+
+ function WriteStream (path, options) {
+ if (!(this instanceof WriteStream)) return new WriteStream(path, options);
+
+ Stream.call(this);
+
+ this.path = path;
+ this.fd = null;
+ this.writable = true;
+
+ this.flags = 'w';
+ this.encoding = 'binary';
+ this.mode = 438; /*=0666*/
+ this.bytesWritten = 0;
+
+ options = options || {};
+
+ // Mixin options into this
+ var keys = Object.keys(options);
+ for (var index = 0, length = keys.length; index < length; index++) {
+ var key = keys[index];
+ this[key] = options[key];
+ }
+
+ if (this.start !== undefined) {
+ if ('number' !== typeof this.start) {
+ throw TypeError('start must be a Number');
+ }
+ if (this.start < 0) {
+ throw new Error('start must be >= zero');
+ }
+
+ this.pos = this.start;
+ }
+
+ this.busy = false;
+ this._queue = [];
+
+ if (this.fd === null) {
+ this._open = fs.open;
+ this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
+ this.flush();
+ }
+ }
+}
diff --git a/deps/npm/node_modules/graceful-fs/package.json b/deps/npm/node_modules/graceful-fs/package.json
index c4a98d326..f9e1598a6 100644
--- a/deps/npm/node_modules/graceful-fs/package.json
+++ b/deps/npm/node_modules/graceful-fs/package.json
@@ -1,10 +1,40 @@
{
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me"
+ "_args": [
+ [
+ "graceful-fs@latest",
+ "/Users/isaacs/dev/npm/npm"
+ ]
+ ],
+ "_from": "graceful-fs@latest",
+ "_id": "graceful-fs@4.1.2",
+ "_inCache": true,
+ "_location": "/graceful-fs",
+ "_nodeVersion": "2.2.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
},
- "name": "graceful-fs",
+ "_npmVersion": "3.0.0",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@latest",
+ "rawSpec": "latest",
+ "scope": null,
+ "spec": "latest",
+ "type": "tag"
+ },
+ "_requiredBy": [
+ "/"
+ ],
+ "_shasum": "fe2239b7574972e67e41f808823f9bfa4a991e37",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@latest",
+ "_where": "/Users/isaacs/dev/npm/npm",
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
"description": "A drop-in replacement for fs, making various improvements.",
"version": "3.0.8",
"repository": {
@@ -18,9 +48,21 @@
"directories": {
"test": "test"
},
- "scripts": {
- "test": "tap test/*.js"
+ "dist": {
+ "shasum": "fe2239b7574972e67e41f808823f9bfa4a991e37",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.2.tgz"
},
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "files": [
+ "fs.js",
+ "graceful-fs.js",
+ "legacy-streams.js",
+ "polyfills.js"
+ ],
+ "gitHead": "c286080071b6be9aa9ba108b0bb9b44ff122926d",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
"keywords": [
"fs",
"module",
@@ -67,5 +109,14 @@
"email": "i@izs.me"
}
],
- "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "node test.js | tap -"
+ },
+ "version": "4.1.2"
}
diff --git a/deps/npm/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/graceful-fs/polyfills.js
index 42705391a..5e4f48046 100644
--- a/deps/npm/node_modules/graceful-fs/polyfills.js
+++ b/deps/npm/node_modules/graceful-fs/polyfills.js
@@ -8,18 +8,118 @@ process.cwd = function() {
cwd = origCwd.call(process)
return cwd
}
+try {
+ process.cwd()
+} catch (er) {}
+
var chdir = process.chdir
process.chdir = function(d) {
cwd = null
chdir.call(process, d)
}
-// (re-)implement some things that are known busted or missing.
+module.exports = patch
+
+function patch (fs) {
+ // (re-)implement some things that are known busted or missing.
+
+ // lchmod, broken prior to 0.6.2
+ // back-port the fix here.
+ if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ patchLchmod(fs)
+ }
+
+ // lutimes implementation, or no-op
+ if (!fs.lutimes) {
+ patchLutimes(fs)
+ }
+
+ // https://github.com/isaacs/node-graceful-fs/issues/4
+ // Chown should not fail on einval or eperm if non-root.
+ // It should not fail on enosys ever, as this just indicates
+ // that a fs doesn't support the intended operation.
+
+ fs.chown = chownFix(fs.chown)
+ fs.fchown = chownFix(fs.fchown)
+ fs.lchown = chownFix(fs.lchown)
+
+ fs.chmod = chownFix(fs.chmod)
+ fs.fchmod = chownFix(fs.fchmod)
+ fs.lchmod = chownFix(fs.lchmod)
+
+ fs.chownSync = chownFixSync(fs.chownSync)
+ fs.fchownSync = chownFixSync(fs.fchownSync)
+ fs.lchownSync = chownFixSync(fs.lchownSync)
+
+ fs.chmodSync = chownFix(fs.chmodSync)
+ fs.fchmodSync = chownFix(fs.fchmodSync)
+ fs.lchmodSync = chownFix(fs.lchmodSync)
-// lchmod, broken prior to 0.6.2
-// back-port the fix here.
-if (constants.hasOwnProperty('O_SYMLINK') &&
- process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ // if lchmod/lchown do not exist, then make them no-ops
+ if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+ }
+ if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+ }
+
+ // on Windows, A/V software can lock the directory, causing this
+ // to fail with an EACCES or EPERM if the directory contains newly
+ // created files. Try again on failure, for up to 1 second.
+ if (process.platform === "win32") {
+ fs.rename = (function (fs$rename) { return function (from, to, cb) {
+ var start = Date.now()
+ fs$rename(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return fs$rename(from, to, CB)
+ }
+ if (cb) cb(er)
+ })
+ }})(fs.rename)
+ }
+
+ // if read() returns EAGAIN, then just try it again.
+ fs.read = (function (fs$read) { return function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return fs$read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return fs$read.call(fs, fd, buffer, offset, length, position, callback)
+ }})(fs.read)
+
+ fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return fs$readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+ }})(fs.readSync)
+}
+
+function patchLchmod (fs) {
fs.lchmod = function (path, mode, callback) {
callback = callback || noop
fs.open( path
@@ -45,25 +145,25 @@ if (constants.hasOwnProperty('O_SYMLINK') &&
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
- var err, err2
- try {
- var ret = fs.fchmodSync(fd, mode)
- } catch (er) {
- err = er
- }
+ var threw = true
+ var ret
try {
- fs.closeSync(fd)
- } catch (er) {
- err2 = er
+ ret = fs.fchmodSync(fd, mode)
+ threw = false
+ } finally {
+ if (threw) {
+ try {
+ fs.closeSync(fd)
+ } catch (er) {}
+ } else {
+ fs.closeSync(fd)
+ }
}
- if (err || err2) throw (err || err2)
return ret
}
}
-
-// lutimes implementation, or no-op
-if (!fs.lutimes) {
+function patchLutimes (fs) {
if (constants.hasOwnProperty("O_SYMLINK")) {
fs.lutimes = function (path, at, mt, cb) {
fs.open(path, constants.O_SYMLINK, function (er, fd) {
@@ -79,62 +179,29 @@ if (!fs.lutimes) {
fs.lutimesSync = function (path, at, mt) {
var fd = fs.openSync(path, constants.O_SYMLINK)
- , err
- , err2
- , ret
-
+ var ret
+ var threw = true
try {
- var ret = fs.futimesSync(fd, at, mt)
- } catch (er) {
- err = er
- }
- try {
- fs.closeSync(fd)
- } catch (er) {
- err2 = er
+ ret = fs.futimesSync(fd, at, mt)
+ threw = false
+ } finally {
+ if (threw) {
+ try {
+ fs.closeSync(fd)
+ } catch (er) {}
+ } else {
+ fs.closeSync(fd)
+ }
}
- if (err || err2) throw (err || err2)
return ret
}
- } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
- // maybe utimensat will be bound soonish?
- fs.lutimes = function (path, at, mt, cb) {
- fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
- }
-
- fs.lutimesSync = function (path, at, mt) {
- return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
- }
-
} else {
fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
fs.lutimesSync = function () {}
}
}
-
-// https://github.com/isaacs/node-graceful-fs/issues/4
-// Chown should not fail on einval or eperm if non-root.
-// It should not fail on enosys ever, as this just indicates
-// that a fs doesn't support the intended operation.
-
-fs.chown = chownFix(fs.chown)
-fs.fchown = chownFix(fs.fchown)
-fs.lchown = chownFix(fs.lchown)
-
-fs.chmod = chownFix(fs.chmod)
-fs.fchmod = chownFix(fs.fchmod)
-fs.lchmod = chownFix(fs.lchmod)
-
-fs.chownSync = chownFixSync(fs.chownSync)
-fs.fchownSync = chownFixSync(fs.fchownSync)
-fs.lchownSync = chownFixSync(fs.lchownSync)
-
-fs.chmodSync = chownFix(fs.chmodSync)
-fs.fchmodSync = chownFix(fs.fchmodSync)
-fs.lchmodSync = chownFix(fs.lchmodSync)
-
function chownFix (orig) {
if (!orig) return orig
return function (target, uid, gid, cb) {
@@ -183,73 +250,3 @@ function chownErOk (er) {
return false
}
-
-
-// if lchmod/lchown do not exist, then make them no-ops
-if (!fs.lchmod) {
- fs.lchmod = function (path, mode, cb) {
- process.nextTick(cb)
- }
- fs.lchmodSync = function () {}
-}
-if (!fs.lchown) {
- fs.lchown = function (path, uid, gid, cb) {
- process.nextTick(cb)
- }
- fs.lchownSync = function () {}
-}
-
-
-
-// on Windows, A/V software can lock the directory, causing this
-// to fail with an EACCES or EPERM if the directory contains newly
-// created files. Try again on failure, for up to 1 second.
-if (process.platform === "win32") {
- var rename_ = fs.rename
- fs.rename = function rename (from, to, cb) {
- var start = Date.now()
- rename_(from, to, function CB (er) {
- if (er
- && (er.code === "EACCES" || er.code === "EPERM")
- && Date.now() - start < 1000) {
- return rename_(from, to, CB)
- }
- if(cb) cb(er)
- })
- }
-}
-
-
-// if read() returns EAGAIN, then just try it again.
-var read = fs.read
-fs.read = function (fd, buffer, offset, length, position, callback_) {
- var callback
- if (callback_ && typeof callback_ === 'function') {
- var eagCounter = 0
- callback = function (er, _, __) {
- if (er && er.code === 'EAGAIN' && eagCounter < 10) {
- eagCounter ++
- return read.call(fs, fd, buffer, offset, length, position, callback)
- }
- callback_.apply(this, arguments)
- }
- }
- return read.call(fs, fd, buffer, offset, length, position, callback)
-}
-
-var readSync = fs.readSync
-fs.readSync = function (fd, buffer, offset, length, position) {
- var eagCounter = 0
- while (true) {
- try {
- return readSync.call(fs, fd, buffer, offset, length, position)
- } catch (er) {
- if (er.code === 'EAGAIN' && eagCounter < 10) {
- eagCounter ++
- continue
- }
- throw er
- }
- }
-}
-
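With polyfills.js now exporting `patch(fs)` instead of patching at require time, the fixes are applied to whichever object is passed in: the internal clone by default, or, via the `gracefulify` hook added in graceful-fs.js above, the global fs module when a consumer explicitly opts in. A usage sketch of that opt-in, assuming graceful-fs 4.x is installed:

```javascript
// Sketch: opting in to patching the global fs module, the hook the refactor
// above enables. Assumes graceful-fs@4 is installed.
var realFs = require('fs')
var gracefulFs = require('graceful-fs')

gracefulFs.gracefulify(realFs)   // applies polyfills + EMFILE queueing to `fs`

// From here on, even code that requires plain 'fs' gets the patched behavior.
realFs.readFile(__filename, 'utf8', function (er, src) {
  if (er) throw er
  console.log('read %d characters through the gracefulified fs', src.length)
})
```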
diff --git a/deps/npm/node_modules/init-package-json/.travis.yml b/deps/npm/node_modules/init-package-json/.travis.yml
index 05d299e67..991d04b6e 100644
--- a/deps/npm/node_modules/init-package-json/.travis.yml
+++ b/deps/npm/node_modules/init-package-json/.travis.yml
@@ -1,4 +1,5 @@
language: node_js
node_js:
- - "0.10"
- - "0.11"
+ - '0.10'
+ - '0.12'
+ - 'iojs'
diff --git a/deps/npm/node_modules/init-package-json/default-input.js b/deps/npm/node_modules/init-package-json/default-input.js
index 886291dce..ef5161e79 100644
--- a/deps/npm/node_modules/init-package-json/default-input.js
+++ b/deps/npm/node_modules/init-package-json/default-input.js
@@ -12,7 +12,7 @@ function isTestPkg (p) {
}
function niceName (n) {
- return n.replace(/^node-|[.-]js$/g, '')
+ return n.replace(/^node-|[.-]js$/g, '').toLowerCase()
}
function readDeps (test) { return function (cb) {
@@ -72,7 +72,6 @@ exports.version = yes ?
if (semver.valid(version)) return version
var er = new Error('Invalid version: "' + version + '"')
er.notValid = true
- er.again = true
return er
})
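The `niceName` tweak above now lowercases the derived default name, presumably to satisfy npm's rule that new package names be lowercase. A quick illustration of the helper exactly as patched:

```javascript
// The helper as patched above, reproduced for illustration.
function niceName (n) {
  return n.replace(/^node-|[.-]js$/g, '').toLowerCase()
}

console.log(niceName('node-Foo.js'))   // 'foo'  (prefix/suffix stripped, lowercased)
console.log(niceName('My-Module'))     // 'my-module'
```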
diff --git a/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json b/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json
index 1007cdde4..1407e97be 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json
+++ b/deps/npm/node_modules/init-package-json/node_modules/promzard/package.json
@@ -8,7 +8,7 @@
"description": "prompting wizardly",
"version": "0.3.0",
"repository": {
- "url": "git://github.com/isaacs/promzard"
+ "url": "git://github.com/isaacs/promzard.git"
},
"dependencies": {
"read": "1"
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md
deleted file mode 100644
index 2180a8c1a..000000000
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/LICENSE.md
+++ /dev/null
@@ -1,7 +0,0 @@
-Copyright Kyle E. Mitchell
-
-Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md
deleted file mode 100644
index 904f74b9d..000000000
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-npm-validate-package-license
-============================
-
-Give me a string and I'll tell you if it's a valid npm package license.
-
-*This package is not endorsed or approved by npm. It is part of a proposal to add license field validation to the npm command-line interface.*
-
-<!-- js var valid = require('./'); -->
-
-```js
-var validResult = {
- validForNewPackages: true,
- validForOldPackages: true
-};
-
-valid('Apache-2.0'); // => validResult
-valid('GPL-3.0 OR BSD-2-Clause'); // => validResult
-
-var invalidResult = {
- validForOldPackages: false,
- validForNewPackages: false,
- warnings: [
- 'license should be a valid SPDX license expression',
- 'license is similar to the valid expression "Apache-2.0"'
- ]
-};
-
-valid('Apache 2.0'); // => invalidResult
-```
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js
deleted file mode 100644
index c8407a520..000000000
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/index.js
+++ /dev/null
@@ -1,26 +0,0 @@
-var spdx = require('spdx');
-var correct = require('spdx-correct');
-
-module.exports = function(argument) {
- if (spdx.valid(argument)) {
- return {
- validForNewPackages: true,
- validForOldPackages: true
- };
- } else {
- var warnings = [
- 'license should be a valid SPDX license expression'
- ];
- var corrected = correct(argument);
- if (corrected) {
- warnings.push(
- 'license is similar to the valid expression "' + corrected + '"'
- );
- }
- return {
- validForOldPackages: false,
- validForNewPackages: false,
- warnings: warnings
- };
- }
-};
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore b/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore
deleted file mode 100644
index 5229acdc8..000000000
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/.npmignore
+++ /dev/null
@@ -1,5 +0,0 @@
-.gitignore
-.jscsrc
-.jshintrc
-test
-.travis.yml
diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json
index 62d01c59d..ad9fab899 100644
--- a/deps/npm/node_modules/init-package-json/package.json
+++ b/deps/npm/node_modules/init-package-json/package.json
@@ -1,13 +1,13 @@
{
"name": "init-package-json",
- "version": "1.6.0",
+ "version": "1.7.1",
"main": "init-package-json.js",
"scripts": {
"test": "tap test/*.js"
},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/init-package-json"
+ "url": "git://github.com/isaacs/init-package-json.git"
},
"author": {
"name": "Isaac Z. Schlueter",
@@ -22,14 +22,14 @@
"promzard": "^0.3.0",
"read": "~1.0.1",
"read-package-json": "1 || 2",
- "semver": "2.x || 3.x || 4",
- "validate-npm-package-license": "1.0.0-prerelease-2",
+ "semver": "2.x || 3.x || 4 || 5",
+ "validate-npm-package-license": "^2.0.0",
"validate-npm-package-name": "^2.0.1"
},
"devDependencies": {
"npm": "^2",
"rimraf": "^2.1.4",
- "tap": "^0.7.1"
+ "tap": "^1.2.0"
},
"keywords": [
"init",
@@ -41,38 +41,14 @@
"prompt",
"start"
],
- "gitHead": "b747e9f71eb65b22bb9139e5252bf8efb23571e1",
+ "readme": "# init-package-json\n\nA node module to get your node module started.\n\n[![Build Status](https://secure.travis-ci.org/npm/init-package-json.svg)](http://travis-ci.org/npm/init-package-json)\n\n## Usage\n\n```javascript\nvar init = require('init-package-json')\nvar path = require('path')\n\n// a path to a promzard module. In the event that this file is\n// not found, one will be provided for you.\nvar initFile = path.resolve(process.env.HOME, '.npm-init')\n\n// the dir where we're doin stuff.\nvar dir = process.cwd()\n\n// extra stuff that gets put into the PromZard module's context.\n// In npm, this is the resolved config object. Exposed as 'config'\n// Optional.\nvar configData = { some: 'extra stuff' }\n\n// Any existing stuff from the package.json file is also exposed in the\n// PromZard module as the `package` object. There will also be free\n// vars for:\n// * `filename` path to the package.json file\n// * `basename` the tip of the package dir\n// * `dirname` the parent of the package dir\n\ninit(dir, initFile, configData, function (er, data) {\n // the data's already been written to {dir}/package.json\n // now you can do stuff with it\n})\n```\n\nOr from the command line:\n\n```\n$ npm-init\n```\n\nSee [PromZard](https://github.com/isaacs/promzard) for details about\nwhat can go in the config file.\n",
+ "readmeFilename": "README.md",
+ "gitHead": "443031e837c81bb10548212a7b3700b5dde94cfb",
"bugs": {
"url": "https://github.com/isaacs/init-package-json/issues"
},
- "homepage": "https://github.com/isaacs/init-package-json",
- "_id": "init-package-json@1.6.0",
- "_shasum": "8c4c2561abca1ad30d88f5594ddb4159211a36ff",
- "_from": "init-package-json@1.6.0",
- "_npmVersion": "2.7.6",
- "_nodeVersion": "1.6.2",
- "_npmUser": {
- "name": "iarna",
- "email": "me@re-becca.org"
- },
- "dist": {
- "shasum": "8c4c2561abca1ad30d88f5594ddb4159211a36ff",
- "tarball": "http://registry.npmjs.org/init-package-json/-/init-package-json-1.6.0.tgz"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- },
- {
- "name": "iarna",
- "email": "me@re-becca.org"
- }
- ],
- "directories": {},
- "_resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-1.6.0.tgz"
+ "homepage": "https://github.com/isaacs/init-package-json#readme",
+ "_id": "init-package-json@1.7.1",
+ "_shasum": "32a8643779c91732d901a6fdc7505de7119fb7ad",
+ "_from": "init-package-json@>=1.7.1 <1.8.0"
}
diff --git a/deps/npm/node_modules/init-package-json/test/basic.js b/deps/npm/node_modules/init-package-json/test/basic.js
index f07f435bc..d67e0c3b1 100644
--- a/deps/npm/node_modules/init-package-json/test/basic.js
+++ b/deps/npm/node_modules/init-package-json/test/basic.js
@@ -6,6 +6,7 @@ var test = require('tap').test
test('the basics', function (t) {
var i = path.join(__dirname, 'basic.input')
+ rimraf.sync(__dirname + '/package.json')
init(__dirname, i, { foo: 'bar' }, function (er, data) {
if (er) throw er
var expect = {
@@ -18,6 +19,7 @@ test('the basics', function (t) {
config: { foo: 'bar' },
package: {}
}
+ console.log('')
t.same(data, expect)
t.end()
})
diff --git a/deps/npm/node_modules/init-package-json/test/license.js b/deps/npm/node_modules/init-package-json/test/license.js
index 87333fbf8..8d8ae93dd 100644
--- a/deps/npm/node_modules/init-package-json/test/license.js
+++ b/deps/npm/node_modules/init-package-json/test/license.js
@@ -5,7 +5,9 @@ var common = require('./lib/common')
test('license', function (t) {
init(__dirname, '', {}, function (er, data) {
- t.ok(!er, 'should not error')
+ if (er)
+ throw er
+
var wanted = {
name: 'the-name',
version: '1.0.0',
@@ -15,7 +17,8 @@ test('license', function (t) {
author: '',
main: 'basic.js'
}
- t.same(data, wanted)
+ console.log('')
+ t.has(data, wanted)
t.end()
})
common.drive([
diff --git a/deps/npm/node_modules/init-package-json/test/name-spaces.js b/deps/npm/node_modules/init-package-json/test/name-spaces.js
index dee974fc8..a395afb9a 100644
--- a/deps/npm/node_modules/init-package-json/test/name-spaces.js
+++ b/deps/npm/node_modules/init-package-json/test/name-spaces.js
@@ -4,8 +4,10 @@ var rimraf = require('rimraf')
var common = require('./lib/common')
test('spaces', function (t) {
+ rimraf.sync(__dirname + '/package.json')
init(__dirname, '', {}, function (er, data) {
- t.ok(!er, 'should not error')
+ if (er)
+ throw er
var wanted = {
name: 'the-name',
version: '1.0.0',
@@ -15,7 +17,8 @@ test('spaces', function (t) {
author: '',
main: 'basic.js'
}
- t.same(data, wanted)
+ console.log('')
+ t.has(data, wanted)
t.end()
})
common.drive([
diff --git a/deps/npm/node_modules/init-package-json/test/name-uppercase.js b/deps/npm/node_modules/init-package-json/test/name-uppercase.js
index ddedc30e5..d1623260a 100644
--- a/deps/npm/node_modules/init-package-json/test/name-uppercase.js
+++ b/deps/npm/node_modules/init-package-json/test/name-uppercase.js
@@ -5,7 +5,9 @@ var common = require('./lib/common')
test('uppercase', function (t) {
init(__dirname, '', {}, function (er, data) {
- t.ok(!er, 'should not error')
+ if (er)
+ throw er
+
var wanted = {
name: 'the-name',
version: '1.0.0',
@@ -15,7 +17,8 @@ test('uppercase', function (t) {
author: '',
main: 'basic.js'
}
- t.same(data, wanted)
+ console.log('')
+ t.has(data, wanted)
t.end()
})
common.drive([
diff --git a/deps/npm/node_modules/init-package-json/test/scope-in-config-existing-name.js b/deps/npm/node_modules/init-package-json/test/scope-in-config-existing-name.js
new file mode 100644
index 000000000..39dc90f42
--- /dev/null
+++ b/deps/npm/node_modules/init-package-json/test/scope-in-config-existing-name.js
@@ -0,0 +1,30 @@
+var fs = require('fs')
+var path = require('path')
+
+var rimraf = require('rimraf')
+var tap = require('tap')
+
+var init = require('../')
+
+var json = {
+ name: '@already/scoped',
+ version: '1.0.0'
+}
+
+tap.test('with existing package.json', function (t) {
+ fs.writeFileSync(path.join(__dirname, 'package.json'), JSON.stringify(json, null, 2))
+ console.log(fs.readFileSync(path.join(__dirname, 'package.json'), 'utf8'))
+ console.error('wrote json', json)
+ init(__dirname, __dirname, { yes: 'yes', scope: '@still' }, function (er, data) {
+ if (er) throw er
+
+ console.log('')
+ t.equal(data.name, '@still/scoped', 'new scope is added, basic name is kept')
+ t.end()
+ })
+})
+
+tap.test('teardown', function (t) {
+ rimraf.sync(path.join(__dirname, 'package.json'))
+ t.end()
+})
diff --git a/deps/npm/node_modules/init-package-json/test/scope-in-config.js b/deps/npm/node_modules/init-package-json/test/scope-in-config.js
index 1fa83d9c1..32bba16ed 100644
--- a/deps/npm/node_modules/init-package-json/test/scope-in-config.js
+++ b/deps/npm/node_modules/init-package-json/test/scope-in-config.js
@@ -21,22 +21,8 @@ tap.test('--yes with scope', function (t) {
init(__dirname, __dirname, { yes: 'yes', scope: '@scoped' }, function (er, data) {
if (er) throw er
- t.same(EXPECT, data)
- t.end()
- })
-})
-
-var json = {
- name: '@already/scoped',
- version: '1.0.0'
-}
-
-tap.test('with existing package.json', function (t) {
- fs.writeFileSync(path.join(__dirname, 'package.json'), JSON.stringify(json, null, 2))
- init(__dirname, __dirname, { yes: 'yes', scope: '@still' }, function (er, data) {
- if (er) throw er
-
- t.equal(data.name, '@still/scoped', 'new scope is added, basic name is kept')
+ console.log('')
+ t.has(data, EXPECT)
t.end()
})
})
diff --git a/deps/npm/node_modules/init-package-json/test/scope.js b/deps/npm/node_modules/init-package-json/test/scope.js
index 971916f2d..4fa064928 100644
--- a/deps/npm/node_modules/init-package-json/test/scope.js
+++ b/deps/npm/node_modules/init-package-json/test/scope.js
@@ -19,7 +19,8 @@ tap.test('the scope', function (t) {
init(dir, i, {scope: '@foo'}, function (er, data) {
if (er) throw er
- t.same(EXPECT, data)
+ console.log('')
+ t.has(data, EXPECT)
t.end()
})
setTimeout(function () {
diff --git a/deps/npm/node_modules/init-package-json/test/yes-defaults.js b/deps/npm/node_modules/init-package-json/test/yes-defaults.js
index 747ab38fd..f7cae28c5 100644
--- a/deps/npm/node_modules/init-package-json/test/yes-defaults.js
+++ b/deps/npm/node_modules/init-package-json/test/yes-defaults.js
@@ -17,7 +17,7 @@ tap.test('--yes defaults', function (t) {
init(__dirname, __dirname, {yes: 'yes'}, function (er, data) {
if (er) throw er
- t.same(EXPECT, data, 'used the default data')
+ t.has(data, EXPECT, 'used the default data')
t.end()
})
})
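Several of these test changes swap `t.same(EXPECT, data)` for `t.has(data, EXPECT)`: with tap 1.x, `t.same` is a deep-equality check, while `t.has` only asserts that the found object contains the expected fields, so extra keys contributed by newer defaults no longer fail the tests (and the found/expected argument order is now the conventional one). Roughly:

```javascript
// Sketch of the difference; runnable with tap installed.
var test = require('tap').test

test('has vs same', function (t) {
  var data = { name: 'the-name', version: '1.0.0', license: 'ISC' }
  var expect = { name: 'the-name', version: '1.0.0' }

  t.has(data, expect, 'subset match: extra keys on `data` are fine')
  // t.same(data, expect) would fail here because of the extra `license` key
  t.end()
})
```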
diff --git a/deps/npm/node_modules/lru-cache/.travis.yml b/deps/npm/node_modules/lru-cache/.travis.yml
new file mode 100644
index 000000000..4af02b3d1
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/.travis.yml
@@ -0,0 +1,8 @@
+language: node_js
+node_js:
+ - '0.8'
+ - '0.10'
+ - '0.12'
+ - 'iojs'
+before_install:
+ - npm install -g npm@latest
diff --git a/deps/npm/node_modules/lru-cache/README.md b/deps/npm/node_modules/lru-cache/README.md
index 82a6dabd5..a8bba688f 100644
--- a/deps/npm/node_modules/lru-cache/README.md
+++ b/deps/npm/node_modules/lru-cache/README.md
@@ -54,7 +54,7 @@ away.
## API
-* `set(key, value, max)`
+* `set(key, value, maxAge)`
* `get(key) => value`
Both of these will update the "recently used"-ness of the key.
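The README fix above documents the actual signature: the third argument to `set()` is a per-entry `maxAge` that overrides the cache-wide one. For instance, with lru-cache installed:

```javascript
// Sketch: per-entry maxAge on set(), as the corrected README line describes.
// Assumes lru-cache is installed.
var LRU = require('lru-cache')
var cache = LRU({ max: 100, maxAge: 60 * 1000 })   // default: entries live 60s

cache.set('slow', 'value')            // uses the cache-wide 60s maxAge
cache.set('fast', 'value', 250)       // this entry expires after 250ms instead

setTimeout(function () {
  console.log(cache.get('slow'))      // 'value'
  console.log(cache.get('fast'))      // undefined: the per-entry maxAge won
}, 500)
```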
diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json
index b22362cb9..5d0f9a27a 100644
--- a/deps/npm/node_modules/lru-cache/package.json
+++ b/deps/npm/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
- "version": "2.6.4",
+ "version": "2.6.5",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
@@ -20,27 +20,27 @@
"url": "git://github.com/isaacs/node-lru-cache.git"
},
"devDependencies": {
- "tap": "^0.7.1",
+ "tap": "^1.2.0",
"weak": ""
},
"license": "ISC",
- "gitHead": "aea58fc0a12714c6e1422963e7ebea66460ec39e",
+ "gitHead": "7062a0c891bfb80a294be9217e4de0f882e75776",
"bugs": {
"url": "https://github.com/isaacs/node-lru-cache/issues"
},
"homepage": "https://github.com/isaacs/node-lru-cache#readme",
- "_id": "lru-cache@2.6.4",
- "_shasum": "2675190ccd1b0701ec2f652a4d0d3d400d76c0dd",
- "_from": "lru-cache@>=2.6.4 <2.7.0",
- "_npmVersion": "2.10.0",
- "_nodeVersion": "2.0.1",
+ "_id": "lru-cache@2.6.5",
+ "_shasum": "e56d6354148ede8d7707b58d143220fd08df0fd5",
+ "_from": "lru-cache@2.6.5",
+ "_npmVersion": "3.0.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
"dist": {
- "shasum": "2675190ccd1b0701ec2f652a4d0d3d400d76c0dd",
- "tarball": "http://registry.npmjs.org/lru-cache/-/lru-cache-2.6.4.tgz"
+ "shasum": "e56d6354148ede8d7707b58d143220fd08df0fd5",
+ "tarball": "http://registry.npmjs.org/lru-cache/-/lru-cache-2.6.5.tgz"
},
"maintainers": [
{
@@ -53,6 +53,5 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.6.4.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.6.5.tgz"
}
diff --git a/deps/npm/node_modules/lru-cache/test/foreach.js b/deps/npm/node_modules/lru-cache/test/foreach.js
index 429ebc124..4190417cb 100644
--- a/deps/npm/node_modules/lru-cache/test/foreach.js
+++ b/deps/npm/node_modules/lru-cache/test/foreach.js
@@ -108,7 +108,6 @@ test('expires', function (t) {
t.equal(val, j.toString(2))
})
t.equal(i, order.length);
- t.end()
setTimeout(function () {
var count = 0;
diff --git a/deps/npm/node_modules/lru-cache/test/memory-leak.js b/deps/npm/node_modules/lru-cache/test/memory-leak.js
index 7af45b022..b5912f6f1 100644
--- a/deps/npm/node_modules/lru-cache/test/memory-leak.js
+++ b/deps/npm/node_modules/lru-cache/test/memory-leak.js
@@ -1,5 +1,6 @@
#!/usr/bin/env node --expose_gc
+
var weak = require('weak');
var test = require('tap').test
var LRU = require('../')
diff --git a/deps/npm/node_modules/minimatch/browser.js b/deps/npm/node_modules/minimatch/browser.js
index 967b45c0d..7d0515920 100644
--- a/deps/npm/node_modules/minimatch/browser.js
+++ b/deps/npm/node_modules/minimatch/browser.js
@@ -1,4 +1,4 @@
-(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.minimatch = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
module.exports = minimatch
minimatch.Minimatch = Minimatch
@@ -273,6 +273,7 @@ function parse (pattern, isSub) {
var escaping = false
// ? => one single character
var patternListStack = []
+ var negativeLists = []
var plType
var stateChar
var inClass = false
@@ -373,9 +374,13 @@ function parse (pattern, isSub) {
}
plType = stateChar
- patternListStack.push({ type: plType, start: i - 1, reStart: re.length })
+ patternListStack.push({
+ type: plType,
+ start: i - 1,
+ reStart: re.length
+ })
// negation is (?:(?!js)[^/]*)
- re += stateChar === '!' ? '(?:(?!' : '(?:'
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this.debug('plType %j %j', stateChar, re)
stateChar = false
continue
@@ -389,12 +394,15 @@ function parse (pattern, isSub) {
clearStateChar()
hasMagic = true
re += ')'
- plType = patternListStack.pop().type
+ var pl = patternListStack.pop()
+ plType = pl.type
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
switch (plType) {
case '!':
- re += '[^/]*?)'
+ negativeLists.push(pl)
+ re += ')[^/]*?)'
+ pl.reEnd = re.length
break
case '?':
case '+':
@@ -508,7 +516,7 @@ function parse (pattern, isSub) {
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
- for (var pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
var tail = re.slice(pl.reStart + 3)
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) {
@@ -551,12 +559,49 @@ function parse (pattern, isSub) {
case '(': addPatternStart = true
}
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (var n = negativeLists.length - 1; n > -1; n--) {
+ var nl = negativeLists[n]
+
+ var nlBefore = re.slice(0, nl.reStart)
+ var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+ var nlAfter = re.slice(nl.reEnd)
+
+ nlLast += nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ var openParensBefore = nlBefore.split('(').length - 1
+ var cleanAfter = nlAfter
+ for (i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+ }
+ nlAfter = cleanAfter
+
+ var dollar = ''
+ if (nlAfter === '' && isSub !== SUBPARSE) {
+ dollar = '$'
+ }
+ var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ re = newRe
+ }
+
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
- if (re !== '' && hasMagic) re = '(?=.)' + re
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
- if (addPatternStart) re = patternStart + re
+ if (addPatternStart) {
+ re = patternStart + re
+ }
// parsing just a piece of a larger pattern.
if (isSub === SUBPARSE) {
@@ -1110,4 +1155,5 @@ module.exports = function (xs, fn) {
return res;
};
-},{}]},{},[1]);
+},{}]},{},[1])(1)
+});
\ No newline at end of file
diff --git a/deps/npm/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/minimatch/minimatch.js
index 5e13d6d5b..ec4c05c57 100644
--- a/deps/npm/node_modules/minimatch/minimatch.js
+++ b/deps/npm/node_modules/minimatch/minimatch.js
@@ -272,6 +272,7 @@ function parse (pattern, isSub) {
var escaping = false
// ? => one single character
var patternListStack = []
+ var negativeLists = []
var plType
var stateChar
var inClass = false
@@ -372,9 +373,13 @@ function parse (pattern, isSub) {
}
plType = stateChar
- patternListStack.push({ type: plType, start: i - 1, reStart: re.length })
+ patternListStack.push({
+ type: plType,
+ start: i - 1,
+ reStart: re.length
+ })
// negation is (?:(?!js)[^/]*)
- re += stateChar === '!' ? '(?:(?!' : '(?:'
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this.debug('plType %j %j', stateChar, re)
stateChar = false
continue
@@ -388,12 +393,15 @@ function parse (pattern, isSub) {
clearStateChar()
hasMagic = true
re += ')'
- plType = patternListStack.pop().type
+ var pl = patternListStack.pop()
+ plType = pl.type
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
switch (plType) {
case '!':
- re += '[^/]*?)'
+ negativeLists.push(pl)
+ re += ')[^/]*?)'
+ pl.reEnd = re.length
break
case '?':
case '+':
@@ -507,7 +515,7 @@ function parse (pattern, isSub) {
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
- for (var pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
var tail = re.slice(pl.reStart + 3)
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) {
@@ -550,12 +558,49 @@ function parse (pattern, isSub) {
case '(': addPatternStart = true
}
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (var n = negativeLists.length - 1; n > -1; n--) {
+ var nl = negativeLists[n]
+
+ var nlBefore = re.slice(0, nl.reStart)
+ var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+ var nlAfter = re.slice(nl.reEnd)
+
+ nlLast += nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ var openParensBefore = nlBefore.split('(').length - 1
+ var cleanAfter = nlAfter
+ for (i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+ }
+ nlAfter = cleanAfter
+
+ var dollar = ''
+ if (nlAfter === '' && isSub !== SUBPARSE) {
+ dollar = '$'
+ }
+ var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ re = newRe
+ }
+
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
- if (re !== '' && hasMagic) re = '(?=.)' + re
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
- if (addPatternStart) re = patternStart + re
+ if (addPatternStart) {
+ re = patternStart + re
+ }
// parsing just a piece of a larger pattern.
if (isSub === SUBPARSE) {
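
The two hunks above (applied identically to browser.js and minimatch.js) work around the lack of negative lookbehind in JS regexps by making each negated extglob look ahead to the end of the whole pattern. A minimal sketch of the behaviour that inline comment describes, assuming minimatch 2.0.10 (the version vendored here) is installed locally:

```javascript
// Assumes minimatch 2.0.10 is available locally: npm install minimatch@2.0.10
var minimatch = require('minimatch')

// Each !() now looks all the way to the end of the pattern, so a name that
// only partially dodges the negated sections is rejected.
console.log(minimatch('a.xyz.yz', '*.!(x).!(y|z)'))   // expected: false
console.log(minimatch('a.foo.bar', '*.!(x).!(y|z)'))  // expected: true
```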
diff --git a/deps/npm/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json b/deps/npm/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
index b51613809..2f1bd3d5d 100644
--- a/deps/npm/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
+++ b/deps/npm/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
@@ -63,7 +63,7 @@
"_id": "concat-map@0.0.1",
"dist": {
"shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
- "tarball": "http://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"
+ "tarball": "https://registrytwo.npmjs.com/concat-map/-/concat-map-0.0.1.tgz"
},
"_from": "concat-map@0.0.1",
"_npmVersion": "1.3.21",
@@ -78,6 +78,6 @@
}
],
"_shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
- "_resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "_resolved": "https://registrytwo.npmjs.com/concat-map/-/concat-map-0.0.1.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/minimatch/package.json b/deps/npm/node_modules/minimatch/package.json
index 2cacae210..c7c9a089c 100644
--- a/deps/npm/node_modules/minimatch/package.json
+++ b/deps/npm/node_modules/minimatch/package.json
@@ -6,16 +6,16 @@
},
"name": "minimatch",
"description": "a glob matcher in javascript",
- "version": "2.0.8",
+ "version": "2.0.10",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/minimatch.git"
},
"main": "minimatch.js",
"scripts": {
- "pretest": "standard minimatch.js test/*.js",
+ "posttest": "standard minimatch.js test/*.js",
"test": "tap test/*.js",
- "prepublish": "browserify -o browser.js -e minimatch.js --bare"
+ "prepublish": "browserify -o browser.js -e minimatch.js -s minimatch --bare"
},
"engines": {
"node": "*"
@@ -26,30 +26,30 @@
"devDependencies": {
"browserify": "^9.0.3",
"standard": "^3.7.2",
- "tap": ""
+ "tap": "^1.2.0"
},
"license": "ISC",
"files": [
"minimatch.js",
"browser.js"
],
- "gitHead": "0bc7d9c4b2bc816502184862b45bd090de3406a3",
+ "gitHead": "6afb85f0c324b321f76a38df81891e562693e257",
"bugs": {
"url": "https://github.com/isaacs/minimatch/issues"
},
"homepage": "https://github.com/isaacs/minimatch#readme",
- "_id": "minimatch@2.0.8",
- "_shasum": "0bc20f6bf3570a698ef0ddff902063c6cabda6bf",
- "_from": "minimatch@>=2.0.8 <2.1.0",
- "_npmVersion": "2.10.0",
- "_nodeVersion": "2.0.1",
+ "_id": "minimatch@2.0.10",
+ "_shasum": "8d087c39c6b38c001b97fca7ce6d0e1e80afbac7",
+ "_from": "minimatch@2.0.10",
+ "_npmVersion": "3.1.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
"dist": {
- "shasum": "0bc20f6bf3570a698ef0ddff902063c6cabda6bf",
- "tarball": "http://registry.npmjs.org/minimatch/-/minimatch-2.0.8.tgz"
+ "shasum": "8d087c39c6b38c001b97fca7ce6d0e1e80afbac7",
+ "tarball": "http://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz"
},
"maintainers": [
{
@@ -58,6 +58,5 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.8.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz"
}
diff --git a/deps/npm/node_modules/node-gyp/History.md b/deps/npm/node_modules/node-gyp/History.md
index a2af92175..8768081ea 100644
--- a/deps/npm/node_modules/node-gyp/History.md
+++ b/deps/npm/node_modules/node-gyp/History.md
@@ -1,8 +1,22 @@
+2.0.2 / 2015-07-14
+==================
+
+ * Use HTTPS for dist url (#656, @SonicHedgehog)
+ * Merge pull request #648 from nevosegal/master
+ * Merge pull request #650 from magic890/patch-1
+ * Updated Installation section on README
+ * Updated link to gyp user documentation
+ * Fix download error message spelling (#643, @tomxtobin)
+ * Merge pull request #637 from lygstate/master
+ * Set NODE_GYP_DIR for addon.gypi to setting absolute path for
+    src/win_delay_load_hook.c, and fixes of the long relative path issue on Win32.
+ Fixes #636 (#637, @lygstate).
+
2.0.1 / 2015-05-28
==================
- * configure: try/catcht the semver range.test() call
+ * configure: try/catch the semver range.test() call
* README: update for visual studio 2013 (#510, @samccone)
2.0.0 / 2015-05-24
diff --git a/deps/npm/node_modules/node-gyp/README.md b/deps/npm/node_modules/node-gyp/README.md
index 386e54eb8..edde1f6c3 100644
--- a/deps/npm/node_modules/node-gyp/README.md
+++ b/deps/npm/node_modules/node-gyp/README.md
@@ -35,13 +35,18 @@ You will also need to install:
* On Unix:
* `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported)
* `make`
- * A proper C/C++ compiler toolchain, like GCC
+ * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org)
+ * On Mac OS X:
+ * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) (already installed on Mac OS X)
+ * [Xcode](https://developer.apple.com/xcode/downloads/)
+ * You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads`
+ * This step will install `gcc` and the related toolchain containing `make`
* On Windows:
* [Python][windows-python] ([`v2.7.3`][windows-python-v2.7.3] recommended, `v3.x.x` is __*not*__ supported)
- * Make sure that you have a PYTHON environment variable, and it is set to drive:\path\to\python.exe not to a folder.
+ * Make sure that you have a PYTHON environment variable, and it is set to drive:\path\to\python.exe not to a folder
* Windows XP/Vista/7:
* Microsoft Visual Studio C++ 2013 ([Express][msvc2013] version works well)
- * If the install fails, try uninstalling any C++ 2010 x64&x86 Redistributable that you have installed first.
+ * If the install fails, try uninstalling any C++ 2010 x64&x86 Redistributable that you have installed first
* If you get errors that the 64-bit compilers are not installed you may also need the [compiler update for the Windows SDK 7.1]
* Windows 7/8:
* Microsoft Visual Studio C++ 2013 for Windows Desktop ([Express][msvc2013] version works well)
@@ -132,8 +137,8 @@ Some additional resources for addons and writing `gyp` files:
* ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative)
* ["Hello World" node addon example](https://github.com/joyent/node/tree/master/test/addons/hello-world)
- * [gyp user documentation](http://code.google.com/p/gyp/wiki/GypUserDocumentation)
- * [gyp input format reference](http://code.google.com/p/gyp/wiki/InputFormatReference)
+ * [gyp user documentation](https://chromium.googlesource.com/external/gyp/+/master/docs/UserDocumentation.md)
+ * [gyp input format reference](https://chromium.googlesource.com/external/gyp/+/master/docs/InputFormatReference.md)
* [*"binding.gyp" files out in the wild* wiki page](https://github.com/TooTallNate/node-gyp/wiki/%22binding.gyp%22-files-out-in-the-wild)
diff --git a/deps/npm/node_modules/node-gyp/addon.gypi b/deps/npm/node_modules/node-gyp/addon.gypi
index 7f6264ac6..337794242 100644
--- a/deps/npm/node_modules/node-gyp/addon.gypi
+++ b/deps/npm/node_modules/node-gyp/addon.gypi
@@ -40,7 +40,7 @@
'conditions': [
[ 'OS=="win"', {
'sources': [
- 'src/win_delay_load_hook.c',
+ '<(node_gyp_dir)/src/win_delay_load_hook.c',
],
'msvs_settings': {
'VCLinkerTool': {
diff --git a/deps/npm/node_modules/node-gyp/lib/configure.js b/deps/npm/node_modules/node-gyp/lib/configure.js
index 0a33e7008..e8c2029b5 100644
--- a/deps/npm/node_modules/node-gyp/lib/configure.js
+++ b/deps/npm/node_modules/node-gyp/lib/configure.js
@@ -316,12 +316,14 @@ function configure (gyp, argv, callback) {
// Windows expects an absolute path
output_dir = buildDir
}
+ var nodeGypDir = path.resolve(__dirname, '..')
argv.push('-I', addon_gypi)
argv.push('-I', common_gypi)
argv.push('-Dlibrary=shared_library')
argv.push('-Dvisibility=default')
argv.push('-Dnode_root_dir=' + nodeDir)
+ argv.push('-Dnode_gyp_dir=' + nodeGypDir)
argv.push('-Dmodule_root_dir=' + process.cwd())
argv.push('--depth=.')
argv.push('--no-parallel')
diff --git a/deps/npm/node_modules/node-gyp/lib/install.js b/deps/npm/node_modules/node-gyp/lib/install.js
index 89e4956a8..6cbcdb8f7 100644
--- a/deps/npm/node_modules/node-gyp/lib/install.js
+++ b/deps/npm/node_modules/node-gyp/lib/install.js
@@ -39,7 +39,7 @@ function install (gyp, argv, callback) {
}
}
- var distUrl = gyp.opts['dist-url'] || gyp.opts.disturl || 'http://nodejs.org/dist'
+ var distUrl = gyp.opts['dist-url'] || gyp.opts.disturl || 'https://nodejs.org/dist'
// Determine which node dev files version we are installing
@@ -248,7 +248,7 @@ function install (gyp, argv, callback) {
req.on('response', function (res) {
if (res.statusCode !== 200) {
badDownload = true
- cb(new Error(res.statusCode + ' response dowloading ' + tarballUrl))
+ cb(new Error(res.statusCode + ' response downloading ' + tarballUrl))
return
}
// content checksum
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/README.md b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if `EACCESS`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
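
The EMFILE queueing the README describes can be sketched roughly as follows, assuming graceful-fs 3.x is installed locally. Plain `fs` would typically start failing with EMFILE when this many opens are in flight at once; graceful-fs queues the excess and retries them as earlier descriptors are closed:

```javascript
// Assumes graceful-fs 3.x is installed locally: npm install graceful-fs@3
var fs = require('graceful-fs')

// Fire off more open() calls than a typical descriptor limit allows.
// The opens that would otherwise fail with EMFILE are queued and retried
// once earlier descriptors are closed, so every callback eventually runs.
for (var i = 0; i < 5000; i++) {
  fs.open(__filename, 'r', function (er, fd) {
    if (er) throw er
    fs.close(fd, function () {})
  })
}
```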
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/package.json b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..9add3d355
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/package.json
@@ -0,0 +1,72 @@
+{
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me"
+ },
+ "name": "graceful-fs",
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "version": "3.0.8",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "main": "graceful-fs.js",
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "keywords": [
+ "fs",
+ "module",
+ "reading",
+ "retry",
+ "retries",
+ "queue",
+ "error",
+ "errors",
+ "handling",
+ "EMFILE",
+ "EAGAIN",
+ "EINVAL",
+ "EPERM",
+ "EACCESS"
+ ],
+ "license": "ISC",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "_id": "graceful-fs@3.0.8",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_from": "graceful-fs@>=3.0.0 <4.0.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/node-gyp/node_modules/path-array/node_modules/array-index/package.json b/deps/npm/node_modules/node-gyp/node_modules/path-array/node_modules/array-index/package.json
index 6ba9df72c..8ed83ab24 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/path-array/node_modules/array-index/package.json
+++ b/deps/npm/node_modules/node-gyp/node_modules/path-array/node_modules/array-index/package.json
@@ -53,6 +53,5 @@
"tarball": "http://registry.npmjs.org/array-index/-/array-index-0.1.1.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/array-index/-/array-index-0.1.1.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/array-index/-/array-index-0.1.1.tgz"
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/path-array/package.json b/deps/npm/node_modules/node-gyp/node_modules/path-array/package.json
index 41d25482b..ad8edc9f8 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/path-array/package.json
+++ b/deps/npm/node_modules/node-gyp/node_modules/path-array/package.json
@@ -51,6 +51,5 @@
"tarball": "http://registry.npmjs.org/path-array/-/path-array-1.0.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/path-array/-/path-array-1.0.0.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/path-array/-/path-array-1.0.0.tgz"
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/semver/.npmignore
new file mode 100644
index 000000000..534108e3f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/.npmignore
@@ -0,0 +1,4 @@
+node_modules/
+coverage/
+.nyc_output/
+nyc_output/
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/semver/.travis.yml
new file mode 100644
index 000000000..991d04b6e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/.travis.yml
@@ -0,0 +1,5 @@
+language: node_js
+node_js:
+ - '0.10'
+ - '0.12'
+ - 'iojs'
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/semver/Makefile b/deps/npm/node_modules/node-gyp/node_modules/semver/Makefile
index 71af0e975..71af0e975 100644
--- a/deps/npm/node_modules/semver/Makefile
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/Makefile
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/README.md b/deps/npm/node_modules/node-gyp/node_modules/semver/README.md
new file mode 100644
index 000000000..b5e35ff0b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/README.md
@@ -0,0 +1,303 @@
+semver(1) -- The semantic versioner for npm
+===========================================
+
+## Usage
+
+ $ npm install semver
+
+ semver.valid('1.2.3') // '1.2.3'
+ semver.valid('a.b.c') // null
+ semver.clean(' =v1.2.3 ') // '1.2.3'
+ semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+ semver.gt('1.2.3', '9.8.7') // false
+ semver.lt('1.2.3', '9.8.7') // true
+
+As a command-line utility:
+
+ $ semver -h
+
+ Usage: semver <version> [<version> [...]] [-r <range> | -i <inc> | --preid <identifier> | -l | -rv]
+ Test if version(s) satisfy the supplied range(s), and sort them.
+
+ Multiple versions or ranges may be supplied, unless increment
+ option is specified. In that case, only a single version may
+ be used, and it is incremented by the specified level
+
+ Program exits successfully if any valid version satisfies
+ all supplied ranges, and prints all satisfying versions.
+
+ If no versions are valid, or ranges are not satisfied,
+ then exits failure.
+
+ Versions are printed in ascending order, so supplying
+ multiple versions to the utility will just sort them.
+
+## Versions
+
+A "version" is described by the `v2.0.0` specification found at
+<http://semver.org/>.
+
+A leading `"="` or `"v"` character is stripped off and ignored.
+
+## Ranges
+
+A `version range` is a set of `comparators` which specify versions
+that satisfy the range.
+
+A `comparator` is composed of an `operator` and a `version`. The set
+of primitive `operators` is:
+
+* `<` Less than
+* `<=` Less than or equal to
+* `>` Greater than
+* `>=` Greater than or equal to
+* `=` Equal. If no operator is specified, then equality is assumed,
+ so this operator is optional, but MAY be included.
+
+For example, the comparator `>=1.2.7` would match the versions
+`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
+or `1.1.0`.
+
+Comparators can be joined by whitespace to form a `comparator set`,
+which is satisfied by the **intersection** of all of the comparators
+it includes.
+
+A range is composed of one or more comparator sets, joined by `||`. A
+version matches a range if and only if every comparator in at least
+one of the `||`-separated comparator sets is satisfied by the version.
+
+For example, the range `>=1.2.7 <1.3.0` would match the versions
+`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
+or `1.1.0`.
+
+The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
+`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
+
+### Prerelease Tags
+
+If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
+it will only be allowed to satisfy comparator sets if at least one
+comparator with the same `[major, minor, patch]` tuple also has a
+prerelease tag.
+
+For example, the range `>1.2.3-alpha.3` would be allowed to match the
+version `1.2.3-alpha.7`, but it would *not* be satisfied by
+`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
+than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
+range only accepts prerelease tags on the `1.2.3` version. The
+version `3.4.5` *would* satisfy the range, because it does not have a
+prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
+
+The purpose for this behavior is twofold. First, prerelease versions
+frequently are updated very quickly, and contain many breaking changes
+that are (by the author's design) not yet fit for public consumption.
+Therefore, by default, they are excluded from range matching
+semantics.
+
+Second, a user who has opted into using a prerelease version has
+clearly indicated the intent to use *that specific* set of
+alpha/beta/rc versions. By including a prerelease tag in the range,
+the user is indicating that they are aware of the risk. However, it
+is still not appropriate to assume that they have opted into taking a
+similar risk on the *next* set of prerelease versions.
+
+#### Prerelease Identifiers
+
+The method `.inc` takes an additional `identifier` string argument that
+will append the value of the string as a prerelease identifier:
+
+```javascript
+> semver.inc('1.2.3', 'pre', 'beta')
+'1.2.4-beta.0'
+```
+
+command-line example:
+
+```shell
+$ semver 1.2.3 -i prerelease --preid beta
+1.2.4-beta.0
+```
+
+Which then can be used to increment further:
+
+```shell
+$ semver 1.2.4-beta.0 -i prerelease
+1.2.4-beta.1
+```
+
+### Advanced Range Syntax
+
+Advanced range syntax desugars to primitive comparators in
+deterministic ways.
+
+Advanced ranges may be combined in the same way as primitive
+comparators using white space or `||`.
+
+#### Hyphen Ranges `X.Y.Z - A.B.C`
+
+Specifies an inclusive set.
+
+* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
+
+If a partial version is provided as the first version in the inclusive
+range, then the missing pieces are replaced with zeroes.
+
+* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
+
+If a partial version is provided as the second version in the
+inclusive range, then all versions that start with the supplied parts
+of the tuple are accepted, but nothing that would be greater than the
+provided tuple parts.
+
+* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
+* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
+
+#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
+
+Any of `X`, `x`, or `*` may be used to "stand in" for one of the
+numeric values in the `[major, minor, patch]` tuple.
+
+* `*` := `>=0.0.0` (Any version satisfies)
+* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
+* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
+
+A partial version range is treated as an X-Range, so the special
+character is in fact optional.
+
+* `""` (empty string) := `*` := `>=0.0.0`
+* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
+* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
+
+#### Tilde Ranges `~1.2.3` `~1.2` `~1`
+
+Allows patch-level changes if a minor version is specified on the
+comparator. Allows minor-level changes if not.
+
+* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
+* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
+* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
+* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
+* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
+* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
+* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
+ the `1.2.3` version will be allowed, if they are greater than or
+ equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
+ `1.2.4-beta.2` would not, because it is a prerelease of a
+ different `[major, minor, patch]` tuple.
+
+#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
+
+Allows changes that do not modify the left-most non-zero digit in the
+`[major, minor, patch]` tuple. In other words, this allows patch and
+minor updates for versions `1.0.0` and above, patch updates for
+versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
+
+Many authors treat a `0.x` version as if the `x` were the major
+"breaking-change" indicator.
+
+Caret ranges are ideal when an author may make breaking changes
+between `0.2.4` and `0.3.0` releases, which is a common practice.
+However, it presumes that there will *not* be breaking changes between
+`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
+additive (but non-breaking), according to commonly observed practices.
+
+* `^1.2.3` := `>=1.2.3 <2.0.0`
+* `^0.2.3` := `>=0.2.3 <0.3.0`
+* `^0.0.3` := `>=0.0.3 <0.0.4`
+* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
+ the `1.2.3` version will be allowed, if they are greater than or
+ equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
+ `1.2.4-beta.2` would not, because it is a prerelease of a
+ different `[major, minor, patch]` tuple.
+* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
+ `0.0.3` version *only* will be allowed, if they are greater than or
+ equal to `beta`. So, `0.0.3-pr.2` would be allowed.
+
+When parsing caret ranges, a missing `patch` value desugars to the
+number `0`, but will allow flexibility within that value, even if the
+major and minor versions are both `0`.
+
+* `^1.2.x` := `>=1.2.0 <2.0.0`
+* `^0.0.x` := `>=0.0.0 <0.1.0`
+* `^0.0` := `>=0.0.0 <0.1.0`
+
+A missing `minor` and `patch` values will desugar to zero, but also
+allow flexibility within those values, even if the major version is
+zero.
+
+* `^1.x` := `>=1.0.0 <2.0.0`
+* `^0.x` := `>=0.0.0 <1.0.0`
+
+## Functions
+
+All methods and classes take a final `loose` boolean argument that, if
+true, will be more forgiving about not-quite-valid semver strings.
+The resulting output will always be 100% strict, of course.
+
+Strict-mode Comparators and Ranges will be strict about the SemVer
+strings that they parse.
+
+* `valid(v)`: Return the parsed version, or null if it's not valid.
+* `inc(v, release)`: Return the version incremented by the release
+ type (`major`, `premajor`, `minor`, `preminor`, `patch`,
+ `prepatch`, or `prerelease`), or null if it's not valid
+ * `premajor` in one call will bump the version up to the next major
+ version and down to a prerelease of that major version.
+ `preminor`, and `prepatch` work the same way.
+ * If called from a non-prerelease version, the `prerelease` will work the
+ same as `prepatch`. It increments the patch version, then makes a
+ prerelease. If the input version is already a prerelease it simply
+ increments it.
+* `major(v)`: Return the major version number.
+* `minor(v)`: Return the minor version number.
+* `patch(v)`: Return the patch version number.
+
+### Comparison
+
+* `gt(v1, v2)`: `v1 > v2`
+* `gte(v1, v2)`: `v1 >= v2`
+* `lt(v1, v2)`: `v1 < v2`
+* `lte(v1, v2)`: `v1 <= v2`
+* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
+ even if they're not the exact same string. You already know how to
+ compare strings.
+* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
+* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
+ the corresponding function above. `"==="` and `"!=="` do simple
+ string comparison, but are included for completeness. Throws if an
+ invalid comparison string is provided.
+* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
+ `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
+* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
+ in descending order when passed to `Array.sort()`.
+* `diff(v1, v2)`: Returns difference between two versions by the release type
+ (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
+ or null if the versions are the same.
+
+
+### Ranges
+
+* `validRange(range)`: Return the valid range or null if it's not valid
+* `satisfies(version, range)`: Return true if the version satisfies the
+ range.
+* `maxSatisfying(versions, range)`: Return the highest version in the list
+ that satisfies the range, or `null` if none of them do.
+* `gtr(version, range)`: Return `true` if version is greater than all the
+ versions possible in the range.
+* `ltr(version, range)`: Return `true` if version is less than all the
+ versions possible in the range.
+* `outside(version, range, hilo)`: Return true if the version is outside
+ the bounds of the range in either the high or low direction. The
+ `hilo` argument must be either the string `'>'` or `'<'`. (This is
+ the function called by `gtr` and `ltr`.)
+
+Note that, since ranges may be non-contiguous, a version might not be
+greater than a range, less than a range, *or* satisfy a range! For
+example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
+until `2.0.0`, so the version `1.2.10` would not be greater than the
+range (because `2.0.1` satisfies, which is higher), nor less than the
+range (since `1.2.8` satisfies, which is lower), and it also does not
+satisfy the range.
+
+If you want to know if a version satisfies or does not satisfy a
+range, use the `satisfies(version, range)` function.
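+
A quick sketch of the range rules documented above, assuming semver 4.x is installed locally:

```javascript
// Assumes semver 4.x is available locally: npm install semver@4
var semver = require('semver')

// Tilde allows patch-level changes; caret allows anything that keeps the
// left-most non-zero digit fixed.
console.log(semver.satisfies('1.2.9', '~1.2.3'))  // true  (< 1.3.0)
console.log(semver.satisfies('1.3.0', '~1.2.3'))  // false
console.log(semver.satisfies('1.9.9', '^1.2.3'))  // true  (< 2.0.0)
console.log(semver.satisfies('0.2.5', '^0.2.3'))  // true  (< 0.3.0)
console.log(semver.satisfies('0.3.0', '^0.2.3'))  // false

// Prereleases only satisfy ranges that include a prerelease tag on the
// same [major, minor, patch] tuple.
console.log(semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3'))  // true
console.log(semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3'))  // false
```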
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver b/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver
new file mode 100755
index 000000000..c5f2e857e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver
@@ -0,0 +1,133 @@
+#!/usr/bin/env node
+// Standalone semver comparison program.
+// Exits successfully and prints matching version(s) if
+// any supplied version is valid and passes all tests.
+
+var argv = process.argv.slice(2)
+ , versions = []
+ , range = []
+ , gt = []
+ , lt = []
+ , eq = []
+ , inc = null
+ , version = require("../package.json").version
+ , loose = false
+ , identifier = undefined
+ , semver = require("../semver")
+ , reverse = false
+
+main()
+
+function main () {
+ if (!argv.length) return help()
+ while (argv.length) {
+ var a = argv.shift()
+ var i = a.indexOf('=')
+ if (i !== -1) {
+ a = a.slice(0, i)
+ argv.unshift(a.slice(i + 1))
+ }
+ switch (a) {
+ case "-rv": case "-rev": case "--rev": case "--reverse":
+ reverse = true
+ break
+ case "-l": case "--loose":
+ loose = true
+ break
+ case "-v": case "--version":
+ versions.push(argv.shift())
+ break
+ case "-i": case "--inc": case "--increment":
+ switch (argv[0]) {
+ case "major": case "minor": case "patch": case "prerelease":
+ case "premajor": case "preminor": case "prepatch":
+ inc = argv.shift()
+ break
+ default:
+ inc = "patch"
+ break
+ }
+ break
+ case "--preid":
+ identifier = argv.shift()
+ break
+ case "-r": case "--range":
+ range.push(argv.shift())
+ break
+ case "-h": case "--help": case "-?":
+ return help()
+ default:
+ versions.push(a)
+ break
+ }
+ }
+
+ versions = versions.filter(function (v) {
+ return semver.valid(v, loose)
+ })
+ if (!versions.length) return fail()
+ if (inc && (versions.length !== 1 || range.length))
+ return failInc()
+
+ for (var i = 0, l = range.length; i < l ; i ++) {
+ versions = versions.filter(function (v) {
+ return semver.satisfies(v, range[i], loose)
+ })
+ if (!versions.length) return fail()
+ }
+ return success(versions)
+}
+
+function failInc () {
+ console.error("--inc can only be used on a single version with no range")
+ fail()
+}
+
+function fail () { process.exit(1) }
+
+function success () {
+ var compare = reverse ? "rcompare" : "compare"
+ versions.sort(function (a, b) {
+ return semver[compare](a, b, loose)
+ }).map(function (v) {
+ return semver.clean(v, loose)
+ }).map(function (v) {
+ return inc ? semver.inc(v, inc, loose, identifier) : v
+ }).forEach(function (v,i,_) { console.log(v) })
+}
+
+function help () {
+ console.log(["SemVer " + version
+ ,""
+ ,"A JavaScript implementation of the http://semver.org/ specification"
+ ,"Copyright Isaac Z. Schlueter"
+ ,""
+ ,"Usage: semver [options] <version> [<version> [...]]"
+ ,"Prints valid versions sorted by SemVer precedence"
+ ,""
+ ,"Options:"
+ ,"-r --range <range>"
+ ," Print versions that match the specified range."
+ ,""
+ ,"-i --increment [<level>]"
+ ," Increment a version by the specified level. Level can"
+ ," be one of: major, minor, patch, premajor, preminor,"
+ ," prepatch, or prerelease. Default level is 'patch'."
+ ," Only one version may be specified."
+ ,""
+ ,"--preid <identifier>"
+ ," Identifier to be used to prefix premajor, preminor,"
+ ," prepatch or prerelease version increments."
+ ,""
+ ,"-l --loose"
+ ," Interpret versions and ranges loosely"
+ ,""
+ ,"Program exits successfully if any valid version satisfies"
+ ,"all supplied ranges, and prints all satisfying versions."
+ ,""
+ ,"If no satisfying versions are found, then exits failure."
+ ,""
+ ,"Versions are printed in ascending order, so supplying"
+ ,"multiple versions to the utility will just sort them."
+ ].join("\n"))
+}
diff --git a/deps/npm/node_modules/semver/foot.js.txt b/deps/npm/node_modules/node-gyp/node_modules/semver/foot.js.txt
index 8f83c20f8..8f83c20f8 100644
--- a/deps/npm/node_modules/semver/foot.js.txt
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/foot.js.txt
diff --git a/deps/npm/node_modules/semver/head.js.txt b/deps/npm/node_modules/node-gyp/node_modules/semver/head.js.txt
index 653686517..591afab46 100644
--- a/deps/npm/node_modules/semver/head.js.txt
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/head.js.txt
@@ -1,2 +1 @@
;(function(exports) {
-
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/package.json b/deps/npm/node_modules/node-gyp/node_modules/semver/package.json
new file mode 100644
index 000000000..e9b6abe3f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "semver",
+ "version": "4.3.6",
+ "description": "The semantic version parser used by npm.",
+ "main": "semver.js",
+ "browser": "semver.browser.js",
+ "min": "semver.min.js",
+ "scripts": {
+ "test": "tap test/*.js",
+ "prepublish": "make"
+ },
+ "devDependencies": {
+ "tap": "^1.2.0",
+ "uglify-js": "~2.3.6"
+ },
+ "license": "ISC",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/npm/node-semver.git"
+ },
+ "bin": {
+ "semver": "./bin/semver"
+ },
+ "gitHead": "63c48296ca5da3ba6a88c743bb8c92effc789811",
+ "bugs": {
+ "url": "https://github.com/npm/node-semver/issues"
+ },
+ "homepage": "https://github.com/npm/node-semver#readme",
+ "_id": "semver@4.3.6",
+ "_shasum": "300bc6e0e86374f7ba61068b5b1ecd57fc6532da",
+ "_from": "semver@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0||>=4.0.0 <5.0.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "300bc6e0e86374f7ba61068b5b1ecd57fc6532da",
+ "tarball": "http://registry.npmjs.org/semver/-/semver-4.3.6.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "othiym23",
+ "email": "ogd@aoaioxxysz.net"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/semver/semver.browser.js b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.browser.js
index 4b0cfecf2..4b0cfecf2 100644
--- a/deps/npm/node_modules/semver/semver.browser.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.browser.js
diff --git a/deps/npm/node_modules/semver/semver.browser.js.gz b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.browser.js.gz
index d67009d8a..d67009d8a 100644
--- a/deps/npm/node_modules/semver/semver.browser.js.gz
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.browser.js.gz
Binary files differ
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js
new file mode 100644
index 000000000..cafcc006b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js
@@ -0,0 +1,1205 @@
+// export the class if we are in a Node-like system.
+if (typeof module === 'object' && module.exports === exports)
+ exports = module.exports = SemVer;
+
+// The debug function is excluded entirely from the minified version.
+/* nomin */ var debug;
+/* nomin */ if (typeof process === 'object' &&
+ /* nomin */ process.env &&
+ /* nomin */ process.env.NODE_DEBUG &&
+ /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG))
+ /* nomin */ debug = function() {
+ /* nomin */ var args = Array.prototype.slice.call(arguments, 0);
+ /* nomin */ args.unshift('SEMVER');
+ /* nomin */ console.log.apply(console, args);
+ /* nomin */ };
+/* nomin */ else
+ /* nomin */ debug = function() {};
+
+// Note: this is the version of the semver.org spec that this module
+// implements, not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0';
+
+var MAX_LENGTH = 256;
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
+
+// The actual regexps go on exports.re
+var re = exports.re = [];
+var src = exports.src = [];
+var R = 0;
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+var NUMERICIDENTIFIER = R++;
+src[NUMERICIDENTIFIER] = '0|[1-9]\\d*';
+var NUMERICIDENTIFIERLOOSE = R++;
+src[NUMERICIDENTIFIERLOOSE] = '[0-9]+';
+
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+var NONNUMERICIDENTIFIER = R++;
+src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*';
+
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+var MAINVERSION = R++;
+src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')';
+
+var MAINVERSIONLOOSE = R++;
+src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')';
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+var PRERELEASEIDENTIFIER = R++;
+src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')';
+
+var PRERELEASEIDENTIFIERLOOSE = R++;
+src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')';
+
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+var PRERELEASE = R++;
+src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))';
+
+var PRERELEASELOOSE = R++;
+src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))';
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+var BUILDIDENTIFIER = R++;
+src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+';
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+var BUILD = R++;
+src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
+ '(?:\\.' + src[BUILDIDENTIFIER] + ')*))';
+
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that only the major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should never be used in version
+// comparison.
+
+var FULL = R++;
+var FULLPLAIN = 'v?' + src[MAINVERSION] +
+ src[PRERELEASE] + '?' +
+ src[BUILD] + '?';
+
+src[FULL] = '^' + FULLPLAIN + '$';
+
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
+ src[PRERELEASELOOSE] + '?' +
+ src[BUILD] + '?';
+
+var LOOSE = R++;
+src[LOOSE] = '^' + LOOSEPLAIN + '$';
+
+var GTLT = R++;
+src[GTLT] = '((?:<|>)?=?)';
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifer, meaning "any version"
+// Only the first item is strictly required.
+var XRANGEIDENTIFIERLOOSE = R++;
+src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*';
+var XRANGEIDENTIFIER = R++;
+src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*';
+
+var XRANGEPLAIN = R++;
+src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:' + src[PRERELEASE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?';
+
+var XRANGEPLAINLOOSE = R++;
+src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:' + src[PRERELEASELOOSE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?';
+
+var XRANGE = R++;
+src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$';
+var XRANGELOOSE = R++;
+src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$';
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+var LONETILDE = R++;
+src[LONETILDE] = '(?:~>?)';
+
+var TILDETRIM = R++;
+src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+';
+re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g');
+var tildeTrimReplace = '$1~';
+
+var TILDE = R++;
+src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$';
+var TILDELOOSE = R++;
+src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$';
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+var LONECARET = R++;
+src[LONECARET] = '(?:\\^)';
+
+var CARETTRIM = R++;
+src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+';
+re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g');
+var caretTrimReplace = '$1^';
+
+var CARET = R++;
+src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$';
+var CARETLOOSE = R++;
+src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$';
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+var COMPARATORLOOSE = R++;
+src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$';
+var COMPARATOR = R++;
+src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$';
+
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+var COMPARATORTRIM = R++;
+src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
+ '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')';
+
+// this one has to use the /g flag
+re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g');
+var comparatorTrimReplace = '$1$2$3';
+
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+var HYPHENRANGE = R++;
+src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAIN] + ')' +
+ '\\s*$';
+
+var HYPHENRANGELOOSE = R++;
+src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s*$';
+
+// Star ranges basically just allow anything at all.
+var STAR = R++;
+src[STAR] = '(<|>)?=?\\s*\\*';
+
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+ debug(i, src[i]);
+ if (!re[i])
+ re[i] = new RegExp(src[i]);
+}
+
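+// An illustrative sketch of what the strict vs. loose top-level patterns
+// end up accepting via the public helpers defined below (assuming this
+// file is what `require('semver')` resolves to):
+//
+//   semver.valid('1.2.3')           // => '1.2.3'  (strict FULL form)
+//   semver.valid('=v1.2.3')         // => null     (strict rejects prefixes)
+//   semver.valid('=v1.2.3', true)   // => '1.2.3'  (LOOSE form)
+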
+exports.parse = parse;
+function parse(version, loose) {
+ if (version instanceof SemVer)
+ return version;
+
+ if (typeof version !== 'string')
+ return null;
+
+ if (version.length > MAX_LENGTH)
+ return null;
+
+ var r = loose ? re[LOOSE] : re[FULL];
+ if (!r.test(version))
+ return null;
+
+ try {
+ return new SemVer(version, loose);
+ } catch (er) {
+ return null;
+ }
+}
+
+exports.valid = valid;
+function valid(version, loose) {
+ var v = parse(version, loose);
+ return v ? v.version : null;
+}
+
+
+exports.clean = clean;
+function clean(version, loose) {
+ var s = parse(version.trim().replace(/^[=v]+/, ''), loose);
+ return s ? s.version : null;
+}
+
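+// A minimal comment sketch of the three helpers above, assuming
+// `var semver = require('semver')`:
+//
+//   semver.parse('1.2.3').version   // => '1.2.3'
+//   semver.valid('a.b.c')           // => null   (parse failure, no throw)
+//   semver.clean('  =v1.2.3  ')     // => '1.2.3'
+//   semver.clean('~1.2.3')          // => null   (ranges are not versions)
+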
+exports.SemVer = SemVer;
+
+function SemVer(version, loose) {
+ if (version instanceof SemVer) {
+ if (version.loose === loose)
+ return version;
+ else
+ version = version.version;
+ } else if (typeof version !== 'string') {
+ throw new TypeError('Invalid Version: ' + version);
+ }
+
+ if (version.length > MAX_LENGTH)
+ throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
+
+ if (!(this instanceof SemVer))
+ return new SemVer(version, loose);
+
+ debug('SemVer', version, loose);
+ this.loose = loose;
+ var m = version.trim().match(loose ? re[LOOSE] : re[FULL]);
+
+ if (!m)
+ throw new TypeError('Invalid Version: ' + version);
+
+ this.raw = version;
+
+ // these are actually numbers
+ this.major = +m[1];
+ this.minor = +m[2];
+ this.patch = +m[3];
+
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0)
+ throw new TypeError('Invalid major version')
+
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0)
+ throw new TypeError('Invalid minor version')
+
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0)
+ throw new TypeError('Invalid patch version')
+
+ // numberify any prerelease numeric ids
+ if (!m[4])
+ this.prerelease = [];
+ else
+ this.prerelease = m[4].split('.').map(function(id) {
+ if (/^[0-9]+$/.test(id)) {
+ var num = +id
+ if (num >= 0 && num < MAX_SAFE_INTEGER)
+ return num
+ }
+ return id;
+ });
+
+ this.build = m[5] ? m[5].split('.') : [];
+ this.format();
+}
+
+SemVer.prototype.format = function() {
+ this.version = this.major + '.' + this.minor + '.' + this.patch;
+ if (this.prerelease.length)
+ this.version += '-' + this.prerelease.join('.');
+ return this.version;
+};
+
+SemVer.prototype.inspect = function() {
+ return '<SemVer "' + this + '">';
+};
+
+SemVer.prototype.toString = function() {
+ return this.version;
+};
+
+SemVer.prototype.compare = function(other) {
+ debug('SemVer.compare', this.version, this.loose, other);
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ return this.compareMain(other) || this.comparePre(other);
+};
+
+SemVer.prototype.compareMain = function(other) {
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ return compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch);
+};
+
+SemVer.prototype.comparePre = function(other) {
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length)
+ return -1;
+ else if (!this.prerelease.length && other.prerelease.length)
+ return 1;
+ else if (!this.prerelease.length && !other.prerelease.length)
+ return 0;
+
+ var i = 0;
+ do {
+ var a = this.prerelease[i];
+ var b = other.prerelease[i];
+ debug('prerelease compare', i, a, b);
+ if (a === undefined && b === undefined)
+ return 0;
+ else if (b === undefined)
+ return 1;
+ else if (a === undefined)
+ return -1;
+ else if (a === b)
+ continue;
+ else
+ return compareIdentifiers(a, b);
+ } while (++i);
+};
+
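+// Comment sketch of the precedence rules above, assuming
+// `var semver = require('semver')`:
+//
+//   semver.compare('1.2.3-alpha', '1.2.3')   // => -1  (prerelease < release)
+//   semver.gt('1.2.3', '1.2.3-asdf')         // => true
+//   semver.compare('1.2.3', '1.2.3')         // => 0
+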
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function(release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0;
+ this.patch = 0;
+ this.minor = 0;
+ this.major++;
+ this.inc('pre', identifier);
+ break;
+ case 'preminor':
+ this.prerelease.length = 0;
+ this.patch = 0;
+ this.minor++;
+ this.inc('pre', identifier);
+ break;
+ case 'prepatch':
+      // If this is already a prerelease, it will bump to the next version,
+      // dropping any prereleases that might already exist, since they are
+      // not relevant at this point.
+ this.prerelease.length = 0;
+ this.inc('patch', identifier);
+ this.inc('pre', identifier);
+ break;
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0)
+ this.inc('patch', identifier);
+ this.inc('pre', identifier);
+ break;
+
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0)
+ this.major++;
+ this.minor = 0;
+ this.patch = 0;
+ this.prerelease = [];
+ break;
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0)
+ this.minor++;
+ this.patch = 0;
+ this.prerelease = [];
+ break;
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0)
+ this.patch++;
+ this.prerelease = [];
+ break;
+ // This probably shouldn't be used publicly.
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0)
+ this.prerelease = [0];
+ else {
+ var i = this.prerelease.length;
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++;
+ i = -2;
+ }
+ }
+ if (i === -1) // didn't increment anything
+ this.prerelease.push(0);
+ }
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1]))
+ this.prerelease = [identifier, 0];
+ } else
+ this.prerelease = [identifier, 0];
+ }
+ break;
+
+ default:
+ throw new Error('invalid increment argument: ' + release);
+ }
+ this.format();
+ return this;
+};
+
+exports.inc = inc;
+function inc(version, release, loose, identifier) {
+ if (typeof(loose) === 'string') {
+ identifier = loose;
+ loose = undefined;
+ }
+
+ try {
+ return new SemVer(version, loose).inc(release, identifier).version;
+ } catch (er) {
+ return null;
+ }
+}
+
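+// Usage sketch for inc(), assuming `var semver = require('semver')`
+// (the expected results mirror the "increment versions" tests):
+//
+//   semver.inc('1.2.3', 'major')               // => '2.0.0'
+//   semver.inc('1.2.0', 'preminor')            // => '1.3.0-0'
+//   semver.inc('1.2.4', 'prerelease', 'dev')   // => '1.2.5-dev.0'
+//   semver.inc('not a version', 'patch')       // => null
+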
+exports.diff = diff;
+function diff(version1, version2) {
+ if (eq(version1, version2)) {
+ return null;
+ } else {
+ var v1 = parse(version1);
+ var v2 = parse(version2);
+ if (v1.prerelease.length || v2.prerelease.length) {
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return 'pre'+key;
+ }
+ }
+ }
+ return 'prerelease';
+ }
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return key;
+ }
+ }
+ }
+ }
+}
+
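+// Usage sketch for diff(), assuming `var semver = require('semver')`:
+//
+//   semver.diff('1.2.3', '0.2.3')       // => 'major'
+//   semver.diff('1.2.3', '1.2.4-pre')   // => 'prepatch'
+//   semver.diff('1.0.0', '1.0.0')       // => null
+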
+exports.compareIdentifiers = compareIdentifiers;
+
+var numeric = /^[0-9]+$/;
+function compareIdentifiers(a, b) {
+ var anum = numeric.test(a);
+ var bnum = numeric.test(b);
+
+ if (anum && bnum) {
+ a = +a;
+ b = +b;
+ }
+
+ return (anum && !bnum) ? -1 :
+ (bnum && !anum) ? 1 :
+ a < b ? -1 :
+ a > b ? 1 :
+ 0;
+}
+
+exports.rcompareIdentifiers = rcompareIdentifiers;
+function rcompareIdentifiers(a, b) {
+ return compareIdentifiers(b, a);
+}
+
+exports.major = major;
+function major(a, loose) {
+ return new SemVer(a, loose).major;
+}
+
+exports.minor = minor;
+function minor(a, loose) {
+ return new SemVer(a, loose).minor;
+}
+
+exports.patch = patch;
+function patch(a, loose) {
+ return new SemVer(a, loose).patch;
+}
+
+exports.compare = compare;
+function compare(a, b, loose) {
+ return new SemVer(a, loose).compare(b);
+}
+
+exports.compareLoose = compareLoose;
+function compareLoose(a, b) {
+ return compare(a, b, true);
+}
+
+exports.rcompare = rcompare;
+function rcompare(a, b, loose) {
+ return compare(b, a, loose);
+}
+
+exports.sort = sort;
+function sort(list, loose) {
+ return list.sort(function(a, b) {
+ return exports.compare(a, b, loose);
+ });
+}
+
+exports.rsort = rsort;
+function rsort(list, loose) {
+ return list.sort(function(a, b) {
+ return exports.rcompare(a, b, loose);
+ });
+}
+
+exports.gt = gt;
+function gt(a, b, loose) {
+ return compare(a, b, loose) > 0;
+}
+
+exports.lt = lt;
+function lt(a, b, loose) {
+ return compare(a, b, loose) < 0;
+}
+
+exports.eq = eq;
+function eq(a, b, loose) {
+ return compare(a, b, loose) === 0;
+}
+
+exports.neq = neq;
+function neq(a, b, loose) {
+ return compare(a, b, loose) !== 0;
+}
+
+exports.gte = gte;
+function gte(a, b, loose) {
+ return compare(a, b, loose) >= 0;
+}
+
+exports.lte = lte;
+function lte(a, b, loose) {
+ return compare(a, b, loose) <= 0;
+}
+
+exports.cmp = cmp;
+function cmp(a, op, b, loose) {
+ var ret;
+ switch (op) {
+ case '===':
+ if (typeof a === 'object') a = a.version;
+ if (typeof b === 'object') b = b.version;
+ ret = a === b;
+ break;
+ case '!==':
+ if (typeof a === 'object') a = a.version;
+ if (typeof b === 'object') b = b.version;
+ ret = a !== b;
+ break;
+ case '': case '=': case '==': ret = eq(a, b, loose); break;
+ case '!=': ret = neq(a, b, loose); break;
+ case '>': ret = gt(a, b, loose); break;
+ case '>=': ret = gte(a, b, loose); break;
+ case '<': ret = lt(a, b, loose); break;
+ case '<=': ret = lte(a, b, loose); break;
+ default: throw new TypeError('Invalid operator: ' + op);
+ }
+ return ret;
+}
+
+exports.Comparator = Comparator;
+function Comparator(comp, loose) {
+ if (comp instanceof Comparator) {
+ if (comp.loose === loose)
+ return comp;
+ else
+ comp = comp.value;
+ }
+
+ if (!(this instanceof Comparator))
+ return new Comparator(comp, loose);
+
+ debug('comparator', comp, loose);
+ this.loose = loose;
+ this.parse(comp);
+
+ if (this.semver === ANY)
+ this.value = '';
+ else
+ this.value = this.operator + this.semver.version;
+
+ debug('comp', this);
+}
+
+var ANY = {};
+Comparator.prototype.parse = function(comp) {
+ var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
+ var m = comp.match(r);
+
+ if (!m)
+ throw new TypeError('Invalid comparator: ' + comp);
+
+ this.operator = m[1];
+ if (this.operator === '=')
+ this.operator = '';
+
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2])
+ this.semver = ANY;
+ else
+ this.semver = new SemVer(m[2], this.loose);
+};
+
+Comparator.prototype.inspect = function() {
+ return '<SemVer Comparator "' + this + '">';
+};
+
+Comparator.prototype.toString = function() {
+ return this.value;
+};
+
+Comparator.prototype.test = function(version) {
+ debug('Comparator.test', version, this.loose);
+
+ if (this.semver === ANY)
+ return true;
+
+ if (typeof version === 'string')
+ version = new SemVer(version, this.loose);
+
+ return cmp(version, this.operator, this.semver, this.loose);
+};
+
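+// A small Comparator sketch, assuming `var semver = require('semver')`:
+//
+//   var c = new semver.Comparator('>=1.2.3');
+//   c.test('1.3.0')                          // => true
+//   c.test('1.2.0')                          // => false
+//   new semver.Comparator('').test('0.0.1')  // => true  (the ANY comparator)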
+
+exports.Range = Range;
+function Range(range, loose) {
+ if ((range instanceof Range) && range.loose === loose)
+ return range;
+
+ if (!(this instanceof Range))
+ return new Range(range, loose);
+
+ this.loose = loose;
+
+  // First, split the range on || (boolean OR).
+ this.raw = range;
+ this.set = range.split(/\s*\|\|\s*/).map(function(range) {
+ return this.parseRange(range.trim());
+ }, this).filter(function(c) {
+ // throw out any that are not relevant for whatever reason
+ return c.length;
+ });
+
+ if (!this.set.length) {
+ throw new TypeError('Invalid SemVer Range: ' + range);
+ }
+
+ this.format();
+}
+
+Range.prototype.inspect = function() {
+ return '<SemVer Range "' + this.range + '">';
+};
+
+Range.prototype.format = function() {
+ this.range = this.set.map(function(comps) {
+ return comps.join(' ').trim();
+ }).join('||').trim();
+ return this.range;
+};
+
+Range.prototype.toString = function() {
+ return this.range;
+};
+
+Range.prototype.parseRange = function(range) {
+ var loose = this.loose;
+ range = range.trim();
+ debug('range', range, loose);
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE];
+ range = range.replace(hr, hyphenReplace);
+ debug('hyphen replace', range);
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace);
+ debug('comparator trim', range, re[COMPARATORTRIM]);
+
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[TILDETRIM], tildeTrimReplace);
+
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[CARETTRIM], caretTrimReplace);
+
+ // normalize spaces
+ range = range.split(/\s+/).join(' ');
+
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
+
+ var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
+ var set = range.split(' ').map(function(comp) {
+ return parseComparator(comp, loose);
+ }).join(' ').split(/\s+/);
+ if (this.loose) {
+ // in loose mode, throw out any that are not valid comparators
+ set = set.filter(function(comp) {
+ return !!comp.match(compRe);
+ });
+ }
+ set = set.map(function(comp) {
+ return new Comparator(comp, loose);
+ });
+
+ return set;
+};
+
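+// Range-parsing sketch (normalized forms match the "valid range" tests),
+// assuming `var semver = require('semver')`:
+//
+//   new semver.Range('1.0.0 - 2.0.0').range  // => '>=1.0.0 <=2.0.0'
+//   new semver.Range('1.2.x || 2.x').range   // => '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'
+//   new semver.Range('>=*').range            // => ''  (validRange reports '*')
+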
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators;
+function toComparators(range, loose) {
+ return new Range(range, loose).set.map(function(comp) {
+ return comp.map(function(c) {
+ return c.value;
+ }).join(' ').trim().split(' ');
+ });
+}
+
+// At this point the comp is composed of xranges, tildes, stars, and gtlt's;
+// hyphen ranges have already been replaced.
+// Turn it into a set of JUST comparators.
+function parseComparator(comp, loose) {
+ debug('comp', comp);
+ comp = replaceCarets(comp, loose);
+ debug('caret', comp);
+ comp = replaceTildes(comp, loose);
+ debug('tildes', comp);
+ comp = replaceXRanges(comp, loose);
+ debug('xrange', comp);
+ comp = replaceStars(comp, loose);
+ debug('stars', comp);
+ return comp;
+}
+
+function isX(id) {
+ return !id || id.toLowerCase() === 'x' || id === '*';
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes(comp, loose) {
+ return comp.trim().split(/\s+/).map(function(comp) {
+ return replaceTilde(comp, loose);
+ }).join(' ');
+}
+
+function replaceTilde(comp, loose) {
+ var r = loose ? re[TILDELOOSE] : re[TILDE];
+ return comp.replace(r, function(_, M, m, p, pr) {
+ debug('tilde', comp, _, M, m, p, pr);
+ var ret;
+
+ if (isX(M))
+ ret = '';
+ else if (isX(m))
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ else if (isX(p))
+      // ~1.2 == >=1.2.0 <1.3.0
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ else if (pr) {
+ debug('replaceTilde pr', pr);
+ if (pr.charAt(0) !== '-')
+ pr = '-' + pr;
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ // ~1.2.3 == >=1.2.3 <1.3.0
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0';
+
+ debug('tilde return', ret);
+ return ret;
+ });
+}
+
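+// Tilde desugaring sketch, matching the table above and assuming
+// `var semver = require('semver')`:
+//
+//   new semver.Range('~1.2.3').range   // => '>=1.2.3 <1.3.0'
+//   new semver.Range('~1.2').range     // => '>=1.2.0 <1.3.0'
+//   new semver.Range('~1').range       // => '>=1.0.0 <2.0.0'
+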
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets(comp, loose) {
+ return comp.trim().split(/\s+/).map(function(comp) {
+ return replaceCaret(comp, loose);
+ }).join(' ');
+}
+
+function replaceCaret(comp, loose) {
+ debug('caret', comp, loose);
+ var r = loose ? re[CARETLOOSE] : re[CARET];
+ return comp.replace(r, function(_, M, m, p, pr) {
+ debug('caret', comp, _, M, m, p, pr);
+ var ret;
+
+ if (isX(M))
+ ret = '';
+ else if (isX(m))
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ else if (isX(p)) {
+ if (M === '0')
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ else
+ ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0';
+ } else if (pr) {
+ debug('replaceCaret pr', pr);
+ if (pr.charAt(0) !== '-')
+ pr = '-' + pr;
+ if (M === '0') {
+ if (m === '0')
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + m + '.' + (+p + 1);
+ else
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + (+M + 1) + '.0.0';
+ } else {
+ debug('no pr');
+ if (M === '0') {
+ if (m === '0')
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + m + '.' + (+p + 1);
+ else
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + (+M + 1) + '.0.0';
+ }
+
+ debug('caret return', ret);
+ return ret;
+ });
+}
+
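+// Caret desugaring sketch, matching the table above and assuming
+// `var semver = require('semver')`:
+//
+//   new semver.Range('^1.2.3').range   // => '>=1.2.3 <2.0.0'
+//   new semver.Range('^0.1.2').range   // => '>=0.1.2 <0.2.0'
+//   new semver.Range('^0.0.1').range   // => '>=0.0.1 <0.0.2'
+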
+function replaceXRanges(comp, loose) {
+ debug('replaceXRanges', comp, loose);
+ return comp.split(/\s+/).map(function(comp) {
+ return replaceXRange(comp, loose);
+ }).join(' ');
+}
+
+function replaceXRange(comp, loose) {
+ comp = comp.trim();
+ var r = loose ? re[XRANGELOOSE] : re[XRANGE];
+ return comp.replace(r, function(ret, gtlt, M, m, p, pr) {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr);
+ var xM = isX(M);
+ var xm = xM || isX(m);
+ var xp = xm || isX(p);
+ var anyX = xp;
+
+ if (gtlt === '=' && anyX)
+ gtlt = '';
+
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0';
+ } else {
+ // nothing is forbidden
+ ret = '*';
+ }
+ } else if (gtlt && anyX) {
+ // replace X with 0
+ if (xm)
+ m = 0;
+ if (xp)
+ p = 0;
+
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+        // >1.2.3 => >=1.2.4
+ gtlt = '>=';
+ if (xm) {
+ M = +M + 1;
+ m = 0;
+ p = 0;
+ } else if (xp) {
+ m = +m + 1;
+ p = 0;
+ }
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<'
+ if (xm)
+ M = +M + 1
+ else
+ m = +m + 1
+ }
+
+ ret = gtlt + M + '.' + m + '.' + p;
+ } else if (xm) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ } else if (xp) {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ }
+
+ debug('xRange return', ret);
+
+ return ret;
+ });
+}
+
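+// X-range desugaring sketch, mirroring the inline examples above and
+// assuming `var semver = require('semver')`:
+//
+//   new semver.Range('2.x.x').range   // => '>=2.0.0 <3.0.0'
+//   new semver.Range('1.2.x').range   // => '>=1.2.0 <1.3.0'
+//   new semver.Range('>1.2').range    // => '>=1.3.0'
+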
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars(comp, loose) {
+ debug('replaceStars', comp, loose);
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[STAR], '');
+}
+
+// This function is passed to string.replace(re[HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) {
+
+ if (isX(fM))
+ from = '';
+ else if (isX(fm))
+ from = '>=' + fM + '.0.0';
+ else if (isX(fp))
+ from = '>=' + fM + '.' + fm + '.0';
+ else
+ from = '>=' + from;
+
+ if (isX(tM))
+ to = '';
+ else if (isX(tm))
+ to = '<' + (+tM + 1) + '.0.0';
+ else if (isX(tp))
+ to = '<' + tM + '.' + (+tm + 1) + '.0';
+ else if (tpr)
+ to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr;
+ else
+ to = '<=' + to;
+
+ return (from + ' ' + to).trim();
+}
+
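+// Hyphen-range sketch, using the same examples as the comment above and
+// assuming `var semver = require('semver')`:
+//
+//   new semver.Range('1.2 - 3.4.5').range   // => '>=1.2.0 <=3.4.5'
+//   new semver.Range('1.2.3 - 3.4').range   // => '>=1.2.3 <3.5.0'
+//   new semver.Range('1.2 - 3.4').range     // => '>=1.2.0 <3.5.0'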
+
+// if ANY of the sets matches ALL of its comparators, then pass
+Range.prototype.test = function(version) {
+ if (!version)
+ return false;
+
+ if (typeof version === 'string')
+ version = new SemVer(version, this.loose);
+
+ for (var i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version))
+ return true;
+ }
+ return false;
+};
+
+function testSet(set, version) {
+ for (var i = 0; i < set.length; i++) {
+ if (!set[i].test(version))
+ return false;
+ }
+
+ if (version.prerelease.length) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (var i = 0; i < set.length; i++) {
+ debug(set[i].semver);
+ if (set[i].semver === ANY)
+ continue;
+
+ if (set[i].semver.prerelease.length > 0) {
+ var allowed = set[i].semver;
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch)
+ return true;
+ }
+ }
+
+ // Version has a -pre, but it's not one of the ones we like.
+ return false;
+ }
+
+ return true;
+}
+
+exports.satisfies = satisfies;
+function satisfies(version, range, loose) {
+ try {
+ range = new Range(range, loose);
+ } catch (er) {
+ return false;
+ }
+ return range.test(version);
+}
+
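+// satisfies() sketch, including the prerelease rule enforced by testSet()
+// above; values match the range tests, assuming `var semver = require('semver')`:
+//
+//   semver.satisfies('1.2.3', '1.2.x')              // => true
+//   semver.satisfies('1.2.3-pre', '^1.2.3-alpha')   // => true
+//   semver.satisfies('1.2.3-beta', '^1.2.3')        // => false (foreign prerelease)
+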
+exports.maxSatisfying = maxSatisfying;
+function maxSatisfying(versions, range, loose) {
+ return versions.filter(function(version) {
+ return satisfies(version, range, loose);
+ }).sort(function(a, b) {
+ return rcompare(a, b, loose);
+ })[0] || null;
+}
+
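+// maxSatisfying() sketch, assuming `var semver = require('semver')`:
+//
+//   semver.maxSatisfying(['1.2.3', '1.2.4', '1.3.0'], '~1.2.0')   // => '1.2.4'
+//   semver.maxSatisfying(['0.9.0'], '^1.0.0')                     // => null
+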
+exports.validRange = validRange;
+function validRange(range, loose) {
+ try {
+ // Return '*' instead of '' so that truthiness works.
+ // This will throw if it's invalid anyway
+ return new Range(range, loose).range || '*';
+ } catch (er) {
+ return null;
+ }
+}
+
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr;
+function ltr(version, range, loose) {
+ return outside(version, range, '<', loose);
+}
+
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr;
+function gtr(version, range, loose) {
+ return outside(version, range, '>', loose);
+}
+
+exports.outside = outside;
+function outside(version, range, hilo, loose) {
+ version = new SemVer(version, loose);
+ range = new Range(range, loose);
+
+ var gtfn, ltefn, ltfn, comp, ecomp;
+ switch (hilo) {
+ case '>':
+ gtfn = gt;
+ ltefn = lte;
+ ltfn = lt;
+ comp = '>';
+ ecomp = '>=';
+ break;
+ case '<':
+ gtfn = lt;
+ ltefn = gte;
+ ltfn = gt;
+ comp = '<';
+ ecomp = '<=';
+ break;
+ default:
+ throw new TypeError('Must provide a hilo val of "<" or ">"');
+ }
+
+  // If it satisfies the range, it is not outside.
+ if (satisfies(version, range, loose)) {
+ return false;
+ }
+
+  // From now on, variable terms are as if we're in "gtr" mode,
+  // but note that everything is flipped for the "ltr" function.
+
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i];
+
+ var high = null;
+ var low = null;
+
+ comparators.forEach(function(comparator) {
+ if (comparator.semver === ANY) {
+ comparator = new Comparator('>=0.0.0')
+ }
+ high = high || comparator;
+ low = low || comparator;
+ if (gtfn(comparator.semver, high.semver, loose)) {
+ high = comparator;
+ } else if (ltfn(comparator.semver, low.semver, loose)) {
+ low = comparator;
+ }
+ });
+
+    // If the edge version comparator has an operator then our version
+    // isn't outside it.
+ if (high.operator === comp || high.operator === ecomp) {
+ return false;
+ }
+
+ // If the lowest version comparator has an operator and our version
+ // is less than it then it isn't higher than the range
+ if ((!low.operator || low.operator === comp) &&
+ ltefn(version, low.semver)) {
+ return false;
+ } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+ return false;
+ }
+ }
+ return true;
+}
+
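+// gtr()/ltr() sketch (version entirely above/below every version in the
+// range), assuming `var semver = require('semver')`:
+//
+//   semver.gtr('1.3.0', '~1.2.2')   // => true   (~1.2.2 is >=1.2.2 <1.3.0)
+//   semver.gtr('1.2.4', '~1.2.2')   // => false  (it satisfies the range)
+//   semver.ltr('1.0.0', '~1.2.2')   // => true
+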
+// Use the define() function if we're in AMD land
+if (typeof define === 'function' && define.amd)
+ define(exports);
diff --git a/deps/npm/node_modules/semver/semver.min.js b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.min.js
index dea027b11..dea027b11 100644
--- a/deps/npm/node_modules/semver/semver.min.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.min.js
diff --git a/deps/npm/node_modules/semver/semver.min.js.gz b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.min.js.gz
index cbbc16188..cbbc16188 100644
--- a/deps/npm/node_modules/semver/semver.min.js.gz
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.min.js.gz
Binary files differ
diff --git a/deps/npm/node_modules/semver/test/amd.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/amd.js
index a6041341b..a6041341b 100644
--- a/deps/npm/node_modules/semver/test/amd.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/amd.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/test/big-numbers.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/big-numbers.js
new file mode 100644
index 000000000..c051864bc
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/big-numbers.js
@@ -0,0 +1,31 @@
+var test = require('tap').test
+var semver = require('../')
+
+test('long version is too long', function (t) {
+ var v = '1.2.' + new Array(256).join('1')
+ t.throws(function () {
+ new semver.SemVer(v)
+ })
+ t.equal(semver.valid(v, false), null)
+ t.equal(semver.valid(v, true), null)
+ t.equal(semver.inc(v, 'patch'), null)
+ t.end()
+})
+
+test('big number is treated like a too-long version', function (t) {
+ var v = '1.2.' + new Array(100).join('1')
+ t.throws(function () {
+ new semver.SemVer(v)
+ })
+ t.equal(semver.valid(v, false), null)
+ t.equal(semver.valid(v, true), null)
+ t.equal(semver.inc(v, 'patch'), null)
+ t.end()
+})
+
+test('parsing null does not throw', function (t) {
+ t.equal(semver.parse(null), null)
+ t.equal(semver.parse({}), null)
+ t.equal(semver.parse(new semver.SemVer('1.2.3')).version, '1.2.3')
+ t.end()
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/test/clean.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/clean.js
new file mode 100644
index 000000000..9e268de95
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/clean.js
@@ -0,0 +1,29 @@
+var tap = require('tap');
+var test = tap.test;
+var semver = require('../semver.js');
+var clean = semver.clean;
+
+test('\nclean tests', function(t) {
+ // [range, version]
+ // Version should be detectable despite extra characters
+ [
+ ['1.2.3', '1.2.3'],
+ [' 1.2.3 ', '1.2.3'],
+ [' 1.2.3-4 ', '1.2.3-4'],
+ [' 1.2.3-pre ', '1.2.3-pre'],
+ [' =v1.2.3 ', '1.2.3'],
+ ['v1.2.3', '1.2.3'],
+ [' v1.2.3 ', '1.2.3'],
+ ['\t1.2.3', '1.2.3'],
+ ['>1.2.3', null],
+ ['~1.2.3', null],
+ ['<=1.2.3', null],
+ ['1.2.x', null]
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var msg = 'clean(' + range + ') = ' + version;
+ t.equal(clean(range), version, msg);
+ });
+ t.end();
+});
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/test/gtr.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/gtr.js
new file mode 100644
index 000000000..bbb87896c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/gtr.js
@@ -0,0 +1,173 @@
+var tap = require('tap');
+var test = tap.test;
+var semver = require('../semver.js');
+var gtr = semver.gtr;
+
+test('\ngtr tests', function(t) {
+ // [range, version, loose]
+ // Version should be greater than range
+ [
+ ['~1.2.2', '1.3.0'],
+ ['~0.6.1-1', '0.7.1-1'],
+ ['1.0.0 - 2.0.0', '2.0.1'],
+ ['1.0.0', '1.0.1-beta1'],
+ ['1.0.0', '2.0.0'],
+ ['<=2.0.0', '2.1.1'],
+ ['<=2.0.0', '3.2.9'],
+ ['<2.0.0', '2.0.0'],
+ ['0.1.20 || 1.2.4', '1.2.5'],
+ ['2.x.x', '3.0.0'],
+ ['1.2.x', '1.3.0'],
+ ['1.2.x || 2.x', '3.0.0'],
+ ['2.*.*', '5.0.1'],
+ ['1.2.*', '1.3.3'],
+ ['1.2.* || 2.*', '4.0.0'],
+ ['2', '3.0.0'],
+ ['2.3', '2.4.2'],
+ ['~2.4', '2.5.0'], // >=2.4.0 <2.5.0
+ ['~2.4', '2.5.5'],
+ ['~>3.2.1', '3.3.0'], // >=3.2.1 <3.3.0
+ ['~1', '2.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '2.2.4'],
+ ['~> 1', '3.2.3'],
+ ['~1.0', '1.1.2'], // >=1.0.0 <1.1.0
+ ['~ 1.0', '1.1.0'],
+ ['<1.2', '1.2.0'],
+ ['< 1.2', '1.2.1'],
+ ['1', '2.0.0beta', true],
+ ['~v0.5.4-pre', '0.6.0'],
+ ['~v0.5.4-pre', '0.6.1-pre'],
+ ['=0.7.x', '0.8.0'],
+ ['=0.7.x', '0.8.0-asdf'],
+ ['<0.7.x', '0.7.0'],
+ ['~1.2.2', '1.3.0'],
+ ['1.0.0 - 2.0.0', '2.2.3'],
+ ['1.0.0', '1.0.1'],
+ ['<=2.0.0', '3.0.0'],
+ ['<=2.0.0', '2.9999.9999'],
+ ['<=2.0.0', '2.2.9'],
+ ['<2.0.0', '2.9999.9999'],
+ ['<2.0.0', '2.2.9'],
+ ['2.x.x', '3.1.3'],
+ ['1.2.x', '1.3.3'],
+ ['1.2.x || 2.x', '3.1.3'],
+ ['2.*.*', '3.1.3'],
+ ['1.2.*', '1.3.3'],
+ ['1.2.* || 2.*', '3.1.3'],
+ ['2', '3.1.2'],
+ ['2.3', '2.4.1'],
+ ['~2.4', '2.5.0'], // >=2.4.0 <2.5.0
+ ['~>3.2.1', '3.3.2'], // >=3.2.1 <3.3.0
+ ['~1', '2.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '2.2.3'],
+ ['~1.0', '1.1.0'], // >=1.0.0 <1.1.0
+ ['<1', '1.0.0'],
+ ['1', '2.0.0beta', true],
+ ['<1', '1.0.0beta', true],
+ ['< 1', '1.0.0beta', true],
+ ['=0.7.x', '0.8.2'],
+ ['<0.7.x', '0.7.2']
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = 'gtr(' + version + ', ' + range + ', ' + loose + ')';
+ t.ok(gtr(version, range, loose), msg);
+ });
+ t.end();
+});
+
+test('\nnegative gtr tests', function(t) {
+ // [range, version, loose]
+ // Version should NOT be greater than range
+ [
+ ['~0.6.1-1', '0.6.1-1'],
+ ['1.0.0 - 2.0.0', '1.2.3'],
+ ['1.0.0 - 2.0.0', '0.9.9'],
+ ['1.0.0', '1.0.0'],
+ ['>=*', '0.2.4'],
+ ['', '1.0.0', true],
+ ['*', '1.2.3'],
+ ['*', 'v1.2.3-foo'],
+ ['>=1.0.0', '1.0.0'],
+ ['>=1.0.0', '1.0.1'],
+ ['>=1.0.0', '1.1.0'],
+ ['>1.0.0', '1.0.1'],
+ ['>1.0.0', '1.1.0'],
+ ['<=2.0.0', '2.0.0'],
+ ['<=2.0.0', '1.9999.9999'],
+ ['<=2.0.0', '0.2.9'],
+ ['<2.0.0', '1.9999.9999'],
+ ['<2.0.0', '0.2.9'],
+ ['>= 1.0.0', '1.0.0'],
+ ['>= 1.0.0', '1.0.1'],
+ ['>= 1.0.0', '1.1.0'],
+ ['> 1.0.0', '1.0.1'],
+ ['> 1.0.0', '1.1.0'],
+ ['<= 2.0.0', '2.0.0'],
+ ['<= 2.0.0', '1.9999.9999'],
+ ['<= 2.0.0', '0.2.9'],
+ ['< 2.0.0', '1.9999.9999'],
+ ['<\t2.0.0', '0.2.9'],
+ ['>=0.1.97', 'v0.1.97'],
+ ['>=0.1.97', '0.1.97'],
+ ['0.1.20 || 1.2.4', '1.2.4'],
+ ['0.1.20 || >1.2.4', '1.2.4'],
+ ['0.1.20 || 1.2.4', '1.2.3'],
+ ['0.1.20 || 1.2.4', '0.1.20'],
+ ['>=0.2.3 || <0.0.1', '0.0.0'],
+ ['>=0.2.3 || <0.0.1', '0.2.3'],
+ ['>=0.2.3 || <0.0.1', '0.2.4'],
+ ['||', '1.3.4'],
+ ['2.x.x', '2.1.3'],
+ ['1.2.x', '1.2.3'],
+ ['1.2.x || 2.x', '2.1.3'],
+ ['1.2.x || 2.x', '1.2.3'],
+ ['x', '1.2.3'],
+ ['2.*.*', '2.1.3'],
+ ['1.2.*', '1.2.3'],
+ ['1.2.* || 2.*', '2.1.3'],
+ ['1.2.* || 2.*', '1.2.3'],
+ ['1.2.* || 2.*', '1.2.3'],
+ ['*', '1.2.3'],
+ ['2', '2.1.2'],
+ ['2.3', '2.3.1'],
+ ['~2.4', '2.4.0'], // >=2.4.0 <2.5.0
+ ['~2.4', '2.4.5'],
+ ['~>3.2.1', '3.2.2'], // >=3.2.1 <3.3.0
+ ['~1', '1.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '1.2.3'],
+ ['~> 1', '1.2.3'],
+ ['~1.0', '1.0.2'], // >=1.0.0 <1.1.0
+ ['~ 1.0', '1.0.2'],
+ ['>=1', '1.0.0'],
+ ['>= 1', '1.0.0'],
+ ['<1.2', '1.1.1'],
+ ['< 1.2', '1.1.1'],
+ ['1', '1.0.0beta', true],
+ ['~v0.5.4-pre', '0.5.5'],
+ ['~v0.5.4-pre', '0.5.4'],
+ ['=0.7.x', '0.7.2'],
+ ['>=0.7.x', '0.7.2'],
+ ['=0.7.x', '0.7.0-asdf'],
+ ['>=0.7.x', '0.7.0-asdf'],
+ ['<=0.7.x', '0.6.2'],
+ ['>0.2.3 >0.2.4 <=0.2.5', '0.2.5'],
+ ['>=0.2.3 <=0.2.4', '0.2.4'],
+ ['1.0.0 - 2.0.0', '2.0.0'],
+ ['^1', '0.0.0-0'],
+ ['^3.0.0', '2.0.0'],
+ ['^1.0.0 || ~2.0.1', '2.0.0'],
+ ['^0.1.0 || ~3.0.1 || 5.0.0', '3.2.0'],
+ ['^0.1.0 || ~3.0.1 || 5.0.0', '1.0.0beta', true],
+ ['^0.1.0 || ~3.0.1 || 5.0.0', '5.0.0-0', true],
+ ['^0.1.0 || ~3.0.1 || >4 <=5.0.0', '3.5.0']
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = '!gtr(' + version + ', ' + range + ', ' + loose + ')';
+ t.notOk(gtr(version, range, loose), msg);
+ });
+ t.end();
+});
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/test/index.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/index.js
new file mode 100644
index 000000000..c256c7947
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/index.js
@@ -0,0 +1,685 @@
+'use strict';
+
+var tap = require('tap');
+var test = tap.test;
+var semver = require('../semver.js');
+var eq = semver.eq;
+var gt = semver.gt;
+var lt = semver.lt;
+var neq = semver.neq;
+var cmp = semver.cmp;
+var gte = semver.gte;
+var lte = semver.lte;
+var satisfies = semver.satisfies;
+var validRange = semver.validRange;
+var inc = semver.inc;
+var diff = semver.diff;
+var replaceStars = semver.replaceStars;
+var toComparators = semver.toComparators;
+var SemVer = semver.SemVer;
+var Range = semver.Range;
+
+test('\ncomparison tests', function(t) {
+ // [version1, version2]
+ // version1 should be greater than version2
+ [['0.0.0', '0.0.0-foo'],
+ ['0.0.1', '0.0.0'],
+ ['1.0.0', '0.9.9'],
+ ['0.10.0', '0.9.0'],
+ ['0.99.0', '0.10.0'],
+ ['2.0.0', '1.2.3'],
+ ['v0.0.0', '0.0.0-foo', true],
+ ['v0.0.1', '0.0.0', true],
+ ['v1.0.0', '0.9.9', true],
+ ['v0.10.0', '0.9.0', true],
+ ['v0.99.0', '0.10.0', true],
+ ['v2.0.0', '1.2.3', true],
+ ['0.0.0', 'v0.0.0-foo', true],
+ ['0.0.1', 'v0.0.0', true],
+ ['1.0.0', 'v0.9.9', true],
+ ['0.10.0', 'v0.9.0', true],
+ ['0.99.0', 'v0.10.0', true],
+ ['2.0.0', 'v1.2.3', true],
+ ['1.2.3', '1.2.3-asdf'],
+ ['1.2.3', '1.2.3-4'],
+ ['1.2.3', '1.2.3-4-foo'],
+ ['1.2.3-5-foo', '1.2.3-5'],
+ ['1.2.3-5', '1.2.3-4'],
+ ['1.2.3-5-foo', '1.2.3-5-Foo'],
+ ['3.0.0', '2.7.2+asdf'],
+ ['1.2.3-a.10', '1.2.3-a.5'],
+ ['1.2.3-a.b', '1.2.3-a.5'],
+ ['1.2.3-a.b', '1.2.3-a'],
+ ['1.2.3-a.b.c.10.d.5', '1.2.3-a.b.c.5.d.100'],
+ ['1.2.3-r2', '1.2.3-r100'],
+ ['1.2.3-r100', '1.2.3-R2']
+ ].forEach(function(v) {
+ var v0 = v[0];
+ var v1 = v[1];
+ var loose = v[2];
+ t.ok(gt(v0, v1, loose), "gt('" + v0 + "', '" + v1 + "')");
+ t.ok(lt(v1, v0, loose), "lt('" + v1 + "', '" + v0 + "')");
+ t.ok(!gt(v1, v0, loose), "!gt('" + v1 + "', '" + v0 + "')");
+ t.ok(!lt(v0, v1, loose), "!lt('" + v0 + "', '" + v1 + "')");
+ t.ok(eq(v0, v0, loose), "eq('" + v0 + "', '" + v0 + "')");
+ t.ok(eq(v1, v1, loose), "eq('" + v1 + "', '" + v1 + "')");
+ t.ok(neq(v0, v1, loose), "neq('" + v0 + "', '" + v1 + "')");
+ t.ok(cmp(v1, '==', v1, loose), "cmp('" + v1 + "' == '" + v1 + "')");
+ t.ok(cmp(v0, '>=', v1, loose), "cmp('" + v0 + "' >= '" + v1 + "')");
+ t.ok(cmp(v1, '<=', v0, loose), "cmp('" + v1 + "' <= '" + v0 + "')");
+ t.ok(cmp(v0, '!=', v1, loose), "cmp('" + v0 + "' != '" + v1 + "')");
+ });
+ t.end();
+});
+
+test('\nequality tests', function(t) {
+ // [version1, version2]
+ // version1 should be equivalent to version2
+ [['1.2.3', 'v1.2.3', true],
+ ['1.2.3', '=1.2.3', true],
+ ['1.2.3', 'v 1.2.3', true],
+ ['1.2.3', '= 1.2.3', true],
+ ['1.2.3', ' v1.2.3', true],
+ ['1.2.3', ' =1.2.3', true],
+ ['1.2.3', ' v 1.2.3', true],
+ ['1.2.3', ' = 1.2.3', true],
+ ['1.2.3-0', 'v1.2.3-0', true],
+ ['1.2.3-0', '=1.2.3-0', true],
+ ['1.2.3-0', 'v 1.2.3-0', true],
+ ['1.2.3-0', '= 1.2.3-0', true],
+ ['1.2.3-0', ' v1.2.3-0', true],
+ ['1.2.3-0', ' =1.2.3-0', true],
+ ['1.2.3-0', ' v 1.2.3-0', true],
+ ['1.2.3-0', ' = 1.2.3-0', true],
+ ['1.2.3-1', 'v1.2.3-1', true],
+ ['1.2.3-1', '=1.2.3-1', true],
+ ['1.2.3-1', 'v 1.2.3-1', true],
+ ['1.2.3-1', '= 1.2.3-1', true],
+ ['1.2.3-1', ' v1.2.3-1', true],
+ ['1.2.3-1', ' =1.2.3-1', true],
+ ['1.2.3-1', ' v 1.2.3-1', true],
+ ['1.2.3-1', ' = 1.2.3-1', true],
+ ['1.2.3-beta', 'v1.2.3-beta', true],
+ ['1.2.3-beta', '=1.2.3-beta', true],
+ ['1.2.3-beta', 'v 1.2.3-beta', true],
+ ['1.2.3-beta', '= 1.2.3-beta', true],
+ ['1.2.3-beta', ' v1.2.3-beta', true],
+ ['1.2.3-beta', ' =1.2.3-beta', true],
+ ['1.2.3-beta', ' v 1.2.3-beta', true],
+ ['1.2.3-beta', ' = 1.2.3-beta', true],
+ ['1.2.3-beta+build', ' = 1.2.3-beta+otherbuild', true],
+ ['1.2.3+build', ' = 1.2.3+otherbuild', true],
+ ['1.2.3-beta+build', '1.2.3-beta+otherbuild'],
+ ['1.2.3+build', '1.2.3+otherbuild'],
+ [' v1.2.3+build', '1.2.3+otherbuild']
+ ].forEach(function(v) {
+ var v0 = v[0];
+ var v1 = v[1];
+ var loose = v[2];
+ t.ok(eq(v0, v1, loose), "eq('" + v0 + "', '" + v1 + "')");
+ t.ok(!neq(v0, v1, loose), "!neq('" + v0 + "', '" + v1 + "')");
+ t.ok(cmp(v0, '==', v1, loose), 'cmp(' + v0 + '==' + v1 + ')');
+ t.ok(!cmp(v0, '!=', v1, loose), '!cmp(' + v0 + '!=' + v1 + ')');
+ t.ok(!cmp(v0, '===', v1, loose), '!cmp(' + v0 + '===' + v1 + ')');
+ t.ok(cmp(v0, '!==', v1, loose), 'cmp(' + v0 + '!==' + v1 + ')');
+ t.ok(!gt(v0, v1, loose), "!gt('" + v0 + "', '" + v1 + "')");
+ t.ok(gte(v0, v1, loose), "gte('" + v0 + "', '" + v1 + "')");
+ t.ok(!lt(v0, v1, loose), "!lt('" + v0 + "', '" + v1 + "')");
+ t.ok(lte(v0, v1, loose), "lte('" + v0 + "', '" + v1 + "')");
+ });
+ t.end();
+});
+
+
+test('\nrange tests', function(t) {
+ // [range, version]
+ // version should be included by range
+ [['1.0.0 - 2.0.0', '1.2.3'],
+ ['^1.2.3+build', '1.2.3'],
+ ['^1.2.3+build', '1.3.0'],
+ ['1.2.3-pre+asdf - 2.4.3-pre+asdf', '1.2.3'],
+ ['1.2.3pre+asdf - 2.4.3-pre+asdf', '1.2.3', true],
+ ['1.2.3-pre+asdf - 2.4.3pre+asdf', '1.2.3', true],
+ ['1.2.3pre+asdf - 2.4.3pre+asdf', '1.2.3', true],
+ ['1.2.3-pre+asdf - 2.4.3-pre+asdf', '1.2.3-pre.2'],
+ ['1.2.3-pre+asdf - 2.4.3-pre+asdf', '2.4.3-alpha'],
+ ['1.2.3+asdf - 2.4.3+asdf', '1.2.3'],
+ ['1.0.0', '1.0.0'],
+ ['>=*', '0.2.4'],
+ ['', '1.0.0'],
+ ['*', '1.2.3'],
+ ['*', 'v1.2.3', true],
+ ['>=1.0.0', '1.0.0'],
+ ['>=1.0.0', '1.0.1'],
+ ['>=1.0.0', '1.1.0'],
+ ['>1.0.0', '1.0.1'],
+ ['>1.0.0', '1.1.0'],
+ ['<=2.0.0', '2.0.0'],
+ ['<=2.0.0', '1.9999.9999'],
+ ['<=2.0.0', '0.2.9'],
+ ['<2.0.0', '1.9999.9999'],
+ ['<2.0.0', '0.2.9'],
+ ['>= 1.0.0', '1.0.0'],
+ ['>= 1.0.0', '1.0.1'],
+ ['>= 1.0.0', '1.1.0'],
+ ['> 1.0.0', '1.0.1'],
+ ['> 1.0.0', '1.1.0'],
+ ['<= 2.0.0', '2.0.0'],
+ ['<= 2.0.0', '1.9999.9999'],
+ ['<= 2.0.0', '0.2.9'],
+ ['< 2.0.0', '1.9999.9999'],
+ ['<\t2.0.0', '0.2.9'],
+ ['>=0.1.97', 'v0.1.97', true],
+ ['>=0.1.97', '0.1.97'],
+ ['0.1.20 || 1.2.4', '1.2.4'],
+ ['>=0.2.3 || <0.0.1', '0.0.0'],
+ ['>=0.2.3 || <0.0.1', '0.2.3'],
+ ['>=0.2.3 || <0.0.1', '0.2.4'],
+ ['||', '1.3.4'],
+ ['2.x.x', '2.1.3'],
+ ['1.2.x', '1.2.3'],
+ ['1.2.x || 2.x', '2.1.3'],
+ ['1.2.x || 2.x', '1.2.3'],
+ ['x', '1.2.3'],
+ ['2.*.*', '2.1.3'],
+ ['1.2.*', '1.2.3'],
+ ['1.2.* || 2.*', '2.1.3'],
+ ['1.2.* || 2.*', '1.2.3'],
+ ['*', '1.2.3'],
+ ['2', '2.1.2'],
+ ['2.3', '2.3.1'],
+ ['~2.4', '2.4.0'], // >=2.4.0 <2.5.0
+ ['~2.4', '2.4.5'],
+ ['~>3.2.1', '3.2.2'], // >=3.2.1 <3.3.0,
+ ['~1', '1.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '1.2.3'],
+ ['~> 1', '1.2.3'],
+ ['~1.0', '1.0.2'], // >=1.0.0 <1.1.0,
+ ['~ 1.0', '1.0.2'],
+ ['~ 1.0.3', '1.0.12'],
+ ['>=1', '1.0.0'],
+ ['>= 1', '1.0.0'],
+ ['<1.2', '1.1.1'],
+ ['< 1.2', '1.1.1'],
+ ['~v0.5.4-pre', '0.5.5'],
+ ['~v0.5.4-pre', '0.5.4'],
+ ['=0.7.x', '0.7.2'],
+ ['<=0.7.x', '0.7.2'],
+ ['>=0.7.x', '0.7.2'],
+ ['<=0.7.x', '0.6.2'],
+ ['~1.2.1 >=1.2.3', '1.2.3'],
+ ['~1.2.1 =1.2.3', '1.2.3'],
+ ['~1.2.1 1.2.3', '1.2.3'],
+ ['~1.2.1 >=1.2.3 1.2.3', '1.2.3'],
+ ['~1.2.1 1.2.3 >=1.2.3', '1.2.3'],
+ ['~1.2.1 1.2.3', '1.2.3'],
+ ['>=1.2.1 1.2.3', '1.2.3'],
+ ['1.2.3 >=1.2.1', '1.2.3'],
+ ['>=1.2.3 >=1.2.1', '1.2.3'],
+ ['>=1.2.1 >=1.2.3', '1.2.3'],
+ ['>=1.2', '1.2.8'],
+ ['^1.2.3', '1.8.1'],
+ ['^0.1.2', '0.1.2'],
+ ['^0.1', '0.1.2'],
+ ['^1.2', '1.4.2'],
+ ['^1.2 ^1', '1.4.2'],
+ ['^1.2.3-alpha', '1.2.3-pre'],
+ ['^1.2.0-alpha', '1.2.0-pre'],
+ ['^0.0.1-alpha', '0.0.1-beta']
+ ].forEach(function(v) {
+ var range = v[0];
+ var ver = v[1];
+ var loose = v[2];
+ t.ok(satisfies(ver, range, loose), range + ' satisfied by ' + ver);
+ });
+ t.end();
+});
+
+test('\nnegative range tests', function(t) {
+ // [range, version]
+ // version should not be included by range
+ [['1.0.0 - 2.0.0', '2.2.3'],
+ ['1.2.3+asdf - 2.4.3+asdf', '1.2.3-pre.2'],
+ ['1.2.3+asdf - 2.4.3+asdf', '2.4.3-alpha'],
+ ['^1.2.3+build', '2.0.0'],
+ ['^1.2.3+build', '1.2.0'],
+ ['^1.2.3', '1.2.3-pre'],
+ ['^1.2', '1.2.0-pre'],
+ ['>1.2', '1.3.0-beta'],
+ ['<=1.2.3', '1.2.3-beta'],
+ ['^1.2.3', '1.2.3-beta'],
+ ['=0.7.x', '0.7.0-asdf'],
+ ['>=0.7.x', '0.7.0-asdf'],
+ ['1', '1.0.0beta', true],
+ ['<1', '1.0.0beta', true],
+ ['< 1', '1.0.0beta', true],
+ ['1.0.0', '1.0.1'],
+ ['>=1.0.0', '0.0.0'],
+ ['>=1.0.0', '0.0.1'],
+ ['>=1.0.0', '0.1.0'],
+ ['>1.0.0', '0.0.1'],
+ ['>1.0.0', '0.1.0'],
+ ['<=2.0.0', '3.0.0'],
+ ['<=2.0.0', '2.9999.9999'],
+ ['<=2.0.0', '2.2.9'],
+ ['<2.0.0', '2.9999.9999'],
+ ['<2.0.0', '2.2.9'],
+ ['>=0.1.97', 'v0.1.93', true],
+ ['>=0.1.97', '0.1.93'],
+ ['0.1.20 || 1.2.4', '1.2.3'],
+ ['>=0.2.3 || <0.0.1', '0.0.3'],
+ ['>=0.2.3 || <0.0.1', '0.2.2'],
+ ['2.x.x', '1.1.3'],
+ ['2.x.x', '3.1.3'],
+ ['1.2.x', '1.3.3'],
+ ['1.2.x || 2.x', '3.1.3'],
+ ['1.2.x || 2.x', '1.1.3'],
+ ['2.*.*', '1.1.3'],
+ ['2.*.*', '3.1.3'],
+ ['1.2.*', '1.3.3'],
+ ['1.2.* || 2.*', '3.1.3'],
+ ['1.2.* || 2.*', '1.1.3'],
+ ['2', '1.1.2'],
+ ['2.3', '2.4.1'],
+ ['~2.4', '2.5.0'], // >=2.4.0 <2.5.0
+ ['~2.4', '2.3.9'],
+ ['~>3.2.1', '3.3.2'], // >=3.2.1 <3.3.0
+ ['~>3.2.1', '3.2.0'], // >=3.2.1 <3.3.0
+ ['~1', '0.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '2.2.3'],
+ ['~1.0', '1.1.0'], // >=1.0.0 <1.1.0
+ ['<1', '1.0.0'],
+ ['>=1.2', '1.1.1'],
+ ['1', '2.0.0beta', true],
+ ['~v0.5.4-beta', '0.5.4-alpha'],
+ ['=0.7.x', '0.8.2'],
+ ['>=0.7.x', '0.6.2'],
+ ['<0.7.x', '0.7.2'],
+ ['<1.2.3', '1.2.3-beta'],
+ ['=1.2.3', '1.2.3-beta'],
+ ['>1.2', '1.2.8'],
+ ['^1.2.3', '2.0.0-alpha'],
+ ['^1.2.3', '1.2.2'],
+ ['^1.2', '1.1.9'],
+ ['*', 'v1.2.3-foo', true],
+ // invalid ranges never satisfied!
+ ['blerg', '1.2.3'],
+ ['git+https://user:password0123@github.com/foo', '123.0.0', true],
+ ['^1.2.3', '2.0.0-pre']
+ ].forEach(function(v) {
+ var range = v[0];
+ var ver = v[1];
+ var loose = v[2];
+ var found = satisfies(ver, range, loose);
+ t.ok(!found, ver + ' not satisfied by ' + range);
+ });
+ t.end();
+});
+
+test('\nincrement versions test', function(t) {
+// [version, inc, result, identifier]
+// inc(version, inc) -> result
+ [['1.2.3', 'major', '2.0.0'],
+ ['1.2.3', 'minor', '1.3.0'],
+ ['1.2.3', 'patch', '1.2.4'],
+ ['1.2.3tag', 'major', '2.0.0', true],
+ ['1.2.3-tag', 'major', '2.0.0'],
+ ['1.2.3', 'fake', null],
+ ['1.2.0-0', 'patch', '1.2.0'],
+ ['fake', 'major', null],
+ ['1.2.3-4', 'major', '2.0.0'],
+ ['1.2.3-4', 'minor', '1.3.0'],
+ ['1.2.3-4', 'patch', '1.2.3'],
+ ['1.2.3-alpha.0.beta', 'major', '2.0.0'],
+ ['1.2.3-alpha.0.beta', 'minor', '1.3.0'],
+ ['1.2.3-alpha.0.beta', 'patch', '1.2.3'],
+ ['1.2.4', 'prerelease', '1.2.5-0'],
+ ['1.2.3-0', 'prerelease', '1.2.3-1'],
+ ['1.2.3-alpha.0', 'prerelease', '1.2.3-alpha.1'],
+ ['1.2.3-alpha.1', 'prerelease', '1.2.3-alpha.2'],
+ ['1.2.3-alpha.2', 'prerelease', '1.2.3-alpha.3'],
+ ['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-alpha.1.beta'],
+ ['1.2.3-alpha.1.beta', 'prerelease', '1.2.3-alpha.2.beta'],
+ ['1.2.3-alpha.2.beta', 'prerelease', '1.2.3-alpha.3.beta'],
+ ['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-alpha.10.1.beta'],
+ ['1.2.3-alpha.10.1.beta', 'prerelease', '1.2.3-alpha.10.2.beta'],
+ ['1.2.3-alpha.10.2.beta', 'prerelease', '1.2.3-alpha.10.3.beta'],
+ ['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-alpha.10.beta.1'],
+ ['1.2.3-alpha.10.beta.1', 'prerelease', '1.2.3-alpha.10.beta.2'],
+ ['1.2.3-alpha.10.beta.2', 'prerelease', '1.2.3-alpha.10.beta.3'],
+ ['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-alpha.10.beta'],
+ ['1.2.3-alpha.10.beta', 'prerelease', '1.2.3-alpha.11.beta'],
+ ['1.2.3-alpha.11.beta', 'prerelease', '1.2.3-alpha.12.beta'],
+ ['1.2.0', 'prepatch', '1.2.1-0'],
+ ['1.2.0-1', 'prepatch', '1.2.1-0'],
+ ['1.2.0', 'preminor', '1.3.0-0'],
+ ['1.2.3-1', 'preminor', '1.3.0-0'],
+ ['1.2.0', 'premajor', '2.0.0-0'],
+ ['1.2.3-1', 'premajor', '2.0.0-0'],
+ ['1.2.0-1', 'minor', '1.2.0'],
+ ['1.0.0-1', 'major', '1.0.0'],
+
+ ['1.2.3', 'major', '2.0.0', false, 'dev'],
+ ['1.2.3', 'minor', '1.3.0', false, 'dev'],
+ ['1.2.3', 'patch', '1.2.4', false, 'dev'],
+ ['1.2.3tag', 'major', '2.0.0', true, 'dev'],
+ ['1.2.3-tag', 'major', '2.0.0', false, 'dev'],
+ ['1.2.3', 'fake', null, false, 'dev'],
+ ['1.2.0-0', 'patch', '1.2.0', false, 'dev'],
+ ['fake', 'major', null, false, 'dev'],
+ ['1.2.3-4', 'major', '2.0.0', false, 'dev'],
+ ['1.2.3-4', 'minor', '1.3.0', false, 'dev'],
+ ['1.2.3-4', 'patch', '1.2.3', false, 'dev'],
+ ['1.2.3-alpha.0.beta', 'major', '2.0.0', false, 'dev'],
+ ['1.2.3-alpha.0.beta', 'minor', '1.3.0', false, 'dev'],
+ ['1.2.3-alpha.0.beta', 'patch', '1.2.3', false, 'dev'],
+ ['1.2.4', 'prerelease', '1.2.5-dev.0', false, 'dev'],
+ ['1.2.3-0', 'prerelease', '1.2.3-dev.0', false, 'dev'],
+ ['1.2.3-alpha.0', 'prerelease', '1.2.3-dev.0', false, 'dev'],
+ ['1.2.3-alpha.0', 'prerelease', '1.2.3-alpha.1', false, 'alpha'],
+ ['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-dev.0', false, 'dev'],
+ ['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-alpha.1.beta', false, 'alpha'],
+ ['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-dev.0', false, 'dev'],
+ ['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-alpha.10.1.beta', false, 'alpha'],
+ ['1.2.3-alpha.10.1.beta', 'prerelease', '1.2.3-alpha.10.2.beta', false, 'alpha'],
+ ['1.2.3-alpha.10.2.beta', 'prerelease', '1.2.3-alpha.10.3.beta', false, 'alpha'],
+ ['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-dev.0', false, 'dev'],
+ ['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-alpha.10.beta.1', false, 'alpha'],
+ ['1.2.3-alpha.10.beta.1', 'prerelease', '1.2.3-alpha.10.beta.2', false, 'alpha'],
+ ['1.2.3-alpha.10.beta.2', 'prerelease', '1.2.3-alpha.10.beta.3', false, 'alpha'],
+ ['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-dev.0', false, 'dev'],
+ ['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-alpha.10.beta', false, 'alpha'],
+ ['1.2.3-alpha.10.beta', 'prerelease', '1.2.3-alpha.11.beta', false, 'alpha'],
+ ['1.2.3-alpha.11.beta', 'prerelease', '1.2.3-alpha.12.beta', false, 'alpha'],
+ ['1.2.0', 'prepatch', '1.2.1-dev.0', 'dev'],
+ ['1.2.0-1', 'prepatch', '1.2.1-dev.0', 'dev'],
+ ['1.2.0', 'preminor', '1.3.0-dev.0', 'dev'],
+ ['1.2.3-1', 'preminor', '1.3.0-dev.0', 'dev'],
+ ['1.2.0', 'premajor', '2.0.0-dev.0', 'dev'],
+ ['1.2.3-1', 'premajor', '2.0.0-dev.0', 'dev'],
+ ['1.2.0-1', 'minor', '1.2.0', 'dev'],
+ ['1.0.0-1', 'major', '1.0.0', 'dev'],
+ ['1.2.3-dev.bar', 'prerelease', '1.2.3-dev.0', false, 'dev']
+
+ ].forEach(function(v) {
+ var pre = v[0];
+ var what = v[1];
+ var wanted = v[2];
+ var loose = v[3];
+ var id = v[4];
+ var found = inc(pre, what, loose, id);
+ var cmd = 'inc(' + pre + ', ' + what + ', ' + id + ')';
+ t.equal(found, wanted, cmd + ' === ' + wanted);
+ });
+
+ t.end();
+});
+
+test('\ndiff versions test', function(t) {
+// [version1, version2, result]
+// diff(version1, version2) -> result
+ [['1.2.3', '0.2.3', 'major'],
+ ['1.4.5', '0.2.3', 'major'],
+ ['1.2.3', '2.0.0-pre', 'premajor'],
+ ['1.2.3', '1.3.3', 'minor'],
+ ['1.0.1', '1.1.0-pre', 'preminor'],
+ ['1.2.3', '1.2.4', 'patch'],
+ ['1.2.3', '1.2.4-pre', 'prepatch'],
+ ['0.0.1', '0.0.1-pre', 'prerelease'],
+ ['0.0.1', '0.0.1-pre-2', 'prerelease'],
+ ['1.1.0', '1.1.0-pre', 'prerelease'],
+ ['1.1.0-pre-1', '1.1.0-pre-2', 'prerelease'],
+ ['1.0.0', '1.0.0', null]
+
+ ].forEach(function(v) {
+ var version1 = v[0];
+ var version2 = v[1];
+ var wanted = v[2];
+ var found = diff(version1, version2);
+ var cmd = 'diff(' + version1 + ', ' + version2 + ')';
+ t.equal(found, wanted, cmd + ' === ' + wanted);
+ });
+
+ t.end();
+});
+
+test('\nvalid range test', function(t) {
+ // [range, result]
+ // validRange(range) -> result
+ // translate ranges into their canonical form
+ [['1.0.0 - 2.0.0', '>=1.0.0 <=2.0.0'],
+ ['1.0.0', '1.0.0'],
+ ['>=*', '*'],
+ ['', '*'],
+ ['*', '*'],
+ ['*', '*'],
+ ['>=1.0.0', '>=1.0.0'],
+ ['>1.0.0', '>1.0.0'],
+ ['<=2.0.0', '<=2.0.0'],
+ ['1', '>=1.0.0 <2.0.0'],
+ ['<=2.0.0', '<=2.0.0'],
+ ['<=2.0.0', '<=2.0.0'],
+ ['<2.0.0', '<2.0.0'],
+ ['<2.0.0', '<2.0.0'],
+ ['>= 1.0.0', '>=1.0.0'],
+ ['>= 1.0.0', '>=1.0.0'],
+ ['>= 1.0.0', '>=1.0.0'],
+ ['> 1.0.0', '>1.0.0'],
+ ['> 1.0.0', '>1.0.0'],
+ ['<= 2.0.0', '<=2.0.0'],
+ ['<= 2.0.0', '<=2.0.0'],
+ ['<= 2.0.0', '<=2.0.0'],
+ ['< 2.0.0', '<2.0.0'],
+ ['< 2.0.0', '<2.0.0'],
+ ['>=0.1.97', '>=0.1.97'],
+ ['>=0.1.97', '>=0.1.97'],
+ ['0.1.20 || 1.2.4', '0.1.20||1.2.4'],
+ ['>=0.2.3 || <0.0.1', '>=0.2.3||<0.0.1'],
+ ['>=0.2.3 || <0.0.1', '>=0.2.3||<0.0.1'],
+ ['>=0.2.3 || <0.0.1', '>=0.2.3||<0.0.1'],
+ ['||', '||'],
+ ['2.x.x', '>=2.0.0 <3.0.0'],
+ ['1.2.x', '>=1.2.0 <1.3.0'],
+ ['1.2.x || 2.x', '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'],
+ ['1.2.x || 2.x', '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'],
+ ['x', '*'],
+ ['2.*.*', '>=2.0.0 <3.0.0'],
+ ['1.2.*', '>=1.2.0 <1.3.0'],
+ ['1.2.* || 2.*', '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'],
+ ['*', '*'],
+ ['2', '>=2.0.0 <3.0.0'],
+ ['2.3', '>=2.3.0 <2.4.0'],
+ ['~2.4', '>=2.4.0 <2.5.0'],
+ ['~2.4', '>=2.4.0 <2.5.0'],
+ ['~>3.2.1', '>=3.2.1 <3.3.0'],
+ ['~1', '>=1.0.0 <2.0.0'],
+ ['~>1', '>=1.0.0 <2.0.0'],
+ ['~> 1', '>=1.0.0 <2.0.0'],
+ ['~1.0', '>=1.0.0 <1.1.0'],
+ ['~ 1.0', '>=1.0.0 <1.1.0'],
+ ['^0', '>=0.0.0 <1.0.0'],
+ ['^ 1', '>=1.0.0 <2.0.0'],
+ ['^0.1', '>=0.1.0 <0.2.0'],
+ ['^1.0', '>=1.0.0 <2.0.0'],
+ ['^1.2', '>=1.2.0 <2.0.0'],
+ ['^0.0.1', '>=0.0.1 <0.0.2'],
+ ['^0.0.1-beta', '>=0.0.1-beta <0.0.2'],
+ ['^0.1.2', '>=0.1.2 <0.2.0'],
+ ['^1.2.3', '>=1.2.3 <2.0.0'],
+ ['^1.2.3-beta.4', '>=1.2.3-beta.4 <2.0.0'],
+ ['<1', '<1.0.0'],
+ ['< 1', '<1.0.0'],
+ ['>=1', '>=1.0.0'],
+ ['>= 1', '>=1.0.0'],
+ ['<1.2', '<1.2.0'],
+ ['< 1.2', '<1.2.0'],
+ ['1', '>=1.0.0 <2.0.0'],
+ ['>01.02.03', '>1.2.3', true],
+ ['>01.02.03', null],
+ ['~1.2.3beta', '>=1.2.3-beta <1.3.0', true],
+ ['~1.2.3beta', null],
+ ['^ 1.2 ^ 1', '>=1.2.0 <2.0.0 >=1.0.0 <2.0.0']
+ ].forEach(function(v) {
+ var pre = v[0];
+ var wanted = v[1];
+ var loose = v[2];
+ var found = validRange(pre, loose);
+
+ t.equal(found, wanted, 'validRange(' + pre + ') === ' + wanted);
+ });
+
+ t.end();
+});
+
+test('\ncomparators test', function(t) {
+ // [range, comparators]
+ // turn range into a set of individual comparators
+ [['1.0.0 - 2.0.0', [['>=1.0.0', '<=2.0.0']]],
+ ['1.0.0', [['1.0.0']]],
+ ['>=*', [['']]],
+ ['', [['']]],
+ ['*', [['']]],
+ ['*', [['']]],
+ ['>=1.0.0', [['>=1.0.0']]],
+ ['>=1.0.0', [['>=1.0.0']]],
+ ['>=1.0.0', [['>=1.0.0']]],
+ ['>1.0.0', [['>1.0.0']]],
+ ['>1.0.0', [['>1.0.0']]],
+ ['<=2.0.0', [['<=2.0.0']]],
+ ['1', [['>=1.0.0', '<2.0.0']]],
+ ['<=2.0.0', [['<=2.0.0']]],
+ ['<=2.0.0', [['<=2.0.0']]],
+ ['<2.0.0', [['<2.0.0']]],
+ ['<2.0.0', [['<2.0.0']]],
+ ['>= 1.0.0', [['>=1.0.0']]],
+ ['>= 1.0.0', [['>=1.0.0']]],
+ ['>= 1.0.0', [['>=1.0.0']]],
+ ['> 1.0.0', [['>1.0.0']]],
+ ['> 1.0.0', [['>1.0.0']]],
+ ['<= 2.0.0', [['<=2.0.0']]],
+ ['<= 2.0.0', [['<=2.0.0']]],
+ ['<= 2.0.0', [['<=2.0.0']]],
+ ['< 2.0.0', [['<2.0.0']]],
+ ['<\t2.0.0', [['<2.0.0']]],
+ ['>=0.1.97', [['>=0.1.97']]],
+ ['>=0.1.97', [['>=0.1.97']]],
+ ['0.1.20 || 1.2.4', [['0.1.20'], ['1.2.4']]],
+ ['>=0.2.3 || <0.0.1', [['>=0.2.3'], ['<0.0.1']]],
+ ['>=0.2.3 || <0.0.1', [['>=0.2.3'], ['<0.0.1']]],
+ ['>=0.2.3 || <0.0.1', [['>=0.2.3'], ['<0.0.1']]],
+ ['||', [[''], ['']]],
+ ['2.x.x', [['>=2.0.0', '<3.0.0']]],
+ ['1.2.x', [['>=1.2.0', '<1.3.0']]],
+ ['1.2.x || 2.x', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]],
+ ['1.2.x || 2.x', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]],
+ ['x', [['']]],
+ ['2.*.*', [['>=2.0.0', '<3.0.0']]],
+ ['1.2.*', [['>=1.2.0', '<1.3.0']]],
+ ['1.2.* || 2.*', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]],
+ ['1.2.* || 2.*', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]],
+ ['*', [['']]],
+ ['2', [['>=2.0.0', '<3.0.0']]],
+ ['2.3', [['>=2.3.0', '<2.4.0']]],
+ ['~2.4', [['>=2.4.0', '<2.5.0']]],
+ ['~2.4', [['>=2.4.0', '<2.5.0']]],
+ ['~>3.2.1', [['>=3.2.1', '<3.3.0']]],
+ ['~1', [['>=1.0.0', '<2.0.0']]],
+ ['~>1', [['>=1.0.0', '<2.0.0']]],
+ ['~> 1', [['>=1.0.0', '<2.0.0']]],
+ ['~1.0', [['>=1.0.0', '<1.1.0']]],
+ ['~ 1.0', [['>=1.0.0', '<1.1.0']]],
+ ['~ 1.0.3', [['>=1.0.3', '<1.1.0']]],
+ ['~> 1.0.3', [['>=1.0.3', '<1.1.0']]],
+ ['<1', [['<1.0.0']]],
+ ['< 1', [['<1.0.0']]],
+ ['>=1', [['>=1.0.0']]],
+ ['>= 1', [['>=1.0.0']]],
+ ['<1.2', [['<1.2.0']]],
+ ['< 1.2', [['<1.2.0']]],
+ ['1', [['>=1.0.0', '<2.0.0']]],
+ ['1 2', [['>=1.0.0', '<2.0.0', '>=2.0.0', '<3.0.0']]],
+ ['1.2 - 3.4.5', [['>=1.2.0', '<=3.4.5']]],
+ ['1.2.3 - 3.4', [['>=1.2.3', '<3.5.0']]],
+ ['1.2.3 - 3', [['>=1.2.3', '<4.0.0']]],
+ ['>*', [['<0.0.0']]],
+ ['<*', [['<0.0.0']]]
+ ].forEach(function(v) {
+ var pre = v[0];
+ var wanted = v[1];
+ var found = toComparators(v[0]);
+ var jw = JSON.stringify(wanted);
+ t.equivalent(found, wanted, 'toComparators(' + pre + ') === ' + jw);
+ });
+
+ t.end();
+});
+
+test('\ninvalid version numbers', function(t) {
+ ['1.2.3.4',
+ 'NOT VALID',
+ 1.2,
+ null,
+ 'Infinity.NaN.Infinity'
+ ].forEach(function(v) {
+ t.throws(function() {
+ new SemVer(v);
+ }, {name:'TypeError', message:'Invalid Version: ' + v});
+ });
+
+ t.end();
+});
+
+test('\nstrict vs loose version numbers', function(t) {
+ [['=1.2.3', '1.2.3'],
+ ['01.02.03', '1.2.3'],
+ ['1.2.3-beta.01', '1.2.3-beta.1'],
+ [' =1.2.3', '1.2.3'],
+ ['1.2.3foo', '1.2.3-foo']
+ ].forEach(function(v) {
+ var loose = v[0];
+ var strict = v[1];
+ t.throws(function() {
+ new SemVer(loose);
+ });
+ var lv = new SemVer(loose, true);
+ t.equal(lv.version, strict);
+ t.ok(eq(loose, strict, true));
+ t.throws(function() {
+ eq(loose, strict);
+ });
+ t.throws(function() {
+ new SemVer(strict).compare(loose);
+ });
+ });
+ t.end();
+});
+
+test('\nstrict vs loose ranges', function(t) {
+ [['>=01.02.03', '>=1.2.3'],
+ ['~1.02.03beta', '>=1.2.3-beta <1.3.0']
+ ].forEach(function(v) {
+ var loose = v[0];
+ var comps = v[1];
+ t.throws(function() {
+ new Range(loose);
+ });
+ t.equal(new Range(loose, true).range, comps);
+ });
+ t.end();
+});
+
+test('\nmax satisfying', function(t) {
+ [[['1.2.3', '1.2.4'], '1.2', '1.2.4'],
+ [['1.2.4', '1.2.3'], '1.2', '1.2.4'],
+ [['1.2.3', '1.2.4', '1.2.5', '1.2.6'], '~1.2.3', '1.2.6'],
+ [['1.1.0', '1.2.0', '1.2.1', '1.3.0', '2.0.0b1', '2.0.0b2', '2.0.0b3', '2.0.0', '2.1.0'], '~2.0.0', '2.0.0', true]
+ ].forEach(function(v) {
+ var versions = v[0];
+ var range = v[1];
+ var expect = v[2];
+ var loose = v[3];
+ var actual = semver.maxSatisfying(versions, range, loose);
+ t.equal(actual, expect);
+ });
+ t.end();
+});
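The new test/index.js above drives several of semver's range helpers at once. For reference, a rough sketch of those helpers outside the test harness, assuming the module resolves as plain `semver`; every expected value is taken from the test tables above.

```javascript
// Illustrative only; assumes the module resolves as 'semver'.
var semver = require('semver');

// diff() names the release step between two versions (or null if they are equal).
console.log(semver.diff('1.2.3', '1.3.3'));   // 'minor'
console.log(semver.diff('1.0.0', '1.0.0'));   // null

// validRange() canonicalizes a range; loose mode tolerates sloppy input.
console.log(semver.validRange('~1.2.3'));           // '>=1.2.3 <1.3.0'
console.log(semver.validRange('>01.02.03', true));  // '>1.2.3' (strict parsing returns null)

// maxSatisfying() picks the highest version that matches the range.
console.log(semver.maxSatisfying(['1.2.3', '1.2.4', '1.2.5', '1.2.6'], '~1.2.3')); // '1.2.6'
```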
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/test/ltr.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/ltr.js
new file mode 100644
index 000000000..0f7167d65
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/ltr.js
@@ -0,0 +1,181 @@
+var tap = require('tap');
+var test = tap.test;
+var semver = require('../semver.js');
+var ltr = semver.ltr;
+
+test('\nltr tests', function(t) {
+ // [range, version, loose]
+ // Version should be less than range
+ [
+ ['~1.2.2', '1.2.1'],
+ ['~0.6.1-1', '0.6.1-0'],
+ ['1.0.0 - 2.0.0', '0.0.1'],
+ ['1.0.0-beta.2', '1.0.0-beta.1'],
+ ['1.0.0', '0.0.0'],
+ ['>=2.0.0', '1.1.1'],
+ ['>=2.0.0', '1.2.9'],
+ ['>2.0.0', '2.0.0'],
+ ['0.1.20 || 1.2.4', '0.1.5'],
+ ['2.x.x', '1.0.0'],
+ ['1.2.x', '1.1.0'],
+ ['1.2.x || 2.x', '1.0.0'],
+ ['2.*.*', '1.0.1'],
+ ['1.2.*', '1.1.3'],
+ ['1.2.* || 2.*', '1.1.9999'],
+ ['2', '1.0.0'],
+ ['2.3', '2.2.2'],
+ ['~2.4', '2.3.0'], // >=2.4.0 <2.5.0
+ ['~2.4', '2.3.5'],
+ ['~>3.2.1', '3.2.0'], // >=3.2.1 <3.3.0
+ ['~1', '0.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '0.2.4'],
+ ['~> 1', '0.2.3'],
+ ['~1.0', '0.1.2'], // >=1.0.0 <1.1.0
+ ['~ 1.0', '0.1.0'],
+ ['>1.2', '1.2.0'],
+ ['> 1.2', '1.2.1'],
+ ['1', '0.0.0beta', true],
+ ['~v0.5.4-pre', '0.5.4-alpha'],
+ ['~v0.5.4-pre', '0.5.4-alpha'],
+ ['=0.7.x', '0.6.0'],
+ ['=0.7.x', '0.6.0-asdf'],
+ ['>=0.7.x', '0.6.0'],
+ ['~1.2.2', '1.2.1'],
+ ['1.0.0 - 2.0.0', '0.2.3'],
+ ['1.0.0', '0.0.1'],
+ ['>=2.0.0', '1.0.0'],
+ ['>=2.0.0', '1.9999.9999'],
+ ['>=2.0.0', '1.2.9'],
+ ['>2.0.0', '2.0.0'],
+ ['>2.0.0', '1.2.9'],
+ ['2.x.x', '1.1.3'],
+ ['1.2.x', '1.1.3'],
+ ['1.2.x || 2.x', '1.1.3'],
+ ['2.*.*', '1.1.3'],
+ ['1.2.*', '1.1.3'],
+ ['1.2.* || 2.*', '1.1.3'],
+ ['2', '1.9999.9999'],
+ ['2.3', '2.2.1'],
+ ['~2.4', '2.3.0'], // >=2.4.0 <2.5.0
+ ['~>3.2.1', '2.3.2'], // >=3.2.1 <3.3.0
+ ['~1', '0.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '0.2.3'],
+ ['~1.0', '0.0.0'], // >=1.0.0 <1.1.0
+ ['>1', '1.0.0'],
+ ['2', '1.0.0beta', true],
+ ['>1', '1.0.0beta', true],
+ ['> 1', '1.0.0beta', true],
+ ['=0.7.x', '0.6.2'],
+ ['=0.7.x', '0.7.0-asdf'],
+ ['^1', '1.0.0-0'],
+ ['>=0.7.x', '0.7.0-asdf'],
+ ['1', '1.0.0beta', true],
+ ['>=0.7.x', '0.6.2'],
+ ['>1.2.3', '1.3.0-alpha']
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = 'ltr(' + version + ', ' + range + ', ' + loose + ')';
+ t.ok(ltr(version, range, loose), msg);
+ });
+ t.end();
+});
+
+test('\nnegative ltr tests', function(t) {
+ // [range, version, loose]
+ // Version should NOT be less than range
+ [
+ ['~ 1.0', '1.1.0'],
+ ['~0.6.1-1', '0.6.1-1'],
+ ['1.0.0 - 2.0.0', '1.2.3'],
+ ['1.0.0 - 2.0.0', '2.9.9'],
+ ['1.0.0', '1.0.0'],
+ ['>=*', '0.2.4'],
+ ['', '1.0.0', true],
+ ['*', '1.2.3'],
+ ['>=1.0.0', '1.0.0'],
+ ['>=1.0.0', '1.0.1'],
+ ['>=1.0.0', '1.1.0'],
+ ['>1.0.0', '1.0.1'],
+ ['>1.0.0', '1.1.0'],
+ ['<=2.0.0', '2.0.0'],
+ ['<=2.0.0', '1.9999.9999'],
+ ['<=2.0.0', '0.2.9'],
+ ['<2.0.0', '1.9999.9999'],
+ ['<2.0.0', '0.2.9'],
+ ['>= 1.0.0', '1.0.0'],
+ ['>= 1.0.0', '1.0.1'],
+ ['>= 1.0.0', '1.1.0'],
+ ['> 1.0.0', '1.0.1'],
+ ['> 1.0.0', '1.1.0'],
+ ['<= 2.0.0', '2.0.0'],
+ ['<= 2.0.0', '1.9999.9999'],
+ ['<= 2.0.0', '0.2.9'],
+ ['< 2.0.0', '1.9999.9999'],
+ ['<\t2.0.0', '0.2.9'],
+ ['>=0.1.97', 'v0.1.97'],
+ ['>=0.1.97', '0.1.97'],
+ ['0.1.20 || 1.2.4', '1.2.4'],
+ ['0.1.20 || >1.2.4', '1.2.4'],
+ ['0.1.20 || 1.2.4', '1.2.3'],
+ ['0.1.20 || 1.2.4', '0.1.20'],
+ ['>=0.2.3 || <0.0.1', '0.0.0'],
+ ['>=0.2.3 || <0.0.1', '0.2.3'],
+ ['>=0.2.3 || <0.0.1', '0.2.4'],
+ ['||', '1.3.4'],
+ ['2.x.x', '2.1.3'],
+ ['1.2.x', '1.2.3'],
+ ['1.2.x || 2.x', '2.1.3'],
+ ['1.2.x || 2.x', '1.2.3'],
+ ['x', '1.2.3'],
+ ['2.*.*', '2.1.3'],
+ ['1.2.*', '1.2.3'],
+ ['1.2.* || 2.*', '2.1.3'],
+ ['1.2.* || 2.*', '1.2.3'],
+ ['1.2.* || 2.*', '1.2.3'],
+ ['*', '1.2.3'],
+ ['2', '2.1.2'],
+ ['2.3', '2.3.1'],
+ ['~2.4', '2.4.0'], // >=2.4.0 <2.5.0
+ ['~2.4', '2.4.5'],
+ ['~>3.2.1', '3.2.2'], // >=3.2.1 <3.3.0
+ ['~1', '1.2.3'], // >=1.0.0 <2.0.0
+ ['~>1', '1.2.3'],
+ ['~> 1', '1.2.3'],
+ ['~1.0', '1.0.2'], // >=1.0.0 <1.1.0
+ ['~ 1.0', '1.0.2'],
+ ['>=1', '1.0.0'],
+ ['>= 1', '1.0.0'],
+ ['<1.2', '1.1.1'],
+ ['< 1.2', '1.1.1'],
+ ['~v0.5.4-pre', '0.5.5'],
+ ['~v0.5.4-pre', '0.5.4'],
+ ['=0.7.x', '0.7.2'],
+ ['>=0.7.x', '0.7.2'],
+ ['<=0.7.x', '0.6.2'],
+ ['>0.2.3 >0.2.4 <=0.2.5', '0.2.5'],
+ ['>=0.2.3 <=0.2.4', '0.2.4'],
+ ['1.0.0 - 2.0.0', '2.0.0'],
+ ['^3.0.0', '4.0.0'],
+ ['^1.0.0 || ~2.0.1', '2.0.0'],
+ ['^0.1.0 || ~3.0.1 || 5.0.0', '3.2.0'],
+ ['^0.1.0 || ~3.0.1 || 5.0.0', '1.0.0beta', true],
+ ['^0.1.0 || ~3.0.1 || 5.0.0', '5.0.0-0', true],
+ ['^0.1.0 || ~3.0.1 || >4 <=5.0.0', '3.5.0'],
+ ['^1.0.0alpha', '1.0.0beta', true],
+ ['~1.0.0alpha', '1.0.0beta', true],
+ ['^1.0.0-alpha', '1.0.0beta', true],
+ ['~1.0.0-alpha', '1.0.0beta', true],
+ ['^1.0.0-alpha', '1.0.0-beta'],
+ ['~1.0.0-alpha', '1.0.0-beta'],
+ ['=0.1.0', '1.0.0']
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = '!ltr(' + version + ', ' + range + ', ' + loose + ')';
+ t.notOk(ltr(version, range, loose), msg);
+ });
+ t.end();
+});
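Every ltr() case above reduces to one question: does the version sort below everything the range could possibly match? A small sketch under the same assumption that the module resolves as `semver`, with results copied from the tables above.

```javascript
// Illustrative only; assumes the module resolves as 'semver'.
var semver = require('semver');

// ltr(version, range, loose): true when the version is lower than every
// version the range can match.
console.log(semver.ltr('1.2.1', '~1.2.2'));       // true  (below all of >=1.2.2 <1.3.0)
console.log(semver.ltr('2.4.5', '~2.4'));         // false (2.4.5 satisfies >=2.4.0 <2.5.0)
console.log(semver.ltr('1.0.0beta', '2', true));  // true  (loose mode accepts '1.0.0beta')
```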
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/test/major-minor-patch.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/major-minor-patch.js
new file mode 100644
index 000000000..e9d4039c8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/major-minor-patch.js
@@ -0,0 +1,72 @@
+var tap = require('tap');
+var test = tap.test;
+var semver = require('../semver.js');
+
+test('\nmajor tests', function(t) {
+ // [range, version]
+ // Version should be detectable despite extra characters
+ [
+ ['1.2.3', 1],
+ [' 1.2.3 ', 1],
+ [' 2.2.3-4 ', 2],
+ [' 3.2.3-pre ', 3],
+ ['v5.2.3', 5],
+ [' v8.2.3 ', 8],
+ ['\t13.2.3', 13],
+ ['=21.2.3', 21, true],
+ ['v=34.2.3', 34, true]
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = 'major(' + range + ') = ' + version;
+ t.equal(semver.major(range, loose), version, msg);
+ });
+ t.end();
+});
+
+test('\nminor tests', function(t) {
+ // [range, version]
+ // Version should be detectable despite extra characters
+ [
+ ['1.1.3', 1],
+ [' 1.1.3 ', 1],
+ [' 1.2.3-4 ', 2],
+ [' 1.3.3-pre ', 3],
+ ['v1.5.3', 5],
+ [' v1.8.3 ', 8],
+ ['\t1.13.3', 13],
+ ['=1.21.3', 21, true],
+ ['v=1.34.3', 34, true]
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = 'minor(' + range + ') = ' + version;
+ t.equal(semver.minor(range, loose), version, msg);
+ });
+ t.end();
+});
+
+test('\npatch tests', function(t) {
+ // [range, version]
+ // Version should be detectable despite extra characters
+ [
+ ['1.2.1', 1],
+ [' 1.2.1 ', 1],
+ [' 1.2.2-4 ', 2],
+ [' 1.2.3-pre ', 3],
+ ['v1.2.5', 5],
+ [' v1.2.8 ', 8],
+ ['\t1.2.13', 13],
+ ['=1.2.21', 21, true],
+ ['v=1.2.34', 34, true]
+ ].forEach(function(tuple) {
+ var range = tuple[0];
+ var version = tuple[1];
+ var loose = tuple[2] || false;
+ var msg = 'patch(' + range + ') = ' + version;
+ t.equal(semver.patch(range, loose), version, msg);
+ });
+ t.end();
+});
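A sketch of the three accessors these tests cover, again assuming the module resolves as `semver`; the inputs and results mirror rows from the tables above.

```javascript
// Illustrative only; assumes the module resolves as 'semver'.
var semver = require('semver');

console.log(semver.major(' 3.2.3-pre '));   // 3  (whitespace and prerelease tags are tolerated)
console.log(semver.minor('v1.5.3'));        // 5  (a leading 'v' is tolerated)
console.log(semver.patch('=1.2.21', true)); // 21 (the '=' prefix only parses in loose mode)
```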
diff --git a/deps/npm/node_modules/semver/test/no-module.js b/deps/npm/node_modules/node-gyp/node_modules/semver/test/no-module.js
index 8b50873f1..274f63d1b 100644
--- a/deps/npm/node_modules/semver/test/no-module.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/test/no-module.js
@@ -16,4 +16,3 @@ test('no module system', function(t) {
t.ok(global.semver.satisfies('1.2.3', '1.2'));
t.end();
});
-
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
index 7fab5394c..5aa78aec3 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
@@ -56,6 +56,5 @@
"tarball": "http://registry.npmjs.org/tar/-/tar-1.0.3.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/tar/-/tar-1.0.3.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-1.0.3.tgz"
}
diff --git a/deps/npm/node_modules/node-gyp/package.json b/deps/npm/node_modules/node-gyp/package.json
index 98e08d504..2191b9c55 100644
--- a/deps/npm/node_modules/node-gyp/package.json
+++ b/deps/npm/node_modules/node-gyp/package.json
@@ -11,7 +11,7 @@
"bindings",
"gyp"
],
- "version": "2.0.1",
+ "version": "2.0.2",
"installVersion": 9,
"author": {
"name": "Nathan Rajlich",
@@ -46,17 +46,17 @@
"engines": {
"node": ">= 0.8.0"
},
- "gitHead": "0b9790ab6b885e2020e83936e402ac23c9e84726",
+ "gitHead": "f403e263b87f6a8ad130add248c90565d49427f7",
"bugs": {
"url": "https://github.com/TooTallNate/node-gyp/issues"
},
"homepage": "https://github.com/TooTallNate/node-gyp#readme",
- "_id": "node-gyp@2.0.1",
+ "_id": "node-gyp@2.0.2",
"scripts": {},
- "_shasum": "38e9c5b54df7115cd0953cee67863f839d0c7888",
- "_from": "node-gyp@>=2.0.1 <2.1.0",
- "_npmVersion": "2.9.1",
- "_nodeVersion": "0.12.3",
+ "_shasum": "6350760aaba74ba108fdc368afd8864e14b6ad91",
+ "_from": "node-gyp@latest",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.6",
"_npmUser": {
"name": "tootallnate",
"email": "nathan@tootallnate.net"
@@ -76,9 +76,9 @@
}
],
"dist": {
- "shasum": "38e9c5b54df7115cd0953cee67863f839d0c7888",
- "tarball": "http://registry.npmjs.org/node-gyp/-/node-gyp-2.0.1.tgz"
+ "shasum": "6350760aaba74ba108fdc368afd8864e14b6ad91",
+ "tarball": "http://registry.npmjs.org/node-gyp/-/node-gyp-2.0.2.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-2.0.1.tgz"
+ "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-2.0.2.tgz"
}
diff --git a/deps/npm/node_modules/nopt/.travis.yml b/deps/npm/node_modules/nopt/.travis.yml
new file mode 100644
index 000000000..99f2bbf50
--- /dev/null
+++ b/deps/npm/node_modules/nopt/.travis.yml
@@ -0,0 +1,9 @@
+language: node_js
+language: node_js
+node_js:
+ - '0.8'
+ - '0.10'
+ - '0.12'
+ - 'iojs'
+before_install:
+ - npm install -g npm@latest
diff --git a/deps/npm/node_modules/nopt/README.md b/deps/npm/node_modules/nopt/README.md
index 5aba088b5..22c50ec40 100644
--- a/deps/npm/node_modules/nopt/README.md
+++ b/deps/npm/node_modules/nopt/README.md
@@ -5,9 +5,10 @@ The Wrong Way is to sit down and write an option parser. We've all done
that.
The Right Way is to write some complex configurable program with so many
-options that you go half-insane just trying to manage them all, and put
-it off with duct-tape solutions until you see exactly to the core of the
-problem, and finally snap and write an awesome option parser.
+options that you hit the limit of your frustration just trying to
+manage them all, and defer it with duct-tape solutions until you see
+exactly to the core of the problem, and finally snap and write an
+awesome option parser.
If you want to write an option parser, don't write an option parser.
Write a package manager, or a source control system, or a service
diff --git a/deps/npm/node_modules/nopt/package.json b/deps/npm/node_modules/nopt/package.json
index 97c090b2f..c44afa1a2 100644
--- a/deps/npm/node_modules/nopt/package.json
+++ b/deps/npm/node_modules/nopt/package.json
@@ -1,6 +1,6 @@
{
"name": "nopt",
- "version": "3.0.2",
+ "version": "3.0.3",
"description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
"author": {
"name": "Isaac Z. Schlueter",
@@ -23,25 +23,25 @@
"abbrev": "1"
},
"devDependencies": {
- "tap": "~0.4.8"
+ "tap": "^1.2.0"
},
- "gitHead": "a0ff8dcbb29ae9da68769c9f782bd4d70746b02d",
+ "gitHead": "f64a64cd48d9f2660dd4e59191ff46a26397d6b1",
"bugs": {
"url": "https://github.com/isaacs/nopt/issues"
},
"homepage": "https://github.com/isaacs/nopt#readme",
- "_id": "nopt@3.0.2",
- "_shasum": "a82a87f9d8c3df140fe78fb29657a7a774403b5e",
- "_from": "nopt@>=3.0.2 <3.1.0",
- "_npmVersion": "2.10.0",
- "_nodeVersion": "2.0.1",
+ "_id": "nopt@3.0.3",
+ "_shasum": "0e9978f33016bae0b75e3748c03bbbb71da5c530",
+ "_from": "nopt@>=3.0.3 <3.1.0",
+ "_npmVersion": "2.12.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
"dist": {
- "shasum": "a82a87f9d8c3df140fe78fb29657a7a774403b5e",
- "tarball": "http://registry.npmjs.org/nopt/-/nopt-3.0.2.tgz"
+ "shasum": "0e9978f33016bae0b75e3748c03bbbb71da5c530",
+ "tarball": "http://registry.npmjs.org/nopt/-/nopt-3.0.3.tgz"
},
"maintainers": [
{
@@ -50,6 +50,5 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.2.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.3.tgz"
}
diff --git a/deps/npm/node_modules/normalize-git-url/.eslintrc b/deps/npm/node_modules/normalize-git-url/.eslintrc
deleted file mode 100644
index b54e30fd2..000000000
--- a/deps/npm/node_modules/normalize-git-url/.eslintrc
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "env" : {
- "node" : true
- },
- "rules" : {
- "semi": [2, "never"],
- "strict": 0,
- "quotes": [1, "double", "avoid-escape"],
- "no-use-before-define": 0,
- "curly": 0,
- "no-underscore-dangle": 0,
- "no-lonely-if": 1,
- "no-unused-vars": [2, {"vars" : "all", "args" : "after-used"}],
- "no-mixed-requires": 0,
- "space-infix-ops": 0,
- "key-spacing": 0,
- "no-multi-spaces": 0
- }
-}
diff --git a/deps/npm/node_modules/normalize-git-url/normalize-git-url.js b/deps/npm/node_modules/normalize-git-url/normalize-git-url.js
index 766203794..db0022ac3 100644
--- a/deps/npm/node_modules/normalize-git-url/normalize-git-url.js
+++ b/deps/npm/node_modules/normalize-git-url/normalize-git-url.js
@@ -1,7 +1,10 @@
-var url = require("url")
+var url = require('url')
module.exports = function normalize (u) {
- var parsed = url.parse(u, true)
+ var parsed = url.parse(u)
+ // If parsing actually alters the URL, it is almost certainly an
+ // scp-style URL, or an invalid one.
+ var altered = u !== url.format(parsed)
// git is so tricky!
// if the path is like ssh://foo:22/some/path then it works, but
@@ -9,19 +12,27 @@ module.exports = function normalize (u) {
// If the path is like ssh://foo:some/path then it works, but
// only if you remove the ssh://
if (parsed.protocol) {
- parsed.protocol = parsed.protocol.replace(/^git\+/, "")
-
- // ssh paths that are scp-style urls don't need the ssh://
- parsed.pathname = parsed.pathname.replace(/^\/?:/, "/")
+ parsed.protocol = parsed.protocol.replace(/^git\+/, '')
}
// figure out what we should check out.
- var checkout = parsed.hash && parsed.hash.substr(1) || "master"
- parsed.hash = ""
+ var checkout = parsed.hash && parsed.hash.substr(1) || 'master'
+ parsed.hash = ''
+
+ var returnedUrl
+ if (altered) {
+ if (u.match(/^git\+https?/) && parsed.pathname.match(/\/?:[^0-9]/)) {
+ returnedUrl = u.replace(/^git\+(.*:[^:]+):(.*)/, '$1/$2')
+ } else {
+ returnedUrl = u.replace(/^(?:git\+)?ssh:\/\//, '')
+ }
+ returnedUrl = returnedUrl.replace(/#[^#]*$/, '')
+ } else {
+ returnedUrl = url.format(parsed)
+ }
- u = url.format(parsed)
return {
- url : u,
- branch : checkout
+ url: returnedUrl,
+ branch: checkout
}
}
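The rewritten normalizer above changes what comes back for scp-style addresses. A sketch of the intended behaviour, assuming the package is required as `normalize-git-url`; the expected objects are copied from the updated test/basic.js later in this patch.

```javascript
// Illustrative only; expected values come from the updated test/basic.js.
var normalize = require('normalize-git-url');

// scp-style 'host:path' addresses now keep that form (minus the git+ssh:// prefix)...
console.log(normalize('git+ssh://git@github.com:npm/npm.git#v1.0.27'));
// => { url: 'git@github.com:npm/npm.git', branch: 'v1.0.27' }

// ...while true ssh:// URLs (slash after the host) are still normalized.
console.log(normalize('git+ssh://git@github.com/org/repo#dev'));
// => { url: 'ssh://git@github.com/org/repo', branch: 'dev' }

// No fragment means the branch defaults to 'master'.
console.log(normalize('git+https://github.com/KenanY/node-uuid'));
// => { url: 'https://github.com/KenanY/node-uuid', branch: 'master' }
```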
diff --git a/deps/npm/node_modules/normalize-git-url/package.json b/deps/npm/node_modules/normalize-git-url/package.json
index 6008db675..b2dc3d910 100644
--- a/deps/npm/node_modules/normalize-git-url/package.json
+++ b/deps/npm/node_modules/normalize-git-url/package.json
@@ -1,6 +1,6 @@
{
"name": "normalize-git-url",
- "version": "1.0.1",
+ "version": "3.0.1",
"description": "Normalizes Git URLs. For npm, but you can use it too.",
"main": "normalize-git-url.js",
"directories": {
@@ -35,8 +35,8 @@
"homepage": "https://github.com/npm/normalize-git-url",
"readme": "# normalize-git-url\n\nYou have a bunch of Git URLs. You want to convert them to a canonical\nrepresentation, probably for use inside npm so that it doesn't end up creating\na bunch of superfluous cached origins. You use this package.\n\n## Usage\n\n```javascript\nvar ngu = require('normalize-git-url');\nvar normalized = ngu(\"git+ssh://git@github.com:organization/repo.git#hashbrowns\")\n// get back:\n// {\n// url : \"ssh://git@github.com/organization/repo.git\",\n// branch : \"hashbrowns\" // did u know hashbrowns are delicious?\n// }\n```\n\n## API\n\nThere's just the one function, and all it takes is a single parameter, a non-normalized Git URL.\n\n### normalizeGitUrl(url)\n\n* `url` {String} The Git URL (very loosely speaking) to be normalized.\n\nReturns an object with the following format:\n\n* `url` {String} The normalized URL.\n* `branch` {String} The treeish to be checked out once the repo at `url` is\n cloned. It doesn't have to be a branch, but it's a lot easier to intuit what\n the output is for with that name.\n\n## Limitations\n\nRight now this doesn't try to special-case GitHub too much -- it doesn't ensure\nthat `.git` is added to the end of URLs, it doesn't prefer `https:` over\n`http:` or `ssh:`, it doesn't deal with redirects, and it doesn't try to\nresolve symbolic names to treeish hashcodes. For now, it just tries to account\nfor minor differences in representation.\n",
"readmeFilename": "README.md",
- "gitHead": "d87bf42e845ed664e4a8bab3490052fb44c90433",
- "_id": "normalize-git-url@1.0.1",
- "_shasum": "1b561345d66e3a3bc5513a5ace85f155ca42613e",
- "_from": "normalize-git-url@>=1.0.1 <1.1.0"
+ "gitHead": "8393cd4345e404eb6ad2ff6853dcc8287807ca22",
+ "_id": "normalize-git-url@3.0.1",
+ "_shasum": "d40d419d05a15870271e50534dbb7b8ccd9b0a5c",
+ "_from": "normalize-git-url@latest"
}
diff --git a/deps/npm/node_modules/normalize-git-url/test/basic.js b/deps/npm/node_modules/normalize-git-url/test/basic.js
index f8f199f65..37952d651 100644
--- a/deps/npm/node_modules/normalize-git-url/test/basic.js
+++ b/deps/npm/node_modules/normalize-git-url/test/basic.js
@@ -1,55 +1,63 @@
-var test = require("tap").test
+var test = require('tap').test
-var normalize = require("../normalize-git-url.js")
+var normalize = require('../normalize-git-url.js')
-test("basic normalization tests", function (t) {
+test('basic normalization tests', function (t) {
t.same(
- normalize("git+ssh://user@hostname:project.git#commit-ish"),
- { url : "ssh://user@hostname/project.git", branch : "commit-ish" }
+ normalize('git+ssh://user@hostname:project.git#commit-ish'),
+ { url: 'user@hostname:project.git', branch: 'commit-ish' }
)
t.same(
- normalize("git+http://user@hostname/project/blah.git#commit-ish"),
- { url : "http://user@hostname/project/blah.git", branch : "commit-ish" }
+ normalize('git+http://user@hostname/project/blah.git#commit-ish'),
+ { url: 'http://user@hostname/project/blah.git', branch: 'commit-ish' }
)
t.same(
- normalize("git+https://user@hostname/project/blah.git#commit-ish"),
- { url : "https://user@hostname/project/blah.git", branch : "commit-ish" }
+ normalize('git+https://user@hostname/project/blah.git#commit-ish'),
+ { url: 'https://user@hostname/project/blah.git', branch: 'commit-ish' }
)
t.same(
- normalize("git+ssh://git@github.com:npm/npm.git#v1.0.27"),
- { url : "ssh://git@github.com/npm/npm.git", branch : "v1.0.27" }
+ normalize('git+https://user@hostname:project/blah.git#commit-ish'),
+ { url: 'https://user@hostname/project/blah.git', branch: 'commit-ish' }
)
t.same(
- normalize("git+ssh://git@github.com:org/repo#dev"),
- { url : "ssh://git@github.com/org/repo", branch : "dev" }
+ normalize('git+ssh://git@github.com:npm/npm.git#v1.0.27'),
+ { url: 'git@github.com:npm/npm.git', branch: 'v1.0.27' }
)
t.same(
- normalize("git+ssh://git@github.com/org/repo#dev"),
- { url : "ssh://git@github.com/org/repo", branch : "dev" }
+ normalize('git+ssh://git@github.com:/npm/npm.git#v1.0.28'),
+ { url: 'git@github.com:/npm/npm.git', branch: 'v1.0.28' }
)
t.same(
- normalize("git+ssh://foo:22/some/path"),
- { url : "ssh://foo:22/some/path", branch : "master" }
+ normalize('git+ssh://git@github.com:org/repo#dev'),
+ { url: 'git@github.com:org/repo', branch: 'dev' }
)
t.same(
- normalize("git@github.com:org/repo#dev"),
- { url : "git@github.com:org/repo", branch : "dev" }
+ normalize('git+ssh://git@github.com/org/repo#dev'),
+ { url: 'ssh://git@github.com/org/repo', branch: 'dev' }
)
t.same(
- normalize("git+https://github.com/KenanY/node-uuid"),
- { url : "https://github.com/KenanY/node-uuid", branch : "master" }
+ normalize('git+ssh://foo:22/some/path'),
+ { url: 'ssh://foo:22/some/path', branch: 'master' }
)
t.same(
- normalize("git+https://github.com/KenanY/node-uuid#7a018f2d075b03a73409e8356f9b29c9ad4ea2c5"),
- { url : "https://github.com/KenanY/node-uuid", branch : "7a018f2d075b03a73409e8356f9b29c9ad4ea2c5" }
+ normalize('git@github.com:org/repo#dev'),
+ { url: 'git@github.com:org/repo', branch: 'dev' }
)
t.same(
- normalize("git+ssh://git@git.example.com:b/b.git#v1.0.0"),
- { url : "ssh://git@git.example.com/b/b.git", branch : "v1.0.0" }
+ normalize('git+https://github.com/KenanY/node-uuid'),
+ { url: 'https://github.com/KenanY/node-uuid', branch: 'master' }
)
t.same(
- normalize("git+ssh://git@github.com:npm/npm-proto.git#othiym23/organized"),
- { url : "ssh://git@github.com/npm/npm-proto.git", branch : "othiym23/organized" }
+ normalize('git+https://github.com/KenanY/node-uuid#7a018f2d075b03a73409e8356f9b29c9ad4ea2c5'),
+ { url: 'https://github.com/KenanY/node-uuid', branch: '7a018f2d075b03a73409e8356f9b29c9ad4ea2c5' }
+ )
+ t.same(
+ normalize('git+ssh://git@git.example.com:b/b.git#v1.0.0'),
+ { url: 'git@git.example.com:b/b.git', branch: 'v1.0.0' }
+ )
+ t.same(
+ normalize('git+ssh://git@github.com:npm/npm-proto.git#othiym23/organized'),
+ { url: 'git@github.com:npm/npm-proto.git', branch: 'othiym23/organized' }
)
t.end()
diff --git a/deps/npm/node_modules/normalize-package-data/README.md b/deps/npm/node_modules/normalize-package-data/README.md
index 0b9d7b5b4..da4d9f651 100644
--- a/deps/npm/node_modules/normalize-package-data/README.md
+++ b/deps/npm/node_modules/normalize-package-data/README.md
@@ -95,7 +95,7 @@ If `version` field is given, the value of the version field must be a valid *sem
### Rules for license field
-The `license` field should be a valid *SDPDX license expression* string, as determined by the `spdx.valid` method. See [documentation for the spdx module](https://github.com/kemitchell/spdx.js).
+The `license` field should be a valid *SDPDX license expression* or one of the special values allowed by [validate-npm-package-license](https://npmjs.com/packages/validate-npm-package-license). See [documentation for the license field in package.json](https://docs.npmjs.com/files/package.json#license).
## Credits
diff --git a/deps/npm/node_modules/normalize-package-data/lib/fixer.js b/deps/npm/node_modules/normalize-package-data/lib/fixer.js
index 3d9380b6e..c96f0bcc4 100644
--- a/deps/npm/node_modules/normalize-package-data/lib/fixer.js
+++ b/deps/npm/node_modules/normalize-package-data/lib/fixer.js
@@ -1,5 +1,5 @@
var semver = require("semver")
-var spdx = require('spdx');
+var validateLicense = require('validate-npm-package-license');
var hostedGitInfo = require("hosted-git-info")
var depTypes = ["dependencies","devDependencies","optionalDependencies"]
var extractDescription = require("./extract_description")
@@ -292,12 +292,16 @@ var fixer = module.exports = {
, fixLicenseField: function(data) {
if (!data.license) {
return this.warn("missingLicense")
- } else if (
- typeof(data.license) !== 'string' ||
- data.license.length < 1 ||
- !spdx.valid(data.license)
- ) {
- this.warn("nonSPDXLicense")
+ } else{
+ if (
+ typeof(data.license) !== 'string' ||
+ data.license.length < 1
+ ) {
+ this.warn("invalidLicense")
+ } else {
+ if (!validateLicense(data.license).validForNewPackages)
+ this.warn("invalidLicense")
+ }
}
}
}
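fixLicenseField now defers to validate-npm-package-license instead of spdx.valid. A sketch of that check in isolation; the sample inputs and the result shape beyond `validForNewPackages` are assumptions here, not something this hunk spells out.

```javascript
// Illustrative only; result fields other than validForNewPackages are assumed.
var validateLicense = require('validate-npm-package-license');

console.log(validateLicense('MIT').validForNewPackages);                 // true, plain SPDX id
console.log(validateLicense('(MIT OR Apache-2.0)').validForNewPackages); // true, SPDX expression
console.log(validateLicense('some random text').validForNewPackages);    // false -> "invalidLicense" warning
```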
diff --git a/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json b/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json
index 3bfce72cd..5fd19f322 100644
--- a/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json
+++ b/deps/npm/node_modules/normalize-package-data/lib/warning_messages.json
@@ -25,7 +25,7 @@
,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
,"nonUrlHomepage": "homepage field must be a string url. Deleted."
- ,"nonSPDXLicense": "license should be a valid SPDX license expression"
+ ,"invalidLicense": "license should be a valid SPDX license expression"
,"missingProtocolHomepage": "homepage field must start with a protocol."
,"typo": "%s should probably be %s."
}
diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json
index 44ca4d586..0ea7c5418 100644
--- a/deps/npm/node_modules/normalize-package-data/package.json
+++ b/deps/npm/node_modules/normalize-package-data/package.json
@@ -1,6 +1,6 @@
{
"name": "normalize-package-data",
- "version": "2.2.1",
+ "version": "2.3.1",
"author": {
"name": "Meryn Stol",
"email": "merynstol@gmail.com"
@@ -17,8 +17,8 @@
},
"dependencies": {
"hosted-git-info": "^2.0.2",
- "semver": "2 || 3 || 4",
- "spdx": "^0.4.0"
+ "semver": "2 || 3 || 4 || 5",
+ "validate-npm-package-license": "^2.0.0"
},
"devDependencies": {
"async": "~0.9.0",
@@ -39,14 +39,14 @@
"email": "rok@kowalski.gd"
}
],
- "readme": "# normalize-package-data [![Build Status](https://travis-ci.org/npm/normalize-package-data.png?branch=master)](https://travis-ci.org/npm/normalize-package-data)\n\nnormalize-package data exports a function that normalizes package metadata. This data is typically found in a package.json file, but in principle could come from any source - for example the npm registry.\n\nnormalize-package-data is used by [read-package-json](https://npmjs.org/package/read-package-json) to normalize the data it reads from a package.json file. In turn, read-package-json is used by [npm](https://npmjs.org/package/npm) and various npm-related tools.\n\n## Installation\n\n```\nnpm install normalize-package-data\n```\n\n## Usage\n\nBasic usage is really simple. You call the function that normalize-package-data exports. Let's call it `normalizeData`.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nnormalizeData(packageData)\n// packageData is now normalized\n```\n\n#### Strict mode\n\nYou may activate strict validation by passing true as the second argument.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nwarnFn = function(msg) { console.error(msg) }\nnormalizeData(packageData, true)\n// packageData is now normalized\n```\n\nIf strict mode is activated, only Semver 2.0 version strings are accepted. Otherwise, Semver 1.0 strings are accepted as well. Packages must have a name, and the name field must not have contain leading or trailing whitespace.\n\n#### Warnings\n\nOptionally, you may pass a \"warning\" function. It gets called whenever the `normalizeData` function encounters something that doesn't look right. It indicates less than perfect input data.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nwarnFn = function(msg) { console.error(msg) }\nnormalizeData(packageData, warnFn)\n// packageData is now normalized. Any number of warnings may have been logged.\n```\n\nYou may combine strict validation with warnings by passing `true` as the second argument, and `warnFn` as third.\n\nWhen `private` field is set to `true`, warnings will be suppressed.\n\n### Potential exceptions\n\nIf the supplied data has an invalid name or version vield, `normalizeData` will throw an error. Depending on where you call `normalizeData`, you may want to catch these errors so can pass them to a callback.\n\n## What normalization (currently) entails\n\n* The value of `name` field gets trimmed (unless in strict mode).\n* The value of the `version` field gets cleaned by `semver.clean`. See [documentation for the semver module](https://github.com/isaacs/node-semver).\n* If `name` and/or `version` fields are missing, they are set to empty strings.\n* If `files` field is not an array, it will be removed.\n* If `bin` field is a string, then `bin` field will become an object with `name` set to the value of the `name` field, and `bin` set to the original string value.\n* If `man` field is a string, it will become an array with the original string as its sole member.\n* If `keywords` field is string, it is considered to be a list of keywords separated by one or more white-space characters. 
It gets converted to an array by splitting on `\\s+`.\n* All people fields (`author`, `maintainers`, `contributors`) get converted into objects with name, email and url properties.\n* If `bundledDependencies` field (a typo) exists and `bundleDependencies` field does not, `bundledDependencies` will get renamed to `bundleDependencies`.\n* If the value of any of the dependencies fields (`dependencies`, `devDependencies`, `optionalDependencies`) is a string, it gets converted into an object with familiar `name=>value` pairs.\n* The values in `optionalDependencies` get added to `dependencies`. The `optionalDependencies` array is left untouched.\n* As of v2: Dependencies that point at known hosted git providers (currently: github, bitbucket, gitlab) will have their URLs canonicalized, but protocols will be preserved.\n* As of v2: Dependencies that use shortcuts for hosted git providers (`org/proj`, `github:org/proj`, `bitbucket:org/proj`, `gitlab:org/proj`, `gist:docid`) will have the shortcut left in place. (In the case of github, the `org/proj` form will be expanded to `github:org/proj`.) THIS MARKS A BREAKING CHANGE FROM V1, where the shorcut was previously expanded to a URL.\n* If `description` field does not exist, but `readme` field does, then (more or less) the first paragraph of text that's found in the readme is taken as value for `description`.\n* If `repository` field is a string, it will become an object with `url` set to the original string value, and `type` set to `\"git\"`.\n* If `repository.url` is not a valid url, but in the style of \"[owner-name]/[repo-name]\", `repository.url` will be set to https://github.com/[owner-name]/[repo-name]\n* If `bugs` field is a string, the value of `bugs` field is changed into an object with `url` set to the original string value.\n* If `bugs` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `bugs` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/issues . If the repository field points to a GitHub Gist repo url, the associated http url is chosen.\n* If `bugs` field is an object, the resulting value only has email and url properties. If email and url properties are not strings, they are ignored. If no valid values for either email or url is found, bugs field will be removed.\n* If `homepage` field is not a string, it will be removed.\n* If the url in the `homepage` field does not specify a protocol, then http is assumed. For example, `myproject.org` will be changed to `http://myproject.org`.\n* If `homepage` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `homepage` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/ . If the repository field points to a GitHub Gist repo url, the associated http url is chosen.\n\n### Rules for name field\n\nIf `name` field is given, the value of the name field must be a string. The string may not:\n\n* start with a period.\n* contain the following characters: `/@\\s+%`\n* contain and characters that would need to be encoded for use in urls.\n* resemble the word `node_modules` or `favicon.ico` (case doesn't matter).\n\n### Rules for version field\n\nIf `version` field is given, the value of the version field must be a valid *semver* string, as determined by the `semver.valid` method. 
See [documentation for the semver module](https://github.com/isaacs/node-semver).\n\n### Rules for license field\n\nThe `license` field should be a valid *SDPDX license expression* string, as determined by the `spdx.valid` method. See [documentation for the spdx module](https://github.com/kemitchell/spdx.js).\n\n## Credits\n\nThis package contains code based on read-package-json written by Isaac Z. Schlueter. Used with permisson.\n\n## License\n\nnormalize-package-data is released under the [BSD 2-Clause License](http://opensource.org/licenses/MIT). \nCopyright (c) 2013 Meryn Stol \n",
+ "readme": "# normalize-package-data [![Build Status](https://travis-ci.org/npm/normalize-package-data.png?branch=master)](https://travis-ci.org/npm/normalize-package-data)\n\nnormalize-package data exports a function that normalizes package metadata. This data is typically found in a package.json file, but in principle could come from any source - for example the npm registry.\n\nnormalize-package-data is used by [read-package-json](https://npmjs.org/package/read-package-json) to normalize the data it reads from a package.json file. In turn, read-package-json is used by [npm](https://npmjs.org/package/npm) and various npm-related tools.\n\n## Installation\n\n```\nnpm install normalize-package-data\n```\n\n## Usage\n\nBasic usage is really simple. You call the function that normalize-package-data exports. Let's call it `normalizeData`.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nnormalizeData(packageData)\n// packageData is now normalized\n```\n\n#### Strict mode\n\nYou may activate strict validation by passing true as the second argument.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nwarnFn = function(msg) { console.error(msg) }\nnormalizeData(packageData, true)\n// packageData is now normalized\n```\n\nIf strict mode is activated, only Semver 2.0 version strings are accepted. Otherwise, Semver 1.0 strings are accepted as well. Packages must have a name, and the name field must not have contain leading or trailing whitespace.\n\n#### Warnings\n\nOptionally, you may pass a \"warning\" function. It gets called whenever the `normalizeData` function encounters something that doesn't look right. It indicates less than perfect input data.\n\n```javascript\nnormalizeData = require('normalize-package-data')\npackageData = fs.readFileSync(\"package.json\")\nwarnFn = function(msg) { console.error(msg) }\nnormalizeData(packageData, warnFn)\n// packageData is now normalized. Any number of warnings may have been logged.\n```\n\nYou may combine strict validation with warnings by passing `true` as the second argument, and `warnFn` as third.\n\nWhen `private` field is set to `true`, warnings will be suppressed.\n\n### Potential exceptions\n\nIf the supplied data has an invalid name or version vield, `normalizeData` will throw an error. Depending on where you call `normalizeData`, you may want to catch these errors so can pass them to a callback.\n\n## What normalization (currently) entails\n\n* The value of `name` field gets trimmed (unless in strict mode).\n* The value of the `version` field gets cleaned by `semver.clean`. See [documentation for the semver module](https://github.com/isaacs/node-semver).\n* If `name` and/or `version` fields are missing, they are set to empty strings.\n* If `files` field is not an array, it will be removed.\n* If `bin` field is a string, then `bin` field will become an object with `name` set to the value of the `name` field, and `bin` set to the original string value.\n* If `man` field is a string, it will become an array with the original string as its sole member.\n* If `keywords` field is string, it is considered to be a list of keywords separated by one or more white-space characters. 
It gets converted to an array by splitting on `\\s+`.\n* All people fields (`author`, `maintainers`, `contributors`) get converted into objects with name, email and url properties.\n* If `bundledDependencies` field (a typo) exists and `bundleDependencies` field does not, `bundledDependencies` will get renamed to `bundleDependencies`.\n* If the value of any of the dependencies fields (`dependencies`, `devDependencies`, `optionalDependencies`) is a string, it gets converted into an object with familiar `name=>value` pairs.\n* The values in `optionalDependencies` get added to `dependencies`. The `optionalDependencies` array is left untouched.\n* As of v2: Dependencies that point at known hosted git providers (currently: github, bitbucket, gitlab) will have their URLs canonicalized, but protocols will be preserved.\n* As of v2: Dependencies that use shortcuts for hosted git providers (`org/proj`, `github:org/proj`, `bitbucket:org/proj`, `gitlab:org/proj`, `gist:docid`) will have the shortcut left in place. (In the case of github, the `org/proj` form will be expanded to `github:org/proj`.) THIS MARKS A BREAKING CHANGE FROM V1, where the shorcut was previously expanded to a URL.\n* If `description` field does not exist, but `readme` field does, then (more or less) the first paragraph of text that's found in the readme is taken as value for `description`.\n* If `repository` field is a string, it will become an object with `url` set to the original string value, and `type` set to `\"git\"`.\n* If `repository.url` is not a valid url, but in the style of \"[owner-name]/[repo-name]\", `repository.url` will be set to https://github.com/[owner-name]/[repo-name]\n* If `bugs` field is a string, the value of `bugs` field is changed into an object with `url` set to the original string value.\n* If `bugs` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `bugs` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/issues . If the repository field points to a GitHub Gist repo url, the associated http url is chosen.\n* If `bugs` field is an object, the resulting value only has email and url properties. If email and url properties are not strings, they are ignored. If no valid values for either email or url is found, bugs field will be removed.\n* If `homepage` field is not a string, it will be removed.\n* If the url in the `homepage` field does not specify a protocol, then http is assumed. For example, `myproject.org` will be changed to `http://myproject.org`.\n* If `homepage` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `homepage` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/ . If the repository field points to a GitHub Gist repo url, the associated http url is chosen.\n\n### Rules for name field\n\nIf `name` field is given, the value of the name field must be a string. The string may not:\n\n* start with a period.\n* contain the following characters: `/@\\s+%`\n* contain and characters that would need to be encoded for use in urls.\n* resemble the word `node_modules` or `favicon.ico` (case doesn't matter).\n\n### Rules for version field\n\nIf `version` field is given, the value of the version field must be a valid *semver* string, as determined by the `semver.valid` method. 
See [documentation for the semver module](https://github.com/isaacs/node-semver).\n\n### Rules for license field\n\nThe `license` field should be a valid *SDPDX license expression* or one of the special values allowed by [validate-npm-package-license](https://npmjs.com/packages/validate-npm-package-license). See [documentation for the license field in package.json](https://docs.npmjs.com/files/package.json#license).\n\n## Credits\n\nThis package contains code based on read-package-json written by Isaac Z. Schlueter. Used with permisson.\n\n## License\n\nnormalize-package-data is released under the [BSD 2-Clause License](http://opensource.org/licenses/MIT). \nCopyright (c) 2013 Meryn Stol \n",
"readmeFilename": "README.md",
- "gitHead": "e319259b7ebac34c470ae0dc55610befa711dbb6",
+ "gitHead": "43b844bd37aac28d204be7567b731d9c55025871",
"bugs": {
"url": "https://github.com/npm/normalize-package-data/issues"
},
"homepage": "https://github.com/npm/normalize-package-data#readme",
- "_id": "normalize-package-data@2.2.1",
- "_shasum": "0e00ebd73ac065eb19092fd313531dd291a0e045",
- "_from": "normalize-package-data@latest"
+ "_id": "normalize-package-data@2.3.1",
+ "_shasum": "e2d24a5ab38a90b22cd697753407d8564f49a890",
+ "_from": "normalize-package-data@>=2.3.1 <2.4.0"
}
diff --git a/deps/npm/node_modules/normalize-package-data/test/normalize.js b/deps/npm/node_modules/normalize-package-data/test/normalize.js
index 3dce3c057..de6f1ce5b 100644
--- a/deps/npm/node_modules/normalize-package-data/test/normalize.js
+++ b/deps/npm/node_modules/normalize-package-data/test/normalize.js
@@ -160,7 +160,7 @@ tap.test("license field should be a valid SPDX expression", function(t) {
[ warningMessages.missingDescription,
warningMessages.missingRepository,
warningMessages.missingReadme,
- warningMessages.nonSPDXLicense]
+ warningMessages.invalidLicense]
t.same(warnings, expect)
t.end()
})
diff --git a/deps/npm/node_modules/npm-cache-filename/index.js b/deps/npm/node_modules/npm-cache-filename/index.js
index 9fd2488d4..50d21792b 100644
--- a/deps/npm/node_modules/npm-cache-filename/index.js
+++ b/deps/npm/node_modules/npm-cache-filename/index.js
@@ -10,8 +10,12 @@ function cf(root, u) {
u = url.parse(u);;
var h = u.host.replace(/:/g, '_');;
// Strip off any /-rev/... or ?rev=... bits
- var revre = /(\?rev=|\?.*?&rev=|\/-rev\/).*$/
- var parts = u.path.replace(revre, '').split('/').slice(1)
+ var revre = /(\?rev=|\?.*?&rev=|\/-rev\/).*$/;;
+ var parts = u.path.replace(revre, '').split('/').slice(1);;
+ // Make sure different git references get different folders
+ if (u.hash && u.hash.length > 1) {
+ parts.push(u.hash.slice(1));;
+ };;
var p = [root, h].concat(parts.map(function(part) {
return encodeURIComponent(part).replace(/%/g, '_');;
}));;
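The new branch above exists so that two URLs differing only in their git reference land in different cache folders. A sketch of the effect; the second expectation comes from the new test case later in this patch, while the first and third are extrapolated from the code above.

```javascript
// Illustrative only; second result mirrors the new test, others are extrapolated.
var cf = require('npm-cache-filename');

console.log(cf('/tmp', 'https://foo:134/xyz?rev=baz'));
// => '/tmp/foo_134/xyz'                     (rev markers are still stripped)
console.log(cf('/tmp', 'git://foo:134/xyz-rev/baz.git#master'));
// => '/tmp/foo_134/xyz-rev/baz.git/master'  (the git ref becomes a path segment)
console.log(cf('/tmp', 'git://foo:134/xyz-rev/baz.git#v1.0.0'));
// => '/tmp/foo_134/xyz-rev/baz.git/v1.0.0'  (so different refs no longer collide)
```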
diff --git a/deps/npm/node_modules/npm-cache-filename/package.json b/deps/npm/node_modules/npm-cache-filename/package.json
index 979450bc6..b2431b96c 100644
--- a/deps/npm/node_modules/npm-cache-filename/package.json
+++ b/deps/npm/node_modules/npm-cache-filename/package.json
@@ -1,18 +1,18 @@
{
"name": "npm-cache-filename",
- "version": "1.0.1",
+ "version": "1.0.2",
"description": "Given a cache folder and url, return the appropriate cache folder.",
"main": "index.js",
"dependencies": {},
"devDependencies": {
- "tap": "^0.4.10"
+ "tap": "^1.2.0"
},
"scripts": {
"test": "tap test.js"
},
"repository": {
"type": "git",
- "url": "git://github.com/npm/npm-cache-filename"
+ "url": "git://github.com/npm/npm-cache-filename.git"
},
"author": {
"name": "Isaac Z. Schlueter",
@@ -24,10 +24,34 @@
"url": "https://github.com/npm/npm-cache-filename/issues"
},
"homepage": "https://github.com/npm/npm-cache-filename",
- "readme": "# npm-cache-filename\n\nGiven a cache folder and url, return the appropriate cache folder.\n\n## USAGE\n\n```javascript\nvar cf = require('npm-cache-filename');\nconsole.log(cf('/tmp/cache', 'https://registry.npmjs.org:1234/foo/bar'));\n// outputs: /tmp/cache/registry.npmjs.org_1234/foo/bar\n```\n\nAs a bonus, you can also bind it to a specific root path:\n\n```javascript\nvar cf = require('npm-cache-filename');\nvar getFile = cf('/tmp/cache');\n\nconsole.log(getFile('https://registry.npmjs.org:1234/foo/bar'));\n// outputs: /tmp/cache/registry.npmjs.org_1234/foo/bar\n```\n",
- "readmeFilename": "README.md",
- "_id": "npm-cache-filename@1.0.1",
- "_shasum": "9b640f0c1a5ba1145659685372a9ff71f70c4323",
- "_from": "npm-cache-filename@latest",
- "_resolved": "https://registry.npmjs.org/npm-cache-filename/-/npm-cache-filename-1.0.1.tgz"
+ "gitHead": "b7eef12919fdf544a3b83bba73093f7268c40c1e",
+ "_id": "npm-cache-filename@1.0.2",
+ "_shasum": "ded306c5b0bfc870a9e9faf823bc5f283e05ae11",
+ "_from": "npm-cache-filename@1.0.2",
+ "_npmVersion": "2.12.1",
+ "_nodeVersion": "2.2.2",
+ "_npmUser": {
+ "name": "zkat",
+ "email": "kat@sykosomatic.org"
+ },
+ "dist": {
+ "shasum": "ded306c5b0bfc870a9e9faf823bc5f283e05ae11",
+ "tarball": "http://registry.npmjs.org/npm-cache-filename/-/npm-cache-filename-1.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "kat",
+ "email": "kat@lua.cz"
+ },
+ {
+ "name": "zkat",
+ "email": "kat@sykosomatic.org"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/npm-cache-filename/-/npm-cache-filename-1.0.2.tgz"
}
diff --git a/deps/npm/node_modules/npm-cache-filename/test.js b/deps/npm/node_modules/npm-cache-filename/test.js
index 5faecceb4..bdca80b50 100644
--- a/deps/npm/node_modules/npm-cache-filename/test.js
+++ b/deps/npm/node_modules/npm-cache-filename/test.js
@@ -17,5 +17,7 @@ test('it does the thing it says it does', function(t) {
'/tmp/foo_134/xyz')
t.equal(cf("/tmp", "https://foo:134/xyz-rev/baz"),
'/tmp/foo_134/xyz-rev/baz')
+ t.equal(cf("/tmp", "git://foo:134/xyz-rev/baz.git#master"),
+ '/tmp/foo_134/xyz-rev/baz.git/master')
t.end();
});;
diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json
index cc9f206d1..3e7203c6f 100644
--- a/deps/npm/node_modules/npm-install-checks/package.json
+++ b/deps/npm/node_modules/npm-install-checks/package.json
@@ -1,16 +1,16 @@
{
"name": "npm-install-checks",
- "version": "1.0.5",
+ "version": "1.0.6",
"description": "checks that npm runs during the installation of a module",
"main": "index.js",
"dependencies": {
"npmlog": "0.1 || 1",
- "semver": "^2.3.0 || 3.x || 4"
+ "semver": "^2.3.0 || 3.x || 4 || 5"
},
"devDependencies": {
- "tap": "~0.4.8",
+ "mkdirp": "~0.3.5",
"rimraf": "~2.2.5",
- "mkdirp": "~0.3.5"
+ "tap": "^1.2.0"
},
"scripts": {
"test": "tap test/*.js"
@@ -32,34 +32,10 @@
"bugs": {
"url": "https://github.com/npm/npm-install-checks/issues"
},
- "gitHead": "c36e052a0a54ad82932689fa86fd59197277f80d",
- "_id": "npm-install-checks@1.0.5",
- "_shasum": "a1b5beabfd60e0535b14f763157c410cb6bdae56",
- "_from": "npm-install-checks@>=1.0.5 <1.1.0",
- "_npmVersion": "2.1.11",
- "_nodeVersion": "0.10.33",
- "_npmUser": {
- "name": "iarna",
- "email": "me@re-becca.org"
- },
- "maintainers": [
- {
- "name": "robertkowalski",
- "email": "rok@kowalski.gd"
- },
- {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- {
- "name": "iarna",
- "email": "me@re-becca.org"
- }
- ],
- "dist": {
- "shasum": "a1b5beabfd60e0535b14f763157c410cb6bdae56",
- "tarball": "http://registry.npmjs.org/npm-install-checks/-/npm-install-checks-1.0.5.tgz"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-1.0.5.tgz"
+ "readme": "# npm-install-checks\n\nA package that contains checks that npm runs during the installation.\n\n## API\n\n### .checkEngine(target, npmVer, nodeVer, force, strict, cb)\nCheck if node/npm version is supported by the package.\n\nError type: `ENOTSUP`\n\n### .checkPlatform(target, force, cb)\nCheck if OS/Arch is supported by the package.\n\nError type: `EBADPLATFORM`\n\n### .checkCycle(target, ancestors, cb)\nCheck for cyclic dependencies.\n\nError type: `ECYCLE`\n\n### .checkGit(folder, cb)\nCheck if a folder is a .git folder.\n\nError type: `EISGIT`\n",
+ "readmeFilename": "README.md",
+ "gitHead": "f28aebca7f5df0ddb13161b0f04d069004f6c367",
+ "_id": "npm-install-checks@1.0.6",
+ "_shasum": "8d4c1e852806e4e2d66601ab787be5841550d0cb",
+ "_from": "npm-install-checks@>=1.0.6 <1.1.0"
}
diff --git a/deps/npm/node_modules/npm-package-arg/package.json b/deps/npm/node_modules/npm-package-arg/package.json
index 0be06bb05..9caed85e7 100644
--- a/deps/npm/node_modules/npm-package-arg/package.json
+++ b/deps/npm/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
{
"name": "npm-package-arg",
- "version": "4.0.1",
+ "version": "4.0.2",
"description": "Parse the things that can be arguments to `npm install`",
"main": "npa.js",
"directories": {
@@ -8,10 +8,10 @@
},
"dependencies": {
"hosted-git-info": "^2.1.4",
- "semver": "4"
+ "semver": "4 || 5"
},
"devDependencies": {
- "tap": "^0.4.9"
+ "tap": "^1.2.0"
},
"scripts": {
"test": "tap test/*.js"
@@ -32,8 +32,8 @@
"homepage": "https://github.com/npm/npm-package-arg",
"readme": "# npm-package-arg\n\nParse package name and specifier passed to commands like `npm install` or\n`npm cache add`. This just parses the text given-- it's worth noting that\n`npm` has further logic it applies by looking at your disk to figure out\nwhat ambiguous specifiers are. If you want that logic, please see\n[realize-package-specifier].\n\n[realize-package-specifier]: https://www.npmjs.org/package/realize-package-specifier\n\nArguments look like: `foo@1.2`, `@bar/foo@1.2`, `foo@user/foo`, `http://x.com/foo.tgz`,\n`git+https://github.com/user/foo`, `bitbucket:user/foo`, `foo.tar.gz` or `bar`\n\n## EXAMPLES\n\n```javascript\nvar assert = require(\"assert\")\nvar npa = require(\"npm-package-arg\")\n\n// Pass in the descriptor, and it'll return an object\nvar parsed = npa(\"@bar/foo@1.2\")\n\n// Returns an object like:\n{\n raw: '@bar/foo@1.2', // what was passed in\n name: \"@bar/foo\", // the name of the package\n scope: \"@bar\", // the private scope of the package, or null\n type: \"range\", // the type of specifier this is\n spec: \">=1.2.0 <1.3.0\" // the expanded specifier\n rawSpec: \"1.2\" // the specifier as passed in\n }\n\n// Parsing urls pointing at hosted git services produces a variation:\nvar parsed = npa(\"git+https://github.com/user/foo\")\n\n// Returns an object like:\n{\n raw: 'git+https://github.com/user/foo',\n scope: null,\n name: null,\n rawSpec: 'git+https://github.com/user/foo',\n spec: 'user/foo',\n type: 'hosted',\n hosted: {\n type: 'github',\n ssh: 'git@github.com:user/foo.git',\n sshurl: 'git+ssh://git@github.com/user/foo.git',\n https: 'https://github.com/user/foo.git',\n directUrl: 'https://raw.githubusercontent.com/user/foo/master/package.json'\n }\n}\n\n// Completely unreasonable invalid garbage throws an error\n// Make sure you wrap this in a try/catch if you have not\n// already sanitized the inputs!\nassert.throws(function() {\n npa(\"this is not \\0 a valid package name or url\")\n})\n```\n\n## USING\n\n`var npa = require('npm-package-arg')`\n\n* var result = npa(*arg*)\n\nParses *arg* and returns a result object detailing what *arg* is.\n\n*arg* -- a package descriptor, like: `foo@1.2`, or `foo@user/foo`, or\n`http://x.com/foo.tgz`, or `git+https://github.com/user/foo`\n\n## RESULT OBJECT\n\nThe objects that are returned by npm-package-arg contain the following\nkeys:\n\n* `name` - If known, the `name` field expected in the resulting pkg.\n* `type` - One of the following strings:\n * `git` - A git repo\n * `hosted` - A hosted project, from github, bitbucket or gitlab. Originally\n either a full url pointing at one of these services or a shorthand like\n `user/project` or `github:user/project` for github or `bitbucket:user/project`\n for bitbucket.\n * `tag` - A tagged version, like `\"foo@latest\"`\n * `version` - A specific version number, like `\"foo@1.2.3\"`\n * `range` - A version range, like `\"foo@2.x\"`\n * `local` - A local file or folder path\n * `remote` - An http url (presumably to a tgz)\n* `spec` - The \"thing\". 
URL, the range, git repo, etc.\n* `hosted` - If type=hosted this will be an object with the following keys:\n * `type` - github, bitbucket or gitlab\n * `ssh` - The ssh path for this git repo\n * `sshUrl` - The ssh URL for this git repo\n * `httpsUrl` - The HTTPS URL for this git repo\n * `directUrl` - The URL for the package.json in this git repo\n* `raw` - The original un-modified string that was provided.\n* `rawSpec` - The part after the `name@...`, as it was originally\n provided.\n* `scope` - If a name is something like `@org/module` then the `scope`\n field will be set to `org`. If it doesn't have a scoped name, then\n scope is `null`.\n",
"readmeFilename": "README.md",
- "gitHead": "794c9981033bb16bd4a88c7ba45c109107439172",
- "_id": "npm-package-arg@4.0.1",
- "_shasum": "bfbea17cd2b9fdc4fca2f02796794173dbf1877c",
- "_from": "npm-package-arg@>=4.0.0 <4.1.0"
+ "gitHead": "8d3c51c33807fabde4db86a3811831b756eaf2eb",
+ "_id": "npm-package-arg@4.0.2",
+ "_shasum": "3f28235f9f6428e54bfeca73629e27d6c81a7e82",
+ "_from": "npm-package-arg@>=4.0.2 <4.1.0"
}
diff --git a/deps/npm/node_modules/npm-registry-client/lib/ping.js b/deps/npm/node_modules/npm-registry-client/lib/ping.js
new file mode 100644
index 000000000..b90f99d11
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/lib/ping.js
@@ -0,0 +1,23 @@
+module.exports = ping
+
+var url = require('url')
+var assert = require('assert')
+
+function ping (uri, params, cb) {
+ assert(typeof uri === 'string', 'must pass registry URI to ping')
+ assert(params && typeof params === 'object', 'must pass params to ping')
+ assert(typeof cb === 'function', 'must pass callback to ping')
+
+ var auth = params.auth
+ assert(auth && typeof auth === 'object', 'must pass auth to ping')
+
+ this.request(url.resolve(uri, '-/ping?write=true'), { auth: auth }, function (er, fullData) {
+ if (er) {
+ cb(er)
+ } else if (fullData) {
+ cb(null, fullData)
+ } else {
+ cb(new Error('No data received'))
+ }
+ })
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/.npmignore b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/.npmignore
deleted file mode 100644
index b512c09d4..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.npmignore b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.npmignore
new file mode 100644
index 000000000..38344f87a
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.npmignore
@@ -0,0 +1,5 @@
+build/
+test/
+examples/
+fs.js
+zlib.js
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.travis.yml
new file mode 100644
index 000000000..a2870dfb1
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.travis.yml
@@ -0,0 +1,39 @@
+sudo: false
+language: node_js
+before_install:
+ - npm install -g npm
+notifications:
+ email: false
+matrix:
+ include:
+ - node_js: '0.8'
+ env: TASK=test
+ - node_js: '0.10'
+ env: TASK=test
+ - node_js: '0.11'
+ env: TASK=test
+ - node_js: '0.12'
+ env: TASK=test
+ - node_js: 'iojs'
+ env: TASK=test
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="39..beta"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="34..beta"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
+script: "npm run $TASK"
+env:
+ global:
+ - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
+ - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml
new file mode 100644
index 000000000..96d9cfbd3
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml
@@ -0,0 +1 @@
+ui: tape
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/LICENSE b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/LICENSE
new file mode 100644
index 000000000..e3d4e695a
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/LICENSE
@@ -0,0 +1,18 @@
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/README.md
new file mode 100644
index 000000000..77fafa3da
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/README.md
@@ -0,0 +1,36 @@
+# readable-stream
+
+***Node-core streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)
+
+
+[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
+[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
+
+
+[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream)
+
+```bash
+npm install --save readable-stream
+```
+
+***Node-core streams for userland***
+
+This package is a mirror of the Streams2 and Streams3 implementations in
+Node-core, including [documentation](doc/stream.markdown).
+
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core. For background, see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+
+As of version 2.0.0 **readable-stream** uses semantic versioning.
+
+# Streams WG Team Members
+
+* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;christopher.s.dickinson@gmail.com&gt;
+ - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
+ - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) &lt;rod@vagg.org&gt;
+ - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
+* **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;newmansam@outlook.com&gt;
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
+* **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;d@domenic.me&gt;
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown
new file mode 100644
index 000000000..a2270c880
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown
@@ -0,0 +1,1651 @@
+# Stream
+
+ Stability: 2 - Stable
+
+A stream is an abstract interface implemented by various objects in
+io.js. For example, a [request to an HTTP
+server](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_http_incomingmessage) is a stream, as is
+[stdout][]. Streams are readable, writable, or both. All streams are
+instances of [EventEmitter][].
+
+You can load the Stream base classes by doing `require('stream')`.
+There are base classes provided for [Readable][] streams, [Writable][]
+streams, [Duplex][] streams, and [Transform][] streams.
+
+This document is split up into 3 sections. The first explains the
+parts of the API that you need to be aware of to use streams in your
+programs. If you never implement a streaming API yourself, you can
+stop there.
+
+The second section explains the parts of the API that you need to use
+if you implement your own custom streams. The API is designed to make
+this easy for you to do.
+
+The third section goes into more depth about how streams work,
+including some of the internal mechanisms and functions that you
+should probably not modify unless you definitely know what you are
+doing.
+
+
+## API for Stream Consumers
+
+<!--type=misc-->
+
+Streams can be either [Readable][], [Writable][], or both ([Duplex][]).
+
+All streams are EventEmitters, but they also have other custom methods
+and properties depending on whether they are Readable, Writable, or
+Duplex.
+
+If a stream is both Readable and Writable, then it implements all of
+the methods and events below. So, a [Duplex][] or [Transform][] stream is
+fully described by this API, though their implementation may be
+somewhat different.
+
+It is not necessary to implement Stream interfaces in order to consume
+streams in your programs. If you **are** implementing streaming
+interfaces in your own program, please also refer to
+[API for Stream Implementors][] below.
+
+Almost all io.js programs, no matter how simple, use Streams in some
+way. Here is an example of using Streams in an io.js program:
+
+```javascript
+var http = require('http');
+
+var server = http.createServer(function (req, res) {
+ // req is an http.IncomingMessage, which is a Readable Stream
+ // res is an http.ServerResponse, which is a Writable Stream
+
+ var body = '';
+ // we want to get the data as utf8 strings
+ // If you don't set an encoding, then you'll get Buffer objects
+ req.setEncoding('utf8');
+
+ // Readable streams emit 'data' events once a listener is added
+ req.on('data', function (chunk) {
+ body += chunk;
+ });
+
+ // the end event tells you that you have the entire body
+ req.on('end', function () {
+ try {
+ var data = JSON.parse(body);
+ } catch (er) {
+ // uh oh! bad json!
+ res.statusCode = 400;
+ return res.end('error: ' + er.message);
+ }
+
+ // write back something interesting to the user:
+ res.write(typeof data);
+ res.end();
+ });
+});
+
+server.listen(1337);
+
+// $ curl localhost:1337 -d '{}'
+// object
+// $ curl localhost:1337 -d '"foo"'
+// string
+// $ curl localhost:1337 -d 'not json'
+// error: Unexpected token o
+```
+
+### Class: stream.Readable
+
+<!--type=class-->
+
+The Readable stream interface is the abstraction for a *source* of
+data that you are reading from. In other words, data comes *out* of a
+Readable stream.
+
+A Readable stream will not start emitting data until you indicate that
+you are ready to receive it.
+
+Readable streams have two "modes": a **flowing mode** and a **paused
+mode**. When in flowing mode, data is read from the underlying system
+and provided to your program as fast as possible. In paused mode, you
+must explicitly call `stream.read()` to get chunks of data out.
+Streams start out in paused mode.
+
+**Note**: If no data event handlers are attached, and there are no
+[`pipe()`][] destinations, and the stream is switched into flowing
+mode, then data will be lost.
+
+You can switch to flowing mode by doing any of the following:
+
+* Adding a [`'data'` event][] handler to listen for data.
+* Calling the [`resume()`][] method to explicitly open the flow.
+* Calling the [`pipe()`][] method to send the data to a [Writable][].
+
+You can switch back to paused mode by doing either of the following:
+
+* If there are no pipe destinations, by calling the [`pause()`][]
+ method.
+* If there are pipe destinations, by removing any [`'data'` event][]
+ handlers, and removing all pipe destinations by calling the
+ [`unpipe()`][] method.
+
+Note that, for backwards compatibility reasons, removing `'data'`
+event handlers will **not** automatically pause the stream. Also, if
+there are piped destinations, then calling `pause()` will not
+guarantee that the stream will *remain* paused once those
+destinations drain and ask for more data.
+
+Examples of readable streams include:
+
+* [http responses, on the client](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_http_incomingmessage)
+* [http requests, on the server](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_http_incomingmessage)
+* [fs read streams](https://iojs.org/dist/v2.3.0/doc/api/fs.html#fs_class_fs_readstream)
+* [zlib streams][]
+* [crypto streams][]
+* [tcp sockets][]
+* [child process stdout and stderr][]
+* [process.stdin][]
+
+#### Event: 'readable'
+
+When a chunk of data can be read from the stream, it will emit a
+`'readable'` event.
+
+In some cases, listening for a `'readable'` event will cause some data
+to be read into the internal buffer from the underlying system, if it
+hadn't already.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', function() {
+ // there is some data to read now
+});
+```
+
+Once the internal buffer is drained, a `readable` event will fire
+again when more data is available.
+
+#### Event: 'data'
+
+* `chunk` {Buffer | String} The chunk of data.
+
+Attaching a `data` event listener to a stream that has not been
+explicitly paused will switch the stream into flowing mode. Data will
+then be passed as soon as it is available.
+
+If you just want to get all the data out of the stream as fast as
+possible, this is the best way to do so.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+});
+```
+
+#### Event: 'end'
+
+This event fires when there will be no more data to read.
+
+Note that the `end` event **will not fire** unless the data is
+completely consumed. This can be done by switching into flowing mode,
+or by calling `read()` repeatedly until you get to the end.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+});
+readable.on('end', function() {
+ console.log('there will be no more data.');
+});
+```
+
+#### Event: 'close'
+
+Emitted when the underlying resource (for example, the backing file
+descriptor) has been closed. Not all streams will emit this.
+
+#### Event: 'error'
+
+* {Error Object}
+
+Emitted if there was an error receiving data.
+
+#### readable.read([size])
+
+* `size` {Number} Optional argument to specify how much data to read.
+* Return {String | Buffer | null}
+
+The `read()` method pulls some data out of the internal buffer and
+returns it. If there is no data available, then it will return
+`null`.
+
+If you pass in a `size` argument, then it will return that many
+bytes. If `size` bytes are not available, then it will return `null`.
+
+If you do not specify a `size` argument, then it will return all the
+data in the internal buffer.
+
+This method should only be called in paused mode. In flowing mode,
+this method is called automatically until the internal buffer is
+drained.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = readable.read())) {
+ console.log('got %d bytes of data', chunk.length);
+ }
+});
+```
+
+If this method returns a data chunk, then it will also trigger the
+emission of a [`'data'` event][].
+
+#### readable.setEncoding(encoding)
+
+* `encoding` {String} The encoding to use.
+* Return: `this`
+
+Call this function to cause the stream to return strings of the
+specified encoding instead of Buffer objects. For example, if you do
+`readable.setEncoding('utf8')`, then the output data will be
+interpreted as UTF-8 data, and returned as strings. If you do
+`readable.setEncoding('hex')`, then the data will be encoded in
+hexadecimal string format.
+
+This properly handles multi-byte characters that would otherwise be
+potentially mangled if you simply pulled the Buffers directly and
+called `buf.toString(encoding)` on them. If you want to read the data
+as strings, always use this method.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.setEncoding('utf8');
+readable.on('data', function(chunk) {
+ assert.equal(typeof chunk, 'string');
+ console.log('got %d characters of string data', chunk.length);
+});
+```
+
+#### readable.resume()
+
+* Return: `this`
+
+This method will cause the readable stream to resume emitting `data`
+events.
+
+This method will switch the stream into flowing mode. If you do *not*
+want to consume the data from a stream, but you *do* want to get to
+its `end` event, you can call [`readable.resume()`][] to open the flow of
+data.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.resume();
+readable.on('end', function() {
+ console.log('got to the end, but did not read anything');
+});
+```
+
+#### readable.pause()
+
+* Return: `this`
+
+This method will cause a stream in flowing mode to stop emitting
+`data` events, switching out of flowing mode. Any data that becomes
+available will remain in the internal buffer.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+ readable.pause();
+ console.log('there will be no more data for 1 second');
+ setTimeout(function() {
+ console.log('now data will start flowing again');
+ readable.resume();
+ }, 1000);
+});
+```
+
+#### readable.isPaused()
+
+* Return: `Boolean`
+
+This method returns whether or not the `readable` has been **explicitly**
+paused by client code (using `readable.pause()` without a corresponding
+`readable.resume()`).
+
+```javascript
+var readable = new stream.Readable
+
+readable.isPaused() // === false
+readable.pause()
+readable.isPaused() // === true
+readable.resume()
+readable.isPaused() // === false
+```
+
+#### readable.pipe(destination[, options])
+
+* `destination` {[Writable][] Stream} The destination for writing data
+* `options` {Object} Pipe options
+ * `end` {Boolean} End the writer when the reader ends. Default = `true`
+
+This method pulls all the data out of a readable stream, and writes it
+to the supplied destination, automatically managing the flow so that
+the destination is not overwhelmed by a fast readable stream.
+
+Multiple destinations can be piped to safely.
+
+```javascript
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt'
+readable.pipe(writable);
+```
+
+This function returns the destination stream, so you can set up pipe
+chains like so:
+
+```javascript
+var r = fs.createReadStream('file.txt');
+var z = zlib.createGzip();
+var w = fs.createWriteStream('file.txt.gz');
+r.pipe(z).pipe(w);
+```
+
+For example, emulating the Unix `cat` command:
+
+```javascript
+process.stdin.pipe(process.stdout);
+```
+
+By default [`end()`][] is called on the destination when the source stream
+emits `end`, so that `destination` is no longer writable. Pass `{ end:
+false }` as `options` to keep the destination stream open.
+
+This keeps `writer` open so that "Goodbye" can be written at the
+end.
+
+```javascript
+reader.pipe(writer, { end: false });
+reader.on('end', function() {
+ writer.end('Goodbye\n');
+});
+```
+
+Note that `process.stderr` and `process.stdout` are never closed until
+the process exits, regardless of the specified options.
+
+#### readable.unpipe([destination])
+
+* `destination` {[Writable][] Stream} Optional specific stream to unpipe
+
+This method will remove the hooks set up for a previous `pipe()` call.
+
+If the destination is not specified, then all pipes are removed.
+
+If the destination is specified, but no pipe is set up for it, then
+this is a no-op.
+
+```javascript
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt',
+// but only for the first second
+readable.pipe(writable);
+setTimeout(function() {
+ console.log('stop writing to file.txt');
+ readable.unpipe(writable);
+ console.log('manually close the file stream');
+ writable.end();
+}, 1000);
+```
+
+#### readable.unshift(chunk)
+
+* `chunk` {Buffer | String} Chunk of data to unshift onto the read queue
+
+This is useful in certain cases where a stream is being consumed by a
+parser, which needs to "un-consume" some data that it has
+optimistically pulled out of the source, so that the stream can be
+passed on to some other party.
+
+If you find that you must often call `stream.unshift(chunk)` in your
+programs, consider implementing a [Transform][] stream instead. (See API
+for Stream Implementors, below.)
+
+```javascript
+// Pull off a header delimited by \n\n
+// use unshift() if we get too much
+// Call the callback with (error, header, stream)
+var StringDecoder = require('string_decoder').StringDecoder;
+function parseHeader(stream, callback) {
+ stream.on('error', callback);
+ stream.on('readable', onReadable);
+ var decoder = new StringDecoder('utf8');
+ var header = '';
+ function onReadable() {
+ var chunk;
+ while (null !== (chunk = stream.read())) {
+ var str = decoder.write(chunk);
+ if (str.match(/\n\n/)) {
+ // found the header boundary
+ var split = str.split(/\n\n/);
+ header += split.shift();
+ var remaining = split.join('\n\n');
+ var buf = new Buffer(remaining, 'utf8');
+ if (buf.length)
+ stream.unshift(buf);
+ stream.removeListener('error', callback);
+ stream.removeListener('readable', onReadable);
+ // now the body of the message can be read from the stream.
+ callback(null, header, stream);
+ } else {
+ // still reading the header.
+ header += str;
+ }
+ }
+ }
+}
+```
+
+#### readable.wrap(stream)
+
+* `stream` {Stream} An "old style" readable stream
+
+Versions of Node.js prior to v0.10 had streams that did not implement the
+entire Streams API as it is today. (See "Compatibility" below for
+more information.)
+
+If you are using an older io.js library that emits `'data'` events and
+has a [`pause()`][] method that is advisory only, then you can use the
+`wrap()` method to create a [Readable][] stream that uses the old stream
+as its data source.
+
+You will very rarely need to call this function, but it exists as a
+convenience for interacting with old io.js programs and libraries.
+
+For example:
+
+```javascript
+var OldReader = require('./old-api-module.js').OldReader;
+var oreader = new OldReader;
+var Readable = require('stream').Readable;
+var myReader = new Readable().wrap(oreader);
+
+myReader.on('readable', function() {
+ myReader.read(); // etc.
+});
+```
+
+
+### Class: stream.Writable
+
+<!--type=class-->
+
+The Writable stream interface is an abstraction for a *destination*
+that you are writing data *to*.
+
+Examples of writable streams include:
+
+* [http requests, on the client](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_class_http_clientrequest)
+* [http responses, on the server](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_class_http_serverresponse)
+* [fs write streams](https://iojs.org/dist/v2.3.0/doc/api/fs.html#fs_class_fs_writestream)
+* [zlib streams][]
+* [crypto streams][]
+* [tcp sockets][]
+* [child process stdin](https://iojs.org/dist/v2.3.0/doc/api/child_process.html#child_process_child_stdin)
+* [process.stdout][], [process.stderr][]
+
+#### writable.write(chunk[, encoding][, callback])
+
+* `chunk` {String | Buffer} The data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Callback for when this chunk of data is flushed
+* Returns: {Boolean} True if the data was handled completely.
+
+This method writes some data to the underlying system, and calls the
+supplied callback once the data has been fully handled.
+
+The return value indicates if you should continue writing right now.
+If the data had to be buffered internally, then it will return
+`false`. Otherwise, it will return `true`.
+
+This return value is strictly advisory. You MAY continue to write,
+even if it returns `false`. However, writes will be buffered in
+memory, so it is best not to do this excessively. Instead, wait for
+the `drain` event before writing more data.
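+
+For example, a minimal sketch (using the hypothetical
+`getWritableStreamSomehow()` helper that the other examples in this
+document assume) of writing one chunk and checking the return value:
+
+```javascript
+var writable = getWritableStreamSomehow();
+
+var ok = writable.write('some data', 'utf8', function() {
+  console.log('this chunk has been flushed');
+});
+
+if (!ok) {
+  console.log('data was buffered; wait for "drain" before writing more');
+}
+```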
+
+#### Event: 'drain'
+
+If a [`writable.write(chunk)`][] call returns false, then the `drain`
+event will indicate when it is appropriate to begin writing more data
+to the stream.
+
+```javascript
+// Write the data to the supplied writable stream one million times.
+// Be attentive to back-pressure.
+function writeOneMillionTimes(writer, data, encoding, callback) {
+ var i = 1000000;
+ write();
+ function write() {
+ var ok = true;
+ do {
+ i -= 1;
+ if (i === 0) {
+ // last time!
+ writer.write(data, encoding, callback);
+ } else {
+ // see if we should continue, or wait
+ // don't pass the callback, because we're not done yet.
+ ok = writer.write(data, encoding);
+ }
+ } while (i > 0 && ok);
+ if (i > 0) {
+ // had to stop early!
+ // write some more once it drains
+ writer.once('drain', write);
+ }
+ }
+}
+```
+
+#### writable.cork()
+
+Forces buffering of all writes.
+
+Buffered data will be flushed either when `.uncork()` or `.end()` is called.
+
+#### writable.uncork()
+
+Flushes all data buffered since the `.cork()` call.
+
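+An illustrative sketch of batching several small writes (again assuming
+the hypothetical `getWritableStreamSomehow()` helper used by the other
+examples in this document):
+
+```javascript
+var writer = getWritableStreamSomehow();
+
+writer.cork();
+writer.write('hello ');
+writer.write('world ');
+
+process.nextTick(function() {
+  // both buffered chunks are flushed to the underlying system here
+  writer.uncork();
+});
+```
+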
+#### writable.setDefaultEncoding(encoding)
+
+* `encoding` {String} The new default encoding
+
+Sets the default encoding for a writable stream.
+
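+For example (a minimal sketch, again assuming the hypothetical
+`getWritableStreamSomehow()` helper):
+
+```javascript
+var writable = getWritableStreamSomehow();
+writable.setDefaultEncoding('utf8');
+// subsequent string writes are encoded as utf8 unless overridden per call
+writable.write('some string data');
+```
+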
+#### writable.end([chunk][, encoding][, callback])
+
+* `chunk` {String | Buffer} Optional data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Optional callback for when the stream is finished
+
+Call this method when no more data will be written to the stream. If
+supplied, the callback is attached as a listener on the `finish` event.
+
+Calling [`write()`][] after calling [`end()`][] will raise an error.
+
+```javascript
+// write 'hello, ' and then end with 'world!'
+var file = fs.createWriteStream('example.txt');
+file.write('hello, ');
+file.end('world!');
+// writing more now is not allowed!
+```
+
+#### Event: 'finish'
+
+When the [`end()`][] method has been called, and all data has been flushed
+to the underlying system, this event is emitted.
+
+```javascript
+var writer = getWritableStreamSomehow();
+for (var i = 0; i < 100; i++) {
+ writer.write('hello, #' + i + '!\n');
+}
+writer.end('this is the end\n');
+writer.on('finish', function() {
+ console.error('all writes are now complete.');
+});
+```
+
+#### Event: 'pipe'
+
+* `src` {[Readable][] Stream} source stream that is piping to this writable
+
+This is emitted whenever the `pipe()` method is called on a readable
+stream, adding this writable to its set of destinations.
+
+```javascript
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('pipe', function(src) {
+ console.error('something is piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+```
+
+#### Event: 'unpipe'
+
+* `src` {[Readable][] Stream} The source stream that [unpiped][] this writable
+
+This is emitted whenever the [`unpipe()`][] method is called on a
+readable stream, removing this writable from its set of destinations.
+
+```javascript
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('unpipe', function(src) {
+ console.error('something has stopped piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+reader.unpipe(writer);
+```
+
+#### Event: 'error'
+
+* {Error object}
+
+Emitted if there was an error when writing or piping data.
+
+### Class: stream.Duplex
+
+Duplex streams are streams that implement both the [Readable][] and
+[Writable][] interfaces. See above for usage.
+
+Examples of Duplex streams include:
+
+* [tcp sockets][]
+* [zlib streams][]
+* [crypto streams][]
+
+
+### Class: stream.Transform
+
+Transform streams are [Duplex][] streams where the output is in some way
+computed from the input. They implement both the [Readable][] and
+[Writable][] interfaces. See above for usage.
+
+Examples of Transform streams include:
+
+* [zlib streams][]
+* [crypto streams][]
+
+
+## API for Stream Implementors
+
+<!--type=misc-->
+
+To implement any sort of stream, the pattern is the same:
+
+1. Extend the appropriate parent class in your own subclass. (The
+ [`util.inherits`][] method is particularly helpful for this.)
+2. Call the appropriate parent class constructor in your constructor,
+ to be sure that the internal mechanisms are set up properly.
+3. Implement one or more specific methods, as detailed below.
+
+The class to extend and the method(s) to implement depend on the sort
+of stream class you are writing:
+
+<table>
+ <thead>
+ <tr>
+ <th>
+ <p>Use-case</p>
+ </th>
+ <th>
+ <p>Class</p>
+ </th>
+ <th>
+ <p>Method(s) to implement</p>
+ </th>
+ </tr>
+ </thead>
+ <tr>
+ <td>
+ <p>Reading only</p>
+ </td>
+ <td>
+ <p>[Readable](#stream_class_stream_readable_1)</p>
+ </td>
+ <td>
+ <p><code>[_read][]</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Writing only</p>
+ </td>
+ <td>
+ <p>[Writable](#stream_class_stream_writable_1)</p>
+ </td>
+ <td>
+ <p><code>[_write][]</code>, <code>_writev</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Reading and writing</p>
+ </td>
+ <td>
+ <p>[Duplex](#stream_class_stream_duplex_1)</p>
+ </td>
+ <td>
+ <p><code>[_read][]</code>, <code>[_write][]</code>, <code>_writev</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Operate on written data, then read the result</p>
+ </td>
+ <td>
+ <p>[Transform](#stream_class_stream_transform_1)</p>
+ </td>
+ <td>
+ <p><code>_transform</code>, <code>_flush</code></p>
+ </td>
+ </tr>
+</table>
+
+In your implementation code, it is very important to never call the
+methods described in [API for Stream Consumers][] above. Otherwise, you
+can potentially cause adverse side effects in programs that consume
+your streaming interfaces.
+
+### Class: stream.Readable
+
+<!--type=class-->
+
+`stream.Readable` is an abstract class designed to be extended with an
+underlying implementation of the [`_read(size)`][] method.
+
+Please see above under [API for Stream Consumers][] for how to consume
+streams in your programs. What follows is an explanation of how to
+implement Readable streams in your programs.
+
+#### Example: A Counting Stream
+
+<!--type=example-->
+
+This is a basic example of a Readable stream. It emits the numerals
+from 1 to 1,000,000 in ascending order, and then ends.
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+util.inherits(Counter, Readable);
+
+function Counter(opt) {
+ Readable.call(this, opt);
+ this._max = 1000000;
+ this._index = 1;
+}
+
+Counter.prototype._read = function() {
+ var i = this._index++;
+ if (i > this._max)
+ this.push(null);
+ else {
+ var str = '' + i;
+ var buf = new Buffer(str, 'ascii');
+ this.push(buf);
+ }
+};
+```
+
+#### Example: SimpleProtocol v1 (Sub-optimal)
+
+This is similar to the `parseHeader` function described above, but
+implemented as a custom stream. Also, note that this implementation
+does not convert the incoming data to a string.
+
+However, this would be better implemented as a [Transform][] stream. See
+below for a better implementation.
+
+```javascript
+// A parser for a simple data protocol.
+// The "header" is a JSON object, followed by 2 \n characters, and
+// then a message body.
+//
+// NOTE: This can be done more simply as a Transform stream!
+// Using Readable directly for this is sub-optimal. See the
+// alternative example below under the Transform section.
+
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(SimpleProtocol, Readable);
+
+function SimpleProtocol(source, options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(source, options);
+
+ Readable.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+
+ // source is a readable stream, such as a socket or file
+ this._source = source;
+
+ var self = this;
+ source.on('end', function() {
+ self.push(null);
+ });
+
+ // give it a kick whenever the source is readable
+ // read(0) will not consume any bytes
+ source.on('readable', function() {
+ self.read(0);
+ });
+
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._read = function(n) {
+ if (!this._inBody) {
+ var chunk = this._source.read();
+
+ // if the source doesn't have data, we don't have data yet.
+ if (chunk === null)
+ return this.push('');
+
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ this.push('');
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // now, because we got some extra data, unshift the rest
+ // back into the read queue so that our consumer will see it.
+ var b = chunk.slice(split);
+ this.unshift(b);
+
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+ }
+ } else {
+ // from there on, just provide the data to our consumer.
+ // careful not to push(null), since that would indicate EOF.
+ var chunk = this._source.read();
+ if (chunk) this.push(chunk);
+ }
+};
+
+// Usage:
+// var parser = new SimpleProtocol(source);
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+
+#### new stream.Readable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} The maximum number of bytes to store in
+ the internal buffer before ceasing to read from the underlying
+ resource. Default=16kb, or 16 for `objectMode` streams
+ * `encoding` {String} If specified, then buffers will be decoded to
+ strings using the specified encoding. Default=null
+ * `objectMode` {Boolean} Whether this stream should behave
+ as a stream of objects, meaning that stream.read(n) returns
+ a single value instead of a Buffer of size n. Default=false
+
+In classes that extend the Readable class, make sure to call the
+Readable constructor so that the buffering settings can be properly
+initialized.
+
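+A small sketch of passing these options through a subclass constructor
+(the `StringSource` name is purely illustrative):
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(StringSource, Readable);
+
+function StringSource(text) {
+  // decode pushed data to utf8 strings, with a small internal buffer
+  Readable.call(this, { encoding: 'utf8', highWaterMark: 1024 });
+  this._text = text;
+}
+
+StringSource.prototype._read = function(size) {
+  this.push(this._text); // delivered to consumers as a utf8 string
+  this.push(null);       // then signal EOF
+};
+```
+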
+#### readable.\_read(size)
+
+* `size` {Number} Number of bytes to read asynchronously
+
+Note: **Implement this function, but do NOT call it directly.**
+
+This function should NOT be called directly. It should be implemented
+by child classes, and only called by the internal Readable class
+methods.
+
+All Readable stream implementations must provide a `_read` method to
+fetch data from the underlying resource.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
+When data is available, put it into the read queue by calling
+`readable.push(chunk)`. If `push` returns false, then you should stop
+reading. When `_read` is called again, you should start pushing more
+data.
+
+The `size` argument is advisory. Implementations where a "read" is a
+single call that returns data can use this to know how much data to
+fetch. Implementations where that is not relevant, such as TCP or
+TLS, may ignore this argument, and simply provide data whenever it
+becomes available. There is no need, for example, to "wait" until
+`size` bytes are available before calling [`stream.push(chunk)`][].
+
+#### readable.push(chunk[, encoding])
+
+* `chunk` {Buffer | null | String} Chunk of data to push into the read queue
+* `encoding` {String} Encoding of String chunks. Must be a valid
+ Buffer encoding, such as `'utf8'` or `'ascii'`
+* return {Boolean} Whether or not more pushes should be performed
+
+Note: **This function should be called by Readable implementors, NOT
+by consumers of Readable streams.**
+
+The `_read()` function will not be called again until at least one
+`push(chunk)` call is made.
+
+The `Readable` class works by putting data into a read queue to be
+pulled out later by calling the `read()` method when the `'readable'`
+event fires.
+
+The `push()` method will explicitly insert some data into the read
+queue. If it is called with `null` then it will signal the end of the
+data (EOF).
+
+This API is designed to be as flexible as possible. For example,
+you may be wrapping a lower-level source which has some sort of
+pause/resume mechanism, and a data callback. In those cases, you
+could wrap the low-level source object by doing something like this:
+
+```javascript
+// source is an object with readStop() and readStart() methods,
+// and an `ondata` member that gets called when it has data, and
+// an `onend` member that gets called when the data is over.
+
+util.inherits(SourceWrapper, Readable);
+
+function SourceWrapper(options) {
+ Readable.call(this, options);
+
+ this._source = getLowlevelSourceObject();
+ var self = this;
+
+ // Every time there's data, we push it into the internal buffer.
+ this._source.ondata = function(chunk) {
+ // if push() returns false, then we need to stop reading from source
+ if (!self.push(chunk))
+ self._source.readStop();
+ };
+
+ // When the source ends, we push the EOF-signaling `null` chunk
+ this._source.onend = function() {
+ self.push(null);
+ };
+}
+
+// _read will be called when the stream wants to pull more data in
+// the advisory size argument is ignored in this case.
+SourceWrapper.prototype._read = function(size) {
+ this._source.readStart();
+};
+```
+
+
+### Class: stream.Writable
+
+<!--type=class-->
+
+`stream.Writable` is an abstract class designed to be extended with an
+underlying implementation of the [`_write(chunk, encoding, callback)`][] method.
+
+Please see above under [API for Stream Consumers][] for how to consume
+writable streams in your programs. What follows is an explanation of
+how to implement Writable streams in your programs.
+
+#### new stream.Writable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} Buffer level when [`write()`][] starts
+ returning false. Default=16kb, or 16 for `objectMode` streams
+ * `decodeStrings` {Boolean} Whether or not to decode strings into
+ Buffers before passing them to [`_write()`][]. Default=true
+ * `objectMode` {Boolean} Whether or not `write(anyObj)` is
+ a valid operation. If set, you can write arbitrary data instead
+ of only `Buffer` / `String` data. Default=false
+
+In classes that extend the Writable class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### writable.\_write(chunk, encoding, callback)
+
+* `chunk` {Buffer | String} The chunk to be written. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+ encoding type. If the chunk is a buffer, then this is the special
+ value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunk.
+
+All Writable stream implementations must provide a [`_write()`][]
+method to send data to the underlying resource.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+Call the callback using the standard `callback(error)` pattern to
+signal that the write completed successfully or with an error.
+
+If the `decodeStrings` flag is set in the constructor options, then
+`chunk` may be a string rather than a Buffer, and `encoding` will
+indicate the sort of string that it is. This is to support
+implementations that have an optimized handling for certain string
+data encodings. If you do not explicitly set the `decodeStrings`
+option to `false`, then you can safely ignore the `encoding` argument,
+and assume that `chunk` will always be a Buffer.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
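+A minimal sketch of a Writable subclass that simply reports the size of
+each chunk it receives (the `LogWriter` name is purely illustrative):
+
+```javascript
+var Writable = require('stream').Writable;
+var util = require('util');
+
+util.inherits(LogWriter, Writable);
+
+function LogWriter(options) {
+  Writable.call(this, options);
+}
+
+LogWriter.prototype._write = function(chunk, encoding, callback) {
+  // chunk is a Buffer here unless decodeStrings was set to false
+  console.log('wrote %d bytes', chunk.length);
+  callback();
+};
+```
+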
+#### writable.\_writev(chunks, callback)
+
+* `chunks` {Array} The chunks to be written. Each chunk has the
+ following format: `{ chunk: ..., encoding: ... }`.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunks.
+
+Note: **This function MUST NOT be called directly.** It may be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+This function is completely optional to implement. In most cases it is
+unnecessary. If implemented, it will be called with all the chunks
+that are buffered in the write queue.
+
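+Continuing the illustrative `LogWriter` sketch above, an optional
+`_writev` that handles a batch of buffered chunks might look like this:
+
+```javascript
+LogWriter.prototype._writev = function(chunks, callback) {
+  var total = 0;
+  for (var i = 0; i < chunks.length; i++) {
+    // each entry has the form { chunk: ..., encoding: ... }
+    total += chunks[i].chunk.length;
+  }
+  console.log('wrote %d buffered chunks (%d bytes)', chunks.length, total);
+  callback();
+};
+```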
+
+### Class: stream.Duplex
+
+<!--type=class-->
+
+A "duplex" stream is one that is both Readable and Writable, such as a
+TCP socket connection.
+
+Note that `stream.Duplex` is an abstract class designed to be extended
+with an underlying implementation of the `_read(size)` and
+[`_write(chunk, encoding, callback)`][] methods as you would with a
+Readable or Writable stream class.
+
+Since JavaScript doesn't have multiple prototypal inheritance, this
+class prototypally inherits from Readable, and then parasitically from
+Writable. It is thus up to the user to implement both the low-level
+`_read(n)` method and the low-level
+[`_write(chunk, encoding, callback)`][] method on extension duplex classes.
+
+#### new stream.Duplex(options)
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors. Also has the following fields:
+ * `allowHalfOpen` {Boolean} Default=true. If set to `false`, then
+ the stream will automatically end the readable side when the
+ writable side ends and vice versa.
+ * `readableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ for readable side of the stream. Has no effect if `objectMode`
+ is `true`.
+ * `writableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ for writable side of the stream. Has no effect if `objectMode`
+ is `true`.
+
+In classes that extend the Duplex class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
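+As an illustrative sketch, a Duplex stream that echoes whatever is
+written to it back out on its readable side might be implemented like
+this (`EchoDuplex` is a hypothetical name):
+
+```javascript
+var Duplex = require('stream').Duplex;
+var util = require('util');
+
+util.inherits(EchoDuplex, Duplex);
+
+function EchoDuplex(options) {
+  Duplex.call(this, options);
+}
+
+EchoDuplex.prototype._write = function(chunk, encoding, callback) {
+  // hand written data straight to the readable side
+  this.push(chunk);
+  callback();
+};
+
+EchoDuplex.prototype._read = function(size) {
+  // data is pushed from _write, so there is nothing to do here
+};
+```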
+
+### Class: stream.Transform
+
+A "transform" stream is a duplex stream where the output is causally
+connected in some way to the input, such as a [zlib][] stream or a
+[crypto][] stream.
+
+There is no requirement that the output be the same size as the input,
+arrive in the same number of chunks, or arrive at the same time. For example, a
+Hash stream will only ever have a single chunk of output which is
+provided when the input is ended. A zlib stream will produce output
+that is either much smaller or much larger than its input.
+
+Rather than implement the [`_read()`][] and [`_write()`][] methods, Transform
+classes must implement the `_transform()` method, and may optionally
+also implement the `_flush()` method. (See below.)
+
+#### new stream.Transform([options])
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors.
+
+In classes that extend the Transform class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### transform.\_transform(chunk, encoding, callback)
+
+* `chunk` {Buffer | String} The chunk to be transformed. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+ encoding type. If the chunk is a buffer, then this is the special
+ value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument and data) when you are done processing the supplied chunk.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Transform
+class methods only.
+
+All Transform stream implementations must provide a `_transform`
+method to accept input and produce output.
+
+`_transform` should do whatever has to be done in this specific
+Transform class, to handle the bytes being written, and pass them off
+to the readable portion of the interface. Do asynchronous I/O,
+process things, and so on.
+
+Call `transform.push(outputChunk)` 0 or more times to generate output
+from this input chunk, depending on how much data you want to output
+as a result of this chunk.
+
+Call the callback function only when the current chunk is completely
+consumed. Note that there may or may not be output as a result of any
+particular input chunk. If you supply output as the second argument to the
+callback, it will be passed to the push method; in other words, the following are
+equivalent:
+
+```javascript
+transform.prototype._transform = function (data, encoding, callback) {
+ this.push(data);
+ callback();
+}
+
+transform.prototype._transform = function (data, encoding, callback) {
+ callback(null, data);
+}
+```
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
+#### transform.\_flush(callback)
+
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done flushing any remaining data.
+
+Note: **This function MUST NOT be called directly.** It MAY be implemented
+by child classes, and if so, will be called by the internal Transform
+class methods only.
+
+In some cases, your transform operation may need to emit a bit more
+data at the end of the stream. For example, a `Zlib` compression
+stream will store up some internal state so that it can optimally
+compress the output. At the end, however, it needs to do the best it
+can with what is left, so that the data will be complete.
+
+In those cases, you can implement a `_flush` method, which will be
+called at the very end, after all the written data is consumed, but
+before emitting `end` to signal the end of the readable side. Just
+like with `_transform`, call `transform.push(chunk)` zero or more
+times, as appropriate, and call `callback` when the flush operation is
+complete.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
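+
+As an illustrative sketch, a Transform that passes data through
+unchanged but appends a one-line summary when the input ends might
+implement `_flush` like this (`ByteCounter` is a hypothetical name):
+
+```javascript
+var Transform = require('stream').Transform;
+var util = require('util');
+
+util.inherits(ByteCounter, Transform);
+
+function ByteCounter(options) {
+  Transform.call(this, options);
+  this._bytes = 0;
+}
+
+ByteCounter.prototype._transform = function(chunk, encoding, callback) {
+  this._bytes += chunk.length;
+  this.push(chunk); // pass the data through unchanged
+  callback();
+};
+
+ByteCounter.prototype._flush = function(callback) {
+  // emit one final chunk before 'end' is signaled on the readable side
+  this.push('\ntotal: ' + this._bytes + ' bytes\n');
+  callback();
+};
+```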
+
+#### Events: 'finish' and 'end'
+
+The [`finish`][] and [`end`][] events come from the parent Writable
+and Readable classes, respectively. The `finish` event is fired after
+`.end()` is called and all chunks have been processed by `_transform`;
+`end` is fired after all data has been output, which is after the
+callback in `_flush` has been called.
+
+#### Example: `SimpleProtocol` parser v2
+
+The example above of a simple protocol parser can be implemented
+simply by using the higher level [Transform][] stream class, similar to
+the `parseHeader` and `SimpleProtocol v1` examples above.
+
+In this example, rather than providing the input as an argument, it
+would be piped into the parser, which is a more idiomatic io.js stream
+approach.
+
+```javascript
+var util = require('util');
+var Transform = require('stream').Transform;
+util.inherits(SimpleProtocol, Transform);
+
+function SimpleProtocol(options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(options);
+
+ Transform.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._transform = function(chunk, encoding, done) {
+ if (!this._inBody) {
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+
+ // now, because we got some extra data, emit this first.
+ this.push(chunk.slice(split));
+ }
+ } else {
+ // from there on, just provide the data to our consumer as-is.
+ this.push(chunk);
+ }
+ done();
+};
+
+// Usage:
+// var parser = new SimpleProtocol();
+// source.pipe(parser)
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+
+### Class: stream.PassThrough
+
+This is a trivial implementation of a [Transform][] stream that simply
+passes the input bytes across to the output. Its purpose is mainly
+for examples and testing, but there are occasionally use cases where
+it can come in handy as a building block for novel sorts of streams.
+
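+For instance, a PassThrough can act as a simple "tap" on a pipeline,
+letting you observe data without modifying it (a minimal sketch):
+
+```javascript
+var PassThrough = require('stream').PassThrough;
+
+var tap = new PassThrough();
+tap.on('data', function(chunk) {
+  console.error('saw %d bytes', chunk.length);
+});
+
+// data still flows from stdin to stdout unchanged
+process.stdin.pipe(tap).pipe(process.stdout);
+```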
+
+## Simplified Constructor API
+
+<!--type=misc-->
+
+In simple cases there is now the added benefit of being able to construct a stream without inheritance.
+
+This can be done by passing the appropriate methods as constructor options:
+
+Examples:
+
+### Readable
+```javascript
+var readable = new stream.Readable({
+ read: function(n) {
+ // sets this._read under the hood
+ }
+});
+```
+
+### Writable
+```javascript
+var writable = new stream.Writable({
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+ }
+});
+
+// or
+
+var writable = new stream.Writable({
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+ }
+});
+```
+
+### Duplex
+```javascript
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+ },
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+ }
+});
+
+// or
+
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+ },
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+ }
+});
+```
+
+### Transform
+```javascript
+var transform = new stream.Transform({
+ transform: function(chunk, encoding, next) {
+ // sets this._transform under the hood
+ },
+ flush: function(done) {
+ // sets this._flush under the hood
+ }
+});
+```
+
+## Streams: Under the Hood
+
+<!--type=misc-->
+
+### Buffering
+
+<!--type=misc-->
+
+Both Writable and Readable streams will buffer data on an internal
+object called `_writableState.buffer` or `_readableState.buffer`,
+respectively.
+
+The amount of data that will potentially be buffered depends on the
+`highWaterMark` option which is passed into the constructor.
+
+Buffering in Readable streams happens when the implementation calls
+[`stream.push(chunk)`][]. If the consumer of the Stream does not call
+`stream.read()`, then the data will sit in the internal queue until it
+is consumed.
+
+Buffering in Writable streams happens when the user calls
+[`stream.write(chunk)`][] repeatedly, even when `write()` returns `false`.
+
+The purpose of streams, especially with the `pipe()` method, is to
+limit the buffering of data to acceptable levels, so that sources and
+destinations of varying speed will not overwhelm the available memory.
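+
+A small sketch of observing the `highWaterMark` limit, using the
+simplified constructor described above and a writer that deliberately
+never finishes its writes:
+
+```javascript
+var Writable = require('stream').Writable;
+
+var slow = new Writable({
+  highWaterMark: 4,
+  write: function(chunk, encoding, next) {
+    // intentionally never call next(), so data piles up in the buffer
+  }
+});
+
+console.log(slow.write('ab')); // true  -- still below the high-water mark
+console.log(slow.write('cd')); // false -- the mark has been reached
+```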
+
+### `stream.read(0)`
+
+There are some cases where you want to trigger a refresh of the
+underlying readable stream mechanisms, without actually consuming any
+data. In that case, you can call `stream.read(0)`, which will always
+return null.
+
+If the internal read buffer is below the `highWaterMark`, and the
+stream is not currently reading, then calling `read(0)` will trigger
+a low-level `_read` call.
+
+There is almost never a need to do this. However, you will see some
+cases in io.js's internals where this is done, particularly in the
+Readable stream class internals.
+
+### `stream.push('')`
+
+Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
+interesting side effect. Because it *is* a call to
+[`stream.push()`][], it will end the `reading` process. However, it
+does *not* add any data to the readable buffer, so there's nothing for
+a user to consume.
+
+Very rarely, there are cases where you have no data to provide now,
+but the consumer of your stream (or, perhaps, another bit of your own
+code) will know when to check again, by calling `stream.read(0)`. In
+those cases, you *may* call `stream.push('')`.
+
+So far, the only use case for this functionality is in the
+[tls.CryptoStream][] class, which is deprecated in io.js v1.0. If you
+find that you have to use `stream.push('')`, please consider another
+approach, because it almost certainly indicates that something is
+horribly wrong.
+
+### Compatibility with Older Node.js Versions
+
+<!--type=misc-->
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for you to call the `read()` method, `'data'`
+ events would start emitting immediately. If you needed to do some
+ I/O to decide how to handle data, then you had to store the chunks
+ in some kind of buffer so that they would not be lost.
+* The [`pause()`][] method was advisory, rather than guaranteed. This
+ meant that you still had to be prepared to receive `'data'` events
+ even when the stream was in a paused state.
+
+In io.js v1.0 and Node.js v0.10, the Readable class described above was added.
+For backwards compatibility with older Node.js programs, Readable streams
+switch into "flowing mode" when a `'data'` event handler is added, or
+when the [`resume()`][] method is called. The effect is that, even if
+you are not using the new `read()` method and `'readable'` event, you
+no longer have to worry about losing `'data'` chunks.
+
+Most programs will continue to function normally. However, this
+introduces an edge case in the following conditions:
+
+* No [`'data'` event][] handler is added.
+* The [`resume()`][] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```javascript
+// WARNING! BROKEN!
+net.createServer(function(socket) {
+
+ // we add an 'end' listener, but never consume the data
+ socket.on('end', function() {
+ // It will never get here.
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would be
+simply discarded. However, in io.js v1.0 and Node.js v0.10 and beyond,
+the socket will remain paused forever.
+
+The workaround in this situation is to call the `resume()` method to
+start the flow of data:
+
+```javascript
+// Workaround
+net.createServer(function(socket) {
+
+ socket.on('end', function() {
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+ // start the flow of data, discarding it.
+ socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+`wrap()` method.
+
+
+### Object Mode
+
+<!--type=misc-->
+
+Normally, Streams operate on Strings and Buffers exclusively.
+
+Streams that are in **object mode** can emit generic JavaScript values
+other than Buffers and Strings.
+
+A Readable stream in object mode will always return a single item from
+a call to `stream.read(size)`, regardless of what the size argument
+is.
+
+A Writable stream in object mode will always ignore the `encoding`
+argument to `stream.write(data, encoding)`.
+
+The special value `null` still retains its special meaning for object
+mode streams. That is, for object mode readable streams, `null` as a
+return value from `stream.read()` indicates that there is no more
+data, and [`stream.push(null)`][] will signal the end of stream data
+(`EOF`).
+
+No streams in io.js core are object mode streams. This pattern is only
+used by userland streaming libraries.
+
+You should set `objectMode` on the options object passed to your stream
+subclass's constructor. Setting `objectMode` mid-stream is not safe.
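+
+For example, a userland object mode Readable might be set up like this
+minimal sketch:
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(CounterObjects, Readable);
+
+function CounterObjects() {
+  // objectMode is set once, on the constructor's options object.
+  Readable.call(this, { objectMode: true });
+  this._n = 0;
+}
+
+CounterObjects.prototype._read = function() {
+  if (this._n < 3)
+    this.push({ count: this._n++ }); // generic values, not Buffers or Strings
+  else
+    this.push(null); // null still signals the end of the stream
+};
+```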
+
+For Duplex streams, `objectMode` can be set exclusively for the readable
+or the writable side with the `readableObjectMode` and `writableObjectMode`
+options, respectively. These options can be used to implement parsers and
+serializers with Transform streams, as in the following example.
+
+```javascript
+var util = require('util');
+var StringDecoder = require('string_decoder').StringDecoder;
+var Transform = require('stream').Transform;
+util.inherits(JSONParseStream, Transform);
+
+// Gets \n-delimited JSON string data, and emits the parsed objects
+function JSONParseStream() {
+ if (!(this instanceof JSONParseStream))
+ return new JSONParseStream();
+
+ Transform.call(this, { readableObjectMode : true });
+
+ this._buffer = '';
+ this._decoder = new StringDecoder('utf8');
+}
+
+JSONParseStream.prototype._transform = function(chunk, encoding, cb) {
+ this._buffer += this._decoder.write(chunk);
+ // split on newlines
+ var lines = this._buffer.split(/\r?\n/);
+ // keep the last partial line buffered
+ this._buffer = lines.pop();
+ for (var l = 0; l < lines.length; l++) {
+ var line = lines[l];
+ try {
+ var obj = JSON.parse(line);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+
+JSONParseStream.prototype._flush = function(cb) {
+ // Just handle any leftover
+ var rem = this._buffer.trim();
+ if (rem) {
+ try {
+ var obj = JSON.parse(rem);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+```
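+
+A parser like this might then be used along these lines (a sketch, assuming
+newline-delimited JSON arrives on `process.stdin`):
+
+```javascript
+process.stdin
+  .pipe(new JSONParseStream())
+  .on('data', function(obj) {
+    // obj is a parsed JavaScript object, thanks to readableObjectMode
+    console.log('parsed:', obj);
+  })
+  .on('error', function(er) {
+    console.error('bad JSON line:', er.message);
+  });
+```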
+
+
+[EventEmitter]: https://iojs.org/dist/v2.3.0/doc/api/events.html#events_class_events_eventemitter
+[Object mode]: #stream_object_mode
+[`stream.push(chunk)`]: #stream_readable_push_chunk_encoding
+[`stream.push(null)`]: #stream_readable_push_chunk_encoding
+[`stream.push()`]: #stream_readable_push_chunk_encoding
+[`unpipe()`]: #stream_readable_unpipe_destination
+[unpiped]: #stream_readable_unpipe_destination
+[tcp sockets]: https://iojs.org/dist/v2.3.0/doc/api/net.html#net_class_net_socket
+[zlib streams]: zlib.html
+[zlib]: zlib.html
+[crypto streams]: crypto.html
+[crypto]: crypto.html
+[tls.CryptoStream]: https://iojs.org/dist/v2.3.0/doc/api/tls.html#tls_class_cryptostream
+[process.stdin]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stdin
+[stdout]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stdout
+[process.stdout]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stdout
+[process.stderr]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stderr
+[child process stdout and stderr]: https://iojs.org/dist/v2.3.0/doc/api/child_process.html#child_process_child_stdout
+[API for Stream Consumers]: #stream_api_for_stream_consumers
+[API for Stream Implementors]: #stream_api_for_stream_implementors
+[Readable]: #stream_class_stream_readable
+[Writable]: #stream_class_stream_writable
+[Duplex]: #stream_class_stream_duplex
+[Transform]: #stream_class_stream_transform
+[`end`]: #stream_event_end
+[`finish`]: #stream_event_finish
+[`_read(size)`]: #stream_readable_read_size_1
+[`_read()`]: #stream_readable_read_size_1
+[_read]: #stream_readable_read_size_1
+[`writable.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
+[`write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback
+[`write()`]: #stream_writable_write_chunk_encoding_callback
+[`stream.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
+[`_write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback_1
+[`_write()`]: #stream_writable_write_chunk_encoding_callback_1
+[_write]: #stream_writable_write_chunk_encoding_callback_1
+[`util.inherits`]: https://iojs.org/dist/v2.3.0/doc/api/util.html#util_util_inherits_constructor_superconstructor
+[`end()`]: #stream_writable_end_chunk_encoding_callback
+[`'data'` event]: #stream_event_data
+[`resume()`]: #stream_readable_resume
+[`readable.resume()`]: #stream_readable_resume
+[`pause()`]: #stream_readable_pause
+[`pipe()`]: #stream_readable_pipe_destination_options
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
new file mode 100644
index 000000000..c141a99c2
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
@@ -0,0 +1,58 @@
+# streams WG Meeting 2015-01-30
+
+## Links
+
+* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
+* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
+* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
+
+## Agenda
+
+Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
+
+* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
+* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
+* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
+* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
+
+## Minutes
+
+### adopt a charter
+
+* group: +1's all around
+
+### What versioning scheme should be adopted?
+* group: +1’s 3.0.0
+* domenic+group: pulling in patches from other sources where appropriate
+* mikeal: version independently, suggesting versions for io.js
+* mikeal+domenic: work with TC to notify in advance of changes
+
+### streamline creation of streams
+* sam: streamline creation of streams
+* domenic: nice simple solution posted
+ but, we lose the opportunity to change the model
+ may not be backwards incompatible (double check keys)
+
+ **action item:** domenic will check
+
+### remove implicit flowing of streams on(‘data’)
+* add isFlowing / isPaused
+* mikeal: worrying that we’re documenting polyfill methods – confuses users
+* domenic: more reflective API is probably good, with warning labels for users
+* new section for mad scientists (reflective stream access)
+* calvin: name the “third state”
+* mikeal: maybe borrow the name from whatwg?
+* domenic: we’re missing the “third state”
+* consensus: kind of difficult to name the third state
+* mikeal: figure out differences in states / compat
+* mathias: always flow on data – eliminates third state
+ * explore what it breaks
+
+**action items:**
+* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
+* ask rod/build for infrastructure
+* **chris**: explore the “flow on data” approach
+* add isPaused/isFlowing
+* add new docs section
+* move isPaused to that section
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/duplex.js
new file mode 100644
index 000000000..ca807af87
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/duplex.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_duplex.js")
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js
new file mode 100644
index 000000000..69558af03
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js
@@ -0,0 +1,82 @@
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototypal inheritance, this class
+// prototypally inherits from Readable, and then parasitically from
+// Writable.
+
+'use strict';
+
+/*<replacement>*/
+var objectKeys = Object.keys || function (obj) {
+ var keys = [];
+ for (var key in obj) keys.push(key);
+ return keys;
+}
+/*</replacement>*/
+
+
+module.exports = Duplex;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
+
+util.inherits(Duplex, Readable);
+
+var keys = objectKeys(Writable.prototype);
+for (var v = 0; v < keys.length; v++) {
+ var method = keys[v];
+ if (!Duplex.prototype[method])
+ Duplex.prototype[method] = Writable.prototype[method];
+}
+
+function Duplex(options) {
+ if (!(this instanceof Duplex))
+ return new Duplex(options);
+
+ Readable.call(this, options);
+ Writable.call(this, options);
+
+ if (options && options.readable === false)
+ this.readable = false;
+
+ if (options && options.writable === false)
+ this.writable = false;
+
+ this.allowHalfOpen = true;
+ if (options && options.allowHalfOpen === false)
+ this.allowHalfOpen = false;
+
+ this.once('end', onend);
+}
+
+// the no-half-open enforcer
+function onend() {
+ // if we allow half-open state, or if the writable side ended,
+ // then we're ok.
+ if (this.allowHalfOpen || this._writableState.ended)
+ return;
+
+ // no more data can be written.
+ // But allow more writes to happen in this tick.
+ processNextTick(onEndNT, this);
+}
+
+function onEndNT(self) {
+ self.end();
+}
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js
new file mode 100644
index 000000000..bddfdd015
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js
@@ -0,0 +1,27 @@
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+
+'use strict';
+
+module.exports = PassThrough;
+
+var Transform = require('./_stream_transform');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(PassThrough, Transform);
+
+function PassThrough(options) {
+ if (!(this instanceof PassThrough))
+ return new PassThrough(options);
+
+ Transform.call(this, options);
+}
+
+PassThrough.prototype._transform = function(chunk, encoding, cb) {
+ cb(null, chunk);
+};
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js
new file mode 100644
index 000000000..eef3d825d
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js
@@ -0,0 +1,959 @@
+'use strict';
+
+module.exports = Readable;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var isArray = require('isarray');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var Buffer = require('buffer').Buffer;
+/*</replacement>*/
+
+Readable.ReadableState = ReadableState;
+
+var EE = require('events').EventEmitter;
+
+/*<replacement>*/
+if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
+ return emitter.listeners(type).length;
+};
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var Stream;
+(function (){try{
+ Stream = require('st' + 'ream');
+}catch(_){}finally{
+ if (!Stream)
+ Stream = require('events').EventEmitter;
+}}())
+/*</replacement>*/
+
+var Buffer = require('buffer').Buffer;
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var debug = require('util');
+if (debug && debug.debuglog) {
+ debug = debug.debuglog('stream');
+} else {
+ debug = function () {};
+}
+/*</replacement>*/
+
+var StringDecoder;
+
+util.inherits(Readable, Stream);
+
+function ReadableState(options, stream) {
+ var Duplex = require('./_stream_duplex');
+
+ options = options || {};
+
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex)
+ this.objectMode = this.objectMode || !!options.readableObjectMode;
+
+ // the point at which it stops calling _read() to fill the buffer
+ // Note: 0 is a valid value, means "don't call _read preemptively ever"
+ var hwm = options.highWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+
+ // cast to ints.
+ this.highWaterMark = ~~this.highWaterMark;
+
+ this.buffer = [];
+ this.length = 0;
+ this.pipes = null;
+ this.pipesCount = 0;
+ this.flowing = null;
+ this.ended = false;
+ this.endEmitted = false;
+ this.reading = false;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, because any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+ this.needReadable = false;
+ this.emittedReadable = false;
+ this.readableListening = false;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // when piping, we only care about 'readable' events that happen
+ // after read()ing all the bytes and not getting any pushback.
+ this.ranOut = false;
+
+ // the number of writers that are awaiting a drain event in .pipe()s
+ this.awaitDrain = 0;
+
+ // if true, a maybeReadMore has been scheduled
+ this.readingMore = false;
+
+ this.decoder = null;
+ this.encoding = null;
+ if (options.encoding) {
+ if (!StringDecoder)
+ StringDecoder = require('string_decoder/').StringDecoder;
+ this.decoder = new StringDecoder(options.encoding);
+ this.encoding = options.encoding;
+ }
+}
+
+function Readable(options) {
+ var Duplex = require('./_stream_duplex');
+
+ if (!(this instanceof Readable))
+ return new Readable(options);
+
+ this._readableState = new ReadableState(options, this);
+
+ // legacy
+ this.readable = true;
+
+ if (options && typeof options.read === 'function')
+ this._read = options.read;
+
+ Stream.call(this);
+}
+
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function(chunk, encoding) {
+ var state = this._readableState;
+
+ if (!state.objectMode && typeof chunk === 'string') {
+ encoding = encoding || state.defaultEncoding;
+ if (encoding !== state.encoding) {
+ chunk = new Buffer(chunk, encoding);
+ encoding = '';
+ }
+ }
+
+ return readableAddChunk(this, state, chunk, encoding, false);
+};
+
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function(chunk) {
+ var state = this._readableState;
+ return readableAddChunk(this, state, chunk, '', true);
+};
+
+Readable.prototype.isPaused = function() {
+ return this._readableState.flowing === false;
+};
+
+function readableAddChunk(stream, state, chunk, encoding, addToFront) {
+ var er = chunkInvalid(state, chunk);
+ if (er) {
+ stream.emit('error', er);
+ } else if (chunk === null) {
+ state.reading = false;
+ onEofChunk(stream, state);
+ } else if (state.objectMode || chunk && chunk.length > 0) {
+ if (state.ended && !addToFront) {
+ var e = new Error('stream.push() after EOF');
+ stream.emit('error', e);
+ } else if (state.endEmitted && addToFront) {
+ var e = new Error('stream.unshift() after end event');
+ stream.emit('error', e);
+ } else {
+ if (state.decoder && !addToFront && !encoding)
+ chunk = state.decoder.write(chunk);
+
+ if (!addToFront)
+ state.reading = false;
+
+ // if we want the data now, just emit it.
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront)
+ state.buffer.unshift(chunk);
+ else
+ state.buffer.push(chunk);
+
+ if (state.needReadable)
+ emitReadable(stream);
+ }
+
+ maybeReadMore(stream, state);
+ }
+ } else if (!addToFront) {
+ state.reading = false;
+ }
+
+ return needMoreData(state);
+}
+
+
+
+// if it's past the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes. This is to work around cases where hwm=0,
+// such as the repl. Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+ return !state.ended &&
+ (state.needReadable ||
+ state.length < state.highWaterMark ||
+ state.length === 0);
+}
+
+// backwards compatibility.
+Readable.prototype.setEncoding = function(enc) {
+ if (!StringDecoder)
+ StringDecoder = require('string_decoder/').StringDecoder;
+ this._readableState.decoder = new StringDecoder(enc);
+ this._readableState.encoding = enc;
+ return this;
+};
+
+// Don't raise the hwm > 128MB
+var MAX_HWM = 0x800000;
+function roundUpToNextPowerOf2(n) {
+ if (n >= MAX_HWM) {
+ n = MAX_HWM;
+ } else {
+ // Get the next highest power of 2
+ n--;
+ for (var p = 1; p < 32; p <<= 1) n |= n >> p;
+ n++;
+ }
+ return n;
+}
+
+function howMuchToRead(n, state) {
+ if (state.length === 0 && state.ended)
+ return 0;
+
+ if (state.objectMode)
+ return n === 0 ? 0 : 1;
+
+ if (n === null || isNaN(n)) {
+ // only flow one buffer at a time
+ if (state.flowing && state.buffer.length)
+ return state.buffer[0].length;
+ else
+ return state.length;
+ }
+
+ if (n <= 0)
+ return 0;
+
+ // If we're asking for more than the target buffer level,
+ // then raise the water mark. Bump up to the next highest
+ // power of 2, to prevent increasing it excessively in tiny
+ // amounts.
+ if (n > state.highWaterMark)
+ state.highWaterMark = roundUpToNextPowerOf2(n);
+
+ // don't have that much. return null, unless we've ended.
+ if (n > state.length) {
+ if (!state.ended) {
+ state.needReadable = true;
+ return 0;
+ } else {
+ return state.length;
+ }
+ }
+
+ return n;
+}
+
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function(n) {
+ debug('read', n);
+ var state = this._readableState;
+ var nOrig = n;
+
+ if (typeof n !== 'number' || n > 0)
+ state.emittedReadable = false;
+
+ // if we're doing read(0) to trigger a readable event, but we
+ // already have a bunch of data in the buffer, then just trigger
+ // the 'readable' event and move on.
+ if (n === 0 &&
+ state.needReadable &&
+ (state.length >= state.highWaterMark || state.ended)) {
+ debug('read: emitReadable', state.length, state.ended);
+ if (state.length === 0 && state.ended)
+ endReadable(this);
+ else
+ emitReadable(this);
+ return null;
+ }
+
+ n = howMuchToRead(n, state);
+
+ // if we've ended, and we're now clear, then finish it up.
+ if (n === 0 && state.ended) {
+ if (state.length === 0)
+ endReadable(this);
+ return null;
+ }
+
+ // All the actual chunk generation logic needs to be
+ // *below* the call to _read. The reason is that in certain
+ // synthetic stream cases, such as passthrough streams, _read
+ // may be a completely synchronous operation which may change
+ // the state of the read buffer, providing enough data when
+ // before there was *not* enough.
+ //
+ // So, the steps are:
+ // 1. Figure out what the state of things will be after we do
+ // a read from the buffer.
+ //
+ // 2. If that resulting state will trigger a _read, then call _read.
+ // Note that this may be asynchronous, or synchronous. Yes, it is
+ // deeply ugly to write APIs this way, but that still doesn't mean
+ // that the Readable class should behave improperly, as streams are
+ // designed to be sync/async agnostic.
+ // Take note if the _read call is sync or async (ie, if the read call
+ // has returned yet), so that we know whether or not it's safe to emit
+ // 'readable' etc.
+ //
+ // 3. Actually pull the requested chunks out of the buffer and return.
+
+ // if we need a readable event, then we need to do some reading.
+ var doRead = state.needReadable;
+ debug('need readable', doRead);
+
+ // if we currently have less than the highWaterMark, then also read some
+ if (state.length === 0 || state.length - n < state.highWaterMark) {
+ doRead = true;
+ debug('length less than watermark', doRead);
+ }
+
+ // however, if we've ended, then there's no point, and if we're already
+ // reading, then it's unnecessary.
+ if (state.ended || state.reading) {
+ doRead = false;
+ debug('reading or ended', doRead);
+ }
+
+ if (doRead) {
+ debug('do read');
+ state.reading = true;
+ state.sync = true;
+ // if the length is currently zero, then we *need* a readable event.
+ if (state.length === 0)
+ state.needReadable = true;
+ // call internal read method
+ this._read(state.highWaterMark);
+ state.sync = false;
+ }
+
+ // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
+ if (doRead && !state.reading)
+ n = howMuchToRead(nOrig, state);
+
+ var ret;
+ if (n > 0)
+ ret = fromList(n, state);
+ else
+ ret = null;
+
+ if (ret === null) {
+ state.needReadable = true;
+ n = 0;
+ }
+
+ state.length -= n;
+
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (state.length === 0 && !state.ended)
+ state.needReadable = true;
+
+ // If we tried to read() past the EOF, then emit end on the next tick.
+ if (nOrig !== n && state.ended && state.length === 0)
+ endReadable(this);
+
+ if (ret !== null)
+ this.emit('data', ret);
+
+ return ret;
+};
+
+function chunkInvalid(state, chunk) {
+ var er = null;
+ if (!(Buffer.isBuffer(chunk)) &&
+ typeof chunk !== 'string' &&
+ chunk !== null &&
+ chunk !== undefined &&
+ !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ return er;
+}
+
+
+function onEofChunk(stream, state) {
+ if (state.ended) return;
+ if (state.decoder) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) {
+ state.buffer.push(chunk);
+ state.length += state.objectMode ? 1 : chunk.length;
+ }
+ }
+ state.ended = true;
+
+ // emit 'readable' now to make sure it gets picked up.
+ emitReadable(stream);
+}
+
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow. This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+ var state = stream._readableState;
+ state.needReadable = false;
+ if (!state.emittedReadable) {
+ debug('emitReadable', state.flowing);
+ state.emittedReadable = true;
+ if (state.sync)
+ processNextTick(emitReadable_, stream);
+ else
+ emitReadable_(stream);
+ }
+}
+
+function emitReadable_(stream) {
+ debug('emit readable');
+ stream.emit('readable');
+ flow(stream);
+}
+
+
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+ if (!state.readingMore) {
+ state.readingMore = true;
+ processNextTick(maybeReadMore_, stream, state);
+ }
+}
+
+function maybeReadMore_(stream, state) {
+ var len = state.length;
+ while (!state.reading && !state.flowing && !state.ended &&
+ state.length < state.highWaterMark) {
+ debug('maybeReadMore read 0');
+ stream.read(0);
+ if (len === state.length)
+ // didn't get any data, stop spinning.
+ break;
+ else
+ len = state.length;
+ }
+ state.readingMore = false;
+}
+
+// abstract method. to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function(n) {
+ this.emit('error', new Error('not implemented'));
+};
+
+Readable.prototype.pipe = function(dest, pipeOpts) {
+ var src = this;
+ var state = this._readableState;
+
+ switch (state.pipesCount) {
+ case 0:
+ state.pipes = dest;
+ break;
+ case 1:
+ state.pipes = [state.pipes, dest];
+ break;
+ default:
+ state.pipes.push(dest);
+ break;
+ }
+ state.pipesCount += 1;
+ debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
+
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
+ dest !== process.stdout &&
+ dest !== process.stderr;
+
+ var endFn = doEnd ? onend : cleanup;
+ if (state.endEmitted)
+ processNextTick(endFn);
+ else
+ src.once('end', endFn);
+
+ dest.on('unpipe', onunpipe);
+ function onunpipe(readable) {
+ debug('onunpipe');
+ if (readable === src) {
+ cleanup();
+ }
+ }
+
+ function onend() {
+ debug('onend');
+ dest.end();
+ }
+
+ // when the dest drains, it reduces the awaitDrain counter
+ // on the source. This would be more elegant with a .once()
+ // handler in flow(), but adding and removing repeatedly is
+ // too slow.
+ var ondrain = pipeOnDrain(src);
+ dest.on('drain', ondrain);
+
+ function cleanup() {
+ debug('cleanup');
+ // cleanup event handlers once the pipe is broken
+ dest.removeListener('close', onclose);
+ dest.removeListener('finish', onfinish);
+ dest.removeListener('drain', ondrain);
+ dest.removeListener('error', onerror);
+ dest.removeListener('unpipe', onunpipe);
+ src.removeListener('end', onend);
+ src.removeListener('end', cleanup);
+ src.removeListener('data', ondata);
+
+ // if the reader is waiting for a drain event from this
+ // specific writer, then it would cause it to never start
+ // flowing again.
+ // So, if this is awaiting a drain, then we just call it now.
+ // If we don't know, then assume that we are waiting for one.
+ if (state.awaitDrain &&
+ (!dest._writableState || dest._writableState.needDrain))
+ ondrain();
+ }
+
+ src.on('data', ondata);
+ function ondata(chunk) {
+ debug('ondata');
+ var ret = dest.write(chunk);
+ if (false === ret) {
+ debug('false write response, pause',
+ src._readableState.awaitDrain);
+ src._readableState.awaitDrain++;
+ src.pause();
+ }
+ }
+
+ // if the dest has an error, then stop piping into it.
+ // however, don't suppress the throwing behavior for this.
+ function onerror(er) {
+ debug('onerror', er);
+ unpipe();
+ dest.removeListener('error', onerror);
+ if (EE.listenerCount(dest, 'error') === 0)
+ dest.emit('error', er);
+ }
+ // This is a brutally ugly hack to make sure that our error handler
+ // is attached before any userland ones. NEVER DO THIS.
+ if (!dest._events || !dest._events.error)
+ dest.on('error', onerror);
+ else if (isArray(dest._events.error))
+ dest._events.error.unshift(onerror);
+ else
+ dest._events.error = [onerror, dest._events.error];
+
+
+
+ // Both close and finish should trigger unpipe, but only once.
+ function onclose() {
+ dest.removeListener('finish', onfinish);
+ unpipe();
+ }
+ dest.once('close', onclose);
+ function onfinish() {
+ debug('onfinish');
+ dest.removeListener('close', onclose);
+ unpipe();
+ }
+ dest.once('finish', onfinish);
+
+ function unpipe() {
+ debug('unpipe');
+ src.unpipe(dest);
+ }
+
+ // tell the dest that it's being piped to
+ dest.emit('pipe', src);
+
+ // start the flow if it hasn't been started already.
+ if (!state.flowing) {
+ debug('pipe resume');
+ src.resume();
+ }
+
+ return dest;
+};
+
+function pipeOnDrain(src) {
+ return function() {
+ var state = src._readableState;
+ debug('pipeOnDrain', state.awaitDrain);
+ if (state.awaitDrain)
+ state.awaitDrain--;
+ if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) {
+ state.flowing = true;
+ flow(src);
+ }
+ };
+}
+
+
+Readable.prototype.unpipe = function(dest) {
+ var state = this._readableState;
+
+ // if we're not piping anywhere, then do nothing.
+ if (state.pipesCount === 0)
+ return this;
+
+ // just one destination. most common case.
+ if (state.pipesCount === 1) {
+ // passed in one, but it's not the right one.
+ if (dest && dest !== state.pipes)
+ return this;
+
+ if (!dest)
+ dest = state.pipes;
+
+ // got a match.
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+ if (dest)
+ dest.emit('unpipe', this);
+ return this;
+ }
+
+ // slow case. multiple pipe destinations.
+
+ if (!dest) {
+ // remove all.
+ var dests = state.pipes;
+ var len = state.pipesCount;
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+
+ for (var i = 0; i < len; i++)
+ dests[i].emit('unpipe', this);
+ return this;
+ }
+
+ // try to find the right one.
+ var i = indexOf(state.pipes, dest);
+ if (i === -1)
+ return this;
+
+ state.pipes.splice(i, 1);
+ state.pipesCount -= 1;
+ if (state.pipesCount === 1)
+ state.pipes = state.pipes[0];
+
+ dest.emit('unpipe', this);
+
+ return this;
+};
+
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function(ev, fn) {
+ var res = Stream.prototype.on.call(this, ev, fn);
+
+ // If listening to data, and it has not explicitly been paused,
+ // then call resume to start the flow of data on the next tick.
+ if (ev === 'data' && false !== this._readableState.flowing) {
+ this.resume();
+ }
+
+ if (ev === 'readable' && this.readable) {
+ var state = this._readableState;
+ if (!state.readableListening) {
+ state.readableListening = true;
+ state.emittedReadable = false;
+ state.needReadable = true;
+ if (!state.reading) {
+ processNextTick(nReadingNextTick, this);
+ } else if (state.length) {
+ emitReadable(this, state);
+ }
+ }
+ }
+
+ return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
+
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0');
+ self.read(0);
+}
+
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function() {
+ var state = this._readableState;
+ if (!state.flowing) {
+ debug('resume');
+ state.flowing = true;
+ resume(this, state);
+ }
+ return this;
+};
+
+function resume(stream, state) {
+ if (!state.resumeScheduled) {
+ state.resumeScheduled = true;
+ processNextTick(resume_, stream, state);
+ }
+}
+
+function resume_(stream, state) {
+ if (!state.reading) {
+ debug('resume read 0');
+ stream.read(0);
+ }
+
+ state.resumeScheduled = false;
+ stream.emit('resume');
+ flow(stream);
+ if (state.flowing && !state.reading)
+ stream.read(0);
+}
+
+Readable.prototype.pause = function() {
+ debug('call pause flowing=%j', this._readableState.flowing);
+ if (false !== this._readableState.flowing) {
+ debug('pause');
+ this._readableState.flowing = false;
+ this.emit('pause');
+ }
+ return this;
+};
+
+function flow(stream) {
+ var state = stream._readableState;
+ debug('flow', state.flowing);
+ if (state.flowing) {
+ do {
+ var chunk = stream.read();
+ } while (null !== chunk && state.flowing);
+ }
+}
+
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function(stream) {
+ var state = this._readableState;
+ var paused = false;
+
+ var self = this;
+ stream.on('end', function() {
+ debug('wrapped end');
+ if (state.decoder && !state.ended) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length)
+ self.push(chunk);
+ }
+
+ self.push(null);
+ });
+
+ stream.on('data', function(chunk) {
+ debug('wrapped data');
+ if (state.decoder)
+ chunk = state.decoder.write(chunk);
+
+ // don't skip over falsy values in objectMode
+ if (state.objectMode && (chunk === null || chunk === undefined))
+ return;
+ else if (!state.objectMode && (!chunk || !chunk.length))
+ return;
+
+ var ret = self.push(chunk);
+ if (!ret) {
+ paused = true;
+ stream.pause();
+ }
+ });
+
+ // proxy all the other methods.
+ // important when wrapping filters and duplexes.
+ for (var i in stream) {
+ if (this[i] === undefined && typeof stream[i] === 'function') {
+ this[i] = function(method) { return function() {
+ return stream[method].apply(stream, arguments);
+ }; }(i);
+ }
+ }
+
+ // proxy certain important events.
+ var events = ['error', 'close', 'destroy', 'pause', 'resume'];
+ forEach(events, function(ev) {
+ stream.on(ev, self.emit.bind(self, ev));
+ });
+
+ // when we try to consume some more bytes, simply unpause the
+ // underlying stream.
+ self._read = function(n) {
+ debug('wrapped _read', n);
+ if (paused) {
+ paused = false;
+ stream.resume();
+ }
+ };
+
+ return self;
+};
+
+
+
+// exposed for testing purposes only.
+Readable._fromList = fromList;
+
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+function fromList(n, state) {
+ var list = state.buffer;
+ var length = state.length;
+ var stringMode = !!state.decoder;
+ var objectMode = !!state.objectMode;
+ var ret;
+
+ // nothing in the list, definitely empty.
+ if (list.length === 0)
+ return null;
+
+ if (length === 0)
+ ret = null;
+ else if (objectMode)
+ ret = list.shift();
+ else if (!n || n >= length) {
+ // read it all, truncate the array.
+ if (stringMode)
+ ret = list.join('');
+ else
+ ret = Buffer.concat(list, length);
+ list.length = 0;
+ } else {
+ // read just some of it.
+ if (n < list[0].length) {
+ // just take a part of the first list item.
+ // slice is the same for buffers and strings.
+ var buf = list[0];
+ ret = buf.slice(0, n);
+ list[0] = buf.slice(n);
+ } else if (n === list[0].length) {
+ // first list is a perfect match
+ ret = list.shift();
+ } else {
+ // complex case.
+ // we have enough to cover it, but it spans past the first buffer.
+ if (stringMode)
+ ret = '';
+ else
+ ret = new Buffer(n);
+
+ var c = 0;
+ for (var i = 0, l = list.length; i < l && c < n; i++) {
+ var buf = list[0];
+ var cpy = Math.min(n - c, buf.length);
+
+ if (stringMode)
+ ret += buf.slice(0, cpy);
+ else
+ buf.copy(ret, c, 0, cpy);
+
+ if (cpy < buf.length)
+ list[0] = buf.slice(cpy);
+ else
+ list.shift();
+
+ c += cpy;
+ }
+ }
+ }
+
+ return ret;
+}
+
+function endReadable(stream) {
+ var state = stream._readableState;
+
+ // If we get here before consuming all the bytes, then that is a
+ // bug in node. Should never happen.
+ if (state.length > 0)
+ throw new Error('endReadable called on non-empty stream');
+
+ if (!state.endEmitted) {
+ state.ended = true;
+ processNextTick(endReadableNT, state, stream);
+ }
+}
+
+function endReadableNT(state, stream) {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
+ }
+}
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
+
+function indexOf (xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js
new file mode 100644
index 000000000..3675d18d9
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js
@@ -0,0 +1,197 @@
+// a transform stream is a readable/writable stream where you do
+// something with the data. Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored. (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation. For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes. When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up. When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer. When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks. If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk. However,
+// a pathological inflate type of transform can cause excessive buffering
+// here. For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output. In this case, you could write a very small
+// amount of input, and end up with a very large amount of output. In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform. A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
+
+'use strict';
+
+module.exports = Transform;
+
+var Duplex = require('./_stream_duplex');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(Transform, Duplex);
+
+
+function TransformState(stream) {
+ this.afterTransform = function(er, data) {
+ return afterTransform(stream, er, data);
+ };
+
+ this.needTransform = false;
+ this.transforming = false;
+ this.writecb = null;
+ this.writechunk = null;
+}
+
+function afterTransform(stream, er, data) {
+ var ts = stream._transformState;
+ ts.transforming = false;
+
+ var cb = ts.writecb;
+
+ if (!cb)
+ return stream.emit('error', new Error('no writecb in Transform class'));
+
+ ts.writechunk = null;
+ ts.writecb = null;
+
+ if (data !== null && data !== undefined)
+ stream.push(data);
+
+ if (cb)
+ cb(er);
+
+ var rs = stream._readableState;
+ rs.reading = false;
+ if (rs.needReadable || rs.length < rs.highWaterMark) {
+ stream._read(rs.highWaterMark);
+ }
+}
+
+
+function Transform(options) {
+ if (!(this instanceof Transform))
+ return new Transform(options);
+
+ Duplex.call(this, options);
+
+ this._transformState = new TransformState(this);
+
+ // when the writable side finishes, then flush out anything remaining.
+ var stream = this;
+
+ // start out asking for a readable event once data is transformed.
+ this._readableState.needReadable = true;
+
+ // we have implemented the _read method, and done the other things
+ // that Readable wants before the first _read call, so unset the
+ // sync guard flag.
+ this._readableState.sync = false;
+
+ if (options) {
+ if (typeof options.transform === 'function')
+ this._transform = options.transform;
+
+ if (typeof options.flush === 'function')
+ this._flush = options.flush;
+ }
+
+ this.once('prefinish', function() {
+ if (typeof this._flush === 'function')
+ this._flush(function(er) {
+ done(stream, er);
+ });
+ else
+ done(stream);
+ });
+}
+
+Transform.prototype.push = function(chunk, encoding) {
+ this._transformState.needTransform = false;
+ return Duplex.prototype.push.call(this, chunk, encoding);
+};
+
+// This is the part where you do stuff!
+// override this function in implementation classes.
+// 'chunk' is an input chunk.
+//
+// Call `push(newChunk)` to pass along transformed output
+// to the readable side. You may call 'push' zero or more times.
+//
+// Call `cb(err)` when you are done with this chunk. If you pass
+// an error, then that'll put the hurt on the whole operation. If you
+// never call cb(), then you'll never get another chunk.
+Transform.prototype._transform = function(chunk, encoding, cb) {
+ throw new Error('not implemented');
+};
+
+Transform.prototype._write = function(chunk, encoding, cb) {
+ var ts = this._transformState;
+ ts.writecb = cb;
+ ts.writechunk = chunk;
+ ts.writeencoding = encoding;
+ if (!ts.transforming) {
+ var rs = this._readableState;
+ if (ts.needTransform ||
+ rs.needReadable ||
+ rs.length < rs.highWaterMark)
+ this._read(rs.highWaterMark);
+ }
+};
+
+// Doesn't matter what the args are here.
+// _transform does all the work.
+// That we got here means that the readable side wants more data.
+Transform.prototype._read = function(n) {
+ var ts = this._transformState;
+
+ if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
+ ts.transforming = true;
+ this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
+ } else {
+ // mark that we need a transform, so that any data that comes in
+ // will get processed, now that we've asked for it.
+ ts.needTransform = true;
+ }
+};
+
+
+function done(stream, er) {
+ if (er)
+ return stream.emit('error', er);
+
+ // if there's nothing in the write buffer, then that means
+ // that nothing more will ever be provided
+ var ws = stream._writableState;
+ var ts = stream._transformState;
+
+ if (ws.length)
+ throw new Error('calling transform done when ws.length != 0');
+
+ if (ts.transforming)
+ throw new Error('calling transform done when still transforming');
+
+ return stream.push(null);
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js
new file mode 100644
index 000000000..b23295201
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js
@@ -0,0 +1,520 @@
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, cb), and it'll handle all
+// the drain event emission and buffering.
+
+'use strict';
+
+module.exports = Writable;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var Buffer = require('buffer').Buffer;
+/*</replacement>*/
+
+Writable.WritableState = WritableState;
+
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+
+
+/*<replacement>*/
+var Stream;
+(function (){try{
+ Stream = require('st' + 'ream');
+}catch(_){}finally{
+ if (!Stream)
+ Stream = require('events').EventEmitter;
+}}())
+/*</replacement>*/
+
+var Buffer = require('buffer').Buffer;
+
+util.inherits(Writable, Stream);
+
+function nop() {}
+
+function WriteReq(chunk, encoding, cb) {
+ this.chunk = chunk;
+ this.encoding = encoding;
+ this.callback = cb;
+ this.next = null;
+}
+
+function WritableState(options, stream) {
+ var Duplex = require('./_stream_duplex');
+
+ options = options || {};
+
+ // object stream flag to indicate whether or not this stream
+ // contains buffers or objects.
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex)
+ this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+ // the point at which write() starts returning false
+ // Note: 0 is a valid value, means that we always return false if
+ // the entire buffer is not flushed immediately on write()
+ var hwm = options.highWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+
+ // cast to ints.
+ this.highWaterMark = ~~this.highWaterMark;
+
+ this.needDrain = false;
+ // at the start of calling end()
+ this.ending = false;
+ // when end() has been called, and returned
+ this.ended = false;
+ // when 'finish' is emitted
+ this.finished = false;
+
+ // should we decode strings into buffers before passing to _write?
+ // this is here so that some node-core streams can optimize string
+ // handling at a lower level.
+ var noDecode = options.decodeStrings === false;
+ this.decodeStrings = !noDecode;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // not an actual buffer we keep track of, but a measurement
+ // of how much we're waiting to get pushed to some underlying
+ // socket or file.
+ this.length = 0;
+
+ // a flag to see when we're in the middle of a write.
+ this.writing = false;
+
+ // when true all writes will be buffered until .uncork() call
+ this.corked = 0;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, because any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // a flag to know if we're processing previously buffered items, which
+ // may call the _write() callback in the same tick, so that we don't
+ // end up in an overlapped onwrite situation.
+ this.bufferProcessing = false;
+
+ // the callback that's passed to _write(chunk,cb)
+ this.onwrite = function(er) {
+ onwrite(stream, er);
+ };
+
+ // the callback that the user supplies to write(chunk,encoding,cb)
+ this.writecb = null;
+
+ // the amount that is being written when _write is called.
+ this.writelen = 0;
+
+ this.bufferedRequest = null;
+ this.lastBufferedRequest = null;
+
+ // number of pending user-supplied write callbacks
+ // this must be 0 before 'finish' can be emitted
+ this.pendingcb = 0;
+
+ // emit prefinish if the only thing we're waiting for is _write cbs
+ // This is relevant for synchronous Transform streams
+ this.prefinished = false;
+
+ // True if the error was already emitted and should not be thrown again
+ this.errorEmitted = false;
+}
+
+WritableState.prototype.getBuffer = function writableStateGetBuffer() {
+ var current = this.bufferedRequest;
+ var out = [];
+ while (current) {
+ out.push(current);
+ current = current.next;
+ }
+ return out;
+};
+
+(function (){try {
+Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: require('util-deprecate')(function() {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use ' +
+ '_writableState.getBuffer() instead.')
+});
+}catch(_){}}());
+
+
+function Writable(options) {
+ var Duplex = require('./_stream_duplex');
+
+ // Writable ctor is applied to Duplexes, though they're not
+ // instanceof Writable, they're instanceof Readable.
+ if (!(this instanceof Writable) && !(this instanceof Duplex))
+ return new Writable(options);
+
+ this._writableState = new WritableState(options, this);
+
+ // legacy.
+ this.writable = true;
+
+ if (options) {
+ if (typeof options.write === 'function')
+ this._write = options.write;
+
+ if (typeof options.writev === 'function')
+ this._writev = options.writev;
+ }
+
+ Stream.call(this);
+}
+
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function() {
+ this.emit('error', new Error('Cannot pipe. Not readable.'));
+};
+
+
+function writeAfterEnd(stream, cb) {
+ var er = new Error('write after end');
+ // TODO: defer error events consistently everywhere, not just the cb
+ stream.emit('error', er);
+ processNextTick(cb, er);
+}
+
+// If we get something that is not a buffer, string, null, or undefined,
+// and we're not in objectMode, then that's an error.
+// Otherwise stream chunks are all considered to be of length=1, and the
+// watermarks determine how many objects to keep in the buffer, rather than
+// how many bytes or characters.
+function validChunk(stream, state, chunk, cb) {
+ var valid = true;
+
+ if (!(Buffer.isBuffer(chunk)) &&
+ typeof chunk !== 'string' &&
+ chunk !== null &&
+ chunk !== undefined &&
+ !state.objectMode) {
+ var er = new TypeError('Invalid non-string/buffer chunk');
+ stream.emit('error', er);
+ processNextTick(cb, er);
+ valid = false;
+ }
+ return valid;
+}
+
+Writable.prototype.write = function(chunk, encoding, cb) {
+ var state = this._writableState;
+ var ret = false;
+
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (Buffer.isBuffer(chunk))
+ encoding = 'buffer';
+ else if (!encoding)
+ encoding = state.defaultEncoding;
+
+ if (typeof cb !== 'function')
+ cb = nop;
+
+ if (state.ended)
+ writeAfterEnd(this, cb);
+ else if (validChunk(this, state, chunk, cb)) {
+ state.pendingcb++;
+ ret = writeOrBuffer(this, state, chunk, encoding, cb);
+ }
+
+ return ret;
+};
+
+Writable.prototype.cork = function() {
+ var state = this._writableState;
+
+ state.corked++;
+};
+
+Writable.prototype.uncork = function() {
+ var state = this._writableState;
+
+ if (state.corked) {
+ state.corked--;
+
+ if (!state.writing &&
+ !state.corked &&
+ !state.finished &&
+ !state.bufferProcessing &&
+ state.bufferedRequest)
+ clearBuffer(this, state);
+ }
+};
+
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+ // node::ParseEncoding() requires lower case.
+ if (typeof encoding === 'string')
+ encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
+'ucs2', 'ucs-2','utf16le', 'utf-16le', 'raw']
+.indexOf((encoding + '').toLowerCase()) > -1))
+ throw new TypeError('Unknown encoding: ' + encoding);
+ this._writableState.defaultEncoding = encoding;
+};
+
+function decodeChunk(state, chunk, encoding) {
+ if (!state.objectMode &&
+ state.decodeStrings !== false &&
+ typeof chunk === 'string') {
+ chunk = new Buffer(chunk, encoding);
+ }
+ return chunk;
+}
+
+// if we're already writing something, then just put this
+// in the queue, and wait our turn. Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, chunk, encoding, cb) {
+ chunk = decodeChunk(state, chunk, encoding);
+
+ if (Buffer.isBuffer(chunk))
+ encoding = 'buffer';
+ var len = state.objectMode ? 1 : chunk.length;
+
+ state.length += len;
+
+ var ret = state.length < state.highWaterMark;
+ // we must ensure that previous needDrain will not be reset to false.
+ if (!ret)
+ state.needDrain = true;
+
+ if (state.writing || state.corked) {
+ var last = state.lastBufferedRequest;
+ state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
+ if (last) {
+ last.next = state.lastBufferedRequest;
+ } else {
+ state.bufferedRequest = state.lastBufferedRequest;
+ }
+ } else {
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ }
+
+ return ret;
+}
+
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+ state.writelen = len;
+ state.writecb = cb;
+ state.writing = true;
+ state.sync = true;
+ if (writev)
+ stream._writev(chunk, state.onwrite);
+ else
+ stream._write(chunk, encoding, state.onwrite);
+ state.sync = false;
+}
+
+function onwriteError(stream, state, sync, er, cb) {
+ --state.pendingcb;
+ if (sync)
+ processNextTick(cb, er);
+ else
+ cb(er);
+
+ stream._writableState.errorEmitted = true;
+ stream.emit('error', er);
+}
+
+function onwriteStateUpdate(state) {
+ state.writing = false;
+ state.writecb = null;
+ state.length -= state.writelen;
+ state.writelen = 0;
+}
+
+function onwrite(stream, er) {
+ var state = stream._writableState;
+ var sync = state.sync;
+ var cb = state.writecb;
+
+ onwriteStateUpdate(state);
+
+ if (er)
+ onwriteError(stream, state, sync, er, cb);
+ else {
+ // Check if we're actually ready to finish, but don't emit yet
+ var finished = needFinish(state);
+
+ if (!finished &&
+ !state.corked &&
+ !state.bufferProcessing &&
+ state.bufferedRequest) {
+ clearBuffer(stream, state);
+ }
+
+ if (sync) {
+ processNextTick(afterWrite, stream, state, finished, cb);
+ } else {
+ afterWrite(stream, state, finished, cb);
+ }
+ }
+}
+
+function afterWrite(stream, state, finished, cb) {
+ if (!finished)
+ onwriteDrain(stream, state);
+ state.pendingcb--;
+ cb();
+ finishMaybe(stream, state);
+}
+
+// Must force callback to be called on nextTick, so that we don't
+// emit 'drain' before the write() consumer gets the 'false' return
+// value, and has a chance to attach a 'drain' listener.
+function onwriteDrain(stream, state) {
+ if (state.length === 0 && state.needDrain) {
+ state.needDrain = false;
+ stream.emit('drain');
+ }
+}
+
+
+// if there's something in the buffer waiting, then process it
+function clearBuffer(stream, state) {
+ state.bufferProcessing = true;
+ var entry = state.bufferedRequest;
+
+ if (stream._writev && entry && entry.next) {
+ // Fast case, write everything using _writev()
+ var buffer = [];
+ var cbs = [];
+ while (entry) {
+ cbs.push(entry.callback);
+ buffer.push(entry);
+ entry = entry.next;
+ }
+
+ // count the one we are adding, as well.
+ // TODO(isaacs) clean this up
+ state.pendingcb++;
+ state.lastBufferedRequest = null;
+ doWrite(stream, state, true, state.length, buffer, '', function(err) {
+ for (var i = 0; i < cbs.length; i++) {
+ state.pendingcb--;
+ cbs[i](err);
+ }
+ });
+
+ // Clear buffer
+ } else {
+ // Slow case, write chunks one-by-one
+ while (entry) {
+ var chunk = entry.chunk;
+ var encoding = entry.encoding;
+ var cb = entry.callback;
+ var len = state.objectMode ? 1 : chunk.length;
+
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ entry = entry.next;
+ // if we didn't call the onwrite immediately, then
+ // it means that we need to wait until it does.
+ // also, that means that the chunk and cb are currently
+ // being processed, so move the buffer counter past them.
+ if (state.writing) {
+ break;
+ }
+ }
+
+ if (entry === null)
+ state.lastBufferedRequest = null;
+ }
+ state.bufferedRequest = entry;
+ state.bufferProcessing = false;
+}
+
+Writable.prototype._write = function(chunk, encoding, cb) {
+ cb(new Error('not implemented'));
+};
+
+Writable.prototype._writev = null;
+
+Writable.prototype.end = function(chunk, encoding, cb) {
+ var state = this._writableState;
+
+ if (typeof chunk === 'function') {
+ cb = chunk;
+ chunk = null;
+ encoding = null;
+ } else if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (chunk !== null && chunk !== undefined)
+ this.write(chunk, encoding);
+
+ // .end() fully uncorks
+ if (state.corked) {
+ state.corked = 1;
+ this.uncork();
+ }
+
+ // ignore unnecessary end() calls.
+ if (!state.ending && !state.finished)
+ endWritable(this, state, cb);
+};
+
+
+function needFinish(state) {
+ return (state.ending &&
+ state.length === 0 &&
+ state.bufferedRequest === null &&
+ !state.finished &&
+ !state.writing);
+}
+
+function prefinish(stream, state) {
+ if (!state.prefinished) {
+ state.prefinished = true;
+ stream.emit('prefinish');
+ }
+}
+
+function finishMaybe(stream, state) {
+ var need = needFinish(state);
+ if (need) {
+ if (state.pendingcb === 0) {
+ prefinish(stream, state);
+ state.finished = true;
+ stream.emit('finish');
+ } else {
+ prefinish(stream, state);
+ }
+ }
+ return need;
+}
+
+function endWritable(stream, state, cb) {
+ state.ending = true;
+ finishMaybe(stream, state);
+ if (cb) {
+ if (state.finished)
+ processNextTick(cb);
+ else
+ stream.once('finish', cb);
+ }
+ state.ended = true;
+}
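For context: the functions above implement the buffered write path of readable-stream's Writable (doWrite/onwrite drive each chunk, clearBuffer drains the bufferedRequest chain, and finishMaybe emits 'prefinish'/'finish' once end() has been called and the buffer is empty). A minimal consumer-side sketch, using the core stream module (which exposes the same Writable interface) with illustrative names:

```js
var Writable = require('stream').Writable;
var util = require('util');

function SlowSink(options) {
  Writable.call(this, options);
}
util.inherits(SlowSink, Writable);

// _write is the hook the machinery above drives via doWrite()/onwrite().
SlowSink.prototype._write = function (chunk, encoding, callback) {
  setTimeout(callback, 10); // pretend each chunk takes 10ms to flush
};

var sink = new SlowSink({ highWaterMark: 16 });

sink.on('drain', function () {
  // emitted by onwriteDrain() once the buffered chain has been flushed
  console.log('buffer drained');
  sink.end('last chunk'); // sets state.ending; finishMaybe() emits 'finish'
});
sink.on('finish', function () {
  console.log('all writes flushed');
});

// write() returning false means the chunk joined the bufferedRequest chain
// and the caller should wait for 'drain' before writing more.
while (sink.write('x')) {}
```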
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md
new file mode 100644
index 000000000..5a76b4149
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md
@@ -0,0 +1,3 @@
+# core-util-is
+
+The `util.is*` functions introduced in Node v0.12.
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch
new file mode 100644
index 000000000..a06d5c05f
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch
@@ -0,0 +1,604 @@
+diff --git a/lib/util.js b/lib/util.js
+index a03e874..9074e8e 100644
+--- a/lib/util.js
++++ b/lib/util.js
+@@ -19,430 +19,6 @@
+ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+ // USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+-var formatRegExp = /%[sdj%]/g;
+-exports.format = function(f) {
+- if (!isString(f)) {
+- var objects = [];
+- for (var i = 0; i < arguments.length; i++) {
+- objects.push(inspect(arguments[i]));
+- }
+- return objects.join(' ');
+- }
+-
+- var i = 1;
+- var args = arguments;
+- var len = args.length;
+- var str = String(f).replace(formatRegExp, function(x) {
+- if (x === '%%') return '%';
+- if (i >= len) return x;
+- switch (x) {
+- case '%s': return String(args[i++]);
+- case '%d': return Number(args[i++]);
+- case '%j':
+- try {
+- return JSON.stringify(args[i++]);
+- } catch (_) {
+- return '[Circular]';
+- }
+- default:
+- return x;
+- }
+- });
+- for (var x = args[i]; i < len; x = args[++i]) {
+- if (isNull(x) || !isObject(x)) {
+- str += ' ' + x;
+- } else {
+- str += ' ' + inspect(x);
+- }
+- }
+- return str;
+-};
+-
+-
+-// Mark that a method should not be used.
+-// Returns a modified function which warns once by default.
+-// If --no-deprecation is set, then it is a no-op.
+-exports.deprecate = function(fn, msg) {
+- // Allow for deprecating things in the process of starting up.
+- if (isUndefined(global.process)) {
+- return function() {
+- return exports.deprecate(fn, msg).apply(this, arguments);
+- };
+- }
+-
+- if (process.noDeprecation === true) {
+- return fn;
+- }
+-
+- var warned = false;
+- function deprecated() {
+- if (!warned) {
+- if (process.throwDeprecation) {
+- throw new Error(msg);
+- } else if (process.traceDeprecation) {
+- console.trace(msg);
+- } else {
+- console.error(msg);
+- }
+- warned = true;
+- }
+- return fn.apply(this, arguments);
+- }
+-
+- return deprecated;
+-};
+-
+-
+-var debugs = {};
+-var debugEnviron;
+-exports.debuglog = function(set) {
+- if (isUndefined(debugEnviron))
+- debugEnviron = process.env.NODE_DEBUG || '';
+- set = set.toUpperCase();
+- if (!debugs[set]) {
+- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
+- var pid = process.pid;
+- debugs[set] = function() {
+- var msg = exports.format.apply(exports, arguments);
+- console.error('%s %d: %s', set, pid, msg);
+- };
+- } else {
+- debugs[set] = function() {};
+- }
+- }
+- return debugs[set];
+-};
+-
+-
+-/**
+- * Echos the value of a value. Trys to print the value out
+- * in the best way possible given the different types.
+- *
+- * @param {Object} obj The object to print out.
+- * @param {Object} opts Optional options object that alters the output.
+- */
+-/* legacy: obj, showHidden, depth, colors*/
+-function inspect(obj, opts) {
+- // default options
+- var ctx = {
+- seen: [],
+- stylize: stylizeNoColor
+- };
+- // legacy...
+- if (arguments.length >= 3) ctx.depth = arguments[2];
+- if (arguments.length >= 4) ctx.colors = arguments[3];
+- if (isBoolean(opts)) {
+- // legacy...
+- ctx.showHidden = opts;
+- } else if (opts) {
+- // got an "options" object
+- exports._extend(ctx, opts);
+- }
+- // set default options
+- if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
+- if (isUndefined(ctx.depth)) ctx.depth = 2;
+- if (isUndefined(ctx.colors)) ctx.colors = false;
+- if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
+- if (ctx.colors) ctx.stylize = stylizeWithColor;
+- return formatValue(ctx, obj, ctx.depth);
+-}
+-exports.inspect = inspect;
+-
+-
+-// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
+-inspect.colors = {
+- 'bold' : [1, 22],
+- 'italic' : [3, 23],
+- 'underline' : [4, 24],
+- 'inverse' : [7, 27],
+- 'white' : [37, 39],
+- 'grey' : [90, 39],
+- 'black' : [30, 39],
+- 'blue' : [34, 39],
+- 'cyan' : [36, 39],
+- 'green' : [32, 39],
+- 'magenta' : [35, 39],
+- 'red' : [31, 39],
+- 'yellow' : [33, 39]
+-};
+-
+-// Don't use 'blue' not visible on cmd.exe
+-inspect.styles = {
+- 'special': 'cyan',
+- 'number': 'yellow',
+- 'boolean': 'yellow',
+- 'undefined': 'grey',
+- 'null': 'bold',
+- 'string': 'green',
+- 'date': 'magenta',
+- // "name": intentionally not styling
+- 'regexp': 'red'
+-};
+-
+-
+-function stylizeWithColor(str, styleType) {
+- var style = inspect.styles[styleType];
+-
+- if (style) {
+- return '\u001b[' + inspect.colors[style][0] + 'm' + str +
+- '\u001b[' + inspect.colors[style][1] + 'm';
+- } else {
+- return str;
+- }
+-}
+-
+-
+-function stylizeNoColor(str, styleType) {
+- return str;
+-}
+-
+-
+-function arrayToHash(array) {
+- var hash = {};
+-
+- array.forEach(function(val, idx) {
+- hash[val] = true;
+- });
+-
+- return hash;
+-}
+-
+-
+-function formatValue(ctx, value, recurseTimes) {
+- // Provide a hook for user-specified inspect functions.
+- // Check that value is an object with an inspect function on it
+- if (ctx.customInspect &&
+- value &&
+- isFunction(value.inspect) &&
+- // Filter out the util module, it's inspect function is special
+- value.inspect !== exports.inspect &&
+- // Also filter out any prototype objects using the circular check.
+- !(value.constructor && value.constructor.prototype === value)) {
+- var ret = value.inspect(recurseTimes, ctx);
+- if (!isString(ret)) {
+- ret = formatValue(ctx, ret, recurseTimes);
+- }
+- return ret;
+- }
+-
+- // Primitive types cannot have properties
+- var primitive = formatPrimitive(ctx, value);
+- if (primitive) {
+- return primitive;
+- }
+-
+- // Look up the keys of the object.
+- var keys = Object.keys(value);
+- var visibleKeys = arrayToHash(keys);
+-
+- if (ctx.showHidden) {
+- keys = Object.getOwnPropertyNames(value);
+- }
+-
+- // Some type of object without properties can be shortcutted.
+- if (keys.length === 0) {
+- if (isFunction(value)) {
+- var name = value.name ? ': ' + value.name : '';
+- return ctx.stylize('[Function' + name + ']', 'special');
+- }
+- if (isRegExp(value)) {
+- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
+- }
+- if (isDate(value)) {
+- return ctx.stylize(Date.prototype.toString.call(value), 'date');
+- }
+- if (isError(value)) {
+- return formatError(value);
+- }
+- }
+-
+- var base = '', array = false, braces = ['{', '}'];
+-
+- // Make Array say that they are Array
+- if (isArray(value)) {
+- array = true;
+- braces = ['[', ']'];
+- }
+-
+- // Make functions say that they are functions
+- if (isFunction(value)) {
+- var n = value.name ? ': ' + value.name : '';
+- base = ' [Function' + n + ']';
+- }
+-
+- // Make RegExps say that they are RegExps
+- if (isRegExp(value)) {
+- base = ' ' + RegExp.prototype.toString.call(value);
+- }
+-
+- // Make dates with properties first say the date
+- if (isDate(value)) {
+- base = ' ' + Date.prototype.toUTCString.call(value);
+- }
+-
+- // Make error with message first say the error
+- if (isError(value)) {
+- base = ' ' + formatError(value);
+- }
+-
+- if (keys.length === 0 && (!array || value.length == 0)) {
+- return braces[0] + base + braces[1];
+- }
+-
+- if (recurseTimes < 0) {
+- if (isRegExp(value)) {
+- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
+- } else {
+- return ctx.stylize('[Object]', 'special');
+- }
+- }
+-
+- ctx.seen.push(value);
+-
+- var output;
+- if (array) {
+- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
+- } else {
+- output = keys.map(function(key) {
+- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
+- });
+- }
+-
+- ctx.seen.pop();
+-
+- return reduceToSingleString(output, base, braces);
+-}
+-
+-
+-function formatPrimitive(ctx, value) {
+- if (isUndefined(value))
+- return ctx.stylize('undefined', 'undefined');
+- if (isString(value)) {
+- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
+- .replace(/'/g, "\\'")
+- .replace(/\\"/g, '"') + '\'';
+- return ctx.stylize(simple, 'string');
+- }
+- if (isNumber(value)) {
+- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0,
+- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 .
+- if (value === 0 && 1 / value < 0)
+- return ctx.stylize('-0', 'number');
+- return ctx.stylize('' + value, 'number');
+- }
+- if (isBoolean(value))
+- return ctx.stylize('' + value, 'boolean');
+- // For some reason typeof null is "object", so special case here.
+- if (isNull(value))
+- return ctx.stylize('null', 'null');
+-}
+-
+-
+-function formatError(value) {
+- return '[' + Error.prototype.toString.call(value) + ']';
+-}
+-
+-
+-function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
+- var output = [];
+- for (var i = 0, l = value.length; i < l; ++i) {
+- if (hasOwnProperty(value, String(i))) {
+- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
+- String(i), true));
+- } else {
+- output.push('');
+- }
+- }
+- keys.forEach(function(key) {
+- if (!key.match(/^\d+$/)) {
+- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
+- key, true));
+- }
+- });
+- return output;
+-}
+-
+-
+-function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
+- var name, str, desc;
+- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
+- if (desc.get) {
+- if (desc.set) {
+- str = ctx.stylize('[Getter/Setter]', 'special');
+- } else {
+- str = ctx.stylize('[Getter]', 'special');
+- }
+- } else {
+- if (desc.set) {
+- str = ctx.stylize('[Setter]', 'special');
+- }
+- }
+- if (!hasOwnProperty(visibleKeys, key)) {
+- name = '[' + key + ']';
+- }
+- if (!str) {
+- if (ctx.seen.indexOf(desc.value) < 0) {
+- if (isNull(recurseTimes)) {
+- str = formatValue(ctx, desc.value, null);
+- } else {
+- str = formatValue(ctx, desc.value, recurseTimes - 1);
+- }
+- if (str.indexOf('\n') > -1) {
+- if (array) {
+- str = str.split('\n').map(function(line) {
+- return ' ' + line;
+- }).join('\n').substr(2);
+- } else {
+- str = '\n' + str.split('\n').map(function(line) {
+- return ' ' + line;
+- }).join('\n');
+- }
+- }
+- } else {
+- str = ctx.stylize('[Circular]', 'special');
+- }
+- }
+- if (isUndefined(name)) {
+- if (array && key.match(/^\d+$/)) {
+- return str;
+- }
+- name = JSON.stringify('' + key);
+- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
+- name = name.substr(1, name.length - 2);
+- name = ctx.stylize(name, 'name');
+- } else {
+- name = name.replace(/'/g, "\\'")
+- .replace(/\\"/g, '"')
+- .replace(/(^"|"$)/g, "'");
+- name = ctx.stylize(name, 'string');
+- }
+- }
+-
+- return name + ': ' + str;
+-}
+-
+-
+-function reduceToSingleString(output, base, braces) {
+- var numLinesEst = 0;
+- var length = output.reduce(function(prev, cur) {
+- numLinesEst++;
+- if (cur.indexOf('\n') >= 0) numLinesEst++;
+- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
+- }, 0);
+-
+- if (length > 60) {
+- return braces[0] +
+- (base === '' ? '' : base + '\n ') +
+- ' ' +
+- output.join(',\n ') +
+- ' ' +
+- braces[1];
+- }
+-
+- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
+-}
+-
+-
+ // NOTE: These type checking functions intentionally don't use `instanceof`
+ // because it is fragile and can be easily faked with `Object.create()`.
+ function isArray(ar) {
+@@ -522,166 +98,10 @@ function isPrimitive(arg) {
+ exports.isPrimitive = isPrimitive;
+
+ function isBuffer(arg) {
+- return arg instanceof Buffer;
++ return Buffer.isBuffer(arg);
+ }
+ exports.isBuffer = isBuffer;
+
+ function objectToString(o) {
+ return Object.prototype.toString.call(o);
+-}
+-
+-
+-function pad(n) {
+- return n < 10 ? '0' + n.toString(10) : n.toString(10);
+-}
+-
+-
+-var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
+- 'Oct', 'Nov', 'Dec'];
+-
+-// 26 Feb 16:19:34
+-function timestamp() {
+- var d = new Date();
+- var time = [pad(d.getHours()),
+- pad(d.getMinutes()),
+- pad(d.getSeconds())].join(':');
+- return [d.getDate(), months[d.getMonth()], time].join(' ');
+-}
+-
+-
+-// log is just a thin wrapper to console.log that prepends a timestamp
+-exports.log = function() {
+- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments));
+-};
+-
+-
+-/**
+- * Inherit the prototype methods from one constructor into another.
+- *
+- * The Function.prototype.inherits from lang.js rewritten as a standalone
+- * function (not on Function.prototype). NOTE: If this file is to be loaded
+- * during bootstrapping this function needs to be rewritten using some native
+- * functions as prototype setup using normal JavaScript does not work as
+- * expected during bootstrapping (see mirror.js in r114903).
+- *
+- * @param {function} ctor Constructor function which needs to inherit the
+- * prototype.
+- * @param {function} superCtor Constructor function to inherit prototype from.
+- */
+-exports.inherits = function(ctor, superCtor) {
+- ctor.super_ = superCtor;
+- ctor.prototype = Object.create(superCtor.prototype, {
+- constructor: {
+- value: ctor,
+- enumerable: false,
+- writable: true,
+- configurable: true
+- }
+- });
+-};
+-
+-exports._extend = function(origin, add) {
+- // Don't do anything if add isn't an object
+- if (!add || !isObject(add)) return origin;
+-
+- var keys = Object.keys(add);
+- var i = keys.length;
+- while (i--) {
+- origin[keys[i]] = add[keys[i]];
+- }
+- return origin;
+-};
+-
+-function hasOwnProperty(obj, prop) {
+- return Object.prototype.hasOwnProperty.call(obj, prop);
+-}
+-
+-
+-// Deprecated old stuff.
+-
+-exports.p = exports.deprecate(function() {
+- for (var i = 0, len = arguments.length; i < len; ++i) {
+- console.error(exports.inspect(arguments[i]));
+- }
+-}, 'util.p: Use console.error() instead');
+-
+-
+-exports.exec = exports.deprecate(function() {
+- return require('child_process').exec.apply(this, arguments);
+-}, 'util.exec is now called `child_process.exec`.');
+-
+-
+-exports.print = exports.deprecate(function() {
+- for (var i = 0, len = arguments.length; i < len; ++i) {
+- process.stdout.write(String(arguments[i]));
+- }
+-}, 'util.print: Use console.log instead');
+-
+-
+-exports.puts = exports.deprecate(function() {
+- for (var i = 0, len = arguments.length; i < len; ++i) {
+- process.stdout.write(arguments[i] + '\n');
+- }
+-}, 'util.puts: Use console.log instead');
+-
+-
+-exports.debug = exports.deprecate(function(x) {
+- process.stderr.write('DEBUG: ' + x + '\n');
+-}, 'util.debug: Use console.error instead');
+-
+-
+-exports.error = exports.deprecate(function(x) {
+- for (var i = 0, len = arguments.length; i < len; ++i) {
+- process.stderr.write(arguments[i] + '\n');
+- }
+-}, 'util.error: Use console.error instead');
+-
+-
+-exports.pump = exports.deprecate(function(readStream, writeStream, callback) {
+- var callbackCalled = false;
+-
+- function call(a, b, c) {
+- if (callback && !callbackCalled) {
+- callback(a, b, c);
+- callbackCalled = true;
+- }
+- }
+-
+- readStream.addListener('data', function(chunk) {
+- if (writeStream.write(chunk) === false) readStream.pause();
+- });
+-
+- writeStream.addListener('drain', function() {
+- readStream.resume();
+- });
+-
+- readStream.addListener('end', function() {
+- writeStream.end();
+- });
+-
+- readStream.addListener('close', function() {
+- call();
+- });
+-
+- readStream.addListener('error', function(err) {
+- writeStream.end();
+- call(err);
+- });
+-
+- writeStream.addListener('error', function(err) {
+- readStream.destroy();
+- call(err);
+- });
+-}, 'util.pump(): Use readableStream.pipe() instead');
+-
+-
+-var uv;
+-exports._errnoException = function(err, syscall) {
+- if (isUndefined(uv)) uv = process.binding('uv');
+- var errname = uv.errname(err);
+- var e = new Error(syscall + ' ' + errname);
+- e.code = errname;
+- e.errno = errname;
+- e.syscall = syscall;
+- return e;
+-};
++} \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
new file mode 100644
index 000000000..9074e8ebc
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
@@ -0,0 +1,107 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// NOTE: These type checking functions intentionally don't use `instanceof`
+// because it is fragile and can be easily faked with `Object.create()`.
+function isArray(ar) {
+ return Array.isArray(ar);
+}
+exports.isArray = isArray;
+
+function isBoolean(arg) {
+ return typeof arg === 'boolean';
+}
+exports.isBoolean = isBoolean;
+
+function isNull(arg) {
+ return arg === null;
+}
+exports.isNull = isNull;
+
+function isNullOrUndefined(arg) {
+ return arg == null;
+}
+exports.isNullOrUndefined = isNullOrUndefined;
+
+function isNumber(arg) {
+ return typeof arg === 'number';
+}
+exports.isNumber = isNumber;
+
+function isString(arg) {
+ return typeof arg === 'string';
+}
+exports.isString = isString;
+
+function isSymbol(arg) {
+ return typeof arg === 'symbol';
+}
+exports.isSymbol = isSymbol;
+
+function isUndefined(arg) {
+ return arg === void 0;
+}
+exports.isUndefined = isUndefined;
+
+function isRegExp(re) {
+ return isObject(re) && objectToString(re) === '[object RegExp]';
+}
+exports.isRegExp = isRegExp;
+
+function isObject(arg) {
+ return typeof arg === 'object' && arg !== null;
+}
+exports.isObject = isObject;
+
+function isDate(d) {
+ return isObject(d) && objectToString(d) === '[object Date]';
+}
+exports.isDate = isDate;
+
+function isError(e) {
+ return isObject(e) &&
+ (objectToString(e) === '[object Error]' || e instanceof Error);
+}
+exports.isError = isError;
+
+function isFunction(arg) {
+ return typeof arg === 'function';
+}
+exports.isFunction = isFunction;
+
+function isPrimitive(arg) {
+ return arg === null ||
+ typeof arg === 'boolean' ||
+ typeof arg === 'number' ||
+ typeof arg === 'string' ||
+ typeof arg === 'symbol' || // ES6 symbol
+ typeof arg === 'undefined';
+}
+exports.isPrimitive = isPrimitive;
+
+function isBuffer(arg) {
+ return Buffer.isBuffer(arg);
+}
+exports.isBuffer = isBuffer;
+
+function objectToString(o) {
+ return Object.prototype.toString.call(o);
+}
\ No newline at end of file
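lib/util.js is the entry point that package.json declares below ("main": "lib/util.js"); a short usage sketch of the predicates it exports, with an illustrative require path:

```js
// Usage sketch for the predicates defined above (require path illustrative).
var is = require('core-util-is');

console.log(is.isString('npm'));                 // true
console.log(is.isNullOrUndefined(undefined));    // true: `== null` also matches undefined
console.log(is.isRegExp(/x/));                   // true, via Object.prototype.toString
console.log(is.isError(new TypeError('boom')));  // true: '[object Error]' or instanceof Error
console.log(is.isPrimitive({}));                 // false: objects are not primitives
```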
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json
new file mode 100644
index 000000000..b67333380
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "core-util-is",
+ "version": "1.0.1",
+ "description": "The `util.is*` functions introduced in Node v0.12.",
+ "main": "lib/util.js",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/core-util-is.git"
+ },
+ "keywords": [
+ "util",
+ "isBuffer",
+ "isArray",
+ "isNumber",
+ "isString",
+ "isRegExp",
+ "isThis",
+ "isThat",
+ "polyfill"
+ ],
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/isaacs/core-util-is/issues"
+ },
+ "readme": "# core-util-is\n\nThe `util.is*` functions introduced in Node v0.12.\n",
+ "readmeFilename": "README.md",
+ "homepage": "https://github.com/isaacs/core-util-is#readme",
+ "_id": "core-util-is@1.0.1",
+ "_shasum": "6b07085aef9a3ccac6ee53bf9d3df0c1521a5538",
+ "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz",
+ "_from": "core-util-is@>=1.0.0 <1.1.0"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/util.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/util.js
new file mode 100644
index 000000000..007fa1057
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/util.js
@@ -0,0 +1,106 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// NOTE: These type checking functions intentionally don't use `instanceof`
+// because it is fragile and can be easily faked with `Object.create()`.
+function isArray(ar) {
+ return Array.isArray(ar);
+}
+exports.isArray = isArray;
+
+function isBoolean(arg) {
+ return typeof arg === 'boolean';
+}
+exports.isBoolean = isBoolean;
+
+function isNull(arg) {
+ return arg === null;
+}
+exports.isNull = isNull;
+
+function isNullOrUndefined(arg) {
+ return arg == null;
+}
+exports.isNullOrUndefined = isNullOrUndefined;
+
+function isNumber(arg) {
+ return typeof arg === 'number';
+}
+exports.isNumber = isNumber;
+
+function isString(arg) {
+ return typeof arg === 'string';
+}
+exports.isString = isString;
+
+function isSymbol(arg) {
+ return typeof arg === 'symbol';
+}
+exports.isSymbol = isSymbol;
+
+function isUndefined(arg) {
+ return arg === void 0;
+}
+exports.isUndefined = isUndefined;
+
+function isRegExp(re) {
+ return isObject(re) && objectToString(re) === '[object RegExp]';
+}
+exports.isRegExp = isRegExp;
+
+function isObject(arg) {
+ return typeof arg === 'object' && arg !== null;
+}
+exports.isObject = isObject;
+
+function isDate(d) {
+ return isObject(d) && objectToString(d) === '[object Date]';
+}
+exports.isDate = isDate;
+
+function isError(e) {
+ return isObject(e) && objectToString(e) === '[object Error]';
+}
+exports.isError = isError;
+
+function isFunction(arg) {
+ return typeof arg === 'function';
+}
+exports.isFunction = isFunction;
+
+function isPrimitive(arg) {
+ return arg === null ||
+ typeof arg === 'boolean' ||
+ typeof arg === 'number' ||
+ typeof arg === 'string' ||
+ typeof arg === 'symbol' || // ES6 symbol
+ typeof arg === 'undefined';
+}
+exports.isPrimitive = isPrimitive;
+
+function isBuffer(arg) {
+ return arg instanceof Buffer;
+}
+exports.isBuffer = isBuffer;
+
+function objectToString(o) {
+ return Object.prototype.toString.call(o);
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md
new file mode 100644
index 000000000..052a62b8d
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md
@@ -0,0 +1,54 @@
+
+# isarray
+
+`Array#isArray` for older browsers.
+
+## Usage
+
+```js
+var isArray = require('isarray');
+
+console.log(isArray([])); // => true
+console.log(isArray({})); // => false
+```
+
+## Installation
+
+With [npm](http://npmjs.org) do
+
+```bash
+$ npm install isarray
+```
+
+Then bundle for the browser with
+[browserify](https://github.com/substack/browserify).
+
+With [component](http://component.io) do
+
+```bash
+$ component install juliangruber/isarray
+```
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/build/build.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/build/build.js
new file mode 100644
index 000000000..e1856ef09
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/build/build.js
@@ -0,0 +1,208 @@
+
+/**
+ * Require the given path.
+ *
+ * @param {String} path
+ * @return {Object} exports
+ * @api public
+ */
+
+function require(path, parent, orig) {
+ var resolved = require.resolve(path);
+
+ // lookup failed
+ if (null == resolved) {
+ orig = orig || path;
+ parent = parent || 'root';
+ var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
+ err.path = orig;
+ err.parent = parent;
+ err.require = true;
+ throw err;
+ }
+
+ var module = require.modules[resolved];
+
+ // perform real require()
+ // by invoking the module's
+ // registered function
+ if (!module.exports) {
+ module.exports = {};
+ module.client = module.component = true;
+ module.call(this, module.exports, require.relative(resolved), module);
+ }
+
+ return module.exports;
+}
+
+/**
+ * Registered modules.
+ */
+
+require.modules = {};
+
+/**
+ * Registered aliases.
+ */
+
+require.aliases = {};
+
+/**
+ * Resolve `path`.
+ *
+ * Lookup:
+ *
+ * - PATH/index.js
+ * - PATH.js
+ * - PATH
+ *
+ * @param {String} path
+ * @return {String} path or null
+ * @api private
+ */
+
+require.resolve = function(path) {
+ if (path.charAt(0) === '/') path = path.slice(1);
+ var index = path + '/index.js';
+
+ var paths = [
+ path,
+ path + '.js',
+ path + '.json',
+ path + '/index.js',
+ path + '/index.json'
+ ];
+
+ for (var i = 0; i < paths.length; i++) {
+ var path = paths[i];
+ if (require.modules.hasOwnProperty(path)) return path;
+ }
+
+ if (require.aliases.hasOwnProperty(index)) {
+ return require.aliases[index];
+ }
+};
+
+/**
+ * Normalize `path` relative to the current path.
+ *
+ * @param {String} curr
+ * @param {String} path
+ * @return {String}
+ * @api private
+ */
+
+require.normalize = function(curr, path) {
+ var segs = [];
+
+ if ('.' != path.charAt(0)) return path;
+
+ curr = curr.split('/');
+ path = path.split('/');
+
+ for (var i = 0; i < path.length; ++i) {
+ if ('..' == path[i]) {
+ curr.pop();
+ } else if ('.' != path[i] && '' != path[i]) {
+ segs.push(path[i]);
+ }
+ }
+
+ return curr.concat(segs).join('/');
+};
+
+/**
+ * Register module at `path` with callback `definition`.
+ *
+ * @param {String} path
+ * @param {Function} definition
+ * @api private
+ */
+
+require.register = function(path, definition) {
+ require.modules[path] = definition;
+};
+
+/**
+ * Alias a module definition.
+ *
+ * @param {String} from
+ * @param {String} to
+ * @api private
+ */
+
+require.alias = function(from, to) {
+ if (!require.modules.hasOwnProperty(from)) {
+ throw new Error('Failed to alias "' + from + '", it does not exist');
+ }
+ require.aliases[to] = from;
+};
+
+/**
+ * Return a require function relative to the `parent` path.
+ *
+ * @param {String} parent
+ * @return {Function}
+ * @api private
+ */
+
+require.relative = function(parent) {
+ var p = require.normalize(parent, '..');
+
+ /**
+ * lastIndexOf helper.
+ */
+
+ function lastIndexOf(arr, obj) {
+ var i = arr.length;
+ while (i--) {
+ if (arr[i] === obj) return i;
+ }
+ return -1;
+ }
+
+ /**
+ * The relative require() itself.
+ */
+
+ function localRequire(path) {
+ var resolved = localRequire.resolve(path);
+ return require(resolved, parent, path);
+ }
+
+ /**
+ * Resolve relative to the parent.
+ */
+
+ localRequire.resolve = function(path) {
+ var c = path.charAt(0);
+ if ('/' == c) return path.slice(1);
+ if ('.' == c) return require.normalize(p, path);
+
+ // resolve deps by returning
+ // the dep in the nearest "deps"
+ // directory
+ var segs = parent.split('/');
+ var i = lastIndexOf(segs, 'deps') + 1;
+ if (!i) i = 0;
+ path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
+ return path;
+ };
+
+ /**
+ * Check if module is defined at `path`.
+ */
+
+ localRequire.exists = function(path) {
+ return require.modules.hasOwnProperty(localRequire.resolve(path));
+ };
+
+ return localRequire;
+};
+require.register("isarray/index.js", function(exports, require, module){
+module.exports = Array.isArray || function (arr) {
+ return Object.prototype.toString.call(arr) == '[object Array]';
+};
+
+});
+require.alias("isarray/index.js", "isarray/index.js");
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json
new file mode 100644
index 000000000..9e31b6838
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json
@@ -0,0 +1,19 @@
+{
+ "name" : "isarray",
+ "description" : "Array#isArray for older browsers",
+ "version" : "0.0.1",
+ "repository" : "juliangruber/isarray",
+ "homepage": "https://github.com/juliangruber/isarray",
+ "main" : "index.js",
+ "scripts" : [
+ "index.js"
+ ],
+ "dependencies" : {},
+ "keywords": ["browser","isarray","array"],
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "license": "MIT"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js
new file mode 100644
index 000000000..5f5ad45d4
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js
@@ -0,0 +1,3 @@
+module.exports = Array.isArray || function (arr) {
+ return Object.prototype.toString.call(arr) == '[object Array]';
+};
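The export above is Array.isArray where available, with an Object.prototype.toString fallback for pre-ES5 browsers; a tiny sketch (require path illustrative):

```js
var isArray = require('isarray');

console.log(isArray([1, 2, 3]));      // true
console.log(isArray(new Array(5)));   // true
console.log(isArray({ length: 3 }));  // false: array-likes don't count
console.log(isArray('abc'));          // false
```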
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json
new file mode 100644
index 000000000..fb1eb3786
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json
@@ -0,0 +1,38 @@
+{
+ "name": "isarray",
+ "description": "Array#isArray for older browsers",
+ "version": "0.0.1",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/isarray.git"
+ },
+ "homepage": "https://github.com/juliangruber/isarray",
+ "main": "index.js",
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "*"
+ },
+ "keywords": [
+ "browser",
+ "isarray",
+ "array"
+ ],
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "license": "MIT",
+ "readme": "\n# isarray\n\n`Array#isArray` for older browsers.\n\n## Usage\n\n```js\nvar isArray = require('isarray');\n\nconsole.log(isArray([])); // => true\nconsole.log(isArray({})); // => false\n```\n\n## Installation\n\nWith [npm](http://npmjs.org) do\n\n```bash\n$ npm install isarray\n```\n\nThen bundle for the browser with\n[browserify](https://github.com/substack/browserify).\n\nWith [component](http://component.io) do\n\n```bash\n$ component install juliangruber/isarray\n```\n\n## License\n\n(MIT)\n\nCopyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n",
+ "readmeFilename": "README.md",
+ "bugs": {
+ "url": "https://github.com/juliangruber/isarray/issues"
+ },
+ "_id": "isarray@0.0.1",
+ "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
+ "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
+ "_from": "isarray@0.0.1"
+}
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.travis.yml b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml
index 6064ca092..5ac988553 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.travis.yml
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml
@@ -1,5 +1,7 @@
language: node_js
node_js:
+ - "0.8"
- "0.10"
+ - "0.11"
- "0.12"
- "iojs"
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js
new file mode 100644
index 000000000..3eb2f33d0
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js
@@ -0,0 +1,13 @@
+'use strict';
+module.exports = nextTick;
+
+function nextTick(fn) {
+ var args = new Array(arguments.length - 1);
+ var i = 0;
+ while (i < args.length) {
+ args[i++] = arguments[i];
+ }
+ process.nextTick(function afterTick() {
+ fn.apply(null, args);
+ });
+}
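The loop above copies arguments[1..n] into args[0..n-1] (the left-hand args[i++] is evaluated before the right-hand arguments[i], which is what produces the one-slot offset), and the wrapper exists because process.nextTick() on the older Node versions in the Travis matrix above does not forward extra arguments to the callback itself. A short usage sketch (require path illustrative):

```js
var nextTick = require('process-nextick-args');

// The callback receives the extra arguments even on Node 0.8/0.10.
nextTick(function (err, value) {
  if (err) throw err;
  console.log('got', value);   // -> got 42
}, null, 42);
```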
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json
new file mode 100644
index 000000000..9be9ed5b5
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json
@@ -0,0 +1,45 @@
+{
+ "name": "process-nextick-args",
+ "version": "1.0.2",
+ "description": "process.nextTick but always with args",
+ "main": "index.js",
+ "scripts": {
+ "test": "node test.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git"
+ },
+ "author": "",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/calvinmetcalf/process-nextick-args/issues"
+ },
+ "homepage": "https://github.com/calvinmetcalf/process-nextick-args",
+ "devDependencies": {
+ "tap": "~0.2.6"
+ },
+ "gitHead": "295707643b4ed6667c1afb71ffb6101669b5dac2",
+ "_id": "process-nextick-args@1.0.2",
+ "_shasum": "8b4d3fc586668bd5b6573e732edf2b71c1c1d8aa",
+ "_from": "process-nextick-args@>=1.0.0 <1.1.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
+ "_npmUser": {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ },
+ "dist": {
+ "shasum": "8b4d3fc586668bd5b6573e732edf2b71c1c1d8aa",
+ "tarball": "http://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md
new file mode 100644
index 000000000..78e7cfaeb
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md
@@ -0,0 +1,18 @@
+process-nextick-args
+=====
+
+[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args)
+
+```bash
+npm install --save process-nextick-args
+```
+
+Always be able to pass arguments to process.nextTick, no matter the platform
+
+```js
+var nextTick = require('process-nextick-args');
+
+nextTick(function (a, b, c) {
+ console.log(a, b, c);
+}, 'step', 3, 'profit');
+```
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js
new file mode 100644
index 000000000..729f775ff
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js
@@ -0,0 +1,17 @@
+var test = require("tap").test;
+var nextTick = require('./');
+
+test('should work', function (t) {
+ t.plan(5);
+ nextTick(function (a) {
+ t.ok(a);
+ nextTick(function (thing) {
+ t.equals(thing, 7);
+ }, 7);
+ }, true);
+ nextTick(function (a, b, c) {
+ t.equals(a, 'step');
+ t.equals(b, 3);
+ t.equals(c, 'profit');
+ }, 'step', 3, 'profit');
+});
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore
new file mode 100644
index 000000000..206320cc1
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore
@@ -0,0 +1,2 @@
+build
+test
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE
new file mode 100644
index 000000000..6de584a48
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE
@@ -0,0 +1,20 @@
+Copyright Joyent, Inc. and other Node contributors.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to permit
+persons to whom the Software is furnished to do so, subject to the
+following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md
new file mode 100644
index 000000000..4d2aa0015
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md
@@ -0,0 +1,7 @@
+**string_decoder.js** (`require('string_decoder')`) from Node.js core
+
+Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details.
+
+Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**
+
+The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.
\ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js
new file mode 100644
index 000000000..b00e54fb7
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js
@@ -0,0 +1,221 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+var Buffer = require('buffer').Buffer;
+
+var isBufferEncoding = Buffer.isEncoding
+ || function(encoding) {
+ switch (encoding && encoding.toLowerCase()) {
+ case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
+ default: return false;
+ }
+ }
+
+
+function assertEncoding(encoding) {
+ if (encoding && !isBufferEncoding(encoding)) {
+ throw new Error('Unknown encoding: ' + encoding);
+ }
+}
+
+// StringDecoder provides an interface for efficiently splitting a series of
+// buffers into a series of JS strings without breaking apart multi-byte
+// characters. CESU-8 is handled as part of the UTF-8 encoding.
+//
+// @TODO Handling all encodings inside a single object makes it very difficult
+// to reason about this code, so it should be split up in the future.
+// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
+// points as used by CESU-8.
+var StringDecoder = exports.StringDecoder = function(encoding) {
+ this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
+ assertEncoding(encoding);
+ switch (this.encoding) {
+ case 'utf8':
+ // CESU-8 represents each of Surrogate Pair by 3-bytes
+ this.surrogateSize = 3;
+ break;
+ case 'ucs2':
+ case 'utf16le':
+ // UTF-16 represents each of Surrogate Pair by 2-bytes
+ this.surrogateSize = 2;
+ this.detectIncompleteChar = utf16DetectIncompleteChar;
+ break;
+ case 'base64':
+ // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
+ this.surrogateSize = 3;
+ this.detectIncompleteChar = base64DetectIncompleteChar;
+ break;
+ default:
+ this.write = passThroughWrite;
+ return;
+ }
+
+ // Enough space to store all bytes of a single character. UTF-8 needs 4
+ // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
+ this.charBuffer = new Buffer(6);
+ // Number of bytes received for the current incomplete multi-byte character.
+ this.charReceived = 0;
+ // Number of bytes expected for the current incomplete multi-byte character.
+ this.charLength = 0;
+};
+
+
+// write decodes the given buffer and returns it as JS string that is
+// guaranteed to not contain any partial multi-byte characters. Any partial
+// character found at the end of the buffer is buffered up, and will be
+// returned when calling write again with the remaining bytes.
+//
+// Note: Converting a Buffer containing an orphan surrogate to a String
+// currently works, but converting a String to a Buffer (via `new Buffer`, or
+// Buffer#write) will replace incomplete surrogates with the unicode
+// replacement character. See https://codereview.chromium.org/121173009/ .
+StringDecoder.prototype.write = function(buffer) {
+ var charStr = '';
+ // if our last write ended with an incomplete multibyte character
+ while (this.charLength) {
+ // determine how many remaining bytes this buffer has to offer for this char
+ var available = (buffer.length >= this.charLength - this.charReceived) ?
+ this.charLength - this.charReceived :
+ buffer.length;
+
+ // add the new bytes to the char buffer
+ buffer.copy(this.charBuffer, this.charReceived, 0, available);
+ this.charReceived += available;
+
+ if (this.charReceived < this.charLength) {
+ // still not enough chars in this buffer? wait for more ...
+ return '';
+ }
+
+ // remove bytes belonging to the current character from the buffer
+ buffer = buffer.slice(available, buffer.length);
+
+ // get the character that was split
+ charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
+
+ // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
+ var charCode = charStr.charCodeAt(charStr.length - 1);
+ if (charCode >= 0xD800 && charCode <= 0xDBFF) {
+ this.charLength += this.surrogateSize;
+ charStr = '';
+ continue;
+ }
+ this.charReceived = this.charLength = 0;
+
+ // if there are no more bytes in this buffer, just emit our char
+ if (buffer.length === 0) {
+ return charStr;
+ }
+ break;
+ }
+
+ // determine and set charLength / charReceived
+ this.detectIncompleteChar(buffer);
+
+ var end = buffer.length;
+ if (this.charLength) {
+ // buffer the incomplete character bytes we got
+ buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
+ end -= this.charReceived;
+ }
+
+ charStr += buffer.toString(this.encoding, 0, end);
+
+ var end = charStr.length - 1;
+ var charCode = charStr.charCodeAt(end);
+ // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
+ if (charCode >= 0xD800 && charCode <= 0xDBFF) {
+ var size = this.surrogateSize;
+ this.charLength += size;
+ this.charReceived += size;
+ this.charBuffer.copy(this.charBuffer, size, 0, size);
+ buffer.copy(this.charBuffer, 0, 0, size);
+ return charStr.substring(0, end);
+ }
+
+ // or just emit the charStr
+ return charStr;
+};
+
+// detectIncompleteChar determines if there is an incomplete UTF-8 character at
+// the end of the given buffer. If so, it sets this.charLength to the byte
+// length that character, and sets this.charReceived to the number of bytes
+// that are available for this character.
+StringDecoder.prototype.detectIncompleteChar = function(buffer) {
+ // determine how many bytes we have to check at the end of this buffer
+ var i = (buffer.length >= 3) ? 3 : buffer.length;
+
+ // Figure out if one of the last i bytes of our buffer announces an
+ // incomplete char.
+ for (; i > 0; i--) {
+ var c = buffer[buffer.length - i];
+
+ // See http://en.wikipedia.org/wiki/UTF-8#Description
+
+ // 110XXXXX
+ if (i == 1 && c >> 5 == 0x06) {
+ this.charLength = 2;
+ break;
+ }
+
+ // 1110XXXX
+ if (i <= 2 && c >> 4 == 0x0E) {
+ this.charLength = 3;
+ break;
+ }
+
+ // 11110XXX
+ if (i <= 3 && c >> 3 == 0x1E) {
+ this.charLength = 4;
+ break;
+ }
+ }
+ this.charReceived = i;
+};
+
+StringDecoder.prototype.end = function(buffer) {
+ var res = '';
+ if (buffer && buffer.length)
+ res = this.write(buffer);
+
+ if (this.charReceived) {
+ var cr = this.charReceived;
+ var buf = this.charBuffer;
+ var enc = this.encoding;
+ res += buf.slice(0, cr).toString(enc);
+ }
+
+ return res;
+};
+
+function passThroughWrite(buffer) {
+ return buffer.toString(this.encoding);
+}
+
+function utf16DetectIncompleteChar(buffer) {
+ this.charReceived = buffer.length % 2;
+ this.charLength = this.charReceived ? 2 : 0;
+}
+
+function base64DetectIncompleteChar(buffer) {
+ this.charReceived = buffer.length % 3;
+ this.charLength = this.charReceived ? 3 : 0;
+}
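StringDecoder buffers the trailing bytes of an incomplete multi-byte character (the charBuffer/charReceived/charLength fields above) and only emits the character once the remaining bytes arrive. A sketch splitting a 3-byte UTF-8 character across two writes:

```js
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf8');

// U+20AC (€) is the 3-byte UTF-8 sequence 0xE2 0x82 0xAC.
console.log(decoder.write(new Buffer([0xE2, 0x82]))); // '' - incomplete, buffered
console.log(decoder.write(new Buffer([0xAC])));       // '€' - character completed
console.log(decoder.end());                           // '' - nothing left over
```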
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json
new file mode 100644
index 000000000..ee7070235
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json
@@ -0,0 +1,34 @@
+{
+ "name": "string_decoder",
+ "version": "0.10.31",
+ "description": "The string_decoder module from Node core",
+ "main": "index.js",
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "~0.4.8"
+ },
+ "scripts": {
+ "test": "tap test/simple/*.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/rvagg/string_decoder.git"
+ },
+ "homepage": "https://github.com/rvagg/string_decoder",
+ "keywords": [
+ "string",
+ "decoder",
+ "browser",
+ "browserify"
+ ],
+ "license": "MIT",
+ "readme": "**string_decoder.js** (`require('string_decoder')`) from Node.js core\n\nCopyright Joyent, Inc. and other Node contributors. See LICENCE file for details.\n\nVersion numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**\n\nThe *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.",
+ "readmeFilename": "README.md",
+ "bugs": {
+ "url": "https://github.com/rvagg/string_decoder/issues"
+ },
+ "_id": "string_decoder@0.10.31",
+ "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
+ "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
+ "_from": "string_decoder@>=0.10.0 <0.11.0"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md
new file mode 100644
index 000000000..ec010299b
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md
@@ -0,0 +1,11 @@
+
+1.0.1 / 2014-11-25
+==================
+
+ * browser: use `console.warn()` for deprecation calls
+ * browser: more jsdocs
+
+1.0.0 / 2014-04-30
+==================
+
+ * initial commit
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE
new file mode 100644
index 000000000..6a60e8c22
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md
new file mode 100644
index 000000000..75622fa7c
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md
@@ -0,0 +1,53 @@
+util-deprecate
+==============
+### The Node.js `util.deprecate()` function with browser support
+
+In Node.js, this module simply re-exports the `util.deprecate()` function.
+
+In the web browser (i.e. via browserify), a browser-specific implementation
+of the `util.deprecate()` function is used.
+
+
+## API
+
+A `deprecate()` function is the only thing exposed by this module.
+
+``` javascript
+// setup:
+exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead');
+
+
+// users see:
+foo();
+// foo() is deprecated, use bar() instead
+foo();
+foo();
+```
+
+
+## License
+
+(The MIT License)
+
+Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
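A minimal sketch of the setup the API section above describes, in Node style (the `oldSum`/`newSum` names are hypothetical, and `util-deprecate` is assumed to be installed):

```javascript
// Sketch only: wrap an old export so callers get a one-time warning.
var deprecate = require('util-deprecate')

function oldSum (a, b) { return a + b }

// The first call to exports.oldSum() prints the message once; every call
// is still forwarded to the original function.
exports.oldSum = deprecate(oldSum, 'oldSum() is deprecated, use newSum() instead')
exports.newSum = function (a, b) { return a + b }
```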
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js
new file mode 100644
index 000000000..55fa5a4bc
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js
@@ -0,0 +1,62 @@
+
+/**
+ * Module exports.
+ */
+
+module.exports = deprecate;
+
+/**
+ * Mark that a method should not be used.
+ * Returns a modified function which warns once by default.
+ *
+ * If `localStorage.noDeprecation = true` is set, then it is a no-op.
+ *
+ * If `localStorage.throwDeprecation = true` is set, then deprecated functions
+ * will throw an Error when invoked.
+ *
+ * If `localStorage.traceDeprecation = true` is set, then deprecated functions
+ * will invoke `console.trace()` instead of `console.error()`.
+ *
+ * @param {Function} fn - the function to deprecate
+ * @param {String} msg - the string to print to the console when `fn` is invoked
+ * @returns {Function} a new "deprecated" version of `fn`
+ * @api public
+ */
+
+function deprecate (fn, msg) {
+ if (config('noDeprecation')) {
+ return fn;
+ }
+
+ var warned = false;
+ function deprecated() {
+ if (!warned) {
+ if (config('throwDeprecation')) {
+ throw new Error(msg);
+ } else if (config('traceDeprecation')) {
+ console.trace(msg);
+ } else {
+ console.warn(msg);
+ }
+ warned = true;
+ }
+ return fn.apply(this, arguments);
+ }
+
+ return deprecated;
+}
+
+/**
+ * Checks `localStorage` for boolean values for the given `name`.
+ *
+ * @param {String} name
+ * @returns {Boolean}
+ * @api private
+ */
+
+function config (name) {
+ if (!global.localStorage) return false;
+ var val = global.localStorage[name];
+ if (null == val) return false;
+ return String(val).toLowerCase() === 'true';
+}
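A small, browser-only sketch of the `localStorage` switches the comment block above documents (to be run in a page where this file is bundled, e.g. via browserify):

```javascript
// Values are read back as strings, so config() compares against 'true'.
localStorage.noDeprecation = 'false'    // keep warnings enabled
localStorage.traceDeprecation = 'true'  // report via console.trace()
localStorage.throwDeprecation = 'false' // do not turn warnings into errors
```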
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js
new file mode 100644
index 000000000..5e6fcff5d
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js
@@ -0,0 +1,6 @@
+
+/**
+ * For Node.js, simply re-export the core `util.deprecate` function.
+ */
+
+module.exports = require('util').deprecate;
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json
new file mode 100644
index 000000000..ea487da0e
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json
@@ -0,0 +1,53 @@
+{
+ "name": "util-deprecate",
+ "version": "1.0.1",
+ "description": "The Node.js `util.deprecate()` function with browser support",
+ "main": "node.js",
+ "browser": "browser.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/TooTallNate/util-deprecate.git"
+ },
+ "keywords": [
+ "util",
+ "deprecate",
+ "browserify",
+ "browser",
+ "node"
+ ],
+ "author": {
+ "name": "Nathan Rajlich",
+ "email": "nathan@tootallnate.net",
+ "url": "http://n8.io/"
+ },
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/TooTallNate/util-deprecate/issues"
+ },
+ "homepage": "https://github.com/TooTallNate/util-deprecate",
+ "gitHead": "6e923f7d98a0afbe5b9c7db9d0f0029c1936746c",
+ "_id": "util-deprecate@1.0.1",
+ "_shasum": "3556a3d13c4c6aa7983d7e2425478197199b7881",
+ "_from": "util-deprecate@>=1.0.1 <1.1.0",
+ "_npmVersion": "1.4.28",
+ "_npmUser": {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ },
+ "maintainers": [
+ {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ }
+ ],
+ "dist": {
+ "shasum": "3556a3d13c4c6aa7983d7e2425478197199b7881",
+ "tarball": "http://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.1.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.1.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/package.json
new file mode 100644
index 000000000..70ad998ca
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/package.json
@@ -0,0 +1,75 @@
+{
+ "name": "readable-stream",
+ "version": "2.0.2",
+ "description": "Streams3, a user-land copy of the stream library from iojs v2.x",
+ "main": "readable.js",
+ "dependencies": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.1",
+ "isarray": "0.0.1",
+ "process-nextick-args": "~1.0.0",
+ "string_decoder": "~0.10.x",
+ "util-deprecate": "~1.0.1"
+ },
+ "devDependencies": {
+ "tap": "~0.2.6",
+ "tape": "~4.0.0",
+ "zuul": "~3.0.0"
+ },
+ "scripts": {
+ "test": "tap test/parallel/*.js",
+ "browser": "zuul --browser-name $BROWSER_NAME --browser-version $BROWSER_VERSION -- test/browser.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/nodejs/readable-stream.git"
+ },
+ "keywords": [
+ "readable",
+ "stream",
+ "pipe"
+ ],
+ "browser": {
+ "util": false
+ },
+ "license": "MIT",
+ "gitHead": "1a70134a71196eeabb5e27bc7580faaa68d30513",
+ "bugs": {
+ "url": "https://github.com/nodejs/readable-stream/issues"
+ },
+ "homepage": "https://github.com/nodejs/readable-stream#readme",
+ "_id": "readable-stream@2.0.2",
+ "_shasum": "bec81beae8cf455168bc2e5b2b31f5bcfaed9b1b",
+ "_from": "readable-stream@>=2.0.0 <2.1.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
+ "_npmUser": {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ },
+ "dist": {
+ "shasum": "bec81beae8cf455168bc2e5b2b31f5bcfaed9b1b",
+ "tarball": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ },
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.2.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/passthrough.js
new file mode 100644
index 000000000..27e8d8a55
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/passthrough.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_passthrough.js")
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/readable.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/readable.js
new file mode 100644
index 000000000..6222a5798
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/readable.js
@@ -0,0 +1,12 @@
+var Stream = (function (){
+ try {
+ return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify
+ } catch(_){}
+}());
+exports = module.exports = require('./lib/_stream_readable.js');
+exports.Stream = Stream || exports;
+exports.Readable = exports;
+exports.Writable = require('./lib/_stream_writable.js');
+exports.Duplex = require('./lib/_stream_duplex.js');
+exports.Transform = require('./lib/_stream_transform.js');
+exports.PassThrough = require('./lib/_stream_passthrough.js');
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/transform.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/transform.js
new file mode 100644
index 000000000..5d482f078
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/transform.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_transform.js")
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/writable.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/writable.js
new file mode 100644
index 000000000..e1e9efdf3
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/node_modules/readable-stream/writable.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_writable.js")
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/package.json
index 0a5652171..caa06817b 100644
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/package.json
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/package.json
@@ -1,6 +1,6 @@
{
"name": "concat-stream",
- "version": "1.4.8",
+ "version": "1.5.0",
"description": "writable stream that concatenates strings or binary data and calls a callback with the result",
"tags": [
"stream",
@@ -23,6 +23,9 @@
"node >= 0.8"
],
"main": "index.js",
+ "files": [
+ "index.js"
+ ],
"scripts": {
"test": "tape test/*.js test/server/*.js"
},
@@ -30,7 +33,7 @@
"dependencies": {
"inherits": "~2.0.1",
"typedarray": "~0.0.5",
- "readable-stream": "~1.1.9"
+ "readable-stream": "~2.0.0"
},
"devDependencies": {
"tape": "~2.3.2"
@@ -51,13 +54,13 @@
"android-browser/4.2..latest"
]
},
- "gitHead": "1f4ea1a7791b9366a133cab033eb0f3564cb0d92",
- "homepage": "https://github.com/maxogden/concat-stream",
- "_id": "concat-stream@1.4.8",
- "_shasum": "e8325bb89e55000e52b626d97466fde1a28cfe5d",
+ "gitHead": "7cb37c8ddc0fd2ea03c104d07d44d84b83a31185",
+ "homepage": "https://github.com/maxogden/concat-stream#readme",
+ "_id": "concat-stream@1.5.0",
+ "_shasum": "53f7d43c51c5e43f81c8fdd03321c631be68d611",
"_from": "concat-stream@>=1.4.6 <2.0.0",
- "_npmVersion": "2.7.0",
- "_nodeVersion": "1.5.1",
+ "_npmVersion": "2.9.0",
+ "_nodeVersion": "1.8.2",
"_npmUser": {
"name": "maxogden",
"email": "max@maxogden.com"
@@ -69,10 +72,10 @@
}
],
"dist": {
- "shasum": "e8325bb89e55000e52b626d97466fde1a28cfe5d",
- "tarball": "http://registry.npmjs.org/concat-stream/-/concat-stream-1.4.8.tgz"
+ "shasum": "53f7d43c51c5e43f81c8fdd03321c631be68d611",
+ "tarball": "http://registry.npmjs.org/concat-stream/-/concat-stream-1.5.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.4.8.tgz",
+ "_resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.5.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/readme.md b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/readme.md
index d028aec3c..69234d52a 100644
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/readme.md
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/readme.md
@@ -2,8 +2,18 @@
Writable stream that concatenates strings or binary data and calls a callback with the result. Not a transform stream -- more of a stream sink.
+[![Build Status](https://travis-ci.org/maxogden/concat-stream.svg?branch=master)](https://travis-ci.org/maxogden/concat-stream)
+
[![NPM](https://nodei.co/npm/concat-stream.png)](https://nodei.co/npm/concat-stream/)
+### description
+
+Streams emit many buffers. If you want to collect all of the buffers and, when the stream ends, concatenate them into a single buffer that is handed to you, then this is the module for you.
+
+Only use this if you know that all of the output of your stream will fit into a single Buffer in memory.
+
+There are also `objectMode` streams that emit things other than Buffers, and you can concatenate these too. See below for details.
+
### examples
#### Buffers
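As a rough illustration of the collect-into-one-Buffer behaviour the description above adds, a minimal sketch (assumes `concat-stream` is installed; the file being read is arbitrary):

```javascript
var concat = require('concat-stream')
var fs = require('fs')

// Pipe a readable stream into concat-stream; the callback receives a
// single Buffer containing every chunk once the stream has ended.
fs.createReadStream(__filename).pipe(concat(function (body) {
  console.log('read %d bytes', body.length)
}))
```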
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/array.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/array.js
deleted file mode 100644
index 86e7dd43b..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/array.js
+++ /dev/null
@@ -1,12 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-
-test('array stream', function (t) {
- t.plan(1)
- var arrays = concat({ encoding: 'array' }, function(out) {
- t.deepEqual(out, [1,2,3,4,5,6])
- })
- arrays.write([1,2,3])
- arrays.write([4,5,6])
- arrays.end()
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/buffer.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/buffer.js
deleted file mode 100644
index d28f5f9c1..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/buffer.js
+++ /dev/null
@@ -1,31 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-var TA = require('typedarray')
-var U8 = typeof Uint8Array !== 'undefined' ? Uint8Array : TA.Uint8Array
-
-test('buffer stream', function (t) {
- t.plan(2)
- var buffers = concat(function(out) {
- t.ok(Buffer.isBuffer(out))
- t.equal(out.toString('utf8'), 'pizza Array is not a stringy cat')
- })
- buffers.write(new Buffer('pizza Array is not a ', 'utf8'))
- buffers.write(new Buffer('stringy cat'))
- buffers.end()
-})
-
-test('buffer mixed writes', function (t) {
- t.plan(2)
- var buffers = concat(function(out) {
- t.ok(Buffer.isBuffer(out))
- t.equal(out.toString('utf8'), 'pizza Array is not a stringy cat555')
- })
- buffers.write(new Buffer('pizza'))
- buffers.write(' Array is not a ')
- buffers.write([ 115, 116, 114, 105, 110, 103, 121 ])
- var u8 = new U8(4)
- u8[0] = 32; u8[1] = 99; u8[2] = 97; u8[3] = 116
- buffers.write(u8)
- buffers.write(555)
- buffers.end()
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/infer.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/infer.js
deleted file mode 100644
index 91ab933f4..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/infer.js
+++ /dev/null
@@ -1,15 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-
-test('type inference works as expected', function(t) {
- var stream = concat()
- t.equal(stream.inferEncoding(['hello']), 'array', 'array')
- t.equal(stream.inferEncoding(new Buffer('hello')), 'buffer', 'buffer')
- t.equal(stream.inferEncoding(undefined), 'buffer', 'buffer')
- t.equal(stream.inferEncoding(new Uint8Array(1)), 'uint8array', 'uint8array')
- t.equal(stream.inferEncoding('hello'), 'string', 'string')
- t.equal(stream.inferEncoding(''), 'string', 'string')
- t.equal(stream.inferEncoding({hello: "world"}), 'object', 'object')
- t.equal(stream.inferEncoding(1), 'buffer', 'buffer')
- t.end()
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/nothing.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/nothing.js
deleted file mode 100644
index 6ac604965..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/nothing.js
+++ /dev/null
@@ -1,25 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-
-test('no callback stream', function (t) {
- var stream = concat()
- stream.write('space')
- stream.end(' cats')
- t.end()
-})
-
-test('no encoding set, no data', function (t) {
- var stream = concat(function(data) {
- t.deepEqual(data, [])
- t.end()
- })
- stream.end()
-})
-
-test('encoding set to string, no data', function (t) {
- var stream = concat({ encoding: 'string' }, function(data) {
- t.deepEqual(data, '')
- t.end()
- })
- stream.end()
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/objects.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/objects.js
deleted file mode 100644
index ad921ed25..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/objects.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-
-test('writing objects', function (t) {
- var stream = concat({encoding: "objects"}, concatted)
- function concatted(objs) {
- t.equal(objs.length, 2)
- t.deepEqual(objs[0], {"foo": "bar"})
- t.deepEqual(objs[1], {"baz": "taco"})
- }
- stream.write({"foo": "bar"})
- stream.write({"baz": "taco"})
- stream.end()
- t.end()
-})
-
-
-test('switch to objects encoding if no encoding specified and objects are written', function (t) {
- var stream = concat(concatted)
- function concatted(objs) {
- t.equal(objs.length, 2)
- t.deepEqual(objs[0], {"foo": "bar"})
- t.deepEqual(objs[1], {"baz": "taco"})
- }
- stream.write({"foo": "bar"})
- stream.write({"baz": "taco"})
- stream.end()
- t.end()
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/server/ls.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/server/ls.js
deleted file mode 100644
index 3258d8ddc..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/server/ls.js
+++ /dev/null
@@ -1,16 +0,0 @@
-var concat = require('../../')
-var spawn = require('child_process').spawn
-var exec = require('child_process').exec
-var test = require('tape')
-
-test('ls command', function (t) {
- t.plan(1)
- var cmd = spawn('ls', [ __dirname ])
- cmd.stdout.pipe(
- concat(function(out) {
- exec('ls ' + __dirname, function (err, body) {
- t.equal(out.toString('utf8'), body.toString('utf8'))
- })
- })
- )
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/string.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/string.js
deleted file mode 100644
index 218c52206..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/string.js
+++ /dev/null
@@ -1,76 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-var TA = require('typedarray')
-var U8 = typeof Uint8Array !== 'undefined' ? Uint8Array : TA.Uint8Array
-
-test('string -> buffer stream', function (t) {
- t.plan(2)
- var strings = concat({ encoding: 'buffer'}, function(out) {
- t.ok(Buffer.isBuffer(out))
- t.equal(out.toString('utf8'), 'nacho dogs')
- })
- strings.write("nacho ")
- strings.write("dogs")
- strings.end()
-})
-
-test('string stream', function (t) {
- t.plan(2)
- var strings = concat({ encoding: 'string' }, function(out) {
- t.equal(typeof out, 'string')
- t.equal(out, 'nacho dogs')
- })
- strings.write("nacho ")
- strings.write("dogs")
- strings.end()
-})
-
-test('end chunk', function (t) {
- t.plan(1)
- var endchunk = concat({ encoding: 'string' }, function(out) {
- t.equal(out, 'this is the end')
- })
- endchunk.write("this ")
- endchunk.write("is the ")
- endchunk.end("end")
-})
-
-test('string from mixed write encodings', function (t) {
- t.plan(2)
- var strings = concat({ encoding: 'string' }, function(out) {
- t.equal(typeof out, 'string')
- t.equal(out, 'nacho dogs')
- })
- strings.write('na')
- strings.write(Buffer('cho'))
- strings.write([ 32, 100 ])
- var u8 = new U8(3)
- u8[0] = 111; u8[1] = 103; u8[2] = 115;
- strings.end(u8)
-})
-
-test('string from buffers with multibyte characters', function (t) {
- t.plan(2)
- var strings = concat({ encoding: 'string' }, function(out) {
- t.equal(typeof out, 'string')
- t.equal(out, '☃☃☃☃☃☃☃☃')
- })
- var snowman = new Buffer('☃')
- for (var i = 0; i < 8; i++) {
- strings.write(snowman.slice(0, 1))
- strings.write(snowman.slice(1))
- }
- strings.end()
-})
-
-test('string infer encoding with empty string chunk', function (t) {
- t.plan(2)
- var strings = concat(function(out) {
- t.equal(typeof out, 'string')
- t.equal(out, 'nacho dogs')
- })
- strings.write("")
- strings.write("nacho ")
- strings.write("dogs")
- strings.end()
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/typedarray.js b/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/typedarray.js
deleted file mode 100644
index ee0711082..000000000
--- a/deps/npm/node_modules/npm-registry-client/node_modules/concat-stream/test/typedarray.js
+++ /dev/null
@@ -1,33 +0,0 @@
-var concat = require('../')
-var test = require('tape')
-var TA = require('typedarray')
-var U8 = typeof Uint8Array !== 'undefined' ? Uint8Array : TA.Uint8Array
-
-test('typed array stream', function (t) {
- t.plan(2)
- var a = new U8(5)
- a[0] = 97; a[1] = 98; a[2] = 99; a[3] = 100; a[4] = 101;
- var b = new U8(3)
- b[0] = 32; b[1] = 102; b[2] = 103;
- var c = new U8(4)
- c[0] = 32; c[1] = 120; c[2] = 121; c[3] = 122;
-
- var arrays = concat({ encoding: 'Uint8Array' }, function(out) {
- t.equal(typeof out.subarray, 'function')
- t.deepEqual(Buffer(out).toString('utf8'), 'abcde fg xyz')
- })
- arrays.write(a)
- arrays.write(b)
- arrays.end(c)
-})
-
-test('typed array from strings, buffers, and arrays', function (t) {
- t.plan(2)
- var arrays = concat({ encoding: 'Uint8Array' }, function(out) {
- t.equal(typeof out.subarray, 'function')
- t.deepEqual(Buffer(out).toString('utf8'), 'abcde fg xyz')
- })
- arrays.write('abcde')
- arrays.write(Buffer(' fg '))
- arrays.end([ 120, 121, 122 ])
-})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* Fixes `lchmod` for Node versions prior to 0.6.2.
+* Implements `fs.lutimes` if possible. Otherwise it becomes a no-op.
+* Ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+  `lchown` if the user isn't root.
+* Makes `lchmod` and `lchown` no-ops if they are not available.
+* Retries reading a file if `read` results in an EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCES`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
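A hedged sketch of the EMFILE handling listed above: when many opens are in flight, graceful-fs queues the excess calls and retries them as descriptors free up instead of surfacing EMFILE (the file count here is arbitrary):

```javascript
var fs = require('graceful-fs')

// With plain `fs`, a burst like this can exhaust the descriptor limit and
// fail with EMFILE; graceful-fs queues and retries the overflow instead.
for (var i = 0; i < 5000; i++) {
  fs.readFile(__filename, function (er, data) {
    if (er) throw er
  })
}
```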
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..9add3d355
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/package.json
@@ -0,0 +1,72 @@
+{
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me"
+ },
+ "name": "graceful-fs",
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "version": "3.0.8",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "main": "graceful-fs.js",
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "keywords": [
+ "fs",
+ "module",
+ "reading",
+ "retry",
+ "retries",
+ "queue",
+ "error",
+ "errors",
+ "handling",
+ "EMFILE",
+ "EAGAIN",
+ "EINVAL",
+ "EPERM",
+ "EACCESS"
+ ],
+ "license": "ISC",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "_id": "graceful-fs@3.0.8",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_from": "graceful-fs@>=3.0.0 <4.0.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
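To illustrate the chown/chmod fix-ups described in the comments above, a small sketch (the target path is arbitrary; the behaviour shown assumes a non-root user):

```javascript
var fs = require('graceful-fs')

// As a non-root user, chown on a file you do not own normally fails with
// EPERM; chownFix() treats EPERM/EINVAL as success, so `er` is null here.
fs.chown('/etc/hosts', 0, 0, function (er) {
  console.log('chown error:', er)
})
```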
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/npm-registry-client/package.json b/deps/npm/node_modules/npm-registry-client/package.json
index 23eacd5f9..ffcccd85d 100644
--- a/deps/npm/node_modules/npm-registry-client/package.json
+++ b/deps/npm/node_modules/npm-registry-client/package.json
@@ -6,7 +6,7 @@
},
"name": "npm-registry-client",
"description": "Client for the npm registry",
- "version": "6.4.0",
+ "version": "6.5.1",
"repository": {
"url": "git://github.com/isaacs/npm-registry-client.git"
},
@@ -25,15 +25,15 @@
"request": "^2.47.0",
"retry": "^0.6.1",
"rimraf": "2",
- "semver": "2 >=2.2.1 || 3.x || 4",
+ "semver": "2 >=2.2.1 || 3.x || 4 || 5",
"slide": "^1.1.3",
"npmlog": ""
},
"devDependencies": {
"negotiator": "^0.4.9",
"nock": "^0.56.0",
- "standard": "^3.2.0",
- "tap": ""
+ "standard": "^4.0.0",
+ "tap": "^1.2.0"
},
"optionalDependencies": {
"npmlog": ""
@@ -41,12 +41,12 @@
"license": "ISC",
"readme": "# npm-registry-client\n\nThe code that npm uses to talk to the registry.\n\nIt handles all the caching and HTTP calls.\n\n## Usage\n\n```javascript\nvar RegClient = require('npm-registry-client')\nvar client = new RegClient(config)\nvar uri = \"https://registry.npmjs.org/npm\"\nvar params = {timeout: 1000}\n\nclient.get(uri, params, function (error, data, raw, res) {\n // error is an error if there was a problem.\n // data is the parsed data object\n // raw is the json string\n // res is the response from couch\n})\n```\n\n# Registry URLs\n\nThe registry calls take either a full URL pointing to a resource in the\nregistry, or a base URL for the registry as a whole (including the registry\npath – but be sure to terminate the path with `/`). `http` and `https` URLs are\nthe only ones supported.\n\n## Using the client\n\nEvery call to the client follows the same pattern:\n\n* `uri` {String} The *fully-qualified* URI of the registry API method being\n invoked.\n* `params` {Object} Per-request parameters.\n* `callback` {Function} Callback to be invoked when the call is complete.\n\n### Credentials\n\nMany requests to the registry can by authenticated, and require credentials\nfor authorization. These credentials always look the same:\n\n* `username` {String}\n* `password` {String}\n* `email` {String}\n* `alwaysAuth` {Boolean} Whether calls to the target registry are always\n authed.\n\n**or**\n\n* `token` {String}\n* `alwaysAuth` {Boolean} Whether calls to the target registry are always\n authed.\n\n## API\n\n### client.access(uri, params, cb)\n\n* `uri` {String} Registry URL for the package's access API endpoint.\n Looks like `/-/package/<package name>/access`.\n* `params` {Object} Object containing per-request properties.\n * `access` {String} New access level for the package. Can be either\n `public` or `restricted`. Registry will raise an error if trying\n to change the access level of an unscoped package.\n * `auth` {Credentials}\n\nSet the access level for scoped packages. 
For now, there are only two\naccess levels: \"public\" and \"restricted\".\n\n### client.adduser(uri, params, cb)\n\n* `uri` {String} Base registry URL.\n* `params` {Object} Object containing per-request properties.\n * `auth` {Credentials}\n* `cb` {Function}\n * `error` {Error | null}\n * `data` {Object} the parsed data object\n * `raw` {String} the json\n * `res` {Response Object} response from couch\n\nAdd a user account to the registry, or verify the credentials.\n\n### client.deprecate(uri, params, cb)\n\n* `uri` {String} Full registry URI for the deprecated package.\n* `params` {Object} Object containing per-request properties.\n * `version` {String} Semver version range.\n * `message` {String} The message to use as a deprecation warning.\n * `auth` {Credentials}\n* `cb` {Function}\n\nDeprecate a version of a package in the registry.\n\n### client.distTags.fetch(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `auth` {Credentials}\n* `cb` {Function}\n\nFetch all of the `dist-tags` for the named package.\n\n### client.distTags.add(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTag` {String} Name of the new `dist-tag`.\n * `version` {String} Exact version to be mapped to the `dist-tag`.\n * `auth` {Credentials}\n* `cb` {Function}\n\nAdd (or replace) a single dist-tag onto the named package.\n\n### client.distTags.set(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTags` {Object} Object containing a map from tag names to package\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nSet all of the `dist-tags` for the named package at once, creating any\n`dist-tags` that do not already exit. Any `dist-tags` not included in the\n`distTags` map will be removed.\n\n### client.distTags.update(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTags` {Object} Object containing a map from tag names to package\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nUpdate the values of multiple `dist-tags`, creating any `dist-tags` that do\nnot already exist. Any pre-existing `dist-tags` not included in the `distTags`\nmap will be left alone.\n\n### client.distTags.rm(uri, params, cb)\n\n* `uri` {String} Base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `package` {String} Name of the package.\n * `distTag` {String} Name of the new `dist-tag`.\n * `auth` {Credentials}\n* `cb` {Function}\n\nRemove a single `dist-tag` from the named package.\n\n### client.get(uri, params, cb)\n\n* `uri` {String} The complete registry URI to fetch\n* `params` {Object} Object containing per-request properties.\n * `timeout` {Number} Duration before the request times out. Optional\n (default: never).\n * `follow` {Boolean} Follow 302/301 responses. Optional (default: true).\n * `staleOk` {Boolean} If there's cached data available, then return that to\n the callback quickly, and update the cache the background. 
Optional\n (default: false).\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n\nFetches data from the registry via a GET request, saving it in the cache folder\nwith the ETag or the \"Last Modified\" timestamp.\n\n### client.publish(uri, params, cb)\n\n* `uri` {String} The registry URI for the package to publish.\n* `params` {Object} Object containing per-request properties.\n * `metadata` {Object} Package metadata.\n * `access` {String} Access for the package. Can be `public` or `restricted` (no default).\n * `body` {Stream} Stream of the package body / tarball.\n * `auth` {Credentials}\n* `cb` {Function}\n\nPublish a package to the registry.\n\nNote that this does not create the tarball from a folder.\n\n### client.star(uri, params, cb)\n\n* `uri` {String} The complete registry URI for the package to star.\n* `params` {Object} Object containing per-request properties.\n * `starred` {Boolean} True to star the package, false to unstar it. Optional\n (default: false).\n * `auth` {Credentials}\n* `cb` {Function}\n\nStar or unstar a package.\n\nNote that the user does not have to be the package owner to star or unstar a\npackage, though other writes do require that the user be the package owner.\n\n### client.stars(uri, params, cb)\n\n* `uri` {String} The base URL for the registry.\n* `params` {Object} Object containing per-request properties.\n * `username` {String} Name of user to fetch starred packages for. Optional\n (default: user in `auth`).\n * `auth` {Credentials} Optional (required if `username` is omitted).\n* `cb` {Function}\n\nView your own or another user's starred packages.\n\n### client.tag(uri, params, cb)\n\n* `uri` {String} The complete registry URI to tag\n* `params` {Object} Object containing per-request properties.\n * `version` {String} Version to tag.\n * `tag` {String} Tag name to apply.\n * `auth` {Credentials}\n* `cb` {Function}\n\nMark a version in the `dist-tags` hash, so that `pkg@tag` will fetch the\nspecified version.\n\n### client.unpublish(uri, params, cb)\n\n* `uri` {String} The complete registry URI of the package to unpublish.\n* `params` {Object} Object containing per-request properties.\n * `version` {String} version to unpublish. Optional – omit to unpublish all\n versions.\n * `auth` {Credentials}\n* `cb` {Function}\n\nRemove a version of a package (or all versions) from the registry. When the\nlast version us unpublished, the entire document is removed from the database.\n\n### client.whoami(uri, params, cb)\n\n* `uri` {String} The base registry for the URI.\n* `params` {Object} Object containing per-request properties.\n * `auth` {Credentials}\n* `cb` {Function}\n\nSimple call to see who the registry thinks you are. Especially useful with\ntoken-based auth.\n\n\n## PLUMBING\n\nThe below are primarily intended for use by the rest of the API, or by the npm\ncaching logic directly.\n\n### client.request(uri, params, cb)\n\n* `uri` {String} URI pointing to the resource to request.\n* `params` {Object} Object containing per-request properties.\n * `method` {String} HTTP method. Optional (default: \"GET\").\n * `body` {Stream | Buffer | String | Object} The request body. Objects\n that are not Buffers or Streams are encoded as JSON. Optional – body\n only used for write operations.\n * `etag` {String} The cached ETag. Optional.\n * `lastModified` {String} The cached Last-Modified timestamp. Optional.\n * `follow` {Boolean} Follow 302/301 responses. 
Optional (default: true).\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n * `error` {Error | null}\n * `data` {Object} the parsed data object\n * `raw` {String} the json\n * `res` {Response Object} response from couch\n\nMake a generic request to the registry. All the other methods are wrappers\naround `client.request`.\n\n### client.fetch(uri, params, cb)\n\n* `uri` {String} The complete registry URI to upload to\n* `params` {Object} Object containing per-request properties.\n * `headers` {Stream} HTTP headers to be included with the request. Optional.\n * `auth` {Credentials} Optional.\n* `cb` {Function}\n\nFetch a package from a URL, with auth set appropriately if included. Used to\ncache remote tarballs as well as request package tarballs from the registry.\n\n# Configuration\n\nThe client uses its own configuration, which is just passed in as a simple\nnested object. The following are the supported values (with their defaults, if\nany):\n\n* `proxy.http` {URL} The URL to proxy HTTP requests through.\n* `proxy.https` {URL} The URL to proxy HTTPS requests through. Defaults to be\n the same as `proxy.http` if unset.\n* `proxy.localAddress` {IP} The local address to use on multi-homed systems.\n* `ssl.ca` {String} Certificate signing authority certificates to trust.\n* `ssl.certificate` {String} Client certificate (PEM encoded). Enable access\n to servers that require client certificates.\n* `ssl.key` {String} Private key (PEM encoded) for client certificate.\n* `ssl.strict` {Boolean} Whether or not to be strict with SSL certificates.\n Default = `true`\n* `retry.count` {Number} Number of times to retry on GET failures. Default = 2.\n* `retry.factor` {Number} `factor` setting for `node-retry`. Default = 10.\n* `retry.minTimeout` {Number} `minTimeout` setting for `node-retry`.\n Default = 10000 (10 seconds)\n* `retry.maxTimeout` {Number} `maxTimeout` setting for `node-retry`.\n Default = 60000 (60 seconds)\n* `userAgent` {String} User agent header to send. Default =\n `\"node/{process.version}\"`\n* `log` {Object} The logger to use. Defaults to `require(\"npmlog\")` if\n that works, otherwise logs are disabled.\n* `defaultTag` {String} The default tag to use when publishing new packages.\n Default = `\"latest\"`\n* `couchToken` {Object} A token for use with\n [couch-login](https://npmjs.org/package/couch-login).\n* `sessionToken` {string} A random identifier for this set of client requests.\n Default = 8 random hexadecimal bytes.\n",
"readmeFilename": "README.md",
- "gitHead": "a8d3193832487fb2e6b5015e30d15fe1b15f48e2",
+ "gitHead": "dbb351ae906f40be03f21bbe28bd392a380dc7bb",
"bugs": {
"url": "https://github.com/isaacs/npm-registry-client/issues"
},
- "homepage": "https://github.com/isaacs/npm-registry-client",
- "_id": "npm-registry-client@6.4.0",
- "_shasum": "4da1adfd1b63c9a7b9a6626eb10e36665c29b5f4",
- "_from": "npm-registry-client@6.4.0"
+ "homepage": "https://github.com/isaacs/npm-registry-client#readme",
+ "_id": "npm-registry-client@6.5.1",
+ "_shasum": "328d2088252b69fa541c3dd9f7690288661592a1",
+ "_from": "npm-registry-client@>=6.5.1 <6.6.0"
}
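For orientation, the readme embedded above documents both the client's per-call API (`client.whoami`, `client.request`, and friends) and its nested configuration object. Below is a minimal usage sketch, assuming the `RegClient` constructor and token-style `{Credentials}` described earlier in that readme; the registry URL, token, and option values are illustrative placeholders, not values taken from this patch.

```javascript
// Sketch only: exercises the whoami/config surface documented in the readme above.
// The token and URL below are placeholders.
var RegClient = require('npm-registry-client')

var client = new RegClient({
  // Nested configuration keys as listed under "Configuration" above; all optional.
  ssl: { strict: true },
  retry: { count: 2, factor: 10, minTimeout: 10000, maxTimeout: 60000 },
  userAgent: 'node/' + process.version,
  defaultTag: 'latest'
})

var uri = 'https://registry.npmjs.org/'
var params = { auth: { token: '00000000-0000-0000-0000-000000000000' } }

// "Simple call to see who the registry thinks you are" – the second callback
// argument is the username the registry reports.
client.whoami(uri, params, function (er, username) {
  if (er) return console.error('whoami failed:', er)
  console.log('registry says you are', username)
})
```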
diff --git a/deps/npm/node_modules/npm-registry-client/test/fixtures/@npm/npm-registry-client/cache.json b/deps/npm/node_modules/npm-registry-client/test/fixtures/@npm/npm-registry-client/cache.json
deleted file mode 100644
index 4561db502..000000000
--- a/deps/npm/node_modules/npm-registry-client/test/fixtures/@npm/npm-registry-client/cache.json
+++ /dev/null
@@ -1 +0,0 @@
-{"_id":"@npm%2fnpm-registry-client","_rev":"213-0a1049cf56172b7d9a1184742c6477b9","name":"@npm/npm-registry-client","description":"Client for the npm registry","dist-tags":{"latest":"2.0.4","v2.0":"2.0.3"},"versions":{"0.0.1":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_npmUser":{"name":"isaacs","email":"i@izs.me"},"_id":"@npm%2fnpm-registry-client@0.0.1","_engineSupported":true,"_npmVersion":"1.1.24","_nodeVersion":"v0.7.10-pre","_defaultsLoaded":true,"dist":{"shasum":"693a08f6d2faea22bbd2bf412508a63d3e6229a7","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.1.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.2":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.2","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_npmUser":{"name":"isaacs","email":"i@izs.me"},"_id":"@npm%2fnpm-registry-client@0.0.2","_engineSupported":true,"_npmVersion":"1.1.24","_nodeVersion":"v0.7.10-pre","_defaultsLoaded":true,"dist":{"shasum":"b48c0ec5563c6a6fdc253454fc56d2c60c5a26f4","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.2.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.3":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.3","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_npmUser":{"name":"isaacs","email":"i@izs.me"},"_id":"@npm%2fnpm-registry-client@0.0.3","_engineSupported":true,"_npmVersion":"1.1.24","_nodeVersion":"v0.7.10-pre","_defaultsLoaded":true,"dist":{"shasum":"ccc0254c2d59e3ea9b9050e2b16edef78df1a1e8","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.3.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.4":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.4","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_npmUser":{"name":"isaacs","email":"i@izs.me"},"_id":"@npm%2fnpm-registry-client@0.0.4","_engineSupported":true,"_npmVersion":"1.1.25","_nodeVersion":"v0.7.10-pre","_defaultsLoaded":true,"dist":{"shasum":"faabd25ef477521c74ac21e0f4cf3a2f66d18fb3","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.4.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.5":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.5","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_id":"@npm%2fnpm-registry-client@0.0.5","dist":{"shasum":"85219810c9d89ae8d28ea766e7cf74efbd9f1e52","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.5.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.6":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"The code that npm uses to talk to the registry","version":"0.0.6","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_id":"@npm%2fnpm-registry-client@0.0.6","dist":{"shasum":"cc6533b3b41df65e6e9db2601fbbf1a509a7e94c","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.6.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.7":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"The code that npm uses to talk to the registry","version":"0.0.7","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"engines":{"node":"*"},"_id":"@npm%2fnpm-registry-client@0.0.7","dist":{"shasum":"0cee1d1c61f1c8e483774fe1f7bbb81c4f394a3a","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.7.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.8":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.8","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.0.8","dist":{"shasum":"1b7411c3f7310ec2a96b055b00e7ca606e47bd07","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.8.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.9":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.9","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.0.9","dist":{"shasum":"6d5bfde431559ac9e2e52a7db85f5839b874f022","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.9.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.10":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.10","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.0.10","dist":{"shasum":"0c8b6a4615bce82aa6cc04a0d1f7dc89921f7a38","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.10.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.0.11":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.0.11","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.0.11","dist":{"shasum":"afab40be5bed1faa946d8e1827844698f2ec1db7","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.0.11.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.1.0":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.1.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.1.0","dist":{"shasum":"1077d6bbb5e432450239dc6622a59474953ffbea","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.1.0.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.1.1":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.1.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.1.1","dist":{"shasum":"759765361d09b715270f59cf50f10908e4e9c5fc","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.1.1.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.1.2":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.1.2","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.1.2","dist":{"shasum":"541ce93abb3d35f5c325545c718dd3bbeaaa9ff0","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.1.2.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.1.3":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.1.3","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.1.3","dist":{"shasum":"e9a40d7031e8f809af5fd85aa9aac979e17efc97","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.1.3.tgz"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.1.4":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.1.4","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.1.4","dist":{"shasum":"b211485b046191a1085362376530316f0cab0420","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.1.4.tgz"},"_npmVersion":"1.1.48","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.0":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.0","dist":{"shasum":"6508a4b4d96f31057d5200ca5779531bafd2b840","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.0.tgz"},"_npmVersion":"1.1.49","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.1":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"node-uuid":"~1.3.3","request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.1","dist":{"shasum":"1bc8c4576c368cd88253d8a52daf40c55b89bb1a","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.1.tgz"},"_npmVersion":"1.1.49","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.5":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.5","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.5","dist":{"shasum":"2f55d675dfb977403b1ad0d96874c1d30e8058d7","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.5.tgz"},"_npmVersion":"1.1.51","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.6":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.6","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.6","dist":{"shasum":"f05df6695360360ad220e6e13a6a7bace7165fbe","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.6.tgz"},"_npmVersion":"1.1.56","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.7":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.7","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.0.14","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.7","dist":{"shasum":"867bad8854cae82ed89ee3b7f1d391af59491671","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.7.tgz"},"_npmVersion":"1.1.59","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.8":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.8","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.6","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.8","dist":{"shasum":"ef194cdb70f1ea03a576cff2c97392fa96e36563","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.8.tgz"},"_npmVersion":"1.1.62","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.9":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.9","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.9","dist":{"shasum":"3cec10431dfed1594adaf99c50f482ee56ecf9e4","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.9.tgz"},"_npmVersion":"1.1.59","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.10":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.10","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2.0.1","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.10","dist":{"shasum":"1e69726dae0944e78562fd77243f839c6a2ced1e","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.10.tgz"},"_npmVersion":"1.1.64","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.11":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.11","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.11","dist":{"shasum":"d92f33c297eb1bbd57fd597c3d8f5f7e9340a0b5","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.11.tgz"},"_npmVersion":"1.1.70","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.12":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.12","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.1.8","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.12","dist":{"shasum":"3bfb6fc0e4b131d665580cd1481c341fe521bfd3","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.12.tgz"},"_from":".","_npmVersion":"1.2.2","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.13":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.13","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.13","dist":{"shasum":"e03f2a4340065511b7184a3e2862cd5d459ef027","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.13.tgz"},"_from":".","_npmVersion":"1.2.4","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.14":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.14","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.14","dist":{"shasum":"186874a7790417a340d582b1cd4a7c338087ee12","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.14.tgz"},"_from":".","_npmVersion":"1.2.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.15":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.15","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.15","dist":{"shasum":"f71f32b7185855f1f8b7a5ef49e49d2357c2c552","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.15.tgz"},"_from":".","_npmVersion":"1.2.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.16":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.16","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.16","dist":{"shasum":"3331323b5050fc5afdf77c3a35913c16f3e43964","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.16.tgz"},"_from":".","_npmVersion":"1.2.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.17":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.17","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.17","dist":{"shasum":"1df2bbecac6751f5d9600fb43722aef96d956773","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.17.tgz"},"_from":".","_npmVersion":"1.2.11","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.18":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.18","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.9.202","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.18","dist":{"shasum":"198c8d15ed9b1ed546faf6e431eb63a6b18193ad","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.18.tgz"},"_from":".","_npmVersion":"1.2.13","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.19":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.19","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.16","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.19","dist":{"shasum":"106da826f0d2007f6e081f2b68fb6f26fa951b20","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.19.tgz"},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.20":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.20","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.16","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","_id":"@npm%2fnpm-registry-client@0.2.20","dist":{"shasum":"3fff194331e26660be2cf8ebf45ddf7d36add5f6","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.20.tgz"},"_from":".","_npmVersion":"1.2.15","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.21":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.21","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.16","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.21","dist":{"shasum":"d85dd32525f193925c46ff9eb0e0f529dfd1b254","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.21.tgz"},"_from":".","_npmVersion":"1.2.18","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.22":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.22","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"~2.20.0","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.22","dist":{"shasum":"caa22ff40a1ccd632a660b8b80c333c8f92d5a17","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.22.tgz"},"_from":".","_npmVersion":"1.2.18","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.23":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.23","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.20.0","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.23","dist":{"shasum":"a320ab2b1d048b4f7b88e40bd86974ca322b4c24","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.23.tgz"},"_from":".","_npmVersion":"1.2.19","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.24":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.24","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.20.0","graceful-fs":"~1.2.0","semver":"~1.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.24","dist":{"shasum":"e12f644338619319ee7f233363a1714a87f3c72d","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.24.tgz"},"_from":".","_npmVersion":"1.2.22","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.25":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.25","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.20.0","graceful-fs":"~1.2.0","semver":"~2.0.5","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.25","dist":{"shasum":"c2caeb1dcf937d6fcc4a187765d401f5e2f54027","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.25.tgz"},"_from":".","_npmVersion":"1.2.32","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.26":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.26","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.20.0","graceful-fs":"~1.2.0","semver":"~2.0.5","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.26","dist":{"shasum":"4c5a2b3de946e383032f10fa497d0c15ee5f4c60","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.26.tgz"},"_from":".","_npmVersion":"1.3.1","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.27":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.27","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.20.0","graceful-fs":"~2.0.0","semver":"~2.0.5","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.15","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.27","dist":{"shasum":"8f338189d32769267886a07ad7b7fd2267446adf","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.27.tgz"},"_from":".","_npmVersion":"1.3.2","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.28":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.28","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"~2.1.0","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"_id":"@npm%2fnpm-registry-client@0.2.28","dist":{"shasum":"959141fc0180d7b1ad089e87015a8a2142a8bffc","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.28.tgz"},"_from":".","_npmVersion":"1.3.6","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.29":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.29","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.2.29","dist":{"shasum":"66ff2766f0c61d41e8a6139d3692d8833002c686","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.29.tgz"},"_from":".","_npmVersion":"1.3.12","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.30":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.30","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.2.30","dist":{"shasum":"f01cae5c51aa0a1c5dc2516cbad3ebde068d3eaa","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.30.tgz"},"_from":".","_npmVersion":"1.3.14","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.2.31":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.2.31","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.2.31","dist":{"shasum":"24a23e24e43246677cb485f8391829e9536563d4","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.2.31.tgz"},"_from":".","_npmVersion":"1.3.17","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.0":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.0","dist":{"shasum":"66eab02a69be67f232ac14023eddfb8308c2eccd","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.0.tgz"},"_from":".","_npmVersion":"1.3.18","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.1":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.1","dist":{"shasum":"16dba07cc304442edcece378218672d0a1258ef8","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.1.tgz"},"_from":".","_npmVersion":"1.3.18","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.2":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.2","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.2","dist":{"shasum":"ea3060bd0a87fb1d97b87433b50f38f7272b1686","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.2.tgz"},"_from":".","_npmVersion":"1.3.20","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.3":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.3","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.3","dist":{"shasum":"da08bb681fb24aa5c988ca71f8c10f27f09daf4a","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.3.tgz"},"_from":".","_npmVersion":"1.3.21","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.4":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.4","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.4","dist":{"shasum":"25d771771590b1ca39277aea4506af234c5f4342","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.4.tgz"},"_from":".","_npmVersion":"1.3.25","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.5":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.5","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","couch-login":"~0.1.18","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.5","dist":{"shasum":"98ba1ac851a3939a3fb9917c28fa8da522dc635f","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.5.tgz"},"_from":".","_npmVersion":"1.3.25","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.3.6":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.3.6","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.3.6","dist":{"shasum":"c48a2a03643769acc49672860f7920ec6bffac6e","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.3.6.tgz"},"_from":".","_npmVersion":"1.3.26","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.0":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.0","dist":{"shasum":"30d0c178b7f2e54183a6a3fc9fe4071eb10290bf","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.0.tgz"},"_from":".","_npmVersion":"1.3.26","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.1":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.1","dist":{"shasum":"9c49b3e44558e2072158fb085be8a083c5f83537","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.1.tgz"},"_from":".","_npmVersion":"1.4.0","_npmUser":{"name":"npm-www","email":"npm@npmjs.com"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.2":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.2","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.2","dist":{"shasum":"d9568a9413bee14951201ce73f3b3992ec6658c0","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.2.tgz"},"_from":".","_npmVersion":"1.4.1","_npmUser":{"name":"npm-www","email":"npm@npmjs.com"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.3":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.3","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.3","dist":{"shasum":"aa188fc5067158e991a57f4697c54994108f5389","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.3.tgz"},"_from":".","_npmVersion":"1.4.2","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.4":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.4","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.4","dist":{"shasum":"f9dbc383a49069d8c7f67755a3ff6e424aff584f","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.4.tgz"},"_from":".","_npmVersion":"1.4.2","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.5":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.5","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.5","dist":{"shasum":"7d6fdca46139470715f9477ddb5ad3e770d4de7b","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.5.tgz"},"_from":".","_npmVersion":"1.4.4","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.6":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.6","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.6","_from":".","_npmVersion":"1.4.6","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"657f69a79543fc4cc264c3b2de958bd15f7140fe","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.6.tgz"},"directories":{}},"0.4.7":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.7","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.7","dist":{"shasum":"f4369b59890da7882527eb7c427dd95d43707afb","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.7.tgz"},"_from":".","_npmVersion":"1.4.6","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"directories":{}},"0.4.8":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.8","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.8","_shasum":"a6685a161033101be6064b7af887ab440e8695d0","_from":".","_npmVersion":"1.4.8","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"a6685a161033101be6064b7af887ab440e8695d0","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.8.tgz"},"directories":{}},"0.4.9":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.9","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.9","_shasum":"304d3d4726a58e33d8cc965afdc9ed70b996580c","_from":".","_npmVersion":"1.4.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"304d3d4726a58e33d8cc965afdc9ed70b996580c","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.9.tgz"},"directories":{}},"0.4.10":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.10","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"^2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.10","_shasum":"ab7bf1be3ba07d769eaf74dee3c9347e02283116","_from":".","_npmVersion":"1.4.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"ab7bf1be3ba07d769eaf74dee3c9347e02283116","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.10.tgz"},"directories":{}},"0.4.11":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.11","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"2 >=2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.11","_shasum":"032e9b6b050ed052ee9441841a945a184ea6bc33","_from":".","_npmVersion":"1.4.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"032e9b6b050ed052ee9441841a945a184ea6bc33","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.11.tgz"},"directories":{}},"0.4.12":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"0.4.12","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"request":"2 >=2.25.0","graceful-fs":"~2.0.0","semver":"2 >=2.2.1","slide":"~1.1.3","chownr":"0","mkdirp":"~0.3.3","rimraf":"~2","retry":"0.6.0","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@0.4.12","_shasum":"34303422f6a3da93ca3a387a2650d707c8595b99","_from":".","_npmVersion":"1.4.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"34303422f6a3da93ca3a387a2650d707c8595b99","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-0.4.12.tgz"},"directories":{}},"1.0.0":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"1.0.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"~2.0.0","mkdirp":"~0.3.3","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@1.0.0","_shasum":"2a6f9dfdce5f8ebf4b9af4dbfd738384d25014e5","_from":".","_npmVersion":"1.4.10","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"2a6f9dfdce5f8ebf4b9af4dbfd738384d25014e5","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-1.0.0.tgz"},"directories":{}},"1.0.1":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"1.0.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"~2.0.0","mkdirp":"~0.3.3","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"98b1278c230cf6c159f189e2f8c69daffa727ab8","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@1.0.1","_shasum":"c5f6a87d285f2005a35d3f67d9c724bce551b0f1","_from":".","_npmVersion":"1.4.13","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"c5f6a87d285f2005a35d3f67d9c724bce551b0f1","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-1.0.1.tgz"},"directories":{}},"2.0.0":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"2.0.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"~2.0.0","mkdirp":"~0.3.3","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"47a98069b6a34e751cbd5b84ce92858cae5abe70","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@2.0.0","_shasum":"88810dac2d534c0df1d905c79e723392fcfc791a","_from":".","_npmVersion":"1.4.14","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"}],"dist":{"shasum":"88810dac2d534c0df1d905c79e723392fcfc791a","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-2.0.0.tgz"},"directories":{}},"2.0.1":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"2.0.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"123e40131f83f7265f66ecd2a558cce44a3aea86","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@2.0.1","_shasum":"611c7cb7c8f7ff22be2ebc6398423b5de10db0e2","_from":".","_npmVersion":"1.4.14","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"611c7cb7c8f7ff22be2ebc6398423b5de10db0e2","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-2.0.1.tgz"},"directories":{}},"2.0.2":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"2.0.2","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"6ecc311c9dd4890f2d9b6bae60447070a3321e12","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@2.0.2","_shasum":"a82b000354c7f830114fb18444764bc477d5740f","_from":".","_npmVersion":"1.4.15","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"a82b000354c7f830114fb18444764bc477d5740f","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-2.0.2.tgz"},"directories":{}},"3.0.0":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.0","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","normalize-package-data":"^0.4.0","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"6bb1aec1e85fa82ee075bd997d6fb9f2dbb7f643","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.0","_shasum":"4febc5cdb274e9fa06bc3008910e3fa1ec007994","_from":".","_npmVersion":"1.5.0-pre","_npmUser":{"name":"othiym23","email":"ogd@aoaioxxysz.net"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"4febc5cdb274e9fa06bc3008910e3fa1ec007994","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.0.tgz"},"directories":{}},"3.0.1":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.1","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","normalize-package-data":"^0.4.0","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"fe8382dde609ea1e3580fcdc5bc3d0bba119cfc6","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.1","_shasum":"5f3ee362ce5c237cfb798fce22c77875fc1a63c2","_from":".","_npmVersion":"1.5.0-alpha-1","_npmUser":{"name":"othiym23","email":"ogd@aoaioxxysz.net"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"5f3ee362ce5c237cfb798fce22c77875fc1a63c2","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.1.tgz"},"directories":{}},"2.0.3":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"2.0.3","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"2578fb9a807d77417554ba235ba8fac39405e832","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@2.0.3","_shasum":"93dad3d9a162c99404badb71739c622c0f3b9a72","_from":".","_npmVersion":"1.5.0-alpha-1","_npmUser":{"name":"othiym23","email":"ogd@aoaioxxysz.net"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"93dad3d9a162c99404badb71739c622c0f3b9a72","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-2.0.3.tgz"},"directories":{}},"3.0.2":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.2","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","normalize-package-data":"^0.4.0","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"15343019160ace0b9874cf0ec186b3425dbc7301","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.2","_shasum":"5dd0910157ce55f4286a1871d39f9a2128cd3c99","_from":".","_npmVersion":"1.5.0-alpha-2","_npmUser":{"name":"othiym23","email":"ogd@aoaioxxysz.net"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"5dd0910157ce55f4286a1871d39f9a2128cd3c99","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.2.tgz"},"directories":{}},"3.0.3":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.3","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.3.3","normalize-package-data":"^0.4.0","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1 || 3.x","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"b18a780d1185f27c06c27812147b83aba0d4a2f5","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.3","_shasum":"2377dc1cf69b4d374b3a95fb7feba8c804d8cb30","_from":".","_npmVersion":"2.0.0-alpha-5","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"2377dc1cf69b4d374b3a95fb7feba8c804d8cb30","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.3.tgz"},"directories":{}},"3.0.4":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.4","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"~0.5.0","normalize-package-data":"^0.4.0","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1 || 3.x","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"54900fe4b2eb5b99ee6dfe173f145732fdfae80e","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.4","_shasum":"d4a177d1f25615cfaef9b6844fa366ffbf5f578a","_from":".","_npmVersion":"2.0.0-alpha-5","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"d4a177d1f25615cfaef9b6844fa366ffbf5f578a","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.4.tgz"},"directories":{}},"3.0.5":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.5","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"0.5","normalize-package-data":"0.4","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"2","semver":"2 >=2.2.1 || 3.x","slide":"^1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"BSD","gitHead":"635db1654346bc86473df7b39626601425f46177","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.5","_shasum":"cdabaefa399b81ac8a86a48718aefd80e7b19ff3","_from":".","_npmVersion":"2.0.0-alpha-5","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"cdabaefa399b81ac8a86a48718aefd80e7b19ff3","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.5.tgz"},"directories":{}},"3.0.6":{"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"3.0.6","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"^0.5.0","normalize-package-data":"0.4","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"2","semver":"2 >=2.2.1 || 3.x","slide":"^1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"ISC","gitHead":"eba30fadd724ed5cad1aec95ac3ee907a59b7317","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@3.0.6","_shasum":"14a17d9a60ed2a80b04edcbc596dbce0d96540ee","_from":".","_npmVersion":"1.4.22","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"14a17d9a60ed2a80b04edcbc596dbce0d96540ee","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-3.0.6.tgz"},"directories":{}},"2.0.4":{"author":{"name":"Isaac Z. 
Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"name":"@npm/npm-registry-client","description":"Client for the npm registry","version":"2.0.4","repository":{"url":"git://github.com/isaacs/npm-registry-client"},"main":"index.js","scripts":{"test":"tap test/*.js"},"dependencies":{"chownr":"0","graceful-fs":"^3.0.0","mkdirp":"^0.5.0","npm-cache-filename":"^1.0.0","request":"2 >=2.25.0","retry":"0.6.0","rimraf":"~2","semver":"2 >=2.2.1","slide":"~1.1.3","npmlog":""},"devDependencies":{"tap":""},"optionalDependencies":{"npmlog":""},"license":"ISC","gitHead":"a10f621d9cdc813b9d3092a14b661f65bfa6d40d","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"homepage":"https://github.com/isaacs/npm-registry-client","_id":"@npm%2fnpm-registry-client@2.0.4","_shasum":"528e08900d7655c12096d1637d1c3a7a5b451019","_from":".","_npmVersion":"1.4.22","_npmUser":{"name":"isaacs","email":"i@izs.me"},"maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"dist":{"shasum":"528e08900d7655c12096d1637d1c3a7a5b451019","tarball":"http://registry.npmjs.org/@npm%2fnpm-registry-client/-/@npm%2fnpm-registry-client-2.0.4.tgz"},"directories":{}}},"readme":"# npm-registry-client\u000a\u000aThe code that npm uses to talk to the registry.\u000a\u000aIt handles all the caching and HTTP calls.\u000a\u000a## Usage\u000a\u000a```javascript\u000avar RegClient = require('npm-registry-client')\u000avar client = new RegClient(config)\u000avar uri = \"npm://registry.npmjs.org/npm\"\u000avar options = {timeout: 1000}\u000a\u000aclient.get(uri, options, function (error, data, raw, res) {\u000a // error is an error if there was a problem.\u000a // data is the parsed data object\u000a // raw is the json string\u000a // res is the response from couch\u000a})\u000a```\u000a\u000a# Registry URLs\u000a\u000aThe registry calls take either a full URL pointing to a resource in the\u000aregistry, or a base URL for the registry as a whole (for the base URL, any path\u000awill be ignored). In addition to `http` and `https`, `npm` URLs are allowed.\u000a`npm` URLs are `https` URLs with the additional restrictions that they will\u000aalways include authorization credentials, and the response is always registry\u000ametadata (and not tarballs or other attachments).\u000a\u000a# Configuration\u000a\u000aThis program is designed to work with\u000a[npmconf](https://npmjs.org/package/npmconf), but you can also pass in\u000aa plain-jane object with the appropriate configs, and it'll shim it\u000afor you. Any configuration thingie that has get/set/del methods will\u000aalso be accepted.\u000a\u000a* `cache` **Required** {String} Path to the cache folder\u000a* `always-auth` {Boolean} Auth even for GET requests.\u000a* `auth` {String} A base64-encoded `username:password`\u000a* `email` {String} User's email address\u000a* `tag` {String} The default tag to use when publishing new packages.\u000a Default = `\"latest\"`\u000a* `ca` {String} Cerficate signing authority certificates to trust.\u000a* `cert` {String} Client certificate (PEM encoded). Enable access\u000a to servers that require client certificates\u000a* `key` {String} Private key (PEM encoded) for client certificate 'cert'\u000a* `strict-ssl` {Boolean} Whether or not to be strict with SSL\u000a certificates. Default = `true`\u000a* `user-agent` {String} User agent header to send. Default =\u000a `\"node/{process.version} {process.platform} {process.arch}\"`\u000a* `log` {Object} The logger to use. 
Defaults to `require(\"npmlog\")` if\u000a that works, otherwise logs are disabled.\u000a* `fetch-retries` {Number} Number of times to retry on GET failures.\u000a Default=2\u000a* `fetch-retry-factor` {Number} `factor` setting for `node-retry`. Default=10\u000a* `fetch-retry-mintimeout` {Number} `minTimeout` setting for `node-retry`.\u000a Default=10000 (10 seconds)\u000a* `fetch-retry-maxtimeout` {Number} `maxTimeout` setting for `node-retry`.\u000a Default=60000 (60 seconds)\u000a* `proxy` {URL} The url to proxy requests through.\u000a* `https-proxy` {URL} The url to proxy https requests through.\u000a Defaults to be the same as `proxy` if unset.\u000a* `_auth` {String} The base64-encoded authorization header.\u000a* `username` `_password` {String} Username/password to use to generate\u000a `_auth` if not supplied.\u000a* `_token` {Object} A token for use with\u000a [couch-login](https://npmjs.org/package/couch-login)\u000a\u000a# client.request(method, uri, options, cb)\u000a\u000a* `method` {String} HTTP method\u000a* `uri` {String} URI pointing to the resource to request\u000a* `options` {Object} Object containing optional per-request properties.\u000a * `what` {Stream | Buffer | String | Object} The request body. Objects\u000a that are not Buffers or Streams are encoded as JSON.\u000a * `etag` {String} The cached ETag\u000a * `follow` {Boolean} Follow 302/301 responses (defaults to true)\u000a* `cb` {Function}\u000a * `error` {Error | null}\u000a * `data` {Object} the parsed data object\u000a * `raw` {String} the json\u000a * `res` {Response Object} response from couch\u000a\u000aMake a request to the registry. All the other methods are wrappers around\u000a`request`.\u000a\u000a# client.adduser(base, username, password, email, cb)\u000a\u000a* `base` {String} Base registry URL\u000a* `username` {String}\u000a* `password` {String}\u000a* `email` {String}\u000a* `cb` {Function}\u000a\u000aAdd a user account to the registry, or verify the credentials.\u000a\u000a# client.deprecate(uri, version, message, cb)\u000a\u000a* `uri` {String} Full registry URI for the deprecated package\u000a* `version` {String} Semver version range\u000a* `message` {String} The message to use as a deprecation warning\u000a* `cb` {Function}\u000a\u000aDeprecate a version of a package in the registry.\u000a\u000a# client.bugs(uri, cb)\u000a\u000a* `uri` {String} Full registry URI for the package\u000a* `cb` {Function}\u000a\u000aGet the url for bugs of a package\u000a\u000a# client.get(uri, options, cb)\u000a\u000a* `uri` {String} The complete registry URI to fetch\u000a* `options` {Object} Object containing optional per-request properties.\u000a * `timeout` {Number} Duration before the request times out.\u000a * `follow` {Boolean} Follow 302/301 responses (defaults to true)\u000a * `staleOk` {Boolean} If there's cached data available, then return that\u000a to the callback quickly, and update the cache the background.\u000a\u000aFetches data from the registry via a GET request, saving it in the cache folder\u000awith the ETag.\u000a\u000a# client.publish(uri, data, tarball, cb)\u000a\u000a* `uri` {String} The registry URI to publish to\u000a* `data` {Object} Package data\u000a* `tarball` {String | Stream} Filename or stream of the package tarball\u000a* `cb` {Function}\u000a\u000aPublish a package to the registry.\u000a\u000aNote that this does not create the tarball from a folder. 
However, it can\u000aaccept a gzipped tar stream or a filename to a tarball.\u000a\u000a# client.star(uri, starred, cb)\u000a\u000a* `uri` {String} The complete registry URI to star\u000a* `starred` {Boolean} True to star the package, false to unstar it.\u000a* `cb` {Function}\u000a\u000aStar or unstar a package.\u000a\u000aNote that the user does not have to be the package owner to star or unstar a\u000apackage, though other writes do require that the user be the package owner.\u000a\u000a# client.stars(base, username, cb)\u000a\u000a* `base` {String} The base URL for the registry\u000a* `username` {String} Name of user to fetch starred packages for.\u000a* `cb` {Function}\u000a\u000aView your own or another user's starred packages.\u000a\u000a# client.tag(uri, version, tag, cb)\u000a\u000a* `uri` {String} The complete registry URI to tag\u000a* `version` {String} Version to tag\u000a* `tag` {String} Tag name to apply\u000a* `cb` {Function}\u000a\u000aMark a version in the `dist-tags` hash, so that `pkg@tag` will fetch the\u000aspecified version.\u000a\u000a# client.unpublish(uri, [ver], cb)\u000a\u000a* `uri` {String} The complete registry URI to unpublish\u000a* `ver` {String} version to unpublish. Leave blank to unpublish all\u000a versions.\u000a* `cb` {Function}\u000a\u000aRemove a version of a package (or all versions) from the registry. When the\u000alast version us unpublished, the entire document is removed from the database.\u000a\u000a# client.upload(uri, file, [etag], [nofollow], cb)\u000a\u000a* `uri` {String} The complete registry URI to upload to\u000a* `file` {String | Stream} Either the filename or a readable stream\u000a* `etag` {String} Cache ETag\u000a* `nofollow` {Boolean} Do not follow 301/302 responses\u000a* `cb` {Function}\u000a\u000aUpload an attachment. 
Mostly used by `client.publish()`.\u000a","maintainers":[{"name":"isaacs","email":"i@izs.me"},{"name":"othiym23","email":"ogd@aoaioxxysz.net"}],"time":{"modified":"2014-07-31T21:59:52.896Z","created":"2012-06-07T04:43:36.581Z","0.0.1":"2012-06-07T04:43:38.123Z","0.0.2":"2012-06-07T05:35:05.937Z","0.0.3":"2012-06-09T00:55:25.861Z","0.0.4":"2012-06-11T03:53:26.548Z","0.0.5":"2012-06-11T23:48:11.235Z","0.0.6":"2012-06-17T06:23:27.320Z","0.0.7":"2012-06-18T19:19:38.315Z","0.0.8":"2012-06-28T20:40:20.563Z","0.0.9":"2012-07-10T03:28:04.651Z","0.0.10":"2012-07-11T17:03:45.151Z","0.0.11":"2012-07-17T14:06:37.489Z","0.1.0":"2012-07-23T18:17:38.007Z","0.1.1":"2012-07-23T21:21:28.196Z","0.1.2":"2012-07-24T06:14:12.831Z","0.1.3":"2012-08-07T02:02:20.564Z","0.1.4":"2012-08-15T03:04:52.822Z","0.1.5":"2012-08-17T21:59:33.310Z","0.2.0":"2012-08-17T22:00:18.081Z","0.2.1":"2012-08-17T22:07:28.827Z","0.2.2":"2012-08-17T22:37:24.352Z","0.2.3":"2012-08-19T19:16:44.808Z","0.2.4":"2012-08-19T19:18:51.792Z","0.2.5":"2012-08-20T16:54:50.794Z","0.2.6":"2012-08-22T00:25:04.766Z","0.2.7":"2012-08-27T19:07:34.829Z","0.2.8":"2012-10-02T19:53:50.661Z","0.2.9":"2012-10-03T22:09:50.766Z","0.2.10":"2012-10-25T14:55:54.216Z","0.2.11":"2012-12-21T16:26:38.094Z","0.2.12":"2013-01-18T22:22:41.668Z","0.2.13":"2013-02-06T00:16:35.939Z","0.2.14":"2013-02-10T02:44:02.764Z","0.2.15":"2013-02-11T19:18:55.678Z","0.2.16":"2013-02-15T17:09:03.249Z","0.2.17":"2013-02-16T03:47:13.898Z","0.2.18":"2013-03-06T22:09:23.536Z","0.2.19":"2013-03-20T06:27:39.128Z","0.2.20":"2013-03-28T00:43:07.558Z","0.2.21":"2013-04-29T15:46:54.094Z","0.2.22":"2013-04-29T15:51:02.178Z","0.2.23":"2013-05-11T00:28:14.198Z","0.2.24":"2013-05-24T21:27:50.693Z","0.2.25":"2013-06-20T15:36:46.277Z","0.2.26":"2013-07-06T17:12:54.670Z","0.2.27":"2013-07-11T07:14:45.740Z","0.2.28":"2013-08-02T20:27:41.732Z","0.2.29":"2013-10-28T18:23:24.477Z","0.2.30":"2013-11-18T23:12:00.540Z","0.2.31":"2013-12-16T08:36:43.044Z","0.3.0":"2013-12-17T07:03:10.699Z","0.3.1":"2013-12-17T16:53:27.867Z","0.3.2":"2013-12-17T22:25:14.882Z","0.3.3":"2013-12-21T16:07:06.773Z","0.3.4":"2014-01-29T15:24:05.163Z","0.3.5":"2014-01-31T01:53:19.656Z","0.3.6":"2014-02-07T00:17:21.362Z","0.4.0":"2014-02-13T01:17:18.973Z","0.4.1":"2014-02-13T23:47:37.892Z","0.4.2":"2014-02-14T00:29:13.086Z","0.4.3":"2014-02-16T03:40:54.640Z","0.4.4":"2014-02-16T03:41:48.856Z","0.4.5":"2014-03-12T05:09:17.474Z","0.4.6":"2014-03-29T19:44:15.041Z","0.4.7":"2014-04-02T19:41:07.149Z","0.4.8":"2014-05-01T22:24:54.980Z","0.4.9":"2014-05-12T21:52:55.127Z","0.4.10":"2014-05-13T16:44:29.801Z","0.4.11":"2014-05-13T20:33:04.738Z","0.4.12":"2014-05-14T06:14:22.842Z","1.0.0":"2014-05-14T23:04:37.188Z","1.0.1":"2014-06-03T00:55:54.448Z","2.0.0":"2014-06-06T04:23:46.579Z","2.0.1":"2014-06-06T06:25:14.419Z","2.0.2":"2014-06-14T00:33:10.205Z","3.0.0":"2014-07-02T00:30:29.154Z","3.0.1":"2014-07-14T23:29:05.057Z","2.0.3":"2014-07-15T00:09:36.043Z","3.0.2":"2014-07-17T06:30:02.659Z","3.0.3":"2014-07-23T21:20:42.406Z","3.0.4":"2014-07-25T00:27:26.007Z","3.0.5":"2014-07-25T00:28:48.007Z","3.0.6":"2014-07-31T21:57:49.043Z","2.0.4":"2014-07-31T21:59:52.896Z"},"author":{"name":"Isaac Z. Schlueter","email":"i@izs.me","url":"http://blog.izs.me/"},"repository":{"url":"git://github.com/isaacs/npm-registry-client"},"users":{"fgribreau":true,"fengmk2":true},"readmeFilename":"README.md","homepage":"https://github.com/isaacs/npm-registry-client","bugs":{"url":"https://github.com/isaacs/npm-registry-client/issues"},"license":"ISC","_attachments":{}}
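The cache fixture above embeds the npm-registry-client README, which documents the constructor, the required `cache` config, and the `client.get(uri, options, cb)` call with its `timeout`, `follow`, and `staleOk` options. A minimal usage sketch based on that embedded documentation — the cache path, registry URI, and logged field are illustrative values, not taken from the diff:

```javascript
// Sketch based on the README embedded in the fixture above; cache path, URI,
// and the dist-tags lookup are illustrative values for that documented API.
var RegClient = require('npm-registry-client')

// A plain config object is accepted; only `cache` is required.
var client = new RegClient({ cache: '/tmp/npm-cache' })

var uri = 'https://registry.npmjs.org/underscore'
var options = { timeout: 1000, staleOk: true } // serve cached data quickly, refresh in the background

client.get(uri, options, function (error, data, raw, res) {
  if (error) return console.error('registry request failed:', error)
  // data is the parsed metadata object, raw the JSON string, res the registry response
  console.log('latest published version:', data['dist-tags'].latest)
})
```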
diff --git a/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/cache.json b/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/cache.json
deleted file mode 100644
index 01da30027..000000000
--- a/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/cache.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.3.3","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.3.3","dependencies":{},"devDependencies":{},"optionalDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.1.1","_nodeVersion":"v0.6.11","_defaultsLoaded":true,"dist":{"shasum":"47ac53683daf832bfa952e1774417da47817ae42","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.3.3.tgz"},"readme":" __ \n /\\ \\ __ \n __ __ ___ \\_\\ \\ __ _ __ ____ ___ ___ _ __ __ /\\_\\ ____ \n /\\ \\/\\ \\ /' _ `\\ /'_ \\ /'__`\\/\\ __\\/ ,__\\ / ___\\ / __`\\/\\ __\\/'__`\\ \\/\\ \\ /',__\\ \n \\ \\ \\_\\ \\/\\ \\/\\ \\/\\ \\ \\ \\/\\ __/\\ \\ \\//\\__, `\\/\\ \\__//\\ \\ \\ \\ \\ \\//\\ __/ __ \\ \\ \\/\\__, `\\\n \\ \\____/\\ \\_\\ \\_\\ \\___,_\\ \\____\\\\ \\_\\\\/\\____/\\ \\____\\ \\____/\\ \\_\\\\ \\____\\/\\_\\ _\\ \\ \\/\\____/\n \\/___/ \\/_/\\/_/\\/__,_ /\\/____/ \\/_/ \\/___/ \\/____/\\/___/ \\/_/ \\/____/\\/_//\\ \\_\\ \\/___/ \n \\ \\____/ \n \\/___/\n \nUnderscore.js is a utility-belt library for JavaScript that provides \nsupport for the usual functional suspects (each, map, reduce, filter...) \nwithout extending any core JavaScript objects.\n\nFor Docs, License, Tests, and pre-packed downloads, see:\nhttp://documentcloud.github.com/underscore/\n\nMany thanks to our contributors:\nhttps://github.com/documentcloud/underscore/contributors\n","maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}} \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/package.tgz b/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/package.tgz
deleted file mode 100644
index 19da9baa7..000000000
--- a/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/1.3.3/package.tgz
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/cache.json b/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/cache.json
deleted file mode 100644
index d899f1192..000000000
--- a/deps/npm/node_modules/npm-registry-client/test/fixtures/underscore/cache.json
+++ /dev/null
@@ -1 +0,0 @@
-{"_id":"underscore","_rev":"72-47f2986bfd8e8b55068b204588bbf484","name":"underscore","description":"JavaScript's functional programming helper library.","dist-tags":{"latest":"1.3.3","stable":"1.3.3"},"versions":{"1.0.3":{"name":"underscore","description":"Functional programming aid for JavaScript. Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore","version":"1.0.3","_id":"underscore@1.0.3","engines":{"node":"*"},"_nodeSupported":true,"_npmVersion":"0.2.7-2","_nodeVersion":"v0.3.1-pre","dist":{"tarball":"http://registry.npmjs.org/underscore/-/underscore-1.0.3.tgz"},"directories":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.0.4":{"name":"underscore","description":"Functional programming aid for JavaScript. Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore","version":"1.0.4","_id":"underscore@1.0.4","engines":{"node":"*"},"_nodeSupported":true,"_npmVersion":"0.2.7-2","_nodeVersion":"v0.3.1-pre","dist":{"tarball":"http://registry.npmjs.org/underscore/-/underscore-1.0.4.tgz"},"directories":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.0":{"name":"underscore","description":"Functional programming aid for JavaScript. Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore","version":"1.1.0","_id":"underscore@1.1.0","engines":{"node":"*"},"_nodeSupported":true,"_npmVersion":"0.2.7-2","_nodeVersion":"v0.3.1-pre","dist":{"tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.0.tgz"},"directories":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.1":{"name":"underscore","description":"Functional programming aid for JavaScript. Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore","version":"1.1.1","_id":"underscore@1.1.1","engines":{"node":"*"},"_nodeSupported":true,"_npmVersion":"0.2.7-2","_nodeVersion":"v0.3.1-pre","dist":{"tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.1.tgz"},"directories":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.2":{"name":"underscore","description":"Functional programming aid for JavaScript. 
Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore","version":"1.1.2","_id":"underscore@1.1.2","engines":{"node":"*"},"_nodeSupported":true,"_npmVersion":"0.2.7-2","_nodeVersion":"v0.3.1-pre","dist":{"tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.2.tgz"},"directories":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.3":{"name":"underscore","description":"Functional programming aid for JavaScript. Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore","version":"1.1.3","_id":"underscore@1.1.3","engines":{"node":"*"},"_nodeSupported":true,"_npmVersion":"0.2.8-1","_nodeVersion":"v0.2.5","dist":{"tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.3.tgz"},"directories":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.4":{"name":"underscore","description":"Functional programming aid for JavaScript. Works well with jQuery.","url":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"lib":".","main":"underscore.js","version":"1.1.4","_id":"underscore@1.1.4","engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"0.3.9","_nodeVersion":"v0.5.0-pre","dist":{"shasum":"9e82274902865625b3a6d4c315a38ffd80047dae","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.4.tgz"},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.1.5":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.1.5","_id":"underscore@1.1.5","engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"0.3.16","_nodeVersion":"v0.4.2","directories":{},"files":[""],"_defaultsLoaded":true,"dist":{"shasum":"23601d62c75619998b2f0db24938102793336a56","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.5.tgz"},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.6":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy 
Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.1.6","_id":"underscore@1.1.6","engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"0.3.18","_nodeVersion":"v0.4.2","directories":{},"files":[""],"_defaultsLoaded":true,"dist":{"shasum":"6868da1bdd72d75285be0b4e50f228e70d001a2c","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.6.tgz"},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}]},"1.1.7":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.1.7","devDependencies":{},"_id":"underscore@1.1.7","engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.3","_nodeVersion":"v0.4.7","_defaultsLoaded":true,"dist":{"shasum":"40bab84bad19d230096e8d6ef628bff055d83db0","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.1.7.tgz"},"scripts":{},"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.2.0":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.2.0","_npmJsonOpts":{"file":"/Users/jashkenas/.npm/underscore/1.2.0/package/package.json","wscript":false,"contributors":false,"serverjs":false},"_id":"underscore@1.2.0","devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.22","_nodeVersion":"v0.4.10","_defaultsLoaded":true,"dist":{"shasum":"b32ce32c8c118caa8031c10b54c7f65ab3b557fd","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.2.0.tgz"},"scripts":{},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"directories":{}},"1.2.1":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy 
Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.2.1","_npmJsonOpts":{"file":"/Users/jashkenas/.npm/underscore/1.2.1/package/package.json","wscript":false,"contributors":false,"serverjs":false},"_id":"underscore@1.2.1","devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.22","_nodeVersion":"v0.4.10","_defaultsLoaded":true,"dist":{"shasum":"fc5c6b0765673d92a2d4ac8b4dc0aa88702e2bd4","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.2.1.tgz"},"scripts":{},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"directories":{}},"1.2.2":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.2.2","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.2.2","devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.104","_nodeVersion":"v0.6.0","_defaultsLoaded":true,"dist":{"shasum":"74dd40e9face84e724eb2edae945b8aedc233ba3","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.2.2.tgz"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.2.3":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"dependencies":{},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.2.3","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.2.3","devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.104","_nodeVersion":"v0.6.0","_defaultsLoaded":true,"dist":{"shasum":"11b874da70f4683d7d48bba2b44be1e600d2f6cf","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.2.3.tgz"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.2.4":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy 
Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.2.4","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.2.4","dependencies":{},"devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.104","_nodeVersion":"v0.6.6","_defaultsLoaded":true,"dist":{"shasum":"e8da6241aa06f64df2473bb2590b8c17c84c3c7e","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.2.4.tgz"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.3.0":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"contributors":[],"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.3.0","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.3.0","dependencies":{},"devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.104","_nodeVersion":"v0.6.6","_defaultsLoaded":true,"dist":{"shasum":"253b2d79b7bb67943ced0fc744eb18267963ede8","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.3.0.tgz"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.3.1":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.3.1","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.3.1","dependencies":{},"devDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.0.104","_nodeVersion":"v0.6.6","_defaultsLoaded":true,"dist":{"shasum":"6cb8aad0e77eb5dbbfb54b22bcd8697309cf9641","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.3.1.tgz"},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.3.2":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.3.2","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.3.2","dependencies":{},"devDependencies":{},"optionalDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.1.1","_nodeVersion":"v0.6.11","_defaultsLoaded":true,"dist":{"shasum":"1b4e455089ab1d1d38ab6794ffe6cf08f764394a","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.3.2.tgz"},"readme":" __ \n /\\ \\ __ \n __ __ ___ \\_\\ \\ __ _ __ ____ ___ ___ _ __ __ /\\_\\ ____ \n /\\ \\/\\ \\ /' _ `\\ /'_ \\ /'__`\\/\\ __\\/ ,__\\ / ___\\ / 
__`\\/\\ __\\/'__`\\ \\/\\ \\ /',__\\ \n \\ \\ \\_\\ \\/\\ \\/\\ \\/\\ \\ \\ \\/\\ __/\\ \\ \\//\\__, `\\/\\ \\__//\\ \\ \\ \\ \\ \\//\\ __/ __ \\ \\ \\/\\__, `\\\n \\ \\____/\\ \\_\\ \\_\\ \\___,_\\ \\____\\\\ \\_\\\\/\\____/\\ \\____\\ \\____/\\ \\_\\\\ \\____\\/\\_\\ _\\ \\ \\/\\____/\n \\/___/ \\/_/\\/_/\\/__,_ /\\/____/ \\/_/ \\/___/ \\/____/\\/___/ \\/_/ \\/____/\\/_//\\ \\_\\ \\/___/ \n \\ \\____/ \n \\/___/\n \nUnderscore.js is a utility-belt library for JavaScript that provides \nsupport for the usual functional suspects (each, map, reduce, filter...) \nwithout extending any core JavaScript objects.\n\nFor Docs, License, Tests, and pre-packed downloads, see:\nhttp://documentcloud.github.com/underscore/\n\nMany thanks to our contributors:\nhttps://github.com/documentcloud/underscore/contributors\n","maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}},"1.3.3":{"name":"underscore","description":"JavaScript's functional programming helper library.","homepage":"http://documentcloud.github.com/underscore/","keywords":["util","functional","server","client","browser"],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"main":"underscore.js","version":"1.3.3","_npmUser":{"name":"jashkenas","email":"jashkenas@gmail.com"},"_id":"underscore@1.3.3","dependencies":{},"devDependencies":{},"optionalDependencies":{},"engines":{"node":"*"},"_engineSupported":true,"_npmVersion":"1.1.1","_nodeVersion":"v0.6.11","_defaultsLoaded":true,"dist":{"shasum":"47ac53683daf832bfa952e1774417da47817ae42","tarball":"http://registry.npmjs.org/underscore/-/underscore-1.3.3.tgz"},"readme":" __ \n /\\ \\ __ \n __ __ ___ \\_\\ \\ __ _ __ ____ ___ ___ _ __ __ /\\_\\ ____ \n /\\ \\/\\ \\ /' _ `\\ /'_ \\ /'__`\\/\\ __\\/ ,__\\ / ___\\ / __`\\/\\ __\\/'__`\\ \\/\\ \\ /',__\\ \n \\ \\ \\_\\ \\/\\ \\/\\ \\/\\ \\ \\ \\/\\ __/\\ \\ \\//\\__, `\\/\\ \\__//\\ \\ \\ \\ \\ \\//\\ __/ __ \\ \\ \\/\\__, `\\\n \\ \\____/\\ \\_\\ \\_\\ \\___,_\\ \\____\\\\ \\_\\\\/\\____/\\ \\____\\ \\____/\\ \\_\\\\ \\____\\/\\_\\ _\\ \\ \\/\\____/\n \\/___/ \\/_/\\/_/\\/__,_ /\\/____/ \\/_/ \\/___/ \\/____/\\/___/ \\/_/ \\/____/\\/_//\\ \\_\\ \\/___/ \n \\ \\____/ \n \\/___/\n \nUnderscore.js is a utility-belt library for JavaScript that provides \nsupport for the usual functional suspects (each, map, reduce, filter...) 
\nwithout extending any core JavaScript objects.\n\nFor Docs, License, Tests, and pre-packed downloads, see:\nhttp://documentcloud.github.com/underscore/\n\nMany thanks to our contributors:\nhttps://github.com/documentcloud/underscore/contributors\n","maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"directories":{}}},"maintainers":[{"name":"documentcloud","email":"jeremy@documentcloud.org"},{"name":"jashkenas","email":"jashkenas@gmail.com"}],"author":{"name":"Jeremy Ashkenas","email":"jeremy@documentcloud.org"},"time":{"1.0.3":"2011-12-07T15:12:18.045Z","1.0.4":"2011-12-07T15:12:18.045Z","1.1.0":"2011-12-07T15:12:18.045Z","1.1.1":"2011-12-07T15:12:18.045Z","1.1.2":"2011-12-07T15:12:18.045Z","1.1.3":"2011-12-07T15:12:18.045Z","1.1.4":"2011-12-07T15:12:18.045Z","1.1.5":"2011-12-07T15:12:18.045Z","1.1.6":"2011-12-07T15:12:18.045Z","1.1.7":"2011-12-07T15:12:18.045Z","1.2.0":"2011-12-07T15:12:18.045Z","1.2.1":"2011-12-07T15:12:18.045Z","1.2.2":"2011-11-14T20:28:47.115Z","1.2.3":"2011-12-07T15:12:18.045Z","1.2.4":"2012-01-09T17:23:14.818Z","1.3.0":"2012-01-11T16:41:38.459Z","1.3.1":"2012-01-23T22:57:36.474Z","1.3.2":"2012-04-09T18:38:14.345Z","1.3.3":"2012-04-10T14:43:48.089Z"},"repository":{"type":"git","url":"git://github.com/documentcloud/underscore.git"},"users":{"vesln":true,"mvolkmann":true,"lancehunt":true,"mikl":true,"linus":true,"vasc":true,"bat":true,"dmalam":true,"mbrevoort":true,"danielr":true,"rsimoes":true,"thlorenz":true}} \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-client/test/get-403.js b/deps/npm/node_modules/npm-registry-client/test/get-403.js
new file mode 100644
index 000000000..0b8592e50
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/test/get-403.js
@@ -0,0 +1,29 @@
+var tap = require('tap')
+
+var server = require('./lib/server.js')
+var common = require('./lib/common.js')
+
+tap.test('get returns 403', function (t) {
+ server.expect('/underscore', function (req, res) {
+ t.equal(req.method, 'GET', 'got expected method')
+
+ res.writeHead(403)
+ res.end(JSON.stringify({
+ error: 'get that cat out of the toilet that\'s gross omg'
+ }))
+ })
+
+ var client = common.freshClient()
+ client.get(
+ 'http://localhost:1337/underscore',
+ {},
+ function (er) {
+ t.ok(er, 'failed as expected')
+
+ t.equal(er.statusCode, 403, 'status code was attached to error as expected')
+ t.equal(er.code, 'E403', 'error code was formatted as expected')
+
+ t.end()
+ }
+ )
+})
diff --git a/deps/npm/node_modules/npm-registry-client/test/ping.js b/deps/npm/node_modules/npm-registry-client/test/ping.js
new file mode 100644
index 000000000..002d8ba8f
--- /dev/null
+++ b/deps/npm/node_modules/npm-registry-client/test/ping.js
@@ -0,0 +1,75 @@
+var test = require('tap').test
+
+var server = require('./lib/server.js')
+var common = require('./lib/common.js')
+var client = common.freshClient()
+
+function nop () {}
+
+var TOKEN = 'not-bad-meaning-bad-but-bad-meaning-wombat'
+var AUTH = { token: TOKEN }
+var PARAMS = { auth: AUTH }
+var DEP_USER = 'username'
+var HOST = 'localhost'
+
+test('ping call contract', function (t) {
+ t.throws(function () {
+ client.ping(undefined, AUTH, nop)
+ }, 'requires a URI')
+
+ t.throws(function () {
+ client.ping([], AUTH, nop)
+ }, 'requires URI to be a string')
+
+ t.throws(function () {
+ client.ping(common.registry, undefined, nop)
+ }, 'requires params object')
+
+ t.throws(function () {
+ client.ping(common.registry, '', nop)
+ }, 'params must be object')
+
+ t.throws(function () {
+ client.ping(common.registry, AUTH, undefined)
+ }, 'requires callback')
+
+ t.throws(function () {
+ client.ping(common.registry, AUTH, 'callback')
+ }, 'callback must be function')
+
+ t.throws(
+ function () {
+ var params = {}
+ client.ping(common.registry, params, nop)
+ },
+ { name: 'AssertionError', message: 'must pass auth to ping' },
+ 'must pass auth to ping'
+ )
+
+ t.end()
+})
+
+test('ping', function (t) {
+ server.expect('GET', '/-/ping?write=true', function (req, res) {
+ t.equal(req.method, 'GET')
+ res.statusCode = 200
+ res.json({
+ ok: true,
+ host: HOST,
+ peer: HOST,
+ username: DEP_USER
+ })
+ })
+
+ client.ping(common.registry, PARAMS, function (error, found) {
+ t.ifError(error, 'no errors')
+ var wanted = {
+ ok: true,
+ host: HOST,
+ peer: HOST,
+ username: DEP_USER
+ }
+ t.same(found, wanted)
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/osenv/node_modules/os-homedir/index.js b/deps/npm/node_modules/osenv/node_modules/os-homedir/index.js
new file mode 100644
index 000000000..758ff653d
--- /dev/null
+++ b/deps/npm/node_modules/osenv/node_modules/os-homedir/index.js
@@ -0,0 +1,24 @@
+'use strict';
+var os = require('os');
+
+function homedir() {
+ var env = process.env;
+ var home = env.HOME;
+ var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME;
+
+ if (process.platform === 'win32') {
+ return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null;
+ }
+
+ if (process.platform === 'darwin') {
+ return home || (user ? '/Users/' + user : null);
+ }
+
+ if (process.platform === 'linux') {
+ return home || (user ? (process.getuid() === 0 ? '/root' : '/home/' + user) : null);
+ }
+
+ return home || null;
+}
+
+module.exports = typeof os.homedir === 'function' ? os.homedir : homedir;
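
Illustrative note (not part of the upstream patch): the os-homedir file above is a ponyfill, so it prefers the native `os.homedir()` when the runtime provides one and otherwise derives the home directory from environment variables (`USERPROFILE`/`HOMEDRIVE`+`HOMEPATH` on Windows, `HOME` or `/Users/<user>` on darwin, `HOME`, `/root`, or `/home/<user>` on linux). A minimal consumption sketch, assuming only the module as added here:

```js
// os-homedir exports a single function; on newer runtimes it is simply the
// native os.homedir, on older ones it is the env-based fallback shown above.
var osHomedir = require('os-homedir');

var home = osHomedir();
if (home) {
  console.log('resolved home directory:', home);
} else {
  // The fallback can return null when neither HOME-style variables nor a
  // username are available (e.g. a stripped-down container environment).
  console.log('home directory could not be determined');
}
```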
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/License b/deps/npm/node_modules/osenv/node_modules/os-homedir/license
index 4804b7ab4..654d0bfe9 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/License
+++ b/deps/npm/node_modules/osenv/node_modules/os-homedir/license
@@ -1,4 +1,6 @@
-Copyright (c) 2011 Debuggable Limited <felix@debuggable.com>
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/deps/npm/node_modules/osenv/node_modules/os-homedir/package.json b/deps/npm/node_modules/osenv/node_modules/os-homedir/package.json
new file mode 100644
index 000000000..c9a3b650c
--- /dev/null
+++ b/deps/npm/node_modules/osenv/node_modules/os-homedir/package.json
@@ -0,0 +1,70 @@
+{
+ "name": "os-homedir",
+ "version": "1.0.0",
+ "description": "io.js 2.3.0 os.homedir() ponyfill",
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/os-homedir.git"
+ },
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "node test.js"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "built-in",
+ "core",
+ "ponyfill",
+ "polyfill",
+ "shim",
+ "os",
+ "homedir",
+ "home",
+ "dir",
+ "directory",
+ "folder",
+ "user",
+ "path"
+ ],
+ "devDependencies": {
+ "ava": "0.0.4",
+ "path-exists": "^1.0.0"
+ },
+ "gitHead": "7e39e2e049de404f06233fa617ecf46fed997a78",
+ "bugs": {
+ "url": "https://github.com/sindresorhus/os-homedir/issues"
+ },
+ "homepage": "https://github.com/sindresorhus/os-homedir",
+ "_id": "os-homedir@1.0.0",
+ "_shasum": "e37078bc61b5869063053897257e39ec1261b702",
+ "_from": "os-homedir@>=1.0.0 <2.0.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
+ "_npmUser": {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ },
+ "dist": {
+ "shasum": "e37078bc61b5869063053897257e39ec1261b702",
+ "tarball": "http://registry.npmjs.org/os-homedir/-/os-homedir-1.0.0.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.0.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/osenv/node_modules/os-homedir/readme.md b/deps/npm/node_modules/osenv/node_modules/os-homedir/readme.md
new file mode 100644
index 000000000..4851f104e
--- /dev/null
+++ b/deps/npm/node_modules/osenv/node_modules/os-homedir/readme.md
@@ -0,0 +1,33 @@
+# os-homedir [![Build Status](https://travis-ci.org/sindresorhus/os-homedir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-homedir)
+
+> io.js 2.3.0 [`os.homedir()`](https://iojs.org/api/os.html#os_os_homedir) ponyfill
+
+> Ponyfill: A polyfill that doesn't overwrite the native method
+
+
+## Install
+
+```
+$ npm install --save os-homedir
+```
+
+
+## Usage
+
+```js
+var osHomedir = require('os-homedir');
+
+console.log(osHomedir());
+//=> /Users/sindresorhus
+```
+
+
+## Related
+
+- [user-home](https://github.com/sindresorhus/user-home) - Same as this module but caches the result
+- [home-or-tmp](https://github.com/sindresorhus/home-or-tmp) - Get the user home directory with fallback to the system temp directory
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/osenv/node_modules/os-tmpdir/package.json b/deps/npm/node_modules/osenv/node_modules/os-tmpdir/package.json
index 1857f8f5f..f8b2682bf 100644
--- a/deps/npm/node_modules/osenv/node_modules/os-tmpdir/package.json
+++ b/deps/npm/node_modules/osenv/node_modules/os-tmpdir/package.json
@@ -5,7 +5,7 @@
"license": "MIT",
"repository": {
"type": "git",
- "url": "git+https://github.com/sindresorhus/os-tmpdir.git"
+ "url": "https://github.com/sindresorhus/os-tmpdir"
},
"author": {
"name": "Sindre Sorhus",
@@ -65,6 +65,5 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.1.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.1.tgz"
}
diff --git a/deps/npm/node_modules/osenv/osenv.js b/deps/npm/node_modules/osenv/osenv.js
index 0cabcba66..702a95b98 100644
--- a/deps/npm/node_modules/osenv/osenv.js
+++ b/deps/npm/node_modules/osenv/osenv.js
@@ -2,6 +2,7 @@ var isWindows = process.platform === 'win32'
var path = require('path')
var exec = require('child_process').exec
var osTmpdir = require('os-tmpdir')
+var osHomedir = require('os-homedir')
// looking up envs is a bit costly.
// Also, sometimes we want to have a fallback
@@ -50,9 +51,7 @@ memo('tmpdir', function () {
})
memo('home', function () {
- return ( isWindows ? process.env.USERPROFILE
- : process.env.HOME
- )
+ return osHomedir()
})
memo('path', function () {
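
Illustrative note (not part of the upstream patch): with the osenv.js change above, `osenv.home()` now delegates to os-homedir instead of reading `USERPROFILE`/`HOME` directly, and the existing `memo` wrapper still caches the result after the first lookup. A minimal sketch of the resulting behaviour, assuming osenv 0.1.3 as packaged here:

```js
var osenv = require('osenv');

// First call computes the value via os-homedir (native os.homedir when
// available); later calls return the memoized result without re-resolving.
console.log(osenv.home());
console.log(osenv.tmpdir());
```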
diff --git a/deps/npm/node_modules/osenv/package.json b/deps/npm/node_modules/osenv/package.json
index 851f19b51..d5718bdb1 100644
--- a/deps/npm/node_modules/osenv/package.json
+++ b/deps/npm/node_modules/osenv/package.json
@@ -1,11 +1,12 @@
{
"name": "osenv",
- "version": "0.1.2",
+ "version": "0.1.3",
"main": "osenv.js",
"directories": {
"test": "test"
},
"dependencies": {
+ "os-homedir": "^1.0.0",
"os-tmpdir": "^1.0.0"
},
"devDependencies": {
@@ -34,14 +35,41 @@
},
"license": "ISC",
"description": "Look up environment settings specific to different operating systems",
- "readme": "# osenv\n\nLook up environment settings specific to different operating systems.\n\n## Usage\n\n```javascript\nvar osenv = require('osenv')\nvar path = osenv.path()\nvar user = osenv.user()\n// etc.\n\n// Some things are not reliably in the env, and have a fallback command:\nvar h = osenv.hostname(function (er, hostname) {\n h = hostname\n})\n// This will still cause it to be memoized, so calling osenv.hostname()\n// is now an immediate operation.\n\n// You can always send a cb, which will get called in the nextTick\n// if it's been memoized, or wait for the fallback data if it wasn't\n// found in the environment.\nosenv.hostname(function (er, hostname) {\n if (er) console.error('error looking up hostname')\n else console.log('this machine calls itself %s', hostname)\n})\n```\n\n## osenv.hostname()\n\nThe machine name. Calls `hostname` if not found.\n\n## osenv.user()\n\nThe currently logged-in user. Calls `whoami` if not found.\n\n## osenv.prompt()\n\nEither PS1 on unix, or PROMPT on Windows.\n\n## osenv.tmpdir()\n\nThe place where temporary files should be created.\n\n## osenv.home()\n\nNo place like it.\n\n## osenv.path()\n\nAn array of the places that the operating system will search for\nexecutables.\n\n## osenv.editor() \n\nReturn the executable name of the editor program. This uses the EDITOR\nand VISUAL environment variables, and falls back to `vi` on Unix, or\n`notepad.exe` on Windows.\n\n## osenv.shell()\n\nThe SHELL on Unix, which Windows calls the ComSpec. Defaults to 'bash'\nor 'cmd'.\n",
- "readmeFilename": "README.md",
- "gitHead": "88a154d6d8ad39fefb9af2fe1b306cd12fb6d6d0",
+ "gitHead": "f746b3405d8f9e28054d11b97e1436f6a15016c4",
"bugs": {
"url": "https://github.com/npm/osenv/issues"
},
"homepage": "https://github.com/npm/osenv#readme",
- "_id": "osenv@0.1.2",
- "_shasum": "f4d23ebeceaef078600fb78c0ea58fac5996a02d",
- "_from": "osenv@latest"
+ "_id": "osenv@0.1.3",
+ "_shasum": "83cf05c6d6458fc4d5ac6362ea325d92f2754217",
+ "_from": "osenv@0.1.3",
+ "_npmVersion": "3.0.0",
+ "_nodeVersion": "2.2.1",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "83cf05c6d6458fc4d5ac6362ea325d92f2754217",
+ "tarball": "http://registry.npmjs.org/osenv/-/osenv-0.1.3.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ {
+ "name": "robertkowalski",
+ "email": "rok@kowalski.gd"
+ },
+ {
+ "name": "othiym23",
+ "email": "ogd@aoaioxxysz.net"
+ },
+ {
+ "name": "iarna",
+ "email": "me@re-becca.org"
+ }
+ ],
+ "_resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.3.tgz"
}
diff --git a/deps/npm/node_modules/osenv/x.tap b/deps/npm/node_modules/osenv/x.tap
new file mode 100644
index 000000000..90d847208
--- /dev/null
+++ b/deps/npm/node_modules/osenv/x.tap
@@ -0,0 +1,39 @@
+TAP version 13
+ # Subtest: test/unix.js
+ TAP version 13
+ # Subtest: basic unix sanity test
+ ok 1 - should be equal
+ ok 2 - should be equal
+ ok 3 - should be equal
+ ok 4 - should be equivalent
+ ok 5 - should be equal
+ ok 6 - should be equal
+ ok 7 - should be equal
+ ok 8 - should be equal
+ ok 9 - should be equal
+ ok 10 - should be equal
+ ok 11 - should be equal
+ ok 12 - should be equal
+ ok 13 - should be equal
+ ok 14 - should be equal
+ 1..14
+ ok 1 - basic unix sanity test # time=10.712ms
+
+ 1..1
+ # time=18.422ms
+ok 1 - test/unix.js # time=169.827ms
+
+ # Subtest: test/windows.js
+ TAP version 13
+ 1..0 # Skip windows tests, this is not windows
+
+ok 2 - test/windows.js # SKIP Skip windows tests, this is not windows
+
+ # Subtest: test/nada.js
+ TAP version 13
+ 1..0
+
+ok 2 - test/nada.js
+
+1..3
+# time=274.247ms
diff --git a/deps/npm/node_modules/read-installed/.travis.yml b/deps/npm/node_modules/read-installed/.travis.yml
new file mode 100644
index 000000000..848ffd1c5
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/.travis.yml
@@ -0,0 +1,9 @@
+language: node_js
+before_install:
+ - '[ "${TRAVIS_NODE_VERSION}" != "0.8" ] || npm install -g npm@1.4.28'
+ - npm install -g npm@latest
+node_js:
+ - '0.8'
+ - '0.10'
+ - '0.12'
+ - 'iojs'
diff --git a/deps/npm/node_modules/read-installed/node_modules/debuglog/package.json b/deps/npm/node_modules/read-installed/node_modules/debuglog/package.json
index 4a8257c98..b622fe922 100644
--- a/deps/npm/node_modules/read-installed/node_modules/debuglog/package.json
+++ b/deps/npm/node_modules/read-installed/node_modules/debuglog/package.json
@@ -6,7 +6,7 @@
"main": "debuglog.js",
"repository": {
"type": "git",
- "url": "https://github.com/sam-github/node-debuglog.git"
+ "url": "git+https://github.com/sam-github/node-debuglog.git"
},
"author": {
"name": "Sam Roberts",
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/README.md b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCES`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
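
Illustrative note (not part of the upstream patch): the key improvement listed in the README above is that `open` and `readdir` calls are queued and retried once another file descriptor closes, so bursts of parallel opens do not fail with `EMFILE`. A minimal sketch of that behaviour (the count of 10000 is arbitrary and chosen only to exceed typical fd limits):

```js
// With plain fs this loop can fail with EMFILE once the fd limit is hit;
// graceful-fs queues the excess open() calls and retries them as files close.
var fs = require('graceful-fs');

for (var i = 0; i < 10000; i++) {
  fs.open(__filename, 'r', function (er, fd) {
    if (er) throw er;
    fs.close(fd, function (er) {
      if (er) throw er;
    });
  });
}
```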
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/package.json b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..40b65a3c2
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/package.json
@@ -0,0 +1,72 @@
+{
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me"
+ },
+ "name": "graceful-fs",
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "version": "3.0.8",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "main": "graceful-fs.js",
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "keywords": [
+ "fs",
+ "module",
+ "reading",
+ "retry",
+ "retries",
+ "queue",
+ "error",
+ "errors",
+ "handling",
+ "EMFILE",
+ "EAGAIN",
+ "EINVAL",
+ "EPERM",
+ "EACCESS"
+ ],
+ "license": "ISC",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "_id": "graceful-fs@3.0.8",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_from": "graceful-fs@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/read-installed/node_modules/readdir-scoped-modules/package.json b/deps/npm/node_modules/read-installed/node_modules/readdir-scoped-modules/package.json
index de45b2f4d..c3a39d401 100644
--- a/deps/npm/node_modules/read-installed/node_modules/readdir-scoped-modules/package.json
+++ b/deps/npm/node_modules/read-installed/node_modules/readdir-scoped-modules/package.json
@@ -20,7 +20,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/readdir-scoped-modules"
+ "url": "git+https://github.com/npm/readdir-scoped-modules.git"
},
"author": {
"name": "Isaac Z. Schlueter",
diff --git a/deps/npm/node_modules/read-installed/node_modules/util-extend/package.json b/deps/npm/node_modules/read-installed/node_modules/util-extend/package.json
index 942b78604..259d6c104 100644
--- a/deps/npm/node_modules/read-installed/node_modules/util-extend/package.json
+++ b/deps/npm/node_modules/read-installed/node_modules/util-extend/package.json
@@ -8,7 +8,7 @@
},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/util-extend"
+ "url": "git://github.com/isaacs/util-extend.git"
},
"author": "",
"license": "MIT",
@@ -37,5 +37,5 @@
"directories": {},
"_shasum": "bb703b79480293ddcdcfb3c6a9fea20f483415bc",
"_resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.1.tgz",
- "homepage": "https://github.com/isaacs/util-extend"
+ "homepage": "https://github.com/isaacs/util-extend#readme"
}
diff --git a/deps/npm/node_modules/read-installed/package.json b/deps/npm/node_modules/read-installed/package.json
index 7ecdc716e..a7e093216 100644
--- a/deps/npm/node_modules/read-installed/package.json
+++ b/deps/npm/node_modules/read-installed/package.json
@@ -1,10 +1,10 @@
{
"name": "read-installed",
"description": "Read all the installed packages in a folder, and return a tree structure with all the data.",
- "version": "4.0.0",
+ "version": "4.0.2",
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/read-installed"
+ "url": "git://github.com/isaacs/read-installed.git"
},
"main": "read-installed.js",
"scripts": {
@@ -14,7 +14,7 @@
"debuglog": "^1.0.1",
"read-package-json": "^2.0.0",
"readdir-scoped-modules": "^1.0.0",
- "semver": "2 || 3 || 4",
+ "semver": "2 || 3 || 4 || 5",
"slide": "~1.1.3",
"util-extend": "^1.0.1",
"graceful-fs": "2 || 3"
@@ -31,16 +31,16 @@
"devDependencies": {
"mkdirp": "^0.5.0",
"rimraf": "^2.2.8",
- "tap": "~0.4.8"
+ "tap": "^1.2.0"
},
"readme": "# read-installed\n\nRead all the installed packages in a folder, and return a tree\nstructure with all the data.\n\nnpm uses this.\n\n## 2.0.0\n\nBreaking changes in `2.0.0`:\n\nThe second argument is now an `Object` that contains the following keys:\n\n * `depth` optional, defaults to Infinity\n * `log` optional log Function\n * `dev` optional, default false, set to true to include devDependencies\n\n## Usage\n\n```javascript\nvar readInstalled = require(\"read-installed\")\n// optional options\nvar options = { dev: false, log: fn, depth: 2 }\nreadInstalled(folder, options, function (er, data) {\n ...\n})\n```\n",
"readmeFilename": "README.md",
- "gitHead": "c1dea4823e2219a79d6184621917ee27d4283bc0",
+ "gitHead": "e683eb7f2bb8e9b7b40c1e26192b385b92a3192f",
"bugs": {
"url": "https://github.com/isaacs/read-installed/issues"
},
- "homepage": "https://github.com/isaacs/read-installed",
- "_id": "read-installed@4.0.0",
- "_shasum": "dbca08d6bd83e2a3b93c962053ba4d839e0769ba",
- "_from": "read-installed@>=4.0.0 <4.1.0"
+ "homepage": "https://github.com/isaacs/read-installed#readme",
+ "_id": "read-installed@4.0.2",
+ "_shasum": "4e8b18be974ccb75654b28ddcc9aac686404690e",
+ "_from": "read-installed@>=4.0.2 <4.1.0"
}
diff --git a/deps/npm/node_modules/read-installed/read-installed.js b/deps/npm/node_modules/read-installed/read-installed.js
index 2e299445b..19e77be91 100644
--- a/deps/npm/node_modules/read-installed/read-installed.js
+++ b/deps/npm/node_modules/read-installed/read-installed.js
@@ -56,7 +56,7 @@ to READ(packagefolder, parentobj, name, reqver)
obj = read package.json
installed = ./node_modules/*
if parentobj is null, and no package.json
- obj = {dependencies:{<installed>:"*"}}
+ obj = {dependencies:{<installed>:ANY}}
deps = Object.keys(obj.dependencies)
obj.path = packagefolder
obj.parent = parentobj
@@ -105,6 +105,10 @@ var debug = require("debuglog")("read-installed")
var readdir = require("readdir-scoped-modules")
+// Sentinel catch-all version constraint used when a dependency is not
+// listed in the package.json file.
+var ANY = {}
+
module.exports = readInstalled
function readInstalled (folder, opts, cb) {
@@ -190,7 +194,7 @@ function readInstalled_ (folder, parent, name, reqver, depth, opts, cb) {
if (realpathSeen[real]) return cb(null, realpathSeen[real])
if (obj === true) {
obj = {dependencies:{}, path:folder}
- installed.forEach(function (i) { obj.dependencies[i] = "*" })
+ installed.forEach(function (i) { obj.dependencies[i] = ANY })
}
if (name && obj.name !== name) obj.invalid = true
obj.realName = name || obj.name
@@ -199,6 +203,14 @@ function readInstalled_ (folder, parent, name, reqver, depth, opts, cb) {
// At this point, figure out what dependencies we NEED to get met
obj._dependencies = copy(obj.dependencies)
+ if (reqver === ANY) {
+ // We were unable to determine the required version of this
+ // dependency from the package.json file, but we now know its actual
+ // version, so treat that version as the required version to avoid
+ // marking the dependency as invalid below. See #40.
+ reqver = obj.version;
+ }
+
// "foo":"http://blah" and "foo":"latest" are always presumed valid
if (reqver
&& semver.validRange(reqver, true)
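
Illustrative note (not part of the upstream patch): the read-installed.js hunk above replaces the `"*"` placeholder with an `ANY` object sentinel for dependencies that have no entry in package.json. Because the check is a strict identity comparison, the sentinel can never collide with a range string a user could legitimately write, which is what made `"*"` ambiguous. A minimal sketch of that distinction:

```js
var ANY = {};              // unique identity, never equal to user-supplied data
var fromPackageJson = '*'; // a real range a user could legitimately declare

console.log(fromPackageJson === '*'); // true  -- string values can collide
console.log(ANY === {});              // false -- a fresh object never matches
console.log(ANY === ANY);             // true  -- the identity check read-installed relies on
```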
diff --git a/deps/npm/node_modules/read-installed/test/issue-40.js b/deps/npm/node_modules/read-installed/test/issue-40.js
new file mode 100644
index 000000000..3105fe24a
--- /dev/null
+++ b/deps/npm/node_modules/read-installed/test/issue-40.js
@@ -0,0 +1,15 @@
+var readInstalled = require('../read-installed.js');
+var test = require('tap').test;
+var path = require('path');
+
+test('prerelease packages should not be marked invalid', function(t) {
+ readInstalled(
+ path.join(__dirname, 'fixtures/issue-40'),
+ { log: console.error },
+ function(err, map) {
+ t.strictEqual(map.dependencies.fake.version, '0.1.0-2');
+ t.notOk(map.dependencies.fake.invalid);
+ t.end();
+ }
+ );
+});
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/README.md b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCES`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/package.json b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..349c3d824
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@2 || 3",
+ "/Users/isaacs/dev/npm/npm/node_modules/read-package-json"
+ ]
+ ],
+ "_from": "graceful-fs@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/read-package-json/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@2 || 3",
+ "rawSpec": "2 || 3",
+ "scope": null,
+ "spec": ">=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/read-package-json"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@2 || 3",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/read-package-json",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/request/.eslintrc b/deps/npm/node_modules/request/.eslintrc
deleted file mode 100644
index e79f481f0..000000000
--- a/deps/npm/node_modules/request/.eslintrc
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "env": {
- "node": true
- },
- "rules": {
- // 2-space indentation
- "indent": [2, 2],
- // Disallow semi-colons, unless needed to disambiguate statement
- "semi": [2, "never"],
- // Require strings to use single quotes
- "quotes": [2, "single"],
- // Require curly braces for all control statements
- "curly": 2,
- // Disallow using variables and functions before they've been defined
- "no-use-before-define": 2,
- // Allow any case for variable naming
- "camelcase": 0,
- // Disallow unused variables, except as function arguments
- "no-unused-vars": [2, {"args":"none"}],
- // Allow leading underscores for method names
- // REASON: we use underscores to denote private methods
- "no-underscore-dangle": 0,
- // Allow multi spaces around operators since they are
- // used for alignment. This is not consistent in the
- // code.
- "no-multi-spaces": 0,
- // Style rule is: most objects use { beforeColon: false, afterColon: true }, unless aligning which uses:
- //
- // {
- // beforeColon : true,
- // afterColon : true
- // }
- //
- // eslint can't handle this, so the check is disabled.
- "key-spacing": 0,
- // Allow shadowing vars in outer scope (needs discussion)
- "no-shadow": 0,
- // Use if () { }
- // ^ space
- "space-after-keywords": [2, "always"],
- // Use if () { }
- // ^ space
- "space-before-blocks": [2, "always"]
- }
-}
diff --git a/deps/npm/node_modules/request/.travis.yml b/deps/npm/node_modules/request/.travis.yml
index bd0f638eb..6180cb5d7 100644
--- a/deps/npm/node_modules/request/.travis.yml
+++ b/deps/npm/node_modules/request/.travis.yml
@@ -3,10 +3,12 @@ node_js:
- "io.js"
- "0.12"
- "0.10"
-after_script: ./node_modules/.bin/istanbul cover ./node_modules/tape/bin/tape tests/test-*.js --report lcovonly && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js --verbose
+sudo: false
+
+after_script: "npm run test-cov && cat ./coverage/lcov.info | codecov && cat ./coverage/lcov.info | coveralls"
+
webhooks:
urls: https://webhooks.gitter.im/e/237280ed4796c19cc626
on_success: change # options: [always|never|change] default: always
on_failure: always # options: [always|never|change] default: always
on_start: false # default: false
-sudo: false
diff --git a/deps/npm/node_modules/request/CHANGELOG.md b/deps/npm/node_modules/request/CHANGELOG.md
index 2de0061ad..4cc1fcbe4 100644
--- a/deps/npm/node_modules/request/CHANGELOG.md
+++ b/deps/npm/node_modules/request/CHANGELOG.md
@@ -1,5 +1,24 @@
## Change Log
+### v2.60.0 (2015/07/21)
+- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews)
+
+### v2.59.0 (2015/07/20)
+- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options. Forever option defaults to using http(s).Agent in node 0.12+ (@simov)
+- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman)
+- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme)
+- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov)
+- [#1651](https://github.com/request/request/pull/1651) Preserve HEAD method when using followAllRedirects (@simov)
+- [#1652](https://github.com/request/request/pull/1652) Update `encoding` option documentation in README.md (@daniel347x)
+- [#1650](https://github.com/request/request/pull/1650) Allow content-type overriding when using the `form` option (@simov)
+- [#1646](https://github.com/request/request/pull/1646) Clarify the nature of setting `ca` in `agentOptions` (@jeffcharles)
+
+### v2.58.0 (2015/06/16)
+- [#1638](https://github.com/request/request/pull/1638) Use the `extend` module to deep extend in the defaults method (@simov)
+- [#1631](https://github.com/request/request/pull/1631) Move tunnel logic into separate module (@simov)
+- [#1634](https://github.com/request/request/pull/1634) Fix OAuth query transport_method (@simov)
+- [#1603](https://github.com/request/request/pull/1603) Add codecov (@simov)
+
### v2.57.0 (2015/05/31)
- [#1615](https://github.com/request/request/pull/1615) Replace '.client' with '.socket' as the former was deprecated in 2.2.0. (@ChALkeR)
@@ -68,7 +87,7 @@
- [#1392](https://github.com/request/request/pull/1392) Improve `timeout` option description (@watson)
### v2.52.0 (2015/02/02)
-- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen, @brichard19)
+- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen)
- [#1388](https://github.com/request/request/pull/1388) Upgrade mime-types package version (@roderickhsiao)
- [#1389](https://github.com/request/request/pull/1389) Revise Setup Tunnel Function (@seanstrom)
- [#1374](https://github.com/request/request/pull/1374) Allow explicitly disabling tunneling for proxied https destinations (@nylen)
@@ -474,7 +493,7 @@
- [#121](https://github.com/request/request/pull/121) Another patch for cookie handling regression (@jhurliman)
- [#117](https://github.com/request/request/pull/117) Remove the global `i` (@3rd-Eden)
- [#110](https://github.com/request/request/pull/110) Update to Iris Couch URL (@jhs)
-- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@developmentseed)
+- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@kkaefer)
- [#105](https://github.com/request/request/pull/105) added test for proxy option. (@dominictarr)
- [#102](https://github.com/request/request/pull/102) Implemented cookies - closes issue 82: https://github.com/mikeal/request/issues/82 (@alessioalex)
- [#97](https://github.com/request/request/pull/97) Typo in previous pull causes TypeError in non-0.5.11 versions (@isaacs)
diff --git a/deps/npm/node_modules/request/README.md b/deps/npm/node_modules/request/README.md
index d8bd40570..b72276798 100644
--- a/deps/npm/node_modules/request/README.md
+++ b/deps/npm/node_modules/request/README.md
@@ -4,6 +4,7 @@
[![npm package](https://nodei.co/npm/request.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/request/)
[![Build status](https://img.shields.io/travis/request/request.svg?style=flat-square)](https://travis-ci.org/request/request)
+[![Coverage](https://img.shields.io/codecov/c/github/request/request.svg?style=flat-square)](https://codecov.io/github/request/request?branch=master)
[![Coverage](https://img.shields.io/coveralls/request/request.svg?style=flat-square)](https://coveralls.io/r/request/request)
[![Dependency Status](https://img.shields.io/david/request/request.svg?style=flat-square)](https://david-dm.org/request/request)
[![Gitter](https://img.shields.io/badge/gitter-join_chat-blue.svg?style=flat-square)](https://gitter.im/request/request?utm_source=badge)
@@ -410,7 +411,7 @@ request.post({url:url, oauth:oauth}, function (e, r, body) {
, user_id: perm_data.user_id
}
;
- request.get({url:url, oauth:oauth, json:true}, function (e, r, user) {
+ request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) {
console.log(user)
})
})
@@ -645,7 +646,8 @@ request.get({
It is possible to accept other certificates than those signed by generally allowed Certificate Authorities (CAs).
This can be useful, for example, when using self-signed certificates.
-To allow a different certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`:
+To require a different root certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`.
+The certificate the domain presents must be signed by the root certificate specified:
```js
request.get({
@@ -764,13 +766,17 @@ The first argument can be either a `url` or an `options` object. The only requir
---
-- `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default).
+- `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.)
- `gzip` - If `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below.
- `jar` - If `true` and `tough-cookie` is installed, remember cookies for future use (or define your custom cookie jar; see examples section)
---
-- `pool` - An object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as [your options allow for it](request.js#L747)). Otherwise, request will search the pool for your custom agent. If no custom agent is found, a new agent will be created and added to the pool.
+- `agent` - `http(s).Agent` instance to use
+- `agentClass` - alternatively specify your agent's class name
+- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions).
+- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+
+- `pool` - An object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified.
- A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`).
- Note that if you are sending multiple requests in a loop and creating
multiple new `pool` objects, `maxSockets` will not work as intended. To
@@ -781,10 +787,12 @@ The first argument can be either a `url` or an `options` object. The only requir
request to respond before aborting the request. Note that if the underlying
TCP connection cannot be established, the OS-wide TCP connection timeout will
overrule the `timeout` option ([the default in Linux is around 20 seconds](http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout)).
+
+---
+
- `localAddress` - Local interface to bind for network connections.
- `proxy` - An HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`)
- `strictSSL` - If `true`, requires SSL certificates be valid. **Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option.
-- `agentOptions` - Object containing user agent options. See documentation above. **Note:** [see tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
- `tunnel` - controls the behavior of
[HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling)
as follows:
@@ -801,9 +809,6 @@ The first argument can be either a `url` or an `options` object. The only requir
---
- `time` - If `true`, the request-response cycle (including all redirects) is timed at millisecond resolution, and the result provided on the response's `elapsedTime` property.
-
----
-
- `har` - A [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-1.2) for details)*
The callback argument gets 3 arguments:
diff --git a/deps/npm/node_modules/request/index.js b/deps/npm/node_modules/request/index.js
index 5872824c8..3fe600175 100755
--- a/deps/npm/node_modules/request/index.js
+++ b/deps/npm/node_modules/request/index.js
@@ -14,7 +14,7 @@
'use strict'
-var extend = require('util')._extend
+var extend = require('extend')
, cookies = require('./lib/cookies')
, helpers = require('./lib/helpers')
@@ -30,12 +30,11 @@ function initParams(uri, options, callback) {
var params = {}
if (typeof options === 'object') {
- params = extend({}, options)
- params = extend(params, {uri: uri})
+ extend(params, options, {uri: uri})
} else if (typeof uri === 'string') {
- params = extend({}, {uri: uri})
+ extend(params, {uri: uri})
} else {
- params = extend({}, uri)
+ extend(params, uri)
}
params.callback = callback
@@ -86,24 +85,18 @@ function wrapRequestMethod (method, options, requester, verb) {
return function (uri, opts, callback) {
var params = initParams(uri, opts, callback)
- var headerlessOptions = extend({}, options)
- delete headerlessOptions.headers
- params = extend(headerlessOptions, params)
-
- if (options.headers) {
- var headers = extend({}, options.headers)
- params.headers = extend(headers, params.headers)
- }
+ var target = {}
+ extend(true, target, options, params)
if (verb) {
- params.method = (verb === 'del' ? 'DELETE' : verb.toUpperCase())
+ target.method = (verb === 'del' ? 'DELETE' : verb.toUpperCase())
}
if (isFunction(requester)) {
method = requester
}
- return method(params, params.callback)
+ return method(target, target.callback)
}
}
@@ -131,7 +124,7 @@ request.defaults = function (options, requester) {
request.forever = function (agentOptions, optionsArg) {
var options = {}
if (optionsArg) {
- options = extend({}, optionsArg)
+ extend(options, optionsArg)
}
if (agentOptions) {
options.agentOptions = agentOptions
diff --git a/deps/npm/node_modules/request/lib/copy.js b/deps/npm/node_modules/request/lib/copy.js
deleted file mode 100644
index ad162a508..000000000
--- a/deps/npm/node_modules/request/lib/copy.js
+++ /dev/null
@@ -1,10 +0,0 @@
-'use strict'
-
-module.exports =
-function copy (obj) {
- var o = {}
- Object.keys(obj).forEach(function (i) {
- o[i] = obj[i]
- })
- return o
-}
diff --git a/deps/npm/node_modules/request/lib/helpers.js b/deps/npm/node_modules/request/lib/helpers.js
index 1d588ca94..5e8594606 100644
--- a/deps/npm/node_modules/request/lib/helpers.js
+++ b/deps/npm/node_modules/request/lib/helpers.js
@@ -46,10 +46,29 @@ function toBase64 (str) {
return (new Buffer(str || '', 'utf8')).toString('base64')
}
+function copy (obj) {
+ var o = {}
+ Object.keys(obj).forEach(function (i) {
+ o[i] = obj[i]
+ })
+ return o
+}
+
+function version () {
+ var numbers = process.version.replace('v', '').split('.')
+ return {
+ major: parseInt(numbers[0], 10),
+ minor: parseInt(numbers[1], 10),
+ patch: parseInt(numbers[2], 10)
+ }
+}
+
exports.isFunction = isFunction
exports.paramsHaveRequestBody = paramsHaveRequestBody
exports.safeStringify = safeStringify
exports.md5 = md5
exports.isReadStream = isReadStream
exports.toBase64 = toBase64
+exports.copy = copy
+exports.version = version
exports.defer = deferMethod()
diff --git a/deps/npm/node_modules/request/lib/oauth.js b/deps/npm/node_modules/request/lib/oauth.js
index 84059724a..c24209b89 100644
--- a/deps/npm/node_modules/request/lib/oauth.js
+++ b/deps/npm/node_modules/request/lib/oauth.js
@@ -1,6 +1,7 @@
'use strict'
-var qs = require('qs')
+var url = require('url')
+ , qs = require('qs')
, caseless = require('caseless')
, uuid = require('node-uuid')
, oauth = require('oauth-sign')
@@ -80,7 +81,7 @@ OAuth.prototype.concatParams = function (oa, sep, wrap) {
}).sort()
if (oa.realm) {
- params.splice(0, 1, 'realm')
+ params.splice(0, 0, 'realm')
}
params.push('oauth_signature')
@@ -129,7 +130,9 @@ OAuth.prototype.onRequest = function (_oauth) {
break
case 'query':
- self.request.path = (query ? '&' : '?') + self.concatParams(oa, '&')
+ var href = self.request.uri.href += (query ? '&' : '?') + self.concatParams(oa, '&')
+ self.request.uri = url.parse(href)
+ self.request.path = self.request.uri.path
break
case 'body':
diff --git a/deps/npm/node_modules/request/lib/redirect.js b/deps/npm/node_modules/request/lib/redirect.js
index 1d4650299..b2d0f613a 100644
--- a/deps/npm/node_modules/request/lib/redirect.js
+++ b/deps/npm/node_modules/request/lib/redirect.js
@@ -113,7 +113,8 @@ Redirect.prototype.onResponse = function (response) {
, redirectUri: redirectTo
}
)
- if (self.followAllRedirects && response.statusCode !== 401 && response.statusCode !== 307) {
+ if (self.followAllRedirects && request.method !== 'HEAD'
+ && response.statusCode !== 401 && response.statusCode !== 307) {
request.method = 'GET'
}
// request.method = 'GET' // Force all redirects to use GET || commented out fixes #215
diff --git a/deps/npm/node_modules/request/lib/tunnel.js b/deps/npm/node_modules/request/lib/tunnel.js
new file mode 100644
index 000000000..cf28016e2
--- /dev/null
+++ b/deps/npm/node_modules/request/lib/tunnel.js
@@ -0,0 +1,183 @@
+'use strict'
+
+var url = require('url')
+ , tunnel = require('tunnel-agent')
+
+var defaultProxyHeaderWhiteList = [
+ 'accept',
+ 'accept-charset',
+ 'accept-encoding',
+ 'accept-language',
+ 'accept-ranges',
+ 'cache-control',
+ 'content-encoding',
+ 'content-language',
+ 'content-length',
+ 'content-location',
+ 'content-md5',
+ 'content-range',
+ 'content-type',
+ 'connection',
+ 'date',
+ 'expect',
+ 'max-forwards',
+ 'pragma',
+ 'referer',
+ 'te',
+ 'transfer-encoding',
+ 'user-agent',
+ 'via'
+]
+
+var defaultProxyHeaderExclusiveList = [
+ 'proxy-authorization'
+]
+
+function constructProxyHost(uriObject) {
+ var port = uriObject.port
+ , protocol = uriObject.protocol
+ , proxyHost = uriObject.hostname + ':'
+
+ if (port) {
+ proxyHost += port
+ } else if (protocol === 'https:') {
+ proxyHost += '443'
+ } else {
+ proxyHost += '80'
+ }
+
+ return proxyHost
+}
+
+function constructProxyHeaderWhiteList(headers, proxyHeaderWhiteList) {
+ var whiteList = proxyHeaderWhiteList
+ .reduce(function (set, header) {
+ set[header.toLowerCase()] = true
+ return set
+ }, {})
+
+ return Object.keys(headers)
+ .filter(function (header) {
+ return whiteList[header.toLowerCase()]
+ })
+ .reduce(function (set, header) {
+ set[header] = headers[header]
+ return set
+ }, {})
+}
+
+function constructTunnelOptions (request, proxyHeaders) {
+ var proxy = request.proxy
+
+ var tunnelOptions = {
+ proxy : {
+ host : proxy.hostname,
+ port : +proxy.port,
+ proxyAuth : proxy.auth,
+ headers : proxyHeaders
+ },
+ headers : request.headers,
+ ca : request.ca,
+ cert : request.cert,
+ key : request.key,
+ passphrase : request.passphrase,
+ pfx : request.pfx,
+ ciphers : request.ciphers,
+ rejectUnauthorized : request.rejectUnauthorized,
+ secureOptions : request.secureOptions,
+ secureProtocol : request.secureProtocol
+ }
+
+ return tunnelOptions
+}
+
+function constructTunnelFnName(uri, proxy) {
+ var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http')
+ var proxyProtocol = (proxy.protocol === 'https:' ? 'Https' : 'Http')
+ return [uriProtocol, proxyProtocol].join('Over')
+}
+
+function getTunnelFn(request) {
+ var uri = request.uri
+ var proxy = request.proxy
+ var tunnelFnName = constructTunnelFnName(uri, proxy)
+ return tunnel[tunnelFnName]
+}
+
+
+function Tunnel (request) {
+ this.request = request
+ this.proxyHeaderWhiteList = defaultProxyHeaderWhiteList
+ this.proxyHeaderExclusiveList = []
+}
+
+Tunnel.prototype.isEnabled = function (options) {
+ var request = this.request
+ // Tunnel HTTPS by default, or if a previous request in the redirect chain
+ // was tunneled. Allow the user to override this setting.
+
+ // If self.tunnel is already set (because this is a redirect), use the
+ // existing value.
+ if (typeof request.tunnel !== 'undefined') {
+ return request.tunnel
+ }
+
+ // If options.tunnel is set (the user specified a value), use it.
+ if (typeof options.tunnel !== 'undefined') {
+ return options.tunnel
+ }
+
+ // If the destination is HTTPS, tunnel.
+ if (request.uri.protocol === 'https:') {
+ return true
+ }
+
+ // Otherwise, leave tunnel unset, because if a later request in the redirect
+ // chain is HTTPS then that request (and any subsequent ones) should be
+ // tunneled.
+ return undefined
+}
+
+Tunnel.prototype.setup = function (options) {
+ var self = this
+ , request = self.request
+
+ options = options || {}
+
+ if (typeof request.proxy === 'string') {
+ request.proxy = url.parse(request.proxy)
+ }
+
+ if (!request.proxy || !request.tunnel) {
+ return false
+ }
+
+ // Setup Proxy Header Exclusive List and White List
+ if (options.proxyHeaderWhiteList) {
+ self.proxyHeaderWhiteList = options.proxyHeaderWhiteList
+ }
+ if (options.proxyHeaderExclusiveList) {
+ self.proxyHeaderExclusiveList = options.proxyHeaderExclusiveList
+ }
+
+ var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList)
+ var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList)
+
+ // Setup Proxy Headers and Proxy Headers Host
+ // Only send the Proxy White Listed Header names
+ var proxyHeaders = constructProxyHeaderWhiteList(request.headers, proxyHeaderWhiteList)
+ proxyHeaders.host = constructProxyHost(request.uri)
+
+ proxyHeaderExclusiveList.forEach(request.removeHeader, request)
+
+ // Set Agent from Tunnel Data
+ var tunnelFn = getTunnelFn(request)
+ var tunnelOptions = constructTunnelOptions(request, proxyHeaders)
+ request.agent = tunnelFn(tunnelOptions)
+
+ return true
+}
+
+Tunnel.defaultProxyHeaderWhiteList = defaultProxyHeaderWhiteList
+Tunnel.defaultProxyHeaderExclusiveList = defaultProxyHeaderExclusiveList
+exports.Tunnel = Tunnel
diff --git a/deps/npm/node_modules/request/node_modules/aws-sign2/package.json b/deps/npm/node_modules/request/node_modules/aws-sign2/package.json
index 89adc7ded..b454fe469 100644
--- a/deps/npm/node_modules/request/node_modules/aws-sign2/package.json
+++ b/deps/npm/node_modules/request/node_modules/aws-sign2/package.json
@@ -22,9 +22,25 @@
"bugs": {
"url": "https://github.com/mikeal/aws-sign/issues"
},
- "homepage": "https://github.com/mikeal/aws-sign#readme",
"_id": "aws-sign2@0.5.0",
+ "dist": {
+ "shasum": "c57103f7a17fc037f02d7c2e64b602ea223f7d63",
+ "tarball": "http://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz"
+ },
+ "_from": "aws-sign2@>=0.5.0 <0.6.0",
+ "_npmVersion": "1.3.2",
+ "_npmUser": {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ }
+ ],
+ "directories": {},
"_shasum": "c57103f7a17fc037f02d7c2e64b602ea223f7d63",
"_resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz",
- "_from": "aws-sign2@>=0.5.0 <0.6.0"
+ "homepage": "https://github.com/mikeal/aws-sign#readme"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/.jshintrc b/deps/npm/node_modules/request/node_modules/bl/.jshintrc
deleted file mode 100644
index c8ef3ca40..000000000
--- a/deps/npm/node_modules/request/node_modules/bl/.jshintrc
+++ /dev/null
@@ -1,59 +0,0 @@
-{
- "predef": [ ]
- , "bitwise": false
- , "camelcase": false
- , "curly": false
- , "eqeqeq": false
- , "forin": false
- , "immed": false
- , "latedef": false
- , "noarg": true
- , "noempty": true
- , "nonew": true
- , "plusplus": false
- , "quotmark": true
- , "regexp": false
- , "undef": true
- , "unused": true
- , "strict": false
- , "trailing": true
- , "maxlen": 120
- , "asi": true
- , "boss": true
- , "debug": true
- , "eqnull": true
- , "esnext": true
- , "evil": true
- , "expr": true
- , "funcscope": false
- , "globalstrict": false
- , "iterator": false
- , "lastsemic": true
- , "laxbreak": true
- , "laxcomma": true
- , "loopfunc": true
- , "multistr": false
- , "onecase": false
- , "proto": false
- , "regexdash": false
- , "scripturl": true
- , "smarttabs": false
- , "shadow": false
- , "sub": true
- , "supernew": false
- , "validthis": true
- , "browser": true
- , "couch": false
- , "devel": false
- , "dojo": false
- , "mootools": false
- , "node": true
- , "nonstandard": true
- , "prototypejs": false
- , "rhino": false
- , "worker": true
- , "wsh": false
- , "nomen": false
- , "onevar": false
- , "passfail": false
-} \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/bl/.travis.yml b/deps/npm/node_modules/request/node_modules/bl/.travis.yml
index 7ddb9c975..81c081418 100644
--- a/deps/npm/node_modules/request/node_modules/bl/.travis.yml
+++ b/deps/npm/node_modules/request/node_modules/bl/.travis.yml
@@ -1,11 +1,14 @@
language: node_js
+before_install:
+ - curl --location http://git.io/1OcIZA | bash -s
node_js:
- 0.8
- - "0.10"
+ - 0.10
+ - 0.11
branches:
only:
- master
notifications:
email:
- rod@vagg.org
-script: npm test \ No newline at end of file
+script: npm test
diff --git a/deps/npm/node_modules/request/node_modules/bl/README.md b/deps/npm/node_modules/request/node_modules/bl/README.md
index 6b7fb6d34..4d87866aa 100644
--- a/deps/npm/node_modules/request/node_modules/bl/README.md
+++ b/deps/npm/node_modules/request/node_modules/bl/README.md
@@ -1,5 +1,7 @@
# bl *(BufferList)*
+[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl)
+
**A Node.js Buffer list collector, reader and streamer thingy.**
[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml
new file mode 100644
index 000000000..a2870dfb1
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml
@@ -0,0 +1,39 @@
+sudo: false
+language: node_js
+before_install:
+ - npm install -g npm
+notifications:
+ email: false
+matrix:
+ include:
+ - node_js: '0.8'
+ env: TASK=test
+ - node_js: '0.10'
+ env: TASK=test
+ - node_js: '0.11'
+ env: TASK=test
+ - node_js: '0.12'
+ env: TASK=test
+ - node_js: 'iojs'
+ env: TASK=test
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="39..beta"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="34..beta"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
+ - node_js: 'iojs'
+ env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
+script: "npm run $TASK"
+env:
+ global:
+ - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
+ - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.zuul.yml b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.zuul.yml
new file mode 100644
index 000000000..96d9cfbd3
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.zuul.yml
@@ -0,0 +1 @@
+ui: tape
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md
index 5c455f420..77fafa3da 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md
@@ -1,14 +1,36 @@
# readable-stream
-***Node-core streams for userland***
+***Node-core streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)
+
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
-This package is a mirror of the Streams2 and Streams3 implementations in Node-core.
-If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core.
+[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream)
+
+```bash
+npm install --save readable-stream
+```
+
+***Node-core streams for userland***
+
+This package is a mirror of the Streams2 and Streams3 implementations in
+Node-core, including [documentation](doc/stream.markdown).
+
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background, see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+
+As of version 2.0.0 **readable-stream** uses semantic versioning.
-**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.
+# Streams WG Team Members
-**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"`
+* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;christopher.s.dickinson@gmail.com&gt;
+ - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
+ - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) &lt;rod@vagg.org&gt;
+ - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
+* **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;newmansam@outlook.com&gt;
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
+* **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;d@domenic.me&gt;
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown
new file mode 100644
index 000000000..a2270c880
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown
@@ -0,0 +1,1651 @@
+# Stream
+
+ Stability: 2 - Stable
+
+A stream is an abstract interface implemented by various objects in
+io.js. For example, a [request to an HTTP
+server](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_http_incomingmessage) is a stream, as is
+[stdout][]. Streams are readable, writable, or both. All streams are
+instances of [EventEmitter][].
+
+You can load the Stream base classes by doing `require('stream')`.
+There are base classes provided for [Readable][] streams, [Writable][]
+streams, [Duplex][] streams, and [Transform][] streams.
+
+This document is split up into 3 sections. The first explains the
+parts of the API that you need to be aware of to use streams in your
+programs. If you never implement a streaming API yourself, you can
+stop there.
+
+The second section explains the parts of the API that you need to use
+if you implement your own custom streams yourself. The API is
+designed to make this easy for you to do.
+
+The third section goes into more depth about how streams work,
+including some of the internal mechanisms and functions that you
+should probably not modify unless you definitely know what you are
+doing.
+
+
+## API for Stream Consumers
+
+<!--type=misc-->
+
+Streams can be either [Readable][], [Writable][], or both ([Duplex][]).
+
+All streams are EventEmitters, but they also have other custom methods
+and properties depending on whether they are Readable, Writable, or
+Duplex.
+
+If a stream is both Readable and Writable, then it implements all of
+the methods and events below. So, a [Duplex][] or [Transform][] stream is
+fully described by this API, though their implementation may be
+somewhat different.
+
+It is not necessary to implement Stream interfaces in order to consume
+streams in your programs. If you **are** implementing streaming
+interfaces in your own program, please also refer to
+[API for Stream Implementors][] below.
+
+Almost all io.js programs, no matter how simple, use Streams in some
+way. Here is an example of using Streams in an io.js program:
+
+```javascript
+var http = require('http');
+
+var server = http.createServer(function (req, res) {
+ // req is an http.IncomingMessage, which is a Readable Stream
+ // res is an http.ServerResponse, which is a Writable Stream
+
+ var body = '';
+ // we want to get the data as utf8 strings
+ // If you don't set an encoding, then you'll get Buffer objects
+ req.setEncoding('utf8');
+
+ // Readable streams emit 'data' events once a listener is added
+ req.on('data', function (chunk) {
+ body += chunk;
+ });
+
+ // the end event tells you that you have the entire body
+ req.on('end', function () {
+ try {
+ var data = JSON.parse(body);
+ } catch (er) {
+ // uh oh! bad json!
+ res.statusCode = 400;
+ return res.end('error: ' + er.message);
+ }
+
+ // write back something interesting to the user:
+ res.write(typeof data);
+ res.end();
+ });
+});
+
+server.listen(1337);
+
+// $ curl localhost:1337 -d '{}'
+// object
+// $ curl localhost:1337 -d '"foo"'
+// string
+// $ curl localhost:1337 -d 'not json'
+// error: Unexpected token o
+```
+
+### Class: stream.Readable
+
+<!--type=class-->
+
+The Readable stream interface is the abstraction for a *source* of
+data that you are reading from. In other words, data comes *out* of a
+Readable stream.
+
+A Readable stream will not start emitting data until you indicate that
+you are ready to receive it.
+
+Readable streams have two "modes": a **flowing mode** and a **paused
+mode**. When in flowing mode, data is read from the underlying system
+and provided to your program as fast as possible. In paused mode, you
+must explicitly call `stream.read()` to get chunks of data out.
+Streams start out in paused mode.
+
+**Note**: If no data event handlers are attached, and there are no
+[`pipe()`][] destinations, and the stream is switched into flowing
+mode, then data will be lost.
+
+You can switch to flowing mode by doing any of the following:
+
+* Adding a [`'data'` event][] handler to listen for data.
+* Calling the [`resume()`][] method to explicitly open the flow.
+* Calling the [`pipe()`][] method to send the data to a [Writable][].
+
+You can switch back to paused mode by doing either of the following:
+
+* If there are no pipe destinations, by calling the [`pause()`][]
+ method.
+* If there are pipe destinations, by removing any [`'data'` event][]
+ handlers, and removing all pipe destinations by calling the
+ [`unpipe()`][] method.
+
+Note that, for backwards compatibility reasons, removing `'data'`
+event handlers will **not** automatically pause the stream. Also, if
+there are piped destinations, then calling `pause()` will not
+guarantee that the stream will *remain* paused once those
+destinations drain and ask for more data.
+
+Examples of readable streams include:
+
+* [http responses, on the client](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_http_incomingmessage)
+* [http requests, on the server](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_http_incomingmessage)
+* [fs read streams](https://iojs.org/dist/v2.3.0/doc/api/fs.html#fs_class_fs_readstream)
+* [zlib streams][]
+* [crypto streams][]
+* [tcp sockets][]
+* [child process stdout and stderr][]
+* [process.stdin][]
+
+#### Event: 'readable'
+
+When a chunk of data can be read from the stream, it will emit a
+`'readable'` event.
+
+In some cases, listening for a `'readable'` event will cause some data
+to be read into the internal buffer from the underlying system, if it
+hadn't already.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', function() {
+ // there is some data to read now
+});
+```
+
+Once the internal buffer is drained, a `readable` event will fire
+again when more data is available.
+
+#### Event: 'data'
+
+* `chunk` {Buffer | String} The chunk of data.
+
+Attaching a `data` event listener to a stream that has not been
+explicitly paused will switch the stream into flowing mode. Data will
+then be passed as soon as it is available.
+
+If you just want to get all the data out of the stream as fast as
+possible, this is the best way to do so.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+});
+```
+
+#### Event: 'end'
+
+This event fires when there will be no more data to read.
+
+Note that the `end` event **will not fire** unless the data is
+completely consumed. This can be done by switching into flowing mode,
+or by calling `read()` repeatedly until you get to the end.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+});
+readable.on('end', function() {
+ console.log('there will be no more data.');
+});
+```
+
+#### Event: 'close'
+
+Emitted when the underlying resource (for example, the backing file
+descriptor) has been closed. Not all streams will emit this.
+
+#### Event: 'error'
+
+* {Error Object}
+
+Emitted if there was an error receiving data.
+
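+A minimal sketch of handling it (reusing the `getReadableStreamSomehow()`
+placeholder from the examples above):
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('error', function(err) {
+  // without an 'error' listener, the error would be thrown instead
+  console.error('read failed:', err.message);
+});
+```
+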
+#### readable.read([size])
+
+* `size` {Number} Optional argument to specify how much data to read.
+* Return {String | Buffer | null}
+
+The `read()` method pulls some data out of the internal buffer and
+returns it. If there is no data available, then it will return
+`null`.
+
+If you pass in a `size` argument, then it will return that many
+bytes. If `size` bytes are not available, then it will return `null`.
+
+If you do not specify a `size` argument, then it will return all the
+data in the internal buffer.
+
+This method should only be called in paused mode. In flowing mode,
+this method is called automatically until the internal buffer is
+drained.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', function() {
+ var chunk;
+ while (null !== (chunk = readable.read())) {
+ console.log('got %d bytes of data', chunk.length);
+ }
+});
+```
+
+If this method returns a data chunk, then it will also trigger the
+emission of a [`'data'` event][].
+
+#### readable.setEncoding(encoding)
+
+* `encoding` {String} The encoding to use.
+* Return: `this`
+
+Call this function to cause the stream to return strings of the
+specified encoding instead of Buffer objects. For example, if you do
+`readable.setEncoding('utf8')`, then the output data will be
+interpreted as UTF-8 data, and returned as strings. If you do
+`readable.setEncoding('hex')`, then the data will be encoded in
+hexadecimal string format.
+
+This properly handles multi-byte characters that would otherwise be
+potentially mangled if you simply pulled the Buffers directly and
+called `buf.toString(encoding)` on them. If you want to read the data
+as strings, always use this method.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.setEncoding('utf8');
+readable.on('data', function(chunk) {
+ assert.equal(typeof chunk, 'string');
+ console.log('got %d characters of string data', chunk.length);
+});
+```
+
+#### readable.resume()
+
+* Return: `this`
+
+This method will cause the readable stream to resume emitting `data`
+events.
+
+This method will switch the stream into flowing mode. If you do *not*
+want to consume the data from a stream, but you *do* want to get to
+its `end` event, you can call [`readable.resume()`][] to open the flow of
+data.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.resume();
+readable.on('end', function() {
+ console.log('got to the end, but did not read anything');
+});
+```
+
+#### readable.pause()
+
+* Return: `this`
+
+This method will cause a stream in flowing mode to stop emitting
+`data` events, switching out of flowing mode. Any data that becomes
+available will remain in the internal buffer.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('data', function(chunk) {
+ console.log('got %d bytes of data', chunk.length);
+ readable.pause();
+ console.log('there will be no more data for 1 second');
+ setTimeout(function() {
+ console.log('now data will start flowing again');
+ readable.resume();
+ }, 1000);
+});
+```
+
+#### readable.isPaused()
+
+* Return: `Boolean`
+
+This method returns whether or not the `readable` has been **explicitly**
+paused by client code (using `readable.pause()` without a corresponding
+`readable.resume()`).
+
+```javascript
+var readable = new stream.Readable
+
+readable.isPaused() // === false
+readable.pause()
+readable.isPaused() // === true
+readable.resume()
+readable.isPaused() // === false
+```
+
+#### readable.pipe(destination[, options])
+
+* `destination` {[Writable][] Stream} The destination for writing data
+* `options` {Object} Pipe options
+ * `end` {Boolean} End the writer when the reader ends. Default = `true`
+
+This method pulls all the data out of a readable stream, and writes it
+to the supplied destination, automatically managing the flow so that
+the destination is not overwhelmed by a fast readable stream.
+
+Multiple destinations can be piped to safely.
+
+```javascript
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt'
+readable.pipe(writable);
+```
+
+This function returns the destination stream, so you can set up pipe
+chains like so:
+
+```javascript
+var r = fs.createReadStream('file.txt');
+var z = zlib.createGzip();
+var w = fs.createWriteStream('file.txt.gz');
+r.pipe(z).pipe(w);
+```
+
+For example, emulating the Unix `cat` command:
+
+```javascript
+process.stdin.pipe(process.stdout);
+```
+
+By default [`end()`][] is called on the destination when the source stream
+emits `end`, so that `destination` is no longer writable. Pass `{ end:
+false }` as `options` to keep the destination stream open.
+
+This keeps `writer` open so that "Goodbye" can be written at the
+end.
+
+```javascript
+reader.pipe(writer, { end: false });
+reader.on('end', function() {
+ writer.end('Goodbye\n');
+});
+```
+
+Note that `process.stderr` and `process.stdout` are never closed until
+the process exits, regardless of the specified options.
+
+#### readable.unpipe([destination])
+
+* `destination` {[Writable][] Stream} Optional specific stream to unpipe
+
+This method will remove the hooks set up for a previous `pipe()` call.
+
+If the destination is not specified, then all pipes are removed.
+
+If the destination is specified, but no pipe is set up for it, then
+this is a no-op.
+
+```javascript
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt',
+// but only for the first second
+readable.pipe(writable);
+setTimeout(function() {
+ console.log('stop writing to file.txt');
+ readable.unpipe(writable);
+ console.log('manually close the file stream');
+ writable.end();
+}, 1000);
+```
+
+#### readable.unshift(chunk)
+
+* `chunk` {Buffer | String} Chunk of data to unshift onto the read queue
+
+This is useful in certain cases where a stream is being consumed by a
+parser, which needs to "un-consume" some data that it has
+optimistically pulled out of the source, so that the stream can be
+passed on to some other party.
+
+If you find that you must often call `stream.unshift(chunk)` in your
+programs, consider implementing a [Transform][] stream instead. (See API
+for Stream Implementors, below.)
+
+```javascript
+// Pull off a header delimited by \n\n
+// use unshift() if we get too much
+// Call the callback with (error, header, stream)
+var StringDecoder = require('string_decoder').StringDecoder;
+function parseHeader(stream, callback) {
+ stream.on('error', callback);
+ stream.on('readable', onReadable);
+ var decoder = new StringDecoder('utf8');
+ var header = '';
+ function onReadable() {
+ var chunk;
+ while (null !== (chunk = stream.read())) {
+ var str = decoder.write(chunk);
+ if (str.match(/\n\n/)) {
+ // found the header boundary
+ var split = str.split(/\n\n/);
+ header += split.shift();
+ var remaining = split.join('\n\n');
+ var buf = new Buffer(remaining, 'utf8');
+ if (buf.length)
+ stream.unshift(buf);
+ stream.removeListener('error', callback);
+ stream.removeListener('readable', onReadable);
+ // now the body of the message can be read from the stream.
+ callback(null, header, stream);
+ } else {
+ // still reading the header.
+ header += str;
+ }
+ }
+ }
+}
+```
+
+#### readable.wrap(stream)
+
+* `stream` {Stream} An "old style" readable stream
+
+Versions of Node.js prior to v0.10 had streams that did not implement the
+entire Streams API as it is today. (See "Compatibility" below for
+more information.)
+
+If you are using an older io.js library that emits `'data'` events and
+has a [`pause()`][] method that is advisory only, then you can use the
+`wrap()` method to create a [Readable][] stream that uses the old stream
+as its data source.
+
+You will very rarely ever need to call this function, but it exists
+as a convenience for interacting with old io.js programs and libraries.
+
+For example:
+
+```javascript
+var OldReader = require('./old-api-module.js').OldReader;
+var oreader = new OldReader;
+var Readable = require('stream').Readable;
+var myReader = new Readable().wrap(oreader);
+
+myReader.on('readable', function() {
+ myReader.read(); // etc.
+});
+```
+
+
+### Class: stream.Writable
+
+<!--type=class-->
+
+The Writable stream interface is an abstraction for a *destination*
+that you are writing data *to*.
+
+Examples of writable streams include:
+
+* [http requests, on the client](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_class_http_clientrequest)
+* [http responses, on the server](https://iojs.org/dist/v2.3.0/doc/api/http.html#http_class_http_serverresponse)
+* [fs write streams](https://iojs.org/dist/v2.3.0/doc/api/fs.html#fs_class_fs_writestream)
+* [zlib streams][]
+* [crypto streams][]
+* [tcp sockets][]
+* [child process stdin](https://iojs.org/dist/v2.3.0/doc/api/child_process.html#child_process_child_stdin)
+* [process.stdout][], [process.stderr][]
+
+#### writable.write(chunk[, encoding][, callback])
+
+* `chunk` {String | Buffer} The data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Callback for when this chunk of data is flushed
+* Returns: {Boolean} True if the data was handled completely.
+
+This method writes some data to the underlying system, and calls the
+supplied callback once the data has been fully handled.
+
+The return value indicates if you should continue writing right now.
+If the data had to be buffered internally, then it will return
+`false`. Otherwise, it will return `true`.
+
+This return value is strictly advisory. You MAY continue to write,
+even if it returns `false`. However, writes will be buffered in
+memory, so it is best not to do this excessively. Instead, wait for
+the `drain` event before writing more data.
+
+#### Event: 'drain'
+
+If a [`writable.write(chunk)`][] call returns false, then the `drain`
+event will indicate when it is appropriate to begin writing more data
+to the stream.
+
+```javascript
+// Write the data to the supplied writable stream 1MM times.
+// Be attentive to back-pressure.
+function writeOneMillionTimes(writer, data, encoding, callback) {
+ var i = 1000000;
+ write();
+ function write() {
+ var ok = true;
+ do {
+ i -= 1;
+ if (i === 0) {
+ // last time!
+ writer.write(data, encoding, callback);
+ } else {
+ // see if we should continue, or wait
+ // don't pass the callback, because we're not done yet.
+ ok = writer.write(data, encoding);
+ }
+ } while (i > 0 && ok);
+ if (i > 0) {
+ // had to stop early!
+ // write some more once it drains
+ writer.once('drain', write);
+ }
+ }
+}
+```
+
+#### writable.cork()
+
+Forces buffering of all writes.
+
+Buffered data will be flushed either at `.uncork()` or at `.end()` call.
+
+#### writable.uncork()
+
+Flushes all data buffered since the `.cork()` call.
+
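+For example, a small sketch that batches several writes and lets them be
+flushed together at `.uncork()`, using the same `getWritableStreamSomehow()`
+placeholder as the other examples here:
+
+```javascript
+var writer = getWritableStreamSomehow();
+
+writer.cork();
+writer.write('header\n');
+writer.write('body\n');
+// the two writes above are buffered, and are flushed to the
+// underlying system when uncork() (or end()) is called.
+writer.uncork();
+```
+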
+#### writable.setDefaultEncoding(encoding)
+
+* `encoding` {String} The new default encoding
+
+Sets the default encoding for a writable stream.
+
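+For example, a minimal sketch:
+
+```javascript
+var writer = getWritableStreamSomehow();
+// strings written without an explicit encoding will now be
+// interpreted as base64-encoded data
+writer.setDefaultEncoding('base64');
+writer.write('aGVsbG8gd29ybGQ=');
+```
+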
+#### writable.end([chunk][, encoding][, callback])
+
+* `chunk` {String | Buffer} Optional data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Optional callback for when the stream is finished
+
+Call this method when no more data will be written to the stream. If
+supplied, the callback is attached as a listener on the `finish` event.
+
+Calling [`write()`][] after calling [`end()`][] will raise an error.
+
+```javascript
+// write 'hello, ' and then end with 'world!'
+var file = fs.createWriteStream('example.txt');
+file.write('hello, ');
+file.end('world!');
+// writing more now is not allowed!
+```
+
+#### Event: 'finish'
+
+When the [`end()`][] method has been called, and all data has been flushed
+to the underlying system, this event is emitted.
+
+```javascript
+var writer = getWritableStreamSomehow();
+for (var i = 0; i < 100; i ++) {
+ writer.write('hello, #' + i + '!\n');
+}
+writer.end('this is the end\n');
+writer.on('finish', function() {
+ console.error('all writes are now complete.');
+});
+```
+
+#### Event: 'pipe'
+
+* `src` {[Readable][] Stream} source stream that is piping to this writable
+
+This is emitted whenever the `pipe()` method is called on a readable
+stream, adding this writable to its set of destinations.
+
+```javascript
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('pipe', function(src) {
+ console.error('something is piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+```
+
+#### Event: 'unpipe'
+
+* `src` {[Readable][] Stream} The source stream that [unpiped][] this writable
+
+This is emitted whenever the [`unpipe()`][] method is called on a
+readable stream, removing this writable from its set of destinations.
+
+```javascript
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('unpipe', function(src) {
+ console.error('something has stopped piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+reader.unpipe(writer);
+```
+
+#### Event: 'error'
+
+* {Error object}
+
+Emitted if there was an error when writing or piping data.
+
+### Class: stream.Duplex
+
+Duplex streams are streams that implement both the [Readable][] and
+[Writable][] interfaces. See above for usage.
+
+Examples of Duplex streams include:
+
+* [tcp sockets][]
+* [zlib streams][]
+* [crypto streams][]
+
+
+### Class: stream.Transform
+
+Transform streams are [Duplex][] streams where the output is in some way
+computed from the input. They implement both the [Readable][] and
+[Writable][] interfaces. See above for usage.
+
+Examples of Transform streams include:
+
+* [zlib streams][]
+* [crypto streams][]
+
+
+## API for Stream Implementors
+
+<!--type=misc-->
+
+To implement any sort of stream, the pattern is the same:
+
+1. Extend the appropriate parent class in your own subclass. (The
+ [`util.inherits`][] method is particularly helpful for this.)
+2. Call the appropriate parent class constructor in your constructor,
+ to be sure that the internal mechanisms are set up properly.
+3. Implement one or more specific methods, as detailed below.
+
+The class to extend and the method(s) to implement depend on the sort
+of stream class you are writing:
+
+<table>
+ <thead>
+ <tr>
+ <th>
+ <p>Use-case</p>
+ </th>
+ <th>
+ <p>Class</p>
+ </th>
+ <th>
+ <p>Method(s) to implement</p>
+ </th>
+ </tr>
+ </thead>
+ <tr>
+ <td>
+ <p>Reading only</p>
+ </td>
+ <td>
+ <p>[Readable](#stream_class_stream_readable_1)</p>
+ </td>
+ <td>
+ <p><code>[_read][]</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Writing only</p>
+ </td>
+ <td>
+ <p>[Writable](#stream_class_stream_writable_1)</p>
+ </td>
+ <td>
+ <p><code>[_write][]</code>, <code>_writev</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Reading and writing</p>
+ </td>
+ <td>
+ <p>[Duplex](#stream_class_stream_duplex_1)</p>
+ </td>
+ <td>
+ <p><code>[_read][]</code>, <code>[_write][]</code>, <code>_writev</code></p>
+ </td>
+ </tr>
+ <tr>
+ <td>
+ <p>Operate on written data, then read the result</p>
+ </td>
+ <td>
+ <p>[Transform](#stream_class_stream_transform_1)</p>
+ </td>
+ <td>
+ <p><code>_transform</code>, <code>_flush</code></p>
+ </td>
+ </tr>
+</table>
+
+In your implementation code, it is very important to never call the
+methods described in [API for Stream Consumers][] above. Otherwise, you
+can potentially cause adverse side effects in programs that consume
+your streaming interfaces.
+
+### Class: stream.Readable
+
+<!--type=class-->
+
+`stream.Readable` is an abstract class designed to be extended with an
+underlying implementation of the [`_read(size)`][] method.
+
+Please see above under [API for Stream Consumers][] for how to consume
+streams in your programs. What follows is an explanation of how to
+implement Readable streams in your programs.
+
+#### Example: A Counting Stream
+
+<!--type=example-->
+
+This is a basic example of a Readable stream. It emits the numerals
+from 1 to 1,000,000 in ascending order, and then ends.
+
+```javascript
+var Readable = require('stream').Readable;
+var util = require('util');
+util.inherits(Counter, Readable);
+
+function Counter(opt) {
+ Readable.call(this, opt);
+ this._max = 1000000;
+ this._index = 1;
+}
+
+Counter.prototype._read = function() {
+ var i = this._index++;
+ if (i > this._max)
+ this.push(null);
+ else {
+ var str = '' + i;
+ var buf = new Buffer(str, 'ascii');
+ this.push(buf);
+ }
+};
+```
+
+#### Example: SimpleProtocol v1 (Sub-optimal)
+
+This is similar to the `parseHeader` function described above, but
+implemented as a custom stream. Also, note that this implementation
+does not convert the incoming data to a string.
+
+However, this would be better implemented as a [Transform][] stream. See
+below for a better implementation.
+
+```javascript
+// A parser for a simple data protocol.
+// The "header" is a JSON object, followed by 2 \n characters, and
+// then a message body.
+//
+// NOTE: This can be done more simply as a Transform stream!
+// Using Readable directly for this is sub-optimal. See the
+// alternative example below under the Transform section.
+
+var Readable = require('stream').Readable;
+var util = require('util');
+
+util.inherits(SimpleProtocol, Readable);
+
+function SimpleProtocol(source, options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(source, options);
+
+ Readable.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+
+ // source is a readable stream, such as a socket or file
+ this._source = source;
+
+ var self = this;
+ source.on('end', function() {
+ self.push(null);
+ });
+
+ // give it a kick whenever the source is readable
+ // read(0) will not consume any bytes
+ source.on('readable', function() {
+ self.read(0);
+ });
+
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._read = function(n) {
+ if (!this._inBody) {
+ var chunk = this._source.read();
+
+ // if the source doesn't have data, we don't have data yet.
+ if (chunk === null)
+ return this.push('');
+
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ this.push('');
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // now, because we got some extra data, unshift the rest
+ // back into the read queue so that our consumer will see it.
+ var b = chunk.slice(split);
+ this.unshift(b);
+
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+ }
+ } else {
+ // from there on, just provide the data to our consumer.
+ // careful not to push(null), since that would indicate EOF.
+ var chunk = this._source.read();
+ if (chunk) this.push(chunk);
+ }
+};
+
+// Usage:
+// var parser = new SimpleProtocol(source);
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+
+#### new stream.Readable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} The maximum number of bytes to store in
+ the internal buffer before ceasing to read from the underlying
+ resource. Default=16kb, or 16 for `objectMode` streams
+ * `encoding` {String} If specified, then buffers will be decoded to
+ strings using the specified encoding. Default=null
+  * `objectMode` {Boolean} Whether this stream should behave
+    as a stream of objects, meaning that stream.read(n) returns
+    a single value instead of a Buffer of size n. Default=false
+
+In classes that extend the Readable class, make sure to call the
+Readable constructor so that the buffering settings can be properly
+initialized.
+
+#### readable.\_read(size)
+
+* `size` {Number} Number of bytes to read asynchronously
+
+Note: **Implement this function, but do NOT call it directly.**
+
+This function should NOT be called directly. It should be implemented
+by child classes, and only called by the internal Readable class
+methods.
+
+All Readable stream implementations must provide a `_read` method to
+fetch data from the underlying resource.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
+When data is available, put it into the read queue by calling
+`readable.push(chunk)`. If `push` returns false, then you should stop
+reading. When `_read` is called again, you should start pushing more
+data.
+
+The `size` argument is advisory. Implementations where a "read" is a
+single call that returns data can use this to know how much data to
+fetch. Implementations where that is not relevant, such as TCP or
+TLS, may ignore this argument, and simply provide data whenever it
+becomes available. There is no need, for example, to "wait" until
+`size` bytes are available before calling [`stream.push(chunk)`][].
+
+#### readable.push(chunk[, encoding])
+
+* `chunk` {Buffer | null | String} Chunk of data to push into the read queue
+* `encoding` {String} Encoding of String chunks. Must be a valid
+ Buffer encoding, such as `'utf8'` or `'ascii'`
+* Returns: {Boolean} Whether or not more pushes should be performed
+
+Note: **This function should be called by Readable implementors, NOT
+by consumers of Readable streams.**
+
+The `_read()` function will not be called again until at least one
+`push(chunk)` call is made.
+
+The `Readable` class works by putting data into a read queue to be
+pulled out later by calling the `read()` method when the `'readable'`
+event fires.
+
+The `push()` method will explicitly insert some data into the read
+queue. If it is called with `null` then it will signal the end of the
+data (EOF).
+
+This API is designed to be as flexible as possible. For example,
+you may be wrapping a lower-level source which has some sort of
+pause/resume mechanism, and a data callback. In those cases, you
+could wrap the low-level source object by doing something like this:
+
+```javascript
+// source is an object with readStop() and readStart() methods,
+// and an `ondata` member that gets called when it has data, and
+// an `onend` member that gets called when the data is over.
+
+util.inherits(SourceWrapper, Readable);
+
+function SourceWrapper(options) {
+ Readable.call(this, options);
+
+ this._source = getLowlevelSourceObject();
+ var self = this;
+
+ // Every time there's data, we push it into the internal buffer.
+ this._source.ondata = function(chunk) {
+ // if push() returns false, then we need to stop reading from source
+ if (!self.push(chunk))
+ self._source.readStop();
+ };
+
+ // When the source ends, we push the EOF-signaling `null` chunk
+ this._source.onend = function() {
+ self.push(null);
+ };
+}
+
+// _read will be called when the stream wants to pull more data in
+// the advisory size argument is ignored in this case.
+SourceWrapper.prototype._read = function(size) {
+ this._source.readStart();
+};
+```
+
+
+### Class: stream.Writable
+
+<!--type=class-->
+
+`stream.Writable` is an abstract class designed to be extended with an
+underlying implementation of the [`_write(chunk, encoding, callback)`][] method.
+
+Please see above under [API for Stream Consumers][] for how to consume
+writable streams in your programs. What follows is an explanation of
+how to implement Writable streams in your programs.
+
+#### new stream.Writable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} Buffer level when [`write()`][] starts
+ returning false. Default=16kb, or 16 for `objectMode` streams
+ * `decodeStrings` {Boolean} Whether or not to decode strings into
+ Buffers before passing them to [`_write()`][]. Default=true
+  * `objectMode` {Boolean} Whether or not `write(anyObj)` is
+    a valid operation. If set, you can write arbitrary data instead
+    of only `Buffer` / `String` data. Default=false
+
+In classes that extend the Writable class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### writable.\_write(chunk, encoding, callback)
+
+* `chunk` {Buffer | String} The chunk to be written. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+  encoding type. If the chunk is a buffer, then this is the special
+  value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunk.
+
+All Writable stream implementations must provide a [`_write()`][]
+method to send data to the underlying resource.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+Call the callback using the standard `callback(error)` pattern to
+signal that the write completed successfully or with an error.
+
+If the `decodeStrings` flag is set in the constructor options, then
+`chunk` may be a string rather than a Buffer, and `encoding` will
+indicate the sort of string that it is. This is to support
+implementations that have an optimized handling for certain string
+data encodings. If you do not explicitly set the `decodeStrings`
+option to `false`, then you can safely ignore the `encoding` argument,
+and assume that `chunk` will always be a Buffer.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
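+For example, a minimal sketch of a Writable implementation that simply
+counts the bytes written to it (the `CountingWriter` name is only for
+illustration):
+
+```javascript
+var Writable = require('stream').Writable;
+var util = require('util');
+util.inherits(CountingWriter, Writable);
+
+function CountingWriter(options) {
+  Writable.call(this, options);
+  this.bytesWritten = 0;
+}
+
+CountingWriter.prototype._write = function(chunk, encoding, callback) {
+  // chunk is a Buffer unless the decodeStrings option was set to false
+  this.bytesWritten += chunk.length;
+  callback();
+};
+```
+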
+#### writable.\_writev(chunks, callback)
+
+* `chunks` {Array} The chunks to be written. Each chunk has the
+  following format: `{ chunk: ..., encoding: ... }`.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunks.
+
+Note: **This function MUST NOT be called directly.** It may be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+This function is completely optional to implement. In most cases it is
+unnecessary. If implemented, it will be called with all the chunks
+that are buffered in the write queue.
+
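+Continuing the `CountingWriter` sketch above, a `_writev` that handles all
+buffered chunks in one pass might look like this:
+
+```javascript
+CountingWriter.prototype._writev = function(chunks, callback) {
+  // each entry has the form { chunk: ..., encoding: ... }; with the
+  // default decodeStrings behavior every chunk is a Buffer.
+  var buffers = chunks.map(function(c) { return c.chunk; });
+  this.bytesWritten += Buffer.concat(buffers).length;
+  callback();
+};
+```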
+
+### Class: stream.Duplex
+
+<!--type=class-->
+
+A "duplex" stream is one that is both Readable and Writable, such as a
+TCP socket connection.
+
+Note that `stream.Duplex` is an abstract class designed to be extended
+with an underlying implementation of the `_read(size)` and
+[`_write(chunk, encoding, callback)`][] methods as you would with a
+Readable or Writable stream class.
+
+Since JavaScript doesn't have multiple prototypal inheritance, this
+class prototypally inherits from Readable, and then parasitically from
+Writable. It is thus up to the user to implement both the low-level
+`_read(n)` method and the low-level
+[`_write(chunk, encoding, callback)`][] method on extension duplex classes.
+
+#### new stream.Duplex(options)
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors. Also has the following fields:
+ * `allowHalfOpen` {Boolean} Default=true. If set to `false`, then
+ the stream will automatically end the readable side when the
+ writable side ends and vice versa.
+ * `readableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ for readable side of the stream. Has no effect if `objectMode`
+ is `true`.
+ * `writableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ for writable side of the stream. Has no effect if `objectMode`
+ is `true`.
+
+In classes that extend the Duplex class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
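+For example, a minimal sketch of a Duplex stream that echoes everything
+written to it back out of its readable side. Backpressure between the two
+sides is ignored for brevity, and the `EchoStream` name is only for
+illustration:
+
+```javascript
+var Duplex = require('stream').Duplex;
+var util = require('util');
+util.inherits(EchoStream, Duplex);
+
+function EchoStream(options) {
+  Duplex.call(this, options);
+}
+
+EchoStream.prototype._write = function(chunk, encoding, callback) {
+  // hand each written chunk straight to the readable side
+  this.push(chunk);
+  callback();
+};
+
+EchoStream.prototype._read = function(size) {
+  // data is pushed from _write, so there is nothing to do here
+};
+```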
+
+### Class: stream.Transform
+
+A "transform" stream is a duplex stream where the output is causally
+connected in some way to the input, such as a [zlib][] stream or a
+[crypto][] stream.
+
+There is no requirement that the output be the same size as the input,
+the same number of chunks, or arrive at the same time. For example, a
+Hash stream will only ever have a single chunk of output which is
+provided when the input is ended. A zlib stream will produce output
+that is either much smaller or much larger than its input.
+
+Rather than implement the [`_read()`][] and [`_write()`][] methods, Transform
+classes must implement the `_transform()` method, and may optionally
+also implement the `_flush()` method. (See below.)
+
+#### new stream.Transform([options])
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors.
+
+In classes that extend the Transform class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### transform.\_transform(chunk, encoding, callback)
+
+* `chunk` {Buffer | String} The chunk to be transformed. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+  encoding type. If the chunk is a buffer, then this is the special
+  value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument and data) when you are done processing the supplied chunk.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Transform
+class methods only.
+
+All Transform stream implementations must provide a `_transform`
+method to accept input and produce output.
+
+`_transform` should do whatever has to be done in this specific
+Transform class, to handle the bytes being written, and pass them off
+to the readable portion of the interface. Do asynchronous I/O,
+process things, and so on.
+
+Call `transform.push(outputChunk)` 0 or more times to generate output
+from this input chunk, depending on how much data you want to output
+as a result of this chunk.
+
+Call the callback function only when the current chunk is completely
+consumed. Note that there may or may not be output as a result of any
+particular input chunk. If you supply output as the second argument to the
+callback, it will be passed to the push method; in other words, the following
+are equivalent:
+
+```javascript
+transform.prototype._transform = function (data, encoding, callback) {
+ this.push(data);
+ callback();
+};
+
+transform.prototype._transform = function (data, encoding, callback) {
+ callback(null, data);
+};
+```
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
+#### transform.\_flush(callback)
+
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done flushing any remaining data.
+
+Note: **This function MUST NOT be called directly.** It MAY be implemented
+by child classes, and if so, will be called by the internal Transform
+class methods only.
+
+In some cases, your transform operation may need to emit a bit more
+data at the end of the stream. For example, a `Zlib` compression
+stream will store up some internal state so that it can optimally
+compress the output. At the end, however, it needs to do the best it
+can with what is left, so that the data will be complete.
+
+In those cases, you can implement a `_flush` method, which will be
+called at the very end, after all the written data is consumed, but
+before emitting `end` to signal the end of the readable side. Just
+like with `_transform`, call `transform.push(chunk)` zero or more
+times, as appropriate, and call `callback` when the flush operation is
+complete.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
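+For example, a sketch of a Transform that passes data through unchanged,
+counts the bytes, and emits the total as one final chunk from `_flush` (the
+`ByteCounter` name is only for illustration):
+
+```javascript
+var Transform = require('stream').Transform;
+var util = require('util');
+util.inherits(ByteCounter, Transform);
+
+function ByteCounter(options) {
+  Transform.call(this, options);
+  this._count = 0;
+}
+
+ByteCounter.prototype._transform = function(chunk, encoding, callback) {
+  this._count += chunk.length;
+  // pass the data through unchanged
+  callback(null, chunk);
+};
+
+ByteCounter.prototype._flush = function(callback) {
+  // emit one trailing chunk after all written data has been consumed
+  this.push('\ntotal bytes: ' + this._count + '\n');
+  callback();
+};
+```
+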
+#### Events: 'finish' and 'end'
+
+The [`finish`][] and [`end`][] events are from the parent Writable
+and Readable classes respectively. The `finish` event is fired after
+`.end()` is called and all chunks have been processed by `_transform`.
+The `end` event is fired after all data has been output, which happens
+after the callback in `_flush` has been called.
+
+#### Example: `SimpleProtocol` parser v2
+
+The example above of a simple protocol parser can be implemented
+simply by using the higher level [Transform][] stream class, similar to
+the `parseHeader` and `SimpleProtocol v1` examples above.
+
+In this example, rather than providing the input as an argument, it
+would be piped into the parser, which is a more idiomatic io.js stream
+approach.
+
+```javascript
+var util = require('util');
+var Transform = require('stream').Transform;
+util.inherits(SimpleProtocol, Transform);
+
+function SimpleProtocol(options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(options);
+
+ Transform.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._transform = function(chunk, encoding, done) {
+ if (!this._inBody) {
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+
+ // now, because we got some extra data, emit this first.
+ this.push(chunk.slice(split));
+ }
+ } else {
+ // from there on, just provide the data to our consumer as-is.
+ this.push(chunk);
+ }
+ done();
+};
+
+// Usage:
+// var parser = new SimpleProtocol();
+// source.pipe(parser)
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+
+### Class: stream.PassThrough
+
+This is a trivial implementation of a [Transform][] stream that simply
+passes the input bytes across to the output. Its purpose is mainly
+for examples and testing, but there are occasionally use cases where
+it can come in handy as a building block for novel sorts of streams.
+
+
+## Simplified Constructor API
+
+<!--type=misc-->
+
+In simple cases, a stream can now be constructed without inheritance.
+
+This can be done by passing the appropriate methods as constructor options:
+
+Examples:
+
+### Readable
+```javascript
+var readable = new stream.Readable({
+ read: function(n) {
+ // sets this._read under the hood
+ }
+});
+```
+
+### Writable
+```javascript
+var writable = new stream.Writable({
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+ }
+});
+
+// or
+
+var writable = new stream.Writable({
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+ }
+});
+```
+
+### Duplex
+```javascript
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+ },
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+ }
+});
+
+// or
+
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+ },
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+ }
+});
+```
+
+### Transform
+```javascript
+var transform = new stream.Transform({
+ transform: function(chunk, encoding, next) {
+ // sets this._transform under the hood
+ },
+ flush: function(done) {
+ // sets this._flush under the hood
+ }
+});
+```
+
+## Streams: Under the Hood
+
+<!--type=misc-->
+
+### Buffering
+
+<!--type=misc-->
+
+Both Writable and Readable streams will buffer data on an internal
+object called `_writableState.buffer` or `_readableState.buffer`,
+respectively.
+
+The amount of data that will potentially be buffered depends on the
+`highWaterMark` option which is passed into the constructor.
+
+Buffering in Readable streams happens when the implementation calls
+[`stream.push(chunk)`][]. If the consumer of the Stream does not call
+`stream.read()`, then the data will sit in the internal queue until it
+is consumed.
+
+Buffering in Writable streams happens when the user calls
+[`stream.write(chunk)`][] repeatedly, even when `write()` returns `false`.
+
+The purpose of streams, especially with the `pipe()` method, is to
+limit the buffering of data to acceptable levels, so that sources and
+destinations of varying speed will not overwhelm the available memory.
+
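+For example, a small sketch of a Writable created with a deliberately tiny
+`highWaterMark` (using the simplified constructor described above), so that
+`write()` starts reporting buffering almost immediately:
+
+```javascript
+var stream = require('stream');
+
+var slowWriter = new stream.Writable({
+  highWaterMark: 4, // buffer at most ~4 bytes before write() returns false
+  write: function(chunk, encoding, next) {
+    // pretend the underlying resource is slow
+    setTimeout(next, 100);
+  }
+});
+
+console.log(slowWriter.write('abc'));   // true: still under the high water mark
+console.log(slowWriter.write('defgh')); // false: data is now being buffered
+```
+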
+### `stream.read(0)`
+
+There are some cases where you want to trigger a refresh of the
+underlying readable stream mechanisms, without actually consuming any
+data. In that case, you can call `stream.read(0)`, which will always
+return null.
+
+If the internal read buffer is below the `highWaterMark`, and the
+stream is not currently reading, then calling `read(0)` will trigger
+a low-level `_read` call.
+
+There is almost never a need to do this. However, you will see some
+cases in io.js's internals where this is done, particularly in the
+Readable stream class internals.
+
+### `stream.push('')`
+
+Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
+interesting side effect. Because it *is* a call to
+[`stream.push()`][], it will end the `reading` process. However, it
+does *not* add any data to the readable buffer, so there's nothing for
+a user to consume.
+
+Very rarely, there are cases where you have no data to provide now,
+but the consumer of your stream (or, perhaps, another bit of your own
+code) will know when to check again, by calling `stream.read(0)`. In
+those cases, you *may* call `stream.push('')`.
+
+So far, the only use case for this functionality is in the
+[tls.CryptoStream][] class, which is deprecated in io.js v1.0. If you
+find that you have to use `stream.push('')`, please consider another
+approach, because it almost certainly indicates that something is
+horribly wrong.
+
+### Compatibility with Older Node.js Versions
+
+<!--type=misc-->
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for you to call the `read()` method, `'data'`
+ events would start emitting immediately. If you needed to do some
+ I/O to decide how to handle data, then you had to store the chunks
+ in some kind of buffer so that they would not be lost.
+* The [`pause()`][] method was advisory, rather than guaranteed. This
+ meant that you still had to be prepared to receive `'data'` events
+ even when the stream was in a paused state.
+
+In io.js v1.0 and Node.js v0.10, the Readable class described above was added.
+For backwards compatibility with older Node.js programs, Readable streams
+switch into "flowing mode" when a `'data'` event handler is added, or
+when the [`resume()`][] method is called. The effect is that, even if
+you are not using the new `read()` method and `'readable'` event, you
+no longer have to worry about losing `'data'` chunks.
+
+Most programs will continue to function normally. However, this
+introduces an edge case in the following conditions:
+
+* No [`'data'` event][] handler is added.
+* The [`resume()`][] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```javascript
+// WARNING! BROKEN!
+net.createServer(function(socket) {
+
+ // we add an 'end' listener, but never consume the data
+ socket.on('end', function() {
+ // It will never get here.
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would be
+simply discarded. However, in io.js v1.0 and Node.js v0.10 and beyond,
+the socket will remain paused forever.
+
+The workaround in this situation is to call the `resume()` method to
+start the flow of data:
+
+```javascript
+// Workaround
+net.createServer(function(socket) {
+
+ socket.on('end', function() {
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+ // start the flow of data, discarding it.
+ socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+`wrap()` method.
+
+
+### Object Mode
+
+<!--type=misc-->
+
+Normally, Streams operate on Strings and Buffers exclusively.
+
+Streams that are in **object mode** can emit generic JavaScript values
+other than Buffers and Strings.
+
+A Readable stream in object mode will always return a single item from
+a call to `stream.read(size)`, regardless of what the size argument
+is.
+
+A Writable stream in object mode will always ignore the `encoding`
+argument to `stream.write(data, encoding)`.
+
+The special value `null` still retains its special value for object
+mode streams. That is, for object mode readable streams, `null` as a
+return value from `stream.read()` indicates that there is no more
+data, and [`stream.push(null)`][] will signal the end of stream data
+(`EOF`).
+
+No streams in io.js core are object mode streams. This pattern is only
+used by userland streaming libraries.
+
+You should set `objectMode` on the options object passed to your stream
+subclass constructor. Setting `objectMode` mid-stream is not safe.
+
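+For example, a minimal sketch of an object mode Readable, built with the
+simplified constructor described above, that emits a fixed list of
+JavaScript objects:
+
+```javascript
+var Readable = require('stream').Readable;
+
+var items = [{ id: 1 }, { id: 2 }, { id: 3 }];
+var objReader = new Readable({
+  objectMode: true,
+  read: function() {
+    // push one object per call; null signals the end of the stream
+    this.push(items.length ? items.shift() : null);
+  }
+});
+
+objReader.on('data', function(obj) {
+  console.log('got object with id', obj.id);
+});
+```
+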
+For Duplex streams, `objectMode` can be set exclusively for the readable
+or the writable side with `readableObjectMode` and `writableObjectMode`
+respectively. These options can be used to implement parsers and
+serializers with Transform streams.
+
+```javascript
+var util = require('util');
+var StringDecoder = require('string_decoder').StringDecoder;
+var Transform = require('stream').Transform;
+util.inherits(JSONParseStream, Transform);
+
+// Gets \n-delimited JSON string data, and emits the parsed objects
+function JSONParseStream() {
+ if (!(this instanceof JSONParseStream))
+ return new JSONParseStream();
+
+ Transform.call(this, { readableObjectMode : true });
+
+ this._buffer = '';
+ this._decoder = new StringDecoder('utf8');
+}
+
+JSONParseStream.prototype._transform = function(chunk, encoding, cb) {
+ this._buffer += this._decoder.write(chunk);
+ // split on newlines
+ var lines = this._buffer.split(/\r?\n/);
+ // keep the last partial line buffered
+ this._buffer = lines.pop();
+ for (var l = 0; l < lines.length; l++) {
+ var line = lines[l];
+ try {
+ var obj = JSON.parse(line);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+
+JSONParseStream.prototype._flush = function(cb) {
+ // Just handle any leftover
+ var rem = this._buffer.trim();
+ if (rem) {
+ try {
+ var obj = JSON.parse(rem);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+```
+
+
+[EventEmitter]: https://iojs.org/dist/v2.3.0/doc/api/events.html#events_class_events_eventemitter
+[Object mode]: #stream_object_mode
+[`stream.push(chunk)`]: #stream_readable_push_chunk_encoding
+[`stream.push(null)`]: #stream_readable_push_chunk_encoding
+[`stream.push()`]: #stream_readable_push_chunk_encoding
+[`unpipe()`]: #stream_readable_unpipe_destination
+[unpiped]: #stream_readable_unpipe_destination
+[tcp sockets]: https://iojs.org/dist/v2.3.0/doc/api/net.html#net_class_net_socket
+[zlib streams]: zlib.html
+[zlib]: zlib.html
+[crypto streams]: crypto.html
+[crypto]: crypto.html
+[tls.CryptoStream]: https://iojs.org/dist/v2.3.0/doc/api/tls.html#tls_class_cryptostream
+[process.stdin]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stdin
+[stdout]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stdout
+[process.stdout]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stdout
+[process.stderr]: https://iojs.org/dist/v2.3.0/doc/api/process.html#process_process_stderr
+[child process stdout and stderr]: https://iojs.org/dist/v2.3.0/doc/api/child_process.html#child_process_child_stdout
+[API for Stream Consumers]: #stream_api_for_stream_consumers
+[API for Stream Implementors]: #stream_api_for_stream_implementors
+[Readable]: #stream_class_stream_readable
+[Writable]: #stream_class_stream_writable
+[Duplex]: #stream_class_stream_duplex
+[Transform]: #stream_class_stream_transform
+[`end`]: #stream_event_end
+[`finish`]: #stream_event_finish
+[`_read(size)`]: #stream_readable_read_size_1
+[`_read()`]: #stream_readable_read_size_1
+[_read]: #stream_readable_read_size_1
+[`writable.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
+[`write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback
+[`write()`]: #stream_writable_write_chunk_encoding_callback
+[`stream.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
+[`_write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback_1
+[`_write()`]: #stream_writable_write_chunk_encoding_callback_1
+[_write]: #stream_writable_write_chunk_encoding_callback_1
+[`util.inherits`]: https://iojs.org/dist/v2.3.0/doc/api/util.html#util_util_inherits_constructor_superconstructor
+[`end()`]: #stream_writable_end_chunk_encoding_callback
+[`'data'` event]: #stream_event_data
+[`resume()`]: #stream_readable_resume
+[`readable.resume()`]: #stream_readable_resume
+[`pause()`]: #stream_readable_pause
+[`pipe()`]: #stream_readable_pipe_destination_options
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
new file mode 100644
index 000000000..c141a99c2
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
@@ -0,0 +1,58 @@
+# streams WG Meeting 2015-01-30
+
+## Links
+
+* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
+* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
+* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
+
+## Agenda
+
+Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
+
+* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
+* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
+* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
+* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
+
+## Minutes
+
+### adopt a charter
+
+* group: +1's all around
+
+### What versioning scheme should be adopted?
+* group: +1’s 3.0.0
+* domenic+group: pulling in patches from other sources where appropriate
+* mikeal: version independently, suggesting versions for io.js
+* mikeal+domenic: work with TC to notify in advance of changes
+simpler stream creation
+
+### streamline creation of streams
+* sam: streamline creation of streams
+* domenic: nice simple solution posted
+ but, we lose the opportunity to change the model
+ may not be backwards incompatible (double check keys)
+
+ **action item:** domenic will check
+
+### remove implicit flowing of streams on(‘data’)
+* add isFlowing / isPaused
+* mikeal: worrying that we’re documenting polyfill methods – confuses users
+* domenic: more reflective API is probably good, with warning labels for users
+* new section for mad scientists (reflective stream access)
+* calvin: name the “third state”
+* mikeal: maybe borrow the name from whatwg?
+* domenic: we’re missing the “third state”
+* consensus: kind of difficult to name the third state
+* mikeal: figure out differences in states / compat
+* mathias: always flow on data – eliminates third state
+ * explore what it breaks
+
+**action items:**
+* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
+* ask rod/build for infrastructure
+* **chris**: explore the “flow on data” approach
+* add isPaused/isFlowing
+* add new docs section
+* move isPaused to that section
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
index b513d61a9..69558af03 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
@@ -1,30 +1,9 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
-module.exports = Duplex;
+'use strict';
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
@@ -35,6 +14,14 @@ var objectKeys = Object.keys || function (obj) {
/*</replacement>*/
+module.exports = Duplex;
+
+/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
@@ -45,10 +32,12 @@ var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
-forEach(objectKeys(Writable.prototype), function(method) {
+var keys = objectKeys(Writable.prototype);
+for (var v = 0; v < keys.length; v++) {
+ var method = keys[v];
if (!Duplex.prototype[method])
Duplex.prototype[method] = Writable.prototype[method];
-});
+}
function Duplex(options) {
if (!(this instanceof Duplex))
@@ -79,7 +68,11 @@ function onend() {
// no more data can be written.
// But allow more writes to happen in this tick.
- process.nextTick(this.end.bind(this));
+ processNextTick(onEndNT, this);
+}
+
+function onEndNT(self) {
+ self.end();
}
function forEach (xs, f) {
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
index 895ca50a1..bddfdd015 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
@@ -1,28 +1,9 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
+'use strict';
+
module.exports = PassThrough;
var Transform = require('./_stream_transform');
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
index 630722099..eef3d825d 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
@@ -1,27 +1,13 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict';
module.exports = Readable;
/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/
@@ -40,24 +26,57 @@ if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
};
/*</replacement>*/
-var Stream = require('stream');
+
+
+/*<replacement>*/
+var Stream;
+(function (){try{
+ Stream = require('st' + 'ream');
+}catch(_){}finally{
+ if (!Stream)
+ Stream = require('events').EventEmitter;
+}}())
+/*</replacement>*/
+
+var Buffer = require('buffer').Buffer;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
+
+
+/*<replacement>*/
+var debug = require('util');
+if (debug && debug.debuglog) {
+ debug = debug.debuglog('stream');
+} else {
+ debug = function () {};
+}
+/*</replacement>*/
+
var StringDecoder;
util.inherits(Readable, Stream);
function ReadableState(options, stream) {
+ var Duplex = require('./_stream_duplex');
+
options = options || {};
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex)
+ this.objectMode = this.objectMode || !!options.readableObjectMode;
+
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
@@ -66,19 +85,13 @@ function ReadableState(options, stream) {
this.length = 0;
this.pipes = null;
this.pipesCount = 0;
- this.flowing = false;
+ this.flowing = null;
this.ended = false;
this.endEmitted = false;
this.reading = false;
- // In streams that never have any data, and do push(null) right away,
- // the consumer can miss the 'end' event if they do some I/O before
- // consuming the stream. So, we don't emit('end') until some reading
- // happens.
- this.calledRead = false;
-
// a flag to be able to tell if the onwrite cb is called immediately,
- // or on a later tick. We set this to true at first, becuase any
+ // or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
@@ -89,11 +102,6 @@ function ReadableState(options, stream) {
this.emittedReadable = false;
this.readableListening = false;
-
- // object stream flag. Used to make read(n) ignore n and to
- // make all the buffer merging and length checks go away
- this.objectMode = !!options.objectMode;
-
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
@@ -120,6 +128,8 @@ function ReadableState(options, stream) {
}
function Readable(options) {
+ var Duplex = require('./_stream_duplex');
+
if (!(this instanceof Readable))
return new Readable(options);
@@ -128,6 +138,9 @@ function Readable(options) {
// legacy
this.readable = true;
+ if (options && typeof options.read === 'function')
+ this._read = options.read;
+
Stream.call(this);
}
@@ -138,7 +151,7 @@ function Readable(options) {
Readable.prototype.push = function(chunk, encoding) {
var state = this._readableState;
- if (typeof chunk === 'string' && !state.objectMode) {
+ if (!state.objectMode && typeof chunk === 'string') {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
chunk = new Buffer(chunk, encoding);
@@ -155,14 +168,17 @@ Readable.prototype.unshift = function(chunk) {
return readableAddChunk(this, state, chunk, '', true);
};
+Readable.prototype.isPaused = function() {
+ return this._readableState.flowing === false;
+};
+
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
- } else if (chunk === null || chunk === undefined) {
+ } else if (chunk === null) {
state.reading = false;
- if (!state.ended)
- onEofChunk(stream, state);
+ onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
@@ -174,17 +190,24 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront) {
- state.buffer.unshift(chunk);
- } else {
+ if (!addToFront)
state.reading = false;
- state.buffer.push(chunk);
- }
- if (state.needReadable)
- emitReadable(stream);
+ // if we want the data now, just emit it.
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront)
+ state.buffer.unshift(chunk);
+ else
+ state.buffer.push(chunk);
+
+ if (state.needReadable)
+ emitReadable(stream);
+ }
maybeReadMore(stream, state);
}
@@ -217,6 +240,7 @@ Readable.prototype.setEncoding = function(enc) {
StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
+ return this;
};
// Don't raise the hwm > 128MB
@@ -263,8 +287,9 @@ function howMuchToRead(n, state) {
if (!state.ended) {
state.needReadable = true;
return 0;
- } else
+ } else {
return state.length;
+ }
}
return n;
@@ -272,10 +297,9 @@ function howMuchToRead(n, state) {
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function(n) {
+ debug('read', n);
var state = this._readableState;
- state.calledRead = true;
var nOrig = n;
- var ret;
if (typeof n !== 'number' || n > 0)
state.emittedReadable = false;
@@ -286,7 +310,11 @@ Readable.prototype.read = function(n) {
if (n === 0 &&
state.needReadable &&
(state.length >= state.highWaterMark || state.ended)) {
- emitReadable(this);
+ debug('read: emitReadable', state.length, state.ended);
+ if (state.length === 0 && state.ended)
+ endReadable(this);
+ else
+ emitReadable(this);
return null;
}
@@ -294,28 +322,9 @@ Readable.prototype.read = function(n) {
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
- ret = null;
-
- // In cases where the decoder did not receive enough data
- // to produce a full chunk, then immediately received an
- // EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
- // howMuchToRead will see this and coerce the amount to
- // read to zero (because it's looking at the length of the
- // first <Buffer > in state.buffer), and we'll end up here.
- //
- // This can only happen via state.decoder -- no other venue
- // exists for pushing a zero-length chunk into state.buffer
- // and triggering this behavior. In this case, we return our
- // remaining data and end the stream, if appropriate.
- if (state.length > 0 && state.decoder) {
- ret = fromList(n, state);
- state.length -= ret.length;
- }
-
if (state.length === 0)
endReadable(this);
-
- return ret;
+ return null;
}
// All the actual chunk generation logic needs to be
@@ -342,17 +351,23 @@ Readable.prototype.read = function(n) {
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
+ debug('need readable', doRead);
// if we currently have less than the highWaterMark, then also read some
- if (state.length - n <= state.highWaterMark)
+ if (state.length === 0 || state.length - n < state.highWaterMark) {
doRead = true;
+ debug('length less than watermark', doRead);
+ }
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
- if (state.ended || state.reading)
+ if (state.ended || state.reading) {
doRead = false;
+ debug('reading or ended', doRead);
+ }
if (doRead) {
+ debug('do read');
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
@@ -363,12 +378,12 @@ Readable.prototype.read = function(n) {
state.sync = false;
}
- // If _read called its callback synchronously, then `reading`
- // will be false, and we need to re-evaluate how much data we
- // can return to the user.
+ // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
if (doRead && !state.reading)
n = howMuchToRead(nOrig, state);
+ var ret;
if (n > 0)
ret = fromList(n, state);
else
@@ -386,19 +401,20 @@ Readable.prototype.read = function(n) {
if (state.length === 0 && !state.ended)
state.needReadable = true;
- // If we happened to read() exactly the remaining amount in the
- // buffer, and the EOF has been seen at this point, then make sure
- // that we emit 'end' on the very next tick.
- if (state.ended && !state.endEmitted && state.length === 0)
+ // If we tried to read() past the EOF, then emit end on the next tick.
+ if (nOrig !== n && state.ended && state.length === 0)
endReadable(this);
+ if (ret !== null)
+ this.emit('data', ret);
+
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
- if (!Buffer.isBuffer(chunk) &&
- 'string' !== typeof chunk &&
+ if (!(Buffer.isBuffer(chunk)) &&
+ typeof chunk !== 'string' &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
@@ -409,7 +425,8 @@ function chunkInvalid(state, chunk) {
function onEofChunk(stream, state) {
- if (state.decoder && !state.ended) {
+ if (state.ended) return;
+ if (state.decoder) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
@@ -418,12 +435,8 @@ function onEofChunk(stream, state) {
}
state.ended = true;
- // if we've ended and we have some data left, then emit
- // 'readable' now to make sure it gets picked up.
- if (state.length > 0)
- emitReadable(stream);
- else
- endReadable(stream);
+ // emit 'readable' now to make sure it gets picked up.
+ emitReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
@@ -432,20 +445,20 @@ function onEofChunk(stream, state) {
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
- if (state.emittedReadable)
- return;
-
- state.emittedReadable = true;
- if (state.sync)
- process.nextTick(function() {
+ if (!state.emittedReadable) {
+ debug('emitReadable', state.flowing);
+ state.emittedReadable = true;
+ if (state.sync)
+ processNextTick(emitReadable_, stream);
+ else
emitReadable_(stream);
- });
- else
- emitReadable_(stream);
+ }
}
function emitReadable_(stream) {
+ debug('emit readable');
stream.emit('readable');
+ flow(stream);
}
@@ -458,9 +471,7 @@ function emitReadable_(stream) {
function maybeReadMore(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
- process.nextTick(function() {
- maybeReadMore_(stream, state);
- });
+ processNextTick(maybeReadMore_, stream, state);
}
}
@@ -468,6 +479,7 @@ function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended &&
state.length < state.highWaterMark) {
+ debug('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
@@ -502,6 +514,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
break;
}
state.pipesCount += 1;
+ debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
dest !== process.stdout &&
@@ -509,17 +522,20 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted)
- process.nextTick(endFn);
+ processNextTick(endFn);
else
src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
- if (readable !== src) return;
- cleanup();
+ debug('onunpipe');
+ if (readable === src) {
+ cleanup();
+ }
}
function onend() {
+ debug('onend');
dest.end();
}
@@ -531,6 +547,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
dest.on('drain', ondrain);
function cleanup() {
+ debug('cleanup');
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
@@ -539,19 +556,34 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
+ src.removeListener('data', ondata);
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
- if (!dest._writableState || dest._writableState.needDrain)
+ if (state.awaitDrain &&
+ (!dest._writableState || dest._writableState.needDrain))
ondrain();
}
+ src.on('data', ondata);
+ function ondata(chunk) {
+ debug('ondata');
+ var ret = dest.write(chunk);
+ if (false === ret) {
+ debug('false write response, pause',
+ src._readableState.awaitDrain);
+ src._readableState.awaitDrain++;
+ src.pause();
+ }
+ }
+
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
function onerror(er) {
+ debug('onerror', er);
unpipe();
dest.removeListener('error', onerror);
if (EE.listenerCount(dest, 'error') === 0)
@@ -575,12 +607,14 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
}
dest.once('close', onclose);
function onfinish() {
+ debug('onfinish');
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
+ debug('unpipe');
src.unpipe(dest);
}
@@ -589,16 +623,8 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
// start the flow if it hasn't been started already.
if (!state.flowing) {
- // the handler that waits for readable events after all
- // the data gets sucked out in flow.
- // This would be easier to follow with a .once() handler
- // in flow(), but that is too slow.
- this.on('readable', pipeOnReadable);
-
- state.flowing = true;
- process.nextTick(function() {
- flow(src);
- });
+ debug('pipe resume');
+ src.resume();
}
return dest;
@@ -606,63 +632,15 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
function pipeOnDrain(src) {
return function() {
- var dest = this;
var state = src._readableState;
- state.awaitDrain--;
- if (state.awaitDrain === 0)
+ debug('pipeOnDrain', state.awaitDrain);
+ if (state.awaitDrain)
+ state.awaitDrain--;
+ if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) {
+ state.flowing = true;
flow(src);
- };
-}
-
-function flow(src) {
- var state = src._readableState;
- var chunk;
- state.awaitDrain = 0;
-
- function write(dest, i, list) {
- var written = dest.write(chunk);
- if (false === written) {
- state.awaitDrain++;
}
- }
-
- while (state.pipesCount && null !== (chunk = src.read())) {
-
- if (state.pipesCount === 1)
- write(state.pipes, 0, null);
- else
- forEach(state.pipes, write);
-
- src.emit('data', chunk);
-
- // if anyone needs a drain, then we have to wait for that.
- if (state.awaitDrain > 0)
- return;
- }
-
- // if every destination was unpiped, either before entering this
- // function, or in the while loop, then stop flowing.
- //
- // NB: This is a pretty rare edge case.
- if (state.pipesCount === 0) {
- state.flowing = false;
-
- // if there were data event listeners added, then switch to old mode.
- if (EE.listenerCount(src, 'data') > 0)
- emitDataEvents(src);
- return;
- }
-
- // at this point, no one needed a drain, so we just ran out of data
- // on the next readable event, start it over again.
- state.ranOut = true;
-}
-
-function pipeOnReadable() {
- if (this._readableState.ranOut) {
- this._readableState.ranOut = false;
- flow(this);
- }
+ };
}
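
The reworked pipe() above writes through an ondata handler and counts pending 'drain' events in state.awaitDrain; pipeOnDrain() restarts the flow once every destination has drained. A minimal sketch of that backpressure path (not part of the patch), assuming the vendored readable-stream 2.x is installed; Node's own streams3 implementation of the same era behaves the same way:

```js
var stream = require('readable-stream');

// Source: pushes 200 short lines, then signals EOF with push(null).
var src = new stream.Readable();
var n = 0;
src._read = function () {
  this.push(n < 200 ? 'line ' + (n++) + '\n' : null);
};

// Destination: tiny highWaterMark plus a slow _write, so dest.write()
// returns false early; ondata then bumps awaitDrain and pauses src,
// and pipeOnDrain resumes the flow when 'drain' fires.
var dest = new stream.Writable({ highWaterMark: 16 });
dest._write = function (chunk, encoding, cb) {
  setTimeout(cb, 5); // pretend the underlying sink is slow
};

dest.on('finish', function () {
  console.log('all 200 lines made it through the slow writer');
});

src.pipe(dest);
```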
@@ -685,7 +663,6 @@ Readable.prototype.unpipe = function(dest) {
// got a match.
state.pipes = null;
state.pipesCount = 0;
- this.removeListener('readable', pipeOnReadable);
state.flowing = false;
if (dest)
dest.emit('unpipe', this);
@@ -700,7 +677,6 @@ Readable.prototype.unpipe = function(dest) {
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
- this.removeListener('readable', pipeOnReadable);
state.flowing = false;
for (var i = 0; i < len; i++)
@@ -728,8 +704,11 @@ Readable.prototype.unpipe = function(dest) {
Readable.prototype.on = function(ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
- if (ev === 'data' && !this._readableState.flowing)
- emitDataEvents(this);
+ // If listening to data, and it has not explicitly been paused,
+ // then call resume to start the flow of data on the next tick.
+ if (ev === 'data' && false !== this._readableState.flowing) {
+ this.resume();
+ }
if (ev === 'readable' && this.readable) {
var state = this._readableState;
@@ -738,7 +717,7 @@ Readable.prototype.on = function(ev, fn) {
state.emittedReadable = false;
state.needReadable = true;
if (!state.reading) {
- this.read(0);
+ processNextTick(nReadingNextTick, this);
} else if (state.length) {
emitReadable(this, state);
}
@@ -749,66 +728,61 @@ Readable.prototype.on = function(ev, fn) {
};
Readable.prototype.addListener = Readable.prototype.on;
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0');
+ self.read(0);
+}
+
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
- emitDataEvents(this);
- this.read(0);
- this.emit('resume');
-};
-
-Readable.prototype.pause = function() {
- emitDataEvents(this, true);
- this.emit('pause');
+ var state = this._readableState;
+ if (!state.flowing) {
+ debug('resume');
+ state.flowing = true;
+ resume(this, state);
+ }
+ return this;
};
-function emitDataEvents(stream, startPaused) {
- var state = stream._readableState;
-
- if (state.flowing) {
- // https://github.com/isaacs/readable-stream/issues/16
- throw new Error('Cannot switch to old mode now.');
+function resume(stream, state) {
+ if (!state.resumeScheduled) {
+ state.resumeScheduled = true;
+ processNextTick(resume_, stream, state);
}
+}
- var paused = startPaused || false;
- var readable = false;
-
- // convert to an old-style stream.
- stream.readable = true;
- stream.pipe = Stream.prototype.pipe;
- stream.on = stream.addListener = Stream.prototype.on;
-
- stream.on('readable', function() {
- readable = true;
-
- var c;
- while (!paused && (null !== (c = stream.read())))
- stream.emit('data', c);
+function resume_(stream, state) {
+ if (!state.reading) {
+ debug('resume read 0');
+ stream.read(0);
+ }
- if (c === null) {
- readable = false;
- stream._readableState.needReadable = true;
- }
- });
+ state.resumeScheduled = false;
+ stream.emit('resume');
+ flow(stream);
+ if (state.flowing && !state.reading)
+ stream.read(0);
+}
- stream.pause = function() {
- paused = true;
+Readable.prototype.pause = function() {
+ debug('call pause flowing=%j', this._readableState.flowing);
+ if (false !== this._readableState.flowing) {
+ debug('pause');
+ this._readableState.flowing = false;
this.emit('pause');
- };
-
- stream.resume = function() {
- paused = false;
- if (readable)
- process.nextTick(function() {
- stream.emit('readable');
- });
- else
- this.read(0);
- this.emit('resume');
- };
+ }
+ return this;
+};
- // now make it start, just in case it hadn't already.
- stream.emit('readable');
+function flow(stream) {
+ var state = stream._readableState;
+ debug('flow', state.flowing);
+ if (state.flowing) {
+ do {
+ var chunk = stream.read();
+ } while (null !== chunk && state.flowing);
+ }
}
// wrap an old-style stream as the async data source.
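
resume(), pause() and the new flow() above replace the old emitDataEvents() compatibility shim: attaching a 'data' listener now simply calls resume(), and pause() only flips state.flowing. A small sketch of those semantics, again assuming readable-stream 2.x:

```js
var Readable = require('readable-stream').Readable;

var r = new Readable();
var i = 0;
r._read = function () {
  this.push(i < 5 ? 'chunk ' + (i++) : null);
};

// The 'data' listener switches the stream into flowing mode on the
// next tick; pause() sets state.flowing = false, and resume()
// schedules resume_() to restart flow().
r.on('data', function (chunk) {
  console.log('data:', chunk.toString());
});
r.on('end', function () {
  console.log('end');
});

r.pause();                 // nothing is emitted while paused
setTimeout(function () {
  r.resume();              // flow restarts, the five chunks arrive, then 'end'
}, 50);
```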
@@ -820,6 +794,7 @@ Readable.prototype.wrap = function(stream) {
var self = this;
stream.on('end', function() {
+ debug('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length)
@@ -830,11 +805,11 @@ Readable.prototype.wrap = function(stream) {
});
stream.on('data', function(chunk) {
+ debug('wrapped data');
if (state.decoder)
chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
- //if (state.objectMode && util.isNullOrUndefined(chunk))
if (state.objectMode && (chunk === null || chunk === undefined))
return;
else if (!state.objectMode && (!chunk || !chunk.length))
@@ -850,11 +825,10 @@ Readable.prototype.wrap = function(stream) {
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
- if (typeof stream[i] === 'function' &&
- typeof this[i] === 'undefined') {
+ if (this[i] === undefined && typeof stream[i] === 'function') {
this[i] = function(method) { return function() {
return stream[method].apply(stream, arguments);
- }}(i);
+ }; }(i);
}
}
@@ -867,6 +841,7 @@ Readable.prototype.wrap = function(stream) {
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function(n) {
+ debug('wrapped _read', n);
if (paused) {
paused = false;
stream.resume();
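
The wrap() hunks above only add debug() calls and tidy the method proxy, but the API itself is easy to forget. A sketch of wrapping an old-style (streams1) emitter, assuming readable-stream 2.x and that wrap() returns the wrapping Readable as it does in the streams2/3 code:

```js
var Readable = require('readable-stream').Readable;
var Stream = require('stream');

// An old-style stream is just an emitter that fires 'data'/'end'
// whenever it likes; wrap() adapts it to the Readable interface and
// pauses/resumes the source for backpressure.
var old = new Stream();
old.readable = true;

var wrapped = new Readable().wrap(old);
wrapped.on('data', function (chunk) {
  console.log('got:', chunk.toString());
});
wrapped.on('end', function () {
  console.log('wrapped stream ended');
});

old.emit('data', new Buffer('hello'));
old.emit('end');
```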
@@ -955,16 +930,18 @@ function endReadable(stream) {
if (state.length > 0)
throw new Error('endReadable called on non-empty stream');
- if (!state.endEmitted && state.calledRead) {
+ if (!state.endEmitted) {
state.ended = true;
- process.nextTick(function() {
- // Check that we didn't get one last unshift.
- if (!state.endEmitted && state.length === 0) {
- state.endEmitted = true;
- stream.readable = false;
- stream.emit('end');
- }
- });
+ processNextTick(endReadableNT, state, stream);
+ }
+}
+
+function endReadableNT(state, stream) {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
}
}
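
Two behavioural changes land in the hunks above: read() now also emits whatever it returns as a 'data' event, and 'end' is always delivered on a later tick through endReadableNT() (the old state.calledRead guard is gone). A quick illustration, assuming readable-stream 2.x:

```js
var Readable = require('readable-stream').Readable;

var r = new Readable();
r.push('abc');
r.push(null);            // EOF

r.on('data', function (chunk) {
  console.log('data event saw:', chunk.toString());
});
r.on('end', function () {
  console.log('end (delivered on a later tick)');
});

// read() returns the buffered chunk and, with this patch, also emits
// it as 'data' before returning; 'end' follows asynchronously.
var chunk = r.read();
console.log('read() returned:', chunk.toString());
```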
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
index eb188df3e..3675d18d9 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
@@ -1,25 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
@@ -62,6 +40,8 @@
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
+'use strict';
+
module.exports = Transform;
var Duplex = require('./_stream_duplex');
@@ -74,7 +54,7 @@ util.inherits = require('inherits');
util.inherits(Transform, Duplex);
-function TransformState(options, stream) {
+function TransformState(stream) {
this.afterTransform = function(er, data) {
return afterTransform(stream, er, data);
};
@@ -117,7 +97,7 @@ function Transform(options) {
Duplex.call(this, options);
- var ts = this._transformState = new TransformState(options, this);
+ this._transformState = new TransformState(this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
@@ -130,8 +110,16 @@ function Transform(options) {
// sync guard flag.
this._readableState.sync = false;
- this.once('finish', function() {
- if ('function' === typeof this._flush)
+ if (options) {
+ if (typeof options.transform === 'function')
+ this._transform = options.transform;
+
+ if (typeof options.flush === 'function')
+ this._flush = options.flush;
+ }
+
+ this.once('prefinish', function() {
+ if (typeof this._flush === 'function')
this._flush(function(er) {
done(stream, er);
});
@@ -197,7 +185,6 @@ function done(stream, er) {
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
- var rs = stream._readableState;
var ts = stream._transformState;
if (ws.length)
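
The Transform changes above accept the transform/flush functions directly as constructor options and run _flush on 'prefinish' instead of 'finish'. A minimal uppercasing filter using that form, assuming readable-stream 2.x:

```js
var Transform = require('readable-stream').Transform;

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    cb(null, chunk.toString().toUpperCase());
  },
  flush: function (cb) {
    // runs on 'prefinish', so the trailer is pushed before 'finish'
    this.push('-- end of input --\n');
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```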
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
index 4bdaa4fa4..b23295201 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
@@ -1,31 +1,17 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
+'use strict';
+
module.exports = Writable;
/*<replacement>*/
+var processNextTick = require('process-nextick-args');
+/*</replacement>*/
+
+
+/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
@@ -37,28 +23,49 @@ var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
-var Stream = require('stream');
+
+
+/*<replacement>*/
+var Stream;
+(function (){try{
+ Stream = require('st' + 'ream');
+}catch(_){}finally{
+ if (!Stream)
+ Stream = require('events').EventEmitter;
+}}())
+/*</replacement>*/
+
+var Buffer = require('buffer').Buffer;
util.inherits(Writable, Stream);
+function nop() {}
+
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
+ this.next = null;
}
function WritableState(options, stream) {
+ var Duplex = require('./_stream_duplex');
+
options = options || {};
+ // object stream flag to indicate whether or not this stream
+ // contains buffers or objects.
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex)
+ this.objectMode = this.objectMode || !!options.writableObjectMode;
+
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
-
- // object stream flag to indicate whether or not this stream
- // contains buffers or objects.
- this.objectMode = !!options.objectMode;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
@@ -90,8 +97,11 @@ function WritableState(options, stream) {
// a flag to see when we're in the middle of a write.
this.writing = false;
+ // when true all writes will be buffered until .uncork() call
+ this.corked = 0;
+
// a flag to be able to tell if the onwrite cb is called immediately,
- // or on a later tick. We set this to true at first, becuase any
+ // or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
@@ -112,12 +122,41 @@ function WritableState(options, stream) {
// the amount that is being written when _write is called.
this.writelen = 0;
- this.buffer = [];
+ this.bufferedRequest = null;
+ this.lastBufferedRequest = null;
+
+ // number of pending user-supplied write callbacks
+ // this must be 0 before 'finish' can be emitted
+ this.pendingcb = 0;
+
+ // emit prefinish if the only thing we're waiting for is _write cbs
+ // This is relevant for synchronous Transform streams
+ this.prefinished = false;
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
}
+WritableState.prototype.getBuffer = function writableStateGetBuffer() {
+ var current = this.bufferedRequest;
+ var out = [];
+ while (current) {
+ out.push(current);
+ current = current.next;
+ }
+ return out;
+};
+
+(function (){try {
+Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: require('util-deprecate')(function() {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use ' +
+ '_writableState.getBuffer() instead.')
+});
+}catch(_){}}());
+
+
function Writable(options) {
var Duplex = require('./_stream_duplex');
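
The hunk above keeps the old _writableState.buffer property alive by wrapping a getter with util-deprecate. The same pattern in isolation, using a made-up LegacyQueue class purely for illustration:

```js
var deprecate = require('util-deprecate');

function LegacyQueue() {
  this._items = [];
}

LegacyQueue.prototype.getBuffer = function () {
  return this._items.slice();
};

// Old callers keep working, but the first access prints a warning.
Object.defineProperty(LegacyQueue.prototype, 'buffer', {
  get: deprecate(function () {
    return this.getBuffer();
  }, 'LegacyQueue.buffer is deprecated. Use getBuffer() instead.')
});

var q = new LegacyQueue();
q._items.push('a', 'b');
console.log(q.buffer);   // warns once, then logs [ 'a', 'b' ]
```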
@@ -131,6 +170,14 @@ function Writable(options) {
// legacy.
this.writable = true;
+ if (options) {
+ if (typeof options.write === 'function')
+ this._write = options.write;
+
+ if (typeof options.writev === 'function')
+ this._writev = options.writev;
+ }
+
Stream.call(this);
}
@@ -140,13 +187,11 @@ Writable.prototype.pipe = function() {
};
-function writeAfterEnd(stream, state, cb) {
+function writeAfterEnd(stream, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
- process.nextTick(function() {
- cb(er);
- });
+ processNextTick(cb, er);
}
// If we get something that is not a buffer, string, null, or undefined,
@@ -156,16 +201,15 @@ function writeAfterEnd(stream, state, cb) {
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
var valid = true;
- if (!Buffer.isBuffer(chunk) &&
- 'string' !== typeof chunk &&
+
+ if (!(Buffer.isBuffer(chunk)) &&
+ typeof chunk !== 'string' &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
- process.nextTick(function() {
- cb(er);
- });
+ processNextTick(cb, er);
valid = false;
}
return valid;
@@ -186,16 +230,50 @@ Writable.prototype.write = function(chunk, encoding, cb) {
encoding = state.defaultEncoding;
if (typeof cb !== 'function')
- cb = function() {};
+ cb = nop;
if (state.ended)
- writeAfterEnd(this, state, cb);
- else if (validChunk(this, state, chunk, cb))
+ writeAfterEnd(this, cb);
+ else if (validChunk(this, state, chunk, cb)) {
+ state.pendingcb++;
ret = writeOrBuffer(this, state, chunk, encoding, cb);
+ }
return ret;
};
+Writable.prototype.cork = function() {
+ var state = this._writableState;
+
+ state.corked++;
+};
+
+Writable.prototype.uncork = function() {
+ var state = this._writableState;
+
+ if (state.corked) {
+ state.corked--;
+
+ if (!state.writing &&
+ !state.corked &&
+ !state.finished &&
+ !state.bufferProcessing &&
+ state.bufferedRequest)
+ clearBuffer(this, state);
+ }
+};
+
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+ // node::ParseEncoding() requires lower case.
+ if (typeof encoding === 'string')
+ encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
+'ucs2', 'ucs-2','utf16le', 'utf-16le', 'raw']
+.indexOf((encoding + '').toLowerCase()) > -1))
+ throw new TypeError('Unknown encoding: ' + encoding);
+ this._writableState.defaultEncoding = encoding;
+};
+
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode &&
state.decodeStrings !== false &&
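
setDefaultEncoding() above is new in this version; with decodeStrings left at its default, string chunks are converted to Buffers using the configured encoding before they reach _write(). A small sketch, assuming readable-stream 2.x:

```js
var Writable = require('readable-stream').Writable;

var w = new Writable({
  write: function (chunk, encoding, cb) {
    console.log(chunk.length + ' bytes, encoding "' + encoding + '"');
    cb();
  }
});

w.setDefaultEncoding('utf16le');
w.write('hi');   // logs: 4 bytes, encoding "buffer"
w.end();
```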
@@ -210,6 +288,7 @@ function decodeChunk(state, chunk, encoding) {
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
+
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
@@ -221,28 +300,37 @@ function writeOrBuffer(stream, state, chunk, encoding, cb) {
if (!ret)
state.needDrain = true;
- if (state.writing)
- state.buffer.push(new WriteReq(chunk, encoding, cb));
- else
- doWrite(stream, state, len, chunk, encoding, cb);
+ if (state.writing || state.corked) {
+ var last = state.lastBufferedRequest;
+ state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
+ if (last) {
+ last.next = state.lastBufferedRequest;
+ } else {
+ state.bufferedRequest = state.lastBufferedRequest;
+ }
+ } else {
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ }
return ret;
}
-function doWrite(stream, state, len, chunk, encoding, cb) {
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
- stream._write(chunk, encoding, state.onwrite);
+ if (writev)
+ stream._writev(chunk, state.onwrite);
+ else
+ stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
+ --state.pendingcb;
if (sync)
- process.nextTick(function() {
- cb(er);
- });
+ processNextTick(cb, er);
else
cb(er);
@@ -268,15 +356,17 @@ function onwrite(stream, er) {
onwriteError(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
- var finished = needFinish(stream, state);
+ var finished = needFinish(state);
- if (!finished && !state.bufferProcessing && state.buffer.length)
+ if (!finished &&
+ !state.corked &&
+ !state.bufferProcessing &&
+ state.bufferedRequest) {
clearBuffer(stream, state);
+ }
if (sync) {
- process.nextTick(function() {
- afterWrite(stream, state, finished, cb);
- });
+ processNextTick(afterWrite, stream, state, finished, cb);
} else {
afterWrite(stream, state, finished, cb);
}
@@ -286,9 +376,9 @@ function onwrite(stream, er) {
function afterWrite(stream, state, finished, cb) {
if (!finished)
onwriteDrain(stream, state);
+ state.pendingcb--;
cb();
- if (finished)
- finishMaybe(stream, state);
+ finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
@@ -305,37 +395,62 @@ function onwriteDrain(stream, state) {
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
+ var entry = state.bufferedRequest;
+
+ if (stream._writev && entry && entry.next) {
+ // Fast case, write everything using _writev()
+ var buffer = [];
+ var cbs = [];
+ while (entry) {
+ cbs.push(entry.callback);
+ buffer.push(entry);
+ entry = entry.next;
+ }
- for (var c = 0; c < state.buffer.length; c++) {
- var entry = state.buffer[c];
- var chunk = entry.chunk;
- var encoding = entry.encoding;
- var cb = entry.callback;
- var len = state.objectMode ? 1 : chunk.length;
-
- doWrite(stream, state, len, chunk, encoding, cb);
-
- // if we didn't call the onwrite immediately, then
- // it means that we need to wait until it does.
- // also, that means that the chunk and cb are currently
- // being processed, so move the buffer counter past them.
- if (state.writing) {
- c++;
- break;
+ // count the one we are adding, as well.
+ // TODO(isaacs) clean this up
+ state.pendingcb++;
+ state.lastBufferedRequest = null;
+ doWrite(stream, state, true, state.length, buffer, '', function(err) {
+ for (var i = 0; i < cbs.length; i++) {
+ state.pendingcb--;
+ cbs[i](err);
+ }
+ });
+
+ // Clear buffer
+ } else {
+ // Slow case, write chunks one-by-one
+ while (entry) {
+ var chunk = entry.chunk;
+ var encoding = entry.encoding;
+ var cb = entry.callback;
+ var len = state.objectMode ? 1 : chunk.length;
+
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ entry = entry.next;
+ // if we didn't call the onwrite immediately, then
+ // it means that we need to wait until it does.
+ // also, that means that the chunk and cb are currently
+ // being processed, so move the buffer counter past them.
+ if (state.writing) {
+ break;
+ }
}
- }
+ if (entry === null)
+ state.lastBufferedRequest = null;
+ }
+ state.bufferedRequest = entry;
state.bufferProcessing = false;
- if (c < state.buffer.length)
- state.buffer = state.buffer.slice(c);
- else
- state.buffer.length = 0;
}
Writable.prototype._write = function(chunk, encoding, cb) {
cb(new Error('not implemented'));
};
+Writable.prototype._writev = null;
+
Writable.prototype.end = function(chunk, encoding, cb) {
var state = this._writableState;
@@ -348,27 +463,46 @@ Writable.prototype.end = function(chunk, encoding, cb) {
encoding = null;
}
- if (typeof chunk !== 'undefined' && chunk !== null)
+ if (chunk !== null && chunk !== undefined)
this.write(chunk, encoding);
+ // .end() fully uncorks
+ if (state.corked) {
+ state.corked = 1;
+ this.uncork();
+ }
+
// ignore unnecessary end() calls.
if (!state.ending && !state.finished)
endWritable(this, state, cb);
};
-function needFinish(stream, state) {
+function needFinish(state) {
return (state.ending &&
state.length === 0 &&
+ state.bufferedRequest === null &&
!state.finished &&
!state.writing);
}
+function prefinish(stream, state) {
+ if (!state.prefinished) {
+ state.prefinished = true;
+ stream.emit('prefinish');
+ }
+}
+
function finishMaybe(stream, state) {
- var need = needFinish(stream, state);
+ var need = needFinish(state);
if (need) {
- state.finished = true;
- stream.emit('finish');
+ if (state.pendingcb === 0) {
+ prefinish(stream, state);
+ state.finished = true;
+ stream.emit('finish');
+ } else {
+ prefinish(stream, state);
+ }
}
return need;
}
@@ -378,7 +512,7 @@ function endWritable(stream, state, cb) {
finishMaybe(stream, state);
if (cb) {
if (state.finished)
- process.nextTick(cb);
+ processNextTick(cb);
else
stream.once('finish', cb);
}
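
cork()/uncork() and the optional _writev() added above let a stream batch everything buffered while corked into a single underlying write (the writev fast case in clearBuffer()). A sketch using the new constructor options, assuming readable-stream 2.x:

```js
var Writable = require('readable-stream').Writable;

var w = new Writable({
  write: function (chunk, encoding, cb) {
    console.log('write():', chunk.toString());
    cb();
  },
  writev: function (chunks, cb) {
    console.log('writev():', chunks.length, 'chunks in one batch');
    cb();
  }
});

w.cork();
w.write('a');
w.write('b');
w.write('c');    // all three land in the linked buffer list
w.uncork();      // -> writev(): 3 chunks in one batch
w.end('d');      // a lone chunk still goes through write()
```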
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/core-util-is/package.json
index b67333380..466dfdfe0 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/core-util-is/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/core-util-is/package.json
@@ -29,9 +29,25 @@
},
"readme": "# core-util-is\n\nThe `util.is*` functions introduced in Node v0.12.\n",
"readmeFilename": "README.md",
- "homepage": "https://github.com/isaacs/core-util-is#readme",
+ "homepage": "https://github.com/isaacs/core-util-is",
"_id": "core-util-is@1.0.1",
+ "dist": {
+ "shasum": "6b07085aef9a3ccac6ee53bf9d3df0c1521a5538",
+ "tarball": "http://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz"
+ },
+ "_from": "core-util-is@>=1.0.0 <1.1.0",
+ "_npmVersion": "1.3.23",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "directories": {},
"_shasum": "6b07085aef9a3ccac6ee53bf9d3df0c1521a5538",
- "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz",
- "_from": "core-util-is@>=1.0.0 <1.1.0"
+ "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json
index fb1eb3786..19228ab6f 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json
@@ -26,13 +26,28 @@
"url": "http://juliangruber.com"
},
"license": "MIT",
- "readme": "\n# isarray\n\n`Array#isArray` for older browsers.\n\n## Usage\n\n```js\nvar isArray = require('isarray');\n\nconsole.log(isArray([])); // => true\nconsole.log(isArray({})); // => false\n```\n\n## Installation\n\nWith [npm](http://npmjs.org) do\n\n```bash\n$ npm install isarray\n```\n\nThen bundle for the browser with\n[browserify](https://github.com/substack/browserify).\n\nWith [component](http://component.io) do\n\n```bash\n$ component install juliangruber/isarray\n```\n\n## License\n\n(MIT)\n\nCopyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n",
- "readmeFilename": "README.md",
- "bugs": {
- "url": "https://github.com/juliangruber/isarray/issues"
- },
"_id": "isarray@0.0.1",
+ "dist": {
+ "shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
+ "tarball": "http://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
+ },
+ "_from": "isarray@0.0.1",
+ "_npmVersion": "1.2.18",
+ "_npmUser": {
+ "name": "juliangruber",
+ "email": "julian@juliangruber.com"
+ },
+ "maintainers": [
+ {
+ "name": "juliangruber",
+ "email": "julian@juliangruber.com"
+ }
+ ],
+ "directories": {},
"_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
"_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "_from": "isarray@0.0.1"
+ "bugs": {
+ "url": "https://github.com/juliangruber/isarray/issues"
+ },
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml
new file mode 100644
index 000000000..5ac988553
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml
@@ -0,0 +1,7 @@
+language: node_js
+node_js:
+ - "0.8"
+ - "0.10"
+ - "0.11"
+ - "0.12"
+ - "iojs"
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js
new file mode 100644
index 000000000..3eb2f33d0
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js
@@ -0,0 +1,13 @@
+'use strict';
+module.exports = nextTick;
+
+function nextTick(fn) {
+ var args = new Array(arguments.length - 1);
+ var i = 0;
+ while (i < arguments.length) {
+ args[i++] = arguments[i];
+ }
+ process.nextTick(function afterTick() {
+ fn.apply(null, args);
+ });
+}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json
new file mode 100644
index 000000000..9be9ed5b5
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json
@@ -0,0 +1,45 @@
+{
+ "name": "process-nextick-args",
+ "version": "1.0.2",
+ "description": "process.nextTick but always with args",
+ "main": "index.js",
+ "scripts": {
+ "test": "node test.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git"
+ },
+ "author": "",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/calvinmetcalf/process-nextick-args/issues"
+ },
+ "homepage": "https://github.com/calvinmetcalf/process-nextick-args",
+ "devDependencies": {
+ "tap": "~0.2.6"
+ },
+ "gitHead": "295707643b4ed6667c1afb71ffb6101669b5dac2",
+ "_id": "process-nextick-args@1.0.2",
+ "_shasum": "8b4d3fc586668bd5b6573e732edf2b71c1c1d8aa",
+ "_from": "process-nextick-args@>=1.0.0 <1.1.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
+ "_npmUser": {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ },
+ "dist": {
+ "shasum": "8b4d3fc586668bd5b6573e732edf2b71c1c1d8aa",
+ "tarball": "http://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/readme.md
new file mode 100644
index 000000000..78e7cfaeb
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/readme.md
@@ -0,0 +1,18 @@
+process-nextick-args
+=====
+
+[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args)
+
+```bash
+npm install --save process-nextick-args
+```
+
+Always be able to pass arguments to process.nextTick, no matter the platform
+
+```js
+var nextTick = require('process-nextick-args');
+
+nextTick(function (a, b, c) {
+ console.log(a, b, c);
+}, 'step', 3, 'profit');
+```
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/test.js
new file mode 100644
index 000000000..729f775ff
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/test.js
@@ -0,0 +1,17 @@
+var test = require("tap").test;
+var nextTick = require('./');
+
+test('should work', function (t) {
+ t.plan(5);
+ nextTick(function (a) {
+ t.ok(a);
+ nextTick(function (thing) {
+ t.equals(thing, 7);
+ }, 7);
+ }, true);
+ nextTick(function (a, b, c) {
+ t.equals(a, 'step');
+ t.equals(b, 3);
+ t.equals(c, 'profit');
+ }, 'step', 3, 'profit');
+});
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/string_decoder/package.json
index ee7070235..0364d54ba 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/string_decoder/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/string_decoder/package.json
@@ -22,13 +22,33 @@
"browserify"
],
"license": "MIT",
- "readme": "**string_decoder.js** (`require('string_decoder')`) from Node.js core\n\nCopyright Joyent, Inc. and other Node contributors. See LICENCE file for details.\n\nVersion numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**\n\nThe *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.",
- "readmeFilename": "README.md",
+ "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0",
"bugs": {
"url": "https://github.com/rvagg/string_decoder/issues"
},
"_id": "string_decoder@0.10.31",
"_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
+ "_from": "string_decoder@>=0.10.0 <0.11.0",
+ "_npmVersion": "1.4.23",
+ "_npmUser": {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ "maintainers": [
+ {
+ "name": "substack",
+ "email": "mail@substack.net"
+ },
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ }
+ ],
+ "dist": {
+ "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
+ "tarball": "http://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "_from": "string_decoder@>=0.10.0 <0.11.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/History.md
new file mode 100644
index 000000000..ec010299b
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/History.md
@@ -0,0 +1,11 @@
+
+1.0.1 / 2014-11-25
+==================
+
+ * browser: use `console.warn()` for deprecation calls
+ * browser: more jsdocs
+
+1.0.0 / 2014-04-30
+==================
+
+ * initial commit
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/LICENSE
new file mode 100644
index 000000000..6a60e8c22
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/README.md
new file mode 100644
index 000000000..75622fa7c
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/README.md
@@ -0,0 +1,53 @@
+util-deprecate
+==============
+### The Node.js `util.deprecate()` function with browser support
+
+In Node.js, this module simply re-exports the `util.deprecate()` function.
+
+In the web browser (i.e. via browserify), a browser-specific implementation
+of the `util.deprecate()` function is used.
+
+
+## API
+
+A `deprecate()` function is the only thing exposed by this module.
+
+``` javascript
+// setup:
+exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead');
+
+
+// users see:
+foo();
+// foo() is deprecated, use bar() instead
+foo();
+foo();
+```
+
+
+## License
+
+(The MIT License)
+
+Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/browser.js
new file mode 100644
index 000000000..55fa5a4bc
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/browser.js
@@ -0,0 +1,62 @@
+
+/**
+ * Module exports.
+ */
+
+module.exports = deprecate;
+
+/**
+ * Mark that a method should not be used.
+ * Returns a modified function which warns once by default.
+ *
+ * If `localStorage.noDeprecation = true` is set, then it is a no-op.
+ *
+ * If `localStorage.throwDeprecation = true` is set, then deprecated functions
+ * will throw an Error when invoked.
+ *
+ * If `localStorage.traceDeprecation = true` is set, then deprecated functions
+ * will invoke `console.trace()` instead of `console.error()`.
+ *
+ * @param {Function} fn - the function to deprecate
+ * @param {String} msg - the string to print to the console when `fn` is invoked
+ * @returns {Function} a new "deprecated" version of `fn`
+ * @api public
+ */
+
+function deprecate (fn, msg) {
+ if (config('noDeprecation')) {
+ return fn;
+ }
+
+ var warned = false;
+ function deprecated() {
+ if (!warned) {
+ if (config('throwDeprecation')) {
+ throw new Error(msg);
+ } else if (config('traceDeprecation')) {
+ console.trace(msg);
+ } else {
+ console.warn(msg);
+ }
+ warned = true;
+ }
+ return fn.apply(this, arguments);
+ }
+
+ return deprecated;
+}
+
+/**
+ * Checks `localStorage` for boolean values for the given `name`.
+ *
+ * @param {String} name
+ * @returns {Boolean}
+ * @api private
+ */
+
+function config (name) {
+ if (!global.localStorage) return false;
+ var val = global.localStorage[name];
+ if (null == val) return false;
+ return String(val).toLowerCase() === 'true';
+}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/node.js
new file mode 100644
index 000000000..5e6fcff5d
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/node.js
@@ -0,0 +1,6 @@
+
+/**
+ * For Node.js, simply re-export the core `util.deprecate` function.
+ */
+
+module.exports = require('util').deprecate;
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/package.json
new file mode 100644
index 000000000..ea487da0e
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/util-deprecate/package.json
@@ -0,0 +1,53 @@
+{
+ "name": "util-deprecate",
+ "version": "1.0.1",
+ "description": "The Node.js `util.deprecate()` function with browser support",
+ "main": "node.js",
+ "browser": "browser.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/TooTallNate/util-deprecate.git"
+ },
+ "keywords": [
+ "util",
+ "deprecate",
+ "browserify",
+ "browser",
+ "node"
+ ],
+ "author": {
+ "name": "Nathan Rajlich",
+ "email": "nathan@tootallnate.net",
+ "url": "http://n8.io/"
+ },
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/TooTallNate/util-deprecate/issues"
+ },
+ "homepage": "https://github.com/TooTallNate/util-deprecate",
+ "gitHead": "6e923f7d98a0afbe5b9c7db9d0f0029c1936746c",
+ "_id": "util-deprecate@1.0.1",
+ "_shasum": "3556a3d13c4c6aa7983d7e2425478197199b7881",
+ "_from": "util-deprecate@>=1.0.1 <1.1.0",
+ "_npmVersion": "1.4.28",
+ "_npmUser": {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ },
+ "maintainers": [
+ {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ }
+ ],
+ "dist": {
+ "shasum": "3556a3d13c4c6aa7983d7e2425478197199b7881",
+ "tarball": "http://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.1.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.1.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json
index d4c12cab4..70ad998ca 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json
@@ -1,23 +1,28 @@
{
"name": "readable-stream",
- "version": "1.0.33",
- "description": "Streams2, a user-land copy of the stream library from Node.js v0.10.x",
+ "version": "2.0.2",
+ "description": "Streams3, a user-land copy of the stream library from iojs v2.x",
"main": "readable.js",
"dependencies": {
"core-util-is": "~1.0.0",
+ "inherits": "~2.0.1",
"isarray": "0.0.1",
+ "process-nextick-args": "~1.0.0",
"string_decoder": "~0.10.x",
- "inherits": "~2.0.1"
+ "util-deprecate": "~1.0.1"
},
"devDependencies": {
- "tap": "~0.2.6"
+ "tap": "~0.2.6",
+ "tape": "~4.0.0",
+ "zuul": "~3.0.0"
},
"scripts": {
- "test": "tap test/simple/*.js"
+ "test": "tap test/parallel/*.js",
+ "browser": "zuul --browser-name $BROWSER_NAME --browser-version $BROWSER_VERSION -- test/browser.js"
},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/readable-stream.git"
+ "url": "git://github.com/nodejs/readable-stream.git"
},
"keywords": [
"readable",
@@ -27,20 +32,44 @@
"browser": {
"util": false
},
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
"license": "MIT",
- "readme": "# readable-stream\n\n***Node-core streams for userland***\n\n[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)\n[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)\n\nThis package is a mirror of the Streams2 and Streams3 implementations in Node-core.\n\nIf you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *\"stream\"* module in Node-core.\n\n**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.\n\n**readable-stream** uses proper patch-level versioning so if you pin to `\"~1.0.0\"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `\"~1.1.0\"`\n\n",
- "readmeFilename": "README.md",
+ "gitHead": "1a70134a71196eeabb5e27bc7580faaa68d30513",
"bugs": {
- "url": "https://github.com/isaacs/readable-stream/issues"
+ "url": "https://github.com/nodejs/readable-stream/issues"
+ },
+ "homepage": "https://github.com/nodejs/readable-stream#readme",
+ "_id": "readable-stream@2.0.2",
+ "_shasum": "bec81beae8cf455168bc2e5b2b31f5bcfaed9b1b",
+ "_from": "readable-stream@>=2.0.0 <2.1.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
+ "_npmUser": {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
},
- "homepage": "https://github.com/isaacs/readable-stream#readme",
- "_id": "readable-stream@1.0.33",
- "_shasum": "3a360dd66c1b1d7fd4705389860eda1d0f61126c",
- "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.33.tgz",
- "_from": "readable-stream@>=1.0.26 <1.1.0"
+ "dist": {
+ "shasum": "bec81beae8cf455168bc2e5b2b31f5bcfaed9b1b",
+ "tarball": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ },
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/readable.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/readable.js
index 8b5337b5c..6222a5798 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/readable.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/readable.js
@@ -1,6 +1,10 @@
-var Stream = require('stream'); // hack to fix a circular dependency issue when used with browserify
+var Stream = (function (){
+ try {
+ return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify
+ } catch(_){}
+}());
exports = module.exports = require('./lib/_stream_readable.js');
-exports.Stream = Stream;
+exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
diff --git a/deps/npm/node_modules/request/node_modules/bl/package.json b/deps/npm/node_modules/request/node_modules/bl/package.json
index d6930087e..151384703 100644
--- a/deps/npm/node_modules/request/node_modules/bl/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/package.json
@@ -1,6 +1,6 @@
{
"name": "bl",
- "version": "0.9.4",
+ "version": "1.0.0",
"description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
"main": "bl.js",
"scripts": {
@@ -25,7 +25,7 @@
],
"license": "MIT",
"dependencies": {
- "readable-stream": "~1.0.26"
+ "readable-stream": "~2.0.0"
},
"devDependencies": {
"tape": "~2.12.3",
@@ -33,13 +33,30 @@
"faucet": "~0.0.1",
"brtapsauce": "~0.3.0"
},
- "readme": "# bl *(BufferList)*\n\n**A Node.js Buffer list collector, reader and streamer thingy.**\n\n[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)\n[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/)\n\n**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!\n\nThe original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.\n\n```js\nconst BufferList = require('bl')\n\nvar bl = new BufferList()\nbl.append(new Buffer('abcd'))\nbl.append(new Buffer('efg'))\nbl.append('hi') // bl will also accept & convert Strings\nbl.append(new Buffer('j'))\nbl.append(new Buffer([ 0x3, 0x4 ]))\n\nconsole.log(bl.length) // 12\n\nconsole.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'\nconsole.log(bl.slice(3, 10).toString('ascii')) // 'defghij'\nconsole.log(bl.slice(3, 6).toString('ascii')) // 'def'\nconsole.log(bl.slice(3, 8).toString('ascii')) // 'defgh'\nconsole.log(bl.slice(5, 10).toString('ascii')) // 'fghij'\n\n// or just use toString!\nconsole.log(bl.toString()) // 'abcdefghij\\u0003\\u0004'\nconsole.log(bl.toString('ascii', 3, 8)) // 'defgh'\nconsole.log(bl.toString('ascii', 5, 10)) // 'fghij'\n\n// other standard Buffer readables\nconsole.log(bl.readUInt16BE(10)) // 0x0304\nconsole.log(bl.readUInt16LE(10)) // 0x0403\n```\n\nGive it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:\n\n```js\nconst bl = require('bl')\n , fs = require('fs')\n\nfs.createReadStream('README.md')\n .pipe(bl(function (err, data) { // note 'new' isn't strictly required\n // `data` is a complete Buffer object containing the full data\n console.log(data.toString())\n }))\n```\n\nNote that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. 
If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.\n\nOr to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):\n```js\nconst hyperquest = require('hyperquest')\n , bl = require('bl')\n , url = 'https://raw.github.com/rvagg/bl/master/README.md'\n\nhyperquest(url).pipe(bl(function (err, data) {\n console.log(data.toString())\n}))\n```\n\nOr, use it as a readable stream to recompose a list of Buffers to an output source:\n\n```js\nconst BufferList = require('bl')\n , fs = require('fs')\n\nvar bl = new BufferList()\nbl.append(new Buffer('abcd'))\nbl.append(new Buffer('efg'))\nbl.append(new Buffer('hi'))\nbl.append(new Buffer('j'))\n\nbl.pipe(fs.createWriteStream('gibberish.txt'))\n```\n\n## API\n\n * <a href=\"#ctor\"><code><b>new BufferList([ callback ])</b></code></a>\n * <a href=\"#length\"><code>bl.<b>length</b></code></a>\n * <a href=\"#append\"><code>bl.<b>append(buffer)</b></code></a>\n * <a href=\"#get\"><code>bl.<b>get(index)</b></code></a>\n * <a href=\"#slice\"><code>bl.<b>slice([ start[, end ] ])</b></code></a>\n * <a href=\"#copy\"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>\n * <a href=\"#duplicate\"><code>bl.<b>duplicate()</b></code></a>\n * <a href=\"#consume\"><code>bl.<b>consume(bytes)</b></code></a>\n * <a href=\"#toString\"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>\n * <a href=\"#readXX\"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>\n * <a href=\"#streams\">Streams</a>\n\n--------------------------------------------------------\n<a name=\"ctor\"></a>\n### new BufferList([ callback | buffer | buffer array ])\nThe constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.\n\nNormally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object.\n\n`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with:\n\n```js\nvar bl = require('bl')\nvar myinstance = bl()\n\n// equivilant to:\n\nvar BufferList = require('bl')\nvar myinstance = new BufferList()\n```\n\n--------------------------------------------------------\n<a name=\"length\"></a>\n### bl.length\nGet the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. 
Should accurately represent the total number of bytes that can be read from the list.\n\n--------------------------------------------------------\n<a name=\"append\"></a>\n### bl.append(buffer)\n`append(buffer)` adds an additional buffer or BufferList to the internal list.\n\n--------------------------------------------------------\n<a name=\"get\"></a>\n### bl.get(index)\n`get()` will return the byte at the specified index.\n\n--------------------------------------------------------\n<a name=\"slice\"></a>\n### bl.slice([ start, [ end ] ])\n`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.\n\nIf the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.\n\n--------------------------------------------------------\n<a name=\"copy\"></a>\n### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])\n`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.\n\n--------------------------------------------------------\n<a name=\"duplicate\"></a>\n### bl.duplicate()\n`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list.Example:\n\n```js\nvar bl = new BufferList()\n\nbl.append('hello')\nbl.append(' world')\nbl.append('\\n')\n\nbl.duplicate().pipe(process.stdout, { end: false })\n\nconsole.log(bl.toString())\n```\n\n--------------------------------------------------------\n<a name=\"consume\"></a>\n### bl.consume(bytes)\n`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers&mdash;initial offsets will be calculated accordingly in order to give you a consistent view of the data.\n\n--------------------------------------------------------\n<a name=\"toString\"></a>\n### bl.toString([encoding, [ start, [ end ]]])\n`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. 
See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.\n\n--------------------------------------------------------\n<a name=\"readXX\"></a>\n### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()\n\nAll of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.\n\nSee the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.\n\n--------------------------------------------------------\n<a name=\"streams\"></a>\n### Streams\n**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.\n\n--------------------------------------------------------\n\n## Contributors\n\n**bl** is brought to you by the following hackers:\n\n * [Rod Vagg](https://github.com/rvagg)\n * [Matteo Collina](https://github.com/mcollina)\n * [Jarett Cruger](https://github.com/jcrugzz)\n\n=======\n\n<a name=\"license\"></a>\n## License &amp; copyright\n\nCopyright (c) 2013-2014 bl contributors (listed above).\n\nbl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.\n",
- "readmeFilename": "README.md",
+ "gitHead": "1794938be6697a6d1e02cd942a4eea59b353347a",
"bugs": {
"url": "https://github.com/rvagg/bl/issues"
},
- "_id": "bl@0.9.4",
- "_shasum": "4702ddf72fbe0ecd82787c00c113aea1935ad0e7",
- "_resolved": "https://registry.npmjs.org/bl/-/bl-0.9.4.tgz",
- "_from": "bl@>=0.9.0 <0.10.0"
+ "_id": "bl@1.0.0",
+ "_shasum": "ada9a8a89a6d7ac60862f7dec7db207873e0c3f5",
+ "_from": "bl@>=1.0.0 <1.1.0",
+ "_npmVersion": "2.9.0",
+ "_nodeVersion": "2.0.1-nightly20150618d2e4e03444",
+ "_npmUser": {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ "maintainers": [
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ }
+ ],
+ "dist": {
+ "shasum": "ada9a8a89a6d7ac60862f7dec7db207873e0c3f5",
+ "tarball": "http://registry.npmjs.org/bl/-/bl-1.0.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/bl/-/bl-1.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
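The readme removed above documents `append()`, `consume()` and `duplicate()` but never combines them in one example; a minimal sketch based on that description (the string contents are illustrative only):

```js
var BufferList = require('bl')

var bl = new BufferList()
bl.append(new Buffer('abcdef'))
bl.append(new Buffer('ghij'))

// consume() shifts bytes off the front of the list without copying the rest
bl.consume(3)
console.log(bl.toString('ascii')) // 'defghij'

// duplicate() is a shallow copy, so the original survives a consume()/pipe()
bl.duplicate().consume(4)
console.log(bl.length) // 7 -- the original list is untouched
```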
diff --git a/deps/npm/node_modules/request/node_modules/caseless/LICENSE b/deps/npm/node_modules/request/node_modules/caseless/LICENSE
new file mode 100644
index 000000000..61789f4a4
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/caseless/LICENSE
@@ -0,0 +1,28 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+1. Definitions.
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/caseless/index.js b/deps/npm/node_modules/request/node_modules/caseless/index.js
index ba711f68d..d86a70eca 100644
--- a/deps/npm/node_modules/request/node_modules/caseless/index.js
+++ b/deps/npm/node_modules/request/node_modules/caseless/index.js
@@ -49,6 +49,7 @@ module.exports = function (dict) {return new Caseless(dict)}
module.exports.httpify = function (resp, headers) {
var c = new Caseless(headers)
resp.setHeader = function (key, value, clobber) {
+ if (typeof value === 'undefined') return
return c.set(key, value, clobber)
}
resp.hasHeader = function (key) {
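The one-line guard added above makes `setHeader` a no-op for `undefined` values. For context, a small sketch of how the caseless wrapper is used, assuming the `set`/`get`/`has` behaviour from the caseless README; `resp` here is a bare stand-in object, not a real HTTP response:

```js
var caseless = require('caseless')

var c = caseless()
c.set('Content-Type', 'application/json')
console.log(c.get('content-type'))            // 'application/json' -- lookups ignore case
console.log(c.has('CONTENT-TYPE') !== false)  // true -- has() is case-insensitive too

// With the patched httpify() above, setting an undefined value becomes a no-op
// instead of storing it as a header value.
var resp = {}
caseless.httpify(resp, {})
resp.setHeader('x-empty', undefined)          // returns early after this patch
console.log(resp.hasHeader('x-empty'))        // false
```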
diff --git a/deps/npm/node_modules/request/node_modules/caseless/package.json b/deps/npm/node_modules/request/node_modules/caseless/package.json
index d4c0177b1..2cd79ea01 100644
--- a/deps/npm/node_modules/request/node_modules/caseless/package.json
+++ b/deps/npm/node_modules/request/node_modules/caseless/package.json
@@ -1,6 +1,6 @@
{
"name": "caseless",
- "version": "0.10.0",
+ "version": "0.11.0",
"description": "Caseless object set/get/has, very useful when working with HTTP headers.",
"main": "index.js",
"scripts": {
@@ -20,22 +20,23 @@
"name": "Mikeal Rogers",
"email": "mikeal.rogers@gmail.com"
},
- "license": "BSD",
+ "license": "Apache-2.0",
"bugs": {
"url": "https://github.com/mikeal/caseless/issues"
},
"devDependencies": {
"tape": "^2.10.2"
},
- "gitHead": "9c85efef6c37d48923b79f3f282441414dd691c9",
- "homepage": "https://github.com/mikeal/caseless",
- "_id": "caseless@0.10.0",
- "_shasum": "ed6b2719adcd1fd18f58dc081c0f1a5b43963909",
- "_from": "caseless@>=0.10.0 <0.11.0",
- "_npmVersion": "1.4.14",
+ "gitHead": "c578232a02cc2b46b6da8851caf57fdbfac89ff5",
+ "homepage": "https://github.com/mikeal/caseless#readme",
+ "_id": "caseless@0.11.0",
+ "_shasum": "715b96ea9841593cc33067923f5ec60ebda4f7d7",
+ "_from": "caseless@>=0.11.0 <0.12.0",
+ "_npmVersion": "2.8.3",
+ "_nodeVersion": "1.8.1",
"_npmUser": {
- "name": "nylen",
- "email": "jnylen@gmail.com"
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
},
"maintainers": [
{
@@ -45,13 +46,17 @@
{
"name": "nylen",
"email": "jnylen@gmail.com"
+ },
+ {
+ "name": "simov",
+ "email": "simeonvelichkov@gmail.com"
}
],
"dist": {
- "shasum": "ed6b2719adcd1fd18f58dc081c0f1a5b43963909",
- "tarball": "http://registry.npmjs.org/caseless/-/caseless-0.10.0.tgz"
+ "shasum": "715b96ea9841593cc33067923f5ec60ebda4f7d7",
+ "tarball": "http://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/caseless/-/caseless-0.10.0.tgz",
+ "_resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/combined-stream/Readme.md b/deps/npm/node_modules/request/node_modules/combined-stream/Readme.md
index 3068adcaf..3a9e025fb 100644
--- a/deps/npm/node_modules/request/node_modules/combined-stream/Readme.md
+++ b/deps/npm/node_modules/request/node_modules/combined-stream/Readme.md
@@ -2,12 +2,11 @@
A stream that emits multiple other streams one after another.
-**NB** This module uses streams1 and will always use streams1 for
-compatibility. If you're looking for something to use for your project, please
-use a streams2 module, e.g.,
-[multistream](https://www.npmjs.com/package/multistream). Moreover, this module
-is maintained for bugfixes to current downstream consumers, so PRs which add
-features generally aren't accepted.
+**NB** Currently `combined-stream` works with streams version 1 only. There is ongoing effort to switch this library to streams version 2. Any help is welcome. :) Meanwhile you can explore other libraries that provide streams2 support with more or less compatibility with `combined-stream`.
+
+- [combined-stream2](https://www.npmjs.com/package/combined-stream2): A drop-in streams2-compatible replacement for the combined-stream module.
+
+- [multistream](https://www.npmjs.com/package/multistream): A stream that emits multiple other streams one after another.
## Installation
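The rewritten note above says what combined-stream does but not how it is used; a minimal streams1-style sketch, assuming the `create`/`append`/`pipe` API from the module's README (file names are placeholders):

```js
var CombinedStream = require('combined-stream')
var fs = require('fs')

var combined = CombinedStream.create()
combined.append(fs.createReadStream('file1.txt'))
combined.append(fs.createReadStream('file2.txt'))

// The appended streams are emitted one after another, in order.
combined.pipe(fs.createWriteStream('combined.txt'))
```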
diff --git a/deps/npm/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/package.json b/deps/npm/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/package.json
index 4b4890359..8ac66b814 100644
--- a/deps/npm/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/package.json
+++ b/deps/npm/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/package.json
@@ -37,7 +37,7 @@
},
"_id": "delayed-stream@1.0.0",
"_shasum": "df3ae199acadfb7d440aaae0b29e2272b24ec619",
- "_from": "delayed-stream@>=1.0.0 <2.0.0",
+ "_from": "delayed-stream@>=1.0.0 <1.1.0",
"_npmVersion": "2.8.3",
"_nodeVersion": "1.6.4",
"_npmUser": {
diff --git a/deps/npm/node_modules/request/node_modules/combined-stream/package.json b/deps/npm/node_modules/request/node_modules/combined-stream/package.json
index 08d0f2706..10339866f 100644
--- a/deps/npm/node_modules/request/node_modules/combined-stream/package.json
+++ b/deps/npm/node_modules/request/node_modules/combined-stream/package.json
@@ -6,7 +6,7 @@
},
"name": "combined-stream",
"description": "A stream that emits multiple other streams one after another.",
- "version": "1.0.3",
+ "version": "1.0.5",
"homepage": "https://github.com/felixge/node-combined-stream",
"repository": {
"type": "git",
@@ -20,28 +20,28 @@
"node": ">= 0.8"
},
"dependencies": {
- "delayed-stream": "^1.0.0"
+ "delayed-stream": "~1.0.0"
},
"devDependencies": {
"far": "~0.0.7"
},
"license": "MIT",
- "gitHead": "f1a12682aed63acb3cd66857104202a7e7ca5565",
+ "gitHead": "cfc7b815d090a109bcedb5bb0f6713148d55a6b7",
"bugs": {
"url": "https://github.com/felixge/node-combined-stream/issues"
},
- "_id": "combined-stream@1.0.3",
- "_shasum": "c224cc35d3cb98e25dead532472a18e8f75df5ab",
+ "_id": "combined-stream@1.0.5",
+ "_shasum": "938370a57b4a51dea2c77c15d5c5fdf895164009",
"_from": "combined-stream@>=1.0.1 <1.1.0",
- "_npmVersion": "2.10.0",
- "_nodeVersion": "2.0.1",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "0.12.4",
"_npmUser": {
- "name": "apechimp",
- "email": "apeherder@gmail.com"
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
},
"dist": {
- "shasum": "c224cc35d3cb98e25dead532472a18e8f75df5ab",
- "tarball": "http://registry.npmjs.org/combined-stream/-/combined-stream-1.0.3.tgz"
+ "shasum": "938370a57b4a51dea2c77c15d5c5fdf895164009",
+ "tarball": "http://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz"
},
"maintainers": [
{
@@ -62,6 +62,6 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.3.tgz",
+ "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/extend/.jscs.json b/deps/npm/node_modules/request/node_modules/extend/.jscs.json
new file mode 100644
index 000000000..59faa8bd9
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/.jscs.json
@@ -0,0 +1,103 @@
+{
+ "additionalRules": [],
+
+ "requireSemicolons": true,
+
+ "disallowMultipleSpaces": true,
+
+ "disallowIdentifierNames": [],
+
+ "requireCurlyBraces": ["if", "else", "for", "while", "do", "try", "catch"],
+
+ "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"],
+
+ "disallowSpaceAfterKeywords": [],
+
+ "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true },
+ "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true },
+ "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true },
+ "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true },
+ "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true },
+
+ "requireSpaceBetweenArguments": true,
+
+ "disallowSpacesInsideParentheses": true,
+
+ "disallowSpacesInsideArrayBrackets": true,
+
+ "disallowQuotedKeysInObjects": "allButReserved",
+
+ "disallowSpaceAfterObjectKeys": true,
+
+ "requireCommaBeforeLineBreak": true,
+
+ "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"],
+ "requireSpaceAfterPrefixUnaryOperators": [],
+
+ "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"],
+ "requireSpaceBeforePostfixUnaryOperators": [],
+
+ "disallowSpaceBeforeBinaryOperators": [],
+ "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="],
+
+ "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="],
+ "disallowSpaceAfterBinaryOperators": [],
+
+ "disallowImplicitTypeConversion": ["binary", "string"],
+
+ "disallowKeywords": ["with", "eval"],
+
+ "requireKeywordsOnNewLine": [],
+ "disallowKeywordsOnNewLine": ["else"],
+
+ "requireLineFeedAtFileEnd": true,
+
+ "disallowTrailingWhitespace": true,
+
+ "disallowTrailingComma": true,
+
+ "excludeFiles": ["node_modules/**", "vendor/**"],
+
+ "disallowMultipleLineStrings": true,
+
+ "requireDotNotation": true,
+
+ "requireParenthesesAroundIIFE": true,
+
+ "validateLineBreaks": "LF",
+
+ "validateQuoteMarks": {
+ "escape": true,
+ "mark": "'"
+ },
+
+ "disallowOperatorBeforeLineBreak": [],
+
+ "requireSpaceBeforeKeywords": [
+ "do",
+ "for",
+ "if",
+ "else",
+ "switch",
+ "case",
+ "try",
+ "catch",
+ "finally",
+ "while",
+ "with",
+ "return"
+ ],
+
+ "validateAlignedFunctionParameters": {
+ "lineBreakAfterOpeningBraces": true,
+ "lineBreakBeforeClosingBraces": true
+ },
+
+ "requirePaddingNewLinesBeforeExport": true,
+
+ "validateNewlineAfterArrayElements": {
+ "maximum": 6
+ },
+
+ "requirePaddingNewLinesAfterUseStrict": true
+}
diff --git a/deps/npm/node_modules/request/node_modules/extend/.npmignore b/deps/npm/node_modules/request/node_modules/extend/.npmignore
new file mode 100644
index 000000000..30d74d258
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/.npmignore
@@ -0,0 +1 @@
+test
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/extend/.travis.yml b/deps/npm/node_modules/request/node_modules/extend/.travis.yml
new file mode 100644
index 000000000..ebef64499
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/.travis.yml
@@ -0,0 +1,44 @@
+language: node_js
+node_js:
+ - "iojs-v2.3"
+ - "iojs-v2.2"
+ - "iojs-v2.1"
+ - "iojs-v2.0"
+ - "iojs-v1.8"
+ - "iojs-v1.7"
+ - "iojs-v1.6"
+ - "iojs-v1.5"
+ - "iojs-v1.4"
+ - "iojs-v1.3"
+ - "iojs-v1.2"
+ - "iojs-v1.1"
+ - "iojs-v1.0"
+ - "0.12"
+ - "0.11"
+ - "0.10"
+ - "0.9"
+ - "0.8"
+ - "0.6"
+ - "0.4"
+before_install:
+ - '[ "${TRAVIS_NODE_VERSION}" = "0.6" ] || npm install -g npm@1.4.28 && npm install -g npm'
+sudo: false
+matrix:
+ fast_finish: true
+ allow_failures:
+ - node_js: "iojs-v2.2"
+ - node_js: "iojs-v2.1"
+ - node_js: "iojs-v2.0"
+ - node_js: "iojs-v1.7"
+ - node_js: "iojs-v1.6"
+ - node_js: "iojs-v1.5"
+ - node_js: "iojs-v1.4"
+ - node_js: "iojs-v1.3"
+ - node_js: "iojs-v1.2"
+ - node_js: "iojs-v1.1"
+ - node_js: "iojs-v1.0"
+ - node_js: "0.11"
+ - node_js: "0.9"
+ - node_js: "0.8"
+ - node_js: "0.6"
+ - node_js: "0.4"
diff --git a/deps/npm/node_modules/request/node_modules/extend/CHANGELOG.md b/deps/npm/node_modules/request/node_modules/extend/CHANGELOG.md
new file mode 100644
index 000000000..2874d2ecd
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/CHANGELOG.md
@@ -0,0 +1,68 @@
+3.0.0 / 2015-07-01
+==================
+ * [Possible breaking change] Use global "strict" directive (#32)
+ * [Tests] `int` is an ES3 reserved word
+ * [Tests] Test up to `io.js` `v2.3`
+ * [Tests] Add `npm run eslint`
+ * [Dev Deps] Update `covert`, `jscs`
+
+2.0.1 / 2015-04-25
+==================
+ * Use an inline `isArray` check, for ES3 browsers. (#27)
+ * Some old browsers fail when an identifier is `toString`
+ * Test latest `node` and `io.js` versions on `travis-ci`; speed up builds
+ * Add license info to package.json (#25)
+ * Update `tape`, `jscs`
+ * Adding a CHANGELOG
+
+2.0.0 / 2014-10-01
+==================
+ * Increase code coverage to 100%; run code coverage as part of tests
+ * Add `npm run lint`; Run linter as part of tests
+ * Remove nodeType and setInterval checks in isPlainObject
+ * Updating `tape`, `jscs`, `covert`
+ * General style and README cleanup
+
+1.3.0 / 2014-06-20
+==================
+ * Add component.json for browser support (#18)
+ * Use SVG for badges in README (#16)
+ * Updating `tape`, `covert`
+ * Updating travis-ci to work with multiple node versions
+ * Fix `deep === false` bug (returning target as {}) (#14)
+ * Fixing constructor checks in isPlainObject
+ * Adding additional test coverage
+ * Adding `npm run coverage`
+ * Add LICENSE (#13)
+ * Adding a warning about `false`, per #11
+ * General style and whitespace cleanup
+
+1.2.1 / 2013-09-14
+==================
+ * Fixing hasOwnProperty bugs that would only have shown up in specific browsers. Fixes #8
+ * Updating `tape`
+
+1.2.0 / 2013-09-02
+==================
+ * Updating the README: add badges
+ * Adding a missing variable reference.
+ * Using `tape` instead of `buster` for tests; add more tests (#7)
+ * Adding node 0.10 to Travis CI (#6)
+ * Enabling "npm test" and cleaning up package.json (#5)
+ * Add Travis CI.
+
+1.1.3 / 2012-12-06
+==================
+ * Added unit tests.
+ * Ensure extend function is named. (Looks nicer in a stack trace.)
+ * README cleanup.
+
+1.1.1 / 2012-11-07
+==================
+ * README cleanup.
+ * Added installation instructions.
+ * Added a missing semicolon
+
+1.0.0 / 2012-04-08
+==================
+ * Initial commit
diff --git a/deps/npm/node_modules/request/node_modules/extend/LICENSE b/deps/npm/node_modules/request/node_modules/extend/LICENSE
new file mode 100644
index 000000000..92d41503d
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Stefan Thomas
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/extend/README.md b/deps/npm/node_modules/request/node_modules/extend/README.md
new file mode 100644
index 000000000..3a47b2188
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/README.md
@@ -0,0 +1,61 @@
+[![Build Status][travis-svg]][travis-url]
+[![dependency status][deps-svg]][deps-url]
+[![dev dependency status][dev-deps-svg]][dev-deps-url]
+
+# extend() for Node.js <sup>[![Version Badge][npm-version-png]][npm-url]</sup>
+
+`node-extend` is a port of the classic extend() method from jQuery. It behaves as you expect. It is simple, tried and true.
+
+## Installation
+
+This package is available on [npm][npm-url] as: `extend`
+
+``` sh
+npm install extend
+```
+
+## Usage
+
+**Syntax:** extend **(** [`deep`], `target`, `object1`, [`objectN`] **)**
+
+*Extend one object with one or more others, returning the modified object.*
+
+Keep in mind that the target object will be modified, and will be returned from extend().
+
+If a boolean true is specified as the first argument, extend performs a deep copy, recursively copying any objects it finds. Otherwise, the copy will share structure with the original object(s).
+Undefined properties are not copied. However, properties inherited from the object's prototype will be copied over.
+Warning: passing `false` as the first argument is not supported.
+
+### Arguments
+
+* `deep` *Boolean* (optional)
+If set, the merge becomes recursive (i.e. deep copy).
+* `target` *Object*
+The object to extend.
+* `object1` *Object*
+The object that will be merged into the first.
+* `objectN` *Object* (Optional)
+More objects to merge into the first.
+
+## License
+
+`node-extend` is licensed under the [MIT License][mit-license-url].
+
+## Acknowledgements
+
+All credit to the jQuery authors for perfecting this amazing utility.
+
+Ported to Node.js by [Stefan Thomas][github-justmoon] with contributions by [Jonathan Buchanan][github-insin] and [Jordan Harband][github-ljharb].
+
+[travis-svg]: https://travis-ci.org/justmoon/node-extend.svg
+[travis-url]: https://travis-ci.org/justmoon/node-extend
+[npm-url]: https://npmjs.org/package/extend
+[mit-license-url]: http://opensource.org/licenses/MIT
+[github-justmoon]: https://github.com/justmoon
+[github-insin]: https://github.com/insin
+[github-ljharb]: https://github.com/ljharb
+[npm-version-png]: http://vb.teelaun.ch/justmoon/node-extend.svg
+[deps-svg]: https://david-dm.org/justmoon/node-extend.svg
+[deps-url]: https://david-dm.org/justmoon/node-extend
+[dev-deps-svg]: https://david-dm.org/justmoon/node-extend/dev-status.svg
+[dev-deps-url]: https://david-dm.org/justmoon/node-extend#info=devDependencies
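The Usage section above gives the signature but no code; a short sketch of the shallow versus deep behaviour it describes:

```js
var extend = require('extend')

var target = { name: 'npm', opts: { loglevel: 'warn' } }

// Shallow: nested objects are copied by reference, so the second source
// replaces the whole opts object.
var shallow = extend({}, target, { opts: { color: true } })
console.log(shallow.opts) // { color: true }

// Deep: pass `true` first to merge recursively.
var deep = extend(true, {}, target, { opts: { color: true } })
console.log(deep.opts) // { loglevel: 'warn', color: true }
```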
diff --git a/deps/npm/node_modules/request/node_modules/extend/component.json b/deps/npm/node_modules/request/node_modules/extend/component.json
new file mode 100644
index 000000000..0f76b5930
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/component.json
@@ -0,0 +1,31 @@
+{
+ "name": "extend",
+ "author": "Stefan Thomas <justmoon@members.fsf.org> (http://www.justmoon.net)",
+ "version": "3.0.0",
+ "description": "Port of jQuery.extend for node.js and the browser.",
+ "scripts": [
+ "index.js"
+ ],
+ "contributors": [
+ {
+ "name": "Jordan Harband",
+ "url": "https://github.com/ljharb"
+ }
+ ],
+ "keywords": [
+ "extend",
+ "clone",
+ "merge"
+ ],
+ "repository" : {
+ "type": "git",
+ "url": "https://github.com/justmoon/node-extend.git"
+ },
+ "dependencies": {
+ },
+ "devDependencies": {
+ "tape" : "~3.0.0",
+ "covert": "~0.4.0",
+ "jscs": "~1.6.2"
+ }
+}
diff --git a/deps/npm/node_modules/request/node_modules/extend/index.js b/deps/npm/node_modules/request/node_modules/extend/index.js
new file mode 100644
index 000000000..2f957482e
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/index.js
@@ -0,0 +1,85 @@
+'use strict';
+
+var hasOwn = Object.prototype.hasOwnProperty;
+var toStr = Object.prototype.toString;
+
+var isArray = function isArray(arr) {
+ if (typeof Array.isArray === 'function') {
+ return Array.isArray(arr);
+ }
+
+ return toStr.call(arr) === '[object Array]';
+};
+
+var isPlainObject = function isPlainObject(obj) {
+ if (!obj || toStr.call(obj) !== '[object Object]') {
+ return false;
+ }
+
+ var hasOwnConstructor = hasOwn.call(obj, 'constructor');
+ var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf');
+ // Not own constructor property must be Object
+ if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
+ return false;
+ }
+
+ // Own properties are enumerated firstly, so to speed up,
+ // if last one is own, then all properties are own.
+ var key;
+ for (key in obj) {/**/}
+
+ return typeof key === 'undefined' || hasOwn.call(obj, key);
+};
+
+module.exports = function extend() {
+ var options, name, src, copy, copyIsArray, clone,
+ target = arguments[0],
+ i = 1,
+ length = arguments.length,
+ deep = false;
+
+ // Handle a deep copy situation
+ if (typeof target === 'boolean') {
+ deep = target;
+ target = arguments[1] || {};
+ // skip the boolean and the target
+ i = 2;
+ } else if ((typeof target !== 'object' && typeof target !== 'function') || target == null) {
+ target = {};
+ }
+
+ for (; i < length; ++i) {
+ options = arguments[i];
+ // Only deal with non-null/undefined values
+ if (options != null) {
+ // Extend the base object
+ for (name in options) {
+ src = target[name];
+ copy = options[name];
+
+ // Prevent never-ending loop
+ if (target !== copy) {
+ // Recurse if we're merging plain objects or arrays
+ if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) {
+ if (copyIsArray) {
+ copyIsArray = false;
+ clone = src && isArray(src) ? src : [];
+ } else {
+ clone = src && isPlainObject(src) ? src : {};
+ }
+
+ // Never move original objects, clone them
+ target[name] = extend(deep, clone, copy);
+
+ // Don't bring in undefined values
+ } else if (typeof copy !== 'undefined') {
+ target[name] = copy;
+ }
+ }
+ }
+ }
+ }
+
+ // Return the modified object
+ return target;
+};
diff --git a/deps/npm/node_modules/request/node_modules/extend/package.json b/deps/npm/node_modules/request/node_modules/extend/package.json
new file mode 100644
index 000000000..c8c7cac99
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/extend/package.json
@@ -0,0 +1,73 @@
+{
+ "name": "extend",
+ "author": {
+ "name": "Stefan Thomas",
+ "email": "justmoon@members.fsf.org",
+ "url": "http://www.justmoon.net"
+ },
+ "version": "3.0.0",
+ "description": "Port of jQuery.extend for node.js and the browser",
+ "main": "index",
+ "scripts": {
+ "test": "npm run lint && node test/index.js && npm run coverage-quiet",
+ "coverage": "covert test/index.js",
+ "coverage-quiet": "covert test/index.js --quiet",
+ "lint": "npm run jscs && npm run eslint",
+ "jscs": "jscs *.js */*.js",
+ "eslint": "eslint *.js */*.js"
+ },
+ "contributors": [
+ {
+ "name": "Jordan Harband",
+ "url": "https://github.com/ljharb"
+ }
+ ],
+ "keywords": [
+ "extend",
+ "clone",
+ "merge"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/justmoon/node-extend.git"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tape": "^4.0.0",
+ "covert": "^1.1.0",
+ "jscs": "^1.13.1",
+ "eslint": "^0.24.0"
+ },
+ "license": "MIT",
+ "gitHead": "148e7270cab2e9413af2cd0cab147070d755ed6d",
+ "bugs": {
+ "url": "https://github.com/justmoon/node-extend/issues"
+ },
+ "homepage": "https://github.com/justmoon/node-extend#readme",
+ "_id": "extend@3.0.0",
+ "_shasum": "5a474353b9f3353ddd8176dfd37b91c83a46f1d4",
+ "_from": "extend@>=3.0.0 <3.1.0",
+ "_npmVersion": "2.11.3",
+ "_nodeVersion": "2.3.1",
+ "_npmUser": {
+ "name": "ljharb",
+ "email": "ljharb@gmail.com"
+ },
+ "dist": {
+ "shasum": "5a474353b9f3353ddd8176dfd37b91c83a46f1d4",
+ "tarball": "http://registry.npmjs.org/extend/-/extend-3.0.0.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "justmoon",
+ "email": "justmoon@members.fsf.org"
+ },
+ {
+ "name": "ljharb",
+ "email": "ljharb@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/request/node_modules/forever-agent/package.json b/deps/npm/node_modules/request/node_modules/forever-agent/package.json
index 7bdaaaff0..ef074a510 100644
--- a/deps/npm/node_modules/request/node_modules/forever-agent/package.json
+++ b/deps/npm/node_modules/request/node_modules/forever-agent/package.json
@@ -18,14 +18,39 @@
"engines": {
"node": "*"
},
- "readme": "forever-agent\n=============\n\nHTTP Agent that keeps socket connections alive between keep-alive requests. Formerly part of mikeal/request, now a standalone module.\n",
- "readmeFilename": "README.md",
+ "gitHead": "1b3b6163f2b3c2c4122bbfa288c1325c0df9871d",
"bugs": {
"url": "https://github.com/mikeal/forever-agent/issues"
},
- "homepage": "https://github.com/mikeal/forever-agent#readme",
+ "homepage": "https://github.com/mikeal/forever-agent",
"_id": "forever-agent@0.6.1",
+ "scripts": {},
"_shasum": "fbc71f0c41adeb37f96c577ad1ed42d8fdacca91",
+ "_from": "forever-agent@>=0.6.0 <0.7.0",
+ "_npmVersion": "1.4.28",
+ "_npmUser": {
+ "name": "simov",
+ "email": "simeonvelichkov@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ {
+ "name": "nylen",
+ "email": "jnylen@gmail.com"
+ },
+ {
+ "name": "simov",
+ "email": "simeonvelichkov@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "fbc71f0c41adeb37f96c577ad1ed42d8fdacca91",
+ "tarball": "http://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
- "_from": "forever-agent@>=0.6.0 <0.7.0"
+ "readme": "ERROR: No README data found!"
}
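The removed readme text above only states that forever-agent keeps sockets alive between keep-alive requests; a minimal sketch of the usual pattern of passing it as an `agent` option (host and path are placeholders, and the bare constructor usage is an assumption based on the module's conventional API):

```js
var ForeverAgent = require('forever-agent')
var http = require('http')

// Reuse sockets across requests instead of opening a new connection each time.
var agent = new ForeverAgent()

http.get({ host: 'example.org', path: '/', agent: agent }, function (res) {
  console.log(res.statusCode)
  res.resume() // drain the response so the socket can be reused
})
```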
diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js b/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js
new file mode 100644
index 000000000..1e7717d56
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js
@@ -0,0 +1 @@
+module.exports = FormData;
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js b/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js
index 5b33f554c..89505f6eb 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js
+++ b/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js
@@ -67,7 +67,7 @@ FormData.prototype._trackLength = function(header, value, options) {
// @check why add CRLF? does this account for custom/multiple CRLFs?
this._overheadLength +=
Buffer.byteLength(header) +
- + FormData.LINE_BREAK.length;
+ FormData.LINE_BREAK.length;
// empty or either doesn't have path or not an http response
if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) {
@@ -180,7 +180,7 @@ FormData.prototype._multiPartFooter = function(field, value, options) {
};
FormData.prototype._lastBoundary = function() {
- return '--' + this.getBoundary() + '--';
+ return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK;
};
FormData.prototype.getHeaders = function(userHeaders) {
@@ -306,7 +306,7 @@ FormData.prototype.submit = function(params, cb) {
options.headers = this.getHeaders(params.headers);
// https if specified, fallback to http in any other case
- if (params.protocol == 'https:') {
+ if (options.protocol == 'https:') {
request = https.request(options);
} else {
request = http.request(options);
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jscsrc b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jscsrc
new file mode 100644
index 000000000..b8cfa1731
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jscsrc
@@ -0,0 +1,3 @@
+{
+ "validateIndentation": 4
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jshintrc b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jshintrc
deleted file mode 100644
index 7427dce48..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/.jshintrc
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- // Enforcing options
- "eqeqeq": false,
- "forin": true,
- "indent": 4,
- "noarg": true,
- "undef": true,
- "unused": true,
- "trailing": true,
- "evil": true,
- "laxcomma": true,
-
- // Relaxing options
- "onevar": false,
- "asi": false,
- "eqnull": true,
- "expr": false,
- "loopfunc": true,
- "sub": true,
- "browser": true,
- "node": true,
- "globals": {
- "define": true
- }
-}
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/CHANGELOG.md b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/CHANGELOG.md
new file mode 100644
index 000000000..293ba59f7
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/CHANGELOG.md
@@ -0,0 +1,81 @@
+# v1.3.0
+
+New Features:
+- Added `constant`
+- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. (#671, #806)
+- Added `during` and `doDuring`, which are like `whilst` with an async truth test. (#800)
+- `retry` now accepts an `interval` parameter to specify a delay between retries. (#793)
+- `async` should work better in Web Workers due to better `root` detection (#804)
+- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` (#642)
+- Various internal updates (#786, #801, #802, #803)
+- Various doc fixes (#790, #794)
+
+Bug Fixes:
+- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. (#740, #744, #783)
+
+# v1.2.1
+
+Bug Fix:
+
+- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. (#782)
+
+
+# v1.2.0
+
+New Features:
+
+- Added `timesLimit` (#743)
+- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. (#747, #772)
+
+Bug Fixes:
+
+- Fixed a regression in `each` and family with empty arrays that have additional properties. (#775, #777)
+
+
+# v1.1.1
+
+Bug Fix:
+
+- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. (#782)
+
+
+# v1.1.0
+
+New Features:
+
+- `cargo` now supports all of the same methods and event callbacks as `queue`.
+- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. (#769)
+- Optimized `map`, `eachOf`, and `waterfall` families of functions
+- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array (#667).
+- The callback is now optional for the composed results of `compose` and `seq`. (#618)
+- Reduced file size by 4kb, (minified version by 1kb)
+- Added code coverage through `nyc` and `coveralls` (#768)
+
+Bug Fixes:
+
+- `forever` will no longer stack overflow with a synchronous iterator (#622)
+- `eachLimit` and other limit functions will stop iterating once an error occurs (#754)
+- Always pass `null` in callbacks when there is no error (#439)
+- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue (#668)
+- `each` and family will properly handle an empty array (#578)
+- `eachSeries` and family will finish if the underlying array is modified during execution (#557)
+- `queue` will throw if a non-function is passed to `q.push()` (#593)
+- Doc fixes (#629, #766)
+
+
+# v1.0.0
+
+No known breaking changes, we are simply complying with semver from here on out.
+
+Changes:
+
+- Start using a changelog!
+- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) (#168 #704 #321)
+- Detect deadlocks in `auto` (#663)
+- Better support for require.js (#527)
+- Throw if queue created with concurrency `0` (#714)
+- Fix unneeded iteration in `queue.resume()` (#758)
+- Guard against timer mocking overriding `setImmediate` (#609 #611)
+- Miscellaneous doc fixes (#542 #596 #615 #628 #631 #690 #729)
+- Use single noop function internally (#546)
+- Optimize internal `_each`, `_map` and `_keys` functions.
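Two of the 1.3.0 items listed above, `asyncify`/`wrapSync` and the new `interval` option for `retry`, are easier to see in code; a short sketch assuming the documented 1.3.0 API (`flakyOperation` is a hypothetical task):

```js
var async = require('async');

// asyncify/wrapSync: use a synchronous function where a callback-style one is expected.
async.waterfall([
  async.constant('{"answer": 42}'),
  async.asyncify(JSON.parse)
], function (err, result) {
  console.log(result.answer); // 42
});

// retry with an interval: up to 3 attempts, waiting 200ms between them.
async.retry({ times: 3, interval: 200 }, function (callback) {
  flakyOperation(callback); // hypothetical task that may fail transiently
}, function (err, result) {
  if (err) return console.error('still failing after 3 tries', err);
  console.log('succeeded:', result);
});
```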
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md
index 13b8dd386..c0a93c1b1 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/README.md
@@ -1,6 +1,9 @@
# Async.js
[![Build Status via Travis CI](https://travis-ci.org/caolan/async.svg?branch=master)](https://travis-ci.org/caolan/async)
+[![NPM version](http://img.shields.io/npm/v/async.svg)](https://www.npmjs.org/package/async)
+[![Coverage Status](https://coveralls.io/repos/caolan/async/badge.svg?branch=master)](https://coveralls.io/r/caolan/async?branch=master)
+[![Join the chat at https://gitter.im/caolan/async](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
Async is a utility module which provides straight-forward, powerful functions
@@ -49,7 +52,66 @@ There are many more functions available so take a look at the docs below for a
full list. This module aims to be comprehensive, so if you feel anything is
missing please create a GitHub issue for it.
-## Common Pitfalls
+## Common Pitfalls <sub>[(StackOverflow)](http://stackoverflow.com/questions/tagged/async.js)</sub>
+### Synchronous iteration functions
+
+If you get an error like `RangeError: Maximum call stack size exceeded.` or other stack overflow issues when using async, you are likely using a synchronous iterator. By *synchronous* we mean a function that calls its callback on the same tick in the javascript event loop, without doing any I/O or using any timers. Calling many callbacks iteratively will quickly overflow the stack. If you run into this issue, just defer your callback with `async.nextTick` to start a new call stack on the next tick of the event loop.
+
+This can also arise by accident if you callback early in certain cases:
+
+```js
+async.eachSeries(hugeArray, function iterator(item, callback) {
+ if (inCache(item)) {
+ callback(null, cache[item]); // if many items are cached, you'll overflow
+ } else {
+ doSomeIO(item, callback);
+ }
+}, function done() {
+ //...
+});
+```
+
+Just change it to:
+
+```js
+async.eachSeries(hugeArray, function iterator(item, callback) {
+ if (inCache(item)) {
+ async.setImmediate(function () {
+ callback(null, cache[item]);
+ });
+ } else {
+ doSomeIO(item, callback);
+ //...
+```
+
+Async guards against synchronous functions in some, but not all, cases. If you are still running into stack overflows, you can defer as suggested above, or wrap functions with [`async.ensureAsync`](#ensureAsync). Functions that are asynchronous by their nature do not have this problem and don't need the extra callback deferral.
+
+If javascript's event loop is still a bit nebulous, check out [this article](http://blog.carbonfive.com/2013/10/27/the-javascript-event-loop-explained/) or [this talk](http://2014.jsconf.eu/speakers/philip-roberts-what-the-heck-is-the-event-loop-anyway.html) for more detailed information about how it works.
+
+
+### Multiple callbacks
+
+Make sure to always `return` when calling a callback early, otherwise you will cause multiple callbacks and unpredictable behavior in many cases.
+
+```js
+async.waterfall([
+ function (callback) {
+ getSomething(options, function (err, result) {
+ if (err) {
+ callback(new Error("failed getting something:" + err.message));
+ // we should return here
+ }
+ // since we did not return, this callback still will be called and
+ // `processData` will be called twice
+ callback(result);
+ });
+ },
+ processData
+], done)
+```
+
+It is always good practice to `return callback(err, result)` whenever a callback call is not the last statement of a function.
+
### Binding a context to an iterator
@@ -61,7 +123,7 @@ a method of another library isn't working as an iterator, study this example:
// Here is a simple object with an (unnecessarily roundabout) squaring method
var AsyncSquaringLibrary = {
squareExponent: 2,
- square: function(number, callback){
+ square: function(number, callback){
var result = Math.pow(number, this.squareExponent);
setTimeout(function(){
callback(null, result);
@@ -79,7 +141,7 @@ async.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){
async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){
// result is [1, 4, 9]
// With the help of bind we can attach a context to the iterator before
- // passing it to async. Now the square function will be executed in its
+ // passing it to async. Now the square function will be executed in its
// 'home' AsyncSquaringLibrary context and the value of `this.squareExponent`
// will be as expected.
});
@@ -88,16 +150,20 @@ async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), fun
## Download
The source is available for download from
-[GitHub](http://github.com/caolan/async).
+[GitHub](https://github.com/caolan/async/blob/master/lib/async.js).
Alternatively, you can install using Node Package Manager (`npm`):
npm install async
+As well as using Bower:
+
+ bower install async
+
__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed
## In the Browser
-So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5.
+So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5.
Usage:
@@ -114,68 +180,62 @@ Usage:
## Documentation
+Some functions are also available in the following forms:
+* `<name>Series` - the same as `<name>` but runs only a single async operation at a time
+* `<name>Limit` - the same as `<name>` but runs a maximum of `limit` async operations at a time
+
### Collections
-* [`each`](#each)
-* [`eachSeries`](#eachSeries)
-* [`eachLimit`](#eachLimit)
-* [`map`](#map)
-* [`mapSeries`](#mapSeries)
-* [`mapLimit`](#mapLimit)
-* [`filter`](#filter)
-* [`filterSeries`](#filterSeries)
-* [`reject`](#reject)
-* [`rejectSeries`](#rejectSeries)
-* [`reduce`](#reduce)
-* [`reduceRight`](#reduceRight)
-* [`detect`](#detect)
-* [`detectSeries`](#detectSeries)
+* [`each`](#each), `eachSeries`, `eachLimit`
+* [`forEachOf`](#forEachOf), `forEachOfSeries`, `forEachOfLimit`
+* [`map`](#map), `mapSeries`, `mapLimit`
+* [`filter`](#filter), `filterSeries`, `filterLimit`
+* [`reject`](#reject), `rejectSeries`, `rejectLimit`
+* [`reduce`](#reduce), [`reduceRight`](#reduceRight)
+* [`detect`](#detect), `detectSeries`, `detectLimit`
* [`sortBy`](#sortBy)
-* [`some`](#some)
-* [`every`](#every)
-* [`concat`](#concat)
-* [`concatSeries`](#concatSeries)
+* [`some`](#some), `someLimit`
+* [`every`](#every), `everyLimit`
+* [`concat`](#concat), `concatSeries`
### Control Flow
* [`series`](#seriestasks-callback)
-* [`parallel`](#parallel)
-* [`parallelLimit`](#parallellimittasks-limit-callback)
-* [`whilst`](#whilst)
-* [`doWhilst`](#doWhilst)
-* [`until`](#until)
-* [`doUntil`](#doUntil)
+* [`parallel`](#parallel), `parallelLimit`
+* [`whilst`](#whilst), [`doWhilst`](#doWhilst)
+* [`until`](#until), [`doUntil`](#doUntil)
+* [`during`](#during), [`doDuring`](#doDuring)
* [`forever`](#forever)
* [`waterfall`](#waterfall)
* [`compose`](#compose)
* [`seq`](#seq)
-* [`applyEach`](#applyEach)
-* [`applyEachSeries`](#applyEachSeries)
-* [`queue`](#queue)
-* [`priorityQueue`](#priorityQueue)
+* [`applyEach`](#applyEach), `applyEachSeries`
+* [`queue`](#queue), [`priorityQueue`](#priorityQueue)
* [`cargo`](#cargo)
* [`auto`](#auto)
* [`retry`](#retry)
* [`iterator`](#iterator)
-* [`apply`](#apply)
-* [`nextTick`](#nextTick)
-* [`times`](#times)
-* [`timesSeries`](#timesSeries)
+* [`times`](#times), `timesSeries`, `timesLimit`
### Utils
+* [`apply`](#apply)
+* [`nextTick`](#nextTick)
* [`memoize`](#memoize)
* [`unmemoize`](#unmemoize)
+* [`ensureAsync`](#ensureAsync)
+* [`constant`](#constant)
+* [`asyncify`](#asyncify)
+* [`wrapSync`](#wrapSync)
* [`log`](#log)
* [`dir`](#dir)
* [`noConflict`](#noConflict)
-
## Collections
<a name="forEach" />
<a name="each" />
-### each(arr, iterator, callback)
+### each(arr, iterator, [callback])
Applies the function `iterator` to each item in `arr`, in parallel.
The `iterator` is called with an item from the list, and a callback for when it
@@ -189,10 +249,11 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err)` which must be called once it has
+ The iterator is passed a `callback(err)` which must be called once it has
completed. If no error has occurred, the `callback` should be run without
- arguments or with an explicit `null` argument.
-* `callback(err)` - A callback which is called when all `iterator` functions
+ arguments or with an explicit `null` argument. The array index is not passed
+ to the iterator. If you need the index, use [`forEachOf`](#forEachOf).
+* `callback(err)` - *Optional* A callback which is called when all `iterator` functions
have finished, or an error occurs.
__Examples__
@@ -208,13 +269,13 @@ async.each(openFiles, saveFile, function(err){
```
```js
-// assuming openFiles is an array of file names
+// assuming openFiles is an array of file names
async.each(openFiles, function(file, callback) {
-
+
// Perform operation on file here.
console.log('Processing file ' + file);
-
+
if( file.length > 32 ) {
console.log('This file name is too long');
callback('File name too long');
@@ -235,73 +296,80 @@ async.each(openFiles, function(file, callback) {
});
```
----------------------------------------
-
-<a name="forEachSeries" />
-<a name="eachSeries" />
-### eachSeries(arr, iterator, callback)
-
-The same as [`each`](#each), only `iterator` is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
-This means the `iterator` functions will complete in order.
+__Related__
+* eachSeries(arr, iterator, [callback])
+* eachLimit(arr, limit, iterator, [callback])
---------------------------------------
-<a name="forEachLimit" />
-<a name="eachLimit" />
-### eachLimit(arr, limit, iterator, callback)
+<a name="forEachOf" />
+<a name="eachOf" />
-The same as [`each`](#each), only no more than `limit` `iterator`s will be simultaneously
-running at any time.
+### forEachOf(obj, iterator, [callback])
-Note that the items in `arr` are not processed in batches, so there is no guarantee that
-the first `limit` `iterator` functions will complete before any others are started.
+Like `each`, except that it iterates over objects, and passes the key as the second argument to the iterator.
__Arguments__
-* `arr` - An array to iterate over.
-* `limit` - The maximum number of `iterator`s to run at any time.
-* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err)` which must be called once it has
- completed. If no error has occurred, the callback should be run without
- arguments or with an explicit `null` argument.
-* `callback(err)` - A callback which is called when all `iterator` functions
- have finished, or an error occurs.
+* `obj` - An object or array to iterate over.
+* `iterator(item, key, callback)` - A function to apply to each item in `obj`.
+The `key` is the item's key, or index in the case of an array. The iterator is
+passed a `callback(err)` which must be called once it has completed. If no
+error has occurred, the callback should be run without arguments or with an
+explicit `null` argument.
+* `callback(err)` - *Optional* A callback which is called when all `iterator` functions have finished, or an error occurs.
__Example__
```js
-// Assume documents is an array of JSON objects and requestApi is a
-// function that interacts with a rate-limited REST api.
-
-async.eachLimit(documents, 20, requestApi, function(err){
- // if any of the saves produced an error, err would equal that error
-});
+var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"};
+var configs = {};
+
+async.forEachOf(obj, function (value, key, callback) {
+ fs.readFile(__dirname + value, "utf8", function (err, data) {
+ if (err) return callback(err);
+ try {
+ configs[key] = JSON.parse(data);
+ } catch (e) {
+ return callback(e);
+ }
+ callback();
+ })
+}, function (err) {
+ if (err) console.error(err.message);
+ // configs is now a map of JSON data
+ doSomethingWith(configs);
+})
```
+__Related__
+
+* forEachOfSeries(obj, iterator, [callback])
+* forEachOfLimit(obj, limit, iterator, [callback])
+
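+A minimal sketch of `forEachOfLimit`, reusing the `obj` map from the example above and limiting the work to two files at a time (`lengths` is just an illustrative accumulator, not part of the API):
+
+```js
+var lengths = {};
+
+async.forEachOfLimit(obj, 2, function (value, key, callback) {
+    // at most two fs.stat calls are in flight at any time
+    fs.stat(__dirname + value, function (err, stats) {
+        if (err) return callback(err);
+        lengths[key] = stats.size;
+        callback();
+    });
+}, function (err) {
+    if (err) console.error(err.message);
+    // lengths now maps each key to its file size
+});
+```
+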
---------------------------------------
<a name="map" />
-### map(arr, iterator, callback)
+### map(arr, iterator, [callback])
Produces a new array of values by mapping each value in `arr` through
the `iterator` function. The `iterator` is called with an item from `arr` and a
-callback for when it has finished processing. Each of these callback takes 2 arguments:
-an `error`, and the transformed item from `arr`. If `iterator` passes an error to his
+callback for when it has finished processing. Each of these callbacks takes 2 arguments:
+an `error`, and the transformed item from `arr`. If `iterator` passes an error to its
callback, the main `callback` (for the `map` function) is immediately called with the error.
Note, that since this function applies the `iterator` to each item in parallel,
-there is no guarantee that the `iterator` functions will complete in order.
+there is no guarantee that the `iterator` functions will complete in order.
However, the results array will be in the same order as the original `arr`.
__Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err, transformed)` which must be called once
+ The iterator is passed a `callback(err, transformed)` which must be called once
it has completed with an error (which can be `null`) and a transformed item.
-* `callback(err, results)` - A callback which is called when all `iterator`
+* `callback(err, results)` - *Optional* A callback which is called when all `iterator`
functions have finished, or an error occurs. Results is an array of the
transformed items from the `arr`.
@@ -313,51 +381,15 @@ async.map(['file1','file2','file3'], fs.stat, function(err, results){
});
```
----------------------------------------
-
-<a name="mapSeries" />
-### mapSeries(arr, iterator, callback)
-
-The same as [`map`](#map), only the `iterator` is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
-The results array will be in the same order as the original.
-
-
----------------------------------------
-
-<a name="mapLimit" />
-### mapLimit(arr, limit, iterator, callback)
-
-The same as [`map`](#map), only no more than `limit` `iterator`s will be simultaneously
-running at any time.
-
-Note that the items are not processed in batches, so there is no guarantee that
-the first `limit` `iterator` functions will complete before any others are started.
-
-__Arguments__
-
-* `arr` - An array to iterate over.
-* `limit` - The maximum number of `iterator`s to run at any time.
-* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err, transformed)` which must be called once
- it has completed with an error (which can be `null`) and a transformed item.
-* `callback(err, results)` - A callback which is called when all `iterator`
- calls have finished, or an error occurs. The result is an array of the
- transformed items from the original `arr`.
-
-__Example__
-
-```js
-async.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){
- // results is now an array of stats for each file
-});
-```
+__Related__
+* mapSeries(arr, iterator, [callback])
+* mapLimit(arr, limit, iterator, [callback])
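+
+For instance, a minimal sketch of `mapLimit` (as in the example carried by earlier versions of these docs), running at most one `fs.stat` call at a time:
+
+```js
+async.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){
+    // results is now an array of stats for each file
+});
+```
+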
---------------------------------------
<a name="select" />
<a name="filter" />
-### filter(arr, iterator, callback)
+### filter(arr, iterator, [callback])
__Alias:__ `select`
@@ -372,9 +404,9 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in `arr`.
- The `iterator` is passed a `callback(truthValue)`, which must be called with a
+ The `iterator` is passed a `callback(truthValue)`, which must be called with a
boolean argument once it has completed.
-* `callback(results)` - A callback which is called after all the `iterator`
+* `callback(results)` - *Optional* A callback which is called after all the `iterator`
functions have finished.
__Example__
@@ -385,48 +417,37 @@ async.filter(['file1','file2','file3'], fs.exists, function(results){
});
```
----------------------------------------
-
-<a name="selectSeries" />
-<a name="filterSeries" />
-### filterSeries(arr, iterator, callback)
-
-__Alias:__ `selectSeries`
+__Related__
-The same as [`filter`](#filter) only the `iterator` is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
-The results array will be in the same order as the original.
+* filterSeries(arr, iterator, [callback])
+* filterLimit(arr, limit, iterator, [callback])
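+
+A sketch of `filterLimit`, checking at most two files at a time (as with `filter`, the final callback receives only `results`, with no error argument):
+
+```js
+async.filterLimit(['file1','file2','file3'], 2, fs.exists, function(results){
+    // results now equals an array of the existing files
+});
+```
+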
---------------------------------------
<a name="reject" />
-### reject(arr, iterator, callback)
+### reject(arr, iterator, [callback])
The opposite of [`filter`](#filter). Removes values that pass an `async` truth test.
----------------------------------------
-
-<a name="rejectSeries" />
-### rejectSeries(arr, iterator, callback)
-
-The same as [`reject`](#reject), only the `iterator` is applied to each item in `arr`
-in series.
+__Related__
+* rejectSeries(arr, iterator, [callback])
+* rejectLimit(arr, limit, iterator, [callback])
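+
+For illustration, a sketch that uses `reject` to keep only the missing files:
+
+```js
+async.reject(['file1','file2','file3'], fs.exists, function(results){
+    // results now contains the file names that do *not* exist
+});
+```
+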
---------------------------------------
<a name="reduce" />
-### reduce(arr, memo, iterator, callback)
+### reduce(arr, memo, iterator, [callback])
__Aliases:__ `inject`, `foldl`
Reduces `arr` into a single value using an async `iterator` to return
-each successive step. `memo` is the initial state of the reduction.
-This function only operates in series.
+each successive step. `memo` is the initial state of the reduction.
+This function only operates in series.
-For performance reasons, it may make sense to split a call to this function into
-a parallel map, and then use the normal `Array.prototype.reduce` on the results.
-This function is for situations where each step in the reduction needs to be async;
+For performance reasons, it may make sense to split a call to this function into
+a parallel map, and then use the normal `Array.prototype.reduce` on the results.
+This function is for situations where each step in the reduction needs to be async;
if you can get the data before reducing it, then it's probably a good idea to do so.
__Arguments__
@@ -435,11 +456,11 @@ __Arguments__
* `memo` - The initial state of the reduction.
* `iterator(memo, item, callback)` - A function applied to each item in the
array to produce the next step in the reduction. The `iterator` is passed a
- `callback(err, reduction)` which accepts an optional error as its first
- argument, and the state of the reduction as the second. If an error is
- passed to the callback, the reduction is stopped and the main `callback` is
+ `callback(err, reduction)` which accepts an optional error as its first
+ argument, and the state of the reduction as the second. If an error is
+ passed to the callback, the reduction is stopped and the main `callback` is
immediately called with the error.
-* `callback(err, result)` - A callback which is called after all the `iterator`
+* `callback(err, result)` - *Optional* A callback which is called after all the `iterator`
functions have finished. Result is the reduced value.
__Example__
@@ -458,7 +479,7 @@ async.reduce([1,2,3], 0, function(memo, item, callback){
---------------------------------------
<a name="reduceRight" />
-### reduceRight(arr, memo, iterator, callback)
+### reduceRight(arr, memo, iterator, [callback])
__Alias:__ `foldr`
@@ -468,7 +489,7 @@ Same as [`reduce`](#reduce), only operates on `arr` in reverse order.
---------------------------------------
<a name="detect" />
-### detect(arr, iterator, callback)
+### detect(arr, iterator, [callback])
Returns the first value in `arr` that passes an async truth test. The
`iterator` is applied in parallel, meaning the first iterator to return `true` will
@@ -481,12 +502,12 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in `arr`.
- The iterator is passed a `callback(truthValue)` which must be called with a
- boolean argument once it has completed.
-* `callback(result)` - A callback which is called as soon as any iterator returns
+ The iterator is passed a `callback(truthValue)` which must be called with a
+ boolean argument once it has completed. **Note: this callback does not take an error as its first argument.**
+* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns
`true`, or after all the `iterator` functions have finished. Result will be
the first item in the array that passes the truth test (iterator) or the
- value `undefined` if none passed.
+ value `undefined` if none passed. **Note: this callback does not take an error as its first argument.**
__Example__
@@ -496,20 +517,15 @@ async.detect(['file1','file2','file3'], fs.exists, function(result){
});
```
----------------------------------------
-
-<a name="detectSeries" />
-### detectSeries(arr, iterator, callback)
-
-The same as [`detect`](#detect), only the `iterator` is applied to each item in `arr`
-in series. This means the result is always the first in the original `arr` (in
-terms of array order) that passes the truth test.
+__Related__
+* detectSeries(arr, iterator, [callback])
+* detectLimit(arr, limit, iterator, [callback])
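+
+A sketch of `detectSeries`, which checks the files one at a time and therefore always yields the first match in array order:
+
+```js
+async.detectSeries(['file1','file2','file3'], fs.exists, function(result){
+    // result is the first file (in array order) that exists,
+    // or undefined if none of them do
+});
+```
+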
---------------------------------------
<a name="sortBy" />
-### sortBy(arr, iterator, callback)
+### sortBy(arr, iterator, [callback])
Sorts a list by the results of running each `arr` value through an async `iterator`.
@@ -520,7 +536,7 @@ __Arguments__
The iterator is passed a `callback(err, sortValue)` which must be called once it
has completed with an error (which can be `null`) and a value to use as the sort
criteria.
-* `callback(err, results)` - A callback which is called after all the `iterator`
+* `callback(err, results)` - *Optional* A callback which is called after all the `iterator`
functions have finished, or an error occurs. Results is the items from
the original `arr` sorted by the values returned by the `iterator` calls.
@@ -560,7 +576,7 @@ async.sortBy([1,9,3,5], function(x, callback){
---------------------------------------
<a name="some" />
-### some(arr, iterator, callback)
+### some(arr, iterator, [callback])
__Alias:__ `any`
@@ -574,12 +590,13 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in the array
- in parallel. The iterator is passed a callback(truthValue) which must be
+  in parallel. The iterator is passed a `callback(truthValue)` which must be
called with a boolean argument once it has completed.
-* `callback(result)` - A callback which is called as soon as any iterator returns
+* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns
`true`, or after all the iterator functions have finished. Result will be
either `true` or `false` depending on the values of the async tests.
+ **Note: the callbacks do not take an error as their first argument.**
__Example__
```js
@@ -588,10 +605,14 @@ async.some(['file1','file2','file3'], fs.exists, function(result){
});
```
+__Related__
+
+* someLimit(arr, limit, iterator, callback)
+
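+A sketch of `someLimit`, running at most two existence checks at a time (the callbacks take no error argument, as noted above):
+
+```js
+async.someLimit(['file1','file2','file3'], 2, fs.exists, function(result){
+    // result is true if at least one of the files exists
+});
+```
+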
---------------------------------------
<a name="every" />
-### every(arr, iterator, callback)
+### every(arr, iterator, [callback])
__Alias:__ `all`
@@ -604,12 +625,14 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A truth test to apply to each item in the array
- in parallel. The iterator is passed a callback(truthValue) which must be
+ in parallel. The iterator is passed a `callback(truthValue)` which must be
called with a boolean argument once it has completed.
-* `callback(result)` - A callback which is called after all the `iterator`
+* `callback(result)` - *Optional* A callback which is called after all the `iterator`
functions have finished. Result will be either `true` or `false` depending on
the values of the async tests.
+ **Note: the callbacks do not take an error as their first argument.**
+
__Example__
```js
@@ -618,10 +641,14 @@ async.every(['file1','file2','file3'], fs.exists, function(result){
});
```
+__Related__
+
+* everyLimit(arr, limit, iterator, callback)
+
---------------------------------------
<a name="concat" />
-### concat(arr, iterator, callback)
+### concat(arr, iterator, [callback])
Applies `iterator` to each item in `arr`, concatenating the results. Returns the
concatenated list. The `iterator`s are called in parallel, and the results are
@@ -632,9 +659,9 @@ __Arguments__
* `arr` - An array to iterate over.
* `iterator(item, callback)` - A function to apply to each item in `arr`.
- The iterator is passed a `callback(err, results)` which must be called once it
+ The iterator is passed a `callback(err, results)` which must be called once it
has completed with an error (which can be `null`) and an array of results.
-* `callback(err, results)` - A callback which is called after all the `iterator`
+* `callback(err, results)` - *Optional* A callback which is called after all the `iterator`
functions have finished, or an error occurs. Results is an array containing
the concatenated results of the `iterator` function.
@@ -646,12 +673,9 @@ async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){
});
```
----------------------------------------
+__Related__
-<a name="concatSeries" />
-### concatSeries(arr, iterator, callback)
-
-Same as [`concat`](#concat), but executes in series instead of parallel.
+* concatSeries(arr, iterator, [callback])
## Control Flow
@@ -661,7 +685,7 @@ Same as [`concat`](#concat), but executes in series instead of parallel.
Run the functions in the `tasks` array in series, each one running once the previous
function has completed. If any function in the series passes an error to its
-callback, no more functions are run, and `callback` is immediately called with the value of the error.
+callback, no more functions are run, and `callback` is immediately called with the value of the error.
Otherwise, `callback` receives an array of results when `tasks` have completed.
It is also possible to use an object instead of an array. Each property will be
@@ -670,13 +694,13 @@ instead of an array. This can be a more readable way of handling results from
[`series`](#series).
**Note** that while many implementations preserve the order of object properties, the
-[ECMAScript Language Specifcation](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6)
+[ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6)
explicitly states that
> The mechanics and order of enumerating the properties is not specified.
So if you rely on the order in which your series of functions are executed, and want
-this to work on all platforms, consider using an array.
+this to work on all platforms, consider using an array.
__Arguments__
@@ -684,7 +708,7 @@ __Arguments__
a `callback(err, result)` it must call on completion with an error `err` (which can
be `null`) and an optional `result` value.
* `callback(err, results)` - An optional callback to run once all the functions
- have completed. This function gets a results array (or object) containing all
+ have completed. This function gets a results array (or object) containing all
the result arguments passed to the `task` callbacks.
__Example__
@@ -735,6 +759,8 @@ callback, the main `callback` is immediately called with the value of the error.
Once the `tasks` have completed, the results are passed to the final `callback` as an
array.
+**Note:** `parallel` is about kicking off I/O tasks in parallel, not about parallel execution of code. If your tasks do not use any timers or perform any I/O, they will actually be executed in series. Any synchronous setup sections for each task will happen one after the other. JavaScript remains single-threaded.
+
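+A minimal illustration of that note: these tasks perform no I/O, so their bodies run strictly one after the other even though they are passed to `parallel`:
+
+```js
+async.parallel([
+    function(callback) {
+        console.log('task one');  // logged first
+        callback(null, 1);
+    },
+    function(callback) {
+        console.log('task two');  // logged second, not concurrently
+        callback(null, 2);
+    }
+], function(err, results) {
+    // results is [1, 2]
+});
+```
+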
It is also possible to use an object instead of an array. Each property will be
run as a function and the results will be passed to the final `callback` as an object
instead of an array. This can be a more readable way of handling results from
@@ -743,11 +769,11 @@ instead of an array. This can be a more readable way of handling results from
__Arguments__
-* `tasks` - An array or object containing functions to run. Each function is passed
- a `callback(err, result)` which it must call on completion with an error `err`
+* `tasks` - An array or object containing functions to run. Each function is passed
+ a `callback(err, result)` which it must call on completion with an error `err`
(which can be `null`) and an optional `result` value.
* `callback(err, results)` - An optional callback to run once all the functions
- have completed. This function gets a results array (or object) containing all
+ have completed. This function gets a results array (or object) containing all
the result arguments passed to the task callbacks.
__Example__
@@ -790,26 +816,9 @@ function(err, results) {
});
```
----------------------------------------
-
-<a name="parallelLimit" />
-### parallelLimit(tasks, limit, [callback])
-
-The same as [`parallel`](#parallel), only `tasks` are executed in parallel
-with a maximum of `limit` tasks executing at any time.
+__Related__
-Note that the `tasks` are not executed in batches, so there is no guarantee that
-the first `limit` tasks will complete before any others are started.
-
-__Arguments__
-
-* `tasks` - An array or object containing functions to run, each function is passed
- a `callback(err, result)` it must call on completion with an error `err` (which can
- be `null`) and an optional `result` value.
-* `limit` - The maximum number of `tasks` to run at any time.
-* `callback(err, results)` - An optional callback to run once all the functions
- have completed. This function gets a results array (or object) containing all
- the result arguments passed to the `task` callbacks.
+* parallelLimit(tasks, limit, [callback])
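+
+A sketch of `parallelLimit` with a limit of `1`, which makes the tasks run one at a time while still collecting results in task order:
+
+```js
+async.parallelLimit([
+    function(callback) {
+        setTimeout(function() { callback(null, 'one'); }, 200);
+    },
+    function(callback) {
+        setTimeout(function() { callback(null, 'two'); }, 100);
+    }
+], 1, function(err, results) {
+    // results is ['one', 'two']
+});
+```
+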
---------------------------------------
@@ -823,7 +832,7 @@ __Arguments__
* `test()` - synchronous truth test to perform before each execution of `fn`.
* `fn(callback)` - A function which is called each time `test` passes. The function is
- passed a `callback(err)`, which must be called once it has completed with an
+ passed a `callback(err)`, which must be called once it has completed with an
optional `err` argument.
* `callback(err)` - A callback which is called after the test fails and repeated
execution of `fn` has stopped.
@@ -850,8 +859,8 @@ async.whilst(
<a name="doWhilst" />
### doWhilst(fn, test, callback)
-The post-check version of [`whilst`](#whilst). To reflect the difference in
-the order of operations, the arguments `test` and `fn` are switched.
+The post-check version of [`whilst`](#whilst). To reflect the difference in
+the order of operations, the arguments `test` and `fn` are switched.
`doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.
@@ -874,8 +883,44 @@ Like [`doWhilst`](#doWhilst), except the `test` is inverted. Note the argument o
---------------------------------------
+<a name="during" />
+### during(test, fn, callback)
+
+Like [`whilst`](#whilst), except the `test` is an asynchronous function that is passed a callback in the form of `function (err, truth)`. If either `test` or `fn` passes an error to its callback, the main callback is immediately called with the value of the error.
+
+__Example__
+
+```js
+var count = 0;
+
+async.during(
+ function (callback) {
+ return callback(null, count < 5);
+ },
+ function (callback) {
+ count++;
+ setTimeout(callback, 1000);
+ },
+ function (err) {
+ // 5 seconds have passed
+ }
+);
+```
+
+---------------------------------------
+
+<a name="doDuring" />
+### doDuring(fn, test, callback)
+
+The post-check version of [`during`](#during). To reflect the difference in
+the order of operations, the arguments `test` and `fn` are switched.
+
+Also a version of [`doWhilst`](#doWhilst) with asynchronous `test` function.
+
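+As a sketch, a `doDuring` loop equivalent to the `during` example above, except that `fn` always runs at least once before the first `test`:
+
+```js
+var count = 0;
+
+async.doDuring(
+    function (callback) {
+        count++;
+        setTimeout(callback, 1000);
+    },
+    function (callback) {
+        return callback(null, count < 5);
+    },
+    function (err) {
+        // fn was executed 5 times
+    }
+);
+```
+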
+---------------------------------------
+
<a name="forever" />
-### forever(fn, errback)
+### forever(fn, [errback])
Calls the asynchronous function `fn` with a callback parameter that allows it to
call itself again, in series, indefinitely.
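+
+For illustration, a minimal sketch of `forever` (`pollForMessages` is a hypothetical async function; passing an error to `next` stops the loop and invokes `errback`):
+
+```js
+async.forever(
+    function(next) {
+        // pollForMessages is a hypothetical helper that calls next(err) when done;
+        // calling next with a non-null err stops the loop
+        pollForMessages(next);
+    },
+    function(err) {
+        // only reached if an error was passed to next
+        console.error(err);
+    }
+);
+```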
@@ -908,9 +953,9 @@ the error.
__Arguments__
-* `tasks` - An array of functions to run, each function is passed a
+* `tasks` - An array of functions to run, each function is passed a
`callback(err, result1, result2, ...)` it must call on completion. The first
- argument is an error (which can be `null`) and any further arguments will be
+ argument is an error (which can be `null`) and any further arguments will be
passed as arguments in order to the next task.
* `callback(err, [results])` - An optional callback to run once all the functions
have completed. This will be passed the results of the last task's callback.
@@ -987,7 +1032,7 @@ Each function is executed with the `this` binding of the composed function.
__Arguments__
-* functions... - the asynchronous functions to compose
+* `functions...` - the asynchronous functions to compose
__Example__
@@ -995,7 +1040,7 @@ __Example__
```js
// Requires lodash (or underscore), express3 and dresende's orm2.
// Part of an app, that fetches cats of the logged user.
-// This example uses `seq` function to avoid overnesting and error
+// This example uses `seq` function to avoid overnesting and error
// handling clutter.
app.get('/cats', function(request, response) {
var User = request.models.User;
@@ -1019,7 +1064,7 @@ app.get('/cats', function(request, response) {
<a name="applyEach" />
### applyEach(fns, args..., callback)
-Applies the provided arguments to each function in the array, calling
+Applies the provided arguments to each function in the array, calling
`callback` after all functions have completed. If you only provide the first
argument, then it will return a function which lets you pass in the
arguments as if it were a single function call.
@@ -1045,30 +1090,27 @@ async.each(
);
```
----------------------------------------
-
-<a name="applyEachSeries" />
-### applyEachSeries(arr, iterator, callback)
+__Related__
-The same as [`applyEach`](#applyEach) only the functions are applied in series.
+* applyEachSeries(tasks, args..., [callback])
---------------------------------------
<a name="queue" />
-### queue(worker, concurrency)
+### queue(worker, [concurrency])
Creates a `queue` object with the specified `concurrency`. Tasks added to the
`queue` are processed in parallel (up to the `concurrency` limit). If all
-`worker`s are in progress, the task is queued until one becomes available.
+`worker`s are in progress, the task is queued until one becomes available.
Once a `worker` completes a `task`, that `task`'s callback is called.
__Arguments__
* `worker(task, callback)` - An asynchronous function for processing a queued
- task, which must call its `callback(err)` argument when finished, with an
- optional `error` as an argument.
+ task, which must call its `callback(err)` argument when finished, with an
+ optional `error` as an argument. If you want to handle errors from an individual task, pass a callback to `q.push()`.
* `concurrency` - An `integer` for determining how many `worker` functions should be
- run in parallel.
+ run in parallel. If omitted, the concurrency defaults to `1`. If the concurrency is `0`, an error is thrown.
__Queue objects__
@@ -1082,11 +1124,11 @@ methods:
* `concurrency` - an integer for determining how many `worker` functions should be
run in parallel. This property can be changed after a `queue` is created to
alter the concurrency on-the-fly.
-* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once
+* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once
the `worker` has finished processing the task. Instead of a single task, a `tasks` array
can be submitted. The respective callback is used for every task in the list.
* `unshift(task, [callback])` - add a new task to the front of the `queue`.
-* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit,
+* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit,
and further tasks will be queued.
* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`.
* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`.
@@ -1154,7 +1196,7 @@ Creates a `cargo` object with the specified payload. Tasks added to the
cargo will be processed altogether (up to the `payload` limit). If the
`worker` is in progress, the task is queued until it becomes available. Once
the `worker` has completed some tasks, each callback of those tasks is called.
-Check out [this animation](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) for how `cargo` and `queue` work.
+Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) for how `cargo` and `queue` work.
While [queue](#queue) passes only one task to one of a group of workers
at a time, cargo passes an array of tasks to a single worker, repeating
@@ -1163,7 +1205,7 @@ when the worker is finished.
__Arguments__
* `worker(tasks, callback)` - An asynchronous function for processing an array of
- queued tasks, which must call its `callback(err)` argument when finished, with
+ queued tasks, which must call its `callback(err)` argument when finished, with
an optional `err` argument.
* `payload` - An optional `integer` for determining how many tasks should be
processed per round; if omitted, the default is unlimited.
@@ -1178,11 +1220,12 @@ methods:
process per round. This property can be changed after a `cargo` is created to
alter the payload on-the-fly.
* `push(task, [callback])` - Adds `task` to the `queue`. The callback is called
- once the `worker` has finished processing the task. Instead of a single task, an array of `tasks`
+ once the `worker` has finished processing the task. Instead of a single task, an array of `tasks`
can be submitted. The respective callback is used for every task in the list.
* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued.
* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`.
* `drain` - A callback that is called when the last item from the `queue` has returned from the `worker`.
+* `idle()`, `pause()`, `resume()`, `kill()` - cargo inherits all of the same methods and event callbacks as [`queue`](#queue)
__Example__
@@ -1215,18 +1258,13 @@ cargo.push({name: 'baz'}, function (err) {
<a name="auto" />
### auto(tasks, [callback])
-Determines the best order for running the functions in `tasks`, based on their
-requirements. Each function can optionally depend on other functions being completed
-first, and each function is run as soon as its requirements are satisfied.
+Determines the best order for running the functions in `tasks`, based on their requirements. Each function can optionally depend on other functions being completed first, and each function is run as soon as its requirements are satisfied.
-If any of the functions pass an error to their callback, it will not
-complete (so any other functions depending on it will not run), and the main
-`callback` is immediately called with the error. Functions also receive an
-object containing the results of functions which have completed so far.
+If any of the functions pass an error to their callback, the `auto` sequence will stop. Further tasks will not execute (so any other functions depending on it will not run), and the main `callback` is immediately called with the error. Functions also receive an object containing the results of functions which have completed so far.
-Note, all functions are called with a `results` object as a second argument,
+Note, all functions are called with a `results` object as a second argument,
so it is unsafe to pass functions in the `tasks` object which cannot handle the
-extra argument.
+extra argument.
For example, this snippet of code:
@@ -1243,7 +1281,7 @@ argument, which will fail:
fs.readFile('data.txt', 'utf-8', cb, {});
```
-Instead, wrap the call to `readFile` in a function which does not forward the
+Instead, wrap the call to `readFile` in a function which does not forward the
`results` object:
```js
@@ -1260,13 +1298,13 @@ __Arguments__
requirements, with the function itself the last item in the array. The object's key
of a property serves as the name of the task defined by that property,
i.e. can be used when specifying requirements for other tasks.
- The function receives two arguments: (1) a `callback(err, result)` which must be
- called when finished, passing an `error` (which can be `null`) and the result of
+ The function receives two arguments: (1) a `callback(err, result)` which must be
+ called when finished, passing an `error` (which can be `null`) and the result of
the function's execution, and (2) a `results` object, containing the results of
the previously executed functions.
* `callback(err, results)` - An optional callback which is called when all the
- tasks have been completed. It receives the `err` argument if any `tasks`
- pass an error to their callback. Results are always returned; however, if
+ tasks have been completed. It receives the `err` argument if any `tasks`
+ pass an error to their callback. Results are always returned; however, if
an error occurs, no further `tasks` will be performed, and the results
object will only contain partial results.
@@ -1346,7 +1384,7 @@ new tasks much easier (and the code more readable).
---------------------------------------
<a name="retry" />
-### retry([times = 5], task, [callback])
+### retry([opts = {times: 5, interval: 0}| 5], task, [callback])
Attempts to get a successful response from `task` no more than `times` times before
returning an error. If the task is successful, the `callback` will be passed the result
@@ -1355,9 +1393,10 @@ result (if any) of the final attempt.
__Arguments__
-* `times` - An integer indicating how many times to attempt the `task` before giving up. Defaults to 5.
+* `opts` - Can be either an object with `times` and `interval` or a number. `times` is how many attempts should be made before giving up. `interval` is how long to wait in between attempts. Defaults to `{times: 5, interval: 0}`.
+  * If a number is passed in, it sets `times` only (with `interval` defaulting to `0`).
* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)`
- which must be called when finished, passing `err` (which can be `null`) and the `result` of
+ which must be called when finished, passing `err` (which can be `null`) and the `result` of
the function's execution, and (2) a `results` object, containing the results of
the previously executed functions (if nested inside another control flow).
* `callback(err, results)` - An optional callback which is called when the
@@ -1372,6 +1411,12 @@ async.retry(3, apiMethod, function(err, result) {
});
```
+```js
+async.retry({times: 3, interval: 200}, apiMethod, function(err, result) {
+ // do something with the result
+});
+```
+
It can also be embedded within other control flow functions to retry individual methods
that are not as reliable, like this:
@@ -1426,7 +1471,7 @@ node> nextfn();
<a name="apply" />
### apply(function, arguments..)
-Creates a continuation function with some arguments already applied.
+Creates a continuation function with some arguments already applied.
Useful as a shorthand when combined with other control flow functions. Any arguments
passed to the returned function are added to the arguments originally passed
@@ -1500,15 +1545,16 @@ call_order.push('one')
```
<a name="times" />
-### times(n, callback)
+### times(n, iterator, [callback])
-Calls the `callback` function `n` times, and accumulates results in the same manner
+Calls the `iterator` function `n` times, and accumulates results in the same manner
you would use with [`map`](#map).
__Arguments__
* `n` - The number of times to run the function.
-* `callback` - The function to call `n` times.
+* `iterator` - The function to call `n` times.
+* `callback` - see [`map`](#map)
__Example__
@@ -1529,12 +1575,10 @@ async.times(5, function(n, next){
});
```
-<a name="timesSeries" />
-### timesSeries(n, callback)
+__Related__
-The same as [`times`](#times), only the iterator is applied to each item in `arr` in
-series. The next `iterator` is only called once the current one has completed.
-The results array will be in the same order as the original.
+* timesSeries(n, iterator, [callback])
+* timesLimit(n, limit, iterator, [callback])
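+
+A sketch of `timesLimit`, assuming a `createUser(id, callback)` helper like the one used in the `times` example, with at most two calls in flight:
+
+```js
+async.timesLimit(5, 2, function(n, next) {
+    // createUser is assumed to call back with (err, user)
+    createUser(n, function(err, user) {
+        next(err, user);
+    });
+}, function(err, users) {
+    // at most two createUser calls were in flight at any time
+});
+```
+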
## Utils
@@ -1546,13 +1590,15 @@ Caches the results of an `async` function. When creating a hash to store functio
results against, the callback is omitted from the hash and an optional hash
function can be used.
+If no hash function is specified, the first argument is used as a hash key, which may work reasonably if it is a string or a data type that converts to a distinct string. Note that objects and arrays will not behave reasonably, nor will cases where the other arguments are significant. In such cases, specify your own hash function.
+
The cache of results is exposed as the `memo` property of the function returned
by `memoize`.
__Arguments__
* `fn` - The function to proxy and cache results from.
-* `hasher` - Tn optional function for generating a custom hash for storing
+* `hasher` - An optional function for generating a custom hash for storing
results. It has all the arguments applied to it apart from the callback, and
must be synchronous.
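+
+For instance, a sketch of supplying a custom hasher when the arguments are objects (`lookup` is a hypothetical async function taking an options object):
+
+```js
+var memoized = async.memoize(lookup, function (opts) {
+    // hash on a stable string form of the options object
+    return JSON.stringify(opts);
+});
+
+memoized({id: 1, lang: "en"}, function (err, result) {
+    // subsequent calls with an equivalent options object hit the cache
+});
+```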
@@ -1581,6 +1627,102 @@ __Arguments__
* `fn` - the memoized function
+---------------------------------------
+
+<a name="ensureAsync" />
+### ensureAsync(fn)
+
+Wrap an async function and ensure it calls its callback on a later tick of the event loop. If the function already calls its callback on a next tick, no extra deferral is added. This is useful for preventing stack overflows (`RangeError: Maximum call stack size exceeded`) and generally keeping [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) contained.
+
+__Arguments__
+
+* `fn` - an async function, one that expects a node-style callback as its last argument
+
+Returns a wrapped function with the exact same call signature as the function passed in.
+
+__Example__
+
+```js
+function sometimesAsync(arg, callback) {
+ if (cache[arg]) {
+ return callback(null, cache[arg]); // this would be synchronous!!
+ } else {
+ doSomeIO(arg, callback); // this IO would be asynchronous
+ }
+}
+
+// this has a risk of stack overflows if many results are cached in a row
+async.mapSeries(args, sometimesAsync, done);
+
+// this will defer sometimesAsync's callback if necessary,
+// preventing stack overflows
+async.mapSeries(args, async.ensureAsync(sometimesAsync), done);
+
+```
+
+---------------------------------------
+
+<a name="constant">
+### constant(values...)
+
+Returns a function that, when called, calls back with the values provided. Useful as the first function in a `waterfall`, or for plugging values into `auto`.
+
+__Example__
+
+```js
+async.waterfall([
+ async.constant(42),
+ function (value, next) {
+ // value === 42
+ },
+ //...
+], callback);
+
+async.waterfall([
+ async.constant(filename, "utf8"),
+ fs.readFile,
+ function (fileData, next) {
+ //...
+ }
+ //...
+], callback);
+
+async.auto({
+ hostname: async.constant("https://server.net/"),
+ port: findFreePort,
+ launchServer: ["hostname", "port", function (cb, options) {
+ startServer(options, cb);
+ }],
+ //...
+}, callback);
+
+```
+
+---------------------------------------
+
+<a name="asyncify">
+<a name="wrapSync">
+### asyncify(func)
+
+__Alias:__ `wrapSync`
+
+Take a sync function and make it async, passing its return value to a callback. This is useful for plugging sync functions into a waterfall, series, or other async functions. Any arguments passed to the generated function will be passed to the wrapped function (except for the final callback argument). Errors thrown will be passed to the callback.
+
+__Example__
+
+```js
+async.waterfall([
+ async.apply(fs.readFile, filename, "utf8"),
+ async.asyncify(JSON.parse),
+ function (data, next) {
+ // data is the result of parsing the text.
+ // If there was a parsing error, it would have been caught.
+ }
+], callback)
+```
+
+---------------------------------------
+
<a name="log" />
### log(function, arguments)
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/bower.json b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/bower.json
deleted file mode 100644
index 18176881e..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/bower.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "async",
- "description": "Higher-order functions and common patterns for asynchronous code",
- "version": "0.9.2",
- "main": "lib/async.js",
- "keywords": [
- "async",
- "callback",
- "utility",
- "module"
- ],
- "license": "MIT",
- "repository": {
- "type": "git",
- "url": "https://github.com/caolan/async.git"
- },
- "devDependencies": {
- "nodeunit": ">0.0.0",
- "uglify-js": "1.2.x",
- "nodelint": ">0.0.0",
- "lodash": ">=2.4.1"
- },
- "moduleType": [
- "amd",
- "globals",
- "node"
- ],
- "ignore": [
- "**/.*",
- "node_modules",
- "bower_components",
- "test",
- "tests"
- ],
- "authors": [
- "Caolan McMahon"
- ]
-} \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/component.json b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/component.json
deleted file mode 100644
index 5003a7c52..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/component.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "name": "async",
- "description": "Higher-order functions and common patterns for asynchronous code",
- "version": "0.9.2",
- "keywords": [
- "async",
- "callback",
- "utility",
- "module"
- ],
- "license": "MIT",
- "repository": "caolan/async",
- "scripts": [
- "lib/async.js"
- ]
-} \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js
index 394c41cad..f3cfb8071 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/lib/async.js
@@ -5,18 +5,32 @@
* Copyright 2010-2014 Caolan McMahon
* Released under the MIT license
*/
-/*jshint onevar: false, indent:4 */
-/*global setImmediate: false, setTimeout: false, console: false */
(function () {
var async = {};
+ function noop() {}
+ function identity(v) {
+ return v;
+ }
+ function toBool(v) {
+ return !!v;
+ }
+ function notId(v) {
+ return !v;
+ }
// global on the server, window in the browser
- var root, previous_async;
+ var previous_async;
+
+ // Establish the root object, `window` (`self`) in the browser, `global`
+ // on the server, or `this` in some virtual machines. We use `self`
+ // instead of `window` for `WebWorker` support.
+ var root = typeof self === 'object' && self.self === self && self ||
+ typeof global === 'object' && global.global === global && global ||
+ this;
- root = this;
if (root != null) {
- previous_async = root.async;
+ previous_async = root.async;
}
async.noConflict = function () {
@@ -25,12 +39,19 @@
};
function only_once(fn) {
- var called = false;
return function() {
- if (called) throw new Error("Callback was already called.");
- called = true;
- fn.apply(root, arguments);
- }
+ if (fn === null) throw new Error("Callback was already called.");
+ fn.apply(this, arguments);
+ fn = null;
+ };
+ }
+
+ function _once(fn) {
+ return function() {
+ if (fn === null) return;
+ fn.apply(this, arguments);
+ fn = null;
+ };
}
//// cross-browser compatiblity functions ////
@@ -41,37 +62,66 @@
return _toString.call(obj) === '[object Array]';
};
- var _each = function (arr, iterator) {
- for (var i = 0; i < arr.length; i += 1) {
- iterator(arr[i], i, arr);
- }
- };
+ function _isArrayLike(arr) {
+ return _isArray(arr) || (
+ // has a positive integer length property
+ typeof arr.length === "number" &&
+ arr.length >= 0 &&
+ arr.length % 1 === 0
+ );
+ }
- var _map = function (arr, iterator) {
- if (arr.map) {
- return arr.map(iterator);
+ function _each(coll, iterator) {
+ return _isArrayLike(coll) ?
+ _arrayEach(coll, iterator) :
+ _forEachOf(coll, iterator);
+ }
+
+ function _arrayEach(arr, iterator) {
+ var index = -1,
+ length = arr.length;
+
+ while (++index < length) {
+ iterator(arr[index], index, arr);
}
- var results = [];
- _each(arr, function (x, i, a) {
- results.push(iterator(x, i, a));
- });
- return results;
- };
+ }
+
+ function _map(arr, iterator) {
+ var index = -1,
+ length = arr.length,
+ result = Array(length);
- var _reduce = function (arr, iterator, memo) {
- if (arr.reduce) {
- return arr.reduce(iterator, memo);
+ while (++index < length) {
+ result[index] = iterator(arr[index], index, arr);
}
- _each(arr, function (x, i, a) {
+ return result;
+ }
+
+ function _range(count) {
+ return _map(Array(count), function (v, i) { return i; });
+ }
+
+ function _reduce(arr, iterator, memo) {
+ _arrayEach(arr, function (x, i, a) {
memo = iterator(memo, x, i, a);
});
return memo;
- };
+ }
- var _keys = function (obj) {
- if (Object.keys) {
- return Object.keys(obj);
+ function _forEachOf(object, iterator) {
+ _arrayEach(_keys(object), function (key) {
+ iterator(object[key], key);
+ });
+ }
+
+ function _indexOf(arr, item) {
+ for (var i = 0; i < arr.length; i++) {
+ if (arr[i] === item) return i;
}
+ return -1;
+ }
+
+ var _keys = Object.keys || function (obj) {
var keys = [];
for (var k in obj) {
if (obj.hasOwnProperty(k)) {
@@ -81,311 +131,337 @@
return keys;
};
- //// exported async module functions ////
-
- //// nextTick implementation with browser-compatible fallback ////
- if (typeof process === 'undefined' || !(process.nextTick)) {
- if (typeof setImmediate === 'function') {
- async.nextTick = function (fn) {
- // not a direct alias for IE10 compatibility
- setImmediate(fn);
+ function _keyIterator(coll) {
+ var i = -1;
+ var len;
+ var keys;
+ if (_isArrayLike(coll)) {
+ len = coll.length;
+ return function next() {
+ i++;
+ return i < len ? i : null;
};
- async.setImmediate = async.nextTick;
- }
- else {
- async.nextTick = function (fn) {
- setTimeout(fn, 0);
+ } else {
+ keys = _keys(coll);
+ len = keys.length;
+ return function next() {
+ i++;
+ return i < len ? keys[i] : null;
};
- async.setImmediate = async.nextTick;
}
}
- else {
+
+ // Similar to ES6's rest param (http://ariya.ofilabs.com/2013/03/es6-and-rest-parameter.html)
+ // This accumulates the arguments passed into an array, after a given index.
+ // From underscore.js (https://github.com/jashkenas/underscore/pull/2140).
+ function _restParam(func, startIndex) {
+ startIndex = startIndex == null ? func.length - 1 : +startIndex;
+ return function() {
+ var length = Math.max(arguments.length - startIndex, 0);
+ var rest = Array(length);
+ for (var index = 0; index < length; index++) {
+ rest[index] = arguments[index + startIndex];
+ }
+ switch (startIndex) {
+ case 0: return func.call(this, rest);
+ case 1: return func.call(this, arguments[0], rest);
+ case 2: return func.call(this, arguments[0], arguments[1], rest);
+ }
+ // Currently unused but handle cases outside of the switch statement:
+ // var args = Array(startIndex + 1);
+ // for (index = 0; index < startIndex; index++) {
+ // args[index] = arguments[index];
+ // }
+ // args[startIndex] = rest;
+ // return func.apply(this, args);
+ };
+ }
+
+ function _withoutIndex(iterator) {
+ return function (value, index, callback) {
+ return iterator(value, callback);
+ };
+ }
+
+ //// exported async module functions ////
+
+ //// nextTick implementation with browser-compatible fallback ////
+
+ // capture the global reference to guard against fakeTimer mocks
+ var _setImmediate = typeof setImmediate === 'function' && setImmediate;
+
+ var _delay = _setImmediate ? function(fn) {
+ // not a direct alias for IE10 compatibility
+ _setImmediate(fn);
+ } : function(fn) {
+ setTimeout(fn, 0);
+ };
+
+ if (typeof process === 'object' && typeof process.nextTick === 'function') {
async.nextTick = process.nextTick;
- if (typeof setImmediate !== 'undefined') {
- async.setImmediate = function (fn) {
- // not a direct alias for IE10 compatibility
- setImmediate(fn);
- };
- }
- else {
- async.setImmediate = async.nextTick;
- }
+ } else {
+ async.nextTick = _delay;
}
+ async.setImmediate = _setImmediate ? _delay : async.nextTick;
+
+ async.forEach =
async.each = function (arr, iterator, callback) {
- callback = callback || function () {};
- if (!arr.length) {
- return callback();
- }
+ return async.eachOf(arr, _withoutIndex(iterator), callback);
+ };
+
+ async.forEachSeries =
+ async.eachSeries = function (arr, iterator, callback) {
+ return async.eachOfSeries(arr, _withoutIndex(iterator), callback);
+ };
+
+
+ async.forEachLimit =
+ async.eachLimit = function (arr, limit, iterator, callback) {
+ return _eachOfLimit(limit)(arr, _withoutIndex(iterator), callback);
+ };
+
+ async.forEachOf =
+ async.eachOf = function (object, iterator, callback) {
+ callback = _once(callback || noop);
+ object = object || [];
+ var size = _isArrayLike(object) ? object.length : _keys(object).length;
var completed = 0;
- _each(arr, function (x) {
- iterator(x, only_once(done) );
+ if (!size) {
+ return callback(null);
+ }
+ _each(object, function (value, key) {
+ iterator(object[key], key, only_once(done));
});
function done(err) {
- if (err) {
- callback(err);
- callback = function () {};
- }
- else {
- completed += 1;
- if (completed >= arr.length) {
- callback();
- }
- }
+ if (err) {
+ callback(err);
+ }
+ else {
+ completed += 1;
+ if (completed >= size) {
+ callback(null);
+ }
+ }
}
};
- async.forEach = async.each;
- async.eachSeries = function (arr, iterator, callback) {
- callback = callback || function () {};
- if (!arr.length) {
- return callback();
- }
- var completed = 0;
- var iterate = function () {
- iterator(arr[completed], function (err) {
+ async.forEachOfSeries =
+ async.eachOfSeries = function (obj, iterator, callback) {
+ callback = _once(callback || noop);
+ obj = obj || [];
+ var nextKey = _keyIterator(obj);
+ var key = nextKey();
+ function iterate() {
+ var sync = true;
+ if (key === null) {
+ return callback(null);
+ }
+ iterator(obj[key], key, only_once(function (err) {
if (err) {
callback(err);
- callback = function () {};
}
else {
- completed += 1;
- if (completed >= arr.length) {
- callback();
- }
- else {
- iterate();
+ key = nextKey();
+ if (key === null) {
+ return callback(null);
+ } else {
+ if (sync) {
+ async.nextTick(iterate);
+ } else {
+ iterate();
+ }
}
}
- });
- };
+ }));
+ sync = false;
+ }
iterate();
};
- async.forEachSeries = async.eachSeries;
- async.eachLimit = function (arr, limit, iterator, callback) {
- var fn = _eachLimit(limit);
- fn.apply(null, [arr, iterator, callback]);
+
+
+ async.forEachOfLimit =
+ async.eachOfLimit = function (obj, limit, iterator, callback) {
+ _eachOfLimit(limit)(obj, iterator, callback);
};
- async.forEachLimit = async.eachLimit;
- var _eachLimit = function (limit) {
+ function _eachOfLimit(limit) {
- return function (arr, iterator, callback) {
- callback = callback || function () {};
- if (!arr.length || limit <= 0) {
- return callback();
+ return function (obj, iterator, callback) {
+ callback = _once(callback || noop);
+ obj = obj || [];
+ var nextKey = _keyIterator(obj);
+ if (limit <= 0) {
+ return callback(null);
}
- var completed = 0;
- var started = 0;
+ var done = false;
var running = 0;
+ var errored = false;
(function replenish () {
- if (completed >= arr.length) {
- return callback();
+ if (done && running <= 0) {
+ return callback(null);
}
- while (running < limit && started < arr.length) {
- started += 1;
+ while (running < limit && !errored) {
+ var key = nextKey();
+ if (key === null) {
+ done = true;
+ if (running <= 0) {
+ callback(null);
+ }
+ return;
+ }
running += 1;
- iterator(arr[started - 1], function (err) {
+ iterator(obj[key], key, only_once(function (err) {
+ running -= 1;
if (err) {
callback(err);
- callback = function () {};
+ errored = true;
}
else {
- completed += 1;
- running -= 1;
- if (completed >= arr.length) {
- callback();
- }
- else {
- replenish();
- }
+ replenish();
}
- });
+ }));
}
})();
};
- };
+ }
- var doParallel = function (fn) {
- return function () {
- var args = Array.prototype.slice.call(arguments);
- return fn.apply(null, [async.each].concat(args));
+ function doParallel(fn) {
+ return function (obj, iterator, callback) {
+ return fn(async.eachOf, obj, iterator, callback);
};
- };
- var doParallelLimit = function(limit, fn) {
- return function () {
- var args = Array.prototype.slice.call(arguments);
- return fn.apply(null, [_eachLimit(limit)].concat(args));
+ }
+ function doParallelLimit(fn) {
+ return function (obj, limit, iterator, callback) {
+ return fn(_eachOfLimit(limit), obj, iterator, callback);
};
- };
- var doSeries = function (fn) {
- return function () {
- var args = Array.prototype.slice.call(arguments);
- return fn.apply(null, [async.eachSeries].concat(args));
+ }
+ function doSeries(fn) {
+ return function (obj, iterator, callback) {
+ return fn(async.eachOfSeries, obj, iterator, callback);
};
- };
-
+ }
- var _asyncMap = function (eachfn, arr, iterator, callback) {
- arr = _map(arr, function (x, i) {
- return {index: i, value: x};
- });
- if (!callback) {
- eachfn(arr, function (x, callback) {
- iterator(x.value, function (err) {
- callback(err);
- });
- });
- } else {
- var results = [];
- eachfn(arr, function (x, callback) {
- iterator(x.value, function (err, v) {
- results[x.index] = v;
- callback(err);
- });
- }, function (err) {
- callback(err, results);
+ function _asyncMap(eachfn, arr, iterator, callback) {
+ callback = _once(callback || noop);
+ var results = [];
+ eachfn(arr, function (value, index, callback) {
+ iterator(value, function (err, v) {
+ results[index] = v;
+ callback(err);
});
- }
- };
+ }, function (err) {
+ callback(err, results);
+ });
+ }
+
async.map = doParallel(_asyncMap);
async.mapSeries = doSeries(_asyncMap);
- async.mapLimit = function (arr, limit, iterator, callback) {
- return _mapLimit(limit)(arr, iterator, callback);
- };
-
- var _mapLimit = function(limit) {
- return doParallelLimit(limit, _asyncMap);
- };
+ async.mapLimit = doParallelLimit(_asyncMap);
// reduce only has a series version, as doing reduce in parallel won't
// work in many situations.
+ async.inject =
+ async.foldl =
async.reduce = function (arr, memo, iterator, callback) {
- async.eachSeries(arr, function (x, callback) {
+ async.eachOfSeries(arr, function (x, i, callback) {
iterator(memo, x, function (err, v) {
memo = v;
callback(err);
});
}, function (err) {
- callback(err, memo);
+ callback(err || null, memo);
});
};
- // inject alias
- async.inject = async.reduce;
- // foldl alias
- async.foldl = async.reduce;
+ async.foldr =
async.reduceRight = function (arr, memo, iterator, callback) {
- var reversed = _map(arr, function (x) {
- return x;
- }).reverse();
+ var reversed = _map(arr, identity).reverse();
async.reduce(reversed, memo, iterator, callback);
};
- // foldr alias
- async.foldr = async.reduceRight;
- var _filter = function (eachfn, arr, iterator, callback) {
+ function _filter(eachfn, arr, iterator, callback) {
var results = [];
- arr = _map(arr, function (x, i) {
- return {index: i, value: x};
- });
- eachfn(arr, function (x, callback) {
- iterator(x.value, function (v) {
+ eachfn(arr, function (x, index, callback) {
+ iterator(x, function (v) {
if (v) {
- results.push(x);
+ results.push({index: index, value: x});
}
callback();
});
- }, function (err) {
+ }, function () {
callback(_map(results.sort(function (a, b) {
return a.index - b.index;
}), function (x) {
return x.value;
}));
});
- };
+ }
+
+ async.select =
async.filter = doParallel(_filter);
+
+ async.selectLimit =
+ async.filterLimit = doParallelLimit(_filter);
+
+ async.selectSeries =
async.filterSeries = doSeries(_filter);
- // select alias
- async.select = async.filter;
- async.selectSeries = async.filterSeries;
- var _reject = function (eachfn, arr, iterator, callback) {
- var results = [];
- arr = _map(arr, function (x, i) {
- return {index: i, value: x};
- });
- eachfn(arr, function (x, callback) {
- iterator(x.value, function (v) {
- if (!v) {
- results.push(x);
- }
- callback();
+ function _reject(eachfn, arr, iterator, callback) {
+ _filter(eachfn, arr, function(value, cb) {
+ iterator(value, function(v) {
+ cb(!v);
});
- }, function (err) {
- callback(_map(results.sort(function (a, b) {
- return a.index - b.index;
- }), function (x) {
- return x.value;
- }));
- });
- };
+ }, callback);
+ }
async.reject = doParallel(_reject);
+ async.rejectLimit = doParallelLimit(_reject);
async.rejectSeries = doSeries(_reject);
- var _detect = function (eachfn, arr, iterator, main_callback) {
- eachfn(arr, function (x, callback) {
- iterator(x, function (result) {
- if (result) {
- main_callback(x);
- main_callback = function () {};
- }
- else {
+ function _createTester(eachfn, check, getResult) {
+ return function(arr, limit, iterator, cb) {
+ function done() {
+ if (cb) cb(getResult(false, void 0));
+ }
+ function iteratee(x, _, callback) {
+ if (!cb) return callback();
+ iterator(x, function (v) {
+ if (cb && check(v)) {
+ cb(getResult(true, x));
+ cb = iterator = false;
+ }
callback();
- }
- });
- }, function (err) {
- main_callback();
- });
- };
- async.detect = doParallel(_detect);
- async.detectSeries = doSeries(_detect);
+ });
+ }
+ if (arguments.length > 3) {
+ eachfn(arr, limit, iteratee, done);
+ } else {
+ cb = iterator;
+ iterator = limit;
+ eachfn(arr, iteratee, done);
+ }
+ };
+ }
- async.some = function (arr, iterator, main_callback) {
- async.each(arr, function (x, callback) {
- iterator(x, function (v) {
- if (v) {
- main_callback(true);
- main_callback = function () {};
- }
- callback();
- });
- }, function (err) {
- main_callback(false);
- });
- };
- // any alias
- async.any = async.some;
+ async.any =
+ async.some = _createTester(async.eachOf, toBool, identity);
- async.every = function (arr, iterator, main_callback) {
- async.each(arr, function (x, callback) {
- iterator(x, function (v) {
- if (!v) {
- main_callback(false);
- main_callback = function () {};
- }
- callback();
- });
- }, function (err) {
- main_callback(true);
- });
- };
- // all alias
- async.all = async.every;
+ async.someLimit = _createTester(async.eachOfLimit, toBool, identity);
+
+ async.all =
+ async.every = _createTester(async.eachOf, notId, notId);
+
+ async.everyLimit = _createTester(async.eachOfLimit, notId, notId);
+
+ function _findGetResult(v, x) {
+ return x;
+ }
+ async.detect = _createTester(async.eachOf, identity, _findGetResult);
+ async.detectSeries = _createTester(async.eachOfSeries, identity, _findGetResult);
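The boolean collection helpers (`some`, `every`, `detect` and the new `Limit` variants) now all route through `_createTester`. A minimal usage sketch, with an illustrative truth test that is not taken from the diff:

``` javascript
var async = require('async');

// Truth tests in async 1.x call back with a bare boolean, no error argument.
function isEvenAsync(n, callback) {
  setImmediate(function () { callback(n % 2 === 0); });
}

async.some([1, 3, 5, 6], isEvenAsync, function (anyEven) {
  console.log('any even?', anyEven); // true
});

async.detect([1, 3, 5, 6], isEvenAsync, function (firstEven) {
  console.log('first even value:', firstEven); // 6 here, since it is the only match
});
```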
async.sortBy = function (arr, iterator, callback) {
async.map(arr, function (x, callback) {
@@ -402,147 +478,189 @@
return callback(err);
}
else {
- var fn = function (left, right) {
- var a = left.criteria, b = right.criteria;
- return a < b ? -1 : a > b ? 1 : 0;
- };
- callback(null, _map(results.sort(fn), function (x) {
+ callback(null, _map(results.sort(comparator), function (x) {
return x.value;
}));
}
+
});
+
+ function comparator(left, right) {
+ var a = left.criteria, b = right.criteria;
+ return a < b ? -1 : a > b ? 1 : 0;
+ }
};
async.auto = function (tasks, callback) {
- callback = callback || function () {};
+ callback = _once(callback || noop);
var keys = _keys(tasks);
- var remainingTasks = keys.length
+ var remainingTasks = keys.length;
if (!remainingTasks) {
- return callback();
+ return callback(null);
}
var results = {};
var listeners = [];
- var addListener = function (fn) {
+ function addListener(fn) {
listeners.unshift(fn);
- };
- var removeListener = function (fn) {
- for (var i = 0; i < listeners.length; i += 1) {
- if (listeners[i] === fn) {
- listeners.splice(i, 1);
- return;
- }
- }
- };
- var taskComplete = function () {
- remainingTasks--
- _each(listeners.slice(0), function (fn) {
+ }
+ function removeListener(fn) {
+ var idx = _indexOf(listeners, fn);
+ if (idx >= 0) listeners.splice(idx, 1);
+ }
+ function taskComplete() {
+ remainingTasks--;
+ _arrayEach(listeners.slice(0), function (fn) {
fn();
});
- };
+ }
addListener(function () {
if (!remainingTasks) {
- var theCallback = callback;
- // prevent final callback from calling itself if it errors
- callback = function () {};
-
- theCallback(null, results);
+ callback(null, results);
}
});
- _each(keys, function (k) {
+ _arrayEach(keys, function (k) {
var task = _isArray(tasks[k]) ? tasks[k]: [tasks[k]];
- var taskCallback = function (err) {
- var args = Array.prototype.slice.call(arguments, 1);
+ var taskCallback = _restParam(function(err, args) {
if (args.length <= 1) {
args = args[0];
}
if (err) {
var safeResults = {};
- _each(_keys(results), function(rkey) {
- safeResults[rkey] = results[rkey];
+ _forEachOf(results, function(val, rkey) {
+ safeResults[rkey] = val;
});
safeResults[k] = args;
callback(err, safeResults);
- // stop subsequent errors hitting callback multiple times
- callback = function () {};
}
else {
results[k] = args;
async.setImmediate(taskComplete);
}
- };
- var requires = task.slice(0, Math.abs(task.length - 1)) || [];
- var ready = function () {
+ });
+ var requires = task.slice(0, task.length - 1);
+ // prevent dead-locks
+ var len = requires.length;
+ var dep;
+ while (len--) {
+ if (!(dep = tasks[requires[len]])) {
+ throw new Error('Has inexistant dependency');
+ }
+ if (_isArray(dep) && _indexOf(dep, k) >= 0) {
+ throw new Error('Has cyclic dependencies');
+ }
+ }
+ function ready() {
return _reduce(requires, function (a, x) {
return (a && results.hasOwnProperty(x));
}, true) && !results.hasOwnProperty(k);
- };
+ }
if (ready()) {
task[task.length - 1](taskCallback, results);
}
else {
- var listener = function () {
- if (ready()) {
- removeListener(listener);
- task[task.length - 1](taskCallback, results);
- }
- };
addListener(listener);
}
+ function listener() {
+ if (ready()) {
+ removeListener(listener);
+ task[task.length - 1](taskCallback, results);
+ }
+ }
});
};
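`async.auto` now validates its dependency graph up front, throwing on missing or cyclic dependencies instead of silently dead-locking. A small sketch of the task format it expects (task names and values are illustrative):

``` javascript
var async = require('async');

async.auto({
  get_data: function (callback) {
    callback(null, 'data');
  },
  make_folder: function (callback) {
    callback(null, 'folder');
  },
  // Dependencies are listed first; a missing or cyclic entry now throws.
  write_file: ['get_data', 'make_folder', function (callback, results) {
    callback(null, results.make_folder + '/file.txt');
  }]
}, function (err, results) {
  console.log(err, results.write_file); // null 'folder/file.txt'
});
```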
+
+
async.retry = function(times, task, callback) {
var DEFAULT_TIMES = 5;
+ var DEFAULT_INTERVAL = 0;
+
var attempts = [];
- // Use defaults if times not passed
- if (typeof times === 'function') {
+
+ var opts = {
+ times: DEFAULT_TIMES,
+ interval: DEFAULT_INTERVAL
+ };
+
+ function parseTimes(acc, t){
+ if(typeof t === 'number'){
+ acc.times = parseInt(t, 10) || DEFAULT_TIMES;
+ } else if(typeof t === 'object'){
+ acc.times = parseInt(t.times, 10) || DEFAULT_TIMES;
+ acc.interval = parseInt(t.interval, 10) || DEFAULT_INTERVAL;
+ } else {
+ throw new Error('Unsupported argument type for \'times\': ' + typeof(t));
+ }
+ }
+
+ var length = arguments.length;
+ if (length < 1 || length > 3) {
+ throw new Error('Invalid arguments - must be either (task), (task, callback), (times, task) or (times, task, callback)');
+ } else if (length <= 2 && typeof times === 'function') {
callback = task;
task = times;
- times = DEFAULT_TIMES;
}
- // Make sure times is a number
- times = parseInt(times, 10) || DEFAULT_TIMES;
- var wrappedTask = function(wrappedCallback, wrappedResults) {
- var retryAttempt = function(task, finalAttempt) {
+ if (typeof times !== 'function') {
+ parseTimes(opts, times);
+ }
+ opts.callback = callback;
+ opts.task = task;
+
+ function wrappedTask(wrappedCallback, wrappedResults) {
+ function retryAttempt(task, finalAttempt) {
return function(seriesCallback) {
task(function(err, result){
seriesCallback(!err || finalAttempt, {err: err, result: result});
}, wrappedResults);
};
- };
- while (times) {
- attempts.push(retryAttempt(task, !(times-=1)));
}
+
+ function retryInterval(interval){
+ return function(seriesCallback){
+ setTimeout(function(){
+ seriesCallback(null);
+ }, interval);
+ };
+ }
+
+ while (opts.times) {
+
+ var finalAttempt = !(opts.times-=1);
+ attempts.push(retryAttempt(opts.task, finalAttempt));
+ if(!finalAttempt && opts.interval > 0){
+ attempts.push(retryInterval(opts.interval));
+ }
+ }
+
async.series(attempts, function(done, data){
data = data[data.length - 1];
- (wrappedCallback || callback)(data.err, data.result);
+ (wrappedCallback || opts.callback)(data.err, data.result);
});
}
+
  // If a callback is passed, run this as a control flow
- return callback ? wrappedTask() : wrappedTask
+ return opts.callback ? wrappedTask() : wrappedTask;
};
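`async.retry` now accepts an options object with an `interval` (in milliseconds) between attempts. An illustrative sketch:

``` javascript
var async = require('async');

var attempts = 0;
function flaky(callback) {
  attempts += 1;
  if (attempts < 3) return callback(new Error('not yet'));
  callback(null, 'ok after ' + attempts + ' tries');
}

// Waits 100 ms between failed attempts, up to 5 attempts in total.
async.retry({ times: 5, interval: 100 }, flaky, function (err, result) {
  console.log(err, result); // null 'ok after 3 tries'
});
```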
async.waterfall = function (tasks, callback) {
- callback = callback || function () {};
+ callback = _once(callback || noop);
if (!_isArray(tasks)) {
- var err = new Error('First argument to waterfall must be an array of functions');
- return callback(err);
+ var err = new Error('First argument to waterfall must be an array of functions');
+ return callback(err);
}
if (!tasks.length) {
return callback();
}
- var wrapIterator = function (iterator) {
- return function (err) {
+ function wrapIterator(iterator) {
+ return _restParam(function (err, args) {
if (err) {
- callback.apply(null, arguments);
- callback = function () {};
+ callback.apply(null, [err].concat(args));
}
else {
- var args = Array.prototype.slice.call(arguments, 1);
var next = iterator.next();
if (next) {
args.push(wrapIterator(next));
@@ -550,260 +668,244 @@
else {
args.push(callback);
}
- async.setImmediate(function () {
- iterator.apply(null, args);
- });
+ ensureAsync(iterator).apply(null, args);
}
- };
- };
+ });
+ }
wrapIterator(async.iterator(tasks))();
};
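`async.waterfall` keeps its public shape; each task now runs through `ensureAsync` instead of an unconditional `setImmediate`. For reference, a minimal call:

``` javascript
var async = require('async');

async.waterfall([
  function (callback) {
    callback(null, 1, 2);        // results are passed to the next task
  },
  function (a, b, callback) {
    callback(null, a + b);
  }
], function (err, sum) {
  console.log(err, sum); // null 3
});
```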
- var _parallel = function(eachfn, tasks, callback) {
- callback = callback || function () {};
- if (_isArray(tasks)) {
- eachfn.map(tasks, function (fn, callback) {
- if (fn) {
- fn(function (err) {
- var args = Array.prototype.slice.call(arguments, 1);
- if (args.length <= 1) {
- args = args[0];
- }
- callback.call(null, err, args);
- });
+ function _parallel(eachfn, tasks, callback) {
+ callback = callback || noop;
+ var results = _isArrayLike(tasks) ? [] : {};
+
+ eachfn(tasks, function (task, key, callback) {
+ task(_restParam(function (err, args) {
+ if (args.length <= 1) {
+ args = args[0];
}
- }, callback);
- }
- else {
- var results = {};
- eachfn.each(_keys(tasks), function (k, callback) {
- tasks[k](function (err) {
- var args = Array.prototype.slice.call(arguments, 1);
- if (args.length <= 1) {
- args = args[0];
- }
- results[k] = args;
- callback(err);
- });
- }, function (err) {
- callback(err, results);
- });
- }
- };
+ results[key] = args;
+ callback(err);
+ }));
+ }, function (err) {
+ callback(err, results);
+ });
+ }
async.parallel = function (tasks, callback) {
- _parallel({ map: async.map, each: async.each }, tasks, callback);
+ _parallel(async.eachOf, tasks, callback);
};
async.parallelLimit = function(tasks, limit, callback) {
- _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
+ _parallel(_eachOfLimit(limit), tasks, callback);
};
- async.series = function (tasks, callback) {
- callback = callback || function () {};
- if (_isArray(tasks)) {
- async.mapSeries(tasks, function (fn, callback) {
- if (fn) {
- fn(function (err) {
- var args = Array.prototype.slice.call(arguments, 1);
- if (args.length <= 1) {
- args = args[0];
- }
- callback.call(null, err, args);
- });
- }
- }, callback);
- }
- else {
- var results = {};
- async.eachSeries(_keys(tasks), function (k, callback) {
- tasks[k](function (err) {
- var args = Array.prototype.slice.call(arguments, 1);
- if (args.length <= 1) {
- args = args[0];
- }
- results[k] = args;
- callback(err);
- });
- }, function (err) {
- callback(err, results);
- });
- }
+ async.series = function(tasks, callback) {
+ _parallel(async.eachOfSeries, tasks, callback);
};
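`async.parallel` and `async.series` now share the single `_parallel` helper, which handles both array and object task collections. An illustrative object-form call:

``` javascript
var async = require('async');

async.series({
  one: function (callback) {
    setTimeout(function () { callback(null, 1); }, 20);
  },
  two: function (callback) {
    callback(null, 2);
  }
}, function (err, results) {
  console.log(err, results); // null { one: 1, two: 2 }
});
```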
async.iterator = function (tasks) {
- var makeCallback = function (index) {
- var fn = function () {
+ function makeCallback(index) {
+ function fn() {
if (tasks.length) {
tasks[index].apply(null, arguments);
}
return fn.next();
- };
+ }
fn.next = function () {
return (index < tasks.length - 1) ? makeCallback(index + 1): null;
};
return fn;
- };
+ }
return makeCallback(0);
};
- async.apply = function (fn) {
- var args = Array.prototype.slice.call(arguments, 1);
- return function () {
+ async.apply = _restParam(function (fn, args) {
+ return _restParam(function (callArgs) {
return fn.apply(
- null, args.concat(Array.prototype.slice.call(arguments))
+ null, args.concat(callArgs)
);
- };
- };
+ });
+ });
- var _concat = function (eachfn, arr, fn, callback) {
- var r = [];
- eachfn(arr, function (x, cb) {
+ function _concat(eachfn, arr, fn, callback) {
+ var result = [];
+ eachfn(arr, function (x, index, cb) {
fn(x, function (err, y) {
- r = r.concat(y || []);
+ result = result.concat(y || []);
cb(err);
});
}, function (err) {
- callback(err, r);
+ callback(err, result);
});
- };
+ }
async.concat = doParallel(_concat);
async.concatSeries = doSeries(_concat);
async.whilst = function (test, iterator, callback) {
+ callback = callback || noop;
if (test()) {
- iterator(function (err) {
+ var next = _restParam(function(err, args) {
if (err) {
- return callback(err);
+ callback(err);
+ } else if (test.apply(this, args)) {
+ iterator(next);
+ } else {
+ callback(null);
}
- async.whilst(test, iterator, callback);
});
- }
- else {
- callback();
+ iterator(next);
+ } else {
+ callback(null);
}
};
async.doWhilst = function (iterator, test, callback) {
- iterator(function (err) {
- if (err) {
- return callback(err);
- }
- var args = Array.prototype.slice.call(arguments, 1);
- if (test.apply(null, args)) {
- async.doWhilst(iterator, test, callback);
- }
- else {
- callback();
- }
- });
+ var calls = 0;
+ return async.whilst(function() {
+ return ++calls <= 1 || test.apply(this, arguments);
+ }, iterator, callback);
};
async.until = function (test, iterator, callback) {
- if (!test()) {
- iterator(function (err) {
- if (err) {
- return callback(err);
- }
- async.until(test, iterator, callback);
- });
- }
- else {
- callback();
- }
+ return async.whilst(function() {
+ return !test.apply(this, arguments);
+ }, iterator, callback);
};
async.doUntil = function (iterator, test, callback) {
- iterator(function (err) {
+ return async.doWhilst(iterator, function() {
+ return !test.apply(this, arguments);
+ }, callback);
+ };
+
+ async.during = function (test, iterator, callback) {
+ callback = callback || noop;
+
+ var next = _restParam(function(err, args) {
if (err) {
- return callback(err);
+ callback(err);
+ } else {
+ args.push(check);
+ test.apply(this, args);
}
- var args = Array.prototype.slice.call(arguments, 1);
- if (!test.apply(null, args)) {
- async.doUntil(iterator, test, callback);
+ });
+
+ var check = function(err, truth) {
+ if (err) {
+ callback(err);
+ } else if (truth) {
+ iterator(next);
+ } else {
+ callback(null);
}
- else {
- callback();
+ };
+
+ test(check);
+ };
+
+ async.doDuring = function (iterator, test, callback) {
+ var calls = 0;
+ async.during(function(next) {
+ if (calls++ < 1) {
+ next(null, true);
+ } else {
+ test.apply(this, arguments);
}
- });
+ }, iterator, callback);
};
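`async.during` and `async.doDuring` are new: unlike `whilst`, the test itself is asynchronous and reports through `callback(err, truth)`. A small sketch:

``` javascript
var async = require('async');

var count = 0;

async.during(
  function (callback) {
    callback(null, count < 3);   // asynchronous truth test
  },
  function (callback) {
    count += 1;
    setTimeout(callback, 10);
  },
  function (err) {
    console.log(err, count);     // null 3
  }
);
```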
- async.queue = function (worker, concurrency) {
- if (concurrency === undefined) {
+ function _queue(worker, concurrency, payload) {
+ if (concurrency == null) {
concurrency = 1;
}
+ else if(concurrency === 0) {
+ throw new Error('Concurrency must not be zero');
+ }
function _insert(q, data, pos, callback) {
- if (!q.started){
+ if (callback != null && typeof callback !== "function") {
+ throw new Error("task callback must be a function");
+ }
q.started = true;
- }
- if (!_isArray(data)) {
- data = [data];
- }
- if(data.length == 0) {
- // call drain immediately if there are no tasks
- return async.setImmediate(function() {
- if (q.drain) {
- q.drain();
- }
- });
- }
- _each(data, function(task) {
- var item = {
- data: task,
- callback: typeof callback === 'function' ? callback : null
- };
-
- if (pos) {
- q.tasks.unshift(item);
- } else {
- q.tasks.push(item);
- }
-
- if (q.saturated && q.tasks.length === q.concurrency) {
- q.saturated();
- }
- async.setImmediate(q.process);
- });
+ if (!_isArray(data)) {
+ data = [data];
+ }
+ if(data.length === 0 && q.idle()) {
+ // call drain immediately if there are no tasks
+ return async.setImmediate(function() {
+ q.drain();
+ });
+ }
+ _arrayEach(data, function(task) {
+ var item = {
+ data: task,
+ callback: callback || noop
+ };
+
+ if (pos) {
+ q.tasks.unshift(item);
+ } else {
+ q.tasks.push(item);
+ }
+
+ if (q.tasks.length === q.concurrency) {
+ q.saturated();
+ }
+ });
+ async.setImmediate(q.process);
+ }
+ function _next(q, tasks) {
+ return function(){
+ workers -= 1;
+ var args = arguments;
+ _arrayEach(tasks, function (task) {
+ task.callback.apply(task, args);
+ });
+ if (q.tasks.length + workers === 0) {
+ q.drain();
+ }
+ q.process();
+ };
}
var workers = 0;
var q = {
tasks: [],
concurrency: concurrency,
- saturated: null,
- empty: null,
- drain: null,
+ payload: payload,
+ saturated: noop,
+ empty: noop,
+ drain: noop,
started: false,
paused: false,
push: function (data, callback) {
- _insert(q, data, false, callback);
+ _insert(q, data, false, callback);
},
kill: function () {
- q.drain = null;
- q.tasks = [];
+ q.drain = noop;
+ q.tasks = [];
},
unshift: function (data, callback) {
- _insert(q, data, true, callback);
+ _insert(q, data, true, callback);
},
process: function () {
if (!q.paused && workers < q.concurrency && q.tasks.length) {
- var task = q.tasks.shift();
- if (q.empty && q.tasks.length === 0) {
- q.empty();
- }
- workers += 1;
- var next = function () {
- workers -= 1;
- if (task.callback) {
- task.callback.apply(task, arguments);
- }
- if (q.drain && q.tasks.length + workers === 0) {
- q.drain();
+ while(workers < q.concurrency && q.tasks.length){
+ var tasks = q.payload ?
+ q.tasks.splice(0, q.payload) :
+ q.tasks.splice(0, q.tasks.length);
+
+ var data = _map(tasks, function (task) {
+ return task.data;
+ });
+
+ if (q.tasks.length === 0) {
+ q.empty();
}
- q.process();
- };
- var cb = only_once(next);
- worker(task.data, cb);
+ workers += 1;
+ var cb = only_once(_next(q, tasks));
+ worker(data, cb);
+ }
}
},
length: function () {
@@ -816,71 +918,78 @@
return q.tasks.length + workers === 0;
},
pause: function () {
- if (q.paused === true) { return; }
q.paused = true;
},
resume: function () {
if (q.paused === false) { return; }
q.paused = false;
+ var resumeCount = Math.min(q.concurrency, q.tasks.length);
// Need to call q.process once per concurrent
// worker to preserve full concurrency after pause
- for (var w = 1; w <= q.concurrency; w++) {
+ for (var w = 1; w <= resumeCount; w++) {
async.setImmediate(q.process);
}
}
};
return q;
+ }
+
+ async.queue = function (worker, concurrency) {
+ var q = _queue(function (items, cb) {
+ worker(items[0], cb);
+ }, concurrency, 1);
+
+ return q;
};
async.priorityQueue = function (worker, concurrency) {
function _compareTasks(a, b){
- return a.priority - b.priority;
- };
+ return a.priority - b.priority;
+ }
function _binarySearch(sequence, item, compare) {
- var beg = -1,
- end = sequence.length - 1;
- while (beg < end) {
- var mid = beg + ((end - beg + 1) >>> 1);
- if (compare(item, sequence[mid]) >= 0) {
- beg = mid;
- } else {
- end = mid - 1;
+ var beg = -1,
+ end = sequence.length - 1;
+ while (beg < end) {
+ var mid = beg + ((end - beg + 1) >>> 1);
+ if (compare(item, sequence[mid]) >= 0) {
+ beg = mid;
+ } else {
+ end = mid - 1;
+ }
}
- }
- return beg;
+ return beg;
}
function _insert(q, data, priority, callback) {
- if (!q.started){
+ if (callback != null && typeof callback !== "function") {
+ throw new Error("task callback must be a function");
+ }
q.started = true;
- }
- if (!_isArray(data)) {
- data = [data];
- }
- if(data.length == 0) {
- // call drain immediately if there are no tasks
- return async.setImmediate(function() {
- if (q.drain) {
- q.drain();
- }
- });
- }
- _each(data, function(task) {
- var item = {
- data: task,
- priority: priority,
- callback: typeof callback === 'function' ? callback : null
- };
-
- q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item);
-
- if (q.saturated && q.tasks.length === q.concurrency) {
- q.saturated();
- }
- async.setImmediate(q.process);
- });
+ if (!_isArray(data)) {
+ data = [data];
+ }
+ if(data.length === 0) {
+ // call drain immediately if there are no tasks
+ return async.setImmediate(function() {
+ q.drain();
+ });
+ }
+ _arrayEach(data, function(task) {
+ var item = {
+ data: task,
+ priority: priority,
+ callback: typeof callback === 'function' ? callback : noop
+ };
+
+ q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item);
+
+ if (q.tasks.length === q.concurrency) {
+ q.saturated();
+ }
+ async.setImmediate(q.process);
+ });
}
// Start with a normal queue
@@ -888,7 +997,7 @@
// Override push to accept second parameter representing priority
q.push = function (data, priority, callback) {
- _insert(q, data, priority, callback);
+ _insert(q, data, priority, callback);
};
// Remove unshift function
@@ -898,78 +1007,12 @@
};
async.cargo = function (worker, payload) {
- var working = false,
- tasks = [];
-
- var cargo = {
- tasks: tasks,
- payload: payload,
- saturated: null,
- empty: null,
- drain: null,
- drained: true,
- push: function (data, callback) {
- if (!_isArray(data)) {
- data = [data];
- }
- _each(data, function(task) {
- tasks.push({
- data: task,
- callback: typeof callback === 'function' ? callback : null
- });
- cargo.drained = false;
- if (cargo.saturated && tasks.length === payload) {
- cargo.saturated();
- }
- });
- async.setImmediate(cargo.process);
- },
- process: function process() {
- if (working) return;
- if (tasks.length === 0) {
- if(cargo.drain && !cargo.drained) cargo.drain();
- cargo.drained = true;
- return;
- }
-
- var ts = typeof payload === 'number'
- ? tasks.splice(0, payload)
- : tasks.splice(0, tasks.length);
-
- var ds = _map(ts, function (task) {
- return task.data;
- });
-
- if(cargo.empty) cargo.empty();
- working = true;
- worker(ds, function () {
- working = false;
-
- var args = arguments;
- _each(ts, function (data) {
- if (data.callback) {
- data.callback.apply(null, args);
- }
- });
-
- process();
- });
- },
- length: function () {
- return tasks.length;
- },
- running: function () {
- return working;
- }
- };
- return cargo;
+ return _queue(worker, 1, payload);
};
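`async.cargo` is likewise reduced to `_queue(worker, 1, payload)`: a single worker that receives tasks in batches of at most `payload`. A sketch with illustrative data:

``` javascript
var async = require('async');

var cargo = async.cargo(function (tasks, callback) {
  // tasks is an array of the pushed data objects, at most 2 per batch here
  console.log('batch:', tasks.map(function (t) { return t.name; }));
  callback();
}, 2);

cargo.push({ name: 'a' });
cargo.push({ name: 'b' });
cargo.push({ name: 'c' }); // prints [ 'a', 'b' ] then [ 'c' ]
```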
- var _console_fn = function (name) {
- return function (fn) {
- var args = Array.prototype.slice.call(arguments, 1);
- fn.apply(null, args.concat([function (err) {
- var args = Array.prototype.slice.call(arguments, 1);
+ function _console_fn(name) {
+ return _restParam(function (fn, args) {
+ fn.apply(null, args.concat([_restParam(function (err, args) {
if (typeof console !== 'undefined') {
if (err) {
if (console.error) {
@@ -977,14 +1020,14 @@
}
}
else if (console[name]) {
- _each(args, function (x) {
+ _arrayEach(args, function (x) {
console[name](x);
});
}
}
- }]));
- };
- };
+ })]));
+ });
+ }
async.log = _console_fn('log');
async.dir = _console_fn('dir');
/*async.info = _console_fn('info');
@@ -994,11 +1037,8 @@
async.memoize = function (fn, hasher) {
var memo = {};
var queues = {};
- hasher = hasher || function (x) {
- return x;
- };
- var memoized = function () {
- var args = Array.prototype.slice.call(arguments);
+ hasher = hasher || identity;
+ var memoized = _restParam(function memoized(args) {
var callback = args.pop();
var key = hasher.apply(null, args);
if (key in memo) {
@@ -1011,100 +1051,153 @@
}
else {
queues[key] = [callback];
- fn.apply(null, args.concat([function () {
- memo[key] = arguments;
+ fn.apply(null, args.concat([_restParam(function (args) {
+ memo[key] = args;
var q = queues[key];
delete queues[key];
for (var i = 0, l = q.length; i < l; i++) {
- q[i].apply(null, arguments);
+ q[i].apply(null, args);
}
- }]));
+ })]));
}
- };
+ });
memoized.memo = memo;
memoized.unmemoized = fn;
return memoized;
};
async.unmemoize = function (fn) {
- return function () {
- return (fn.unmemoized || fn).apply(null, arguments);
- };
+ return function () {
+ return (fn.unmemoized || fn).apply(null, arguments);
+ };
};
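`async.memoize` keeps its behaviour but now uses the shared `identity` hasher and `_restParam`. For reference:

``` javascript
var async = require('async');

function slowSquare(n, callback) {
  setTimeout(function () { callback(null, n * n); }, 100);
}

var fastSquare = async.memoize(slowSquare);

fastSquare(4, function (err, result) {
  // a second call with the same argument is answered from the memo
  fastSquare(4, function (err, cached) {
    console.log(result, cached); // 16 16
  });
});
```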
- async.times = function (count, iterator, callback) {
- var counter = [];
- for (var i = 0; i < count; i++) {
- counter.push(i);
- }
- return async.map(counter, iterator, callback);
- };
+ function _times(mapper) {
+ return function (count, iterator, callback) {
+ mapper(_range(count), iterator, callback);
+ };
+ }
- async.timesSeries = function (count, iterator, callback) {
- var counter = [];
- for (var i = 0; i < count; i++) {
- counter.push(i);
- }
- return async.mapSeries(counter, iterator, callback);
+ async.times = _times(async.map);
+ async.timesSeries = _times(async.mapSeries);
+ async.timesLimit = function (count, limit, iterator, callback) {
+ return async.mapLimit(_range(count), limit, iterator, callback);
};
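`async.timesLimit` is new; it behaves like `times` but keeps at most `limit` iterator calls in flight. An illustrative call:

``` javascript
var async = require('async');

async.timesLimit(5, 2, function (n, next) {
  setTimeout(function () { next(null, n * 10); }, 10);
}, function (err, results) {
  console.log(err, results); // null [ 0, 10, 20, 30, 40 ]
});
```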
async.seq = function (/* functions... */) {
var fns = arguments;
- return function () {
+ return _restParam(function (args) {
var that = this;
- var args = Array.prototype.slice.call(arguments);
- var callback = args.pop();
+
+ var callback = args[args.length - 1];
+ if (typeof callback == 'function') {
+ args.pop();
+ } else {
+ callback = noop;
+ }
+
async.reduce(fns, args, function (newargs, fn, cb) {
- fn.apply(that, newargs.concat([function () {
- var err = arguments[0];
- var nextargs = Array.prototype.slice.call(arguments, 1);
+ fn.apply(that, newargs.concat([_restParam(function (err, nextargs) {
cb(err, nextargs);
- }]))
+ })]));
},
function (err, results) {
callback.apply(that, [err].concat(results));
});
- };
+ });
};
async.compose = function (/* functions... */) {
- return async.seq.apply(null, Array.prototype.reverse.call(arguments));
+ return async.seq.apply(null, Array.prototype.reverse.call(arguments));
};
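`async.seq` now tolerates a missing final callback; `compose` remains `seq` with the argument order reversed. A minimal sketch:

``` javascript
var async = require('async');

function add1(n, callback) { callback(null, n + 1); }
function mul3(n, callback) { callback(null, n * 3); }

var add1ThenMul3 = async.seq(add1, mul3); // runs left to right

add1ThenMul3(4, function (err, result) {
  console.log(err, result); // null 15
});
```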
- var _applyEach = function (eachfn, fns /*args...*/) {
- var go = function () {
- var that = this;
- var args = Array.prototype.slice.call(arguments);
- var callback = args.pop();
- return eachfn(fns, function (fn, cb) {
- fn.apply(that, args.concat([cb]));
- },
- callback);
- };
- if (arguments.length > 2) {
- var args = Array.prototype.slice.call(arguments, 2);
- return go.apply(this, args);
- }
- else {
- return go;
- }
- };
- async.applyEach = doParallel(_applyEach);
- async.applyEachSeries = doSeries(_applyEach);
+
+ function _applyEach(eachfn) {
+ return _restParam(function(fns, args) {
+ var go = _restParam(function(args) {
+ var that = this;
+ var callback = args.pop();
+ return eachfn(fns, function (fn, _, cb) {
+ fn.apply(that, args.concat([cb]));
+ },
+ callback);
+ });
+ if (args.length) {
+ return go.apply(this, args);
+ }
+ else {
+ return go;
+ }
+ });
+ }
+
+ async.applyEach = _applyEach(async.eachOf);
+ async.applyEachSeries = _applyEach(async.eachOfSeries);
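`applyEach` and `applyEachSeries` are rebuilt on `eachOf`/`eachOfSeries`; called with only the function list they still return a partially applied function. For illustration:

``` javascript
var async = require('async');

function notifyA(value, callback) { console.log('A got', value); callback(); }
function notifyB(value, callback) { console.log('B got', value); callback(); }

// No extra arguments yet, so applyEach returns a reusable function.
var notifyAll = async.applyEach([notifyA, notifyB]);

notifyAll(42, function (err) {
  console.log('done', err); // done null
});
```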
+
async.forever = function (fn, callback) {
+ var done = only_once(callback || noop);
+ var task = ensureAsync(fn);
function next(err) {
if (err) {
- if (callback) {
- return callback(err);
- }
- throw err;
+ return done(err);
}
- fn(next);
+ task(next);
}
next();
};
+ function ensureAsync(fn) {
+ return _restParam(function (args) {
+ var callback = args.pop();
+ args.push(function () {
+ var innerArgs = arguments;
+ if (sync) {
+ async.setImmediate(function () {
+ callback.apply(null, innerArgs);
+ });
+ } else {
+ callback.apply(null, innerArgs);
+ }
+ });
+ var sync = true;
+ fn.apply(this, args);
+ sync = false;
+ });
+ }
+
+ async.ensureAsync = ensureAsync;
+
+ async.constant = _restParam(function(values) {
+ var args = [null].concat(values);
+ return function (callback) {
+ return callback.apply(this, args);
+ };
+ });
+
+ async.wrapSync =
+ async.asyncify = function asyncify(func) {
+ return _restParam(function (args) {
+ var callback = args.pop();
+ var result;
+ try {
+ result = func.apply(this, args);
+ } catch (e) {
+ return callback(e);
+ }
+ // if result is Promise object
+ if (typeof result !== 'undefined' && typeof result.then === "function") {
+ result.then(function(value) {
+ callback(null, value);
+ }).catch(function(err) {
+ callback(err.message ? err : new Error(err));
+ });
+ } else {
+ callback(null, result);
+ }
+ });
+ };
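`ensureAsync`, `constant` and `asyncify`/`wrapSync` are new exports. `asyncify` adapts a synchronous or Promise-returning function to the callback convention used throughout the library. A small sketch combining them:

``` javascript
var async = require('async');

// asyncify(JSON.parse) turns the synchronous parser into (input, callback).
var parse = async.asyncify(JSON.parse);

async.waterfall([
  async.constant('{"answer":42}'), // injects a fixed first value
  parse
], function (err, obj) {
  console.log(err, obj.answer); // null 42
});
```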
+
// Node.js
if (typeof module !== 'undefined' && module.exports) {
module.exports = async;
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json
index 98c2e0b4e..2b8128b92 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json
@@ -5,7 +5,7 @@
"author": {
"name": "Caolan McMahon"
},
- "version": "0.9.2",
+ "version": "1.4.0",
"keywords": [
"async",
"callback",
@@ -21,10 +21,21 @@
},
"license": "MIT",
"devDependencies": {
+ "benchmark": "github:bestiejs/benchmark.js",
+ "bluebird": "^2.9.32",
+ "coveralls": "^2.11.2",
+ "es6-promise": "^2.3.0",
+ "jscs": "^1.13.1",
+ "jshint": "~2.8.0",
+ "lodash": "^3.9.0",
+ "mkdirp": "~0.5.1",
+ "native-promise-only": "^0.8.0-a",
"nodeunit": ">0.0.0",
- "uglify-js": "1.2.x",
- "nodelint": ">0.0.0",
- "lodash": ">=2.4.1"
+ "nyc": "^2.1.0",
+ "rsvp": "^3.0.18",
+ "uglify-js": "~2.4.0",
+ "xyz": "^0.5.0",
+ "yargs": "~3.9.1"
},
"jam": {
"main": "lib/async.js",
@@ -38,7 +49,10 @@
]
},
"scripts": {
- "test": "nodeunit test/test-async.js"
+ "test": "npm run-script lint && nodeunit test/test-async.js",
+ "lint": "jshint lib/*.js test/*.js perf/*.js && jscs lib/*.js test/*.js perf/*.js",
+ "coverage": "nyc npm test && nyc report",
+ "coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls"
},
"spm": {
"main": "lib/async.js"
@@ -53,16 +67,20 @@
"tests"
]
},
- "gitHead": "de3a16091d5125384eff4a54deb3998b13c3814c",
+ "gitHead": "5bfcd31c72e003f96df025e75753463da61f49f9",
"homepage": "https://github.com/caolan/async#readme",
- "_id": "async@0.9.2",
- "_shasum": "aea74d5e61c1f899613bf64bda66d4c78f2fd17d",
- "_from": "async@>=0.9.0 <0.10.0",
- "_npmVersion": "2.9.0",
- "_nodeVersion": "2.0.1",
+ "_id": "async@1.4.0",
+ "_shasum": "35f86f83c59e0421d099cd9a91d8278fb578c00d",
+ "_from": "async@>=1.2.1 <2.0.0",
+ "_npmVersion": "2.13.0",
+ "_nodeVersion": "2.4.0",
"_npmUser": {
- "name": "beaugunderson",
- "email": "beau@beaugunderson.com"
+ "name": "megawac",
+ "email": "megawac@gmail.com"
+ },
+ "dist": {
+ "shasum": "35f86f83c59e0421d099cd9a91d8278fb578c00d",
+ "tarball": "http://registry.npmjs.org/async/-/async-1.4.0.tgz"
},
"maintainers": [
{
@@ -72,13 +90,17 @@
{
"name": "beaugunderson",
"email": "beau@beaugunderson.com"
+ },
+ {
+ "name": "aearly",
+ "email": "alexander.early@gmail.com"
+ },
+ {
+ "name": "megawac",
+ "email": "megawac@gmail.com"
}
],
- "dist": {
- "shasum": "aea74d5e61c1f899613bf64bda66d4c78f2fd17d",
- "tarball": "http://registry.npmjs.org/async/-/async-0.9.2.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz",
+ "_resolved": "https://registry.npmjs.org/async/-/async-1.4.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/support/sync-package-managers.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/support/sync-package-managers.js
index 30cb7c2d0..28c22e9f9 100755
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/support/sync-package-managers.js
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/support/sync-package-managers.js
@@ -34,7 +34,7 @@ var bowerSpecific = {
authors: [packageJson.author]
};
-var bowerInclude = ['name', 'description', 'version', 'main', 'keywords',
+var bowerInclude = ['name', 'description', 'main', 'keywords',
'license', 'homepage', 'repository', 'devDependencies'];
var componentSpecific = {
@@ -43,7 +43,7 @@ var componentSpecific = {
};
var componentInclude = ['name', 'description', 'version', 'keywords',
- 'license'];
+ 'license', 'main'];
var bowerJson = _.merge({}, _.pick(packageJson, bowerInclude), bowerSpecific);
var componentJson = _.merge({}, _.pick(packageJson, componentInclude), componentSpecific);
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/Readme.md b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/Readme.md
deleted file mode 100644
index 8043cb48a..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/Readme.md
+++ /dev/null
@@ -1,132 +0,0 @@
-# combined-stream [![Build Status](https://travis-ci.org/felixge/node-combined-stream.svg?branch=master)](https://travis-ci.org/felixge/node-combined-stream)
-
-A stream that emits multiple other streams one after another.
-
-## Installation
-
-``` bash
-npm install combined-stream
-```
-
-## Usage
-
-Here is a simple example that shows how you can use combined-stream to combine
-two files into one:
-
-``` javascript
-var CombinedStream = require('combined-stream');
-var fs = require('fs');
-
-var combinedStream = CombinedStream.create();
-combinedStream.append(fs.createReadStream('file1.txt'));
-combinedStream.append(fs.createReadStream('file2.txt'));
-
-combinedStream.pipe(fs.createWriteStream('combined.txt'));
-```
-
-While the example above works great, it will pause all source streams until
-they are needed. If you don't want that to happen, you can set `pauseStreams`
-to `false`:
-
-``` javascript
-var CombinedStream = require('combined-stream');
-var fs = require('fs');
-
-var combinedStream = CombinedStream.create({pauseStreams: false});
-combinedStream.append(fs.createReadStream('file1.txt'));
-combinedStream.append(fs.createReadStream('file2.txt'));
-
-combinedStream.pipe(fs.createWriteStream('combined.txt'));
-```
-
-However, what if you don't have all the source streams yet, or you don't want
-to allocate the resources (file descriptors, memory, etc.) for them right away?
-Well, in that case you can simply provide a callback that supplies the stream
-by calling a `next()` function:
-
-``` javascript
-var CombinedStream = require('combined-stream');
-var fs = require('fs');
-
-var combinedStream = CombinedStream.create();
-combinedStream.append(function(next) {
- next(fs.createReadStream('file1.txt'));
-});
-combinedStream.append(function(next) {
- next(fs.createReadStream('file2.txt'));
-});
-
-combinedStream.pipe(fs.createWriteStream('combined.txt'));
-```
-
-## API
-
-### CombinedStream.create([options])
-
-Returns a new combined stream object. Available options are:
-
-* `maxDataSize`
-* `pauseStreams`
-
-The effect of those options is described below.
-
-### combinedStream.pauseStreams = `true`
-
-Whether to apply back pressure to the underlaying streams. If set to `false`,
-the underlaying streams will never be paused. If set to `true`, the
-underlaying streams will be paused right after being appended, as well as when
-`delayedStream.pipe()` wants to throttle.
-
-### combinedStream.maxDataSize = `2 * 1024 * 1024`
-
-The maximum amount of bytes (or characters) to buffer for all source streams.
-If this value is exceeded, `combinedStream` emits an `'error'` event.
-
-### combinedStream.dataSize = `0`
-
-The amount of bytes (or characters) currently buffered by `combinedStream`.
-
-### combinedStream.append(stream)
-
-Appends the given `stream` to the combinedStream object. If `pauseStreams` is
-set to `true`, this stream will also be paused right away.
-
-`streams` can also be a function that takes one parameter called `next`. `next`
-is a function that must be invoked in order to provide the `next` stream, see
-example above.
-
-Regardless of how the `stream` is appended, combined-stream always attaches an
-`'error'` listener to it, so you don't have to do that manually.
-
-Special case: `stream` can also be a String or Buffer.
-
-### combinedStream.write(data)
-
-You should not call this, `combinedStream` takes care of piping the appended
-streams into itself for you.
-
-### combinedStream.resume()
-
-Causes `combinedStream` to start draining the streams it manages. The function is
-idempotent, and also emits a `'resume'` event each time which usually goes to
-the stream that is currently being drained.
-
-### combinedStream.pause();
-
-If `combinedStream.pauseStreams` is set to `false`, this does nothing.
-Otherwise a `'pause'` event is emitted, this goes to the stream that is
-currently being drained, so you can use it to apply back pressure.
-
-### combinedStream.end();
-
-Sets `combinedStream.writable` to false, emits an `'end'` event, and removes
-all streams from the queue.
-
-### combinedStream.destroy();
-
-Same as `combinedStream.end()`, except it emits a `'close'` event instead of
-`'end'`.
-
-## License
-
-combined-stream is licensed under the MIT license.
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/lib/combined_stream.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/lib/combined_stream.js
deleted file mode 100644
index 6b5c21b6b..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/lib/combined_stream.js
+++ /dev/null
@@ -1,188 +0,0 @@
-var util = require('util');
-var Stream = require('stream').Stream;
-var DelayedStream = require('delayed-stream');
-
-module.exports = CombinedStream;
-function CombinedStream() {
- this.writable = false;
- this.readable = true;
- this.dataSize = 0;
- this.maxDataSize = 2 * 1024 * 1024;
- this.pauseStreams = true;
-
- this._released = false;
- this._streams = [];
- this._currentStream = null;
-}
-util.inherits(CombinedStream, Stream);
-
-CombinedStream.create = function(options) {
- var combinedStream = new this();
-
- options = options || {};
- for (var option in options) {
- combinedStream[option] = options[option];
- }
-
- return combinedStream;
-};
-
-CombinedStream.isStreamLike = function(stream) {
- return (typeof stream !== 'function')
- && (typeof stream !== 'string')
- && (typeof stream !== 'boolean')
- && (typeof stream !== 'number')
- && (!Buffer.isBuffer(stream));
-};
-
-CombinedStream.prototype.append = function(stream) {
- var isStreamLike = CombinedStream.isStreamLike(stream);
-
- if (isStreamLike) {
- if (!(stream instanceof DelayedStream)) {
- var newStream = DelayedStream.create(stream, {
- maxDataSize: Infinity,
- pauseStream: this.pauseStreams,
- });
- stream.on('data', this._checkDataSize.bind(this));
- stream = newStream;
- }
-
- this._handleErrors(stream);
-
- if (this.pauseStreams) {
- stream.pause();
- }
- }
-
- this._streams.push(stream);
- return this;
-};
-
-CombinedStream.prototype.pipe = function(dest, options) {
- Stream.prototype.pipe.call(this, dest, options);
- this.resume();
- return dest;
-};
-
-CombinedStream.prototype._getNext = function() {
- this._currentStream = null;
- var stream = this._streams.shift();
-
-
- if (typeof stream == 'undefined') {
- this.end();
- return;
- }
-
- if (typeof stream !== 'function') {
- this._pipeNext(stream);
- return;
- }
-
- var getStream = stream;
- getStream(function(stream) {
- var isStreamLike = CombinedStream.isStreamLike(stream);
- if (isStreamLike) {
- stream.on('data', this._checkDataSize.bind(this));
- this._handleErrors(stream);
- }
-
- this._pipeNext(stream);
- }.bind(this));
-};
-
-CombinedStream.prototype._pipeNext = function(stream) {
- this._currentStream = stream;
-
- var isStreamLike = CombinedStream.isStreamLike(stream);
- if (isStreamLike) {
- stream.on('end', this._getNext.bind(this));
- stream.pipe(this, {end: false});
- return;
- }
-
- var value = stream;
- this.write(value);
- this._getNext();
-};
-
-CombinedStream.prototype._handleErrors = function(stream) {
- var self = this;
- stream.on('error', function(err) {
- self._emitError(err);
- });
-};
-
-CombinedStream.prototype.write = function(data) {
- this.emit('data', data);
-};
-
-CombinedStream.prototype.pause = function() {
- if (!this.pauseStreams) {
- return;
- }
-
- if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause();
- this.emit('pause');
-};
-
-CombinedStream.prototype.resume = function() {
- if (!this._released) {
- this._released = true;
- this.writable = true;
- this._getNext();
- }
-
- if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();
- this.emit('resume');
-};
-
-CombinedStream.prototype.end = function() {
- this._reset();
- this.emit('end');
-};
-
-CombinedStream.prototype.destroy = function() {
- this._reset();
- this.emit('close');
-};
-
-CombinedStream.prototype._reset = function() {
- this.writable = false;
- this._streams = [];
- this._currentStream = null;
-};
-
-CombinedStream.prototype._checkDataSize = function() {
- this._updateDataSize();
- if (this.dataSize <= this.maxDataSize) {
- return;
- }
-
- var message =
- 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';
- this._emitError(new Error(message));
-};
-
-CombinedStream.prototype._updateDataSize = function() {
- this.dataSize = 0;
-
- var self = this;
- this._streams.forEach(function(stream) {
- if (!stream.dataSize) {
- return;
- }
-
- self.dataSize += stream.dataSize;
- });
-
- if (this._currentStream && this._currentStream.dataSize) {
- this.dataSize += this._currentStream.dataSize;
- }
-};
-
-CombinedStream.prototype._emitError = function(err) {
- this._reset();
- this.emit('error', err);
-};
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/.npmignore b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/.npmignore
deleted file mode 100644
index 2fedb26cc..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/.npmignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*.un~
-/node_modules/*
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Makefile b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Makefile
deleted file mode 100644
index 2d7580746..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-SHELL := /bin/bash
-
-test:
- @./test/run.js
-
-.PHONY: test
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Readme.md b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Readme.md
deleted file mode 100644
index 5cb5b35e5..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/Readme.md
+++ /dev/null
@@ -1,154 +0,0 @@
-# delayed-stream
-
-Buffers events from a stream until you are ready to handle them.
-
-## Installation
-
-``` bash
-npm install delayed-stream
-```
-
-## Usage
-
-The following example shows how to write a http echo server that delays its
-response by 1000 ms.
-
-``` javascript
-var DelayedStream = require('delayed-stream');
-var http = require('http');
-
-http.createServer(function(req, res) {
- var delayed = DelayedStream.create(req);
-
- setTimeout(function() {
- res.writeHead(200);
- delayed.pipe(res);
- }, 1000);
-});
-```
-
-If you are not using `Stream#pipe`, you can also manually release the buffered
-events by calling `delayedStream.resume()`:
-
-``` javascript
-var delayed = DelayedStream.create(req);
-
-setTimeout(function() {
- // Emit all buffered events and resume underlaying source
- delayed.resume();
-}, 1000);
-```
-
-## Implementation
-
-In order to use this meta stream properly, here are a few things you should
-know about the implementation.
-
-### Event Buffering / Proxying
-
-All events of the `source` stream are hijacked by overwriting the `source.emit`
-method. Until node implements a catch-all event listener, this is the only way.
-
-However, delayed-stream still continues to emit all events it captures on the
-`source`, regardless of whether you have released the delayed stream yet or
-not.
-
-Upon creation, delayed-stream captures all `source` events and stores them in
-an internal event buffer. Once `delayedStream.release()` is called, all
-buffered events are emitted on the `delayedStream`, and the event buffer is
-cleared. After that, delayed-stream merely acts as a proxy for the underlaying
-source.
-
-### Error handling
-
-Error events on `source` are buffered / proxied just like any other events.
-However, `delayedStream.create` attaches a no-op `'error'` listener to the
-`source`. This way you only have to handle errors on the `delayedStream`
-object, rather than in two places.
-
-### Buffer limits
-
-delayed-stream provides a `maxDataSize` property that can be used to limit
-the amount of data being buffered. In order to protect you from bad `source`
-streams that don't react to `source.pause()`, this feature is enabled by
-default.
-
-## API
-
-### DelayedStream.create(source, [options])
-
-Returns a new `delayedStream`. Available options are:
-
-* `pauseStream`
-* `maxDataSize`
-
-The description for those properties can be found below.
-
-### delayedStream.source
-
-The `source` stream managed by this object. This is useful if you are
-passing your `delayedStream` around, and you still want to access properties
-on the `source` object.
-
-### delayedStream.pauseStream = true
-
-Whether to pause the underlaying `source` when calling
-`DelayedStream.create()`. Modifying this property afterwards has no effect.
-
-### delayedStream.maxDataSize = 1024 * 1024
-
-The amount of data to buffer before emitting an `error`.
-
-If the underlaying source is emitting `Buffer` objects, the `maxDataSize`
-refers to bytes.
-
-If the underlaying source is emitting JavaScript strings, the size refers to
-characters.
-
-If you know what you are doing, you can set this property to `Infinity` to
-disable this feature. You can also modify this property during runtime.
-
-### delayedStream.maxDataSize = 1024 * 1024
-
-The amount of data to buffer before emitting an `error`.
-
-If the underlaying source is emitting `Buffer` objects, the `maxDataSize`
-refers to bytes.
-
-If the underlaying source is emitting JavaScript strings, the size refers to
-characters.
-
-If you know what you are doing, you can set this property to `Infinity` to
-disable this feature.
-
-### delayedStream.dataSize = 0
-
-The amount of data buffered so far.
-
-### delayedStream.readable
-
-An ECMA5 getter that returns the value of `source.readable`.
-
-### delayedStream.resume()
-
-If the `delayedStream` has not been released so far, `delayedStream.release()`
-is called.
-
-In either case, `source.resume()` is called.
-
-### delayedStream.pause()
-
-Calls `source.pause()`.
-
-### delayedStream.pipe(dest)
-
-Calls `delayedStream.resume()` and then proxies the arguments to `source.pipe`.
-
-### delayedStream.release()
-
-Emits and clears all events that have been buffered up so far. This does not
-resume the underlaying source, use `delayedStream.resume()` instead.
-
-## License
-
-delayed-stream is licensed under the MIT license.
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/lib/delayed_stream.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/lib/delayed_stream.js
deleted file mode 100644
index 7c10d4825..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/lib/delayed_stream.js
+++ /dev/null
@@ -1,99 +0,0 @@
-var Stream = require('stream').Stream;
-var util = require('util');
-
-module.exports = DelayedStream;
-function DelayedStream() {
- this.source = null;
- this.dataSize = 0;
- this.maxDataSize = 1024 * 1024;
- this.pauseStream = true;
-
- this._maxDataSizeExceeded = false;
- this._released = false;
- this._bufferedEvents = [];
-}
-util.inherits(DelayedStream, Stream);
-
-DelayedStream.create = function(source, options) {
- var delayedStream = new this();
-
- options = options || {};
- for (var option in options) {
- delayedStream[option] = options[option];
- }
-
- delayedStream.source = source;
-
- var realEmit = source.emit;
- source.emit = function() {
- delayedStream._handleEmit(arguments);
- return realEmit.apply(source, arguments);
- };
-
- source.on('error', function() {});
- if (delayedStream.pauseStream) {
- source.pause();
- }
-
- return delayedStream;
-};
-
-DelayedStream.prototype.__defineGetter__('readable', function() {
- return this.source.readable;
-});
-
-DelayedStream.prototype.resume = function() {
- if (!this._released) {
- this.release();
- }
-
- this.source.resume();
-};
-
-DelayedStream.prototype.pause = function() {
- this.source.pause();
-};
-
-DelayedStream.prototype.release = function() {
- this._released = true;
-
- this._bufferedEvents.forEach(function(args) {
- this.emit.apply(this, args);
- }.bind(this));
- this._bufferedEvents = [];
-};
-
-DelayedStream.prototype.pipe = function() {
- var r = Stream.prototype.pipe.apply(this, arguments);
- this.resume();
- return r;
-};
-
-DelayedStream.prototype._handleEmit = function(args) {
- if (this._released) {
- this.emit.apply(this, args);
- return;
- }
-
- if (args[0] === 'data') {
- this.dataSize += args[1].length;
- this._checkIfMaxDataSizeExceeded();
- }
-
- this._bufferedEvents.push(args);
-};
-
-DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {
- if (this._maxDataSizeExceeded) {
- return;
- }
-
- if (this.dataSize <= this.maxDataSize) {
- return;
- }
-
- this._maxDataSizeExceeded = true;
- var message =
- 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'
- this.emit('error', new Error(message));
-};
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json
deleted file mode 100644
index 3324a13e9..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "author": {
- "name": "Felix Geisendörfer",
- "email": "felix@debuggable.com",
- "url": "http://debuggable.com/"
- },
- "name": "delayed-stream",
- "description": "Buffers events from a stream until you are ready to handle them.",
- "version": "0.0.5",
- "homepage": "https://github.com/felixge/node-delayed-stream",
- "repository": {
- "type": "git",
- "url": "git://github.com/felixge/node-delayed-stream.git"
- },
- "main": "./lib/delayed_stream",
- "engines": {
- "node": ">=0.4.0"
- },
- "dependencies": {},
- "devDependencies": {
- "fake": "0.2.0",
- "far": "0.0.1"
- },
- "_id": "delayed-stream@0.0.5",
- "_engineSupported": true,
- "_npmVersion": "1.0.3",
- "_nodeVersion": "v0.4.9-pre",
- "_defaultsLoaded": true,
- "dist": {
- "shasum": "d4b1f43a93e8296dfe02694f4680bc37a313c73f",
- "tarball": "http://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz"
- },
- "scripts": {},
- "directories": {},
- "_shasum": "d4b1f43a93e8296dfe02694f4680bc37a313c73f",
- "_resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz",
- "_from": "delayed-stream@0.0.5",
- "bugs": {
- "url": "https://github.com/felixge/node-delayed-stream/issues"
- },
- "readme": "ERROR: No README data found!"
-}
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/common.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/common.js
deleted file mode 100644
index 4d71b8a64..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/common.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var common = module.exports;
-
-common.DelayedStream = require('..');
-common.assert = require('assert');
-common.fake = require('fake');
-common.PORT = 49252;
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-http-upload.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-http-upload.js
deleted file mode 100644
index 787539606..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-http-upload.js
+++ /dev/null
@@ -1,36 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var DelayedStream = common.DelayedStream;
-var http = require('http');
-
-var UPLOAD = new Buffer(10 * 1024 * 1024);
-
-var server = http.createServer(function(req, res) {
- var delayed = DelayedStream.create(req, {maxDataSize: UPLOAD.length});
-
- setTimeout(function() {
- res.writeHead(200);
- delayed.pipe(res);
- }, 10);
-});
-server.listen(common.PORT, function() {
- var request = http.request({
- method: 'POST',
- port: common.PORT,
- });
-
- request.write(UPLOAD);
- request.end();
-
- request.on('response', function(res) {
- var received = 0;
- res
- .on('data', function(chunk) {
- received += chunk.length;
- })
- .on('end', function() {
- assert.equal(received, UPLOAD.length);
- server.close();
- });
- });
-});
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-auto-pause.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-auto-pause.js
deleted file mode 100644
index 6f417f3e9..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-auto-pause.js
+++ /dev/null
@@ -1,21 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testAutoPause() {
- var source = new Stream();
-
- fake.expect(source, 'pause', 1);
- var delayedStream = DelayedStream.create(source);
- fake.verify();
-})();
-
-(function testDisableAutoPause() {
- var source = new Stream();
- fake.expect(source, 'pause', 0);
-
- var delayedStream = DelayedStream.create(source, {pauseStream: false});
- fake.verify();
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-pause.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-pause.js
deleted file mode 100644
index b50c39783..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream-pause.js
+++ /dev/null
@@ -1,14 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testDelayEventsUntilResume() {
- var source = new Stream();
- var delayedStream = DelayedStream.create(source, {pauseStream: false});
-
- fake.expect(source, 'pause');
- delayedStream.pause();
- fake.verify();
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream.js
deleted file mode 100644
index fc4047e08..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-delayed-stream.js
+++ /dev/null
@@ -1,48 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testDelayEventsUntilResume() {
- var source = new Stream();
- var delayedStream = DelayedStream.create(source, {pauseStream: false});
-
- // delayedStream must not emit until we resume
- fake.expect(delayedStream, 'emit', 0);
-
- // but our original source must emit
- var params = [];
- source.on('foo', function(param) {
- params.push(param);
- });
-
- source.emit('foo', 1);
- source.emit('foo', 2);
-
- // Make sure delayedStream did not emit, and source did
- assert.deepEqual(params, [1, 2]);
- fake.verify();
-
- // After resume, delayedStream must playback all events
- fake
- .stub(delayedStream, 'emit')
- .times(Infinity)
- .withArg(1, 'newListener');
- fake.expect(delayedStream, 'emit', ['foo', 1]);
- fake.expect(delayedStream, 'emit', ['foo', 2]);
- fake.expect(source, 'resume');
-
- delayedStream.resume();
- fake.verify();
-
- // Calling resume again will delegate to source
- fake.expect(source, 'resume');
- delayedStream.resume();
- fake.verify();
-
- // Emitting more events directly leads to them being emitted
- fake.expect(delayedStream, 'emit', ['foo', 3]);
- source.emit('foo', 3);
- fake.verify();
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-handle-source-errors.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-handle-source-errors.js
deleted file mode 100644
index a9d35e72c..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-handle-source-errors.js
+++ /dev/null
@@ -1,15 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testHandleSourceErrors() {
- var source = new Stream();
- var delayedStream = DelayedStream.create(source, {pauseStream: false});
-
- // We deal with this by attaching a no-op listener to 'error' on the source
- // when creating a new DelayedStream. This way error events on the source
- // won't throw.
- source.emit('error', new Error('something went wrong'));
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-max-data-size.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-max-data-size.js
deleted file mode 100644
index 7638a2bf0..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-max-data-size.js
+++ /dev/null
@@ -1,18 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testMaxDataSize() {
- var source = new Stream();
- var delayedStream = DelayedStream.create(source, {maxDataSize: 1024, pauseStream: false});
-
- source.emit('data', new Buffer(1024));
-
- fake
- .expect(delayedStream, 'emit')
- .withArg(1, 'error');
- source.emit('data', new Buffer(1));
- fake.verify();
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-pipe-resumes.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-pipe-resumes.js
deleted file mode 100644
index 7d312ab1f..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-pipe-resumes.js
+++ /dev/null
@@ -1,13 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testPipeReleases() {
- var source = new Stream();
- var delayedStream = DelayedStream.create(source, {pauseStream: false});
-
- fake.expect(delayedStream, 'resume');
- delayedStream.pipe(new Stream());
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-proxy-readable.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-proxy-readable.js
deleted file mode 100644
index d436163b7..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/integration/test-proxy-readable.js
+++ /dev/null
@@ -1,13 +0,0 @@
-var common = require('../common');
-var assert = common.assert;
-var fake = common.fake.create();
-var DelayedStream = common.DelayedStream;
-var Stream = require('stream').Stream;
-
-(function testProxyReadableProperty() {
- var source = new Stream();
- var delayedStream = DelayedStream.create(source, {pauseStream: false});
-
- source.readable = fake.value('source.readable');
- assert.strictEqual(delayedStream.readable, source.readable);
-})();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/run.js b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/run.js
deleted file mode 100755
index 0bb8e8224..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/test/run.js
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env node
-var far = require('far').create();
-
-far.add(__dirname);
-far.include(/test-.*\.js$/);
-
-far.execute();
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json b/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json
deleted file mode 100644
index 57c38da67..000000000
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "author": {
- "name": "Felix Geisendörfer",
- "email": "felix@debuggable.com",
- "url": "http://debuggable.com/"
- },
- "name": "combined-stream",
- "description": "A stream that emits multiple other streams one after another.",
- "version": "0.0.7",
- "homepage": "https://github.com/felixge/node-combined-stream",
- "repository": {
- "type": "git",
- "url": "git://github.com/felixge/node-combined-stream.git"
- },
- "main": "./lib/combined_stream",
- "scripts": {
- "test": "node test/run.js"
- },
- "engines": {
- "node": ">= 0.8"
- },
- "dependencies": {
- "delayed-stream": "0.0.5"
- },
- "devDependencies": {
- "far": "~0.0.7"
- },
- "readme": "# combined-stream [![Build Status](https://travis-ci.org/felixge/node-combined-stream.svg?branch=master)](https://travis-ci.org/felixge/node-combined-stream)\n\nA stream that emits multiple other streams one after another.\n\n## Installation\n\n``` bash\nnpm install combined-stream\n```\n\n## Usage\n\nHere is a simple example that shows how you can use combined-stream to combine\ntwo files into one:\n\n``` javascript\nvar CombinedStream = require('combined-stream');\nvar fs = require('fs');\n\nvar combinedStream = CombinedStream.create();\ncombinedStream.append(fs.createReadStream('file1.txt'));\ncombinedStream.append(fs.createReadStream('file2.txt'));\n\ncombinedStream.pipe(fs.createWriteStream('combined.txt'));\n```\n\nWhile the example above works great, it will pause all source streams until\nthey are needed. If you don't want that to happen, you can set `pauseStreams`\nto `false`:\n\n``` javascript\nvar CombinedStream = require('combined-stream');\nvar fs = require('fs');\n\nvar combinedStream = CombinedStream.create({pauseStreams: false});\ncombinedStream.append(fs.createReadStream('file1.txt'));\ncombinedStream.append(fs.createReadStream('file2.txt'));\n\ncombinedStream.pipe(fs.createWriteStream('combined.txt'));\n```\n\nHowever, what if you don't have all the source streams yet, or you don't want\nto allocate the resources (file descriptors, memory, etc.) for them right away?\nWell, in that case you can simply provide a callback that supplies the stream\nby calling a `next()` function:\n\n``` javascript\nvar CombinedStream = require('combined-stream');\nvar fs = require('fs');\n\nvar combinedStream = CombinedStream.create();\ncombinedStream.append(function(next) {\n next(fs.createReadStream('file1.txt'));\n});\ncombinedStream.append(function(next) {\n next(fs.createReadStream('file2.txt'));\n});\n\ncombinedStream.pipe(fs.createWriteStream('combined.txt'));\n```\n\n## API\n\n### CombinedStream.create([options])\n\nReturns a new combined stream object. Available options are:\n\n* `maxDataSize`\n* `pauseStreams`\n\nThe effect of those options is described below.\n\n### combinedStream.pauseStreams = `true`\n\nWhether to apply back pressure to the underlaying streams. If set to `false`,\nthe underlaying streams will never be paused. If set to `true`, the\nunderlaying streams will be paused right after being appended, as well as when\n`delayedStream.pipe()` wants to throttle.\n\n### combinedStream.maxDataSize = `2 * 1024 * 1024`\n\nThe maximum amount of bytes (or characters) to buffer for all source streams.\nIf this value is exceeded, `combinedStream` emits an `'error'` event.\n\n### combinedStream.dataSize = `0`\n\nThe amount of bytes (or characters) currently buffered by `combinedStream`.\n\n### combinedStream.append(stream)\n\nAppends the given `stream` to the combinedStream object. If `pauseStreams` is\nset to `true, this stream will also be paused right away.\n\n`streams` can also be a function that takes one parameter called `next`. 
`next`\nis a function that must be invoked in order to provide the `next` stream, see\nexample above.\n\nRegardless of how the `stream` is appended, combined-stream always attaches an\n`'error'` listener to it, so you don't have to do that manually.\n\nSpecial case: `stream` can also be a String or Buffer.\n\n### combinedStream.write(data)\n\nYou should not call this, `combinedStream` takes care of piping the appended\nstreams into itself for you.\n\n### combinedStream.resume()\n\nCauses `combinedStream` to start drain the streams it manages. The function is\nidempotent, and also emits a `'resume'` event each time which usually goes to\nthe stream that is currently being drained.\n\n### combinedStream.pause();\n\nIf `combinedStream.pauseStreams` is set to `false`, this does nothing.\nOtherwise a `'pause'` event is emitted, this goes to the stream that is\ncurrently being drained, so you can use it to apply back pressure.\n\n### combinedStream.end();\n\nSets `combinedStream.writable` to false, emits an `'end'` event, and removes\nall streams from the queue.\n\n### combinedStream.destroy();\n\nSame as `combinedStream.end()`, except it emits a `'close'` event instead of\n`'end'`.\n\n## License\n\ncombined-stream is licensed under the MIT license.\n",
- "readmeFilename": "Readme.md",
- "bugs": {
- "url": "https://github.com/felixge/node-combined-stream/issues"
- },
- "_id": "combined-stream@0.0.7",
- "_shasum": "0137e657baa5a7541c57ac37ac5fc07d73b4dc1f",
- "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz",
- "_from": "combined-stream@>=0.0.4 <0.1.0"
-}
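The `readme` field removed above documents the combined-stream 0.0.7 API this tree is dropping: `CombinedStream.create([options])`, `append()` (streams, lazy `next()` callbacks, strings, or Buffers), and the `pauseStreams`/`maxDataSize` options. A minimal usage sketch based only on that readme; the file names are placeholders:

```javascript
var CombinedStream = require('combined-stream');
var fs = require('fs');

// Source streams stay paused until drained unless {pauseStreams: false} is set.
var combined = CombinedStream.create({maxDataSize: 2 * 1024 * 1024});

// A stream, a lazily supplied stream via next(), and a plain string are all appendable.
combined.append(fs.createReadStream('part1.txt'));
combined.append(function (next) {
  next(fs.createReadStream('part2.txt'));
});
combined.append('-- trailer --');

combined.pipe(fs.createWriteStream('combined.txt'));
```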
diff --git a/deps/npm/node_modules/request/node_modules/form-data/package.json b/deps/npm/node_modules/request/node_modules/form-data/package.json
index 46ec26dff..9835d4980 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/package.json
+++ b/deps/npm/node_modules/request/node_modules/form-data/package.json
@@ -6,43 +6,71 @@
},
"name": "form-data",
"description": "A module to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.",
- "version": "0.2.0",
+ "version": "1.0.0-rc2",
"repository": {
"type": "git",
"url": "git://github.com/felixge/node-form-data.git"
},
"main": "./lib/form_data",
+ "browser": "./lib/browser",
"scripts": {
"test": "node test/run.js"
},
"engines": {
- "node": ">= 0.8"
+ "node": ">= 0.10"
},
"dependencies": {
- "async": "~0.9.0",
- "combined-stream": "~0.0.4",
- "mime-types": "~2.0.3"
+ "async": "^1.2.1",
+ "combined-stream": "^1.0.3",
+ "mime-types": "^2.1.1"
},
- "licenses": [
- {
- "type": "MIT",
- "url": "https://raw.github.com/felixge/node-form-data/master/License"
- }
- ],
+ "license": "MIT",
"devDependencies": {
- "fake": "~0.2.2",
- "far": "~0.0.7",
- "formidable": "~1.0.14",
- "request": "~2.36.0"
+ "fake": "^0.2.2",
+ "far": "^0.0.7",
+ "formidable": "^1.0.17",
+ "request": "^2.57.0"
},
- "readme": "# Form-Data [![Build Status](https://travis-ci.org/felixge/node-form-data.png?branch=master)](https://travis-ci.org/felixge/node-form-data) [![Dependency Status](https://gemnasium.com/felixge/node-form-data.png)](https://gemnasium.com/felixge/node-form-data)\n\nA module to create readable ```\"multipart/form-data\"``` streams. Can be used to submit forms and file uploads to other web applications.\n\nThe API of this module is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd].\n\n[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface\n[streams2-thing]: http://nodejs.org/api/stream.html#stream_compatibility_with_older_node_versions\n\n## Install\n\n```\nnpm install form-data\n```\n\n## Usage\n\nIn this example we are constructing a form with 3 fields that contain a string,\na buffer and a file stream.\n\n``` javascript\nvar FormData = require('form-data');\nvar fs = require('fs');\n\nvar form = new FormData();\nform.append('my_field', 'my value');\nform.append('my_buffer', new Buffer(10));\nform.append('my_file', fs.createReadStream('/foo/bar.jpg'));\n```\n\nAlso you can use http-response stream:\n\n``` javascript\nvar FormData = require('form-data');\nvar http = require('http');\n\nvar form = new FormData();\n\nhttp.request('http://nodejs.org/images/logo.png', function(response) {\n form.append('my_field', 'my value');\n form.append('my_buffer', new Buffer(10));\n form.append('my_logo', response);\n});\n```\n\nOr @mikeal's request stream:\n\n``` javascript\nvar FormData = require('form-data');\nvar request = require('request');\n\nvar form = new FormData();\n\nform.append('my_field', 'my value');\nform.append('my_buffer', new Buffer(10));\nform.append('my_logo', request('http://nodejs.org/images/logo.png'));\n```\n\nIn order to submit this form to a web application, call ```submit(url, [callback])``` method:\n\n``` javascript\nform.submit('http://example.org/', function(err, res) {\n // res – response object (http.IncomingMessage) //\n res.resume(); // for node-0.10.x\n});\n\n```\n\nFor more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods.\n\n### Alternative submission methods\n\nYou can use node's http client interface:\n\n``` javascript\nvar http = require('http');\n\nvar request = http.request({\n method: 'post',\n host: 'example.org',\n path: '/upload',\n headers: form.getHeaders()\n});\n\nform.pipe(request);\n\nrequest.on('response', function(res) {\n console.log(res.statusCode);\n});\n```\n\nOr if you would prefer the `'Content-Length'` header to be set for you:\n\n``` javascript\nform.submit('example.org/upload', function(err, res) {\n console.log(res.statusCode);\n});\n```\n\nTo use custom headers and pre-known length in parts:\n\n``` javascript\nvar CRLF = '\\r\\n';\nvar form = new FormData();\n\nvar options = {\n header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF,\n knownLength: 1\n};\n\nform.append('my_buffer', buffer, options);\n\nform.submit('http://example.com/', function(err, res) {\n if (err) throw err;\n console.log('Done');\n});\n```\n\nForm-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide \"file\"-related information manually:\n\n``` javascript\nsomeModule.stream(function(err, stdout, stderr) {\n if 
(err) throw err;\n\n var form = new FormData();\n\n form.append('file', stdout, {\n filename: 'unicycle.jpg',\n contentType: 'image/jpg',\n knownLength: 19806\n });\n\n form.submit('http://example.com/', function(err, res) {\n if (err) throw err;\n console.log('Done');\n });\n});\n```\n\nFor edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter:\n\n``` javascript\nform.submit({\n host: 'example.com',\n path: '/probably.php?extra=params',\n auth: 'username:password'\n}, function(err, res) {\n console.log(res.statusCode);\n});\n```\n\nIn case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`:\n\n``` javascript\nform.submit({\n host: 'example.com',\n path: '/surelynot.php',\n headers: {'x-test-header': 'test-header-value'}\n}, function(err, res) {\n console.log(res.statusCode);\n});\n```\n\n## Notes\n\n- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround.\n- If it feels like FormData hangs after submit and you're on ```node-0.10```, please check [Compatibility with Older Node Versions][streams2-thing]\n\n## TODO\n\n- Add new streams (0.10) support and try really hard not to break it for 0.8.x.\n\n## License\n\nForm-Data is licensed under the MIT license.\n",
- "readmeFilename": "Readme.md",
+ "gitHead": "9f29fefe9633f3adae72d6416fd6822c060ff6b6",
"bugs": {
"url": "https://github.com/felixge/node-form-data/issues"
},
"homepage": "https://github.com/felixge/node-form-data#readme",
- "_id": "form-data@0.2.0",
- "_shasum": "26f8bc26da6440e299cbdcfb69035c4f77a6e466",
- "_resolved": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz",
- "_from": "form-data@>=0.2.0 <0.3.0"
+ "_id": "form-data@1.0.0-rc2",
+ "_shasum": "5bc9c9b3dd3dec1977b0abf58790192081d95235",
+ "_from": "form-data@>=1.0.0-rc1 <1.1.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "0.12.4",
+ "_npmUser": {
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
+ },
+ "dist": {
+ "shasum": "5bc9c9b3dd3dec1977b0abf58790192081d95235",
+ "tarball": "http://registry.npmjs.org/form-data/-/form-data-1.0.0-rc2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "felixge",
+ "email": "felix@debuggable.com"
+ },
+ {
+ "name": "idralyuk",
+ "email": "igor@buran.us"
+ },
+ {
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
+ },
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ {
+ "name": "celer",
+ "email": "dtyree77@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.0-rc2.tgz"
}
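The `readme` field removed from the old form-data 0.2.0 metadata above documents the API that `request` builds on and that 1.0.0-rc2 keeps: `append()` for strings, Buffers, and streams, plus `submit()` and `getHeaders()`. A short sketch along the lines of that readme; the URL and file path are placeholders:

```javascript
var FormData = require('form-data');
var fs = require('fs');

var form = new FormData();
form.append('my_field', 'my value');
form.append('my_buffer', new Buffer(10));
form.append('my_file', fs.createReadStream('/tmp/upload.jpg'));

// submit() builds the multipart request and sets Content-Length when it can.
form.submit('http://example.org/upload', function (err, res) {
  if (err) throw err;
  console.log(res.statusCode);
  res.resume();
});
```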
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/LICENSE b/deps/npm/node_modules/request/node_modules/har-validator/LICENSE
index d52787158..ca55c91af 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/LICENSE
+++ b/deps/npm/node_modules/request/node_modules/har-validator/LICENSE
@@ -1,21 +1,13 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Ahmad Nassri (https://www.ahmadnassri.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+Copyright (c) 2015, Ahmad Nassri <ahmad@ahmadnassri.com>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/README.md b/deps/npm/node_modules/request/node_modules/har-validator/README.md
index 4e580ab86..f40ab755d 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/README.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/README.md
@@ -330,7 +330,7 @@ Donations are welcome to help support the continuous development of this project
## License
-[MIT](LICENSE) &copy; [Ahmad Nassri](https://www.ahmadnassri.com)
+[ISC License](LICENSE) &copy; [Ahmad Nassri](https://www.ahmadnassri.com/)
[license-url]: https://github.com/ahmadnassri/har-validator/blob/master/LICENSE
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/error.js b/deps/npm/node_modules/request/node_modules/har-validator/lib/error.js
index fc08a8721..fc08a8721 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/error.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/error.js
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/index.js b/deps/npm/node_modules/request/node_modules/har-validator/lib/index.js
index d0324ccd1..81d55607c 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/index.js
@@ -20,7 +20,7 @@ var runner = function (schema, data, cb) {
// callback?
if (!cb) {
- return !validate.errors > 0
+ return valid
} else {
return cb(validate.errors ? new ValidationError(validate.errors) : null, valid)
}
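The one functional change in this move from `src/` to `lib/` replaces a return value that leaned on operator precedence and boolean-to-number coercion with the `valid` flag already in scope. Assuming `validate.errors` is either `null` or a non-empty array of error objects, as the callback branch below it implies, the old expression parsed and evaluated like this:

```javascript
// Operator precedence: the old line parsed as (!validate.errors) > 0,
// so a boolean was coerced to a number before the comparison.
console.log(!null > 0);    // true  -> 1 > 0 -> true   (no errors)
console.log(![ {} ] > 0);  // false -> 0 > 0 -> false  (errors present)
```

Returning `valid` states the same result without relying on that coercion.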
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cache.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cache.json
index a3ab682d5..a3ab682d5 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cache.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cache.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cacheEntry.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cacheEntry.json
index a397439fd..a397439fd 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cacheEntry.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cacheEntry.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/content.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/content.json
index 3710d7939..3710d7939 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/content.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/content.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cookie.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cookie.json
index 576818183..576818183 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/cookie.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/cookie.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/creator.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/creator.json
index 505860064..505860064 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/creator.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/creator.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/entry.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/entry.json
index 7acdc1b82..8a9c022bd 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/entry.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/entry.json
@@ -36,7 +36,10 @@
},
"serverIPAddress": {
"type": "string",
- "format": "ipv4"
+ "oneOf": [
+ { "format": "ipv4" },
+ { "format": "ipv6" }
+ ]
},
"connection": {
"type": "string"
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/har.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/har.json
index b542782db..b542782db 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/har.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/har.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/index.js b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/index.js
index 7b6db7dab..7b6db7dab 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/index.js
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/log.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/log.json
index 0c91d38bf..0c91d38bf 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/log.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/log.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/page.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/page.json
index ef64abe5c..ef64abe5c 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/page.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/page.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/pageTimings.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/pageTimings.json
index adc83cccd..adc83cccd 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/pageTimings.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/pageTimings.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/postData.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/postData.json
index 91958b64a..91958b64a 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/postData.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/postData.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/record.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/record.json
index 04acd5194..04acd5194 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/record.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/record.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/request.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/request.json
index 639af06dc..639af06dc 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/request.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/request.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/response.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/response.json
index de99c55bb..de99c55bb 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/response.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/response.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/timings.json b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/timings.json
index 066ef71a1..066ef71a1 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/src/schemas/timings.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/lib/schemas/timings.json
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/LICENSE b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/LICENSE
index a3966cf93..8d4ca2802 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/LICENSE
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/LICENSE
@@ -7,7 +7,7 @@ of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:</p>
+furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/README.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/README.md
index dad5dcfd1..ee46c627b 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/README.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/README.md
@@ -135,7 +135,7 @@ fs.readFileAsync("file.json").then(JSON.parse).then(function(val) {
console.error("invalid json in file");
})
.catch(function(e) {
- console.error("unable to read file")
+ console.error("unable to read file");
});
```
@@ -153,7 +153,7 @@ catch(SyntaxError e) {
console.error("invalid json in file");
}
catch(Error e) {
- console.error("unable to read file")
+ console.error("unable to read file");
}
```
@@ -169,7 +169,7 @@ mapSeries(URLs, function (URL, done) {
var options = {};
needle.get(URL, options, function (error, response, body) {
if (error) {
- return done(error)
+ return done(error);
}
try {
var ret = JSON.parse(body);
@@ -181,13 +181,13 @@ mapSeries(URLs, function (URL, done) {
});
}, function (err, results) {
if (err) {
- console.log(err)
+ console.log(err);
} else {
console.log('All Needle requests successful');
// results is a 1 to 1 mapping in order of URLs > needle.body
processAndSaveAllInDB(results, function (err) {
if (err) {
- return done(err)
+ return done(err);
}
console.log('All Needle requests saved');
done(null);
@@ -328,7 +328,7 @@ Example of such library is the node core library `fs`. So if we promisify it, we
var fs = Promise.promisifyAll(require("fs"));
fs.readFileAsync("myfile.json").then(JSON.parse).then(function (json) {
- console.log("Successful json")
+ console.log("Successful json");
}).catch(SyntaxError, function (e) {
console.error("file contains invalid json");
}).catch(Promise.OperationalError, function (e) {
@@ -411,7 +411,7 @@ Promise.longStackTraces();
Promise.resolve().then(function outer() {
return Promise.resolve().then(function inner() {
return Promise.resolve().then(function evenMoreInner() {
- a.b.c.d()
+ a.b.c.d();
}).catch(function catcher(e){
console.error(e.stack);
});
@@ -477,15 +477,15 @@ Keep the test tab active because some tests are timing-sensitive and will fail i
The value of boolean flags is determined by presence, if you want to pass false value for a boolean flag, use the `no-`-prefix e.g. `--no-browser`.
- - `--run=String`. Which tests to run (or compile when testing in browser). Default `"all"`. Can also be a glob string (relative to ./test/mocha folder)
+ - `--run=String` - Which tests to run (or compile when testing in browser). Default `"all"`. Can also be a glob string (relative to ./test/mocha folder).
- `--cover=String`. Create code coverage using the String as istanbul reporter. Coverage is created in the ./coverage folder. No coverage is created by default, default reporter is `"html"` (use `--cover` to use default reporter).
- `--browser` - Whether to compile tests for browsers. Default `false`.
- - `--port=Number` - Whe port where local server is hosted when testing in browser. Default `9999`
+ - `--port=Number` - Port where local server is hosted when testing in browser. Default `9999`
- `--execute-browser-tests` - Whether to execute the compiled tests for browser when using `--browser`. Default `true`.
- `--open-browser` - Whether to open the default browser when executing browser tests. Default `true`.
- `--fake-timers` - Whether to use fake timers (`setTimeout` etc) when running tests in node. Default `true`.
- `--js-hint` - Whether to run JSHint on source files. Default `true`.
- - `--saucelabs` Wheter to create a tunnel to sauce labs and run tests in their VMs instead of your browser when compiling tests for browser.Default `false`.
+ - `--saucelabs` - Whether to create a tunnel to sauce labs and run tests in their VMs instead of your browser when compiling tests for browser. Default `false`.
## Benchmarks
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/changelog.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/changelog.md
index aa145dcb2..1b1ac116b 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/changelog.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/changelog.md
@@ -1,3 +1,50 @@
+## 2.9.34 (2015-07-15)
+
+Bugfixes:
+
+- Correct domain for .map, .each, .filter, .reduce callbacks ([#701](.)).
+ - Preserve bound-with-promise promises across the entire chain ([#702](.)).
+
+## 2.9.33 (2015-07-09)
+
+Bugfixes:
+
+ - Methods on `Function.prototype` are no longer promisified ([#680](.)).
+
+## 2.9.32 (2015-07-03)
+
+Bugfixes:
+
+ - Fix `.return(primitiveValue)` returning a wrapped version of the primitive value when a Node.js domain is active ([#689](.)).
+
+## 2.9.31 (2015-07-03)
+
+Bugfixes:
+
+ - Fix Promises/A+ compliance issue regarding circular thenables: the correct behavior is to go into an infinite loop instead of warning with an error (Fixes [#682](.)).
+ - Fix "(node) warning: possible EventEmitter memory leak detected" ([#661](.)).
+ - Fix callbacks sometimes being called with a wrong node.js domain ([#664](.)).
+ - Fix callbacks sometimes not being called at all in iOS 8.1 WebApp mode ([#666](.), [#687](.)).
+
+## 2.9.30 (2015-06-14)
+
+Bugfixes:
+
+ - Fix regression with `promisifyAll` not promisifying certain methods
+
+## 2.9.29 (2015-06-14)
+
+Bugfixes:
+
+ - Improve `promisifyAll` detection of functions that are class constructors. Fixes mongodb 2.x promisification.
+
+## 2.9.28 (2015-06-14)
+
+Bugfixes:
+
+ - Fix handled rejection being reported as unhandled in certain scenarios when using [.all](.) or [Promise.join](.) ([#645](.))
+ - Fix custom scheduler not being called in Google Chrome when long stack traces are enabled ([#650](.))
+
## 2.9.27 (2015-05-30)
Bugfixes:
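The changelog entries above, together with the `getDomain`/`domain.bind` hunks that follow in the bundled `bluebird.js`, are about keeping promise callbacks on the Node.js domain that was active when they were registered. A rough sketch of the behaviour the 2.9.34 fix for #701 targets, assuming Node's legacy `domain` module and the bundled bluebird:

```javascript
var domain = require('domain');
var Promise = require('bluebird');

var d = domain.create();
d.run(function () {
  Promise.resolve([1, 2, 3]).map(function (n) {
    // With the fix for #701, collection callbacks such as .map are bound
    // to `d`, so the active domain is preserved inside the callback.
    console.log(process.domain === d);
    return n * 2;
  });
});
```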
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js
index a529187a9..ddd6a8a26 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.js
@@ -8,7 +8,7 @@
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:</p>
+ * furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
@@ -23,7 +23,7 @@
*
*/
/**
- * bluebird build version 2.9.27
+ * bluebird build version 2.9.34
* Features enabled: core, race, call_get, generators, map, nodeify, promisify, props, reduce, settle, some, cancel, using, filter, any, each, timers
*/
!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.Promise=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof _dereq_=="function"&&_dereq_;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof _dereq_=="function"&&_dereq_;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
@@ -94,8 +94,6 @@ Async.prototype.throwLater = function(fn, arg) {
arg = fn;
fn = function () { throw arg; };
}
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
if (typeof setTimeout !== "undefined") {
setTimeout(function() {
fn(arg);
@@ -109,73 +107,18 @@ Async.prototype.throwLater = function(fn, arg) {
}
};
-Async.prototype._getDomain = function() {};
-
-if (!true) {
-if (util.isNode) {
- var EventsModule = _dereq_("events");
-
- var domainGetter = function() {
- var domain = process.domain;
- if (domain === null) return undefined;
- return domain;
- };
-
- if (EventsModule.usingDomains) {
- Async.prototype._getDomain = domainGetter;
- } else {
- var descriptor =
- Object.getOwnPropertyDescriptor(EventsModule, "usingDomains");
-
- if (descriptor) {
- if (!descriptor.configurable) {
- process.on("domainsActivated", function() {
- Async.prototype._getDomain = domainGetter;
- });
- } else {
- var usingDomains = false;
- Object.defineProperty(EventsModule, "usingDomains", {
- configurable: false,
- enumerable: true,
- get: function() {
- return usingDomains;
- },
- set: function(value) {
- if (usingDomains || !value) return;
- usingDomains = true;
- Async.prototype._getDomain = domainGetter;
- util.toFastProperties(process);
- process.emit("domainsActivated");
- }
- });
- }
- }
- }
-}
-}
-
function AsyncInvokeLater(fn, receiver, arg) {
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
this._lateQueue.push(fn, receiver, arg);
this._queueTick();
}
function AsyncInvoke(fn, receiver, arg) {
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
this._normalQueue.push(fn, receiver, arg);
this._queueTick();
}
function AsyncSettlePromises(promise) {
- var domain = this._getDomain();
- if (domain !== undefined) {
- var fn = domain.bind(promise._settlePromises);
- this._normalQueue.push(fn, promise, undefined);
- } else {
- this._normalQueue._pushOne(promise);
- }
+ this._normalQueue._pushOne(promise);
this._queueTick();
}
@@ -184,13 +127,18 @@ if (!util.hasDevTools) {
Async.prototype.invoke = AsyncInvoke;
Async.prototype.settlePromises = AsyncSettlePromises;
} else {
+ if (schedule.isStatic) {
+ schedule = function(fn) { setTimeout(fn, 0); };
+ }
Async.prototype.invokeLater = function (fn, receiver, arg) {
if (this._trampolineEnabled) {
AsyncInvokeLater.call(this, fn, receiver, arg);
} else {
- setTimeout(function() {
- fn.call(receiver, arg);
- }, 100);
+ this._schedule(function() {
+ setTimeout(function() {
+ fn.call(receiver, arg);
+ }, 100);
+ });
}
};
@@ -198,9 +146,9 @@ if (!util.hasDevTools) {
if (this._trampolineEnabled) {
AsyncInvoke.call(this, fn, receiver, arg);
} else {
- setTimeout(function() {
+ this._schedule(function() {
fn.call(receiver, arg);
- }, 0);
+ });
}
};
@@ -208,16 +156,14 @@ if (!util.hasDevTools) {
if (this._trampolineEnabled) {
AsyncSettlePromises.call(this, promise);
} else {
- setTimeout(function() {
+ this._schedule(function() {
promise._settlePromises();
- }, 0);
+ });
}
};
}
Async.prototype.invokeFirst = function (fn, receiver, arg) {
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
this._normalQueue.unshift(fn, receiver, arg);
this._queueTick();
};
@@ -255,7 +201,7 @@ Async.prototype._reset = function () {
module.exports = new Async();
module.exports.firstLineError = firstLineError;
-},{"./queue.js":28,"./schedule.js":31,"./util.js":38,"events":39}],3:[function(_dereq_,module,exports){
+},{"./queue.js":28,"./schedule.js":31,"./util.js":38}],3:[function(_dereq_,module,exports){
"use strict";
module.exports = function(Promise, INTERNAL, tryConvertToPromise) {
var rejectThis = function(_, e) {
@@ -268,7 +214,6 @@ var targetRejected = function(e, context) {
};
var bindingResolved = function(thisArg, context) {
- this._setBoundTo(thisArg);
if (this._isPending()) {
this._resolveCallback(context.target);
}
@@ -283,6 +228,8 @@ Promise.prototype.bind = function (thisArg) {
var ret = new Promise(INTERNAL);
ret._propagateFrom(this, 1);
var target = this._target();
+
+ ret._setBoundTo(maybePromise);
if (maybePromise instanceof Promise) {
var context = {
promiseRejectionQueued: false,
@@ -294,7 +241,6 @@ Promise.prototype.bind = function (thisArg) {
maybePromise._then(
bindingResolved, bindingRejected, ret._progress, ret, context);
} else {
- ret._setBoundTo(thisArg);
ret._resolveCallback(target);
}
return ret;
@@ -317,13 +263,12 @@ Promise.bind = function (thisArg, value) {
var maybePromise = tryConvertToPromise(thisArg);
var ret = new Promise(INTERNAL);
+ ret._setBoundTo(maybePromise);
if (maybePromise instanceof Promise) {
- maybePromise._then(function(thisArg) {
- ret._setBoundTo(thisArg);
+ maybePromise._then(function() {
ret._resolveCallback(value);
}, ret._reject, ret._progress, ret, null);
} else {
- ret._setBoundTo(thisArg);
ret._resolveCallback(value);
}
return ret;
@@ -903,7 +848,8 @@ var captureStackTrace = (function stackDetection() {
catch(e) {
hasStackAfterThrow = ("stack" in e);
}
- if (!("stack" in err) && hasStackAfterThrow) {
+ if (!("stack" in err) && hasStackAfterThrow &&
+ typeof Error.stackTraceLimit === "number") {
stackFramePattern = v8stackFramePattern;
formatStack = v8stackFormatter;
return function captureStackTrace(o) {
@@ -1045,7 +991,7 @@ function safePredicate(predicate, e) {
CatchFilter.prototype.doFilter = function (e) {
var cb = this._callback;
var promise = this._promise;
- var boundTo = promise._boundTo;
+ var boundTo = promise._boundValue();
for (var i = 0, len = this._instances.length; i < len; ++i) {
var item = this._instances[i];
var itemIsErrorType = item === Error ||
@@ -1123,6 +1069,7 @@ return createContext;
},{}],10:[function(_dereq_,module,exports){
"use strict";
module.exports = function(Promise, CapturedTrace) {
+var getDomain = Promise._getDomain;
var async = _dereq_("./async.js");
var Warning = _dereq_("./errors.js").Warning;
var util = _dereq_("./util.js");
@@ -1137,7 +1084,13 @@ if (debugging) {
async.disableTrampolineIfNecessary();
}
+Promise.prototype._ignoreRejections = function() {
+ this._unsetRejectionIsUnhandled();
+ this._bitField = this._bitField | 16777216;
+};
+
Promise.prototype._ensurePossibleRejectionHandled = function () {
+ if ((this._bitField & 16777216) !== 0) return;
this._setRejectionIsUnhandled();
async.invokeLater(this._notifyUnhandledRejection, this, undefined);
};
@@ -1236,11 +1189,17 @@ Promise.prototype._warn = function(message) {
};
Promise.onPossiblyUnhandledRejection = function (fn) {
- possiblyUnhandledRejection = typeof fn === "function" ? fn : undefined;
+ var domain = getDomain();
+ possiblyUnhandledRejection =
+ typeof fn === "function" ? (domain === null ? fn : domain.bind(fn))
+ : undefined;
};
Promise.onUnhandledRejectionHandled = function (fn) {
- unhandledRejectionHandled = typeof fn === "function" ? fn : undefined;
+ var domain = getDomain();
+ unhandledRejectionHandled =
+ typeof fn === "function" ? (domain === null ? fn : domain.bind(fn))
+ : undefined;
};
Promise.longStackTraces = function () {
@@ -1273,7 +1232,6 @@ return function() {
"use strict";
var util = _dereq_("./util.js");
var isPrimitive = util.isPrimitive;
-var wrapsPrimitiveReceiver = util.wrapsPrimitiveReceiver;
module.exports = function(Promise) {
var returner = function () {
@@ -1304,7 +1262,7 @@ Promise.prototype["return"] =
Promise.prototype.thenReturn = function (value) {
if (value === undefined) return this.then(returnUndefined);
- if (wrapsPrimitiveReceiver && isPrimitive(value)) {
+ if (isPrimitive(value)) {
return this._then(
wrapper(value, 2),
undefined,
@@ -1320,7 +1278,7 @@ Promise.prototype["throw"] =
Promise.prototype.thenThrow = function (reason) {
if (reason === undefined) return this.then(throwUndefined);
- if (wrapsPrimitiveReceiver && isPrimitive(reason)) {
+ if (isPrimitive(reason)) {
return this._then(
wrapper(reason, 1),
undefined,
@@ -1560,7 +1518,6 @@ Promise.filter = function (promises, fn, options) {
"use strict";
module.exports = function(Promise, NEXT_FILTER, tryConvertToPromise) {
var util = _dereq_("./util.js");
-var wrapsPrimitiveReceiver = util.wrapsPrimitiveReceiver;
var isPrimitive = util.isPrimitive;
var thrower = util.thrower;
@@ -1582,7 +1539,7 @@ function throw$(r) {
}
function promisedFinally(ret, reasonOrValue, isFulfilled) {
var then;
- if (wrapsPrimitiveReceiver && isPrimitive(reasonOrValue)) {
+ if (isPrimitive(reasonOrValue)) {
then = isFulfilled ? return$(reasonOrValue) : throw$(reasonOrValue);
} else {
then = isFulfilled ? returnThis : throwThis;
@@ -1595,7 +1552,7 @@ function finallyHandler(reasonOrValue) {
var handler = this.handler;
var ret = promise._isBound()
- ? handler.call(promise._boundTo)
+ ? handler.call(promise._boundValue())
: handler();
if (ret !== undefined) {
@@ -1620,7 +1577,7 @@ function tapHandler(value) {
var handler = this.handler;
var ret = promise._isBound()
- ? handler.call(promise._boundTo, value)
+ ? handler.call(promise._boundValue(), value)
: handler(value);
if (ret !== undefined) {
@@ -1911,6 +1868,7 @@ module.exports = function(Promise,
apiRejection,
tryConvertToPromise,
INTERNAL) {
+var getDomain = Promise._getDomain;
var async = _dereq_("./async.js");
var util = _dereq_("./util.js");
var tryCatch = util.tryCatch;
@@ -1921,7 +1879,8 @@ var EMPTY_ARRAY = [];
function MappingPromiseArray(promises, fn, limit, _filter) {
this.constructor$(promises);
this._promise._captureStackTrace();
- this._callback = fn;
+ var domain = getDomain();
+ this._callback = domain === null ? fn : domain.bind(fn);
this._preservedValues = _filter === INTERNAL
? new Array(this.length())
: null;
@@ -1956,7 +1915,7 @@ MappingPromiseArray.prototype._promiseFulfilled = function (value, index) {
if (preservedValues !== null) preservedValues[index] = value;
var callback = this._callback;
- var receiver = this._promise._boundTo;
+ var receiver = this._promise._boundValue();
this._promise._pushContext();
var ret = tryCatch(callback).call(receiver, value, index, length);
this._promise._popContext();
@@ -2094,7 +2053,8 @@ var errorObj = util.errorObj;
function spreadAdapter(val, nodeback) {
var promise = this;
if (!util.isArray(val)) return successAdapter.call(promise, val, nodeback);
- var ret = tryCatch(nodeback).apply(promise._boundTo, [null].concat(val));
+ var ret =
+ tryCatch(nodeback).apply(promise._boundValue(), [null].concat(val));
if (ret === errorObj) {
async.throwLater(ret.e);
}
@@ -2102,7 +2062,7 @@ function spreadAdapter(val, nodeback) {
function successAdapter(val, nodeback) {
var promise = this;
- var receiver = promise._boundTo;
+ var receiver = promise._boundValue();
var ret = val === undefined
? tryCatch(nodeback).call(receiver, null)
: tryCatch(nodeback).call(receiver, null, val);
@@ -2118,13 +2078,13 @@ function errorAdapter(reason, nodeback) {
newReason.cause = reason;
reason = newReason;
}
- var ret = tryCatch(nodeback).call(promise._boundTo, reason);
+ var ret = tryCatch(nodeback).call(promise._boundValue(), reason);
if (ret === errorObj) {
async.throwLater(ret.e);
}
}
-Promise.prototype.asCallback =
+Promise.prototype.asCallback =
Promise.prototype.nodeify = function (nodeback, options) {
if (typeof nodeback == "function") {
var adapter = successAdapter;
@@ -2233,7 +2193,23 @@ var reflect = function() {
var apiRejection = function(msg) {
return Promise.reject(new TypeError(msg));
};
+
var util = _dereq_("./util.js");
+
+var getDomain;
+if (util.isNode) {
+ getDomain = function() {
+ var ret = process.domain;
+ if (ret === undefined) ret = null;
+ return ret;
+ };
+} else {
+ getDomain = function() {
+ return null;
+ };
+}
+util.notEnumerableProp(Promise, "_getDomain", getDomain);
+
var async = _dereq_("./async.js");
var errors = _dereq_("./errors.js");
var TypeError = Promise.TypeError = errors.TypeError;
@@ -2432,8 +2408,12 @@ Promise.prototype._then = function (
if (!haveInternalData) ret._setIsMigrated();
}
- var callbackIndex =
- target._addCallbacks(didFulfill, didReject, didProgress, ret, receiver);
+ var callbackIndex = target._addCallbacks(didFulfill,
+ didReject,
+ didProgress,
+ ret,
+ receiver,
+ getDomain());
if (target._isResolved() && !target._isSettlePromisesQueued()) {
async.invoke(
@@ -2515,7 +2495,7 @@ Promise.prototype._receiverAt = function (index) {
: this[
index * 5 - 5 + 4];
if (ret === undefined && this._isBound()) {
- return this._boundTo;
+ return this._boundValue();
}
return ret;
};
@@ -2538,6 +2518,20 @@ Promise.prototype._rejectionHandlerAt = function (index) {
: this[index * 5 - 5 + 1];
};
+Promise.prototype._boundValue = function() {
+ var ret = this._boundTo;
+ if (ret !== undefined) {
+ if (ret instanceof Promise) {
+ if (ret.isFulfilled()) {
+ return ret.value();
+ } else {
+ return undefined;
+ }
+ }
+ }
+ return ret;
+};
+
Promise.prototype._migrateCallbacks = function (follower, index) {
var fulfill = follower._fulfillmentHandlerAt(index);
var reject = follower._rejectionHandlerAt(index);
@@ -2545,7 +2539,7 @@ Promise.prototype._migrateCallbacks = function (follower, index) {
var promise = follower._promiseAt(index);
var receiver = follower._receiverAt(index);
if (promise instanceof Promise) promise._setIsMigrated();
- this._addCallbacks(fulfill, reject, progress, promise, receiver);
+ this._addCallbacks(fulfill, reject, progress, promise, receiver, null);
};
Promise.prototype._addCallbacks = function (
@@ -2553,7 +2547,8 @@ Promise.prototype._addCallbacks = function (
reject,
progress,
promise,
- receiver
+ receiver,
+ domain
) {
var index = this._length();
@@ -2565,20 +2560,34 @@ Promise.prototype._addCallbacks = function (
if (index === 0) {
this._promise0 = promise;
if (receiver !== undefined) this._receiver0 = receiver;
- if (typeof fulfill === "function" && !this._isCarryingStackTrace())
- this._fulfillmentHandler0 = fulfill;
- if (typeof reject === "function") this._rejectionHandler0 = reject;
- if (typeof progress === "function") this._progressHandler0 = progress;
+ if (typeof fulfill === "function" && !this._isCarryingStackTrace()) {
+ this._fulfillmentHandler0 =
+ domain === null ? fulfill : domain.bind(fulfill);
+ }
+ if (typeof reject === "function") {
+ this._rejectionHandler0 =
+ domain === null ? reject : domain.bind(reject);
+ }
+ if (typeof progress === "function") {
+ this._progressHandler0 =
+ domain === null ? progress : domain.bind(progress);
+ }
} else {
var base = index * 5 - 5;
this[base + 3] = promise;
this[base + 4] = receiver;
- if (typeof fulfill === "function")
- this[base + 0] = fulfill;
- if (typeof reject === "function")
- this[base + 1] = reject;
- if (typeof progress === "function")
- this[base + 2] = progress;
+ if (typeof fulfill === "function") {
+ this[base + 0] =
+ domain === null ? fulfill : domain.bind(fulfill);
+ }
+ if (typeof reject === "function") {
+ this[base + 1] =
+ domain === null ? reject : domain.bind(reject);
+ }
+ if (typeof progress === "function") {
+ this[base + 2] =
+ domain === null ? progress : domain.bind(progress);
+ }
}
this._setLength(index + 1);
return index;
@@ -2673,7 +2682,7 @@ Promise.prototype._settlePromiseFromHandler = function (
promise._pushContext();
var x;
if (receiver === APPLY && !this._isRejected()) {
- x = tryCatch(handler).apply(this._boundTo, value);
+ x = tryCatch(handler).apply(this._boundValue(), value);
} else {
x = tryCatch(handler).call(receiver, value);
}
@@ -2743,8 +2752,6 @@ Promise.prototype._settlePromiseAt = function (index) {
this._isCarryingStackTrace() ? this._getCarriedStackTrace() : undefined;
var value = this._settledValue;
var receiver = this._receiverAt(index);
-
-
this._clearCallbackDataAtIndex(index);
if (typeof handler === "function") {
@@ -2871,7 +2878,10 @@ Promise.prototype._settlePromises = function () {
}
};
-Promise._makeSelfResolutionError = makeSelfResolutionError;
+util.notEnumerableProp(Promise,
+ "_makeSelfResolutionError",
+ makeSelfResolutionError);
+
_dereq_("./progress.js")(Promise, PromiseArray);
_dereq_("./method.js")(Promise, INTERNAL, tryConvertToPromise, apiRejection);
_dereq_("./bind.js")(Promise, INTERNAL, tryConvertToPromise);
@@ -3006,7 +3016,7 @@ PromiseArray.prototype._init = function init(_, resolveValueIfEmpty) {
if (maybePromise instanceof Promise) {
maybePromise = maybePromise._target();
if (isResolved) {
- maybePromise._unsetRejectionIsUnhandled();
+ maybePromise._ignoreRejections();
} else if (maybePromise._isPending()) {
maybePromise._proxyPromiseArray(this, i);
} else if (maybePromise._isFulfilled()) {
@@ -3205,12 +3215,21 @@ var canEvaluate = util.canEvaluate;
var TypeError = _dereq_("./errors").TypeError;
var defaultSuffix = "Async";
var defaultPromisified = {__isPromisified__: true};
-var noCopyPropsPattern =
- /^(?:length|name|arguments|caller|callee|prototype|__isPromisified__)$/;
-var defaultFilter = function(name, func) {
+var noCopyProps = [
+ "arity", "length",
+ "name",
+ "arguments",
+ "caller",
+ "callee",
+ "prototype",
+ "__isPromisified__"
+];
+var noCopyPropsPattern = new RegExp("^(?:" + noCopyProps.join("|") + ")$");
+
+var defaultFilter = function(name) {
return util.isIdentifier(name) &&
name.charAt(0) !== "_" &&
- !util.isClass(func);
+ name !== "constructor";
};
function propsFilter(key) {
@@ -3354,6 +3373,7 @@ function(callback, receiver, originalName, fn) {
"nodebackForPromise",
"tryCatch",
"errorObj",
+ "notEnumerableProp",
"INTERNAL","'use strict'; \n\
var ret = function (Parameters) { \n\
'use strict'; \n\
@@ -3371,7 +3391,7 @@ function(callback, receiver, originalName, fn) {
} \n\
return promise; \n\
}; \n\
- ret.__isPromisified__ = true; \n\
+ notEnumerableProp(ret, '__isPromisified__', true); \n\
return ret; \n\
"
.replace("Parameters", parameterDeclaration(newParameterCount))
@@ -3385,6 +3405,7 @@ function(callback, receiver, originalName, fn) {
nodebackForPromise,
util.tryCatch,
util.errorObj,
+ util.notEnumerableProp,
INTERNAL
);
};
@@ -3411,7 +3432,7 @@ function makeNodePromisifiedClosure(callback, receiver, _, fn) {
}
return promise;
}
- promisified.__isPromisified__ = true;
+ util.notEnumerableProp(promisified, "__isPromisified__", true);
return promisified;
}
@@ -3714,6 +3735,7 @@ module.exports = function(Promise,
apiRejection,
tryConvertToPromise,
INTERNAL) {
+var getDomain = Promise._getDomain;
var async = _dereq_("./async.js");
var util = _dereq_("./util.js");
var tryCatch = util.tryCatch;
@@ -3742,7 +3764,8 @@ function ReductionPromiseArray(promises, fn, accum, _each) {
}
}
if (!(isPromise || this._zerothIsAccum)) this._gotAccum = true;
- this._callback = fn;
+ var domain = getDomain();
+ this._callback = domain === null ? fn : domain.bind(fn);
this._accum = accum;
if (!rejected) async.invoke(init, this, undefined);
}
@@ -3796,7 +3819,7 @@ ReductionPromiseArray.prototype._promiseFulfilled = function (value, index) {
if (!gotAccum) return;
var callback = this._callback;
- var receiver = this._promise._boundTo;
+ var receiver = this._promise._boundValue();
var ret;
for (var i = this._reducingIndex; i < length; ++i) {
@@ -3868,7 +3891,10 @@ if (util.isNode && typeof MutationObserver === "undefined") {
schedule = util.isRecentNode
? function(fn) { GlobalSetImmediate.call(global, fn); }
: function(fn) { ProcessNextTick.call(process, fn); };
-} else if (typeof MutationObserver !== "undefined") {
+} else if ((typeof MutationObserver !== "undefined") &&
+ !(typeof window !== "undefined" &&
+ window.navigator &&
+ window.navigator.standalone)) {
schedule = function(fn) {
var div = document.createElement("div");
var observer = new MutationObserver(fn);
@@ -4218,12 +4244,7 @@ function doThenable(x, then, context) {
function resolveFromThenable(value) {
if (!promise) return;
- if (x === value) {
- promise._rejectCallback(
- Promise._makeSelfResolutionError(), false, true);
- } else {
- promise._resolveCallback(value);
- }
+ promise._resolveCallback(value);
promise = null;
}
@@ -4533,7 +4554,9 @@ var errorObj = {e: {}};
var tryCatchTarget;
function tryCatcher() {
try {
- return tryCatchTarget.apply(this, arguments);
+ var target = tryCatchTarget;
+ tryCatchTarget = null;
+ return target.apply(this, arguments);
} catch (e) {
errorObj.e = e;
return errorObj;
@@ -4594,6 +4617,7 @@ function withAppended(target, appendee) {
function getDataPropertyOrDefault(obj, key, defaultValue) {
if (es5.isES5) {
var desc = Object.getOwnPropertyDescriptor(obj, key);
+
if (desc != null) {
return desc.get == null && desc.set == null
? desc.value
@@ -4616,23 +4640,32 @@ function notEnumerableProp(obj, name, value) {
return obj;
}
-
-var wrapsPrimitiveReceiver = (function() {
- return this !== "string";
-}).call("string");
-
function thrower(r) {
throw r;
}
var inheritedDataKeys = (function() {
+ var excludedPrototypes = [
+ Array.prototype,
+ Object.prototype,
+ Function.prototype
+ ];
+
+ var isExcludedProto = function(val) {
+ for (var i = 0; i < excludedPrototypes.length; ++i) {
+ if (excludedPrototypes[i] === val) {
+ return true;
+ }
+ }
+ return false;
+ };
+
if (es5.isES5) {
- var oProto = Object.prototype;
var getKeys = Object.getOwnPropertyNames;
return function(obj) {
var ret = [];
var visitedKeys = Object.create(null);
- while (obj != null && obj !== oProto) {
+ while (obj != null && !isExcludedProto(obj)) {
var keys;
try {
keys = getKeys(obj);
@@ -4653,11 +4686,23 @@ var inheritedDataKeys = (function() {
return ret;
};
} else {
+ var hasProp = {}.hasOwnProperty;
return function(obj) {
+ if (isExcludedProto(obj)) return [];
var ret = [];
+
/*jshint forin:false */
- for (var key in obj) {
- ret.push(key);
+ enumeration: for (var key in obj) {
+ if (hasProp.call(obj, key)) {
+ ret.push(key);
+ } else {
+ for (var i = 0; i < excludedPrototypes.length; ++i) {
+ if (hasProp.call(excludedPrototypes[i], key)) {
+ continue enumeration;
+ }
+ }
+ ret.push(key);
+ }
}
return ret;
};
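
Note: both branches of inheritedDataKeys now stop at Array.prototype, Object.prototype and Function.prototype, so promisifyAll never walks into (or tries to wrap) built-in prototype members. A compact sketch of the ES5 path under that assumption (the original also wraps getOwnPropertyNames in a try/catch):

// Sketch: collect own + inherited data keys, stopping the prototype walk at the
// built-in prototypes.
var excludedPrototypes = [Array.prototype, Object.prototype, Function.prototype];

function inheritedDataKeys(obj) {
  var ret = [];
  var seen = Object.create(null);
  while (obj != null && excludedPrototypes.indexOf(obj) === -1) {
    var keys = Object.getOwnPropertyNames(obj);
    for (var i = 0; i < keys.length; ++i) {
      if (!seen[keys[i]]) {
        seen[keys[i]] = true;
        ret.push(keys[i]);
      }
    }
    obj = Object.getPrototypeOf(obj);
  }
  return ret;
}
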
@@ -4665,13 +4710,22 @@ var inheritedDataKeys = (function() {
})();
+var thisAssignmentPattern = /this\s*\.\s*\S+\s*=/;
function isClass(fn) {
try {
if (typeof fn === "function") {
var keys = es5.names(fn.prototype);
- if (es5.isES5) return keys.length > 1;
- return keys.length > 0 &&
- !(keys.length === 1 && keys[0] === "constructor");
+
+ var hasMethods = es5.isES5 && keys.length > 1;
+ var hasMethodsOtherThanConstructor = keys.length > 0 &&
+ !(keys.length === 1 && keys[0] === "constructor");
+ var hasThisAssignmentAndStaticMethods =
+ thisAssignmentPattern.test(fn + "") && es5.names(fn).length > 0;
+
+ if (hasMethods || hasMethodsOtherThanConstructor ||
+ hasThisAssignmentAndStaticMethods) {
+ return true;
+ }
}
return false;
} catch (e) {
@@ -4751,7 +4805,9 @@ function copyDescriptors(from, to, filter) {
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
if (filter(key)) {
- es5.defineProperty(to, key, es5.getDescriptor(from, key));
+ try {
+ es5.defineProperty(to, key, es5.getDescriptor(from, key));
+ } catch (ignore) {}
}
}
}
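
Note: copyDescriptors now swallows failures from defineProperty, which can throw for non-configurable or otherwise locked-down properties, instead of aborting the whole copy. A compact sketch of that defensive copy, assuming an ES5 environment (the original routes through its es5.js shims):

// Sketch: copy property descriptors that pass a filter; skip any property that
// cannot be redefined rather than throwing.
function copyDescriptors(from, to, filter) {
  var keys = Object.getOwnPropertyNames(from);
  for (var i = 0; i < keys.length; ++i) {
    var key = keys[i];
    if (!filter(key)) continue;
    try {
      Object.defineProperty(to, key, Object.getOwnPropertyDescriptor(from, key));
    } catch (ignore) {}   // best effort: non-configurable targets are left as-is
  }
}
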
@@ -4773,7 +4829,6 @@ var ret = {
inherits: inherits,
withAppended: withAppended,
maybeWrapAsError: maybeWrapAsError,
- wrapsPrimitiveReceiver: wrapsPrimitiveReceiver,
toFastProperties: toFastProperties,
filledRange: filledRange,
toString: safeToString,
@@ -4792,311 +4847,11 @@ ret.isRecentNode = ret.isNode && (function() {
var version = process.versions.node.split(".").map(Number);
return (version[0] === 0 && version[1] > 10) || (version[0] > 0);
})();
-try {throw new Error(); } catch (e) {ret.lastLineError = e;}
-module.exports = ret;
-
-},{"./es5.js":14}],39:[function(_dereq_,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-function EventEmitter() {
- this._events = this._events || {};
- this._maxListeners = this._maxListeners || undefined;
-}
-module.exports = EventEmitter;
-
-// Backwards-compat with node 0.10.x
-EventEmitter.EventEmitter = EventEmitter;
-
-EventEmitter.prototype._events = undefined;
-EventEmitter.prototype._maxListeners = undefined;
-
-// By default EventEmitters will print a warning if more than 10 listeners are
-// added to it. This is a useful default which helps finding memory leaks.
-EventEmitter.defaultMaxListeners = 10;
-
-// Obviously not all Emitters should be limited to 10. This function allows
-// that to be increased. Set to zero for unlimited.
-EventEmitter.prototype.setMaxListeners = function(n) {
- if (!isNumber(n) || n < 0 || isNaN(n))
- throw TypeError('n must be a positive number');
- this._maxListeners = n;
- return this;
-};
-
-EventEmitter.prototype.emit = function(type) {
- var er, handler, len, args, i, listeners;
-
- if (!this._events)
- this._events = {};
-
- // If there is no 'error' event listener then throw.
- if (type === 'error') {
- if (!this._events.error ||
- (isObject(this._events.error) && !this._events.error.length)) {
- er = arguments[1];
- if (er instanceof Error) {
- throw er; // Unhandled 'error' event
- }
- throw TypeError('Uncaught, unspecified "error" event.');
- }
- }
-
- handler = this._events[type];
-
- if (isUndefined(handler))
- return false;
-
- if (isFunction(handler)) {
- switch (arguments.length) {
- // fast cases
- case 1:
- handler.call(this);
- break;
- case 2:
- handler.call(this, arguments[1]);
- break;
- case 3:
- handler.call(this, arguments[1], arguments[2]);
- break;
- // slower
- default:
- len = arguments.length;
- args = new Array(len - 1);
- for (i = 1; i < len; i++)
- args[i - 1] = arguments[i];
- handler.apply(this, args);
- }
- } else if (isObject(handler)) {
- len = arguments.length;
- args = new Array(len - 1);
- for (i = 1; i < len; i++)
- args[i - 1] = arguments[i];
-
- listeners = handler.slice();
- len = listeners.length;
- for (i = 0; i < len; i++)
- listeners[i].apply(this, args);
- }
-
- return true;
-};
-
-EventEmitter.prototype.addListener = function(type, listener) {
- var m;
-
- if (!isFunction(listener))
- throw TypeError('listener must be a function');
-
- if (!this._events)
- this._events = {};
-
- // To avoid recursion in the case that type === "newListener"! Before
- // adding it to the listeners, first emit "newListener".
- if (this._events.newListener)
- this.emit('newListener', type,
- isFunction(listener.listener) ?
- listener.listener : listener);
-
- if (!this._events[type])
- // Optimize the case of one listener. Don't need the extra array object.
- this._events[type] = listener;
- else if (isObject(this._events[type]))
- // If we've already got an array, just append.
- this._events[type].push(listener);
- else
- // Adding the second element, need to change to array.
- this._events[type] = [this._events[type], listener];
-
- // Check for listener leak
- if (isObject(this._events[type]) && !this._events[type].warned) {
- var m;
- if (!isUndefined(this._maxListeners)) {
- m = this._maxListeners;
- } else {
- m = EventEmitter.defaultMaxListeners;
- }
-
- if (m && m > 0 && this._events[type].length > m) {
- this._events[type].warned = true;
- console.error('(node) warning: possible EventEmitter memory ' +
- 'leak detected. %d listeners added. ' +
- 'Use emitter.setMaxListeners() to increase limit.',
- this._events[type].length);
- if (typeof console.trace === 'function') {
- // not supported in IE 10
- console.trace();
- }
- }
- }
-
- return this;
-};
-
-EventEmitter.prototype.on = EventEmitter.prototype.addListener;
-
-EventEmitter.prototype.once = function(type, listener) {
- if (!isFunction(listener))
- throw TypeError('listener must be a function');
-
- var fired = false;
-
- function g() {
- this.removeListener(type, g);
- if (!fired) {
- fired = true;
- listener.apply(this, arguments);
- }
- }
-
- g.listener = listener;
- this.on(type, g);
-
- return this;
-};
-
-// emits a 'removeListener' event iff the listener was removed
-EventEmitter.prototype.removeListener = function(type, listener) {
- var list, position, length, i;
-
- if (!isFunction(listener))
- throw TypeError('listener must be a function');
-
- if (!this._events || !this._events[type])
- return this;
-
- list = this._events[type];
- length = list.length;
- position = -1;
+if (ret.isNode) ret.toFastProperties(process);
- if (list === listener ||
- (isFunction(list.listener) && list.listener === listener)) {
- delete this._events[type];
- if (this._events.removeListener)
- this.emit('removeListener', type, listener);
-
- } else if (isObject(list)) {
- for (i = length; i-- > 0;) {
- if (list[i] === listener ||
- (list[i].listener && list[i].listener === listener)) {
- position = i;
- break;
- }
- }
-
- if (position < 0)
- return this;
-
- if (list.length === 1) {
- list.length = 0;
- delete this._events[type];
- } else {
- list.splice(position, 1);
- }
-
- if (this._events.removeListener)
- this.emit('removeListener', type, listener);
- }
-
- return this;
-};
-
-EventEmitter.prototype.removeAllListeners = function(type) {
- var key, listeners;
-
- if (!this._events)
- return this;
-
- // not listening for removeListener, no need to emit
- if (!this._events.removeListener) {
- if (arguments.length === 0)
- this._events = {};
- else if (this._events[type])
- delete this._events[type];
- return this;
- }
-
- // emit removeListener for all listeners on all events
- if (arguments.length === 0) {
- for (key in this._events) {
- if (key === 'removeListener') continue;
- this.removeAllListeners(key);
- }
- this.removeAllListeners('removeListener');
- this._events = {};
- return this;
- }
-
- listeners = this._events[type];
-
- if (isFunction(listeners)) {
- this.removeListener(type, listeners);
- } else {
- // LIFO order
- while (listeners.length)
- this.removeListener(type, listeners[listeners.length - 1]);
- }
- delete this._events[type];
-
- return this;
-};
-
-EventEmitter.prototype.listeners = function(type) {
- var ret;
- if (!this._events || !this._events[type])
- ret = [];
- else if (isFunction(this._events[type]))
- ret = [this._events[type]];
- else
- ret = this._events[type].slice();
- return ret;
-};
-
-EventEmitter.listenerCount = function(emitter, type) {
- var ret;
- if (!emitter._events || !emitter._events[type])
- ret = 0;
- else if (isFunction(emitter._events[type]))
- ret = 1;
- else
- ret = emitter._events[type].length;
- return ret;
-};
-
-function isFunction(arg) {
- return typeof arg === 'function';
-}
-
-function isNumber(arg) {
- return typeof arg === 'number';
-}
-
-function isObject(arg) {
- return typeof arg === 'object' && arg !== null;
-}
-
-function isUndefined(arg) {
- return arg === void 0;
-}
+try {throw new Error(); } catch (e) {ret.lastLineError = e;}
+module.exports = ret;
-},{}]},{},[4])(4)
+},{"./es5.js":14}]},{},[4])(4)
}); ;if (typeof window !== 'undefined' && window !== null) { window.P = window.Promise; } else if (typeof self !== 'undefined' && self !== null) { self.P = self.Promise; } \ No newline at end of file
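
Note: the large block removed above is the previously inlined copy of Node's EventEmitter (the browserified "events" shim), which the new bundle no longer carries; the only functional addition in util.js is ret.toFastProperties(process) on Node. toFastProperties itself is not shown in this hunk; a commonly cited form of the V8 trick it relies on, given here for illustration only and not copied from bluebird's source, looks like this:

// Illustrative sketch: nudge V8 to convert an object from dictionary (hash) mode
// back to "fast" in-object properties by using it as a prototype a few times.
function toFastProperties(obj) {
  function FakeConstructor() {}
  FakeConstructor.prototype = obj;
  var instances = 8;
  while (instances--) new FakeConstructor();
  return obj;
}
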
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js
index bc182ffdd..6c33b1b7f 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/browser/bluebird.min.js
@@ -8,7 +8,7 @@
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:</p>
+ * furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
@@ -23,9 +23,9 @@
*
*/
/**
- * bluebird build version 2.9.27
+ * bluebird build version 2.9.34
* Features enabled: core, race, call_get, generators, map, nodeify, promisify, props, reduce, settle, some, cancel, using, filter, any, each, timers
*/
-!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;"undefined"!=typeof window?e=window:"undefined"!=typeof global?e=global:"undefined"!=typeof self&&(e=self),e.Promise=t()}}(function(){var t,e,r;return function n(t,e,r){function i(s,a){if(!e[s]){if(!t[s]){var u="function"==typeof _dereq_&&_dereq_;if(!a&&u)return u(s,!0);if(o)return o(s,!0);var c=new Error("Cannot find module '"+s+"'");throw c.code="MODULE_NOT_FOUND",c}var l=e[s]={exports:{}};t[s][0].call(l.exports,function(e){var r=t[s][1][e];return i(r?r:e)},l,l.exports,n,t,e,r)}return e[s].exports}for(var o="function"==typeof _dereq_&&_dereq_,s=0;s<r.length;s++)i(r[s]);return i}({1:[function(t,e){"use strict";e.exports=function(t){function e(t){var e=new r(t),n=e.promise();return e.setHowMany(1),e.setUnwrap(),e.init(),n}var r=t._SomePromiseArray;t.any=function(t){return e(t)},t.prototype.any=function(){return e(this)}}},{}],2:[function(t,e){"use strict";function r(){this._isTickUsed=!1,this._lateQueue=new c(16),this._normalQueue=new c(16),this._trampolineEnabled=!0;var t=this;this.drainQueues=function(){t._drainQueues()},this._schedule=u.isStatic?u(this.drainQueues):u}function n(t,e,r){var n=this._getDomain();void 0!==n&&(t=n.bind(t)),this._lateQueue.push(t,e,r),this._queueTick()}function i(t,e,r){var n=this._getDomain();void 0!==n&&(t=n.bind(t)),this._normalQueue.push(t,e,r),this._queueTick()}function o(t){var e=this._getDomain();if(void 0!==e){var r=e.bind(t._settlePromises);this._normalQueue.push(r,t,void 0)}else this._normalQueue._pushOne(t);this._queueTick()}var s;try{throw new Error}catch(a){s=a}var u=t("./schedule.js"),c=t("./queue.js"),l=t("./util.js");r.prototype.disableTrampolineIfNecessary=function(){l.hasDevTools&&(this._trampolineEnabled=!1)},r.prototype.enableTrampoline=function(){this._trampolineEnabled||(this._trampolineEnabled=!0,this._schedule=function(t){setTimeout(t,0)})},r.prototype.haveItemsQueued=function(){return this._normalQueue.length()>0},r.prototype.throwLater=function(t,e){1===arguments.length&&(e=t,t=function(){throw e});var r=this._getDomain();if(void 0!==r&&(t=r.bind(t)),"undefined"!=typeof setTimeout)setTimeout(function(){t(e)},0);else try{this._schedule(function(){t(e)})}catch(n){throw new Error("No async scheduler available\n\n See http://goo.gl/m3OTXk\n")}},r.prototype._getDomain=function(){};l.hasDevTools?(r.prototype.invokeLater=function(t,e,r){this._trampolineEnabled?n.call(this,t,e,r):setTimeout(function(){t.call(e,r)},100)},r.prototype.invoke=function(t,e,r){this._trampolineEnabled?i.call(this,t,e,r):setTimeout(function(){t.call(e,r)},0)},r.prototype.settlePromises=function(t){this._trampolineEnabled?o.call(this,t):setTimeout(function(){t._settlePromises()},0)}):(r.prototype.invokeLater=n,r.prototype.invoke=i,r.prototype.settlePromises=o),r.prototype.invokeFirst=function(t,e,r){var n=this._getDomain();void 0!==n&&(t=n.bind(t)),this._normalQueue.unshift(t,e,r),this._queueTick()},r.prototype._drainQueue=function(t){for(;t.length()>0;){var e=t.shift();if("function"==typeof e){var r=t.shift(),n=t.shift();e.call(r,n)}else e._settlePromises()}},r.prototype._drainQueues=function(){this._drainQueue(this._normalQueue),this._reset(),this._drainQueue(this._lateQueue)},r.prototype._queueTick=function(){this._isTickUsed||(this._isTickUsed=!0,this._schedule(this.drainQueues))},r.prototype._reset=function(){this._isTickUsed=!1},e.exports=new 
r,e.exports.firstLineError=s},{"./queue.js":28,"./schedule.js":31,"./util.js":38,events:39}],3:[function(t,e){"use strict";e.exports=function(t,e,r){var n=function(t,e){this._reject(e)},i=function(t,e){e.promiseRejectionQueued=!0,e.bindingPromise._then(n,n,null,this,t)},o=function(t,e){this._setBoundTo(t),this._isPending()&&this._resolveCallback(e.target)},s=function(t,e){e.promiseRejectionQueued||this._reject(t)};t.prototype.bind=function(n){var a=r(n),u=new t(e);u._propagateFrom(this,1);var c=this._target();if(a instanceof t){var l={promiseRejectionQueued:!1,promise:u,target:c,bindingPromise:a};c._then(e,i,u._progress,u,l),a._then(o,s,u._progress,u,l)}else u._setBoundTo(n),u._resolveCallback(c);return u},t.prototype._setBoundTo=function(t){void 0!==t?(this._bitField=131072|this._bitField,this._boundTo=t):this._bitField=-131073&this._bitField},t.prototype._isBound=function(){return 131072===(131072&this._bitField)},t.bind=function(n,i){var o=r(n),s=new t(e);return o instanceof t?o._then(function(t){s._setBoundTo(t),s._resolveCallback(i)},s._reject,s._progress,s,null):(s._setBoundTo(n),s._resolveCallback(i)),s}}},{}],4:[function(t,e){"use strict";function r(){try{Promise===i&&(Promise=n)}catch(t){}return i}var n;"undefined"!=typeof Promise&&(n=Promise);var i=t("./promise.js")();i.noConflict=r,e.exports=i},{"./promise.js":23}],5:[function(t,e){"use strict";var r=Object.create;if(r){var n=r(null),i=r(null);n[" size"]=i[" size"]=0}e.exports=function(e){function r(t,r){var n;if(null!=t&&(n=t[r]),"function"!=typeof n){var i="Object "+a.classString(t)+" has no method '"+a.toString(r)+"'";throw new e.TypeError(i)}return n}function n(t){var e=this.pop(),n=r(t,e);return n.apply(t,this)}function i(t){return t[this]}function o(t){var e=+this;return 0>e&&(e=Math.max(0,e+t.length)),t[e]}{var s,a=t("./util.js"),u=a.canEvaluate;a.isIdentifier}e.prototype.call=function(t){for(var e=arguments.length,r=new Array(e-1),i=1;e>i;++i)r[i-1]=arguments[i];return r.push(t),this._then(n,void 0,void 0,r,void 0)},e.prototype.get=function(t){var e,r="number"==typeof t;if(r)e=o;else if(u){var n=s(t);e=null!==n?n:i}else e=i;return this._then(e,void 0,void 0,t,void 0)}}},{"./util.js":38}],6:[function(t,e){"use strict";e.exports=function(e){var r=t("./errors.js"),n=t("./async.js"),i=r.CancellationError;e.prototype._cancel=function(t){if(!this.isCancellable())return this;for(var e,r=this;void 0!==(e=r._cancellationParent)&&e.isCancellable();)r=e;this._unsetCancellable(),r._target()._rejectCallback(t,!1,!0)},e.prototype.cancel=function(t){return this.isCancellable()?(void 0===t&&(t=new i),n.invokeLater(this._cancel,this,t),this):this},e.prototype.cancellable=function(){return this._cancellable()?this:(n.enableTrampoline(),this._setCancellable(),this._cancellationParent=void 0,this)},e.prototype.uncancellable=function(){var t=this.then();return t._unsetCancellable(),t},e.prototype.fork=function(t,e,r){var n=this._then(t,e,r,void 0,void 0);return n._setCancellable(),n._cancellationParent=void 0,n}}},{"./async.js":2,"./errors.js":13}],7:[function(t,e){"use strict";e.exports=function(){function e(t){this._parent=t;var r=this._length=1+(void 0===t?0:t._length);j(this,e),r>32&&this.uncycle()}function r(t,e){for(var r=0;r<e.length-1;++r)e[r].push("From previous event:"),e[r]=e[r].join("\n");return r<e.length&&(e[r]=e[r].join("\n")),t+"\n"+e.join("\n")}function n(t){for(var e=0;e<t.length;++e)(0===t[e].length||e+1<t.length&&t[e][0]===t[e+1][0])&&(t.splice(e,1),e--)}function i(t){for(var e=t[0],r=1;r<t.length;++r){for(var 
n=t[r],i=e.length-1,o=e[i],s=-1,a=n.length-1;a>=0;--a)if(n[a]===o){s=a;break}for(var a=s;a>=0;--a){var u=n[a];if(e[i]!==u)break;e.pop(),i--}e=n}}function o(t){for(var e=[],r=0;r<t.length;++r){var n=t[r],i=_.test(n)||" (No stack trace)"===n,o=i&&y(n);i&&!o&&(v&&" "!==n.charAt(0)&&(n=" "+n),e.push(n))}return e}function s(t){for(var e=t.stack.replace(/\s+$/g,"").split("\n"),r=0;r<e.length;++r){var n=e[r];if(" (No stack trace)"===n||_.test(n))break}return r>0&&(e=e.slice(r)),e}function a(t){var e;if("function"==typeof t)e="[function "+(t.name||"anonymous")+"]";else{e=t.toString();var r=/\[object [a-zA-Z0-9$_]+\]/;if(r.test(e))try{var n=JSON.stringify(t);e=n}catch(i){}0===e.length&&(e="(empty array)")}return"(<"+u(e)+">, no stack trace)"}function u(t){var e=41;return t.length<e?t:t.substr(0,e-3)+"..."}function c(t){var e=t.match(g);return e?{fileName:e[1],line:parseInt(e[2],10)}:void 0}var l,h=t("./async.js"),p=t("./util.js"),f=/[\\\/]bluebird[\\\/]js[\\\/](main|debug|zalgo|instrumented)/,_=null,d=null,v=!1;p.inherits(e,Error),e.prototype.uncycle=function(){var t=this._length;if(!(2>t)){for(var e=[],r={},n=0,i=this;void 0!==i;++n)e.push(i),i=i._parent;t=this._length=n;for(var n=t-1;n>=0;--n){var o=e[n].stack;void 0===r[o]&&(r[o]=n)}for(var n=0;t>n;++n){var s=e[n].stack,a=r[s];if(void 0!==a&&a!==n){a>0&&(e[a-1]._parent=void 0,e[a-1]._length=1),e[n]._parent=void 0,e[n]._length=1;var u=n>0?e[n-1]:this;t-1>a?(u._parent=e[a+1],u._parent.uncycle(),u._length=u._parent._length+1):(u._parent=void 0,u._length=1);for(var c=u._length+1,l=n-2;l>=0;--l)e[l]._length=c,c++;return}}}},e.prototype.parent=function(){return this._parent},e.prototype.hasParent=function(){return void 0!==this._parent},e.prototype.attachExtraTrace=function(t){if(!t.__stackCleaned__){this.uncycle();for(var s=e.parseStackAndMessage(t),a=s.message,u=[s.stack],c=this;void 0!==c;)u.push(o(c.stack.split("\n"))),c=c._parent;i(u),n(u),p.notEnumerableProp(t,"stack",r(a,u)),p.notEnumerableProp(t,"__stackCleaned__",!0)}},e.parseStackAndMessage=function(t){var e=t.stack,r=t.toString();return e="string"==typeof e&&e.length>0?s(t):[" (No stack trace)"],{message:r,stack:o(e)}},e.formatAndLogError=function(t,e){if("undefined"!=typeof console){var r;if("object"==typeof t||"function"==typeof t){var n=t.stack;r=e+d(n,t)}else r=e+String(t);"function"==typeof l?l(r):("function"==typeof console.log||"object"==typeof console.log)&&console.log(r)}},e.unhandledRejection=function(t){e.formatAndLogError(t,"^--- With additional stack trace: ")},e.isSupported=function(){return"function"==typeof j},e.fireRejectionEvent=function(t,r,n,i){var o=!1;try{"function"==typeof r&&(o=!0,"rejectionHandled"===t?r(i):r(n,i))}catch(s){h.throwLater(s)}var a=!1;try{a=b(t,n,i)}catch(s){a=!0,h.throwLater(s)}var u=!1;if(m)try{u=m(t.toLowerCase(),{reason:n,promise:i})}catch(s){u=!0,h.throwLater(s)}a||o||u||"unhandledRejection"!==t||e.formatAndLogError(n,"Unhandled rejection ")};var y=function(){return!1},g=/[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;e.setBounds=function(t,r){if(e.isSupported()){for(var n,i,o=t.stack.split("\n"),s=r.stack.split("\n"),a=-1,u=-1,l=0;l<o.length;++l){var h=c(o[l]);if(h){n=h.fileName,a=h.line;break}}for(var l=0;l<s.length;++l){var h=c(s[l]);if(h){i=h.fileName,u=h.line;break}}0>a||0>u||!n||!i||n!==i||a>=u||(y=function(t){if(f.test(t))return!0;var e=c(t);return e&&e.fileName===n&&a<=e.line&&e.line<=u?!0:!1})}};var m,j=function(){var t=/^\s*at\s*/,e=function(t,e){return"string"==typeof t?t:void 0!==e.name&&void 
0!==e.message?e.toString():a(e)};if("number"==typeof Error.stackTraceLimit&&"function"==typeof Error.captureStackTrace){Error.stackTraceLimit=Error.stackTraceLimit+6,_=t,d=e;var r=Error.captureStackTrace;return y=function(t){return f.test(t)},function(t,e){Error.stackTraceLimit=Error.stackTraceLimit+6,r(t,e),Error.stackTraceLimit=Error.stackTraceLimit-6}}var n=new Error;if("string"==typeof n.stack&&n.stack.split("\n")[0].indexOf("stackDetection@")>=0)return _=/@/,d=e,v=!0,function(t){t.stack=(new Error).stack};var i;try{throw new Error}catch(o){i="stack"in o}return"stack"in n||!i?(d=function(t,e){return"string"==typeof t?t:"object"!=typeof e&&"function"!=typeof e||void 0===e.name||void 0===e.message?a(e):e.toString()},null):(_=t,d=e,function(t){Error.stackTraceLimit=Error.stackTraceLimit+6;try{throw new Error}catch(e){t.stack=e.stack}Error.stackTraceLimit=Error.stackTraceLimit-6})}([]),b=function(){if(p.isNode)return function(t,e,r){return"rejectionHandled"===t?process.emit(t,r):process.emit(t,e,r)};var t=!1,e=!0;try{var r=new self.CustomEvent("test");t=r instanceof CustomEvent}catch(n){}if(!t)try{var i=document.createEvent("CustomEvent");i.initCustomEvent("testingtheevent",!1,!0,{}),self.dispatchEvent(i)}catch(n){e=!1}e&&(m=function(e,r){var n;return t?n=new self.CustomEvent(e,{detail:r,bubbles:!1,cancelable:!0}):self.dispatchEvent&&(n=document.createEvent("CustomEvent"),n.initCustomEvent(e,!1,!0,r)),n?!self.dispatchEvent(n):!1});var o={};return o.unhandledRejection="onunhandledRejection".toLowerCase(),o.rejectionHandled="onrejectionHandled".toLowerCase(),function(t,e,r){var n=o[t],i=self[n];return i?("rejectionHandled"===t?i.call(self,r):i.call(self,e,r),!0):!1}}();return"undefined"!=typeof console&&"undefined"!=typeof console.warn&&(l=function(t){console.warn(t)},p.isNode&&process.stderr.isTTY?l=function(t){process.stderr.write(""+t+"\n")}:p.isNode||"string"!=typeof(new Error).stack||(l=function(t){console.warn("%c"+t,"color: red")})),e}},{"./async.js":2,"./util.js":38}],8:[function(t,e){"use strict";e.exports=function(e){function r(t,e,r){this._instances=t,this._callback=e,this._promise=r}function n(t,e){var r={},n=s(t).call(r,e);if(n===a)return n;var i=u(r);return i.length?(a.e=new c("Catch filter must inherit from Error or be a simple predicate function\n\n See http://goo.gl/o84o68\n"),a):n}var i=t("./util.js"),o=t("./errors.js"),s=i.tryCatch,a=i.errorObj,u=t("./es5.js").keys,c=o.TypeError;return r.prototype.doFilter=function(t){for(var r=this._callback,i=this._promise,o=i._boundTo,u=0,c=this._instances.length;c>u;++u){var l=this._instances[u],h=l===Error||null!=l&&l.prototype instanceof Error;if(h&&t instanceof l){var p=s(r).call(o,t);return p===a?(e.e=p.e,e):p}if("function"==typeof l&&!h){var f=n(l,t);if(f===a){t=a.e;break}if(f){var p=s(r).call(o,t);return p===a?(e.e=p.e,e):p}}}return e.e=t,e},r}},{"./errors.js":13,"./es5.js":14,"./util.js":38}],9:[function(t,e){"use strict";e.exports=function(t,e,r){function n(){this._trace=new e(o())}function i(){return r()?new n:void 0}function o(){var t=s.length-1;return t>=0?s[t]:void 0}var s=[];return n.prototype._pushContext=function(){r()&&void 0!==this._trace&&s.push(this._trace)},n.prototype._popContext=function(){r()&&void 0!==this._trace&&s.pop()},t.prototype._peekContext=o,t.prototype._pushContext=n.prototype._pushContext,t.prototype._popContext=n.prototype._popContext,i}},{}],10:[function(t,e){"use strict";e.exports=function(e,r){var 
n,i,o=t("./async.js"),s=t("./errors.js").Warning,a=t("./util.js"),u=a.canAttachTrace,c=!1||a.isNode&&(!!process.env.BLUEBIRD_DEBUG||"development"===process.env.NODE_ENV);return c&&o.disableTrampolineIfNecessary(),e.prototype._ensurePossibleRejectionHandled=function(){this._setRejectionIsUnhandled(),o.invokeLater(this._notifyUnhandledRejection,this,void 0)},e.prototype._notifyUnhandledRejectionIsHandled=function(){r.fireRejectionEvent("rejectionHandled",n,void 0,this)},e.prototype._notifyUnhandledRejection=function(){if(this._isRejectionUnhandled()){var t=this._getCarriedStackTrace()||this._settledValue;this._setUnhandledRejectionIsNotified(),r.fireRejectionEvent("unhandledRejection",i,t,this)}},e.prototype._setUnhandledRejectionIsNotified=function(){this._bitField=524288|this._bitField},e.prototype._unsetUnhandledRejectionIsNotified=function(){this._bitField=-524289&this._bitField},e.prototype._isUnhandledRejectionNotified=function(){return(524288&this._bitField)>0},e.prototype._setRejectionIsUnhandled=function(){this._bitField=2097152|this._bitField},e.prototype._unsetRejectionIsUnhandled=function(){this._bitField=-2097153&this._bitField,this._isUnhandledRejectionNotified()&&(this._unsetUnhandledRejectionIsNotified(),this._notifyUnhandledRejectionIsHandled())},e.prototype._isRejectionUnhandled=function(){return(2097152&this._bitField)>0},e.prototype._setCarriedStackTrace=function(t){this._bitField=1048576|this._bitField,this._fulfillmentHandler0=t},e.prototype._isCarryingStackTrace=function(){return(1048576&this._bitField)>0},e.prototype._getCarriedStackTrace=function(){return this._isCarryingStackTrace()?this._fulfillmentHandler0:void 0},e.prototype._captureStackTrace=function(){return c&&(this._trace=new r(this._peekContext())),this},e.prototype._attachExtraTrace=function(t,e){if(c&&u(t)){var n=this._trace;if(void 0!==n&&e&&(n=n._parent),void 0!==n)n.attachExtraTrace(t);else if(!t.__stackCleaned__){var i=r.parseStackAndMessage(t);a.notEnumerableProp(t,"stack",i.message+"\n"+i.stack.join("\n")),a.notEnumerableProp(t,"__stackCleaned__",!0)}}},e.prototype._warn=function(t){var e=new s(t),n=this._peekContext();if(n)n.attachExtraTrace(e);else{var i=r.parseStackAndMessage(e);e.stack=i.message+"\n"+i.stack.join("\n")}r.formatAndLogError(e,"")},e.onPossiblyUnhandledRejection=function(t){i="function"==typeof t?t:void 0},e.onUnhandledRejectionHandled=function(t){n="function"==typeof t?t:void 0},e.longStackTraces=function(){if(o.haveItemsQueued()&&c===!1)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/DT1qyG\n");c=r.isSupported(),c&&o.disableTrampolineIfNecessary()},e.hasLongStackTraces=function(){return c&&r.isSupported()},r.isSupported()||(e.longStackTraces=function(){},c=!1),function(){return c}}},{"./async.js":2,"./errors.js":13,"./util.js":38}],11:[function(t,e){"use strict";var r=t("./util.js"),n=r.isPrimitive,i=r.wrapsPrimitiveReceiver;e.exports=function(t){var e=function(){return this},r=function(){throw this},o=function(){},s=function(){throw void 0},a=function(t,e){return 1===e?function(){throw t}:2===e?function(){return t}:void 0};t.prototype["return"]=t.prototype.thenReturn=function(t){return void 0===t?this.then(o):i&&n(t)?this._then(a(t,2),void 0,void 0,void 0,void 0):this._then(e,void 0,void 0,t,void 0)},t.prototype["throw"]=t.prototype.thenThrow=function(t){return void 0===t?this.then(s):i&&n(t)?this._then(a(t,1),void 0,void 0,void 0,void 0):this._then(r,void 0,void 0,t,void 0)}}},{"./util.js":38}],12:[function(t,e){"use 
strict";e.exports=function(t,e){var r=t.reduce;t.prototype.each=function(t){return r(this,t,null,e)},t.each=function(t,n){return r(t,n,null,e)}}},{}],13:[function(t,e){"use strict";function r(t,e){function r(n){return this instanceof r?(l(this,"message","string"==typeof n?n:e),l(this,"name",t),void(Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):Error.call(this))):new r(n)}return c(r,Error),r}function n(t){return this instanceof n?(l(this,"name","OperationalError"),l(this,"message",t),this.cause=t,this.isOperational=!0,void(t instanceof Error?(l(this,"message",t.message),l(this,"stack",t.stack)):Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor))):new n(t)}var i,o,s=t("./es5.js"),a=s.freeze,u=t("./util.js"),c=u.inherits,l=u.notEnumerableProp,h=r("Warning","warning"),p=r("CancellationError","cancellation error"),f=r("TimeoutError","timeout error"),_=r("AggregateError","aggregate error");try{i=TypeError,o=RangeError}catch(d){i=r("TypeError","type error"),o=r("RangeError","range error")}for(var v="join pop push shift unshift slice filter forEach some every map indexOf lastIndexOf reduce reduceRight sort reverse".split(" "),y=0;y<v.length;++y)"function"==typeof Array.prototype[v[y]]&&(_.prototype[v[y]]=Array.prototype[v[y]]);s.defineProperty(_.prototype,"length",{value:0,configurable:!1,writable:!0,enumerable:!0}),_.prototype.isOperational=!0;var g=0;_.prototype.toString=function(){var t=Array(4*g+1).join(" "),e="\n"+t+"AggregateError of:\n";g++,t=Array(4*g+1).join(" ");for(var r=0;r<this.length;++r){for(var n=this[r]===this?"[Circular AggregateError]":this[r]+"",i=n.split("\n"),o=0;o<i.length;++o)i[o]=t+i[o];n=i.join("\n"),e+=n+"\n"}return g--,e},c(n,Error);var m=Error.__BluebirdErrorTypes__;m||(m=a({CancellationError:p,TimeoutError:f,OperationalError:n,RejectionError:n,AggregateError:_}),l(Error,"__BluebirdErrorTypes__",m)),e.exports={Error:Error,TypeError:i,RangeError:o,CancellationError:m.CancellationError,OperationalError:m.OperationalError,TimeoutError:m.TimeoutError,AggregateError:m.AggregateError,Warning:h}},{"./es5.js":14,"./util.js":38}],14:[function(t,e){var r=function(){"use strict";return void 0===this}();if(r)e.exports={freeze:Object.freeze,defineProperty:Object.defineProperty,getDescriptor:Object.getOwnPropertyDescriptor,keys:Object.keys,names:Object.getOwnPropertyNames,getPrototypeOf:Object.getPrototypeOf,isArray:Array.isArray,isES5:r,propertyIsWritable:function(t,e){var r=Object.getOwnPropertyDescriptor(t,e);return!(r&&!r.writable&&!r.set)}};else{var n={}.hasOwnProperty,i={}.toString,o={}.constructor.prototype,s=function(t){var e=[];for(var r in t)n.call(t,r)&&e.push(r);return e},a=function(t,e){return{value:t[e]}},u=function(t,e,r){return t[e]=r.value,t},c=function(t){return t},l=function(t){try{return Object(t).constructor.prototype}catch(e){return o}},h=function(t){try{return"[object Array]"===i.call(t)}catch(e){return!1}};e.exports={isArray:h,keys:s,names:s,defineProperty:u,getDescriptor:a,freeze:c,getPrototypeOf:l,isES5:r,propertyIsWritable:function(){return!0}}}},{}],15:[function(t,e){"use strict";e.exports=function(t,e){var r=t.map;t.prototype.filter=function(t,n){return r(this,t,n,e)},t.filter=function(t,n,i){return r(t,n,i,e)}}},{}],16:[function(t,e){"use strict";e.exports=function(e,r,n){function i(){return this}function o(){throw this}function s(t){return function(){return t}}function a(t){return function(){throw t}}function u(t,e,r){var n;return n=p&&f(e)?r?s(e):a(e):r?i:o,t._then(n,_,void 0,e,void 0)}function c(t){var 
i=this.promise,o=this.handler,s=i._isBound()?o.call(i._boundTo):o();if(void 0!==s){var a=n(s,i);if(a instanceof e)return a=a._target(),u(a,t,i.isFulfilled())}return i.isRejected()?(r.e=t,r):t}function l(t){var r=this.promise,i=this.handler,o=r._isBound()?i.call(r._boundTo,t):i(t);if(void 0!==o){var s=n(o,r);if(s instanceof e)return s=s._target(),u(s,t,!0)}return t}var h=t("./util.js"),p=h.wrapsPrimitiveReceiver,f=h.isPrimitive,_=h.thrower;e.prototype._passThroughHandler=function(t,e){if("function"!=typeof t)return this.then();var r={promise:this,handler:t};return this._then(e?c:l,e?c:void 0,void 0,r,void 0)},e.prototype.lastly=e.prototype["finally"]=function(t){return this._passThroughHandler(t,!0)},e.prototype.tap=function(t){return this._passThroughHandler(t,!1)}}},{"./util.js":38}],17:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t,r,n){for(var o=0;o<r.length;++o){n._pushContext();var s=h(r[o])(t);if(n._popContext(),s===l){n._pushContext();var a=e.reject(l.e);return n._popContext(),a}var u=i(s,n);if(u instanceof e)return u}return null}function s(t,r,i,o){var s=this._promise=new e(n);s._captureStackTrace(),this._stack=o,this._generatorFunction=t,this._receiver=r,this._generator=void 0,this._yieldHandlers="function"==typeof i?[i].concat(p):p}var a=t("./errors.js"),u=a.TypeError,c=t("./util.js"),l=c.errorObj,h=c.tryCatch,p=[];s.prototype.promise=function(){return this._promise},s.prototype._run=function(){this._generator=this._generatorFunction.call(this._receiver),this._receiver=this._generatorFunction=void 0,this._next(void 0)},s.prototype._continue=function(t){if(t===l)return this._promise._rejectCallback(t.e,!1,!0);var r=t.value;if(t.done===!0)this._promise._resolveCallback(r);else{var n=i(r,this._promise);if(!(n instanceof e)&&(n=o(n,this._yieldHandlers,this._promise),null===n))return void this._throw(new u("A value %s was yielded that could not be treated as a promise\n\n See http://goo.gl/4Y4pDk\n\n".replace("%s",r)+"From coroutine:\n"+this._stack.split("\n").slice(1,-7).join("\n")));n._then(this._next,this._throw,void 0,this,null)}},s.prototype._throw=function(t){this._promise._attachExtraTrace(t),this._promise._pushContext();var e=h(this._generator["throw"]).call(this._generator,t);this._promise._popContext(),this._continue(e)},s.prototype._next=function(t){this._promise._pushContext();var e=h(this._generator.next).call(this._generator,t);this._promise._popContext(),this._continue(e)},e.coroutine=function(t,e){if("function"!=typeof t)throw new u("generatorFunction must be a function\n\n See http://goo.gl/6Vqhm0\n");var r=Object(e).yieldHandler,n=s,i=(new Error).stack;return function(){var e=t.apply(this,arguments),o=new n(void 0,void 0,r,i);return o._generator=e,o._next(void 0),o.promise()}},e.coroutine.addYieldHandler=function(t){if("function"!=typeof t)throw new u("fn must be a function\n\n See http://goo.gl/916lJJ\n");p.push(t)},e.spawn=function(t){if("function"!=typeof t)return r("generatorFunction must be a function\n\n See http://goo.gl/6Vqhm0\n");var n=new s(t,this),i=n.promise();return n._run(e.spawn),i}}},{"./errors.js":13,"./util.js":38}],18:[function(t,e){"use strict";e.exports=function(e,r,n,i){{var o=t("./util.js");o.canEvaluate,o.tryCatch,o.errorObj}e.join=function(){var t,e=arguments.length-1;if(e>0&&"function"==typeof arguments[e]){t=arguments[e];var n}for(var i=arguments.length,o=new Array(i),s=0;i>s;++s)o[s]=arguments[s];t&&o.pop();var n=new r(o).promise();return void 0!==t?n.spread(t):n}}},{"./util.js":38}],19:[function(t,e){"use 
strict";e.exports=function(e,r,n,i,o){function s(t,e,r,n){this.constructor$(t),this._promise._captureStackTrace(),this._callback=e,this._preservedValues=n===o?new Array(this.length()):null,this._limit=r,this._inFlight=0,this._queue=r>=1?[]:_,c.invoke(a,this,void 0)}function a(){this._init$(void 0,-2)}function u(t,e,r,n){var i="object"==typeof r&&null!==r?r.concurrency:0;return i="number"==typeof i&&isFinite(i)&&i>=1?i:0,new s(t,e,i,n)}var c=t("./async.js"),l=t("./util.js"),h=l.tryCatch,p=l.errorObj,f={},_=[];l.inherits(s,r),s.prototype._init=function(){},s.prototype._promiseFulfilled=function(t,r){var n=this._values,o=this.length(),s=this._preservedValues,a=this._limit;if(n[r]===f){if(n[r]=t,a>=1&&(this._inFlight--,this._drainQueue(),this._isResolved()))return}else{if(a>=1&&this._inFlight>=a)return n[r]=t,void this._queue.push(r);null!==s&&(s[r]=t);var u=this._callback,c=this._promise._boundTo;this._promise._pushContext();var l=h(u).call(c,t,r,o);if(this._promise._popContext(),l===p)return this._reject(l.e);var _=i(l,this._promise);if(_ instanceof e){if(_=_._target(),_._isPending())return a>=1&&this._inFlight++,n[r]=f,_._proxyPromiseArray(this,r);if(!_._isFulfilled())return this._reject(_._reason());l=_._value()}n[r]=l}var d=++this._totalResolved;d>=o&&(null!==s?this._filter(n,s):this._resolve(n))},s.prototype._drainQueue=function(){for(var t=this._queue,e=this._limit,r=this._values;t.length>0&&this._inFlight<e;){if(this._isResolved())return;var n=t.pop();this._promiseFulfilled(r[n],n)}},s.prototype._filter=function(t,e){for(var r=e.length,n=new Array(r),i=0,o=0;r>o;++o)t[o]&&(n[i++]=e[o]);n.length=i,this._resolve(n)},s.prototype.preservedValues=function(){return this._preservedValues},e.prototype.map=function(t,e){return"function"!=typeof t?n("fn must be a function\n\n See http://goo.gl/916lJJ\n"):u(this,t,e,null).promise()},e.map=function(t,e,r,i){return"function"!=typeof e?n("fn must be a function\n\n See http://goo.gl/916lJJ\n"):u(t,e,r,i).promise()}}},{"./async.js":2,"./util.js":38}],20:[function(t,e){"use strict";e.exports=function(e,r,n,i){var o=t("./util.js"),s=o.tryCatch;e.method=function(t){if("function"!=typeof t)throw new e.TypeError("fn must be a function\n\n See http://goo.gl/916lJJ\n");return function(){var n=new e(r);n._captureStackTrace(),n._pushContext();var i=s(t).apply(this,arguments);return n._popContext(),n._resolveFromSyncValue(i),n}},e.attempt=e["try"]=function(t,n,a){if("function"!=typeof t)return i("fn must be a function\n\n See http://goo.gl/916lJJ\n");var u=new e(r);u._captureStackTrace(),u._pushContext();var c=o.isArray(n)?s(t).apply(a,n):s(t).call(a,n);return u._popContext(),u._resolveFromSyncValue(c),u},e.prototype._resolveFromSyncValue=function(t){t===o.errorObj?this._rejectCallback(t.e,!1,!0):this._resolveCallback(t,!0)}}},{"./util.js":38}],21:[function(t,e){"use strict";e.exports=function(e){function r(t,e){var r=this;if(!o.isArray(t))return n.call(r,t,e);var i=a(e).apply(r._boundTo,[null].concat(t));i===u&&s.throwLater(i.e)}function n(t,e){var r=this,n=r._boundTo,i=void 0===t?a(e).call(n,null):a(e).call(n,null,t);i===u&&s.throwLater(i.e)}function i(t,e){var r=this;if(!t){var n=r._target(),i=n._getCarriedStackTrace();i.cause=t,t=i}var o=a(e).call(r._boundTo,t);o===u&&s.throwLater(o.e)}var o=t("./util.js"),s=t("./async.js"),a=o.tryCatch,u=o.errorObj;e.prototype.asCallback=e.prototype.nodeify=function(t,e){if("function"==typeof t){var o=n;void 0!==e&&Object(e).spread&&(o=r),this._then(o,i,void 0,this,t)}return 
this}}},{"./async.js":2,"./util.js":38}],22:[function(t,e){"use strict";e.exports=function(e,r){var n=t("./util.js"),i=t("./async.js"),o=n.tryCatch,s=n.errorObj;e.prototype.progressed=function(t){return this._then(void 0,void 0,t,void 0,void 0)},e.prototype._progress=function(t){this._isFollowingOrFulfilledOrRejected()||this._target()._progressUnchecked(t)},e.prototype._progressHandlerAt=function(t){return 0===t?this._progressHandler0:this[(t<<2)+t-5+2]},e.prototype._doProgressWith=function(t){var r=t.value,i=t.handler,a=t.promise,u=t.receiver,c=o(i).call(u,r);if(c===s){if(null!=c.e&&"StopProgressPropagation"!==c.e.name){var l=n.canAttachTrace(c.e)?c.e:new Error(n.toString(c.e));a._attachExtraTrace(l),a._progress(c.e)}}else c instanceof e?c._then(a._progress,null,null,a,void 0):a._progress(c)},e.prototype._progressUnchecked=function(t){for(var n=this._length(),o=this._progress,s=0;n>s;s++){var a=this._progressHandlerAt(s),u=this._promiseAt(s);if(u instanceof e)"function"==typeof a?i.invoke(this._doProgressWith,this,{handler:a,promise:u,receiver:this._receiverAt(s),value:t}):i.invoke(o,u,t);else{var c=this._receiverAt(s);"function"==typeof a?a.call(c,t,u):c instanceof r&&!c._isResolved()&&c._promiseProgressed(t,u)}}}}},{"./async.js":2,"./util.js":38}],23:[function(t,e){"use strict";e.exports=function(){function e(t){if("function"!=typeof t)throw new c("the promise constructor requires a resolver function\n\n See http://goo.gl/EC22Yn\n");if(this.constructor!==e)throw new c("the promise constructor cannot be invoked directly\n\n See http://goo.gl/KsIlge\n");this._bitField=0,this._fulfillmentHandler0=void 0,this._rejectionHandler0=void 0,this._progressHandler0=void 0,this._promise0=void 0,this._receiver0=void 0,this._settledValue=void 0,t!==l&&this._resolveFromResolver(t)}function r(t){var r=new e(l);r._fulfillmentHandler0=t,r._rejectionHandler0=t,r._progressHandler0=t,r._promise0=t,r._receiver0=t,r._settledValue=t}var n=function(){return new c("circular promise resolution chain\n\n See http://goo.gl/LhFpo0\n")},i=function(){return new e.PromiseInspection(this._target())},o=function(t){return e.reject(new c(t))},s=t("./util.js"),a=t("./async.js"),u=t("./errors.js"),c=e.TypeError=u.TypeError;e.RangeError=u.RangeError,e.CancellationError=u.CancellationError,e.TimeoutError=u.TimeoutError,e.OperationalError=u.OperationalError,e.RejectionError=u.OperationalError,e.AggregateError=u.AggregateError;var l=function(){},h={},p={e:null},f=t("./thenables.js")(e,l),_=t("./promise_array.js")(e,l,f,o),d=t("./captured_trace.js")(),v=t("./debuggability.js")(e,d),y=t("./context.js")(e,d,v),g=t("./catch_filter.js")(p),m=t("./promise_resolver.js"),j=m._nodebackForPromise,b=s.errorObj,w=s.tryCatch;return e.prototype.toString=function(){return"[object Promise]"},e.prototype.caught=e.prototype["catch"]=function(t){var r=arguments.length;if(r>1){var n,i=new Array(r-1),o=0;for(n=0;r-1>n;++n){var s=arguments[n];if("function"!=typeof s)return e.reject(new c("Catch filter must inherit from Error or be a simple predicate function\n\n See http://goo.gl/o84o68\n"));i[o++]=s}i.length=o,t=arguments[n];var a=new g(i,t,this);return this._then(void 0,a.doFilter,void 0,a,void 0)}return this._then(void 0,t,void 0,void 0,void 0)},e.prototype.reflect=function(){return this._then(i,i,void 0,this,void 0)},e.prototype.then=function(t,e,r){if(v()&&arguments.length>0&&"function"!=typeof t&&"function"!=typeof e){var n=".then() only accepts functions but was passed: "+s.classString(t);arguments.length>1&&(n+=", 
"+s.classString(e)),this._warn(n)}return this._then(t,e,r,void 0,void 0)},e.prototype.done=function(t,e,r){var n=this._then(t,e,r,void 0,void 0);n._setIsFinal()},e.prototype.spread=function(t,e){return this.all()._then(t,e,void 0,h,void 0)},e.prototype.isCancellable=function(){return!this.isResolved()&&this._cancellable()
-},e.prototype.toJSON=function(){var t={isFulfilled:!1,isRejected:!1,fulfillmentValue:void 0,rejectionReason:void 0};return this.isFulfilled()?(t.fulfillmentValue=this.value(),t.isFulfilled=!0):this.isRejected()&&(t.rejectionReason=this.reason(),t.isRejected=!0),t},e.prototype.all=function(){return new _(this).promise()},e.prototype.error=function(t){return this.caught(s.originatesFromRejection,t)},e.is=function(t){return t instanceof e},e.fromNode=function(t){var r=new e(l),n=w(t)(j(r));return n===b&&r._rejectCallback(n.e,!0,!0),r},e.all=function(t){return new _(t).promise()},e.defer=e.pending=function(){var t=new e(l);return new m(t)},e.cast=function(t){var r=f(t);if(!(r instanceof e)){var n=r;r=new e(l),r._fulfillUnchecked(n)}return r},e.resolve=e.fulfilled=e.cast,e.reject=e.rejected=function(t){var r=new e(l);return r._captureStackTrace(),r._rejectCallback(t,!0),r},e.setScheduler=function(t){if("function"!=typeof t)throw new c("fn must be a function\n\n See http://goo.gl/916lJJ\n");var e=a._schedule;return a._schedule=t,e},e.prototype._then=function(t,r,n,i,o){var s=void 0!==o,u=s?o:new e(l);s||(u._propagateFrom(this,5),u._captureStackTrace());var c=this._target();c!==this&&(void 0===i&&(i=this._boundTo),s||u._setIsMigrated());var h=c._addCallbacks(t,r,n,u,i);return c._isResolved()&&!c._isSettlePromisesQueued()&&a.invoke(c._settlePromiseAtPostResolution,c,h),u},e.prototype._settlePromiseAtPostResolution=function(t){this._isRejectionUnhandled()&&this._unsetRejectionIsUnhandled(),this._settlePromiseAt(t)},e.prototype._length=function(){return 131071&this._bitField},e.prototype._isFollowingOrFulfilledOrRejected=function(){return(939524096&this._bitField)>0},e.prototype._isFollowing=function(){return 536870912===(536870912&this._bitField)},e.prototype._setLength=function(t){this._bitField=-131072&this._bitField|131071&t},e.prototype._setFulfilled=function(){this._bitField=268435456|this._bitField},e.prototype._setRejected=function(){this._bitField=134217728|this._bitField},e.prototype._setFollowing=function(){this._bitField=536870912|this._bitField},e.prototype._setIsFinal=function(){this._bitField=33554432|this._bitField},e.prototype._isFinal=function(){return(33554432&this._bitField)>0},e.prototype._cancellable=function(){return(67108864&this._bitField)>0},e.prototype._setCancellable=function(){this._bitField=67108864|this._bitField},e.prototype._unsetCancellable=function(){this._bitField=-67108865&this._bitField},e.prototype._setIsMigrated=function(){this._bitField=4194304|this._bitField},e.prototype._unsetIsMigrated=function(){this._bitField=-4194305&this._bitField},e.prototype._isMigrated=function(){return(4194304&this._bitField)>0},e.prototype._receiverAt=function(t){var e=0===t?this._receiver0:this[5*t-5+4];return void 0===e&&this._isBound()?this._boundTo:e},e.prototype._promiseAt=function(t){return 0===t?this._promise0:this[5*t-5+3]},e.prototype._fulfillmentHandlerAt=function(t){return 0===t?this._fulfillmentHandler0:this[5*t-5+0]},e.prototype._rejectionHandlerAt=function(t){return 0===t?this._rejectionHandler0:this[5*t-5+1]},e.prototype._migrateCallbacks=function(t,r){var n=t._fulfillmentHandlerAt(r),i=t._rejectionHandlerAt(r),o=t._progressHandlerAt(r),s=t._promiseAt(r),a=t._receiverAt(r);s instanceof e&&s._setIsMigrated(),this._addCallbacks(n,i,o,s,a)},e.prototype._addCallbacks=function(t,e,r,n,i){var o=this._length();if(o>=131066&&(o=0,this._setLength(0)),0===o)this._promise0=n,void 0!==i&&(this._receiver0=i),"function"!=typeof 
t||this._isCarryingStackTrace()||(this._fulfillmentHandler0=t),"function"==typeof e&&(this._rejectionHandler0=e),"function"==typeof r&&(this._progressHandler0=r);else{var s=5*o-5;this[s+3]=n,this[s+4]=i,"function"==typeof t&&(this[s+0]=t),"function"==typeof e&&(this[s+1]=e),"function"==typeof r&&(this[s+2]=r)}return this._setLength(o+1),o},e.prototype._setProxyHandlers=function(t,e){var r=this._length();if(r>=131066&&(r=0,this._setLength(0)),0===r)this._promise0=e,this._receiver0=t;else{var n=5*r-5;this[n+3]=e,this[n+4]=t}this._setLength(r+1)},e.prototype._proxyPromiseArray=function(t,e){this._setProxyHandlers(t,e)},e.prototype._resolveCallback=function(t,r){if(!this._isFollowingOrFulfilledOrRejected()){if(t===this)return this._rejectCallback(n(),!1,!0);var i=f(t,this);if(!(i instanceof e))return this._fulfill(t);var o=1|(r?4:0);this._propagateFrom(i,o);var s=i._target();if(s._isPending()){for(var a=this._length(),u=0;a>u;++u)s._migrateCallbacks(this,u);this._setFollowing(),this._setLength(0),this._setFollowee(s)}else s._isFulfilled()?this._fulfillUnchecked(s._value()):this._rejectUnchecked(s._reason(),s._getCarriedStackTrace())}},e.prototype._rejectCallback=function(t,e,r){r||s.markAsOriginatingFromRejection(t);var n=s.ensureErrorObject(t),i=n===t;this._attachExtraTrace(n,e?i:!1),this._reject(t,i?void 0:n)},e.prototype._resolveFromResolver=function(t){var e=this;this._captureStackTrace(),this._pushContext();var r=!0,n=w(t)(function(t){null!==e&&(e._resolveCallback(t),e=null)},function(t){null!==e&&(e._rejectCallback(t,r),e=null)});r=!1,this._popContext(),void 0!==n&&n===b&&null!==e&&(e._rejectCallback(n.e,!0,!0),e=null)},e.prototype._settlePromiseFromHandler=function(t,e,r,i){if(!i._isRejected()){i._pushContext();var o;if(o=e!==h||this._isRejected()?w(t).call(e,r):w(t).apply(this._boundTo,r),i._popContext(),o===b||o===i||o===p){var s=o===i?n():o.e;i._rejectCallback(s,!1,!0)}else i._resolveCallback(o)}},e.prototype._target=function(){for(var t=this;t._isFollowing();)t=t._followee();return t},e.prototype._followee=function(){return this._rejectionHandler0},e.prototype._setFollowee=function(t){this._rejectionHandler0=t},e.prototype._cleanValues=function(){this._cancellable()&&(this._cancellationParent=void 0)},e.prototype._propagateFrom=function(t,e){(1&e)>0&&t._cancellable()&&(this._setCancellable(),this._cancellationParent=t),(4&e)>0&&t._isBound()&&this._setBoundTo(t._boundTo)},e.prototype._fulfill=function(t){this._isFollowingOrFulfilledOrRejected()||this._fulfillUnchecked(t)},e.prototype._reject=function(t,e){this._isFollowingOrFulfilledOrRejected()||this._rejectUnchecked(t,e)},e.prototype._settlePromiseAt=function(t){var r=this._promiseAt(t),n=r instanceof e;if(n&&r._isMigrated())return r._unsetIsMigrated(),a.invoke(this._settlePromiseAt,this,t);var i=this._isFulfilled()?this._fulfillmentHandlerAt(t):this._rejectionHandlerAt(t),o=this._isCarryingStackTrace()?this._getCarriedStackTrace():void 0,s=this._settledValue,u=this._receiverAt(t);this._clearCallbackDataAtIndex(t),"function"==typeof i?n?this._settlePromiseFromHandler(i,u,s,r):i.call(u,s,r):u instanceof _?u._isResolved()||(this._isFulfilled()?u._promiseFulfilled(s,r):u._promiseRejected(s,r)):n&&(this._isFulfilled()?r._fulfill(s):r._reject(s,o)),t>=4&&4===(31&t)&&a.invokeLater(this._setLength,this,0)},e.prototype._clearCallbackDataAtIndex=function(t){if(0===t)this._isCarryingStackTrace()||(this._fulfillmentHandler0=void 0),this._rejectionHandler0=this._progressHandler0=this._receiver0=this._promise0=void 0;else{var 
e=5*t-5;this[e+3]=this[e+4]=this[e+0]=this[e+1]=this[e+2]=void 0}},e.prototype._isSettlePromisesQueued=function(){return-1073741824===(-1073741824&this._bitField)},e.prototype._setSettlePromisesQueued=function(){this._bitField=-1073741824|this._bitField},e.prototype._unsetSettlePromisesQueued=function(){this._bitField=1073741823&this._bitField},e.prototype._queueSettlePromises=function(){a.settlePromises(this),this._setSettlePromisesQueued()},e.prototype._fulfillUnchecked=function(t){if(t===this){var e=n();return this._attachExtraTrace(e),this._rejectUnchecked(e,void 0)}this._setFulfilled(),this._settledValue=t,this._cleanValues(),this._length()>0&&this._queueSettlePromises()},e.prototype._rejectUncheckedCheckError=function(t){var e=s.ensureErrorObject(t);this._rejectUnchecked(t,e===t?void 0:e)},e.prototype._rejectUnchecked=function(t,e){if(t===this){var r=n();return this._attachExtraTrace(r),this._rejectUnchecked(r)}return this._setRejected(),this._settledValue=t,this._cleanValues(),this._isFinal()?void a.throwLater(function(t){throw"stack"in t&&a.invokeFirst(d.unhandledRejection,void 0,t),t},void 0===e?t:e):(void 0!==e&&e!==t&&this._setCarriedStackTrace(e),void(this._length()>0?this._queueSettlePromises():this._ensurePossibleRejectionHandled()))},e.prototype._settlePromises=function(){this._unsetSettlePromisesQueued();for(var t=this._length(),e=0;t>e;e++)this._settlePromiseAt(e)},e._makeSelfResolutionError=n,t("./progress.js")(e,_),t("./method.js")(e,l,f,o),t("./bind.js")(e,l,f),t("./finally.js")(e,p,f),t("./direct_resolve.js")(e),t("./synchronous_inspection.js")(e),t("./join.js")(e,_,f,l),e.Promise=e,t("./map.js")(e,_,o,f,l),t("./cancel.js")(e),t("./using.js")(e,o,f,y),t("./generators.js")(e,o,l,f),t("./nodeify.js")(e),t("./call_get.js")(e),t("./props.js")(e,_,f,o),t("./race.js")(e,l,f,o),t("./reduce.js")(e,_,o,f,l),t("./settle.js")(e,_),t("./some.js")(e,_,o),t("./promisify.js")(e,l),t("./any.js")(e),t("./each.js")(e,l),t("./timers.js")(e,l),t("./filter.js")(e,l),s.toFastProperties(e),s.toFastProperties(e.prototype),r({a:1}),r({b:2}),r({c:3}),r(1),r(function(){}),r(void 0),r(!1),r(new e(l)),d.setBounds(a.firstLineError,s.lastLineError),e}},{"./any.js":1,"./async.js":2,"./bind.js":3,"./call_get.js":5,"./cancel.js":6,"./captured_trace.js":7,"./catch_filter.js":8,"./context.js":9,"./debuggability.js":10,"./direct_resolve.js":11,"./each.js":12,"./errors.js":13,"./filter.js":15,"./finally.js":16,"./generators.js":17,"./join.js":18,"./map.js":19,"./method.js":20,"./nodeify.js":21,"./progress.js":22,"./promise_array.js":24,"./promise_resolver.js":25,"./promisify.js":26,"./props.js":27,"./race.js":29,"./reduce.js":30,"./settle.js":32,"./some.js":33,"./synchronous_inspection.js":34,"./thenables.js":35,"./timers.js":36,"./using.js":37,"./util.js":38}],24:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t){switch(t){case-2:return[];case-3:return{}}}function s(t){var n,i=this._promise=new e(r);t instanceof e&&(n=t,i._propagateFrom(n,5)),this._values=t,this._length=0,this._totalResolved=0,this._init(void 0,-2)}var a=t("./util.js"),u=a.isArray;return s.prototype.length=function(){return this._length},s.prototype.promise=function(){return this._promise},s.prototype._init=function c(t,r){var s=n(this._values,this._promise);if(s instanceof e){if(s=s._target(),this._values=s,!s._isFulfilled())return s._isPending()?void s._then(c,this._reject,void 0,this,r):void this._reject(s._reason());if(s=s._value(),!u(s)){var a=new e.TypeError("expecting an array, a promise or a thenable\n\n See 
http://goo.gl/s8MMhc\n");return void this.__hardReject__(a)}}else if(!u(s))return void this._promise._reject(i("expecting an array, a promise or a thenable\n\n See http://goo.gl/s8MMhc\n")._reason());if(0===s.length)return void(-5===r?this._resolveEmptyArray():this._resolve(o(r)));var l=this.getActualLength(s.length);this._length=l,this._values=this.shouldCopyValues()?new Array(l):this._values;for(var h=this._promise,p=0;l>p;++p){var f=this._isResolved(),_=n(s[p],h);_ instanceof e?(_=_._target(),f?_._unsetRejectionIsUnhandled():_._isPending()?_._proxyPromiseArray(this,p):_._isFulfilled()?this._promiseFulfilled(_._value(),p):this._promiseRejected(_._reason(),p)):f||this._promiseFulfilled(_,p)}},s.prototype._isResolved=function(){return null===this._values},s.prototype._resolve=function(t){this._values=null,this._promise._fulfill(t)},s.prototype.__hardReject__=s.prototype._reject=function(t){this._values=null,this._promise._rejectCallback(t,!1,!0)},s.prototype._promiseProgressed=function(t,e){this._promise._progress({index:e,value:t})},s.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var r=++this._totalResolved;r>=this._length&&this._resolve(this._values)},s.prototype._promiseRejected=function(t){this._totalResolved++,this._reject(t)},s.prototype.shouldCopyValues=function(){return!0},s.prototype.getActualLength=function(t){return t},s}},{"./util.js":38}],25:[function(t,e){"use strict";function r(t){return t instanceof Error&&p.getPrototypeOf(t)===Error.prototype}function n(t){var e;if(r(t)){e=new l(t),e.name=t.name,e.message=t.message,e.stack=t.stack;for(var n=p.keys(t),i=0;i<n.length;++i){var o=n[i];f.test(o)||(e[o]=t[o])}return e}return s.markAsOriginatingFromRejection(t),t}function i(t){return function(e,r){if(null!==t){if(e){var i=n(a(e));t._attachExtraTrace(i),t._reject(i)}else if(arguments.length>2){for(var o=arguments.length,s=new Array(o-1),u=1;o>u;++u)s[u-1]=arguments[u];t._fulfill(s)}else t._fulfill(r);t=null}}}var o,s=t("./util.js"),a=s.maybeWrapAsError,u=t("./errors.js"),c=u.TimeoutError,l=u.OperationalError,h=s.haveGetters,p=t("./es5.js"),f=/^(?:name|message|stack|cause)$/;if(o=h?function(t){this.promise=t}:function(t){this.promise=t,this.asCallback=i(t),this.callback=this.asCallback},h){var _={get:function(){return i(this.promise)}};p.defineProperty(o.prototype,"asCallback",_),p.defineProperty(o.prototype,"callback",_)}o._nodebackForPromise=i,o.prototype.toString=function(){return"[object PromiseResolver]"},o.prototype.resolve=o.prototype.fulfill=function(t){if(!(this instanceof o))throw new TypeError("Illegal invocation, resolver resolve/reject must be called within a resolver context. Consider using the promise constructor instead.\n\n See http://goo.gl/sdkXL9\n");this.promise._resolveCallback(t)},o.prototype.reject=function(t){if(!(this instanceof o))throw new TypeError("Illegal invocation, resolver resolve/reject must be called within a resolver context. Consider using the promise constructor instead.\n\n See http://goo.gl/sdkXL9\n");this.promise._rejectCallback(t)},o.prototype.progress=function(t){if(!(this instanceof o))throw new TypeError("Illegal invocation, resolver resolve/reject must be called within a resolver context. 
Consider using the promise constructor instead.\n\n See http://goo.gl/sdkXL9\n");this.promise._progress(t)},o.prototype.cancel=function(t){this.promise.cancel(t)},o.prototype.timeout=function(){this.reject(new c("timeout"))},o.prototype.isResolved=function(){return this.promise.isResolved()},o.prototype.toJSON=function(){return this.promise.toJSON()},e.exports=o},{"./errors.js":13,"./es5.js":14,"./util.js":38}],26:[function(t,e){"use strict";e.exports=function(e,r){function n(t){return!b.test(t)}function i(t){try{return t.__isPromisified__===!0}catch(e){return!1}}function o(t,e,r){var n=f.getDataPropertyOrDefault(t,e+r,j);return n?i(n):!1}function s(t,e,r){for(var n=0;n<t.length;n+=2){var i=t[n];if(r.test(i))for(var o=i.replace(r,""),s=0;s<t.length;s+=2)if(t[s]===o)throw new g("Cannot promisify an API that has normal methods with '%s'-suffix\n\n See http://goo.gl/iWrZbw\n".replace("%s",e))}}function a(t,e,r,n){for(var a=f.inheritedDataKeys(t),u=[],c=0;c<a.length;++c){var l=a[c],h=t[l],p=n===w?!0:w(l,h,t);"function"!=typeof h||i(h)||o(t,l,e)||!n(l,h,t,p)||u.push(l,h)}return s(u,e,r),u}function u(t,n,i,o){function s(){var i=n;n===p&&(i=this);var o=new e(r);o._captureStackTrace();var s="string"==typeof u&&this!==a?this[u]:t,c=_(o);try{s.apply(i,d(arguments,c))}catch(l){o._rejectCallback(v(l),!0,!0)}return o}var a=function(){return this}(),u=t;return"string"==typeof u&&(t=o),s.__isPromisified__=!0,s}function c(t,e,r,n){for(var i=new RegExp(k(e)+"$"),o=a(t,e,i,r),s=0,u=o.length;u>s;s+=2){var c=o[s],l=o[s+1],h=c+e;t[h]=n===E?E(c,p,c,l,e):n(l,function(){return E(c,p,c,l,e)})}return f.toFastProperties(t),t}function l(t,e){return E(t,e,void 0,t)}var h,p={},f=t("./util.js"),_=t("./promise_resolver.js")._nodebackForPromise,d=f.withAppended,v=f.maybeWrapAsError,y=f.canEvaluate,g=t("./errors").TypeError,m="Async",j={__isPromisified__:!0},b=/^(?:length|name|arguments|caller|callee|prototype|__isPromisified__)$/,w=function(t,e){return f.isIdentifier(t)&&"_"!==t.charAt(0)&&!f.isClass(e)},k=function(t){return t.replace(/([$])/,"\\$")},E=y?h:u;e.promisify=function(t,e){if("function"!=typeof t)throw new g("fn must be a function\n\n See http://goo.gl/916lJJ\n");if(i(t))return t;var r=l(t,arguments.length<2?p:e);return f.copyDescriptors(t,r,n),r},e.promisifyAll=function(t,e){if("function"!=typeof t&&"object"!=typeof t)throw new g("the target of promisifyAll must be an object or a function\n\n See http://goo.gl/9ITlV0\n");e=Object(e);var r=e.suffix;"string"!=typeof r&&(r=m);var n=e.filter;"function"!=typeof n&&(n=w);var i=e.promisifier;if("function"!=typeof i&&(i=E),!f.isIdentifier(r))throw new RangeError("suffix must be a valid identifier\n\n See http://goo.gl/8FZo5V\n");for(var o=f.inheritedDataKeys(t),s=0;s<o.length;++s){var a=t[o[s]];"constructor"!==o[s]&&f.isClass(a)&&(c(a.prototype,r,n,i),c(a,r,n,i))}return c(t,r,n,i)}}},{"./errors":13,"./promise_resolver.js":25,"./util.js":38}],27:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t){for(var e=c.keys(t),r=e.length,n=new Array(2*r),i=0;r>i;++i){var o=e[i];n[i]=t[o],n[i+r]=o}this.constructor$(n)}function s(t){var r,s=n(t);return u(s)?(r=s instanceof e?s._then(e.props,void 0,void 0,void 0,void 0):new o(s).promise(),s instanceof e&&r._propagateFrom(s,4),r):i("cannot await properties of a non-object\n\n See http://goo.gl/OsFKC8\n")}var a=t("./util.js"),u=a.isObject,c=t("./es5.js");a.inherits(o,r),o.prototype._init=function(){this._init$(void 0,-3)},o.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var 
r=++this._totalResolved;if(r>=this._length){for(var n={},i=this.length(),o=0,s=this.length();s>o;++o)n[this._values[o+i]]=this._values[o];this._resolve(n)}},o.prototype._promiseProgressed=function(t,e){this._promise._progress({key:this._values[e+this.length()],value:t})},o.prototype.shouldCopyValues=function(){return!1},o.prototype.getActualLength=function(t){return t>>1},e.prototype.props=function(){return s(this)},e.props=function(t){return s(t)}}},{"./es5.js":14,"./util.js":38}],28:[function(t,e){"use strict";function r(t,e,r,n,i){for(var o=0;i>o;++o)r[o+n]=t[o+e],t[o+e]=void 0}function n(t){this._capacity=t,this._length=0,this._front=0}n.prototype._willBeOverCapacity=function(t){return this._capacity<t},n.prototype._pushOne=function(t){var e=this.length();this._checkCapacity(e+1);var r=this._front+e&this._capacity-1;this[r]=t,this._length=e+1},n.prototype._unshiftOne=function(t){var e=this._capacity;this._checkCapacity(this.length()+1);var r=this._front,n=(r-1&e-1^e)-e;this[n]=t,this._front=n,this._length=this.length()+1},n.prototype.unshift=function(t,e,r){this._unshiftOne(r),this._unshiftOne(e),this._unshiftOne(t)},n.prototype.push=function(t,e,r){var n=this.length()+3;if(this._willBeOverCapacity(n))return this._pushOne(t),this._pushOne(e),void this._pushOne(r);var i=this._front+n-3;this._checkCapacity(n);var o=this._capacity-1;this[i+0&o]=t,this[i+1&o]=e,this[i+2&o]=r,this._length=n},n.prototype.shift=function(){var t=this._front,e=this[t];return this[t]=void 0,this._front=t+1&this._capacity-1,this._length--,e},n.prototype.length=function(){return this._length},n.prototype._checkCapacity=function(t){this._capacity<t&&this._resizeTo(this._capacity<<1)},n.prototype._resizeTo=function(t){var e=this._capacity;this._capacity=t;var n=this._front,i=this._length,o=n+i&e-1;r(this,0,this,e,o)},e.exports=n},{}],29:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t,o){var u=n(t);if(u instanceof e)return a(u);if(!s(t))return i("expecting an array, a promise or a thenable\n\n See http://goo.gl/s8MMhc\n");var c=new e(r);void 0!==o&&c._propagateFrom(o,5);for(var l=c._fulfill,h=c._reject,p=0,f=t.length;f>p;++p){var _=t[p];(void 0!==_||p in t)&&e.cast(_)._then(l,h,void 0,c,null)}return c}var s=t("./util.js").isArray,a=function(t){return t.then(function(e){return o(e,t)})};e.race=function(t){return o(t,void 0)},e.prototype.race=function(){return o(this,void 0)}}},{"./util.js":38}],30:[function(t,e){"use strict";e.exports=function(e,r,n,i,o){function s(t,r,n,s){this.constructor$(t),this._promise._captureStackTrace(),this._preservedValues=s===o?[]:null,this._zerothIsAccum=void 0===n,this._gotAccum=!1,this._reducingIndex=this._zerothIsAccum?1:0,this._valuesPhase=void 0;var u=i(n,this._promise),l=!1,h=u instanceof e;h&&(u=u._target(),u._isPending()?u._proxyPromiseArray(this,-1):u._isFulfilled()?(n=u._value(),this._gotAccum=!0):(this._reject(u._reason()),l=!0)),h||this._zerothIsAccum||(this._gotAccum=!0),this._callback=r,this._accum=n,l||c.invoke(a,this,void 0)}function a(){this._init$(void 0,-5)}function u(t,e,r,i){if("function"!=typeof e)return n("fn must be a function\n\n See http://goo.gl/916lJJ\n");var o=new s(t,e,r,i);return o.promise()}var c=t("./async.js"),l=t("./util.js"),h=l.tryCatch,p=l.errorObj;l.inherits(s,r),s.prototype._init=function(){},s.prototype._resolveEmptyArray=function(){(this._gotAccum||this._zerothIsAccum)&&this._resolve(null!==this._preservedValues?[]:this._accum)},s.prototype._promiseFulfilled=function(t,r){var n=this._values;n[r]=t;var 
o,s=this.length(),a=this._preservedValues,u=null!==a,c=this._gotAccum,l=this._valuesPhase;if(!l)for(l=this._valuesPhase=new Array(s),o=0;s>o;++o)l[o]=0;if(o=l[r],0===r&&this._zerothIsAccum?(this._accum=t,this._gotAccum=c=!0,l[r]=0===o?1:2):-1===r?(this._accum=t,this._gotAccum=c=!0):0===o?l[r]=1:(l[r]=2,this._accum=t),c){for(var f,_=this._callback,d=this._promise._boundTo,v=this._reducingIndex;s>v;++v)if(o=l[v],2!==o){if(1!==o)return;if(t=n[v],this._promise._pushContext(),u?(a.push(t),f=h(_).call(d,t,v,s)):f=h(_).call(d,this._accum,t,v,s),this._promise._popContext(),f===p)return this._reject(f.e);var y=i(f,this._promise);if(y instanceof e){if(y=y._target(),y._isPending())return l[v]=4,y._proxyPromiseArray(this,v);if(!y._isFulfilled())return this._reject(y._reason());f=y._value()}this._reducingIndex=v+1,this._accum=f}else this._reducingIndex=v+1;this._resolve(u?a:this._accum)}},e.prototype.reduce=function(t,e){return u(this,t,e,null)},e.reduce=function(t,e,r,n){return u(t,e,r,n)}}},{"./async.js":2,"./util.js":38}],31:[function(t,e){"use strict";var r,n=t("./util"),i=function(){throw new Error("No async scheduler available\n\n See http://goo.gl/m3OTXk\n")};if(n.isNode&&"undefined"==typeof MutationObserver){var o=global.setImmediate,s=process.nextTick;r=n.isRecentNode?function(t){o.call(global,t)}:function(t){s.call(process,t)}}else"undefined"!=typeof MutationObserver?(r=function(t){var e=document.createElement("div"),r=new MutationObserver(t);return r.observe(e,{attributes:!0}),function(){e.classList.toggle("foo")}},r.isStatic=!0):r="undefined"!=typeof setImmediate?function(t){setImmediate(t)}:"undefined"!=typeof setTimeout?function(t){setTimeout(t,0)}:i;e.exports=r},{"./util":38}],32:[function(t,e){"use strict";e.exports=function(e,r){function n(t){this.constructor$(t)}var i=e.PromiseInspection,o=t("./util.js");o.inherits(n,r),n.prototype._promiseResolved=function(t,e){this._values[t]=e;var r=++this._totalResolved;r>=this._length&&this._resolve(this._values)},n.prototype._promiseFulfilled=function(t,e){var r=new i;r._bitField=268435456,r._settledValue=t,this._promiseResolved(e,r)},n.prototype._promiseRejected=function(t,e){var r=new i;r._bitField=134217728,r._settledValue=t,this._promiseResolved(e,r)},e.settle=function(t){return new n(t).promise()},e.prototype.settle=function(){return new n(this).promise()}}},{"./util.js":38}],33:[function(t,e){"use strict";e.exports=function(e,r,n){function i(t){this.constructor$(t),this._howMany=0,this._unwrap=!1,this._initialized=!1}function o(t,e){if((0|e)!==e||0>e)return n("expecting a positive integer\n\n See http://goo.gl/1wAmHx\n");var r=new i(t),o=r.promise();return r.setHowMany(e),r.init(),o}var s=t("./util.js"),a=t("./errors.js").RangeError,u=t("./errors.js").AggregateError,c=s.isArray;s.inherits(i,r),i.prototype._init=function(){if(this._initialized){if(0===this._howMany)return void this._resolve([]);this._init$(void 0,-5);var t=c(this._values);!this._isResolved()&&t&&this._howMany>this._canPossiblyFulfill()&&this._reject(this._getRangeError(this.length()))}},i.prototype.init=function(){this._initialized=!0,this._init()},i.prototype.setUnwrap=function(){this._unwrap=!0},i.prototype.howMany=function(){return 
this._howMany},i.prototype.setHowMany=function(t){this._howMany=t},i.prototype._promiseFulfilled=function(t){this._addFulfilled(t),this._fulfilled()===this.howMany()&&(this._values.length=this.howMany(),this._resolve(1===this.howMany()&&this._unwrap?this._values[0]:this._values))},i.prototype._promiseRejected=function(t){if(this._addRejected(t),this.howMany()>this._canPossiblyFulfill()){for(var e=new u,r=this.length();r<this._values.length;++r)e.push(this._values[r]);this._reject(e)}},i.prototype._fulfilled=function(){return this._totalResolved},i.prototype._rejected=function(){return this._values.length-this.length()},i.prototype._addRejected=function(t){this._values.push(t)},i.prototype._addFulfilled=function(t){this._values[this._totalResolved++]=t},i.prototype._canPossiblyFulfill=function(){return this.length()-this._rejected()},i.prototype._getRangeError=function(t){var e="Input array must contain at least "+this._howMany+" items but contains only "+t+" items";return new a(e)},i.prototype._resolveEmptyArray=function(){this._reject(this._getRangeError(0))},e.some=function(t,e){return o(t,e)},e.prototype.some=function(t){return o(this,t)},e._SomePromiseArray=i}},{"./errors.js":13,"./util.js":38}],34:[function(t,e){"use strict";e.exports=function(t){function e(t){void 0!==t?(t=t._target(),this._bitField=t._bitField,this._settledValue=t._settledValue):(this._bitField=0,this._settledValue=void 0)}e.prototype.value=function(){if(!this.isFulfilled())throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\n\n See http://goo.gl/hc1DLj\n");return this._settledValue},e.prototype.error=e.prototype.reason=function(){if(!this.isRejected())throw new TypeError("cannot get rejection reason of a non-rejected promise\n\n See http://goo.gl/hPuiwB\n");return this._settledValue},e.prototype.isFulfilled=t.prototype._isFulfilled=function(){return(268435456&this._bitField)>0},e.prototype.isRejected=t.prototype._isRejected=function(){return(134217728&this._bitField)>0},e.prototype.isPending=t.prototype._isPending=function(){return 0===(402653184&this._bitField)},e.prototype.isResolved=t.prototype._isResolved=function(){return(402653184&this._bitField)>0},t.prototype.isPending=function(){return this._target()._isPending()},t.prototype.isRejected=function(){return this._target()._isRejected()},t.prototype.isFulfilled=function(){return this._target()._isFulfilled()},t.prototype.isResolved=function(){return this._target()._isResolved()},t.prototype._value=function(){return this._settledValue},t.prototype._reason=function(){return this._unsetRejectionIsUnhandled(),this._settledValue},t.prototype.value=function(){var t=this._target();if(!t.isFulfilled())throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\n\n See http://goo.gl/hc1DLj\n");return t._settledValue},t.prototype.reason=function(){var t=this._target();if(!t.isRejected())throw new TypeError("cannot get rejection reason of a non-rejected promise\n\n See http://goo.gl/hPuiwB\n");return t._unsetRejectionIsUnhandled(),t._settledValue},t.PromiseInspection=e}},{}],35:[function(t,e){"use strict";e.exports=function(e,r){function n(t,n){if(c(t)){if(t instanceof e)return t;if(o(t)){var l=new e(r);return t._then(l._fulfillUnchecked,l._rejectUncheckedCheckError,l._progressUnchecked,l,null),l}var h=a.tryCatch(i)(t);if(h===u){n&&n._pushContext();var l=e.reject(h.e);return n&&n._popContext(),l}if("function"==typeof h)return s(t,h,n)}return t}function i(t){return t.then}function o(t){return l.call(t,"_promise0")}function 
s(t,n,i){function o(r){l&&(t===r?l._rejectCallback(e._makeSelfResolutionError(),!1,!0):l._resolveCallback(r),l=null)}function s(t){l&&(l._rejectCallback(t,p,!0),l=null)}function c(t){l&&"function"==typeof l._progress&&l._progress(t)}var l=new e(r),h=l;i&&i._pushContext(),l._captureStackTrace(),i&&i._popContext();var p=!0,f=a.tryCatch(n).call(t,o,s,c);return p=!1,l&&f===u&&(l._rejectCallback(f.e,!0,!0),l=null),h}var a=t("./util.js"),u=a.errorObj,c=a.isObject,l={}.hasOwnProperty;return n}},{"./util.js":38}],36:[function(t,e){"use strict";e.exports=function(e,r){function n(t){var e=this;return e instanceof Number&&(e=+e),clearTimeout(e),t}function i(t){var e=this;throw e instanceof Number&&(e=+e),clearTimeout(e),t}var o=t("./util.js"),s=e.TimeoutError,a=function(t,e){if(t.isPending()){"string"!=typeof e&&(e="operation timed out");var r=new s(e);o.markAsOriginatingFromRejection(r),t._attachExtraTrace(r),t._cancel(r)}},u=function(t){return c(+this).thenReturn(t)},c=e.delay=function(t,n){if(void 0===n){n=t,t=void 0;var i=new e(r);return setTimeout(function(){i._fulfill()},n),i}return n=+n,e.resolve(t)._then(u,null,null,n,void 0)};e.prototype.delay=function(t){return c(this,t)},e.prototype.timeout=function(t,e){t=+t;var r=this.then().cancellable();r._cancellationParent=this;var o=setTimeout(function(){a(r,e)},t);return r._then(n,i,void 0,o,void 0)}}},{"./util.js":38}],37:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t){for(var r=t.length,n=0;r>n;++n){var i=t[n];if(i.isRejected())return e.reject(i.error());t[n]=i._settledValue}return t}function s(t){setTimeout(function(){throw t},0)}function a(t){var e=n(t);return e!==t&&"function"==typeof t._isDisposable&&"function"==typeof t._getDisposer&&t._isDisposable()&&e._setDisposable(t._getDisposer()),e}function u(t,r){function i(){if(o>=u)return c.resolve();var l=a(t[o++]);if(l instanceof e&&l._isDisposable()){try{l=n(l._getDisposer().tryDispose(r),t.promise)}catch(h){return s(h)}if(l instanceof e)return l._then(i,s,null,null,null)}i()}var o=0,u=t.length,c=e.defer();return i(),c.promise}function c(t){var e=new v;return e._settledValue=t,e._bitField=268435456,u(this,e).thenReturn(t)}function l(t){var e=new v;return e._settledValue=t,e._bitField=134217728,u(this,e).thenThrow(t)}function h(t,e,r){this._data=t,this._promise=e,this._context=r}function p(t,e,r){this.constructor$(t,e,r)}function f(t){return h.isDisposer(t)?(this.resources[this.index]._setDisposable(t),t.promise()):t}var _=t("./errors.js").TypeError,d=t("./util.js").inherits,v=e.PromiseInspection;h.prototype.data=function(){return this._data},h.prototype.promise=function(){return this._promise},h.prototype.resource=function(){return this.promise().isFulfilled()?this.promise().value():null},h.prototype.tryDispose=function(t){var e=this.resource(),r=this._context;void 0!==r&&r._pushContext();var n=null!==e?this.doDispose(e,t):null;return void 0!==r&&r._popContext(),this._promise._unsetDisposable(),this._data=null,n},h.isDisposer=function(t){return null!=t&&"function"==typeof t.resource&&"function"==typeof t.tryDispose},d(p,h),p.prototype.doDispose=function(t,e){var r=this.data();return r.call(t,t,e)},e.using=function(){var t=arguments.length;if(2>t)return r("you must pass at least 2 arguments to Promise.using");var i=arguments[t-1];if("function"!=typeof i)return r("fn must be a function\n\n See http://goo.gl/916lJJ\n");t--;for(var s=new Array(t),a=0;t>a;++a){var u=arguments[a];if(h.isDisposer(u)){var p=u;u=u.promise(),u._setDisposable(p)}else{var _=n(u);_ instanceof 
e&&(u=_._then(f,null,null,{resources:s,index:a},void 0))}s[a]=u}var d=e.settle(s).then(o).then(function(t){d._pushContext();var e;try{e=i.apply(void 0,t)}finally{d._popContext()}return e})._then(c,l,void 0,s,void 0);return s.promise=d,d},e.prototype._setDisposable=function(t){this._bitField=262144|this._bitField,this._disposer=t},e.prototype._isDisposable=function(){return(262144&this._bitField)>0},e.prototype._getDisposer=function(){return this._disposer},e.prototype._unsetDisposable=function(){this._bitField=-262145&this._bitField,this._disposer=void 0},e.prototype.disposer=function(t){if("function"==typeof t)return new p(t,this,i());throw new _}}},{"./errors.js":13,"./util.js":38}],38:[function(t,e,r){"use strict";function n(){try{return C.apply(this,arguments)}catch(t){return F.e=t,F
-}}function i(t){return C=t,n}function o(t){return null==t||t===!0||t===!1||"string"==typeof t||"number"==typeof t}function s(t){return!o(t)}function a(t){return o(t)?new Error(v(t)):t}function u(t,e){var r,n=t.length,i=new Array(n+1);for(r=0;n>r;++r)i[r]=t[r];return i[r]=e,i}function c(t,e,r){if(!w.isES5)return{}.hasOwnProperty.call(t,e)?t[e]:void 0;var n=Object.getOwnPropertyDescriptor(t,e);return null!=n?null==n.get&&null==n.set?n.value:r:void 0}function l(t,e,r){if(o(t))return t;var n={value:r,configurable:!0,enumerable:!1,writable:!0};return w.defineProperty(t,e,n),t}function h(t){throw t}function p(t){try{if("function"==typeof t){var e=w.names(t.prototype);return w.isES5?e.length>1:e.length>0&&!(1===e.length&&"constructor"===e[0])}return!1}catch(r){return!1}}function f(t){function e(){}e.prototype=t;for(var r=8;r--;)new e;return t}function _(t){return R.test(t)}function d(t,e,r){for(var n=new Array(t),i=0;t>i;++i)n[i]=e+i+r;return n}function v(t){try{return t+""}catch(e){return"[no string representation]"}}function y(t){try{l(t,"isOperational",!0)}catch(e){}}function g(t){return null==t?!1:t instanceof Error.__BluebirdErrorTypes__.OperationalError||t.isOperational===!0}function m(t){return t instanceof Error&&w.propertyIsWritable(t,"stack")}function j(t){return{}.toString.call(t)}function b(t,e,r){for(var n=w.names(t),i=0;i<n.length;++i){var o=n[i];r(o)&&w.defineProperty(e,o,w.getDescriptor(t,o))}}var w=t("./es5.js"),k="undefined"==typeof navigator,E=function(){try{var t={};return w.defineProperty(t,"f",{get:function(){return 3}}),3===t.f}catch(e){return!1}}(),F={e:{}},C,T=function(t,e){function r(){this.constructor=t,this.constructor$=e;for(var r in e.prototype)n.call(e.prototype,r)&&"$"!==r.charAt(r.length-1)&&(this[r+"$"]=e.prototype[r])}var n={}.hasOwnProperty;return r.prototype=e.prototype,t.prototype=new r,t.prototype},x=function(){return"string"!==this}.call("string"),P=function(){if(w.isES5){var t=Object.prototype,e=Object.getOwnPropertyNames;return function(r){for(var n=[],i=Object.create(null);null!=r&&r!==t;){var o;try{o=e(r)}catch(s){return n}for(var a=0;a<o.length;++a){var u=o[a];if(!i[u]){i[u]=!0;var c=Object.getOwnPropertyDescriptor(r,u);null!=c&&null==c.get&&null==c.set&&n.push(u)}}r=w.getPrototypeOf(r)}return n}}return function(t){var e=[];for(var r in t)e.push(r);return e}}(),R=/^[a-z$_][a-z$_0-9]*$/i,A=function(){return"stack"in new Error?function(t){return m(t)?t:new Error(v(t))}:function(t){if(m(t))return t;try{throw new Error(v(t))}catch(e){return e}}}(),S={isClass:p,isIdentifier:_,inheritedDataKeys:P,getDataPropertyOrDefault:c,thrower:h,isArray:w.isArray,haveGetters:E,notEnumerableProp:l,isPrimitive:o,isObject:s,canEvaluate:k,errorObj:F,tryCatch:i,inherits:T,withAppended:u,maybeWrapAsError:a,wrapsPrimitiveReceiver:x,toFastProperties:f,filledRange:d,toString:v,canAttachTrace:m,ensureErrorObject:A,originatesFromRejection:g,markAsOriginatingFromRejection:y,classString:j,copyDescriptors:b,hasDevTools:"undefined"!=typeof chrome&&chrome&&"function"==typeof chrome.loadTimes,isNode:"undefined"!=typeof process&&"[object process]"===j(process).toLowerCase()};S.isRecentNode=S.isNode&&function(){var t=process.versions.node.split(".").map(Number);return 0===t[0]&&t[1]>10||t[0]>0}();try{throw new Error}catch(O){S.lastLineError=O}e.exports=S},{"./es5.js":14}],39:[function(t,e){function r(){this._events=this._events||{},this._maxListeners=this._maxListeners||void 0}function n(t){return"function"==typeof t}function i(t){return"number"==typeof t}function 
o(t){return"object"==typeof t&&null!==t}function s(t){return void 0===t}e.exports=r,r.EventEmitter=r,r.prototype._events=void 0,r.prototype._maxListeners=void 0,r.defaultMaxListeners=10,r.prototype.setMaxListeners=function(t){if(!i(t)||0>t||isNaN(t))throw TypeError("n must be a positive number");return this._maxListeners=t,this},r.prototype.emit=function(t){var e,r,i,a,u,c;if(this._events||(this._events={}),"error"===t&&(!this._events.error||o(this._events.error)&&!this._events.error.length)){if(e=arguments[1],e instanceof Error)throw e;throw TypeError('Uncaught, unspecified "error" event.')}if(r=this._events[t],s(r))return!1;if(n(r))switch(arguments.length){case 1:r.call(this);break;case 2:r.call(this,arguments[1]);break;case 3:r.call(this,arguments[1],arguments[2]);break;default:for(i=arguments.length,a=new Array(i-1),u=1;i>u;u++)a[u-1]=arguments[u];r.apply(this,a)}else if(o(r)){for(i=arguments.length,a=new Array(i-1),u=1;i>u;u++)a[u-1]=arguments[u];for(c=r.slice(),i=c.length,u=0;i>u;u++)c[u].apply(this,a)}return!0},r.prototype.addListener=function(t,e){var i;if(!n(e))throw TypeError("listener must be a function");if(this._events||(this._events={}),this._events.newListener&&this.emit("newListener",t,n(e.listener)?e.listener:e),this._events[t]?o(this._events[t])?this._events[t].push(e):this._events[t]=[this._events[t],e]:this._events[t]=e,o(this._events[t])&&!this._events[t].warned){var i;i=s(this._maxListeners)?r.defaultMaxListeners:this._maxListeners,i&&i>0&&this._events[t].length>i&&(this._events[t].warned=!0,console.error("(node) warning: possible EventEmitter memory leak detected. %d listeners added. Use emitter.setMaxListeners() to increase limit.",this._events[t].length),"function"==typeof console.trace&&console.trace())}return this},r.prototype.on=r.prototype.addListener,r.prototype.once=function(t,e){function r(){this.removeListener(t,r),i||(i=!0,e.apply(this,arguments))}if(!n(e))throw TypeError("listener must be a function");var i=!1;return r.listener=e,this.on(t,r),this},r.prototype.removeListener=function(t,e){var r,i,s,a;if(!n(e))throw TypeError("listener must be a function");if(!this._events||!this._events[t])return this;if(r=this._events[t],s=r.length,i=-1,r===e||n(r.listener)&&r.listener===e)delete this._events[t],this._events.removeListener&&this.emit("removeListener",t,e);else if(o(r)){for(a=s;a-->0;)if(r[a]===e||r[a].listener&&r[a].listener===e){i=a;break}if(0>i)return this;1===r.length?(r.length=0,delete this._events[t]):r.splice(i,1),this._events.removeListener&&this.emit("removeListener",t,e)}return this},r.prototype.removeAllListeners=function(t){var e,r;if(!this._events)return this;if(!this._events.removeListener)return 0===arguments.length?this._events={}:this._events[t]&&delete this._events[t],this;if(0===arguments.length){for(e in this._events)"removeListener"!==e&&this.removeAllListeners(e);return this.removeAllListeners("removeListener"),this._events={},this}if(r=this._events[t],n(r))this.removeListener(t,r);else for(;r.length;)this.removeListener(t,r[r.length-1]);return delete this._events[t],this},r.prototype.listeners=function(t){var e;return e=this._events&&this._events[t]?n(this._events[t])?[this._events[t]]:this._events[t].slice():[]},r.listenerCount=function(t,e){var r;return r=t._events&&t._events[e]?n(t._events[e])?1:t._events[e].length:0}},{}]},{},[4])(4)}),"undefined"!=typeof window&&null!==window?window.P=window.Promise:"undefined"!=typeof self&&null!==self&&(self.P=self.Promise); \ No newline at end of file
+!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;"undefined"!=typeof window?e=window:"undefined"!=typeof global?e=global:"undefined"!=typeof self&&(e=self),e.Promise=t()}}(function(){var t,e,r;return function n(t,e,r){function i(s,a){if(!e[s]){if(!t[s]){var u="function"==typeof _dereq_&&_dereq_;if(!a&&u)return u(s,!0);if(o)return o(s,!0);var c=new Error("Cannot find module '"+s+"'");throw c.code="MODULE_NOT_FOUND",c}var l=e[s]={exports:{}};t[s][0].call(l.exports,function(e){var r=t[s][1][e];return i(r?r:e)},l,l.exports,n,t,e,r)}return e[s].exports}for(var o="function"==typeof _dereq_&&_dereq_,s=0;s<r.length;s++)i(r[s]);return i}({1:[function(t,e){"use strict";e.exports=function(t){function e(t){var e=new r(t),n=e.promise();return e.setHowMany(1),e.setUnwrap(),e.init(),n}var r=t._SomePromiseArray;t.any=function(t){return e(t)},t.prototype.any=function(){return e(this)}}},{}],2:[function(t,e){"use strict";function r(){this._isTickUsed=!1,this._lateQueue=new c(16),this._normalQueue=new c(16),this._trampolineEnabled=!0;var t=this;this.drainQueues=function(){t._drainQueues()},this._schedule=u.isStatic?u(this.drainQueues):u}function n(t,e,r){this._lateQueue.push(t,e,r),this._queueTick()}function i(t,e,r){this._normalQueue.push(t,e,r),this._queueTick()}function o(t){this._normalQueue._pushOne(t),this._queueTick()}var s;try{throw new Error}catch(a){s=a}var u=t("./schedule.js"),c=t("./queue.js"),l=t("./util.js");r.prototype.disableTrampolineIfNecessary=function(){l.hasDevTools&&(this._trampolineEnabled=!1)},r.prototype.enableTrampoline=function(){this._trampolineEnabled||(this._trampolineEnabled=!0,this._schedule=function(t){setTimeout(t,0)})},r.prototype.haveItemsQueued=function(){return this._normalQueue.length()>0},r.prototype.throwLater=function(t,e){if(1===arguments.length&&(e=t,t=function(){throw e}),"undefined"!=typeof setTimeout)setTimeout(function(){t(e)},0);else try{this._schedule(function(){t(e)})}catch(r){throw new Error("No async scheduler available\n\n See http://goo.gl/m3OTXk\n")}},l.hasDevTools?(u.isStatic&&(u=function(t){setTimeout(t,0)}),r.prototype.invokeLater=function(t,e,r){this._trampolineEnabled?n.call(this,t,e,r):this._schedule(function(){setTimeout(function(){t.call(e,r)},100)})},r.prototype.invoke=function(t,e,r){this._trampolineEnabled?i.call(this,t,e,r):this._schedule(function(){t.call(e,r)})},r.prototype.settlePromises=function(t){this._trampolineEnabled?o.call(this,t):this._schedule(function(){t._settlePromises()})}):(r.prototype.invokeLater=n,r.prototype.invoke=i,r.prototype.settlePromises=o),r.prototype.invokeFirst=function(t,e,r){this._normalQueue.unshift(t,e,r),this._queueTick()},r.prototype._drainQueue=function(t){for(;t.length()>0;){var e=t.shift();if("function"==typeof e){var r=t.shift(),n=t.shift();e.call(r,n)}else e._settlePromises()}},r.prototype._drainQueues=function(){this._drainQueue(this._normalQueue),this._reset(),this._drainQueue(this._lateQueue)},r.prototype._queueTick=function(){this._isTickUsed||(this._isTickUsed=!0,this._schedule(this.drainQueues))},r.prototype._reset=function(){this._isTickUsed=!1},e.exports=new r,e.exports.firstLineError=s},{"./queue.js":28,"./schedule.js":31,"./util.js":38}],3:[function(t,e){"use strict";e.exports=function(t,e,r){var 
n=function(t,e){this._reject(e)},i=function(t,e){e.promiseRejectionQueued=!0,e.bindingPromise._then(n,n,null,this,t)},o=function(t,e){this._isPending()&&this._resolveCallback(e.target)},s=function(t,e){e.promiseRejectionQueued||this._reject(t)};t.prototype.bind=function(n){var a=r(n),u=new t(e);u._propagateFrom(this,1);var c=this._target();if(u._setBoundTo(a),a instanceof t){var l={promiseRejectionQueued:!1,promise:u,target:c,bindingPromise:a};c._then(e,i,u._progress,u,l),a._then(o,s,u._progress,u,l)}else u._resolveCallback(c);return u},t.prototype._setBoundTo=function(t){void 0!==t?(this._bitField=131072|this._bitField,this._boundTo=t):this._bitField=-131073&this._bitField},t.prototype._isBound=function(){return 131072===(131072&this._bitField)},t.bind=function(n,i){var o=r(n),s=new t(e);return s._setBoundTo(o),o instanceof t?o._then(function(){s._resolveCallback(i)},s._reject,s._progress,s,null):s._resolveCallback(i),s}}},{}],4:[function(t,e){"use strict";function r(){try{Promise===i&&(Promise=n)}catch(t){}return i}var n;"undefined"!=typeof Promise&&(n=Promise);var i=t("./promise.js")();i.noConflict=r,e.exports=i},{"./promise.js":23}],5:[function(t,e){"use strict";var r=Object.create;if(r){var n=r(null),i=r(null);n[" size"]=i[" size"]=0}e.exports=function(e){function r(t,r){var n;if(null!=t&&(n=t[r]),"function"!=typeof n){var i="Object "+a.classString(t)+" has no method '"+a.toString(r)+"'";throw new e.TypeError(i)}return n}function n(t){var e=this.pop(),n=r(t,e);return n.apply(t,this)}function i(t){return t[this]}function o(t){var e=+this;return 0>e&&(e=Math.max(0,e+t.length)),t[e]}{var s,a=t("./util.js"),u=a.canEvaluate;a.isIdentifier}e.prototype.call=function(t){for(var e=arguments.length,r=new Array(e-1),i=1;e>i;++i)r[i-1]=arguments[i];return r.push(t),this._then(n,void 0,void 0,r,void 0)},e.prototype.get=function(t){var e,r="number"==typeof t;if(r)e=o;else if(u){var n=s(t);e=null!==n?n:i}else e=i;return this._then(e,void 0,void 0,t,void 0)}}},{"./util.js":38}],6:[function(t,e){"use strict";e.exports=function(e){var r=t("./errors.js"),n=t("./async.js"),i=r.CancellationError;e.prototype._cancel=function(t){if(!this.isCancellable())return this;for(var e,r=this;void 0!==(e=r._cancellationParent)&&e.isCancellable();)r=e;this._unsetCancellable(),r._target()._rejectCallback(t,!1,!0)},e.prototype.cancel=function(t){return this.isCancellable()?(void 0===t&&(t=new i),n.invokeLater(this._cancel,this,t),this):this},e.prototype.cancellable=function(){return this._cancellable()?this:(n.enableTrampoline(),this._setCancellable(),this._cancellationParent=void 0,this)},e.prototype.uncancellable=function(){var t=this.then();return t._unsetCancellable(),t},e.prototype.fork=function(t,e,r){var n=this._then(t,e,r,void 0,void 0);return n._setCancellable(),n._cancellationParent=void 0,n}}},{"./async.js":2,"./errors.js":13}],7:[function(t,e){"use strict";e.exports=function(){function e(t){this._parent=t;var r=this._length=1+(void 0===t?0:t._length);j(this,e),r>32&&this.uncycle()}function r(t,e){for(var r=0;r<e.length-1;++r)e[r].push("From previous event:"),e[r]=e[r].join("\n");return r<e.length&&(e[r]=e[r].join("\n")),t+"\n"+e.join("\n")}function n(t){for(var e=0;e<t.length;++e)(0===t[e].length||e+1<t.length&&t[e][0]===t[e+1][0])&&(t.splice(e,1),e--)}function i(t){for(var e=t[0],r=1;r<t.length;++r){for(var n=t[r],i=e.length-1,o=e[i],s=-1,a=n.length-1;a>=0;--a)if(n[a]===o){s=a;break}for(var a=s;a>=0;--a){var u=n[a];if(e[i]!==u)break;e.pop(),i--}e=n}}function o(t){for(var e=[],r=0;r<t.length;++r){var 
n=t[r],i=_.test(n)||" (No stack trace)"===n,o=i&&y(n);i&&!o&&(v&&" "!==n.charAt(0)&&(n=" "+n),e.push(n))}return e}function s(t){for(var e=t.stack.replace(/\s+$/g,"").split("\n"),r=0;r<e.length;++r){var n=e[r];if(" (No stack trace)"===n||_.test(n))break}return r>0&&(e=e.slice(r)),e}function a(t){var e;if("function"==typeof t)e="[function "+(t.name||"anonymous")+"]";else{e=t.toString();var r=/\[object [a-zA-Z0-9$_]+\]/;if(r.test(e))try{var n=JSON.stringify(t);e=n}catch(i){}0===e.length&&(e="(empty array)")}return"(<"+u(e)+">, no stack trace)"}function u(t){var e=41;return t.length<e?t:t.substr(0,e-3)+"..."}function c(t){var e=t.match(g);return e?{fileName:e[1],line:parseInt(e[2],10)}:void 0}var l,h=t("./async.js"),p=t("./util.js"),f=/[\\\/]bluebird[\\\/]js[\\\/](main|debug|zalgo|instrumented)/,_=null,d=null,v=!1;p.inherits(e,Error),e.prototype.uncycle=function(){var t=this._length;if(!(2>t)){for(var e=[],r={},n=0,i=this;void 0!==i;++n)e.push(i),i=i._parent;t=this._length=n;for(var n=t-1;n>=0;--n){var o=e[n].stack;void 0===r[o]&&(r[o]=n)}for(var n=0;t>n;++n){var s=e[n].stack,a=r[s];if(void 0!==a&&a!==n){a>0&&(e[a-1]._parent=void 0,e[a-1]._length=1),e[n]._parent=void 0,e[n]._length=1;var u=n>0?e[n-1]:this;t-1>a?(u._parent=e[a+1],u._parent.uncycle(),u._length=u._parent._length+1):(u._parent=void 0,u._length=1);for(var c=u._length+1,l=n-2;l>=0;--l)e[l]._length=c,c++;return}}}},e.prototype.parent=function(){return this._parent},e.prototype.hasParent=function(){return void 0!==this._parent},e.prototype.attachExtraTrace=function(t){if(!t.__stackCleaned__){this.uncycle();for(var s=e.parseStackAndMessage(t),a=s.message,u=[s.stack],c=this;void 0!==c;)u.push(o(c.stack.split("\n"))),c=c._parent;i(u),n(u),p.notEnumerableProp(t,"stack",r(a,u)),p.notEnumerableProp(t,"__stackCleaned__",!0)}},e.parseStackAndMessage=function(t){var e=t.stack,r=t.toString();return e="string"==typeof e&&e.length>0?s(t):[" (No stack trace)"],{message:r,stack:o(e)}},e.formatAndLogError=function(t,e){if("undefined"!=typeof console){var r;if("object"==typeof t||"function"==typeof t){var n=t.stack;r=e+d(n,t)}else r=e+String(t);"function"==typeof l?l(r):("function"==typeof console.log||"object"==typeof console.log)&&console.log(r)}},e.unhandledRejection=function(t){e.formatAndLogError(t,"^--- With additional stack trace: ")},e.isSupported=function(){return"function"==typeof j},e.fireRejectionEvent=function(t,r,n,i){var o=!1;try{"function"==typeof r&&(o=!0,"rejectionHandled"===t?r(i):r(n,i))}catch(s){h.throwLater(s)}var a=!1;try{a=b(t,n,i)}catch(s){a=!0,h.throwLater(s)}var u=!1;if(m)try{u=m(t.toLowerCase(),{reason:n,promise:i})}catch(s){u=!0,h.throwLater(s)}a||o||u||"unhandledRejection"!==t||e.formatAndLogError(n,"Unhandled rejection ")};var y=function(){return!1},g=/[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;e.setBounds=function(t,r){if(e.isSupported()){for(var n,i,o=t.stack.split("\n"),s=r.stack.split("\n"),a=-1,u=-1,l=0;l<o.length;++l){var h=c(o[l]);if(h){n=h.fileName,a=h.line;break}}for(var l=0;l<s.length;++l){var h=c(s[l]);if(h){i=h.fileName,u=h.line;break}}0>a||0>u||!n||!i||n!==i||a>=u||(y=function(t){if(f.test(t))return!0;var e=c(t);return e&&e.fileName===n&&a<=e.line&&e.line<=u?!0:!1})}};var m,j=function(){var t=/^\s*at\s*/,e=function(t,e){return"string"==typeof t?t:void 0!==e.name&&void 0!==e.message?e.toString():a(e)};if("number"==typeof Error.stackTraceLimit&&"function"==typeof Error.captureStackTrace){Error.stackTraceLimit=Error.stackTraceLimit+6,_=t,d=e;var r=Error.captureStackTrace;return y=function(t){return 
f.test(t)},function(t,e){Error.stackTraceLimit=Error.stackTraceLimit+6,r(t,e),Error.stackTraceLimit=Error.stackTraceLimit-6}}var n=new Error;if("string"==typeof n.stack&&n.stack.split("\n")[0].indexOf("stackDetection@")>=0)return _=/@/,d=e,v=!0,function(t){t.stack=(new Error).stack};var i;try{throw new Error}catch(o){i="stack"in o}return"stack"in n||!i||"number"!=typeof Error.stackTraceLimit?(d=function(t,e){return"string"==typeof t?t:"object"!=typeof e&&"function"!=typeof e||void 0===e.name||void 0===e.message?a(e):e.toString()},null):(_=t,d=e,function(t){Error.stackTraceLimit=Error.stackTraceLimit+6;try{throw new Error}catch(e){t.stack=e.stack}Error.stackTraceLimit=Error.stackTraceLimit-6})}([]),b=function(){if(p.isNode)return function(t,e,r){return"rejectionHandled"===t?process.emit(t,r):process.emit(t,e,r)};var t=!1,e=!0;try{var r=new self.CustomEvent("test");t=r instanceof CustomEvent}catch(n){}if(!t)try{var i=document.createEvent("CustomEvent");i.initCustomEvent("testingtheevent",!1,!0,{}),self.dispatchEvent(i)}catch(n){e=!1}e&&(m=function(e,r){var n;return t?n=new self.CustomEvent(e,{detail:r,bubbles:!1,cancelable:!0}):self.dispatchEvent&&(n=document.createEvent("CustomEvent"),n.initCustomEvent(e,!1,!0,r)),n?!self.dispatchEvent(n):!1});var o={};return o.unhandledRejection="onunhandledRejection".toLowerCase(),o.rejectionHandled="onrejectionHandled".toLowerCase(),function(t,e,r){var n=o[t],i=self[n];return i?("rejectionHandled"===t?i.call(self,r):i.call(self,e,r),!0):!1}}();return"undefined"!=typeof console&&"undefined"!=typeof console.warn&&(l=function(t){console.warn(t)},p.isNode&&process.stderr.isTTY?l=function(t){process.stderr.write(""+t+"\n")}:p.isNode||"string"!=typeof(new Error).stack||(l=function(t){console.warn("%c"+t,"color: red")})),e}},{"./async.js":2,"./util.js":38}],8:[function(t,e){"use strict";e.exports=function(e){function r(t,e,r){this._instances=t,this._callback=e,this._promise=r}function n(t,e){var r={},n=s(t).call(r,e);if(n===a)return n;var i=u(r);return i.length?(a.e=new c("Catch filter must inherit from Error or be a simple predicate function\n\n See http://goo.gl/o84o68\n"),a):n}var i=t("./util.js"),o=t("./errors.js"),s=i.tryCatch,a=i.errorObj,u=t("./es5.js").keys,c=o.TypeError;return r.prototype.doFilter=function(t){for(var r=this._callback,i=this._promise,o=i._boundValue(),u=0,c=this._instances.length;c>u;++u){var l=this._instances[u],h=l===Error||null!=l&&l.prototype instanceof Error;if(h&&t instanceof l){var p=s(r).call(o,t);return p===a?(e.e=p.e,e):p}if("function"==typeof l&&!h){var f=n(l,t);if(f===a){t=a.e;break}if(f){var p=s(r).call(o,t);return p===a?(e.e=p.e,e):p}}}return e.e=t,e},r}},{"./errors.js":13,"./es5.js":14,"./util.js":38}],9:[function(t,e){"use strict";e.exports=function(t,e,r){function n(){this._trace=new e(o())}function i(){return r()?new n:void 0}function o(){var t=s.length-1;return t>=0?s[t]:void 0}var s=[];return n.prototype._pushContext=function(){r()&&void 0!==this._trace&&s.push(this._trace)},n.prototype._popContext=function(){r()&&void 0!==this._trace&&s.pop()},t.prototype._peekContext=o,t.prototype._pushContext=n.prototype._pushContext,t.prototype._popContext=n.prototype._popContext,i}},{}],10:[function(t,e){"use strict";e.exports=function(e,r){var n,i,o=e._getDomain,s=t("./async.js"),a=t("./errors.js").Warning,u=t("./util.js"),c=u.canAttachTrace,l=!1||u.isNode&&(!!process.env.BLUEBIRD_DEBUG||"development"===process.env.NODE_ENV);return 
l&&s.disableTrampolineIfNecessary(),e.prototype._ignoreRejections=function(){this._unsetRejectionIsUnhandled(),this._bitField=16777216|this._bitField},e.prototype._ensurePossibleRejectionHandled=function(){0===(16777216&this._bitField)&&(this._setRejectionIsUnhandled(),s.invokeLater(this._notifyUnhandledRejection,this,void 0))},e.prototype._notifyUnhandledRejectionIsHandled=function(){r.fireRejectionEvent("rejectionHandled",n,void 0,this)},e.prototype._notifyUnhandledRejection=function(){if(this._isRejectionUnhandled()){var t=this._getCarriedStackTrace()||this._settledValue;this._setUnhandledRejectionIsNotified(),r.fireRejectionEvent("unhandledRejection",i,t,this)}},e.prototype._setUnhandledRejectionIsNotified=function(){this._bitField=524288|this._bitField},e.prototype._unsetUnhandledRejectionIsNotified=function(){this._bitField=-524289&this._bitField},e.prototype._isUnhandledRejectionNotified=function(){return(524288&this._bitField)>0},e.prototype._setRejectionIsUnhandled=function(){this._bitField=2097152|this._bitField},e.prototype._unsetRejectionIsUnhandled=function(){this._bitField=-2097153&this._bitField,this._isUnhandledRejectionNotified()&&(this._unsetUnhandledRejectionIsNotified(),this._notifyUnhandledRejectionIsHandled())},e.prototype._isRejectionUnhandled=function(){return(2097152&this._bitField)>0},e.prototype._setCarriedStackTrace=function(t){this._bitField=1048576|this._bitField,this._fulfillmentHandler0=t},e.prototype._isCarryingStackTrace=function(){return(1048576&this._bitField)>0},e.prototype._getCarriedStackTrace=function(){return this._isCarryingStackTrace()?this._fulfillmentHandler0:void 0},e.prototype._captureStackTrace=function(){return l&&(this._trace=new r(this._peekContext())),this},e.prototype._attachExtraTrace=function(t,e){if(l&&c(t)){var n=this._trace;if(void 0!==n&&e&&(n=n._parent),void 0!==n)n.attachExtraTrace(t);else if(!t.__stackCleaned__){var i=r.parseStackAndMessage(t);u.notEnumerableProp(t,"stack",i.message+"\n"+i.stack.join("\n")),u.notEnumerableProp(t,"__stackCleaned__",!0)}}},e.prototype._warn=function(t){var e=new a(t),n=this._peekContext();if(n)n.attachExtraTrace(e);else{var i=r.parseStackAndMessage(e);e.stack=i.message+"\n"+i.stack.join("\n")}r.formatAndLogError(e,"")},e.onPossiblyUnhandledRejection=function(t){var e=o();i="function"==typeof t?null===e?t:e.bind(t):void 0},e.onUnhandledRejectionHandled=function(t){var e=o();n="function"==typeof t?null===e?t:e.bind(t):void 0},e.longStackTraces=function(){if(s.haveItemsQueued()&&l===!1)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/DT1qyG\n");l=r.isSupported(),l&&s.disableTrampolineIfNecessary()},e.hasLongStackTraces=function(){return l&&r.isSupported()},r.isSupported()||(e.longStackTraces=function(){},l=!1),function(){return l}}},{"./async.js":2,"./errors.js":13,"./util.js":38}],11:[function(t,e){"use strict";var r=t("./util.js"),n=r.isPrimitive;e.exports=function(t){var e=function(){return this},r=function(){throw this},i=function(){},o=function(){throw void 0},s=function(t,e){return 1===e?function(){throw t}:2===e?function(){return t}:void 0};t.prototype["return"]=t.prototype.thenReturn=function(t){return void 0===t?this.then(i):n(t)?this._then(s(t,2),void 0,void 0,void 0,void 0):this._then(e,void 0,void 0,t,void 0)},t.prototype["throw"]=t.prototype.thenThrow=function(t){return void 0===t?this.then(o):n(t)?this._then(s(t,1),void 0,void 0,void 0,void 0):this._then(r,void 0,void 0,t,void 0)}}},{"./util.js":38}],12:[function(t,e){"use 
strict";e.exports=function(t,e){var r=t.reduce;t.prototype.each=function(t){return r(this,t,null,e)},t.each=function(t,n){return r(t,n,null,e)}}},{}],13:[function(t,e){"use strict";function r(t,e){function r(n){return this instanceof r?(l(this,"message","string"==typeof n?n:e),l(this,"name",t),void(Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):Error.call(this))):new r(n)}return c(r,Error),r}function n(t){return this instanceof n?(l(this,"name","OperationalError"),l(this,"message",t),this.cause=t,this.isOperational=!0,void(t instanceof Error?(l(this,"message",t.message),l(this,"stack",t.stack)):Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor))):new n(t)}var i,o,s=t("./es5.js"),a=s.freeze,u=t("./util.js"),c=u.inherits,l=u.notEnumerableProp,h=r("Warning","warning"),p=r("CancellationError","cancellation error"),f=r("TimeoutError","timeout error"),_=r("AggregateError","aggregate error");try{i=TypeError,o=RangeError}catch(d){i=r("TypeError","type error"),o=r("RangeError","range error")}for(var v="join pop push shift unshift slice filter forEach some every map indexOf lastIndexOf reduce reduceRight sort reverse".split(" "),y=0;y<v.length;++y)"function"==typeof Array.prototype[v[y]]&&(_.prototype[v[y]]=Array.prototype[v[y]]);s.defineProperty(_.prototype,"length",{value:0,configurable:!1,writable:!0,enumerable:!0}),_.prototype.isOperational=!0;var g=0;_.prototype.toString=function(){var t=Array(4*g+1).join(" "),e="\n"+t+"AggregateError of:\n";g++,t=Array(4*g+1).join(" ");for(var r=0;r<this.length;++r){for(var n=this[r]===this?"[Circular AggregateError]":this[r]+"",i=n.split("\n"),o=0;o<i.length;++o)i[o]=t+i[o];n=i.join("\n"),e+=n+"\n"}return g--,e},c(n,Error);var m=Error.__BluebirdErrorTypes__;m||(m=a({CancellationError:p,TimeoutError:f,OperationalError:n,RejectionError:n,AggregateError:_}),l(Error,"__BluebirdErrorTypes__",m)),e.exports={Error:Error,TypeError:i,RangeError:o,CancellationError:m.CancellationError,OperationalError:m.OperationalError,TimeoutError:m.TimeoutError,AggregateError:m.AggregateError,Warning:h}},{"./es5.js":14,"./util.js":38}],14:[function(t,e){var r=function(){"use strict";return void 0===this}();if(r)e.exports={freeze:Object.freeze,defineProperty:Object.defineProperty,getDescriptor:Object.getOwnPropertyDescriptor,keys:Object.keys,names:Object.getOwnPropertyNames,getPrototypeOf:Object.getPrototypeOf,isArray:Array.isArray,isES5:r,propertyIsWritable:function(t,e){var r=Object.getOwnPropertyDescriptor(t,e);return!(r&&!r.writable&&!r.set)}};else{var n={}.hasOwnProperty,i={}.toString,o={}.constructor.prototype,s=function(t){var e=[];for(var r in t)n.call(t,r)&&e.push(r);return e},a=function(t,e){return{value:t[e]}},u=function(t,e,r){return t[e]=r.value,t},c=function(t){return t},l=function(t){try{return Object(t).constructor.prototype}catch(e){return o}},h=function(t){try{return"[object Array]"===i.call(t)}catch(e){return!1}};e.exports={isArray:h,keys:s,names:s,defineProperty:u,getDescriptor:a,freeze:c,getPrototypeOf:l,isES5:r,propertyIsWritable:function(){return!0}}}},{}],15:[function(t,e){"use strict";e.exports=function(t,e){var r=t.map;t.prototype.filter=function(t,n){return r(this,t,n,e)},t.filter=function(t,n,i){return r(t,n,i,e)}}},{}],16:[function(t,e){"use strict";e.exports=function(e,r,n){function i(){return this}function o(){throw this}function s(t){return function(){return t}}function a(t){return function(){throw t}}function u(t,e,r){var n;return n=p(e)?r?s(e):a(e):r?i:o,t._then(n,f,void 0,e,void 0)}function c(t){var 
i=this.promise,o=this.handler,s=i._isBound()?o.call(i._boundValue()):o();if(void 0!==s){var a=n(s,i);if(a instanceof e)return a=a._target(),u(a,t,i.isFulfilled())}return i.isRejected()?(r.e=t,r):t}function l(t){var r=this.promise,i=this.handler,o=r._isBound()?i.call(r._boundValue(),t):i(t);if(void 0!==o){var s=n(o,r);if(s instanceof e)return s=s._target(),u(s,t,!0)}return t}var h=t("./util.js"),p=h.isPrimitive,f=h.thrower;e.prototype._passThroughHandler=function(t,e){if("function"!=typeof t)return this.then();var r={promise:this,handler:t};return this._then(e?c:l,e?c:void 0,void 0,r,void 0)},e.prototype.lastly=e.prototype["finally"]=function(t){return this._passThroughHandler(t,!0)},e.prototype.tap=function(t){return this._passThroughHandler(t,!1)}}},{"./util.js":38}],17:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t,r,n){for(var o=0;o<r.length;++o){n._pushContext();var s=h(r[o])(t);if(n._popContext(),s===l){n._pushContext();var a=e.reject(l.e);return n._popContext(),a}var u=i(s,n);if(u instanceof e)return u}return null}function s(t,r,i,o){var s=this._promise=new e(n);s._captureStackTrace(),this._stack=o,this._generatorFunction=t,this._receiver=r,this._generator=void 0,this._yieldHandlers="function"==typeof i?[i].concat(p):p}var a=t("./errors.js"),u=a.TypeError,c=t("./util.js"),l=c.errorObj,h=c.tryCatch,p=[];s.prototype.promise=function(){return this._promise},s.prototype._run=function(){this._generator=this._generatorFunction.call(this._receiver),this._receiver=this._generatorFunction=void 0,this._next(void 0)},s.prototype._continue=function(t){if(t===l)return this._promise._rejectCallback(t.e,!1,!0);var r=t.value;if(t.done===!0)this._promise._resolveCallback(r);else{var n=i(r,this._promise);if(!(n instanceof e)&&(n=o(n,this._yieldHandlers,this._promise),null===n))return void this._throw(new u("A value %s was yielded that could not be treated as a promise\n\n See http://goo.gl/4Y4pDk\n\n".replace("%s",r)+"From coroutine:\n"+this._stack.split("\n").slice(1,-7).join("\n")));n._then(this._next,this._throw,void 0,this,null)}},s.prototype._throw=function(t){this._promise._attachExtraTrace(t),this._promise._pushContext();var e=h(this._generator["throw"]).call(this._generator,t);this._promise._popContext(),this._continue(e)},s.prototype._next=function(t){this._promise._pushContext();var e=h(this._generator.next).call(this._generator,t);this._promise._popContext(),this._continue(e)},e.coroutine=function(t,e){if("function"!=typeof t)throw new u("generatorFunction must be a function\n\n See http://goo.gl/6Vqhm0\n");var r=Object(e).yieldHandler,n=s,i=(new Error).stack;return function(){var e=t.apply(this,arguments),o=new n(void 0,void 0,r,i);return o._generator=e,o._next(void 0),o.promise()}},e.coroutine.addYieldHandler=function(t){if("function"!=typeof t)throw new u("fn must be a function\n\n See http://goo.gl/916lJJ\n");p.push(t)},e.spawn=function(t){if("function"!=typeof t)return r("generatorFunction must be a function\n\n See http://goo.gl/6Vqhm0\n");var n=new s(t,this),i=n.promise();return n._run(e.spawn),i}}},{"./errors.js":13,"./util.js":38}],18:[function(t,e){"use strict";e.exports=function(e,r,n,i){{var o=t("./util.js");o.canEvaluate,o.tryCatch,o.errorObj}e.join=function(){var t,e=arguments.length-1;if(e>0&&"function"==typeof arguments[e]){t=arguments[e];var n}for(var i=arguments.length,o=new Array(i),s=0;i>s;++s)o[s]=arguments[s];t&&o.pop();var n=new r(o).promise();return void 0!==t?n.spread(t):n}}},{"./util.js":38}],19:[function(t,e){"use 
strict";e.exports=function(e,r,n,i,o){function s(t,e,r,n){this.constructor$(t),this._promise._captureStackTrace();var i=c();this._callback=null===i?e:i.bind(e),this._preservedValues=n===o?new Array(this.length()):null,this._limit=r,this._inFlight=0,this._queue=r>=1?[]:d,l.invoke(a,this,void 0)}function a(){this._init$(void 0,-2)}function u(t,e,r,n){var i="object"==typeof r&&null!==r?r.concurrency:0;return i="number"==typeof i&&isFinite(i)&&i>=1?i:0,new s(t,e,i,n)}var c=e._getDomain,l=t("./async.js"),h=t("./util.js"),p=h.tryCatch,f=h.errorObj,_={},d=[];h.inherits(s,r),s.prototype._init=function(){},s.prototype._promiseFulfilled=function(t,r){var n=this._values,o=this.length(),s=this._preservedValues,a=this._limit;if(n[r]===_){if(n[r]=t,a>=1&&(this._inFlight--,this._drainQueue(),this._isResolved()))return}else{if(a>=1&&this._inFlight>=a)return n[r]=t,void this._queue.push(r);null!==s&&(s[r]=t);var u=this._callback,c=this._promise._boundValue();this._promise._pushContext();var l=p(u).call(c,t,r,o);if(this._promise._popContext(),l===f)return this._reject(l.e);var h=i(l,this._promise);if(h instanceof e){if(h=h._target(),h._isPending())return a>=1&&this._inFlight++,n[r]=_,h._proxyPromiseArray(this,r);if(!h._isFulfilled())return this._reject(h._reason());l=h._value()}n[r]=l}var d=++this._totalResolved;d>=o&&(null!==s?this._filter(n,s):this._resolve(n))},s.prototype._drainQueue=function(){for(var t=this._queue,e=this._limit,r=this._values;t.length>0&&this._inFlight<e;){if(this._isResolved())return;var n=t.pop();this._promiseFulfilled(r[n],n)}},s.prototype._filter=function(t,e){for(var r=e.length,n=new Array(r),i=0,o=0;r>o;++o)t[o]&&(n[i++]=e[o]);n.length=i,this._resolve(n)},s.prototype.preservedValues=function(){return this._preservedValues},e.prototype.map=function(t,e){return"function"!=typeof t?n("fn must be a function\n\n See http://goo.gl/916lJJ\n"):u(this,t,e,null).promise()},e.map=function(t,e,r,i){return"function"!=typeof e?n("fn must be a function\n\n See http://goo.gl/916lJJ\n"):u(t,e,r,i).promise()}}},{"./async.js":2,"./util.js":38}],20:[function(t,e){"use strict";e.exports=function(e,r,n,i){var o=t("./util.js"),s=o.tryCatch;e.method=function(t){if("function"!=typeof t)throw new e.TypeError("fn must be a function\n\n See http://goo.gl/916lJJ\n");return function(){var n=new e(r);n._captureStackTrace(),n._pushContext();var i=s(t).apply(this,arguments);return n._popContext(),n._resolveFromSyncValue(i),n}},e.attempt=e["try"]=function(t,n,a){if("function"!=typeof t)return i("fn must be a function\n\n See http://goo.gl/916lJJ\n");var u=new e(r);u._captureStackTrace(),u._pushContext();var c=o.isArray(n)?s(t).apply(a,n):s(t).call(a,n);return u._popContext(),u._resolveFromSyncValue(c),u},e.prototype._resolveFromSyncValue=function(t){t===o.errorObj?this._rejectCallback(t.e,!1,!0):this._resolveCallback(t,!0)}}},{"./util.js":38}],21:[function(t,e){"use strict";e.exports=function(e){function r(t,e){var r=this;if(!o.isArray(t))return n.call(r,t,e);var i=a(e).apply(r._boundValue(),[null].concat(t));i===u&&s.throwLater(i.e)}function n(t,e){var r=this,n=r._boundValue(),i=void 0===t?a(e).call(n,null):a(e).call(n,null,t);i===u&&s.throwLater(i.e)}function i(t,e){var r=this;if(!t){var n=r._target(),i=n._getCarriedStackTrace();i.cause=t,t=i}var o=a(e).call(r._boundValue(),t);o===u&&s.throwLater(o.e)}var o=t("./util.js"),s=t("./async.js"),a=o.tryCatch,u=o.errorObj;e.prototype.asCallback=e.prototype.nodeify=function(t,e){if("function"==typeof t){var o=n;void 0!==e&&Object(e).spread&&(o=r),this._then(o,i,void 
0,this,t)}return this}}},{"./async.js":2,"./util.js":38}],22:[function(t,e){"use strict";e.exports=function(e,r){var n=t("./util.js"),i=t("./async.js"),o=n.tryCatch,s=n.errorObj;e.prototype.progressed=function(t){return this._then(void 0,void 0,t,void 0,void 0)},e.prototype._progress=function(t){this._isFollowingOrFulfilledOrRejected()||this._target()._progressUnchecked(t)},e.prototype._progressHandlerAt=function(t){return 0===t?this._progressHandler0:this[(t<<2)+t-5+2]},e.prototype._doProgressWith=function(t){var r=t.value,i=t.handler,a=t.promise,u=t.receiver,c=o(i).call(u,r);if(c===s){if(null!=c.e&&"StopProgressPropagation"!==c.e.name){var l=n.canAttachTrace(c.e)?c.e:new Error(n.toString(c.e));a._attachExtraTrace(l),a._progress(c.e)}}else c instanceof e?c._then(a._progress,null,null,a,void 0):a._progress(c)},e.prototype._progressUnchecked=function(t){for(var n=this._length(),o=this._progress,s=0;n>s;s++){var a=this._progressHandlerAt(s),u=this._promiseAt(s);if(u instanceof e)"function"==typeof a?i.invoke(this._doProgressWith,this,{handler:a,promise:u,receiver:this._receiverAt(s),value:t}):i.invoke(o,u,t);else{var c=this._receiverAt(s);"function"==typeof a?a.call(c,t,u):c instanceof r&&!c._isResolved()&&c._promiseProgressed(t,u)}}}}},{"./async.js":2,"./util.js":38}],23:[function(t,e){"use strict";e.exports=function(){function e(t){if("function"!=typeof t)throw new l("the promise constructor requires a resolver function\n\n See http://goo.gl/EC22Yn\n");if(this.constructor!==e)throw new l("the promise constructor cannot be invoked directly\n\n See http://goo.gl/KsIlge\n");this._bitField=0,this._fulfillmentHandler0=void 0,this._rejectionHandler0=void 0,this._progressHandler0=void 0,this._promise0=void 0,this._receiver0=void 0,this._settledValue=void 0,t!==h&&this._resolveFromResolver(t)}function r(t){var r=new e(h);r._fulfillmentHandler0=t,r._rejectionHandler0=t,r._progressHandler0=t,r._promise0=t,r._receiver0=t,r._settledValue=t}var n,i=function(){return new l("circular promise resolution chain\n\n See http://goo.gl/LhFpo0\n")},o=function(){return new e.PromiseInspection(this._target())},s=function(t){return e.reject(new l(t))},a=t("./util.js");n=a.isNode?function(){var t=process.domain;return void 0===t&&(t=null),t}:function(){return null},a.notEnumerableProp(e,"_getDomain",n);var u=t("./async.js"),c=t("./errors.js"),l=e.TypeError=c.TypeError;e.RangeError=c.RangeError,e.CancellationError=c.CancellationError,e.TimeoutError=c.TimeoutError,e.OperationalError=c.OperationalError,e.RejectionError=c.OperationalError,e.AggregateError=c.AggregateError;var h=function(){},p={},f={e:null},_=t("./thenables.js")(e,h),d=t("./promise_array.js")(e,h,_,s),v=t("./captured_trace.js")(),y=t("./debuggability.js")(e,v),g=t("./context.js")(e,v,y),m=t("./catch_filter.js")(f),j=t("./promise_resolver.js"),b=j._nodebackForPromise,w=a.errorObj,k=a.tryCatch;return e.prototype.toString=function(){return"[object Promise]"},e.prototype.caught=e.prototype["catch"]=function(t){var r=arguments.length;if(r>1){var n,i=new Array(r-1),o=0;for(n=0;r-1>n;++n){var s=arguments[n];if("function"!=typeof s)return e.reject(new l("Catch filter must inherit from Error or be a simple predicate function\n\n See http://goo.gl/o84o68\n"));i[o++]=s}i.length=o,t=arguments[n];var a=new m(i,t,this);return this._then(void 0,a.doFilter,void 0,a,void 0)}return this._then(void 0,t,void 0,void 0,void 0)},e.prototype.reflect=function(){return this._then(o,o,void 0,this,void 
0)},e.prototype.then=function(t,e,r){if(y()&&arguments.length>0&&"function"!=typeof t&&"function"!=typeof e){var n=".then() only accepts functions but was passed: "+a.classString(t);arguments.length>1&&(n+=", "+a.classString(e)),this._warn(n)}return this._then(t,e,r,void 0,void 0)},e.prototype.done=function(t,e,r){var n=this._then(t,e,r,void 0,void 0);n._setIsFinal()},e.prototype.spread=function(t,e){return this.all()._then(t,e,void 0,p,void 0)
+},e.prototype.isCancellable=function(){return!this.isResolved()&&this._cancellable()},e.prototype.toJSON=function(){var t={isFulfilled:!1,isRejected:!1,fulfillmentValue:void 0,rejectionReason:void 0};return this.isFulfilled()?(t.fulfillmentValue=this.value(),t.isFulfilled=!0):this.isRejected()&&(t.rejectionReason=this.reason(),t.isRejected=!0),t},e.prototype.all=function(){return new d(this).promise()},e.prototype.error=function(t){return this.caught(a.originatesFromRejection,t)},e.is=function(t){return t instanceof e},e.fromNode=function(t){var r=new e(h),n=k(t)(b(r));return n===w&&r._rejectCallback(n.e,!0,!0),r},e.all=function(t){return new d(t).promise()},e.defer=e.pending=function(){var t=new e(h);return new j(t)},e.cast=function(t){var r=_(t);if(!(r instanceof e)){var n=r;r=new e(h),r._fulfillUnchecked(n)}return r},e.resolve=e.fulfilled=e.cast,e.reject=e.rejected=function(t){var r=new e(h);return r._captureStackTrace(),r._rejectCallback(t,!0),r},e.setScheduler=function(t){if("function"!=typeof t)throw new l("fn must be a function\n\n See http://goo.gl/916lJJ\n");var e=u._schedule;return u._schedule=t,e},e.prototype._then=function(t,r,i,o,s){var a=void 0!==s,c=a?s:new e(h);a||(c._propagateFrom(this,5),c._captureStackTrace());var l=this._target();l!==this&&(void 0===o&&(o=this._boundTo),a||c._setIsMigrated());var p=l._addCallbacks(t,r,i,c,o,n());return l._isResolved()&&!l._isSettlePromisesQueued()&&u.invoke(l._settlePromiseAtPostResolution,l,p),c},e.prototype._settlePromiseAtPostResolution=function(t){this._isRejectionUnhandled()&&this._unsetRejectionIsUnhandled(),this._settlePromiseAt(t)},e.prototype._length=function(){return 131071&this._bitField},e.prototype._isFollowingOrFulfilledOrRejected=function(){return(939524096&this._bitField)>0},e.prototype._isFollowing=function(){return 536870912===(536870912&this._bitField)},e.prototype._setLength=function(t){this._bitField=-131072&this._bitField|131071&t},e.prototype._setFulfilled=function(){this._bitField=268435456|this._bitField},e.prototype._setRejected=function(){this._bitField=134217728|this._bitField},e.prototype._setFollowing=function(){this._bitField=536870912|this._bitField},e.prototype._setIsFinal=function(){this._bitField=33554432|this._bitField},e.prototype._isFinal=function(){return(33554432&this._bitField)>0},e.prototype._cancellable=function(){return(67108864&this._bitField)>0},e.prototype._setCancellable=function(){this._bitField=67108864|this._bitField},e.prototype._unsetCancellable=function(){this._bitField=-67108865&this._bitField},e.prototype._setIsMigrated=function(){this._bitField=4194304|this._bitField},e.prototype._unsetIsMigrated=function(){this._bitField=-4194305&this._bitField},e.prototype._isMigrated=function(){return(4194304&this._bitField)>0},e.prototype._receiverAt=function(t){var e=0===t?this._receiver0:this[5*t-5+4];return void 0===e&&this._isBound()?this._boundValue():e},e.prototype._promiseAt=function(t){return 0===t?this._promise0:this[5*t-5+3]},e.prototype._fulfillmentHandlerAt=function(t){return 0===t?this._fulfillmentHandler0:this[5*t-5+0]},e.prototype._rejectionHandlerAt=function(t){return 0===t?this._rejectionHandler0:this[5*t-5+1]},e.prototype._boundValue=function(){var t=this._boundTo;return void 0!==t&&t instanceof e?t.isFulfilled()?t.value():void 0:t},e.prototype._migrateCallbacks=function(t,r){var n=t._fulfillmentHandlerAt(r),i=t._rejectionHandlerAt(r),o=t._progressHandlerAt(r),s=t._promiseAt(r),a=t._receiverAt(r);s instanceof 
e&&s._setIsMigrated(),this._addCallbacks(n,i,o,s,a,null)},e.prototype._addCallbacks=function(t,e,r,n,i,o){var s=this._length();if(s>=131066&&(s=0,this._setLength(0)),0===s)this._promise0=n,void 0!==i&&(this._receiver0=i),"function"!=typeof t||this._isCarryingStackTrace()||(this._fulfillmentHandler0=null===o?t:o.bind(t)),"function"==typeof e&&(this._rejectionHandler0=null===o?e:o.bind(e)),"function"==typeof r&&(this._progressHandler0=null===o?r:o.bind(r));else{var a=5*s-5;this[a+3]=n,this[a+4]=i,"function"==typeof t&&(this[a+0]=null===o?t:o.bind(t)),"function"==typeof e&&(this[a+1]=null===o?e:o.bind(e)),"function"==typeof r&&(this[a+2]=null===o?r:o.bind(r))}return this._setLength(s+1),s},e.prototype._setProxyHandlers=function(t,e){var r=this._length();if(r>=131066&&(r=0,this._setLength(0)),0===r)this._promise0=e,this._receiver0=t;else{var n=5*r-5;this[n+3]=e,this[n+4]=t}this._setLength(r+1)},e.prototype._proxyPromiseArray=function(t,e){this._setProxyHandlers(t,e)},e.prototype._resolveCallback=function(t,r){if(!this._isFollowingOrFulfilledOrRejected()){if(t===this)return this._rejectCallback(i(),!1,!0);var n=_(t,this);if(!(n instanceof e))return this._fulfill(t);var o=1|(r?4:0);this._propagateFrom(n,o);var s=n._target();if(s._isPending()){for(var a=this._length(),u=0;a>u;++u)s._migrateCallbacks(this,u);this._setFollowing(),this._setLength(0),this._setFollowee(s)}else s._isFulfilled()?this._fulfillUnchecked(s._value()):this._rejectUnchecked(s._reason(),s._getCarriedStackTrace())}},e.prototype._rejectCallback=function(t,e,r){r||a.markAsOriginatingFromRejection(t);var n=a.ensureErrorObject(t),i=n===t;this._attachExtraTrace(n,e?i:!1),this._reject(t,i?void 0:n)},e.prototype._resolveFromResolver=function(t){var e=this;this._captureStackTrace(),this._pushContext();var r=!0,n=k(t)(function(t){null!==e&&(e._resolveCallback(t),e=null)},function(t){null!==e&&(e._rejectCallback(t,r),e=null)});r=!1,this._popContext(),void 0!==n&&n===w&&null!==e&&(e._rejectCallback(n.e,!0,!0),e=null)},e.prototype._settlePromiseFromHandler=function(t,e,r,n){if(!n._isRejected()){n._pushContext();var o;if(o=e!==p||this._isRejected()?k(t).call(e,r):k(t).apply(this._boundValue(),r),n._popContext(),o===w||o===n||o===f){var s=o===n?i():o.e;n._rejectCallback(s,!1,!0)}else n._resolveCallback(o)}},e.prototype._target=function(){for(var t=this;t._isFollowing();)t=t._followee();return t},e.prototype._followee=function(){return this._rejectionHandler0},e.prototype._setFollowee=function(t){this._rejectionHandler0=t},e.prototype._cleanValues=function(){this._cancellable()&&(this._cancellationParent=void 0)},e.prototype._propagateFrom=function(t,e){(1&e)>0&&t._cancellable()&&(this._setCancellable(),this._cancellationParent=t),(4&e)>0&&t._isBound()&&this._setBoundTo(t._boundTo)},e.prototype._fulfill=function(t){this._isFollowingOrFulfilledOrRejected()||this._fulfillUnchecked(t)},e.prototype._reject=function(t,e){this._isFollowingOrFulfilledOrRejected()||this._rejectUnchecked(t,e)},e.prototype._settlePromiseAt=function(t){var r=this._promiseAt(t),n=r instanceof e;if(n&&r._isMigrated())return r._unsetIsMigrated(),u.invoke(this._settlePromiseAt,this,t);var i=this._isFulfilled()?this._fulfillmentHandlerAt(t):this._rejectionHandlerAt(t),o=this._isCarryingStackTrace()?this._getCarriedStackTrace():void 0,s=this._settledValue,a=this._receiverAt(t);this._clearCallbackDataAtIndex(t),"function"==typeof i?n?this._settlePromiseFromHandler(i,a,s,r):i.call(a,s,r):a instanceof 
d?a._isResolved()||(this._isFulfilled()?a._promiseFulfilled(s,r):a._promiseRejected(s,r)):n&&(this._isFulfilled()?r._fulfill(s):r._reject(s,o)),t>=4&&4===(31&t)&&u.invokeLater(this._setLength,this,0)},e.prototype._clearCallbackDataAtIndex=function(t){if(0===t)this._isCarryingStackTrace()||(this._fulfillmentHandler0=void 0),this._rejectionHandler0=this._progressHandler0=this._receiver0=this._promise0=void 0;else{var e=5*t-5;this[e+3]=this[e+4]=this[e+0]=this[e+1]=this[e+2]=void 0}},e.prototype._isSettlePromisesQueued=function(){return-1073741824===(-1073741824&this._bitField)},e.prototype._setSettlePromisesQueued=function(){this._bitField=-1073741824|this._bitField},e.prototype._unsetSettlePromisesQueued=function(){this._bitField=1073741823&this._bitField},e.prototype._queueSettlePromises=function(){u.settlePromises(this),this._setSettlePromisesQueued()},e.prototype._fulfillUnchecked=function(t){if(t===this){var e=i();return this._attachExtraTrace(e),this._rejectUnchecked(e,void 0)}this._setFulfilled(),this._settledValue=t,this._cleanValues(),this._length()>0&&this._queueSettlePromises()},e.prototype._rejectUncheckedCheckError=function(t){var e=a.ensureErrorObject(t);this._rejectUnchecked(t,e===t?void 0:e)},e.prototype._rejectUnchecked=function(t,e){if(t===this){var r=i();return this._attachExtraTrace(r),this._rejectUnchecked(r)}return this._setRejected(),this._settledValue=t,this._cleanValues(),this._isFinal()?void u.throwLater(function(t){throw"stack"in t&&u.invokeFirst(v.unhandledRejection,void 0,t),t},void 0===e?t:e):(void 0!==e&&e!==t&&this._setCarriedStackTrace(e),void(this._length()>0?this._queueSettlePromises():this._ensurePossibleRejectionHandled()))},e.prototype._settlePromises=function(){this._unsetSettlePromisesQueued();for(var t=this._length(),e=0;t>e;e++)this._settlePromiseAt(e)},a.notEnumerableProp(e,"_makeSelfResolutionError",i),t("./progress.js")(e,d),t("./method.js")(e,h,_,s),t("./bind.js")(e,h,_),t("./finally.js")(e,f,_),t("./direct_resolve.js")(e),t("./synchronous_inspection.js")(e),t("./join.js")(e,d,_,h),e.Promise=e,t("./map.js")(e,d,s,_,h),t("./cancel.js")(e),t("./using.js")(e,s,_,g),t("./generators.js")(e,s,h,_),t("./nodeify.js")(e),t("./call_get.js")(e),t("./props.js")(e,d,_,s),t("./race.js")(e,h,_,s),t("./reduce.js")(e,d,s,_,h),t("./settle.js")(e,d),t("./some.js")(e,d,s),t("./promisify.js")(e,h),t("./any.js")(e),t("./each.js")(e,h),t("./timers.js")(e,h),t("./filter.js")(e,h),a.toFastProperties(e),a.toFastProperties(e.prototype),r({a:1}),r({b:2}),r({c:3}),r(1),r(function(){}),r(void 0),r(!1),r(new e(h)),v.setBounds(u.firstLineError,a.lastLineError),e}},{"./any.js":1,"./async.js":2,"./bind.js":3,"./call_get.js":5,"./cancel.js":6,"./captured_trace.js":7,"./catch_filter.js":8,"./context.js":9,"./debuggability.js":10,"./direct_resolve.js":11,"./each.js":12,"./errors.js":13,"./filter.js":15,"./finally.js":16,"./generators.js":17,"./join.js":18,"./map.js":19,"./method.js":20,"./nodeify.js":21,"./progress.js":22,"./promise_array.js":24,"./promise_resolver.js":25,"./promisify.js":26,"./props.js":27,"./race.js":29,"./reduce.js":30,"./settle.js":32,"./some.js":33,"./synchronous_inspection.js":34,"./thenables.js":35,"./timers.js":36,"./using.js":37,"./util.js":38}],24:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t){switch(t){case-2:return[];case-3:return{}}}function s(t){var n,i=this._promise=new e(r);t instanceof e&&(n=t,i._propagateFrom(n,5)),this._values=t,this._length=0,this._totalResolved=0,this._init(void 0,-2)}var 
a=t("./util.js"),u=a.isArray;return s.prototype.length=function(){return this._length},s.prototype.promise=function(){return this._promise},s.prototype._init=function c(t,r){var s=n(this._values,this._promise);if(s instanceof e){if(s=s._target(),this._values=s,!s._isFulfilled())return s._isPending()?void s._then(c,this._reject,void 0,this,r):void this._reject(s._reason());if(s=s._value(),!u(s)){var a=new e.TypeError("expecting an array, a promise or a thenable\n\n See http://goo.gl/s8MMhc\n");return void this.__hardReject__(a)}}else if(!u(s))return void this._promise._reject(i("expecting an array, a promise or a thenable\n\n See http://goo.gl/s8MMhc\n")._reason());if(0===s.length)return void(-5===r?this._resolveEmptyArray():this._resolve(o(r)));var l=this.getActualLength(s.length);this._length=l,this._values=this.shouldCopyValues()?new Array(l):this._values;for(var h=this._promise,p=0;l>p;++p){var f=this._isResolved(),_=n(s[p],h);_ instanceof e?(_=_._target(),f?_._ignoreRejections():_._isPending()?_._proxyPromiseArray(this,p):_._isFulfilled()?this._promiseFulfilled(_._value(),p):this._promiseRejected(_._reason(),p)):f||this._promiseFulfilled(_,p)}},s.prototype._isResolved=function(){return null===this._values},s.prototype._resolve=function(t){this._values=null,this._promise._fulfill(t)},s.prototype.__hardReject__=s.prototype._reject=function(t){this._values=null,this._promise._rejectCallback(t,!1,!0)},s.prototype._promiseProgressed=function(t,e){this._promise._progress({index:e,value:t})},s.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var r=++this._totalResolved;r>=this._length&&this._resolve(this._values)},s.prototype._promiseRejected=function(t){this._totalResolved++,this._reject(t)},s.prototype.shouldCopyValues=function(){return!0},s.prototype.getActualLength=function(t){return t},s}},{"./util.js":38}],25:[function(t,e){"use strict";function r(t){return t instanceof Error&&p.getPrototypeOf(t)===Error.prototype}function n(t){var e;if(r(t)){e=new l(t),e.name=t.name,e.message=t.message,e.stack=t.stack;for(var n=p.keys(t),i=0;i<n.length;++i){var o=n[i];f.test(o)||(e[o]=t[o])}return e}return s.markAsOriginatingFromRejection(t),t}function i(t){return function(e,r){if(null!==t){if(e){var i=n(a(e));t._attachExtraTrace(i),t._reject(i)}else if(arguments.length>2){for(var o=arguments.length,s=new Array(o-1),u=1;o>u;++u)s[u-1]=arguments[u];t._fulfill(s)}else t._fulfill(r);t=null}}}var o,s=t("./util.js"),a=s.maybeWrapAsError,u=t("./errors.js"),c=u.TimeoutError,l=u.OperationalError,h=s.haveGetters,p=t("./es5.js"),f=/^(?:name|message|stack|cause)$/;if(o=h?function(t){this.promise=t}:function(t){this.promise=t,this.asCallback=i(t),this.callback=this.asCallback},h){var _={get:function(){return i(this.promise)}};p.defineProperty(o.prototype,"asCallback",_),p.defineProperty(o.prototype,"callback",_)}o._nodebackForPromise=i,o.prototype.toString=function(){return"[object PromiseResolver]"},o.prototype.resolve=o.prototype.fulfill=function(t){if(!(this instanceof o))throw new TypeError("Illegal invocation, resolver resolve/reject must be called within a resolver context. Consider using the promise constructor instead.\n\n See http://goo.gl/sdkXL9\n");this.promise._resolveCallback(t)},o.prototype.reject=function(t){if(!(this instanceof o))throw new TypeError("Illegal invocation, resolver resolve/reject must be called within a resolver context. 
Consider using the promise constructor instead.\n\n See http://goo.gl/sdkXL9\n");this.promise._rejectCallback(t)},o.prototype.progress=function(t){if(!(this instanceof o))throw new TypeError("Illegal invocation, resolver resolve/reject must be called within a resolver context. Consider using the promise constructor instead.\n\n See http://goo.gl/sdkXL9\n");this.promise._progress(t)},o.prototype.cancel=function(t){this.promise.cancel(t)},o.prototype.timeout=function(){this.reject(new c("timeout"))},o.prototype.isResolved=function(){return this.promise.isResolved()},o.prototype.toJSON=function(){return this.promise.toJSON()},e.exports=o},{"./errors.js":13,"./es5.js":14,"./util.js":38}],26:[function(t,e){"use strict";e.exports=function(e,r){function n(t){return!w.test(t)}function i(t){try{return t.__isPromisified__===!0}catch(e){return!1}}function o(t,e,r){var n=f.getDataPropertyOrDefault(t,e+r,j);return n?i(n):!1}function s(t,e,r){for(var n=0;n<t.length;n+=2){var i=t[n];if(r.test(i))for(var o=i.replace(r,""),s=0;s<t.length;s+=2)if(t[s]===o)throw new g("Cannot promisify an API that has normal methods with '%s'-suffix\n\n See http://goo.gl/iWrZbw\n".replace("%s",e))}}function a(t,e,r,n){for(var a=f.inheritedDataKeys(t),u=[],c=0;c<a.length;++c){var l=a[c],h=t[l],p=n===k?!0:k(l,h,t);"function"!=typeof h||i(h)||o(t,l,e)||!n(l,h,t,p)||u.push(l,h)}return s(u,e,r),u}function u(t,n,i,o){function s(){var i=n;n===p&&(i=this);var o=new e(r);o._captureStackTrace();var s="string"==typeof u&&this!==a?this[u]:t,c=_(o);try{s.apply(i,d(arguments,c))}catch(l){o._rejectCallback(v(l),!0,!0)}return o}var a=function(){return this}(),u=t;return"string"==typeof u&&(t=o),f.notEnumerableProp(s,"__isPromisified__",!0),s}function c(t,e,r,n){for(var i=new RegExp(E(e)+"$"),o=a(t,e,i,r),s=0,u=o.length;u>s;s+=2){var c=o[s],l=o[s+1],h=c+e;t[h]=n===F?F(c,p,c,l,e):n(l,function(){return F(c,p,c,l,e)})}return f.toFastProperties(t),t}function l(t,e){return F(t,e,void 0,t)}var h,p={},f=t("./util.js"),_=t("./promise_resolver.js")._nodebackForPromise,d=f.withAppended,v=f.maybeWrapAsError,y=f.canEvaluate,g=t("./errors").TypeError,m="Async",j={__isPromisified__:!0},b=["arity","length","name","arguments","caller","callee","prototype","__isPromisified__"],w=new RegExp("^(?:"+b.join("|")+")$"),k=function(t){return f.isIdentifier(t)&&"_"!==t.charAt(0)&&"constructor"!==t},E=function(t){return t.replace(/([$])/,"\\$")},F=y?h:u;e.promisify=function(t,e){if("function"!=typeof t)throw new g("fn must be a function\n\n See http://goo.gl/916lJJ\n");if(i(t))return t;var r=l(t,arguments.length<2?p:e);return f.copyDescriptors(t,r,n),r},e.promisifyAll=function(t,e){if("function"!=typeof t&&"object"!=typeof t)throw new g("the target of promisifyAll must be an object or a function\n\n See http://goo.gl/9ITlV0\n");e=Object(e);var r=e.suffix;"string"!=typeof r&&(r=m);var n=e.filter;"function"!=typeof n&&(n=k);var i=e.promisifier;if("function"!=typeof i&&(i=F),!f.isIdentifier(r))throw new RangeError("suffix must be a valid identifier\n\n See http://goo.gl/8FZo5V\n");for(var o=f.inheritedDataKeys(t),s=0;s<o.length;++s){var a=t[o[s]];"constructor"!==o[s]&&f.isClass(a)&&(c(a.prototype,r,n,i),c(a,r,n,i))}return c(t,r,n,i)}}},{"./errors":13,"./promise_resolver.js":25,"./util.js":38}],27:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t){for(var e=c.keys(t),r=e.length,n=new Array(2*r),i=0;r>i;++i){var o=e[i];n[i]=t[o],n[i+r]=o}this.constructor$(n)}function s(t){var r,s=n(t);return u(s)?(r=s instanceof e?s._then(e.props,void 0,void 0,void 
0,void 0):new o(s).promise(),s instanceof e&&r._propagateFrom(s,4),r):i("cannot await properties of a non-object\n\n See http://goo.gl/OsFKC8\n")}var a=t("./util.js"),u=a.isObject,c=t("./es5.js");a.inherits(o,r),o.prototype._init=function(){this._init$(void 0,-3)},o.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var r=++this._totalResolved;if(r>=this._length){for(var n={},i=this.length(),o=0,s=this.length();s>o;++o)n[this._values[o+i]]=this._values[o];this._resolve(n)}},o.prototype._promiseProgressed=function(t,e){this._promise._progress({key:this._values[e+this.length()],value:t})},o.prototype.shouldCopyValues=function(){return!1},o.prototype.getActualLength=function(t){return t>>1},e.prototype.props=function(){return s(this)},e.props=function(t){return s(t)}}},{"./es5.js":14,"./util.js":38}],28:[function(t,e){"use strict";function r(t,e,r,n,i){for(var o=0;i>o;++o)r[o+n]=t[o+e],t[o+e]=void 0}function n(t){this._capacity=t,this._length=0,this._front=0}n.prototype._willBeOverCapacity=function(t){return this._capacity<t},n.prototype._pushOne=function(t){var e=this.length();this._checkCapacity(e+1);var r=this._front+e&this._capacity-1;this[r]=t,this._length=e+1},n.prototype._unshiftOne=function(t){var e=this._capacity;this._checkCapacity(this.length()+1);var r=this._front,n=(r-1&e-1^e)-e;this[n]=t,this._front=n,this._length=this.length()+1},n.prototype.unshift=function(t,e,r){this._unshiftOne(r),this._unshiftOne(e),this._unshiftOne(t)},n.prototype.push=function(t,e,r){var n=this.length()+3;if(this._willBeOverCapacity(n))return this._pushOne(t),this._pushOne(e),void this._pushOne(r);var i=this._front+n-3;this._checkCapacity(n);var o=this._capacity-1;this[i+0&o]=t,this[i+1&o]=e,this[i+2&o]=r,this._length=n},n.prototype.shift=function(){var t=this._front,e=this[t];return this[t]=void 0,this._front=t+1&this._capacity-1,this._length--,e},n.prototype.length=function(){return this._length},n.prototype._checkCapacity=function(t){this._capacity<t&&this._resizeTo(this._capacity<<1)},n.prototype._resizeTo=function(t){var e=this._capacity;this._capacity=t;var n=this._front,i=this._length,o=n+i&e-1;r(this,0,this,e,o)},e.exports=n},{}],29:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t,o){var u=n(t);if(u instanceof e)return a(u);if(!s(t))return i("expecting an array, a promise or a thenable\n\n See http://goo.gl/s8MMhc\n");var c=new e(r);void 0!==o&&c._propagateFrom(o,5);for(var l=c._fulfill,h=c._reject,p=0,f=t.length;f>p;++p){var _=t[p];(void 0!==_||p in t)&&e.cast(_)._then(l,h,void 0,c,null)}return c}var s=t("./util.js").isArray,a=function(t){return t.then(function(e){return o(e,t)})};e.race=function(t){return o(t,void 0)},e.prototype.race=function(){return o(this,void 0)}}},{"./util.js":38}],30:[function(t,e){"use strict";e.exports=function(e,r,n,i,o){function s(t,r,n,s){this.constructor$(t),this._promise._captureStackTrace(),this._preservedValues=s===o?[]:null,this._zerothIsAccum=void 0===n,this._gotAccum=!1,this._reducingIndex=this._zerothIsAccum?1:0,this._valuesPhase=void 0;var u=i(n,this._promise),h=!1,p=u instanceof e;p&&(u=u._target(),u._isPending()?u._proxyPromiseArray(this,-1):u._isFulfilled()?(n=u._value(),this._gotAccum=!0):(this._reject(u._reason()),h=!0)),p||this._zerothIsAccum||(this._gotAccum=!0);var f=c();this._callback=null===f?r:f.bind(r),this._accum=n,h||l.invoke(a,this,void 0)}function a(){this._init$(void 0,-5)}function u(t,e,r,i){if("function"!=typeof e)return n("fn must be a function\n\n See http://goo.gl/916lJJ\n");var o=new s(t,e,r,i);return 
o.promise()}var c=e._getDomain,l=t("./async.js"),h=t("./util.js"),p=h.tryCatch,f=h.errorObj;h.inherits(s,r),s.prototype._init=function(){},s.prototype._resolveEmptyArray=function(){(this._gotAccum||this._zerothIsAccum)&&this._resolve(null!==this._preservedValues?[]:this._accum)},s.prototype._promiseFulfilled=function(t,r){var n=this._values;n[r]=t;var o,s=this.length(),a=this._preservedValues,u=null!==a,c=this._gotAccum,l=this._valuesPhase;if(!l)for(l=this._valuesPhase=new Array(s),o=0;s>o;++o)l[o]=0;if(o=l[r],0===r&&this._zerothIsAccum?(this._accum=t,this._gotAccum=c=!0,l[r]=0===o?1:2):-1===r?(this._accum=t,this._gotAccum=c=!0):0===o?l[r]=1:(l[r]=2,this._accum=t),c){for(var h,_=this._callback,d=this._promise._boundValue(),v=this._reducingIndex;s>v;++v)if(o=l[v],2!==o){if(1!==o)return;if(t=n[v],this._promise._pushContext(),u?(a.push(t),h=p(_).call(d,t,v,s)):h=p(_).call(d,this._accum,t,v,s),this._promise._popContext(),h===f)return this._reject(h.e);var y=i(h,this._promise);if(y instanceof e){if(y=y._target(),y._isPending())return l[v]=4,y._proxyPromiseArray(this,v);if(!y._isFulfilled())return this._reject(y._reason());h=y._value()}this._reducingIndex=v+1,this._accum=h}else this._reducingIndex=v+1;this._resolve(u?a:this._accum)}},e.prototype.reduce=function(t,e){return u(this,t,e,null)},e.reduce=function(t,e,r,n){return u(t,e,r,n)}}},{"./async.js":2,"./util.js":38}],31:[function(t,e){"use strict";var r,n=t("./util"),i=function(){throw new Error("No async scheduler available\n\n See http://goo.gl/m3OTXk\n")};if(n.isNode&&"undefined"==typeof MutationObserver){var o=global.setImmediate,s=process.nextTick;r=n.isRecentNode?function(t){o.call(global,t)}:function(t){s.call(process,t)}}else"undefined"==typeof MutationObserver||"undefined"!=typeof window&&window.navigator&&window.navigator.standalone?r="undefined"!=typeof setImmediate?function(t){setImmediate(t)}:"undefined"!=typeof setTimeout?function(t){setTimeout(t,0)}:i:(r=function(t){var e=document.createElement("div"),r=new MutationObserver(t);return r.observe(e,{attributes:!0}),function(){e.classList.toggle("foo")}},r.isStatic=!0);e.exports=r},{"./util":38}],32:[function(t,e){"use strict";e.exports=function(e,r){function n(t){this.constructor$(t)}var i=e.PromiseInspection,o=t("./util.js");o.inherits(n,r),n.prototype._promiseResolved=function(t,e){this._values[t]=e;var r=++this._totalResolved;r>=this._length&&this._resolve(this._values)},n.prototype._promiseFulfilled=function(t,e){var r=new i;r._bitField=268435456,r._settledValue=t,this._promiseResolved(e,r)},n.prototype._promiseRejected=function(t,e){var r=new i;r._bitField=134217728,r._settledValue=t,this._promiseResolved(e,r)},e.settle=function(t){return new n(t).promise()},e.prototype.settle=function(){return new n(this).promise()}}},{"./util.js":38}],33:[function(t,e){"use strict";e.exports=function(e,r,n){function i(t){this.constructor$(t),this._howMany=0,this._unwrap=!1,this._initialized=!1}function o(t,e){if((0|e)!==e||0>e)return n("expecting a positive integer\n\n See http://goo.gl/1wAmHx\n");var r=new i(t),o=r.promise();return r.setHowMany(e),r.init(),o}var s=t("./util.js"),a=t("./errors.js").RangeError,u=t("./errors.js").AggregateError,c=s.isArray;s.inherits(i,r),i.prototype._init=function(){if(this._initialized){if(0===this._howMany)return void this._resolve([]);this._init$(void 0,-5);var 
t=c(this._values);!this._isResolved()&&t&&this._howMany>this._canPossiblyFulfill()&&this._reject(this._getRangeError(this.length()))}},i.prototype.init=function(){this._initialized=!0,this._init()},i.prototype.setUnwrap=function(){this._unwrap=!0},i.prototype.howMany=function(){return this._howMany},i.prototype.setHowMany=function(t){this._howMany=t},i.prototype._promiseFulfilled=function(t){this._addFulfilled(t),this._fulfilled()===this.howMany()&&(this._values.length=this.howMany(),this._resolve(1===this.howMany()&&this._unwrap?this._values[0]:this._values))},i.prototype._promiseRejected=function(t){if(this._addRejected(t),this.howMany()>this._canPossiblyFulfill()){for(var e=new u,r=this.length();r<this._values.length;++r)e.push(this._values[r]);this._reject(e)}},i.prototype._fulfilled=function(){return this._totalResolved},i.prototype._rejected=function(){return this._values.length-this.length()},i.prototype._addRejected=function(t){this._values.push(t)},i.prototype._addFulfilled=function(t){this._values[this._totalResolved++]=t},i.prototype._canPossiblyFulfill=function(){return this.length()-this._rejected()},i.prototype._getRangeError=function(t){var e="Input array must contain at least "+this._howMany+" items but contains only "+t+" items";return new a(e)},i.prototype._resolveEmptyArray=function(){this._reject(this._getRangeError(0))},e.some=function(t,e){return o(t,e)},e.prototype.some=function(t){return o(this,t)},e._SomePromiseArray=i}},{"./errors.js":13,"./util.js":38}],34:[function(t,e){"use strict";e.exports=function(t){function e(t){void 0!==t?(t=t._target(),this._bitField=t._bitField,this._settledValue=t._settledValue):(this._bitField=0,this._settledValue=void 0)}e.prototype.value=function(){if(!this.isFulfilled())throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\n\n See http://goo.gl/hc1DLj\n");return this._settledValue},e.prototype.error=e.prototype.reason=function(){if(!this.isRejected())throw new TypeError("cannot get rejection reason of a non-rejected promise\n\n See http://goo.gl/hPuiwB\n");return this._settledValue},e.prototype.isFulfilled=t.prototype._isFulfilled=function(){return(268435456&this._bitField)>0},e.prototype.isRejected=t.prototype._isRejected=function(){return(134217728&this._bitField)>0},e.prototype.isPending=t.prototype._isPending=function(){return 0===(402653184&this._bitField)},e.prototype.isResolved=t.prototype._isResolved=function(){return(402653184&this._bitField)>0},t.prototype.isPending=function(){return this._target()._isPending()},t.prototype.isRejected=function(){return this._target()._isRejected()},t.prototype.isFulfilled=function(){return this._target()._isFulfilled()},t.prototype.isResolved=function(){return this._target()._isResolved()},t.prototype._value=function(){return this._settledValue},t.prototype._reason=function(){return this._unsetRejectionIsUnhandled(),this._settledValue},t.prototype.value=function(){var t=this._target();if(!t.isFulfilled())throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\n\n See http://goo.gl/hc1DLj\n");return t._settledValue},t.prototype.reason=function(){var t=this._target();if(!t.isRejected())throw new TypeError("cannot get rejection reason of a non-rejected promise\n\n See http://goo.gl/hPuiwB\n");return t._unsetRejectionIsUnhandled(),t._settledValue},t.PromiseInspection=e}},{}],35:[function(t,e){"use strict";e.exports=function(e,r){function n(t,n){if(c(t)){if(t instanceof e)return t;if(o(t)){var l=new e(r);return 
t._then(l._fulfillUnchecked,l._rejectUncheckedCheckError,l._progressUnchecked,l,null),l}var h=a.tryCatch(i)(t);if(h===u){n&&n._pushContext();var l=e.reject(h.e);return n&&n._popContext(),l}if("function"==typeof h)return s(t,h,n)}return t}function i(t){return t.then}function o(t){return l.call(t,"_promise0")}function s(t,n,i){function o(t){l&&(l._resolveCallback(t),l=null)}function s(t){l&&(l._rejectCallback(t,p,!0),l=null)}function c(t){l&&"function"==typeof l._progress&&l._progress(t)}var l=new e(r),h=l;i&&i._pushContext(),l._captureStackTrace(),i&&i._popContext();var p=!0,f=a.tryCatch(n).call(t,o,s,c);return p=!1,l&&f===u&&(l._rejectCallback(f.e,!0,!0),l=null),h}var a=t("./util.js"),u=a.errorObj,c=a.isObject,l={}.hasOwnProperty;return n}},{"./util.js":38}],36:[function(t,e){"use strict";e.exports=function(e,r){function n(t){var e=this;return e instanceof Number&&(e=+e),clearTimeout(e),t}function i(t){var e=this;throw e instanceof Number&&(e=+e),clearTimeout(e),t}var o=t("./util.js"),s=e.TimeoutError,a=function(t,e){if(t.isPending()){"string"!=typeof e&&(e="operation timed out");var r=new s(e);o.markAsOriginatingFromRejection(r),t._attachExtraTrace(r),t._cancel(r)}},u=function(t){return c(+this).thenReturn(t)},c=e.delay=function(t,n){if(void 0===n){n=t,t=void 0;var i=new e(r);return setTimeout(function(){i._fulfill()},n),i}return n=+n,e.resolve(t)._then(u,null,null,n,void 0)};e.prototype.delay=function(t){return c(this,t)},e.prototype.timeout=function(t,e){t=+t;var r=this.then().cancellable();r._cancellationParent=this;var o=setTimeout(function(){a(r,e)},t);return r._then(n,i,void 0,o,void 0)}}},{"./util.js":38}],37:[function(t,e){"use strict";e.exports=function(e,r,n,i){function o(t){for(var r=t.length,n=0;r>n;++n){var i=t[n];if(i.isRejected())return e.reject(i.error());t[n]=i._settledValue}return t}function s(t){setTimeout(function(){throw t},0)}function a(t){var e=n(t);return e!==t&&"function"==typeof t._isDisposable&&"function"==typeof t._getDisposer&&t._isDisposable()&&e._setDisposable(t._getDisposer()),e}function u(t,r){function i(){if(o>=u)return c.resolve();var l=a(t[o++]);if(l instanceof e&&l._isDisposable()){try{l=n(l._getDisposer().tryDispose(r),t.promise)}catch(h){return s(h)}if(l instanceof e)return l._then(i,s,null,null,null)}i()}var o=0,u=t.length,c=e.defer();return i(),c.promise}function c(t){var e=new v;return e._settledValue=t,e._bitField=268435456,u(this,e).thenReturn(t)}function l(t){var e=new v;return e._settledValue=t,e._bitField=134217728,u(this,e).thenThrow(t)}function h(t,e,r){this._data=t,this._promise=e,this._context=r}function p(t,e,r){this.constructor$(t,e,r)}function f(t){return h.isDisposer(t)?(this.resources[this.index]._setDisposable(t),t.promise()):t}var _=t("./errors.js").TypeError,d=t("./util.js").inherits,v=e.PromiseInspection;h.prototype.data=function(){return this._data},h.prototype.promise=function(){return this._promise},h.prototype.resource=function(){return this.promise().isFulfilled()?this.promise().value():null},h.prototype.tryDispose=function(t){var e=this.resource(),r=this._context;void 0!==r&&r._pushContext();var n=null!==e?this.doDispose(e,t):null;return void 0!==r&&r._popContext(),this._promise._unsetDisposable(),this._data=null,n},h.isDisposer=function(t){return null!=t&&"function"==typeof t.resource&&"function"==typeof t.tryDispose},d(p,h),p.prototype.doDispose=function(t,e){var r=this.data();return r.call(t,t,e)},e.using=function(){var t=arguments.length;if(2>t)return r("you must pass at least 2 arguments to Promise.using");var 
i=arguments[t-1];if("function"!=typeof i)return r("fn must be a function\n\n See http://goo.gl/916lJJ\n");t--;for(var s=new Array(t),a=0;t>a;++a){var u=arguments[a];if(h.isDisposer(u)){var p=u;u=u.promise(),u._setDisposable(p)}else{var _=n(u);_ instanceof e&&(u=_._then(f,null,null,{resources:s,index:a},void 0))}s[a]=u}var d=e.settle(s).then(o).then(function(t){d._pushContext();var e;try{e=i.apply(void 0,t)}finally{d._popContext()}return e})._then(c,l,void 0,s,void 0);return s.promise=d,d},e.prototype._setDisposable=function(t){this._bitField=262144|this._bitField,this._disposer=t
+},e.prototype._isDisposable=function(){return(262144&this._bitField)>0},e.prototype._getDisposer=function(){return this._disposer},e.prototype._unsetDisposable=function(){this._bitField=-262145&this._bitField,this._disposer=void 0},e.prototype.disposer=function(t){if("function"==typeof t)return new p(t,this,i());throw new _}}},{"./errors.js":13,"./util.js":38}],38:[function(t,e,r){"use strict";function n(){try{var t=C;return C=null,t.apply(this,arguments)}catch(e){return F.e=e,F}}function i(t){return C=t,n}function o(t){return null==t||t===!0||t===!1||"string"==typeof t||"number"==typeof t}function s(t){return!o(t)}function a(t){return o(t)?new Error(v(t)):t}function u(t,e){var r,n=t.length,i=new Array(n+1);for(r=0;n>r;++r)i[r]=t[r];return i[r]=e,i}function c(t,e,r){if(!w.isES5)return{}.hasOwnProperty.call(t,e)?t[e]:void 0;var n=Object.getOwnPropertyDescriptor(t,e);return null!=n?null==n.get&&null==n.set?n.value:r:void 0}function l(t,e,r){if(o(t))return t;var n={value:r,configurable:!0,enumerable:!1,writable:!0};return w.defineProperty(t,e,n),t}function h(t){throw t}function p(t){try{if("function"==typeof t){var e=w.names(t.prototype),r=w.isES5&&e.length>1,n=e.length>0&&!(1===e.length&&"constructor"===e[0]),i=x.test(t+"")&&w.names(t).length>0;if(r||n||i)return!0}return!1}catch(o){return!1}}function f(t){function e(){}e.prototype=t;for(var r=8;r--;)new e;return t}function _(t){return R.test(t)}function d(t,e,r){for(var n=new Array(t),i=0;t>i;++i)n[i]=e+i+r;return n}function v(t){try{return t+""}catch(e){return"[no string representation]"}}function y(t){try{l(t,"isOperational",!0)}catch(e){}}function g(t){return null==t?!1:t instanceof Error.__BluebirdErrorTypes__.OperationalError||t.isOperational===!0}function m(t){return t instanceof Error&&w.propertyIsWritable(t,"stack")}function j(t){return{}.toString.call(t)}function b(t,e,r){for(var n=w.names(t),i=0;i<n.length;++i){var o=n[i];if(r(o))try{w.defineProperty(e,o,w.getDescriptor(t,o))}catch(s){}}}var w=t("./es5.js"),k="undefined"==typeof navigator,E=function(){try{var t={};return w.defineProperty(t,"f",{get:function(){return 3}}),3===t.f}catch(e){return!1}}(),F={e:{}},C,P=function(t,e){function r(){this.constructor=t,this.constructor$=e;for(var r in e.prototype)n.call(e.prototype,r)&&"$"!==r.charAt(r.length-1)&&(this[r+"$"]=e.prototype[r])}var n={}.hasOwnProperty;return r.prototype=e.prototype,t.prototype=new r,t.prototype},T=function(){var t=[Array.prototype,Object.prototype,Function.prototype],e=function(e){for(var r=0;r<t.length;++r)if(t[r]===e)return!0;return!1};if(w.isES5){var r=Object.getOwnPropertyNames;return function(t){for(var n=[],i=Object.create(null);null!=t&&!e(t);){var o;try{o=r(t)}catch(s){return n}for(var a=0;a<o.length;++a){var u=o[a];if(!i[u]){i[u]=!0;var c=Object.getOwnPropertyDescriptor(t,u);null!=c&&null==c.get&&null==c.set&&n.push(u)}}t=w.getPrototypeOf(t)}return n}}var n={}.hasOwnProperty;return function(r){if(e(r))return[];var i=[];t:for(var o in r)if(n.call(r,o))i.push(o);else{for(var s=0;s<t.length;++s)if(n.call(t[s],o))continue t;i.push(o)}return i}}(),x=/this\s*\.\s*\S+\s*=/,R=/^[a-z$_][a-z$_0-9]*$/i,S=function(){return"stack"in new Error?function(t){return m(t)?t:new Error(v(t))}:function(t){if(m(t))return t;try{throw new Error(v(t))}catch(e){return 
e}}}(),A={isClass:p,isIdentifier:_,inheritedDataKeys:T,getDataPropertyOrDefault:c,thrower:h,isArray:w.isArray,haveGetters:E,notEnumerableProp:l,isPrimitive:o,isObject:s,canEvaluate:k,errorObj:F,tryCatch:i,inherits:P,withAppended:u,maybeWrapAsError:a,toFastProperties:f,filledRange:d,toString:v,canAttachTrace:m,ensureErrorObject:S,originatesFromRejection:g,markAsOriginatingFromRejection:y,classString:j,copyDescriptors:b,hasDevTools:"undefined"!=typeof chrome&&chrome&&"function"==typeof chrome.loadTimes,isNode:"undefined"!=typeof process&&"[object process]"===j(process).toLowerCase()};A.isRecentNode=A.isNode&&function(){var t=process.versions.node.split(".").map(Number);return 0===t[0]&&t[1]>10||t[0]>0}(),A.isNode&&A.toFastProperties(process);try{throw new Error}catch(O){A.lastLineError=O}e.exports=A},{"./es5.js":14}]},{},[4])(4)}),"undefined"!=typeof window&&null!==window?window.P=window.Promise:"undefined"!=typeof self&&null!==self&&(self.P=self.Promise); \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/async.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/async.js
index 3b5f828ce..010445961 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/async.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/async.js
@@ -42,8 +42,6 @@ Async.prototype.throwLater = function(fn, arg) {
arg = fn;
fn = function () { throw arg; };
}
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
if (typeof setTimeout !== "undefined") {
setTimeout(function() {
fn(arg);
@@ -57,73 +55,18 @@ Async.prototype.throwLater = function(fn, arg) {
}
};
-Async.prototype._getDomain = function() {};
-
-if (!false) {
-if (util.isNode) {
- var EventsModule = require("events");
-
- var domainGetter = function() {
- var domain = process.domain;
- if (domain === null) return undefined;
- return domain;
- };
-
- if (EventsModule.usingDomains) {
- Async.prototype._getDomain = domainGetter;
- } else {
- var descriptor =
- Object.getOwnPropertyDescriptor(EventsModule, "usingDomains");
-
- if (descriptor) {
- if (!descriptor.configurable) {
- process.on("domainsActivated", function() {
- Async.prototype._getDomain = domainGetter;
- });
- } else {
- var usingDomains = false;
- Object.defineProperty(EventsModule, "usingDomains", {
- configurable: false,
- enumerable: true,
- get: function() {
- return usingDomains;
- },
- set: function(value) {
- if (usingDomains || !value) return;
- usingDomains = true;
- Async.prototype._getDomain = domainGetter;
- util.toFastProperties(process);
- process.emit("domainsActivated");
- }
- });
- }
- }
- }
-}
-}
-
function AsyncInvokeLater(fn, receiver, arg) {
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
this._lateQueue.push(fn, receiver, arg);
this._queueTick();
}
function AsyncInvoke(fn, receiver, arg) {
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
this._normalQueue.push(fn, receiver, arg);
this._queueTick();
}
function AsyncSettlePromises(promise) {
- var domain = this._getDomain();
- if (domain !== undefined) {
- var fn = domain.bind(promise._settlePromises);
- this._normalQueue.push(fn, promise, undefined);
- } else {
- this._normalQueue._pushOne(promise);
- }
+ this._normalQueue._pushOne(promise);
this._queueTick();
}
@@ -132,13 +75,18 @@ if (!util.hasDevTools) {
Async.prototype.invoke = AsyncInvoke;
Async.prototype.settlePromises = AsyncSettlePromises;
} else {
+ if (schedule.isStatic) {
+ schedule = function(fn) { setTimeout(fn, 0); };
+ }
Async.prototype.invokeLater = function (fn, receiver, arg) {
if (this._trampolineEnabled) {
AsyncInvokeLater.call(this, fn, receiver, arg);
} else {
- setTimeout(function() {
- fn.call(receiver, arg);
- }, 100);
+ this._schedule(function() {
+ setTimeout(function() {
+ fn.call(receiver, arg);
+ }, 100);
+ });
}
};
@@ -146,9 +94,9 @@ if (!util.hasDevTools) {
if (this._trampolineEnabled) {
AsyncInvoke.call(this, fn, receiver, arg);
} else {
- setTimeout(function() {
+ this._schedule(function() {
fn.call(receiver, arg);
- }, 0);
+ });
}
};
@@ -156,16 +104,14 @@ if (!util.hasDevTools) {
if (this._trampolineEnabled) {
AsyncSettlePromises.call(this, promise);
} else {
- setTimeout(function() {
+ this._schedule(function() {
promise._settlePromises();
- }, 0);
+ });
}
};
}
Async.prototype.invokeFirst = function (fn, receiver, arg) {
- var domain = this._getDomain();
- if (domain !== undefined) fn = domain.bind(fn);
this._normalQueue.unshift(fn, receiver, arg);
this._queueTick();
};
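
The async.js hunks above stop wrapping every queued callback in a per-invoke domain lookup; domain capture now happens once, when a handler is registered (see the promise.js hunks further down). A minimal sketch of the Node "domain" mechanism the new approach leans on; the work() function and the timeout are invented for illustration:

var domain = require("domain");
var d = domain.create();

function work() {
  // Invoked later, outside d.run(), yet process.domain is restored to d
  // because the function was wrapped with d.bind() while d was active.
  console.log(process.domain === d); // true
}

var bound;
d.run(function () {
  bound = d.bind(work); // capture the active domain once, at registration time
});

setTimeout(bound, 10);
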
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/bind.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/bind.js
index d6f6da257..9d8257ae5 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/bind.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/bind.js
@@ -10,7 +10,6 @@ var targetRejected = function(e, context) {
};
var bindingResolved = function(thisArg, context) {
- this._setBoundTo(thisArg);
if (this._isPending()) {
this._resolveCallback(context.target);
}
@@ -25,6 +24,8 @@ Promise.prototype.bind = function (thisArg) {
var ret = new Promise(INTERNAL);
ret._propagateFrom(this, 1);
var target = this._target();
+
+ ret._setBoundTo(maybePromise);
if (maybePromise instanceof Promise) {
var context = {
promiseRejectionQueued: false,
@@ -36,7 +37,6 @@ Promise.prototype.bind = function (thisArg) {
maybePromise._then(
bindingResolved, bindingRejected, ret._progress, ret, context);
} else {
- ret._setBoundTo(thisArg);
ret._resolveCallback(target);
}
return ret;
@@ -59,13 +59,12 @@ Promise.bind = function (thisArg, value) {
var maybePromise = tryConvertToPromise(thisArg);
var ret = new Promise(INTERNAL);
+ ret._setBoundTo(maybePromise);
if (maybePromise instanceof Promise) {
- maybePromise._then(function(thisArg) {
- ret._setBoundTo(thisArg);
+ maybePromise._then(function() {
ret._resolveCallback(value);
}, ret._reject, ret._progress, ret, null);
} else {
- ret._setBoundTo(thisArg);
ret._resolveCallback(value);
}
return ret;
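
If I read the bind.js change correctly, the bound value is now recorded up front even when thisArg is itself a promise, and handlers later receive its fulfillment value through _boundValue(). A minimal, hedged sketch; the ctx object is invented for illustration:

var Promise = require("bluebird");

var ctx = Promise.resolve({ name: "request-context" });

Promise.resolve(42)
  .bind(ctx)                       // thisArg may be a promise after this change
  .then(function (value) {
    // `this` is ctx's fulfillment value, unwrapped by _boundValue()
    console.log(this.name, value); // "request-context" 42
  });
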
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/captured_trace.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/captured_trace.js
index 6fda9e80c..802acd35b 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/captured_trace.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/captured_trace.js
@@ -382,7 +382,8 @@ var captureStackTrace = (function stackDetection() {
catch(e) {
hasStackAfterThrow = ("stack" in e);
}
- if (!("stack" in err) && hasStackAfterThrow) {
+ if (!("stack" in err) && hasStackAfterThrow &&
+ typeof Error.stackTraceLimit === "number") {
stackFramePattern = v8stackFramePattern;
formatStack = v8stackFormatter;
return function captureStackTrace(o) {
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/catch_filter.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/catch_filter.js
index 040f05720..df1273339 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/catch_filter.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/catch_filter.js
@@ -30,7 +30,7 @@ function safePredicate(predicate, e) {
CatchFilter.prototype.doFilter = function (e) {
var cb = this._callback;
var promise = this._promise;
- var boundTo = promise._boundTo;
+ var boundTo = promise._boundValue();
for (var i = 0, len = this._instances.length; i < len; ++i) {
var item = this._instances[i];
var itemIsErrorType = item === Error ||
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/debuggability.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/debuggability.js
index 5ac176705..f377ffa14 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/debuggability.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/debuggability.js
@@ -1,5 +1,6 @@
"use strict";
module.exports = function(Promise, CapturedTrace) {
+var getDomain = Promise._getDomain;
var async = require("./async.js");
var Warning = require("./errors.js").Warning;
var util = require("./util.js");
@@ -14,7 +15,13 @@ if (debugging) {
async.disableTrampolineIfNecessary();
}
+Promise.prototype._ignoreRejections = function() {
+ this._unsetRejectionIsUnhandled();
+ this._bitField = this._bitField | 16777216;
+};
+
Promise.prototype._ensurePossibleRejectionHandled = function () {
+ if ((this._bitField & 16777216) !== 0) return;
this._setRejectionIsUnhandled();
async.invokeLater(this._notifyUnhandledRejection, this, undefined);
};
@@ -113,11 +120,17 @@ Promise.prototype._warn = function(message) {
};
Promise.onPossiblyUnhandledRejection = function (fn) {
- possiblyUnhandledRejection = typeof fn === "function" ? fn : undefined;
+ var domain = getDomain();
+ possiblyUnhandledRejection =
+ typeof fn === "function" ? (domain === null ? fn : domain.bind(fn))
+ : undefined;
};
Promise.onUnhandledRejectionHandled = function (fn) {
- unhandledRejectionHandled = typeof fn === "function" ? fn : undefined;
+ var domain = getDomain();
+ unhandledRejectionHandled =
+ typeof fn === "function" ? (domain === null ? fn : domain.bind(fn))
+ : undefined;
};
Promise.longStackTraces = function () {
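
The debuggability.js hunk binds the global rejection hooks to whatever domain is active when they are registered, and adds _ignoreRejections so promise arrays can silence rejections they already account for. A small usage sketch of Promise.onPossiblyUnhandledRejection; the domain wiring is only there to show where the new getDomain() call matters:

var Promise = require("bluebird");
var domain = require("domain");

var d = domain.create();
d.run(function () {
  Promise.onPossiblyUnhandledRejection(function (err) {
    // With the patch the hook was domain.bind()-ed at registration time,
    // so it runs with process.domain === d.
    console.error("possibly unhandled:", err.message);
  });
});

Promise.reject(new Error("nobody attached a catch handler"));
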
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/direct_resolve.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/direct_resolve.js
index 47a9ce9dc..f1c2efd76 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/direct_resolve.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/direct_resolve.js
@@ -1,7 +1,6 @@
"use strict";
var util = require("./util.js");
var isPrimitive = util.isPrimitive;
-var wrapsPrimitiveReceiver = util.wrapsPrimitiveReceiver;
module.exports = function(Promise) {
var returner = function () {
@@ -32,7 +31,7 @@ Promise.prototype["return"] =
Promise.prototype.thenReturn = function (value) {
if (value === undefined) return this.then(returnUndefined);
- if (wrapsPrimitiveReceiver && isPrimitive(value)) {
+ if (isPrimitive(value)) {
return this._then(
wrapper(value, 2),
undefined,
@@ -48,7 +47,7 @@ Promise.prototype["throw"] =
Promise.prototype.thenThrow = function (reason) {
if (reason === undefined) return this.then(throwUndefined);
- if (wrapsPrimitiveReceiver && isPrimitive(reason)) {
+ if (isPrimitive(reason)) {
return this._then(
wrapper(reason, 1),
undefined,
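
For context, thenReturn/thenThrow are the sugar being touched here; with wrapsPrimitiveReceiver gone, primitive values always take the wrapper path. A trivial sketch of the API itself:

var Promise = require("bluebird");

Promise.delay(10)
  .thenReturn("done")                      // same as .then(function () { return "done"; })
  .then(function (v) { console.log(v); }); // "done"

Promise.delay(10)
  .thenThrow(new Error("always fails"))    // same as .then(function () { throw ...; })
  .catch(function (e) { console.log(e.message); }); // "always fails"
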
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/finally.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/finally.js
index ed84a2a1f..c9342bcf2 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/finally.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/finally.js
@@ -1,7 +1,6 @@
"use strict";
module.exports = function(Promise, NEXT_FILTER, tryConvertToPromise) {
var util = require("./util.js");
-var wrapsPrimitiveReceiver = util.wrapsPrimitiveReceiver;
var isPrimitive = util.isPrimitive;
var thrower = util.thrower;
@@ -23,7 +22,7 @@ function throw$(r) {
}
function promisedFinally(ret, reasonOrValue, isFulfilled) {
var then;
- if (wrapsPrimitiveReceiver && isPrimitive(reasonOrValue)) {
+ if (isPrimitive(reasonOrValue)) {
then = isFulfilled ? return$(reasonOrValue) : throw$(reasonOrValue);
} else {
then = isFulfilled ? returnThis : throwThis;
@@ -36,7 +35,7 @@ function finallyHandler(reasonOrValue) {
var handler = this.handler;
var ret = promise._isBound()
- ? handler.call(promise._boundTo)
+ ? handler.call(promise._boundValue())
: handler();
if (ret !== undefined) {
@@ -61,7 +60,7 @@ function tapHandler(value) {
var handler = this.handler;
var ret = promise._isBound()
- ? handler.call(promise._boundTo, value)
+ ? handler.call(promise._boundValue(), value)
: handler(value);
if (ret !== undefined) {
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/map.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/map.js
index 66a5b179c..2f40efd24 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/map.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/map.js
@@ -4,6 +4,7 @@ module.exports = function(Promise,
apiRejection,
tryConvertToPromise,
INTERNAL) {
+var getDomain = Promise._getDomain;
var async = require("./async.js");
var util = require("./util.js");
var tryCatch = util.tryCatch;
@@ -14,7 +15,8 @@ var EMPTY_ARRAY = [];
function MappingPromiseArray(promises, fn, limit, _filter) {
this.constructor$(promises);
this._promise._captureStackTrace();
- this._callback = fn;
+ var domain = getDomain();
+ this._callback = domain === null ? fn : domain.bind(fn);
this._preservedValues = _filter === INTERNAL
? new Array(this.length())
: null;
@@ -49,7 +51,7 @@ MappingPromiseArray.prototype._promiseFulfilled = function (value, index) {
if (preservedValues !== null) preservedValues[index] = value;
var callback = this._callback;
- var receiver = this._promise._boundTo;
+ var receiver = this._promise._boundValue();
this._promise._pushContext();
var ret = tryCatch(callback).call(receiver, value, index, length);
this._promise._popContext();
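
The map.js hunks bind the mapper to the domain active at creation time and fetch its receiver through _boundValue(). A short sketch of what that means for a bound .map() call; the { factor: 10 } context object is invented for illustration:

var Promise = require("bluebird");

Promise.resolve([1, 2, 3])
  .bind({ factor: 10 })        // becomes the mapper's `this` via _boundValue()
  .map(function (n) {
    return n * this.factor;
  })
  .then(function (out) {
    console.log(out);          // [ 10, 20, 30 ]
  });
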
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js
index f305b936b..257565db5 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/nodeify.js
@@ -8,7 +8,8 @@ var errorObj = util.errorObj;
function spreadAdapter(val, nodeback) {
var promise = this;
if (!util.isArray(val)) return successAdapter.call(promise, val, nodeback);
- var ret = tryCatch(nodeback).apply(promise._boundTo, [null].concat(val));
+ var ret =
+ tryCatch(nodeback).apply(promise._boundValue(), [null].concat(val));
if (ret === errorObj) {
async.throwLater(ret.e);
}
@@ -16,7 +17,7 @@ function spreadAdapter(val, nodeback) {
function successAdapter(val, nodeback) {
var promise = this;
- var receiver = promise._boundTo;
+ var receiver = promise._boundValue();
var ret = val === undefined
? tryCatch(nodeback).call(receiver, null)
: tryCatch(nodeback).call(receiver, null, val);
@@ -32,13 +33,13 @@ function errorAdapter(reason, nodeback) {
newReason.cause = reason;
reason = newReason;
}
- var ret = tryCatch(nodeback).call(promise._boundTo, reason);
+ var ret = tryCatch(nodeback).call(promise._boundValue(), reason);
if (ret === errorObj) {
async.throwLater(ret.e);
}
}
-Promise.prototype.asCallback =
+Promise.prototype.asCallback =
Promise.prototype.nodeify = function (nodeback, options) {
if (typeof nodeback == "function") {
var adapter = successAdapter;
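
The nodeify.js adapters above now look the receiver up via _boundValue(); the public surface is unchanged. For readers unfamiliar with it, a minimal sketch of .nodeify()/.asCallback(), with an invented getUser helper:

var Promise = require("bluebird");

// Hypothetical helper: serve both promise and callback callers.
function getUser(id, callback) {
  return Promise.resolve({ id: id, name: "ada" })
    .asCallback(callback);    // alias of .nodeify(callback); a no-op when callback is missing
}

getUser(1, function (err, user) {
  if (err) return console.error(err);
  console.log(user.name);     // "ada"
});
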
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js
index f80d247b1..eb081181a 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise.js
@@ -9,7 +9,23 @@ var reflect = function() {
var apiRejection = function(msg) {
return Promise.reject(new TypeError(msg));
};
+
var util = require("./util.js");
+
+var getDomain;
+if (util.isNode) {
+ getDomain = function() {
+ var ret = process.domain;
+ if (ret === undefined) ret = null;
+ return ret;
+ };
+} else {
+ getDomain = function() {
+ return null;
+ };
+}
+util.notEnumerableProp(Promise, "_getDomain", getDomain);
+
var async = require("./async.js");
var errors = require("./errors.js");
var TypeError = Promise.TypeError = errors.TypeError;
@@ -208,8 +224,12 @@ Promise.prototype._then = function (
if (!haveInternalData) ret._setIsMigrated();
}
- var callbackIndex =
- target._addCallbacks(didFulfill, didReject, didProgress, ret, receiver);
+ var callbackIndex = target._addCallbacks(didFulfill,
+ didReject,
+ didProgress,
+ ret,
+ receiver,
+ getDomain());
if (target._isResolved() && !target._isSettlePromisesQueued()) {
async.invoke(
@@ -291,7 +311,7 @@ Promise.prototype._receiverAt = function (index) {
: this[
index * 5 - 5 + 4];
if (ret === undefined && this._isBound()) {
- return this._boundTo;
+ return this._boundValue();
}
return ret;
};
@@ -314,6 +334,20 @@ Promise.prototype._rejectionHandlerAt = function (index) {
: this[index * 5 - 5 + 1];
};
+Promise.prototype._boundValue = function() {
+ var ret = this._boundTo;
+ if (ret !== undefined) {
+ if (ret instanceof Promise) {
+ if (ret.isFulfilled()) {
+ return ret.value();
+ } else {
+ return undefined;
+ }
+ }
+ }
+ return ret;
+};
+
Promise.prototype._migrateCallbacks = function (follower, index) {
var fulfill = follower._fulfillmentHandlerAt(index);
var reject = follower._rejectionHandlerAt(index);
@@ -321,7 +355,7 @@ Promise.prototype._migrateCallbacks = function (follower, index) {
var promise = follower._promiseAt(index);
var receiver = follower._receiverAt(index);
if (promise instanceof Promise) promise._setIsMigrated();
- this._addCallbacks(fulfill, reject, progress, promise, receiver);
+ this._addCallbacks(fulfill, reject, progress, promise, receiver, null);
};
Promise.prototype._addCallbacks = function (
@@ -329,7 +363,8 @@ Promise.prototype._addCallbacks = function (
reject,
progress,
promise,
- receiver
+ receiver,
+ domain
) {
var index = this._length();
@@ -341,20 +376,34 @@ Promise.prototype._addCallbacks = function (
if (index === 0) {
this._promise0 = promise;
if (receiver !== undefined) this._receiver0 = receiver;
- if (typeof fulfill === "function" && !this._isCarryingStackTrace())
- this._fulfillmentHandler0 = fulfill;
- if (typeof reject === "function") this._rejectionHandler0 = reject;
- if (typeof progress === "function") this._progressHandler0 = progress;
+ if (typeof fulfill === "function" && !this._isCarryingStackTrace()) {
+ this._fulfillmentHandler0 =
+ domain === null ? fulfill : domain.bind(fulfill);
+ }
+ if (typeof reject === "function") {
+ this._rejectionHandler0 =
+ domain === null ? reject : domain.bind(reject);
+ }
+ if (typeof progress === "function") {
+ this._progressHandler0 =
+ domain === null ? progress : domain.bind(progress);
+ }
} else {
var base = index * 5 - 5;
this[base + 3] = promise;
this[base + 4] = receiver;
- if (typeof fulfill === "function")
- this[base + 0] = fulfill;
- if (typeof reject === "function")
- this[base + 1] = reject;
- if (typeof progress === "function")
- this[base + 2] = progress;
+ if (typeof fulfill === "function") {
+ this[base + 0] =
+ domain === null ? fulfill : domain.bind(fulfill);
+ }
+ if (typeof reject === "function") {
+ this[base + 1] =
+ domain === null ? reject : domain.bind(reject);
+ }
+ if (typeof progress === "function") {
+ this[base + 2] =
+ domain === null ? progress : domain.bind(progress);
+ }
}
this._setLength(index + 1);
return index;
@@ -449,7 +498,7 @@ Promise.prototype._settlePromiseFromHandler = function (
promise._pushContext();
var x;
if (receiver === APPLY && !this._isRejected()) {
- x = tryCatch(handler).apply(this._boundTo, value);
+ x = tryCatch(handler).apply(this._boundValue(), value);
} else {
x = tryCatch(handler).call(receiver, value);
}
@@ -519,8 +568,6 @@ Promise.prototype._settlePromiseAt = function (index) {
this._isCarryingStackTrace() ? this._getCarriedStackTrace() : undefined;
var value = this._settledValue;
var receiver = this._receiverAt(index);
-
-
this._clearCallbackDataAtIndex(index);
if (typeof handler === "function") {
@@ -647,7 +694,10 @@ Promise.prototype._settlePromises = function () {
}
};
-Promise._makeSelfResolutionError = makeSelfResolutionError;
+util.notEnumerableProp(Promise,
+ "_makeSelfResolutionError",
+ makeSelfResolutionError);
+
require("./progress.js")(Promise, PromiseArray);
require("./method.js")(Promise, INTERNAL, tryConvertToPromise, apiRejection);
require("./bind.js")(Promise, INTERNAL, tryConvertToPromise);
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise_array.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise_array.js
index 6dac86640..b2e8f1cc5 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise_array.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promise_array.js
@@ -80,7 +80,7 @@ PromiseArray.prototype._init = function init(_, resolveValueIfEmpty) {
if (maybePromise instanceof Promise) {
maybePromise = maybePromise._target();
if (isResolved) {
- maybePromise._unsetRejectionIsUnhandled();
+ maybePromise._ignoreRejections();
} else if (maybePromise._isPending()) {
maybePromise._proxyPromiseArray(this, i);
} else if (maybePromise._isFulfilled()) {
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js
index 035534459..2f088a30b 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/promisify.js
@@ -10,12 +10,21 @@ var canEvaluate = util.canEvaluate;
var TypeError = require("./errors").TypeError;
var defaultSuffix = "Async";
var defaultPromisified = {__isPromisified__: true};
-var noCopyPropsPattern =
- /^(?:length|name|arguments|caller|callee|prototype|__isPromisified__)$/;
-var defaultFilter = function(name, func) {
+var noCopyProps = [
+ "arity", "length",
+ "name",
+ "arguments",
+ "caller",
+ "callee",
+ "prototype",
+ "__isPromisified__"
+];
+var noCopyPropsPattern = new RegExp("^(?:" + noCopyProps.join("|") + ")$");
+
+var defaultFilter = function(name) {
return util.isIdentifier(name) &&
name.charAt(0) !== "_" &&
- !util.isClass(func);
+ name !== "constructor";
};
function propsFilter(key) {
@@ -159,6 +168,7 @@ function(callback, receiver, originalName, fn) {
"nodebackForPromise",
"tryCatch",
"errorObj",
+ "notEnumerableProp",
"INTERNAL","'use strict'; \n\
var ret = function (Parameters) { \n\
'use strict'; \n\
@@ -176,7 +186,7 @@ function(callback, receiver, originalName, fn) {
} \n\
return promise; \n\
}; \n\
- ret.__isPromisified__ = true; \n\
+ notEnumerableProp(ret, '__isPromisified__', true); \n\
return ret; \n\
"
.replace("Parameters", parameterDeclaration(newParameterCount))
@@ -190,6 +200,7 @@ function(callback, receiver, originalName, fn) {
nodebackForPromise,
util.tryCatch,
util.errorObj,
+ util.notEnumerableProp,
INTERNAL
);
};
@@ -216,7 +227,7 @@ function makeNodePromisifiedClosure(callback, receiver, _, fn) {
}
return promise;
}
- promisified.__isPromisified__ = true;
+ util.notEnumerableProp(promisified, "__isPromisified__", true);
return promisified;
}
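The promisify.js changes replace the plain `__isPromisified__ = true` assignments with `notEnumerableProp`, so the marker used to detect already-promisified functions no longer shows up when the wrapper's properties are enumerated or copied. A hedged sketch of the idea, using `Object.defineProperty` directly instead of bluebird's `util.notEnumerableProp`; `promisifyStub` is a placeholder for the generated wrapper in the diff:

```js
// Attach a marker to a wrapper function without making it enumerable.
function notEnumerableProp(obj, name, value) {
  Object.defineProperty(obj, name, {
    value: value,
    writable: true,
    enumerable: false,
    configurable: true
  });
  return obj;
}

function promisifyStub(fn) {
  function promisified() { /* ...wrap fn in a promise here... */ }
  notEnumerableProp(promisified, '__isPromisified__', true);
  return promisified;
}

var wrapped = promisifyStub(function () {});
console.log(wrapped.__isPromisified__); // true
console.log(Object.keys(wrapped));      // [] -- the marker stays hidden
```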
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/reduce.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/reduce.js
index 319222012..1f92dafac 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/reduce.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/reduce.js
@@ -4,6 +4,7 @@ module.exports = function(Promise,
apiRejection,
tryConvertToPromise,
INTERNAL) {
+var getDomain = Promise._getDomain;
var async = require("./async.js");
var util = require("./util.js");
var tryCatch = util.tryCatch;
@@ -32,7 +33,8 @@ function ReductionPromiseArray(promises, fn, accum, _each) {
}
}
if (!(isPromise || this._zerothIsAccum)) this._gotAccum = true;
- this._callback = fn;
+ var domain = getDomain();
+ this._callback = domain === null ? fn : domain.bind(fn);
this._accum = accum;
if (!rejected) async.invoke(init, this, undefined);
}
@@ -86,7 +88,7 @@ ReductionPromiseArray.prototype._promiseFulfilled = function (value, index) {
if (!gotAccum) return;
var callback = this._callback;
- var receiver = this._promise._boundTo;
+ var receiver = this._promise._boundValue();
var ret;
for (var i = this._reducingIndex; i < length; ++i) {
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/schedule.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/schedule.js
index 95e2b0051..bb04a8a2d 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/schedule.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/schedule.js
@@ -10,7 +10,10 @@ if (util.isNode && typeof MutationObserver === "undefined") {
schedule = util.isRecentNode
? function(fn) { GlobalSetImmediate.call(global, fn); }
: function(fn) { ProcessNextTick.call(process, fn); };
-} else if (typeof MutationObserver !== "undefined") {
+} else if ((typeof MutationObserver !== "undefined") &&
+ !(typeof window !== "undefined" &&
+ window.navigator &&
+ window.navigator.standalone)) {
schedule = function(fn) {
var div = document.createElement("div");
var observer = new MutationObserver(fn);
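The schedule.js hunk keeps the MutationObserver scheduler from being picked in iOS "standalone" home-screen web apps (`window.navigator.standalone`), where those callbacks were unreliable at the time. For context, a browser-only sketch of that style of microtask scheduling; it assumes a DOM and a working `MutationObserver`, and mirrors the shape of the code above rather than reproducing it exactly:

```js
// Queue fn as a microtask by observing an attribute change on a dummy node.
function makeScheduler() {
  return function schedule(fn) {
    var div = document.createElement('div');
    var observer = new MutationObserver(fn);
    observer.observe(div, { attributes: true });
    // Toggling a class mutates the "class" attribute, which queues the
    // observer callback ahead of timers and I/O.
    div.classList.toggle('foo');
  };
}

// Usage (browser): makeScheduler()(function () { console.log('microtask'); });
```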
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/thenables.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/thenables.js
index c858f86ab..eadfffb59 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/thenables.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/thenables.js
@@ -61,12 +61,7 @@ function doThenable(x, then, context) {
function resolveFromThenable(value) {
if (!promise) return;
- if (x === value) {
- promise._rejectCallback(
- Promise._makeSelfResolutionError(), false, true);
- } else {
- promise._resolveCallback(value);
- }
+ promise._resolveCallback(value);
promise = null;
}
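The thenables.js hunk removes the explicit `x === value` self-resolution check because `_resolveCallback` already raises the self-resolution `TypeError` itself (which is also why promise.js above now exposes `_makeSelfResolutionError` through `notEnumerableProp`). The observable behaviour required by Promises/A+ stays the same, roughly:

```js
// Resolving a promise with itself must reject with a TypeError (Promises/A+ 2.3.1).
var p = Promise.resolve().then(function () {
  return p; // the handler returns the very promise that .then() created
});
p.catch(function (err) {
  console.log(err instanceof TypeError); // true -- self-resolution error
});
```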
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/util.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/util.js
index ab67f8759..ea3934471 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/util.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/js/main/util.js
@@ -21,7 +21,9 @@ var errorObj = {e: {}};
var tryCatchTarget;
function tryCatcher() {
try {
- return tryCatchTarget.apply(this, arguments);
+ var target = tryCatchTarget;
+ tryCatchTarget = null;
+ return target.apply(this, arguments);
} catch (e) {
errorObj.e = e;
return errorObj;
@@ -82,6 +84,7 @@ function withAppended(target, appendee) {
function getDataPropertyOrDefault(obj, key, defaultValue) {
if (es5.isES5) {
var desc = Object.getOwnPropertyDescriptor(obj, key);
+
if (desc != null) {
return desc.get == null && desc.set == null
? desc.value
@@ -104,23 +107,32 @@ function notEnumerableProp(obj, name, value) {
return obj;
}
-
-var wrapsPrimitiveReceiver = (function() {
- return this !== "string";
-}).call("string");
-
function thrower(r) {
throw r;
}
var inheritedDataKeys = (function() {
+ var excludedPrototypes = [
+ Array.prototype,
+ Object.prototype,
+ Function.prototype
+ ];
+
+ var isExcludedProto = function(val) {
+ for (var i = 0; i < excludedPrototypes.length; ++i) {
+ if (excludedPrototypes[i] === val) {
+ return true;
+ }
+ }
+ return false;
+ };
+
if (es5.isES5) {
- var oProto = Object.prototype;
var getKeys = Object.getOwnPropertyNames;
return function(obj) {
var ret = [];
var visitedKeys = Object.create(null);
- while (obj != null && obj !== oProto) {
+ while (obj != null && !isExcludedProto(obj)) {
var keys;
try {
keys = getKeys(obj);
@@ -141,11 +153,23 @@ var inheritedDataKeys = (function() {
return ret;
};
} else {
+ var hasProp = {}.hasOwnProperty;
return function(obj) {
+ if (isExcludedProto(obj)) return [];
var ret = [];
+
/*jshint forin:false */
- for (var key in obj) {
- ret.push(key);
+ enumeration: for (var key in obj) {
+ if (hasProp.call(obj, key)) {
+ ret.push(key);
+ } else {
+ for (var i = 0; i < excludedPrototypes.length; ++i) {
+ if (hasProp.call(excludedPrototypes[i], key)) {
+ continue enumeration;
+ }
+ }
+ ret.push(key);
+ }
}
return ret;
};
@@ -153,13 +177,22 @@ var inheritedDataKeys = (function() {
})();
+var thisAssignmentPattern = /this\s*\.\s*\S+\s*=/;
function isClass(fn) {
try {
if (typeof fn === "function") {
var keys = es5.names(fn.prototype);
- if (es5.isES5) return keys.length > 1;
- return keys.length > 0 &&
- !(keys.length === 1 && keys[0] === "constructor");
+
+ var hasMethods = es5.isES5 && keys.length > 1;
+ var hasMethodsOtherThanConstructor = keys.length > 0 &&
+ !(keys.length === 1 && keys[0] === "constructor");
+ var hasThisAssignmentAndStaticMethods =
+ thisAssignmentPattern.test(fn + "") && es5.names(fn).length > 0;
+
+ if (hasMethods || hasMethodsOtherThanConstructor ||
+ hasThisAssignmentAndStaticMethods) {
+ return true;
+ }
}
return false;
} catch (e) {
@@ -239,7 +272,9 @@ function copyDescriptors(from, to, filter) {
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
if (filter(key)) {
- es5.defineProperty(to, key, es5.getDescriptor(from, key));
+ try {
+ es5.defineProperty(to, key, es5.getDescriptor(from, key));
+ } catch (ignore) {}
}
}
}
@@ -261,7 +296,6 @@ var ret = {
inherits: inherits,
withAppended: withAppended,
maybeWrapAsError: maybeWrapAsError,
- wrapsPrimitiveReceiver: wrapsPrimitiveReceiver,
toFastProperties: toFastProperties,
filledRange: filledRange,
toString: safeToString,
@@ -280,5 +314,8 @@ ret.isRecentNode = ret.isNode && (function() {
var version = process.versions.node.split(".").map(Number);
return (version[0] === 0 && version[1] > 10) || (version[0] > 0);
})();
+
+if (ret.isNode) ret.toFastProperties(process);
+
try {throw new Error(); } catch (e) {ret.lastLineError = e;}
module.exports = ret;
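Most of the util.js changes are defensive: `tryCatcher` now copies and clears the module-level `tryCatchTarget` before calling it (so the slot does not keep the last wrapped function alive), `inheritedDataKeys` stops walking the `Array`/`Object`/`Function` prototypes, `isClass` gains a `this.x =` heuristic, and `copyDescriptors` wraps `es5.defineProperty` in a try/catch. A self-contained sketch of the `tryCatch` pattern with that first fix applied:

```js
// Call a function without try/catch at every call site; on throw, return a
// shared sentinel object carrying the error instead.
var errorObj = { e: {} };
var tryCatchTarget = null;

function tryCatcher() {
  // Copy and clear the slot so it does not retain the last target
  // between calls (the change in the hunk above).
  var target = tryCatchTarget;
  tryCatchTarget = null;
  try {
    return target.apply(this, arguments);
  } catch (e) {
    errorObj.e = e;
    return errorObj;
  }
}

function tryCatch(fn) {
  tryCatchTarget = fn;
  return tryCatcher;
}

// Usage:
var result = tryCatch(JSON.parse)('{not json');
if (result === errorObj) console.log('threw:', result.e.message);
```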
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/package.json
index 2bb86a9a1..2b598a154 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/bluebird/package.json
@@ -1,7 +1,7 @@
{
"name": "bluebird",
"description": "Full featured Promises/A+ implementation with exceptionally good performance",
- "version": "2.9.27",
+ "version": "2.9.34",
"keywords": [
"promise",
"performance",
@@ -15,7 +15,10 @@
"future",
"flow control",
"dsl",
- "fluent interface"
+ "fluent interface",
+ "parallel",
+ "thread",
+ "concurrency"
],
"scripts": {
"lint": "node scripts/jshint.js",
@@ -69,15 +72,14 @@
"js/browser",
"js/main",
"js/zalgo",
- "LICENSE",
"zalgo.js"
],
- "gitHead": "634af0e27ff4faab62c6c5bfd105527abcf8b06e",
- "_id": "bluebird@2.9.27",
- "_shasum": "8eab7da1e866998f9f71c209b9875f9949a7adca",
- "_from": "bluebird@>=2.9.26 <3.0.0",
- "_npmVersion": "2.7.1",
- "_nodeVersion": "1.6.2",
+ "gitHead": "386ba4f7d588693e5d675290a6b7fade08e0d626",
+ "_id": "bluebird@2.9.34",
+ "_shasum": "2f7b4ec80216328a9fddebdf69c8d4942feff7d8",
+ "_from": "bluebird@>=2.9.30 <3.0.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
"_npmUser": {
"name": "esailija",
"email": "petka_antonov@hotmail.com"
@@ -89,10 +91,10 @@
}
],
"dist": {
- "shasum": "8eab7da1e866998f9f71c209b9875f9949a7adca",
- "tarball": "http://registry.npmjs.org/bluebird/-/bluebird-2.9.27.tgz"
+ "shasum": "2f7b4ec80216328a9fddebdf69c8d4942feff7d8",
+ "tarball": "http://registry.npmjs.org/bluebird/-/bluebird-2.9.34.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.9.27.tgz",
+ "_resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.9.34.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/index.js
index 4138a64dd..cbe928862 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/index.js
@@ -5,6 +5,7 @@ var stripAnsi = require('strip-ansi');
var hasAnsi = require('has-ansi');
var supportsColor = require('supports-color');
var defineProps = Object.defineProperties;
+var isSimpleWindowsTerm = process.platform === 'win32' && !/^xterm/i.test(process.env.TERM);
function Chalk(options) {
// detect mode if not set manually
@@ -12,22 +13,10 @@ function Chalk(options) {
}
// use bright blue on Windows as the normal blue color is illegible
-if (process.platform === 'win32') {
+if (isSimpleWindowsTerm) {
ansiStyles.blue.open = '\u001b[94m';
}
-function build(_styles) {
- var builder = function builder() {
- return applyStyle.apply(builder, arguments);
- };
- builder._styles = _styles;
- builder.enabled = this.enabled;
- // __proto__ is used because we must return a function, but there is
- // no way to create a function with a different prototype.
- builder.__proto__ = proto;
- return builder;
-}
-
var styles = (function () {
var ret = {};
@@ -46,11 +35,27 @@ var styles = (function () {
var proto = defineProps(function chalk() {}, styles);
+function build(_styles) {
+ var builder = function builder() {
+ return applyStyle.apply(builder, arguments);
+ };
+
+ builder._styles = _styles;
+ builder.enabled = this.enabled;
+ // __proto__ is used because we must return a function, but there is
+ // no way to create a function with a different prototype.
+ /*eslint no-proto: 0 */
+ builder.__proto__ = proto;
+
+ return builder;
+}
+
function applyStyle() {
// support varags, but simply cast to string in case there's only one arg
var args = arguments;
var argsLen = args.length;
var str = argsLen !== 0 && String(arguments[0]);
+
if (argsLen > 1) {
// don't slice `arguments`, it prevents v8 optimizations
for (var a = 1; a < argsLen; a++) {
@@ -62,18 +67,29 @@ function applyStyle() {
return str;
}
- /*jshint validthis: true */
var nestedStyles = this._styles;
-
var i = nestedStyles.length;
+
+ // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe,
+ // see https://github.com/chalk/chalk/issues/58
+ // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop.
+ var originalDim = ansiStyles.dim.open;
+ if (isSimpleWindowsTerm && (nestedStyles.indexOf('gray') !== -1 || nestedStyles.indexOf('grey') !== -1)) {
+ ansiStyles.dim.open = '';
+ }
+
while (i--) {
var code = ansiStyles[nestedStyles[i]];
+
// Replace any instances already present with a re-opening code
// otherwise only the part of the string until said closing code
// will be colored, and the rest will simply be 'plain'.
str = code.open + str.replace(code.closeRe, code.open) + code.close;
}
+ // Reset the original 'dim' if we changed it to work around the Windows dimmed gray issue.
+ ansiStyles.dim.open = originalDim;
+
return str;
}
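The chalk change blanks out `dim` temporarily when styling gray/grey on a plain Windows console (where dim gray renders invisible), while the surrounding `while (i--)` loop, unchanged here, keeps replacing any close codes already embedded in the string with the style's re-opening code so nested styling stays colored to the end. A small standalone sketch of that replacement step; the escape codes match the bundled ansi-styles module, but `red` here is just a hand-built object for illustration:

```js
// Why embedded close codes must be replaced with re-opening codes.
var red = { open: '\u001b[31m', close: '\u001b[39m', closeRe: /\u001b\[39m/g };

var inner = red.open + 'mid' + red.close;            // an already-styled fragment
var naive = red.open + 'start ' + inner + ' end' + red.close;
// ' end' renders unstyled: the inner close code reset the color early.

var fixed = red.open +
  ('start ' + inner + ' end').replace(red.closeRe, red.open) +
  red.close;
// Every embedded close becomes a re-open, so ' end' stays red.
```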
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/index.js
index caf9e119e..78945278f 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/index.js
@@ -1,56 +1,65 @@
'use strict';
-var styles = module.exports = {
- modifiers: {
- reset: [0, 0],
- bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
- dim: [2, 22],
- italic: [3, 23],
- underline: [4, 24],
- inverse: [7, 27],
- hidden: [8, 28],
- strikethrough: [9, 29]
- },
- colors: {
- black: [30, 39],
- red: [31, 39],
- green: [32, 39],
- yellow: [33, 39],
- blue: [34, 39],
- magenta: [35, 39],
- cyan: [36, 39],
- white: [37, 39],
- gray: [90, 39]
- },
- bgColors: {
- bgBlack: [40, 49],
- bgRed: [41, 49],
- bgGreen: [42, 49],
- bgYellow: [43, 49],
- bgBlue: [44, 49],
- bgMagenta: [45, 49],
- bgCyan: [46, 49],
- bgWhite: [47, 49]
- }
-};
-
-// fix humans
-styles.colors.grey = styles.colors.gray;
-
-Object.keys(styles).forEach(function (groupName) {
- var group = styles[groupName];
-
- Object.keys(group).forEach(function (styleName) {
- var style = group[styleName];
-
- styles[styleName] = group[styleName] = {
- open: '\u001b[' + style[0] + 'm',
- close: '\u001b[' + style[1] + 'm'
- };
- });
+function assembleStyles () {
+ var styles = {
+ modifiers: {
+ reset: [0, 0],
+ bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
+ dim: [2, 22],
+ italic: [3, 23],
+ underline: [4, 24],
+ inverse: [7, 27],
+ hidden: [8, 28],
+ strikethrough: [9, 29]
+ },
+ colors: {
+ black: [30, 39],
+ red: [31, 39],
+ green: [32, 39],
+ yellow: [33, 39],
+ blue: [34, 39],
+ magenta: [35, 39],
+ cyan: [36, 39],
+ white: [37, 39],
+ gray: [90, 39]
+ },
+ bgColors: {
+ bgBlack: [40, 49],
+ bgRed: [41, 49],
+ bgGreen: [42, 49],
+ bgYellow: [43, 49],
+ bgBlue: [44, 49],
+ bgMagenta: [45, 49],
+ bgCyan: [46, 49],
+ bgWhite: [47, 49]
+ }
+ };
+
+ // fix humans
+ styles.colors.grey = styles.colors.gray;
+
+ Object.keys(styles).forEach(function (groupName) {
+ var group = styles[groupName];
+
+ Object.keys(group).forEach(function (styleName) {
+ var style = group[styleName];
- Object.defineProperty(styles, groupName, {
- value: group,
- enumerable: false
+ styles[styleName] = group[styleName] = {
+ open: '\u001b[' + style[0] + 'm',
+ close: '\u001b[' + style[1] + 'm'
+ };
+ });
+
+ Object.defineProperty(styles, groupName, {
+ value: group,
+ enumerable: false
+ });
});
+
+ return styles;
+}
+
+Object.defineProperty(module, 'exports', {
+ enumerable: true,
+ get: assembleStyles
});
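The rewritten ansi-styles entry point moves the code table into `assembleStyles()` and exposes it through a getter on `module.exports`, so each `require('ansi-styles')` read hands back a freshly built object rather than one shared mutable table; presumably this keeps tweaks like chalk's Windows blue override from leaking into other consumers. A stripped-down sketch of that export pattern, where `build` is a hypothetical stand-in for `assembleStyles`:

```js
// lazy-exports.js -- rebuild the exported object on every access.
function build() {
  return { createdAt: Date.now() };
}

Object.defineProperty(module, 'exports', {
  enumerable: true,
  get: build // every read of module.exports calls build() again
});

// elsewhere.js (illustration):
//   var a = require('./lazy-exports');
//   var b = require('./lazy-exports');
//   console.log(a === b); // false -- each require() read built a new object
```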
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json
index 3ae71e3ab..b6a9ceaea 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json
@@ -1,27 +1,25 @@
{
"name": "ansi-styles",
- "version": "2.0.1",
+ "version": "2.1.0",
"description": "ANSI escape codes for styling strings in the terminal",
"license": "MIT",
"repository": {
"type": "git",
- "url": "git+https://github.com/sindresorhus/ansi-styles.git"
+ "url": "git+https://github.com/chalk/ansi-styles.git"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "url": "sindresorhus.com"
},
"maintainers": [
{
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
{
- "name": "Joshua Appelman",
- "email": "jappelman@xebia.com",
- "url": "http://jbnicolai.com"
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
}
],
"engines": {
@@ -58,14 +56,25 @@
"devDependencies": {
"mocha": "*"
},
- "readme": "# ansi-styles [![Build Status](https://travis-ci.org/sindresorhus/ansi-styles.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-styles)\n\n> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal\n\nYou probably want the higher-level [chalk](https://github.com/sindresorhus/chalk) module for styling your strings.\n\n![](screenshot.png)\n\n\n## Install\n\n```sh\n$ npm install --save ansi-styles\n```\n\n\n## Usage\n\n```js\nvar ansi = require('ansi-styles');\n\nconsole.log(ansi.green.open + 'Hello world!' + ansi.green.close);\n```\n\n\n## API\n\nEach style has an `open` and `close` property.\n\n\n## Styles\n\n### Modifiers\n\n- `reset`\n- `bold`\n- `dim`\n- `italic` *(not widely supported)*\n- `underline`\n- `inverse`\n- `hidden`\n- `strikethrough` *(not widely supported)*\n\n### Colors\n\n- `black`\n- `red`\n- `green`\n- `yellow`\n- `blue`\n- `magenta`\n- `cyan`\n- `white`\n- `gray`\n\n### Background colors\n\n- `bgBlack`\n- `bgRed`\n- `bgGreen`\n- `bgYellow`\n- `bgBlue`\n- `bgMagenta`\n- `bgCyan`\n- `bgWhite`\n\n\n## Advanced usage\n\nBy default you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.\n\n- `ansi.modifiers`\n- `ansi.colors`\n- `ansi.bgColors`\n\n\n###### Example\n\n```js\nconsole.log(ansi.colors.green.open);\n```\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
- "readmeFilename": "readme.md",
+ "gitHead": "18421cbe4a2d93359ec2599a894f704be126d066",
"bugs": {
- "url": "https://github.com/sindresorhus/ansi-styles/issues"
+ "url": "https://github.com/chalk/ansi-styles/issues"
},
- "homepage": "https://github.com/sindresorhus/ansi-styles#readme",
- "_id": "ansi-styles@2.0.1",
- "_shasum": "b033f57f93e2d28adeb8bc11138fa13da0fd20a3",
- "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.0.1.tgz",
- "_from": "ansi-styles@>=2.0.1 <3.0.0"
+ "homepage": "https://github.com/chalk/ansi-styles",
+ "_id": "ansi-styles@2.1.0",
+ "_shasum": "990f747146927b559a932bf92959163d60c0d0e2",
+ "_from": "ansi-styles@>=2.1.0 <3.0.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "0.12.4",
+ "_npmUser": {
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
+ },
+ "dist": {
+ "shasum": "990f747146927b559a932bf92959163d60c0d0e2",
+ "tarball": "http://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/readme.md
index 89ec6a7c1..3f933f616 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/readme.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/readme.md
@@ -1,15 +1,15 @@
-# ansi-styles [![Build Status](https://travis-ci.org/sindresorhus/ansi-styles.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-styles)
+# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles)
> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
-You probably want the higher-level [chalk](https://github.com/sindresorhus/chalk) module for styling your strings.
+You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings.
![](screenshot.png)
## Install
-```sh
+```
$ npm install --save ansi-styles
```
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json
index b2bafb26a..813c9089d 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json
@@ -14,14 +14,12 @@
},
"maintainers": [
{
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
{
- "name": "Joshua Appelman",
- "email": "jappelman@xebia.com",
- "url": "http://jbnicolai.com"
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
}
],
"engines": {
@@ -48,14 +46,25 @@
"devDependencies": {
"mocha": "*"
},
- "readme": "# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp)\n\n> Escape RegExp special characters\n\n\n## Install\n\n```sh\n$ npm install --save escape-string-regexp\n```\n\n\n## Usage\n\n```js\nvar escapeStringRegexp = require('escape-string-regexp');\n\nvar escapedString = escapeStringRegexp('how much $ for a unicorn?');\n//=> how much \\$ for a unicorn\\?\n\nnew RegExp(escapedString);\n```\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
- "readmeFilename": "readme.md",
+ "gitHead": "1e446e6b4449b5f1f8868cd31bf8fd25ee37fb4b",
"bugs": {
"url": "https://github.com/sindresorhus/escape-string-regexp/issues"
},
- "homepage": "https://github.com/sindresorhus/escape-string-regexp#readme",
+ "homepage": "https://github.com/sindresorhus/escape-string-regexp",
"_id": "escape-string-regexp@1.0.3",
"_shasum": "9e2d8b25bc2555c3336723750e03f099c2735bb5",
+ "_from": "escape-string-regexp@>=1.0.2 <2.0.0",
+ "_npmVersion": "2.1.16",
+ "_nodeVersion": "0.10.35",
+ "_npmUser": {
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
+ },
+ "dist": {
+ "shasum": "9e2d8b25bc2555c3336723750e03f099c2735bb5",
+ "tarball": "http://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz",
- "_from": "escape-string-regexp@>=1.0.2 <2.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/cli.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/cli.js
deleted file mode 100755
index 0386a8242..000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/cli.js
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env node
-'use strict';
-var stdin = require('get-stdin');
-var pkg = require('./package.json');
-var hasAnsi = require('./');
-var argv = process.argv.slice(2);
-var input = argv[0];
-
-function help() {
- console.log([
- '',
- ' ' + pkg.description,
- '',
- ' Usage',
- ' has-ansi <string>',
- ' echo <string> | has-ansi',
- '',
- ' Exits with code 0 if input has ANSI escape codes and 1 if not'
- ].join('\n'));
-}
-
-function init(data) {
- process.exit(hasAnsi(data) ? 0 : 1);
-}
-
-if (argv.indexOf('--help') !== -1) {
- help();
- return;
-}
-
-if (argv.indexOf('--version') !== -1) {
- console.log(pkg.version);
- return;
-}
-
-if (process.stdin.isTTY) {
- if (!input) {
- help();
- return;
- }
-
- init(input);
-} else {
- stdin(init);
-}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/index.js
new file mode 100644
index 000000000..4906755bc
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/index.js
@@ -0,0 +1,4 @@
+'use strict';
+module.exports = function () {
+ return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g;
+};
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/License b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/license
index 4804b7ab4..654d0bfe9 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/License
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/license
@@ -1,4 +1,6 @@
-Copyright (c) 2011 Debuggable Limited <felix@debuggable.com>
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/deps/npm/node_modules/ansi-regex/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/package.json
index 68962d0f1..7fc07677a 100644
--- a/deps/npm/node_modules/ansi-regex/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/package.json
@@ -1,16 +1,16 @@
{
"name": "ansi-regex",
- "version": "1.1.1",
+ "version": "2.0.0",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": {
"type": "git",
- "url": "https://github.com/sindresorhus/ansi-regex"
+ "url": "git+https://github.com/sindresorhus/ansi-regex.git"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "url": "sindresorhus.com"
},
"maintainers": [
{
@@ -62,25 +62,25 @@
"devDependencies": {
"mocha": "*"
},
- "gitHead": "47fb974630af70998157b30fad6eb5e5bd7c7cd6",
+ "gitHead": "57c3f2941a73079fa8b081e02a522e3d29913e2f",
"bugs": {
"url": "https://github.com/sindresorhus/ansi-regex/issues"
},
"homepage": "https://github.com/sindresorhus/ansi-regex",
- "_id": "ansi-regex@1.1.1",
- "_shasum": "41c847194646375e6a1a5d10c3ca054ef9fc980d",
- "_from": "ansi-regex@>=1.1.1 <1.2.0",
- "_npmVersion": "2.1.16",
- "_nodeVersion": "0.10.35",
+ "_id": "ansi-regex@2.0.0",
+ "_shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107",
+ "_from": "ansi-regex@>=2.0.0 <3.0.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.5",
"_npmUser": {
- "name": "jbnicolai",
- "email": "jappelman@xebia.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
"dist": {
- "shasum": "41c847194646375e6a1a5d10c3ca054ef9fc980d",
- "tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-1.1.1.tgz"
+ "shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107",
+ "tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-1.1.1.tgz",
+ "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/readme.md
new file mode 100644
index 000000000..1a4894ec1
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/ansi-regex/readme.md
@@ -0,0 +1,31 @@
+# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)
+
+> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
+
+
+## Install
+
+```
+$ npm install --save ansi-regex
+```
+
+
+## Usage
+
+```js
+var ansiRegex = require('ansi-regex');
+
+ansiRegex().test('\u001b[4mcake\u001b[0m');
+//=> true
+
+ansiRegex().test('cake');
+//=> false
+
+'\u001b[4mcake\u001b[0m'.match(ansiRegex());
+//=> ['\u001b[4m', '\u001b[0m']
+```
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/index.js
deleted file mode 100644
index 0f1aeb3df..000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-'use strict';
-
-module.exports = function (cb) {
- var stdin = process.stdin;
- var ret = '';
-
- if (stdin.isTTY) {
- setImmediate(cb, '');
- return;
- }
-
- stdin.setEncoding('utf8');
-
- stdin.on('readable', function () {
- var chunk;
-
- while (chunk = stdin.read()) {
- ret += chunk;
- }
- });
-
- stdin.on('end', function () {
- cb(ret);
- });
-};
-
-module.exports.buffer = function (cb) {
- var stdin = process.stdin;
- var ret = [];
- var len = 0;
-
- if (stdin.isTTY) {
- setImmediate(cb, new Buffer(''));
- return;
- }
-
- stdin.on('readable', function () {
- var chunk;
-
- while (chunk = stdin.read()) {
- ret.push(chunk);
- len += chunk.length;
- }
- });
-
- stdin.on('end', function () {
- cb(Buffer.concat(ret, len));
- });
-};
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/package.json
deleted file mode 100644
index 0075a2213..000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/package.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
- "name": "get-stdin",
- "version": "4.0.1",
- "description": "Easier stdin",
- "license": "MIT",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/sindresorhus/get-stdin.git"
- },
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
- },
- "engines": {
- "node": ">=0.10.0"
- },
- "scripts": {
- "test": "node test.js && node test-buffer.js && echo unicorns | node test-real.js"
- },
- "files": [
- "index.js"
- ],
- "keywords": [
- "std",
- "stdin",
- "stdio",
- "concat",
- "buffer",
- "stream",
- "process",
- "stream"
- ],
- "devDependencies": {
- "ava": "0.0.4",
- "buffer-equal": "0.0.1"
- },
- "readme": "# get-stdin [![Build Status](https://travis-ci.org/sindresorhus/get-stdin.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stdin)\n\n> Easier stdin\n\n\n## Install\n\n```sh\n$ npm install --save get-stdin\n```\n\n\n## Usage\n\n```js\n// example.js\nvar stdin = require('get-stdin');\n\nstdin(function (data) {\n\tconsole.log(data);\n\t//=> unicorns\n});\n```\n\n```sh\n$ echo unicorns | node example.js\nunicorns\n```\n\n\n## API\n\n### stdin(callback)\n\nGet `stdin` as a string.\n\n### stdin.buffer(callback)\n\nGet `stdin` as a buffer.\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
- "readmeFilename": "readme.md",
- "bugs": {
- "url": "https://github.com/sindresorhus/get-stdin/issues"
- },
- "homepage": "https://github.com/sindresorhus/get-stdin#readme",
- "_id": "get-stdin@4.0.1",
- "_shasum": "b968c6b0a04384324902e8bf1a5df32579a450fe",
- "_resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz",
- "_from": "get-stdin@>=4.0.1 <5.0.0"
-}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/readme.md
deleted file mode 100644
index bc1d32a8a..000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/node_modules/get-stdin/readme.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# get-stdin [![Build Status](https://travis-ci.org/sindresorhus/get-stdin.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stdin)
-
-> Easier stdin
-
-
-## Install
-
-```sh
-$ npm install --save get-stdin
-```
-
-
-## Usage
-
-```js
-// example.js
-var stdin = require('get-stdin');
-
-stdin(function (data) {
- console.log(data);
- //=> unicorns
-});
-```
-
-```sh
-$ echo unicorns | node example.js
-unicorns
-```
-
-
-## API
-
-### stdin(callback)
-
-Get `stdin` as a string.
-
-### stdin.buffer(callback)
-
-Get `stdin` as a buffer.
-
-
-## License
-
-MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json
index 11bcab998..d39a62eb9 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json
@@ -1,6 +1,6 @@
{
"name": "has-ansi",
- "version": "1.0.3",
+ "version": "2.0.0",
"description": "Check if a string has ANSI escape codes",
"license": "MIT",
"repository": {
@@ -10,36 +10,28 @@
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "url": "sindresorhus.com"
},
"maintainers": [
{
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
{
- "name": "Joshua Appelman",
- "email": "jappelman@xebia.com",
- "url": "http://jbnicolai.com"
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
}
],
- "bin": {
- "has-ansi": "cli.js"
- },
"engines": {
"node": ">=0.10.0"
},
"scripts": {
- "test": "mocha"
+ "test": "node test.js"
},
"files": [
- "index.js",
- "cli.js"
+ "index.js"
],
"keywords": [
- "cli",
- "bin",
"ansi",
"styles",
"color",
@@ -64,20 +56,30 @@
"has"
],
"dependencies": {
- "ansi-regex": "^1.1.0",
- "get-stdin": "^4.0.1"
+ "ansi-regex": "^2.0.0"
},
"devDependencies": {
- "mocha": "*"
+ "ava": "0.0.4"
},
- "readme": "# has-ansi [![Build Status](https://travis-ci.org/sindresorhus/has-ansi.svg?branch=master)](https://travis-ci.org/sindresorhus/has-ansi)\n\n> Check if a string has [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)\n\n\n## Install\n\n```sh\n$ npm install --save has-ansi\n```\n\n\n## Usage\n\n```js\nvar hasAnsi = require('has-ansi');\n\nhasAnsi('\\u001b[4mcake\\u001b[0m');\n//=> true\n\nhasAnsi('cake');\n//=> false\n```\n\n\n## CLI\n\n```sh\n$ npm install --global has-ansi\n```\n\n```\n$ has-ansi --help\n\n Usage\n has-ansi <string>\n echo <string> | has-ansi\n\n Exits with code 0 if input has ANSI escape codes and 1 if not\n```\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
- "readmeFilename": "readme.md",
+ "gitHead": "0722275e1bef139fcd09137da6e5550c3cd368b9",
"bugs": {
"url": "https://github.com/sindresorhus/has-ansi/issues"
},
- "homepage": "https://github.com/sindresorhus/has-ansi#readme",
- "_id": "has-ansi@1.0.3",
- "_shasum": "c0b5b1615d9e382b0ff67169d967b425e48ca538",
- "_resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-1.0.3.tgz",
- "_from": "has-ansi@>=1.0.3 <2.0.0"
+ "homepage": "https://github.com/sindresorhus/has-ansi",
+ "_id": "has-ansi@2.0.0",
+ "_shasum": "34f5049ce1ecdf2b0649af3ef24e45ed35416d91",
+ "_from": "has-ansi@>=2.0.0 <3.0.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.5",
+ "_npmUser": {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ },
+ "dist": {
+ "shasum": "34f5049ce1ecdf2b0649af3ef24e45ed35416d91",
+ "tarball": "http://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/readme.md
index 0fa149a82..02bc7c230 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/readme.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/readme.md
@@ -5,7 +5,7 @@
## Install
-```sh
+```
$ npm install --save has-ansi
```
@@ -23,21 +23,12 @@ hasAnsi('cake');
```
-## CLI
-
-```sh
-$ npm install --global has-ansi
-```
+## Related
-```
-$ has-ansi --help
-
- Usage
- has-ansi <string>
- echo <string> | has-ansi
-
- Exits with code 0 if input has ANSI escape codes and 1 if not
-```
+- [has-ansi-cli](https://github.com/sindresorhus/has-ansi-cli) - CLI for this module
+- [strip-ansi](https://github.com/sindresorhus/strip-ansi) - Strip ANSI escape codes
+- [ansi-regex](https://github.com/sindresorhus/ansi-regex) - Regular expression for matching ANSI escape codes
+- [chalk](https://github.com/sindresorhus/chalk) - Terminal string styling done right
## License
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/index.js
new file mode 100644
index 000000000..099480fbf
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/index.js
@@ -0,0 +1,6 @@
+'use strict';
+var ansiRegex = require('ansi-regex')();
+
+module.exports = function (str) {
+ return typeof str === 'string' ? str.replace(ansiRegex, '') : str;
+};
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/license b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/license
new file mode 100644
index 000000000..654d0bfe9
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/index.js
new file mode 100644
index 000000000..4906755bc
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/index.js
@@ -0,0 +1,4 @@
+'use strict';
+module.exports = function () {
+ return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g;
+};
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/license b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/license
new file mode 100644
index 000000000..654d0bfe9
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/package.json
new file mode 100644
index 000000000..7fc07677a
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/package.json
@@ -0,0 +1,86 @@
+{
+ "name": "ansi-regex",
+ "version": "2.0.0",
+ "description": "Regular expression for matching ANSI escape codes",
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/ansi-regex.git"
+ },
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "maintainers": [
+ {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ },
+ {
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
+ }
+ ],
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "mocha test/test.js",
+ "view-supported": "node test/viewCodes.js"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "ansi",
+ "styles",
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "string",
+ "tty",
+ "escape",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "command-line",
+ "text",
+ "regex",
+ "regexp",
+ "re",
+ "match",
+ "test",
+ "find",
+ "pattern"
+ ],
+ "devDependencies": {
+ "mocha": "*"
+ },
+ "gitHead": "57c3f2941a73079fa8b081e02a522e3d29913e2f",
+ "bugs": {
+ "url": "https://github.com/sindresorhus/ansi-regex/issues"
+ },
+ "homepage": "https://github.com/sindresorhus/ansi-regex",
+ "_id": "ansi-regex@2.0.0",
+ "_shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107",
+ "_from": "ansi-regex@>=2.0.0 <3.0.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.5",
+ "_npmUser": {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ },
+ "dist": {
+ "shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107",
+ "tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/readme.md
new file mode 100644
index 000000000..1a4894ec1
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/node_modules/ansi-regex/readme.md
@@ -0,0 +1,31 @@
+# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)
+
+> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
+
+
+## Install
+
+```
+$ npm install --save ansi-regex
+```
+
+
+## Usage
+
+```js
+var ansiRegex = require('ansi-regex');
+
+ansiRegex().test('\u001b[4mcake\u001b[0m');
+//=> true
+
+ansiRegex().test('cake');
+//=> false
+
+'\u001b[4mcake\u001b[0m'.match(ansiRegex());
+//=> ['\u001b[4m', '\u001b[0m']
+```
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/strip-ansi/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/package.json
index f766de042..2871d0379 100644
--- a/deps/npm/node_modules/strip-ansi/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/package.json
@@ -1,29 +1,35 @@
{
"name": "strip-ansi",
- "version": "2.0.1",
+ "version": "3.0.0",
"description": "Strip ANSI escape codes",
"license": "MIT",
"repository": {
"type": "git",
- "url": "https://github.com/sindresorhus/strip-ansi"
+ "url": "git+https://github.com/sindresorhus/strip-ansi.git"
},
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
- },
- "bin": {
- "strip-ansi": "cli.js"
+ "url": "sindresorhus.com"
},
+ "maintainers": [
+ {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ },
+ {
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
+ }
+ ],
"engines": {
"node": ">=0.10.0"
},
"scripts": {
- "test": "mocha"
+ "test": "node test.js"
},
"files": [
- "index.js",
- "cli.js"
+ "index.js"
],
"keywords": [
"strip",
@@ -36,7 +42,6 @@
"colors",
"terminal",
"console",
- "cli",
"string",
"tty",
"escape",
@@ -51,39 +56,30 @@
"text"
],
"dependencies": {
- "ansi-regex": "^1.0.0"
+ "ansi-regex": "^2.0.0"
},
"devDependencies": {
- "mocha": "*"
+ "ava": "0.0.4"
},
- "gitHead": "1eff0936c01f89efa312d9d51deed137259871a1",
+ "gitHead": "3f05b9810e1438f946e2eb84ee854cc00b972e9e",
"bugs": {
"url": "https://github.com/sindresorhus/strip-ansi/issues"
},
"homepage": "https://github.com/sindresorhus/strip-ansi",
- "_id": "strip-ansi@2.0.1",
- "_shasum": "df62c1aa94ed2f114e1d0f21fd1d50482b79a60e",
- "_from": "strip-ansi@>=2.0.1 <2.1.0",
- "_npmVersion": "1.4.28",
+ "_id": "strip-ansi@3.0.0",
+ "_shasum": "7510b665567ca914ccb5d7e072763ac968be3724",
+ "_from": "strip-ansi@>=3.0.0 <4.0.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.5",
"_npmUser": {
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
- "maintainers": [
- {
- "name": "sindresorhus",
- "email": "sindresorhus@gmail.com"
- },
- {
- "name": "jbnicolai",
- "email": "jappelman@xebia.com"
- }
- ],
"dist": {
- "shasum": "df62c1aa94ed2f114e1d0f21fd1d50482b79a60e",
- "tarball": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-2.0.1.tgz"
+ "shasum": "7510b665567ca914ccb5d7e072763ac968be3724",
+ "tarball": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-2.0.1.tgz",
+ "_resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/readme.md
new file mode 100644
index 000000000..76091512d
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/strip-ansi/readme.md
@@ -0,0 +1,33 @@
+# strip-ansi [![Build Status](https://travis-ci.org/sindresorhus/strip-ansi.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-ansi)
+
+> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
+
+
+## Install
+
+```
+$ npm install --save strip-ansi
+```
+
+
+## Usage
+
+```js
+var stripAnsi = require('strip-ansi');
+
+stripAnsi('\u001b[4mcake\u001b[0m');
+//=> 'cake'
+```
+
+
+## Related
+
+- [strip-ansi-cli](https://github.com/sindresorhus/strip-ansi-cli) - CLI for this module
+- [has-ansi](https://github.com/sindresorhus/has-ansi) - Check if a string has ANSI escape codes
+- [ansi-regex](https://github.com/sindresorhus/ansi-regex) - Regular expression for matching ANSI escape codes
+- [chalk](https://github.com/sindresorhus/chalk) - Terminal string styling done right
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/cli.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/cli.js
deleted file mode 100755
index e74698766..000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/cli.js
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env node
-'use strict';
-var pkg = require('./package.json');
-var supportsColor = require('./');
-var argv = process.argv.slice(2);
-
-function help() {
- console.log([
- '',
- ' ' + pkg.description,
- '',
- ' Usage',
- ' supports-color',
- '',
- ' Exits with code 0 if color is supported and 1 if not'
- ].join('\n'));
-}
-
-if (argv.indexOf('--help') !== -1) {
- help();
- return;
-}
-
-if (argv.indexOf('--version') !== -1) {
- console.log(pkg.version);
- return;
-}
-
-process.exit(supportsColor ? 0 : 1);
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/index.js
index a17196485..4346e272e 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/index.js
@@ -1,21 +1,28 @@
'use strict';
var argv = process.argv;
+var terminator = argv.indexOf('--');
+var hasFlag = function (flag) {
+ flag = '--' + flag;
+ var pos = argv.indexOf(flag);
+ return pos !== -1 && (terminator !== -1 ? pos < terminator : true);
+};
+
module.exports = (function () {
if ('FORCE_COLOR' in process.env) {
return true;
}
- if (argv.indexOf('--no-color') !== -1 ||
- argv.indexOf('--no-colors') !== -1 ||
- argv.indexOf('--color=false') !== -1) {
+ if (hasFlag('no-color') ||
+ hasFlag('no-colors') ||
+ hasFlag('color=false')) {
return false;
}
- if (argv.indexOf('--color') !== -1 ||
- argv.indexOf('--colors') !== -1 ||
- argv.indexOf('--color=true') !== -1 ||
- argv.indexOf('--color=always') !== -1) {
+ if (hasFlag('color') ||
+ hasFlag('colors') ||
+ hasFlag('color=true') ||
+ hasFlag('color=always')) {
return true;
}
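The supports-color change folds the flag checks into a small `hasFlag` helper that ignores anything after a bare `--` argument terminator, so color flags intended for a downstream command no longer affect detection. The same check in isolation, with a made-up argv for illustration:

```js
// Only honor flags that appear before a bare "--" terminator.
function makeHasFlag(argv) {
  var terminator = argv.indexOf('--');
  return function hasFlag(flag) {
    var pos = argv.indexOf('--' + flag);
    return pos !== -1 && (terminator === -1 || pos < terminator);
  };
}

var hasFlag = makeHasFlag(['--color', '--', '--no-color']);
console.log(hasFlag('color'));    // true  -- appears before the terminator
console.log(hasFlag('no-color')); // false -- after "--", so it is ignored
```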
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/package.json
index ca9b2c42e..38a1ecb3c 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/package.json
@@ -1,11 +1,11 @@
{
"name": "supports-color",
- "version": "1.3.1",
+ "version": "2.0.0",
"description": "Detect whether a terminal supports color",
"license": "MIT",
"repository": {
"type": "git",
- "url": "git+https://github.com/sindresorhus/supports-color.git"
+ "url": "git+https://github.com/chalk/supports-color.git"
},
"author": {
"name": "Sindre Sorhus",
@@ -14,19 +14,14 @@
},
"maintainers": [
{
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
{
- "name": "Joshua Appelman",
- "email": "jappelman@xebia.com",
- "url": "jbnicolai.com"
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
}
],
- "bin": {
- "supports-color": "cli.js"
- },
"engines": {
"node": ">=0.8.0"
},
@@ -34,12 +29,9 @@
"test": "mocha"
},
"files": [
- "index.js",
- "cli.js"
+ "index.js"
],
"keywords": [
- "cli",
- "bin",
"color",
"colour",
"colors",
@@ -63,14 +55,25 @@
"mocha": "*",
"require-uncached": "^1.0.2"
},
- "readme": "# supports-color [![Build Status](https://travis-ci.org/sindresorhus/supports-color.svg?branch=master)](https://travis-ci.org/sindresorhus/supports-color)\n\n> Detect whether a terminal supports color\n\n\n## Install\n\n```\n$ npm install --save supports-color\n```\n\n\n## Usage\n\n```js\nvar supportsColor = require('supports-color');\n\nif (supportsColor) {\n\tconsole.log('Terminal supports color');\n}\n```\n\nIt obeys the `--color` and `--no-color` CLI flags.\n\nFor situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`.\n\n\n## CLI\n\n```\n$ npm install --global supports-color\n```\n\n```\n$ supports-color --help\n\n Usage\n supports-color\n\n Exits with code 0 if color is supported and 1 if not\n```\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
- "readmeFilename": "readme.md",
+ "gitHead": "8400d98ade32b2adffd50902c06d9e725a5c6588",
"bugs": {
- "url": "https://github.com/sindresorhus/supports-color/issues"
+ "url": "https://github.com/chalk/supports-color/issues"
+ },
+ "homepage": "https://github.com/chalk/supports-color",
+ "_id": "supports-color@2.0.0",
+ "_shasum": "535d045ce6b6363fa40117084629995e9df324c7",
+ "_from": "supports-color@>=2.0.0 <3.0.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.5",
+ "_npmUser": {
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
+ },
+ "dist": {
+ "shasum": "535d045ce6b6363fa40117084629995e9df324c7",
+ "tarball": "http://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz"
},
- "homepage": "https://github.com/sindresorhus/supports-color#readme",
- "_id": "supports-color@1.3.1",
- "_shasum": "15758df09d8ff3b4acc307539fabe27095e1042d",
- "_resolved": "https://registry.npmjs.org/supports-color/-/supports-color-1.3.1.tgz",
- "_from": "supports-color@>=1.3.0 <2.0.0"
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/readme.md
index fe6016f9d..b4761f1ec 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/readme.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/supports-color/readme.md
@@ -1,4 +1,4 @@
-# supports-color [![Build Status](https://travis-ci.org/sindresorhus/supports-color.svg?branch=master)](https://travis-ci.org/sindresorhus/supports-color)
+# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color)
> Detect whether a terminal supports color
@@ -25,20 +25,10 @@ It obeys the `--color` and `--no-color` CLI flags.
For situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`.
-## CLI
+## Related
-```
-$ npm install --global supports-color
-```
-
-```
-$ supports-color --help
-
- Usage
- supports-color
-
- Exits with code 0 if color is supported and 1 if not
-```
+- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module
+- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right
## License
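The readme change above removes the bundled CLI (now published separately as supports-color-cli) while the programmatic usage documented earlier in the file stays the same: the module exports a boolean that honors `--color`/`--no-color` and the `FORCE_COLOR` environment variable. A minimal consumer sketch, assuming supports-color@2.x is installed locally:

```js
// Minimal consumer sketch, assuming supports-color@2.x is installed locally.
var supportsColor = require('supports-color');

if (supportsColor) {
  // Wrap output in raw ANSI green only when the terminal can render it.
  console.log('\u001b[32mTerminal supports color\u001b[39m');
} else {
  console.log('Terminal does not support color');
}
```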
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json
index ce07ec6e4..e42022799 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json
@@ -1,22 +1,24 @@
{
"name": "chalk",
- "version": "1.0.0",
+ "version": "1.1.0",
"description": "Terminal string styling done right. Much color.",
"license": "MIT",
"repository": {
"type": "git",
- "url": "git+https://github.com/sindresorhus/chalk.git"
+ "url": "git+https://github.com/chalk/chalk.git"
},
"maintainers": [
{
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "http://sindresorhus.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
{
- "name": "Joshua Appelman",
- "email": "jappelman@xebia.com",
- "url": "http://jbnicolai.com"
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
+ },
+ {
+ "name": "unicorn",
+ "email": "sindresorhus+unicorn@gmail.com"
}
],
"engines": {
@@ -24,7 +26,9 @@
},
"scripts": {
"test": "mocha",
- "bench": "matcha benchmark.js"
+ "bench": "matcha benchmark.js",
+ "coverage": "nyc npm test && nyc report",
+ "coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls"
},
"files": [
"index.js"
@@ -37,7 +41,9 @@
"console",
"cli",
"string",
+ "str",
"ansi",
+ "style",
"styles",
"tty",
"formatting",
@@ -51,24 +57,40 @@
"text"
],
"dependencies": {
- "ansi-styles": "^2.0.1",
+ "ansi-styles": "^2.1.0",
"escape-string-regexp": "^1.0.2",
- "has-ansi": "^1.0.3",
- "strip-ansi": "^2.0.1",
- "supports-color": "^1.3.0"
+ "has-ansi": "^2.0.0",
+ "strip-ansi": "^3.0.0",
+ "supports-color": "^2.0.0"
},
"devDependencies": {
+ "coveralls": "^2.11.2",
"matcha": "^0.6.0",
- "mocha": "*"
+ "mocha": "*",
+ "nyc": "^3.0.0",
+ "require-uncached": "^1.0.2",
+ "resolve-from": "^1.0.0",
+ "semver": "^4.3.3"
},
- "readme": "<h1 align=\"center\">\n\t<br>\n\t<img width=\"360\" src=\"https://cdn.rawgit.com/sindresorhus/chalk/19935d6484811c5e468817f846b7b3d417d7bf4a/logo.svg\" alt=\"chalk\">\n\t<br>\n\t<br>\n</h1>\n\n> Terminal string styling done right\n\n[![Build Status](https://travis-ci.org/sindresorhus/chalk.svg?branch=master)](https://travis-ci.org/sindresorhus/chalk) [![](http://img.shields.io/badge/unicorn-approved-ff69b4.svg?style=flat)](https://www.youtube.com/watch?v=Sm368W0OsHo)\n\n[colors.js](https://github.com/Marak/colors.js) used to be the most popular string styling module, but it has serious deficiencies like extending `String.prototype` which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68). Although there are other ones, they either do too much or not enough.\n\n**Chalk is a clean and focused alternative.**\n\n![screenshot](https://github.com/sindresorhus/ansi-styles/raw/master/screenshot.png)\n\n\n## Why\n\n- Highly performant\n- Doesn't extend `String.prototype`\n- Expressive API\n- Ability to nest styles\n- Clean and focused\n- Auto-detects color support\n- Actively maintained\n- [Used by ~3000 modules](https://www.npmjs.com/browse/depended/chalk)\n\n\n## Install\n\n```\n$ npm install --save chalk\n```\n\n\n## Usage\n\nChalk comes with an easy to use composable API where you just chain and nest the styles you want.\n\n```js\nvar chalk = require('chalk');\n\n// style a string\nchalk.blue('Hello world!');\n\n// combine styled and normal strings\nchalk.blue('Hello') + 'World' + chalk.red('!');\n\n// compose multiple styles using the chainable API\nchalk.blue.bgRed.bold('Hello world!');\n\n// pass in multiple arguments\nchalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz');\n\n// nest styles\nchalk.red('Hello', chalk.underline.bgBlue('world') + '!');\n\n// nest styles of the same type even (color, underline, background)\nchalk.green(\n\t'I am a green line ' +\n\tchalk.blue.underline.bold('with a blue substring') +\n\t' that becomes green again!'\n);\n```\n\nEasily define your own themes.\n\n```js\nvar chalk = require('chalk');\nvar error = chalk.bold.red;\nconsole.log(error('Error!'));\n```\n\nTake advantage of console.log [string substitution](http://nodejs.org/docs/latest/api/console.html#console_console_log_data).\n\n```js\nvar name = 'Sindre';\nconsole.log(chalk.green('Hello %s'), name);\n//=> Hello Sindre\n```\n\n\n## API\n\n### chalk.`<style>[.<style>...](string, [string...])`\n\nExample: `chalk.red.bold.underline('Hello', 'world');`\n\nChain [styles](#styles) and call the last one as a method with a string argument. Order doesn't matter, and later styles take precedent in case of a conflict. This simply means that `Chalk.red.yellow.green` is equivalent to `Chalk.green`.\n\nMultiple arguments will be separated by space.\n\n### chalk.enabled\n\nColor support is automatically detected, but you can override it by setting the `enabled` property. You should however only do this in your own code as it applies globally to all chalk consumers.\n\nIf you need to change this in a reusable module create a new instance:\n\n```js\nvar ctx = new chalk.constructor({enabled: false});\n```\n\n### chalk.supportsColor\n\nDetect whether the terminal [supports color](https://github.com/sindresorhus/supports-color). Used internally and handled for you, but exposed for convenience.\n\nCan be overridden by the user with the flags `--color` and `--no-color`. 
For situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`.\n\n### chalk.styles\n\nExposes the styles as [ANSI escape codes](https://github.com/sindresorhus/ansi-styles).\n\nGenerally not useful, but you might need just the `.open` or `.close` escape code if you're mixing externally styled strings with your own.\n\n```js\nvar chalk = require('chalk');\n\nconsole.log(chalk.styles.red);\n//=> {open: '\\u001b[31m', close: '\\u001b[39m'}\n\nconsole.log(chalk.styles.red.open + 'Hello' + chalk.styles.red.close);\n```\n\n### chalk.hasColor(string)\n\nCheck whether a string [has color](https://github.com/sindresorhus/has-ansi).\n\n### chalk.stripColor(string)\n\n[Strip color](https://github.com/sindresorhus/strip-ansi) from a string.\n\nCan be useful in combination with `.supportsColor` to strip color on externally styled text when it's not supported.\n\nExample:\n\n```js\nvar chalk = require('chalk');\nvar styledString = getText();\n\nif (!chalk.supportsColor) {\n\tstyledString = chalk.stripColor(styledString);\n}\n```\n\n\n## Styles\n\n### Modifiers\n\n- `reset`\n- `bold`\n- `dim`\n- `italic` *(not widely supported)*\n- `underline`\n- `inverse`\n- `hidden`\n- `strikethrough` *(not widely supported)*\n\n### Colors\n\n- `black`\n- `red`\n- `green`\n- `yellow`\n- `blue` *(on Windows the bright version is used as normal blue is illegible)*\n- `magenta`\n- `cyan`\n- `white`\n- `gray`\n\n### Background colors\n\n- `bgBlack`\n- `bgRed`\n- `bgGreen`\n- `bgYellow`\n- `bgBlue`\n- `bgMagenta`\n- `bgCyan`\n- `bgWhite`\n\n\n## 256-colors\n\nChalk does not support support anything other than the base eight colors, which guarantees it will work on all terminals and systems. Some terminals, specifically `xterm` compliant ones, will support the full range of 8-bit colors. For this the lower level [ansi-256-colors](https://github.com/jbnicolai/ansi-256-colors) package can be used.\n\n\n## Windows\n\nIf you're on Windows, do yourself a favor and use [`cmder`](http://bliker.github.io/cmder/) instead of `cmd.exe`.\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
- "readmeFilename": "readme.md",
+ "gitHead": "e9bb6e6000b1c5d4508afabfdc85dd70f582f515",
"bugs": {
- "url": "https://github.com/sindresorhus/chalk/issues"
+ "url": "https://github.com/chalk/chalk/issues"
+ },
+ "homepage": "https://github.com/chalk/chalk",
+ "_id": "chalk@1.1.0",
+ "_shasum": "09b453cec497a75520e4a60ae48214a8700e0921",
+ "_from": "chalk@>=1.0.0 <2.0.0",
+ "_npmVersion": "2.10.1",
+ "_nodeVersion": "0.12.4",
+ "_npmUser": {
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
+ },
+ "dist": {
+ "shasum": "09b453cec497a75520e4a60ae48214a8700e0921",
+ "tarball": "http://registry.npmjs.org/chalk/-/chalk-1.1.0.tgz"
},
- "homepage": "https://github.com/sindresorhus/chalk#readme",
- "_id": "chalk@1.0.0",
- "_shasum": "b3cf4ed0ff5397c99c75b8f679db2f52831f96dc",
- "_resolved": "https://registry.npmjs.org/chalk/-/chalk-1.0.0.tgz",
- "_from": "chalk@>=1.0.0 <2.0.0"
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/readme.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/readme.md
index 43c706433..f757e59d6 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/readme.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/readme.md
@@ -1,19 +1,24 @@
<h1 align="center">
<br>
- <img width="360" src="https://cdn.rawgit.com/sindresorhus/chalk/19935d6484811c5e468817f846b7b3d417d7bf4a/logo.svg" alt="chalk">
+ <br>
+ <img width="360" src="https://cdn.rawgit.com/chalk/chalk/19935d6484811c5e468817f846b7b3d417d7bf4a/logo.svg" alt="chalk">
+ <br>
<br>
<br>
</h1>
> Terminal string styling done right
-[![Build Status](https://travis-ci.org/sindresorhus/chalk.svg?branch=master)](https://travis-ci.org/sindresorhus/chalk) [![](http://img.shields.io/badge/unicorn-approved-ff69b4.svg?style=flat)](https://www.youtube.com/watch?v=Sm368W0OsHo)
+[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk)
+[![Coverage Status](https://coveralls.io/repos/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/r/chalk/chalk?branch=master)
+[![](http://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4)
+
[colors.js](https://github.com/Marak/colors.js) used to be the most popular string styling module, but it has serious deficiencies like extending `String.prototype` which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68). Although there are other ones, they either do too much or not enough.
**Chalk is a clean and focused alternative.**
-![screenshot](https://github.com/sindresorhus/ansi-styles/raw/master/screenshot.png)
+![](https://github.com/chalk/ansi-styles/raw/master/screenshot.png)
## Why
@@ -25,7 +30,7 @@
- Clean and focused
- Auto-detects color support
- Actively maintained
-- [Used by ~3000 modules](https://www.npmjs.com/browse/depended/chalk)
+- [Used by ~4000 modules](https://www.npmjs.com/browse/depended/chalk) as of May 24, 2015
## Install
@@ -104,13 +109,13 @@ var ctx = new chalk.constructor({enabled: false});
### chalk.supportsColor
-Detect whether the terminal [supports color](https://github.com/sindresorhus/supports-color). Used internally and handled for you, but exposed for convenience.
+Detect whether the terminal [supports color](https://github.com/chalk/supports-color). Used internally and handled for you, but exposed for convenience.
Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`.
### chalk.styles
-Exposes the styles as [ANSI escape codes](https://github.com/sindresorhus/ansi-styles).
+Exposes the styles as [ANSI escape codes](https://github.com/chalk/ansi-styles).
Generally not useful, but you might need just the `.open` or `.close` escape code if you're mixing externally styled strings with your own.
@@ -125,11 +130,11 @@ console.log(chalk.styles.red.open + 'Hello' + chalk.styles.red.close);
### chalk.hasColor(string)
-Check whether a string [has color](https://github.com/sindresorhus/has-ansi).
+Check whether a string [has color](https://github.com/chalk/has-ansi).
### chalk.stripColor(string)
-[Strip color](https://github.com/sindresorhus/strip-ansi) from a string.
+[Strip color](https://github.com/chalk/strip-ansi) from a string.
Can be useful in combination with `.supportsColor` to strip color on externally styled text when it's not supported.
@@ -192,6 +197,16 @@ Chalk does not support support anything other than the base eight colors, which
If you're on Windows, do yourself a favor and use [`cmder`](http://bliker.github.io/cmder/) instead of `cmd.exe`.
+## Related
+
+- [chalk-cli](https://github.com/chalk/chalk-cli) - CLI for this module
+- [ansi-styles](https://github.com/chalk/ansi-styles/) - ANSI escape codes for styling strings in the terminal
+- [supports-color](https://github.com/chalk/supports-color/) - Detect whether a terminal supports color
+- [strip-ansi](https://github.com/chalk/strip-ansi) - Strip ANSI escape codes
+- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes
+- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes
+
+
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)
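The readme diff above mostly repoints links to the chalk org; the API it documents (`chalk.supportsColor`, `chalk.styles`, `chalk.hasColor`, `chalk.stripColor`) is unchanged. A short usage sketch against the chalk 1.x API, assuming chalk@1.1.0 is installed:

```js
// Usage sketch for the documented chalk 1.x API, assuming chalk@1.1.0 is installed.
var chalk = require('chalk');

var styled = chalk.red.bold('Error!');   // chainable styles
console.log(chalk.hasColor(styled));     // true - the string contains ANSI escape codes
console.log(chalk.styles.red.open);      // raw open code for red, '\u001b[31m'

// As the readme suggests, strip externally styled text when color is unsupported.
if (!chalk.supportsColor) {
  styled = chalk.stripColor(styled);
}
console.log(styled);
```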
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/package.json
index 64d7d644d..5b8486a16 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/node_modules/graceful-readlink/package.json
@@ -22,10 +22,27 @@
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
- "readme": "# graceful-readlink\n[![NPM Version](http://img.shields.io/npm/v/graceful-readlink.svg?style=flat)](https://www.npmjs.org/package/graceful-readlink)\n[![NPM Downloads](https://img.shields.io/npm/dm/graceful-readlink.svg?style=flat)](https://www.npmjs.org/package/graceful-readlink)\n\n\n## Usage\n\n```js\nvar readlinkSync = require('graceful-readlink').readlinkSync;\nconsole.log(readlinkSync(f));\n// output\n// the file pointed to when `f` is a symbolic link\n// the `f` itself when `f` is not a symbolic link\n```\n## Licence\n\nMIT License\n",
- "readmeFilename": "README.md",
+ "gitHead": "f6655275bebef706fb63fd01b5f062a7052419a5",
"_id": "graceful-readlink@1.0.1",
"_shasum": "4cafad76bc62f02fa039b2f94e9a3dd3a391a725",
+ "_from": "graceful-readlink@>=1.0.0",
+ "_npmVersion": "2.1.17",
+ "_nodeVersion": "0.11.14",
+ "_npmUser": {
+ "name": "zhiyelee",
+ "email": "zhiyelee@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "zhiyelee",
+ "email": "zhiyelee@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "4cafad76bc62f02fa039b2f94e9a3dd3a391a725",
+ "tarball": "http://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz",
- "_from": "graceful-readlink@>=1.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-function/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-function/package.json
index e2bc77284..db1ac2aa3 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-function/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-function/package.json
@@ -28,10 +28,26 @@
"devDependencies": {
"tape": "^2.13.4"
},
- "readme": "# generate-function\n\nModule that helps you write generated functions in Node\n\n```\nnpm install generate-function\n```\n\n[![build status](http://img.shields.io/travis/mafintosh/generate-function.svg?style=flat)](http://travis-ci.org/mafintosh/generate-function)\n\n## Disclamer\n\nWriting code that generates code is hard.\nYou should only use this if you really, really, really need this for performance reasons (like schema validators / parsers etc).\n\n## Usage\n\n``` js\nvar genfun = require('generate-function')\n\nvar addNumber = function(val) {\n var fn = genfun()\n ('function add(n) {')\n ('return n + %d', val) // supports format strings to insert values\n ('}')\n\n return fn.toFunction() // will compile the function\n}\n\nvar add2 = addNumber(2)\n\nconsole.log('1+2=', add2(1))\nconsole.log(add2.toString()) // prints the generated function\n```\n\nIf you need to close over variables in your generated function pass them to `toFunction(scope)`\n\n``` js\nvar multiply = function(a, b) {\n return a * b\n}\n\nvar addAndMultiplyNumber = function(val) {\n var fn = genfun()\n ('function(n) {')\n ('if (typeof n !== \"number\") {') // ending a line with { will indent the source\n ('throw new Error(\"argument should be a number\")')\n ('}')\n ('var result = multiply(%d, n+%d)', val, val)\n ('return result')\n ('}')\n\n // use fn.toString() if you want to see the generated source\n\n return fn.toFunction({\n multiply: multiply\n })\n}\n\nvar addAndMultiply2 = addAndMultiplyNumber(2)\n\nconsole.log('(3 + 2) * 2 =', addAndMultiply2(3))\n```\n\n## Related\n\nSee [generate-object-property](https://github.com/mafintosh/generate-object-property) if you need to safely generate code that\ncan be used to reference an object property\n\n## License\n\nMIT",
- "readmeFilename": "README.md",
+ "gitHead": "3d5fc8de5859be95f58e3af9bfb5f663edd95149",
"_id": "generate-function@2.0.0",
"_shasum": "6858fe7c0969b7d4e9093337647ac79f60dfbe74",
+ "_from": "generate-function@>=2.0.0 <3.0.0",
+ "_npmVersion": "1.4.23",
+ "_npmUser": {
+ "name": "mafintosh",
+ "email": "mathiasbuus@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mafintosh",
+ "email": "mathiasbuus@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "6858fe7c0969b7d4e9093337647ac79f60dfbe74",
+ "tarball": "http://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz",
- "_from": "generate-function@>=2.0.0 <3.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/package.json
index 41225e266..9f2361947 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/generate-object-property/node_modules/is-property/package.json
@@ -30,15 +30,30 @@
"name": "Mikola Lysenko"
},
"license": "MIT",
- "readmeFilename": "README.md",
"gitHead": "0a85ea5b6b1264ea1cdecc6e5cf186adbb3ffc50",
"bugs": {
"url": "https://github.com/mikolalysenko/is-property/issues"
},
- "readme": "is-property\n===========\nTests if a property of a JavaScript object can be accessed using the dot (.) notation or if it must be enclosed in brackets, (ie use x[\" ... \"])\n\nExample\n-------\n\n```javascript\nvar isProperty = require(\"is-property\")\n\nconsole.log(isProperty(\"foo\")) //Prints true\nconsole.log(isProperty(\"0\")) //Prints false\n```\n\nInstall\n-------\n\n npm install is-property\n \n### `require(\"is-property\")(str)`\nChecks if str is a property\n\n* `str` is a string which we will test if it is a property or not\n\n**Returns** true or false depending if str is a property\n\n## Credits\n(c) 2013 Mikola Lysenko. MIT License",
- "homepage": "https://github.com/mikolalysenko/is-property#readme",
+ "homepage": "https://github.com/mikolalysenko/is-property",
"_id": "is-property@1.0.2",
"_shasum": "57fe1c4e48474edd65b09911f26b1cd4095dda84",
+ "_from": "is-property@>=1.0.0 <2.0.0",
+ "_npmVersion": "2.1.4",
+ "_nodeVersion": "0.10.26",
+ "_npmUser": {
+ "name": "mikolalysenko",
+ "email": "mikolalysenko@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikolalysenko",
+ "email": "mikolalysenko@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "57fe1c4e48474edd65b09911f26b1cd4095dda84",
+ "tarball": "http://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz"
+ },
"_resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz",
- "_from": "is-property@>=1.0.0 <2.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json
index 64e45bd03..a86a787e0 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json
@@ -35,11 +35,26 @@
"scripts": {
"test": "node test.js"
},
- "readme": "# JSON Pointer for nodejs\n\nThis is an implementation of [JSON Pointer](http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-08).\n\n## Usage\n\n var jsonpointer = require(\"jsonpointer\");\n var obj = { foo: 1, bar: { baz: 2}, qux: [3, 4, 5]};\n var one = jsonpointer.get(obj, \"/foo\");\n var two = jsonpointer.get(obj, \"/bar/baz\");\n var three = jsonpointer.get(obj, \"/qux/0\");\n var four = jsonpointer.get(obj, \"/qux/1\");\n var five = jsonpointer.get(obj, \"/qux/2\");\n\n jsonpointer.set(obj, \"/foo\", 6); // obj.foo = 6;\n\n## Testing\n\n $ node test.js\n All tests pass.\n $\n\n[![Build Status](https://travis-ci.org/janl/node-jsonpointer.png?branch=master)](undefined)\n\n## Author\n\n(c) 2011 Jan Lehnardt <jan@apache.org>\n\n## License\n\nMIT License.",
- "readmeFilename": "README.md",
- "homepage": "https://github.com/janl/node-jsonpointer#readme",
"_id": "jsonpointer@1.1.0",
+ "dist": {
+ "shasum": "c3c72efaed3b97154163dc01dd349e1cfe0f80fc",
+ "tarball": "http://registry.npmjs.org/jsonpointer/-/jsonpointer-1.1.0.tgz"
+ },
+ "_npmVersion": "1.1.69",
+ "_npmUser": {
+ "name": "jan",
+ "email": "jan@apache.org"
+ },
+ "maintainers": [
+ {
+ "name": "jan",
+ "email": "jan@apache.org"
+ }
+ ],
+ "directories": {},
"_shasum": "c3c72efaed3b97154163dc01dd349e1cfe0f80fc",
"_resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-1.1.0.tgz",
- "_from": "jsonpointer@>=1.1.0 <2.0.0"
+ "_from": "jsonpointer@>=1.1.0 <2.0.0",
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/janl/node-jsonpointer#readme"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc
deleted file mode 100644
index 77887b5f0..000000000
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/.jshintrc
+++ /dev/null
@@ -1,30 +0,0 @@
-{
- "maxdepth": 4,
- "maxstatements": 200,
- "maxcomplexity": 12,
- "maxlen": 80,
- "maxparams": 5,
-
- "curly": true,
- "eqeqeq": true,
- "immed": true,
- "latedef": false,
- "noarg": true,
- "noempty": true,
- "nonew": true,
- "undef": true,
- "unused": "vars",
- "trailing": true,
-
- "quotmark": true,
- "expr": true,
- "asi": true,
-
- "browser": false,
- "esnext": true,
- "devel": false,
- "node": false,
- "nonstandard": false,
-
- "predef": ["require", "module", "__dirname", "__filename"]
-}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json
index c087642df..907a720da 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json
@@ -63,10 +63,26 @@
"engines": {
"node": ">=0.4"
},
- "readme": "# xtend\n\n[![browser support][3]][4]\n\n[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges)\n\nExtend like a boss\n\nxtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence.\n\n## Examples\n\n```js\nvar extend = require(\"xtend\")\n\n// extend returns a new object. Does not mutate arguments\nvar combination = extend({\n a: \"a\",\n b: 'c'\n}, {\n b: \"b\"\n})\n// { a: \"a\", b: \"b\" }\n```\n\n## Stability status: Locked\n\n## MIT Licenced\n\n\n [3]: http://ci.testling.com/Raynos/xtend.png\n [4]: http://ci.testling.com/Raynos/xtend\n",
- "readmeFilename": "README.md",
+ "gitHead": "94a95d76154103290533b2c55ffa0fe4be16bfef",
"_id": "xtend@4.0.0",
"_shasum": "8bc36ff87aedbe7ce9eaf0bca36b2354a743840f",
+ "_from": "xtend@>=4.0.0 <5.0.0",
+ "_npmVersion": "1.4.15",
+ "_npmUser": {
+ "name": "raynos",
+ "email": "raynos2@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "raynos",
+ "email": "raynos2@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "8bc36ff87aedbe7ce9eaf0bca36b2354a743840f",
+ "tarball": "http://registry.npmjs.org/xtend/-/xtend-4.0.0.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.0.tgz",
- "_from": "xtend@>=4.0.0 <5.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/package.json b/deps/npm/node_modules/request/node_modules/har-validator/package.json
index 76cfafba1..7a32287fa 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/package.json
@@ -1,5 +1,5 @@
{
- "version": "1.7.1",
+ "version": "1.8.0",
"name": "har-validator",
"description": "Extremely fast HTTP Archive (HAR) validator using JSON Schema",
"author": {
@@ -12,10 +12,10 @@
"type": "git",
"url": "git+https://github.com/ahmadnassri/har-validator.git"
},
- "license": "MIT",
- "main": "./src/index.js",
+ "license": "ISC",
+ "main": "lib/index",
"bin": {
- "har-validator": "./bin/har-validator"
+ "har-validator": "bin/har-validator"
},
"keywords": [
"har",
@@ -29,14 +29,16 @@
},
"files": [
"bin",
- "src"
+ "lib"
],
"bugs": {
"url": "https://github.com/ahmadnassri/har-validator/issues"
},
"scripts": {
- "test": "standard && echint && mocha --reporter spec",
- "coverage": "istanbul cover ./node_modules/mocha/bin/_mocha",
+ "pretest": "standard && echint",
+ "test": "mocha",
+ "posttest": "npm run coverage",
+ "coverage": "istanbul cover --dir coverage _mocha -- -R dot",
"codeclimate": "codeclimate < coverage/lcov.info"
},
"echint": {
@@ -46,24 +48,24 @@
},
"devDependencies": {
"codeclimate-test-reporter": "0.0.4",
- "echint": "^1.1.0",
- "istanbul": "^0.3.14",
+ "echint": "^1.3.0",
+ "istanbul": "^0.3.15",
"mocha": "^2.2.5",
- "require-directory": "^2.1.0",
- "should": "^6.0.3",
- "standard": "^3.11.1"
+ "require-directory": "^2.1.1",
+ "should": "^7.0.1",
+ "standard": "^4.3.1"
},
"dependencies": {
- "bluebird": "^2.9.26",
+ "bluebird": "^2.9.30",
"chalk": "^1.0.0",
"commander": "^2.8.1",
"is-my-json-valid": "^2.12.0"
},
- "gitHead": "328d7f2f37affcc4fca1db13da68f2be817ad31c",
- "_id": "har-validator@1.7.1",
- "_shasum": "8ec8952f8287d21b451ba3e36f27ed8d997d8a95",
+ "gitHead": "8fd21c30edb23a1fed2d50b934d055d1be3dd7c9",
+ "_id": "har-validator@1.8.0",
+ "_shasum": "d83842b0eb4c435960aeb108a067a3aa94c0eeb2",
"_from": "har-validator@>=1.6.1 <2.0.0",
- "_npmVersion": "2.7.4",
+ "_npmVersion": "2.11.2",
"_nodeVersion": "0.12.2",
"_npmUser": {
"name": "ahmadnassri",
@@ -76,10 +78,10 @@
}
],
"dist": {
- "shasum": "8ec8952f8287d21b451ba3e36f27ed8d997d8a95",
- "tarball": "http://registry.npmjs.org/har-validator/-/har-validator-1.7.1.tgz"
+ "shasum": "d83842b0eb4c435960aeb108a067a3aa94c0eeb2",
+ "tarball": "http://registry.npmjs.org/har-validator/-/har-validator-1.8.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/har-validator/-/har-validator-1.7.1.tgz",
+ "_resolved": "https://registry.npmjs.org/har-validator/-/har-validator-1.8.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/hawk/.npmignore b/deps/npm/node_modules/request/node_modules/hawk/.npmignore
index 70febc05e..ab108bf92 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/.npmignore
+++ b/deps/npm/node_modules/request/node_modules/hawk/.npmignore
@@ -17,4 +17,3 @@ config.json
*/*/._*
coverage.*
lib-cov
-
diff --git a/deps/npm/node_modules/request/node_modules/hawk/.travis.yml b/deps/npm/node_modules/request/node_modules/hawk/.travis.yml
index 047f7e3d5..77795c6a9 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/.travis.yml
+++ b/deps/npm/node_modules/request/node_modules/hawk/.travis.yml
@@ -2,4 +2,3 @@ language: node_js
node_js:
- 0.10
-
diff --git a/deps/npm/node_modules/request/node_modules/hawk/Makefile b/deps/npm/node_modules/request/node_modules/hawk/Makefile
deleted file mode 100755
index b102d5ab8..000000000
--- a/deps/npm/node_modules/request/node_modules/hawk/Makefile
+++ /dev/null
@@ -1,8 +0,0 @@
-test:
- @node node_modules/lab/bin/lab -a code
-test-cov:
- @node node_modules/lab/bin/lab -a code -t 100
-test-cov-html:
- @node node_modules/lab/bin/lab -a code -r html -o coverage.html
-
-.PHONY: test test-cov test-cov-html
diff --git a/deps/npm/node_modules/request/node_modules/hawk/README.md b/deps/npm/node_modules/request/node_modules/hawk/README.md
index 0bcd28790..4aff23f3a 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/README.md
+++ b/deps/npm/node_modules/request/node_modules/hawk/README.md
@@ -3,10 +3,9 @@
<img align="right" src="https://raw.github.com/hueniverse/hawk/master/images/logo.png" /> **Hawk** is an HTTP authentication scheme using a message authentication code (MAC) algorithm to provide partial
HTTP request cryptographic verification. For more complex use cases such as access delegation, see [Oz](https://github.com/hueniverse/oz).
-Current version: **2.3**
+Current version: **3.x**
-Note: 2.x is the same exact protocol as 1.1. The version increment reflects a change in the internal error format
-used by the module and used by the node API.
+Note: 3.x and 2.x are the same exact protocol as 1.1. The version increments reflect changes in the node API.
[![Build Status](https://secure.travis-ci.org/hueniverse/hawk.png)](http://travis-ci.org/hueniverse/hawk)
diff --git a/deps/npm/node_modules/request/node_modules/hawk/example/usage.js b/deps/npm/node_modules/request/node_modules/hawk/example/usage.js
index 8c063f6cf..13b860b4c 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/example/usage.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/example/usage.js
@@ -10,7 +10,7 @@ var Hawk = require('../lib');
var internals = {
credentials: {
dh37fgj492je: {
- id: 'dh37fgj492je', // Required by Hawk.client.header
+ id: 'dh37fgj492je', // Required by Hawk.client.header
key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
algorithm: 'sha256',
user: 'Steve'
diff --git a/deps/npm/node_modules/request/node_modules/hawk/index.js b/deps/npm/node_modules/request/node_modules/hawk/index.js
deleted file mode 100755
index 4cc88b358..000000000
--- a/deps/npm/node_modules/request/node_modules/hawk/index.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./lib'); \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/hawk/lib/browser.js b/deps/npm/node_modules/request/node_modules/hawk/lib/browser.js
index 3307fa243..7ccacf613 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/lib/browser.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/lib/browser.js
@@ -238,18 +238,18 @@ hawk.client = {
// Parse HTTP WWW-Authenticate header
- var attributes = hawk.utils.parseAuthorizationHeader(wwwAuthenticate, ['ts', 'tsm', 'error']);
- if (!attributes) {
+ var wwwAttributes = hawk.utils.parseAuthorizationHeader(wwwAuthenticate, ['ts', 'tsm', 'error']);
+ if (!wwwAttributes) {
return false;
}
- if (attributes.ts) {
- var tsm = hawk.crypto.calculateTsMac(attributes.ts, credentials);
- if (tsm !== attributes.tsm) {
+ if (wwwAttributes.ts) {
+ var tsm = hawk.crypto.calculateTsMac(wwwAttributes.ts, credentials);
+ if (tsm !== wwwAttributes.tsm) {
return false;
}
- hawk.utils.setNtpOffset(attributes.ts - Math.floor((new Date()).getTime() / 1000)); // Keep offset at 1 second precision
+ hawk.utils.setNtpOffset(wwwAttributes.ts - Math.floor((new Date()).getTime() / 1000)); // Keep offset at 1 second precision
}
}
@@ -617,6 +617,7 @@ hawk.utils = {
// $lab:coverage:off$
+/* eslint-disable */
// Based on: Crypto-JS v3.1.2
// Copyright (c) 2009-2013, Jeff Mott. All rights reserved.
@@ -638,4 +639,5 @@ if (typeof module !== 'undefined' && module.exports) {
module.exports = hawk;
}
+/* eslint-enable */
// $lab:coverage:on$
diff --git a/deps/npm/node_modules/request/node_modules/hawk/lib/client.js b/deps/npm/node_modules/request/node_modules/hawk/lib/client.js
index 7fdd48412..b3e8649e3 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/lib/client.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/lib/client.js
@@ -153,16 +153,16 @@ exports.authenticate = function (res, credentials, artifacts, options) {
// Parse HTTP WWW-Authenticate header
- var attributes = Utils.parseAuthorizationHeader(res.headers['www-authenticate'], ['ts', 'tsm', 'error']);
- if (attributes instanceof Error) {
+ var wwwAttributes = Utils.parseAuthorizationHeader(res.headers['www-authenticate'], ['ts', 'tsm', 'error']);
+ if (wwwAttributes instanceof Error) {
return false;
}
// Validate server timestamp (not used to update clock since it is done via the SNPT client)
- if (attributes.ts) {
- var tsm = Crypto.calculateTsMac(attributes.ts, credentials);
- if (tsm !== attributes.tsm) {
+ if (wwwAttributes.ts) {
+ var tsm = Crypto.calculateTsMac(wwwAttributes.ts, credentials);
+ if (tsm !== wwwAttributes.tsm) {
return false;
}
}
diff --git a/deps/npm/node_modules/request/node_modules/hawk/lib/server.js b/deps/npm/node_modules/request/node_modules/hawk/lib/server.js
index a80393031..a325d56a5 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/lib/server.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/lib/server.js
@@ -16,7 +16,7 @@ var internals = {};
/*
req: node's HTTP request object or an object as follows:
-
+
var request = {
method: 'GET',
url: '/resource/4?a=1&b=2',
@@ -24,21 +24,21 @@ var internals = {};
port: 8080,
authorization: 'Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", ext="some-app-ext-data", mac="6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE="'
};
-
+
credentialsFunc: required function to lookup the set of Hawk credentials based on the provided credentials id.
The credentials include the MAC key, MAC algorithm, and other attributes (such as username)
needed by the application. This function is the equivalent of verifying the username and
password in Basic authentication.
-
+
var credentialsFunc = function (id, callback) {
-
+
// Lookup credentials in database
db.lookup(id, function (err, item) {
-
+
if (err || !item) {
return callback(err);
}
-
+
var credentials = {
// Required
key: item.key,
@@ -46,27 +46,27 @@ var internals = {};
// Application specific
user: item.user
};
-
+
return callback(null, credentials);
});
};
-
+
options: {
hostHeaderName: optional header field name, used to override the default 'Host' header when used
behind a cache of a proxy. Apache2 changes the value of the 'Host' header while preserving
the original (which is what the module must verify) in the 'x-forwarded-host' header field.
Only used when passed a node Http.ServerRequest object.
-
- nonceFunc: optional nonce validation function. The function signature is function(nonce, ts, callback)
+
+ nonceFunc: optional nonce validation function. The function signature is function(key, nonce, ts, callback)
where 'callback' must be called using the signature function(err).
-
+
timestampSkewSec: optional number of seconds of permitted clock skew for incoming timestamps. Defaults to 60 seconds.
Provides a +/- skew which means actual allowed window is double the number of seconds.
-
+
localtimeOffsetMsec: optional local clock time offset express in a number of milliseconds (positive or negative).
Defaults to 0.
-
+
payload: optional payload for validation. The client calculates the hash value and includes it via the 'hash'
header attribute. The server always ensures the value provided has been included in the request
MAC. When this option is provided, it validates the hash value itself. Validation is done by calculating
@@ -85,10 +85,10 @@ var internals = {};
exports.authenticate = function (req, credentialsFunc, options, callback) {
callback = Hoek.nextTick(callback);
-
+
// Default options
- options.nonceFunc = options.nonceFunc || function (nonce, ts, nonceCallback) { return nonceCallback(); }; // No validation
+ options.nonceFunc = options.nonceFunc || internals.nonceFunc;
options.timestampSkewSec = options.timestampSkewSec || 60; // 60 seconds
// Application time
@@ -182,7 +182,7 @@ exports.authenticate = function (req, credentialsFunc, options, callback) {
// Check nonce
- options.nonceFunc(attributes.nonce, attributes.ts, function (err) {
+ options.nonceFunc(credentials.key, attributes.nonce, attributes.ts, function (err) {
if (err) {
return callback(Boom.unauthorized('Invalid nonce', 'Hawk'), credentials, artifacts);
@@ -325,7 +325,7 @@ exports.authenticateBewit = function (req, credentialsFunc, options, callback) {
// Extract bewit
- // 1 2 3 4
+ // 1 2 3 4
var resource = request.url.match(/^(\/.*)([\?&])bewit\=([^&$]*)(?:&(.+))?$/);
if (!resource) {
return callback(Boom.unauthorized(null, 'Hawk'));
@@ -445,10 +445,10 @@ exports.authenticateBewit = function (req, credentialsFunc, options, callback) {
exports.authenticateMessage = function (host, port, message, authorization, credentialsFunc, options, callback) {
callback = Hoek.nextTick(callback);
-
+
// Default options
- options.nonceFunc = options.nonceFunc || function (nonce, ts, nonceCallback) { return nonceCallback(); }; // No validation
+ options.nonceFunc = options.nonceFunc || internals.nonceFunc;
options.timestampSkewSec = options.timestampSkewSec || 60; // 60 seconds
// Application time
@@ -456,14 +456,14 @@ exports.authenticateMessage = function (host, port, message, authorization, cred
var now = Utils.now(options.localtimeOffsetMsec); // Measure now before any other processing
// Validate authorization
-
+
if (!authorization.id ||
!authorization.ts ||
!authorization.nonce ||
!authorization.hash ||
!authorization.mac) {
-
- return callback(Boom.badRequest('Invalid authorization'))
+
+ return callback(Boom.badRequest('Invalid authorization'));
}
// Fetch Hawk credentials
@@ -514,7 +514,7 @@ exports.authenticateMessage = function (host, port, message, authorization, cred
// Check nonce
- options.nonceFunc(authorization.nonce, authorization.ts, function (err) {
+ options.nonceFunc(credentials.key, authorization.nonce, authorization.ts, function (err) {
if (err) {
return callback(Boom.unauthorized('Invalid nonce', 'Hawk'), credentials);
@@ -532,3 +532,9 @@ exports.authenticateMessage = function (host, port, message, authorization, cred
});
});
};
+
+
+internals.nonceFunc = function (key, nonce, ts, nonceCallback) {
+
+ return nonceCallback(); // No validation
+};
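The server change above widens the `nonceFunc` signature from `(nonce, ts, callback)` to `(key, nonce, ts, callback)` and moves the no-op default into `internals.nonceFunc`. A hedged sketch of a caller-supplied nonce check written against the new signature (the in-memory `seen` object is only a stand-in for a real nonce cache):

```js
// Sketch only: a replay check written against the new 4-argument nonceFunc.
// The in-memory `seen` object is a stand-in for a real shared nonce cache.
var seen = Object.create(null);

var options = {
  nonceFunc: function (key, nonce, ts, nonceCallback) {

    var record = key + ':' + nonce + ':' + ts;   // the credentials key is now part of the check
    if (seen[record]) {
      return nonceCallback(new Error('Replayed nonce'));
    }

    seen[record] = true;
    return nonceCallback();
  },
  timestampSkewSec: 60
};

// Passed through unchanged: Hawk.server.authenticate(req, credentialsFunc, options, callback)
```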
diff --git a/deps/npm/node_modules/request/node_modules/hawk/lib/utils.js b/deps/npm/node_modules/request/node_modules/hawk/lib/utils.js
index de74606f3..8d2719abc 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/lib/utils.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/lib/utils.js
@@ -60,7 +60,7 @@ exports.parseRequest = function (req, options) {
if (!req.headers) {
return req;
}
-
+
// Obtain host and port information
if (!options.host || !options.port) {
@@ -157,8 +157,8 @@ exports.parseAuthorizationHeader = function (header, keys) {
};
-exports.unauthorized = function (message) {
+exports.unauthorized = function (message, attributes) {
- return Boom.unauthorized(message, 'Hawk');
+ return Boom.unauthorized(message, 'Hawk', attributes);
};
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/README.md b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/README.md
index 43f15626f..cb1e9089f 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/README.md
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/README.md
@@ -82,7 +82,12 @@ Returns a 401 Unauthorized error where:
- `scheme` can be one of the following:
- an authentication scheme name
- an array of string values. These values will be separated by ', ' and set to the 'WWW-Authenticate' header.
-- `attributes` - an object of values to use while setting the 'WWW-Authenticate' header. This value is only used when `schema` is a string, otherwise it is ignored. Every key/value pair will be included in the 'WWW-Authenticate' in the format of 'key="value"'. `null` and `undefined` will be replaced with an empty string. If `attributes` is set, `message` will be used as the 'error' segment of the 'WWW-Authenticate' header. If `message` is unset, the 'error' segment of the header will not be present and `isMissing` will be true on the error object.
+- `attributes` - an object of values to use while setting the 'WWW-Authenticate' header. This value is only used
+ when `schema` is a string, otherwise it is ignored. Every key/value pair will be included in the
+ 'WWW-Authenticate' in the format of 'key="value"' as well as in the response payload under the `attributes` key.
+ `null` and `undefined` will be replaced with an empty string. If `attributes` is set, `message` will be used as
+ the 'error' segment of the 'WWW-Authenticate' header. If `message` is unset, the 'error' segment of the header
+ will not be present and `isMissing` will be true on the error object.
If either `scheme` or `attributes` are set, the resultant `Boom` object will have the 'WWW-Authenticate' header set for the response.
@@ -111,7 +116,10 @@ Generates the following response:
"payload": {
"statusCode": 401,
"error": "Unauthorized",
- "message": "invalid password"
+ "message": "invalid password",
+ "attributes": {
+ "error": "invalid password"
+ }
},
"headers" {
"WWW-Authenticate": "sample error=\"invalid password\""
@@ -128,7 +136,13 @@ Generates the following response:
"payload": {
"statusCode": 401,
"error": "Unauthorized",
- "message": "invalid password"
+ "message": "invalid password",
+ "attributes": {
+ "error": "invalid password",
+ "ttl": 0,
+ "cache": "",
+ "foo": "bar"
+ }
},
"headers" {
"WWW-Authenticate": "sample ttl=\"0\", cache=\"\", foo=\"bar\", error=\"invalid password\""
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/index.js b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/index.js
deleted file mode 100755
index 4cc88b358..000000000
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/index.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./lib'); \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/lib/index.js b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/lib/index.js
index 256bf3222..fb6564a4b 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/lib/index.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/lib/index.js
@@ -99,20 +99,27 @@ exports.unauthorized = function (message, scheme, attributes) { // Or f
// function (message, scheme, attributes)
wwwAuthenticate = scheme;
+
+ if (attributes || message) {
+ err.output.payload.attributes = {};
+ }
+
if (attributes) {
var names = Object.keys(attributes);
for (i = 0, il = names.length; i < il; ++i) {
+ var name = names[i];
if (i) {
wwwAuthenticate += ',';
}
- var value = attributes[names[i]];
+ var value = attributes[name];
if (value === null ||
value === undefined) { // Value can be zero
value = '';
}
- wwwAuthenticate += ' ' + names[i] + '="' + Hoek.escapeHeaderAttribute(value.toString()) + '"';
+ wwwAuthenticate += ' ' + name + '="' + Hoek.escapeHeaderAttribute(value.toString()) + '"';
+ err.output.payload.attributes[name] = value;
}
}
@@ -121,6 +128,7 @@ exports.unauthorized = function (message, scheme, attributes) { // Or f
wwwAuthenticate += ',';
}
wwwAuthenticate += ' error="' + Hoek.escapeHeaderAttribute(message) + '"';
+ err.output.payload.attributes.error = message;
}
else {
err.isMissing = true;
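The boom change above records the WWW-Authenticate attributes (and the error message) on the response payload in addition to the header. A sketch of the resulting behavior, assuming boom@2.8.0 is installed, mirroring the example in its README:

```js
// Behavior sketch, assuming boom@2.8.0 is installed; mirrors the README example above.
var Boom = require('boom');

var err = Boom.unauthorized('invalid password', 'sample', { ttl: 0, cache: null, foo: 'bar' });

console.log(err.output.headers['WWW-Authenticate']);
// sample ttl="0", cache="", foo="bar", error="invalid password"

console.log(err.output.payload.attributes);
// { ttl: 0, cache: '', foo: 'bar', error: 'invalid password' }  (new in 2.8.0)
```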
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json
index 08663ad9e..4e6eb3402 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json
@@ -1,12 +1,12 @@
{
"name": "boom",
"description": "HTTP-friendly error objects",
- "version": "2.7.2",
+ "version": "2.8.0",
"repository": {
"type": "git",
"url": "git://github.com/hapijs/boom.git"
},
- "main": "index",
+ "main": "lib/index.js",
"keywords": [
"error",
"http"
@@ -26,15 +26,15 @@
"test-cov-html": "lab -a code -r html -o coverage.html"
},
"license": "BSD-3-Clause",
- "gitHead": "29be188e9ecfce3a2e090b8714dec2b6e0791724",
+ "gitHead": "43f13bf2d336b35ccbb062b32ba2a17259f6d24c",
"bugs": {
"url": "https://github.com/hapijs/boom/issues"
},
- "homepage": "https://github.com/hapijs/boom",
- "_id": "boom@2.7.2",
- "_shasum": "dad628d897f7fd2e32cc82197f13307971cf8354",
- "_from": "boom@>=2.0.0 <3.0.0",
- "_npmVersion": "2.7.0",
+ "homepage": "https://github.com/hapijs/boom#readme",
+ "_id": "boom@2.8.0",
+ "_shasum": "317bdfd47018fe7dd79b0e9da73efe244119fdf1",
+ "_from": "boom@>=2.8.0 <3.0.0",
+ "_npmVersion": "2.11.1",
"_nodeVersion": "0.10.38",
"_npmUser": {
"name": "arb",
@@ -55,10 +55,10 @@
}
],
"dist": {
- "shasum": "dad628d897f7fd2e32cc82197f13307971cf8354",
- "tarball": "http://registry.npmjs.org/boom/-/boom-2.7.2.tgz"
+ "shasum": "317bdfd47018fe7dd79b0e9da73efe244119fdf1",
+ "tarball": "http://registry.npmjs.org/boom/-/boom-2.8.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/boom/-/boom-2.7.2.tgz",
+ "_resolved": "https://registry.npmjs.org/boom/-/boom-2.8.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/test/index.js b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/test/index.js
index a0db4dff6..489f77c61 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/test/index.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/test/index.js
@@ -200,6 +200,7 @@ describe('unauthorized()', function () {
var err = Boom.unauthorized('boom', 'Test', { a: 1, b: 'something', c: null, d: 0 });
expect(err.output.statusCode).to.equal(401);
expect(err.output.headers['WWW-Authenticate']).to.equal('Test a="1", b="something", c="", d="0", error="boom"');
+ expect(err.output.payload.attributes).to.deep.equal({ a: 1, b: 'something', c: '', d: 0, error: 'boom' });
done();
});
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json b/deps/npm/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json
index 3a2538621..31dbaba6b 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json
@@ -36,14 +36,30 @@
"url": "http://github.com/hapijs/cryptiles/raw/master/LICENSE"
}
],
- "readme": "cryptiles\n=========\n\nGeneral purpose crypto utilities\n\n[![Build Status](https://secure.travis-ci.org/hapijs/cryptiles.png)](http://travis-ci.org/hapijs/cryptiles)\n\n## Methods\n\n### `randomString(<Number> size)`\nReturns a cryptographically strong pseudo-random data string. Takes a size argument for the length of the string.\n\n### `fixedTimeComparison(<String> a, <String> b)`\nCompare two strings using fixed time algorithm (to prevent time-based analysis of MAC digest match). Returns `true` if the strings match, `false` if they differ.\n",
- "readmeFilename": "README.md",
+ "gitHead": "944263dbb628b9b3da542d35600d587f861eeaf0",
"bugs": {
"url": "https://github.com/hapijs/cryptiles/issues"
},
- "homepage": "https://github.com/hapijs/cryptiles#readme",
+ "homepage": "https://github.com/hapijs/cryptiles",
"_id": "cryptiles@2.0.4",
"_shasum": "09ea1775b9e1c7de7e60a99d42ab6f08ce1a1285",
+ "_from": "cryptiles@>=2.0.0 <3.0.0",
+ "_npmVersion": "1.4.23",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "dist": {
+ "shasum": "09ea1775b9e1c7de7e60a99d42ab6f08ce1a1285",
+ "tarball": "http://registry.npmjs.org/cryptiles/-/cryptiles-2.0.4.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.4.tgz",
- "_from": "cryptiles@>=2.0.0 <3.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/sntp/package.json b/deps/npm/node_modules/request/node_modules/hawk/node_modules/sntp/package.json
index 9ae1b926e..8b664b60f 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/sntp/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/sntp/package.json
@@ -36,14 +36,30 @@
"url": "http://github.com/hueniverse/sntp/raw/master/LICENSE"
}
],
- "readme": "# sntp\n\nAn SNTP v4 client (RFC4330) for node. Simpy connects to the NTP or SNTP server requested and returns the server time\nalong with the roundtrip duration and clock offset. To adjust the local time to the NTP time, add the returned `t` offset\nto the local time.\n\n[![Build Status](https://secure.travis-ci.org/hueniverse/sntp.png)](http://travis-ci.org/hueniverse/sntp)\n\n# Usage\n\n```javascript\nvar Sntp = require('sntp');\n\n// All options are optional\n\nvar options = {\n host: 'nist1-sj.ustiming.org', // Defaults to pool.ntp.org\n port: 123, // Defaults to 123 (NTP)\n resolveReference: true, // Default to false (not resolving)\n timeout: 1000 // Defaults to zero (no timeout)\n};\n\n// Request server time\n\nSntp.time(options, function (err, time) {\n\n if (err) {\n console.log('Failed: ' + err.message);\n process.exit(1);\n }\n\n console.log('Local clock is off by: ' + time.t + ' milliseconds');\n process.exit(0);\n});\n```\n\nIf an application needs to maintain continuous time synchronization, the module provides a stateful method for\nquerying the current offset only when the last one is too old (defaults to daily).\n\n```javascript\n// Request offset once\n\nSntp.offset(function (err, offset) {\n\n console.log(offset); // New (served fresh)\n\n // Request offset again\n\n Sntp.offset(function (err, offset) {\n\n console.log(offset); // Identical (served from cache)\n });\n});\n```\n\nTo set a background offset refresh, start the interval and use the provided now() method. If for any reason the\nclient fails to obtain an up-to-date offset, the current system clock is used.\n\n```javascript\nvar before = Sntp.now(); // System time without offset\n\nSntp.start(function () {\n\n var now = Sntp.now(); // With offset\n Sntp.stop();\n});\n```\n\n",
- "readmeFilename": "README.md",
+ "gitHead": "ee2e35284f684609990681734d39010cd356d7da",
"bugs": {
"url": "https://github.com/hueniverse/sntp/issues"
},
- "homepage": "https://github.com/hueniverse/sntp#readme",
+ "homepage": "https://github.com/hueniverse/sntp",
"_id": "sntp@1.0.9",
"_shasum": "6541184cc90aeea6c6e7b35e2659082443c66198",
+ "_from": "sntp@>=1.0.0 <2.0.0",
+ "_npmVersion": "1.4.23",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "dist": {
+ "shasum": "6541184cc90aeea6c6e7b35e2659082443c66198",
+ "tarball": "http://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz",
- "_from": "sntp@>=1.0.0 <2.0.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/hawk/package.json b/deps/npm/node_modules/request/node_modules/hawk/package.json
index b7077b398..5a835b2e4 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/package.json
@@ -1,7 +1,7 @@
{
"name": "hawk",
"description": "HTTP Hawk Authentication Scheme",
- "version": "2.3.1",
+ "version": "3.1.0",
"author": {
"name": "Eran Hammer",
"email": "eran@hammer.io",
@@ -12,7 +12,7 @@
"type": "git",
"url": "git://github.com/hueniverse/hawk.git"
},
- "main": "index",
+ "main": "lib/index.js",
"keywords": [
"http",
"authentication",
@@ -20,12 +20,12 @@
"hawk"
],
"engines": {
- "node": ">=0.8.0"
+ "node": ">=0.10.32"
},
"browser": "./lib/browser.js",
"dependencies": {
"hoek": "2.x.x",
- "boom": "2.x.x",
+ "boom": "^2.8.x",
"cryptiles": "2.x.x",
"sntp": "1.x.x"
},
@@ -34,22 +34,35 @@
"lab": "5.x.x"
},
"scripts": {
- "test": "make test-cov"
+ "test": "lab -a code -t 100 -L",
+ "test-cov-html": "lab -a code -r html -o coverage.html"
},
- "licenses": [
- {
- "type": "BSD",
- "url": "http://github.com/hueniverse/hawk/raw/master/LICENSE"
- }
- ],
- "readme": "![hawk Logo](https://raw.github.com/hueniverse/hawk/master/images/hawk.png)\n\n<img align=\"right\" src=\"https://raw.github.com/hueniverse/hawk/master/images/logo.png\" /> **Hawk** is an HTTP authentication scheme using a message authentication code (MAC) algorithm to provide partial\nHTTP request cryptographic verification. For more complex use cases such as access delegation, see [Oz](https://github.com/hueniverse/oz).\n\nCurrent version: **2.3**\n\nNote: 2.x is the same exact protocol as 1.1. The version increment reflects a change in the internal error format\nused by the module and used by the node API.\n\n[![Build Status](https://secure.travis-ci.org/hueniverse/hawk.png)](http://travis-ci.org/hueniverse/hawk)\n\n# Table of Content\n\n- [**Introduction**](#introduction)\n - [Replay Protection](#replay-protection)\n - [Usage Example](#usage-example)\n - [Protocol Example](#protocol-example)\n - [Payload Validation](#payload-validation)\n - [Response Payload Validation](#response-payload-validation)\n - [Browser Support and Considerations](#browser-support-and-considerations)\n<p></p>\n- [**Single URI Authorization**](#single-uri-authorization)\n - [Usage Example](#bewit-usage-example)\n<p></p>\n- [**Security Considerations**](#security-considerations)\n - [MAC Keys Transmission](#mac-keys-transmission)\n - [Confidentiality of Requests](#confidentiality-of-requests)\n - [Spoofing by Counterfeit Servers](#spoofing-by-counterfeit-servers)\n - [Plaintext Storage of Credentials](#plaintext-storage-of-credentials)\n - [Entropy of Keys](#entropy-of-keys)\n - [Coverage Limitations](#coverage-limitations)\n - [Future Time Manipulation](#future-time-manipulation)\n - [Client Clock Poisoning](#client-clock-poisoning)\n - [Bewit Limitations](#bewit-limitations)\n - [Host Header Forgery](#host-header-forgery)\n<p></p>\n- [**Frequently Asked Questions**](#frequently-asked-questions)\n<p></p>\n- [**Implementations**](#implementations)\n- [**Acknowledgements**](#acknowledgements)\n\n# Introduction\n\n**Hawk** is an HTTP authentication scheme providing mechanisms for making authenticated HTTP requests with\npartial cryptographic verification of the request and response, covering the HTTP method, request URI, host,\nand optionally the request payload.\n\nSimilar to the HTTP [Digest access authentication schemes](http://www.ietf.org/rfc/rfc2617.txt), **Hawk** uses a set of\nclient credentials which include an identifier (e.g. username) and key (e.g. password). Likewise, just as with the Digest scheme,\nthe key is never included in authenticated requests. Instead, it is used to calculate a request MAC value which is\nincluded in its place.\n\nHowever, **Hawk** has several differences from Digest. 
In particular, while both use a nonce to limit the possibility of\nreplay attacks, in **Hawk** the client generates the nonce and uses it in combination with a timestamp, leading to less\n\"chattiness\" (interaction with the server).\n\nAlso unlike Digest, this scheme is not intended to protect the key itself (the password in Digest) because\nthe client and server must both have access to the key material in the clear.\n\nThe primary design goals of this scheme are to:\n* simplify and improve HTTP authentication for services that are unwilling or unable to deploy TLS for all resources,\n* secure credentials against leakage (e.g., when the client uses some form of dynamic configuration to determine where\n to send an authenticated request), and\n* avoid the exposure of credentials sent to a malicious server over an unauthenticated secure channel due to client\n failure to validate the server's identity as part of its TLS handshake.\n\nIn addition, **Hawk** supports a method for granting third-parties temporary access to individual resources using\na query parameter called _bewit_ (in falconry, a leather strap used to attach a tracking device to the leg of a hawk).\n\nThe **Hawk** scheme requires the establishment of a shared symmetric key between the client and the server,\nwhich is beyond the scope of this module. Typically, the shared credentials are established via an initial\nTLS-protected phase or derived from some other shared confidential information available to both the client\nand the server.\n\n\n## Replay Protection\n\nWithout replay protection, an attacker can use a compromised (but otherwise valid and authenticated) request more \nthan once, gaining access to a protected resource. To mitigate this, clients include both a nonce and a timestamp when \nmaking requests. This gives the server enough information to prevent replay attacks.\n\nThe nonce is generated by the client, and is a string unique across all requests with the same timestamp and\nkey identifier combination. \n\nThe timestamp enables the server to restrict the validity period of the credentials where requests occuring afterwards\nare rejected. It also removes the need for the server to retain an unbounded number of nonce values for future checks.\nBy default, **Hawk** uses a time window of 1 minute to allow for time skew between the client and server (which in\npractice translates to a maximum of 2 minutes as the skew can be positive or negative).\n\nUsing a timestamp requires the client's clock to be in sync with the server's clock. **Hawk** requires both the client\nclock and the server clock to use NTP to ensure synchronization. However, given the limitations of some client types\n(e.g. browsers) to deploy NTP, the server provides the client with its current time (in seconds precision) in response\nto a bad timestamp.\n\nThere is no expectation that the client will adjust its system clock to match the server (in fact, this would be a\npotential attack vector). Instead, the client only uses the server's time to calculate an offset used only\nfor communications with that particular server. 
The protocol rewards clients with synchronized clocks by reducing\nthe number of round trips required to authenticate the first request.\n\n\n## Usage Example\n\nServer code:\n\n```javascript\nvar Http = require('http');\nvar Hawk = require('hawk');\n\n\n// Credentials lookup function\n\nvar credentialsFunc = function (id, callback) {\n\n var credentials = {\n key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',\n algorithm: 'sha256',\n user: 'Steve'\n };\n\n return callback(null, credentials);\n};\n\n// Create HTTP server\n\nvar handler = function (req, res) {\n\n // Authenticate incoming request\n\n Hawk.server.authenticate(req, credentialsFunc, {}, function (err, credentials, artifacts) {\n\n // Prepare response\n\n var payload = (!err ? 'Hello ' + credentials.user + ' ' + artifacts.ext : 'Shoosh!');\n var headers = { 'Content-Type': 'text/plain' };\n\n // Generate Server-Authorization response header\n\n var header = Hawk.server.header(credentials, artifacts, { payload: payload, contentType: headers['Content-Type'] });\n headers['Server-Authorization'] = header;\n\n // Send the response back\n\n res.writeHead(!err ? 200 : 401, headers);\n res.end(payload);\n });\n};\n\n// Start server\n\nHttp.createServer(handler).listen(8000, 'example.com');\n```\n\nClient code:\n\n```javascript\nvar Request = require('request');\nvar Hawk = require('hawk');\n\n\n// Client credentials\n\nvar credentials = {\n id: 'dh37fgj492je',\n key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',\n algorithm: 'sha256'\n}\n\n// Request options\n\nvar requestOptions = {\n uri: 'http://example.com:8000/resource/1?b=1&a=2',\n method: 'GET',\n headers: {}\n};\n\n// Generate Authorization request header\n\nvar header = Hawk.client.header('http://example.com:8000/resource/1?b=1&a=2', 'GET', { credentials: credentials, ext: 'some-app-data' });\nrequestOptions.headers.Authorization = header.field;\n\n// Send authenticated request\n\nRequest(requestOptions, function (error, response, body) {\n\n // Authenticate the server's response\n\n var isValid = Hawk.client.authenticate(response, credentials, header.artifacts, { payload: body });\n\n // Output results\n\n console.log(response.statusCode + ': ' + body + (isValid ? ' (valid)' : ' (invalid)'));\n});\n```\n\n**Hawk** utilized the [**SNTP**](https://github.com/hueniverse/sntp) module for time sync management. By default, the local\nmachine time is used. To automatically retrieve and synchronice the clock within the application, use the SNTP 'start()' method.\n\n```javascript\nHawk.sntp.start();\n```\n\n\n## Protocol Example\n\nThe client attempts to access a protected resource without authentication, sending the following HTTP request to\nthe resource server:\n\n```\nGET /resource/1?b=1&a=2 HTTP/1.1\nHost: example.com:8000\n```\n\nThe resource server returns an authentication challenge.\n\n```\nHTTP/1.1 401 Unauthorized\nWWW-Authenticate: Hawk\n```\n\nThe client has previously obtained a set of **Hawk** credentials for accessing resources on the \"http://example.com/\"\nserver. The **Hawk** credentials issued to the client include the following attributes:\n\n* Key identifier: dh37fgj492je\n* Key: werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn\n* Algorithm: sha256\n\nThe client generates the authentication header by calculating a timestamp (e.g. 
the number of seconds since January 1,\n1970 00:00:00 GMT), generating a nonce, and constructing the normalized request string (each value followed by a newline\ncharacter):\n\n```\nhawk.1.header\n1353832234\nj4h3g2\nGET\n/resource/1?b=1&a=2\nexample.com\n8000\n\nsome-app-ext-data\n\n```\n\nThe request MAC is calculated using HMAC with the specified hash algorithm \"sha256\" and the key over the normalized request string.\nThe result is base64-encoded to produce the request MAC:\n\n```\n6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE=\n```\n\nThe client includes the **Hawk** key identifier, timestamp, nonce, application specific data, and request MAC with the request using\nthe HTTP `Authorization` request header field:\n\n```\nGET /resource/1?b=1&a=2 HTTP/1.1\nHost: example.com:8000\nAuthorization: Hawk id=\"dh37fgj492je\", ts=\"1353832234\", nonce=\"j4h3g2\", ext=\"some-app-ext-data\", mac=\"6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE=\"\n```\n\nThe server validates the request by calculating the request MAC again based on the request received and verifies the validity\nand scope of the **Hawk** credentials. If valid, the server responds with the requested resource.\n\n\n### Payload Validation\n\n**Hawk** provides optional payload validation. When generating the authentication header, the client calculates a payload hash\nusing the specified hash algorithm. The hash is calculated over the concatenated value of (each followed by a newline character):\n* `hawk.1.payload`\n* the content-type in lowercase, without any parameters (e.g. `application/json`)\n* the request payload prior to any content encoding (the exact representation requirements should be specified by the server for payloads other than simple single-part ascii to ensure interoperability)\n\nFor example:\n\n* Payload: `Thank you for flying Hawk`\n* Content Type: `text/plain`\n* Hash (sha256): `Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=`\n\nResults in the following input to the payload hash function (newline terminated values):\n\n```\nhawk.1.payload\ntext/plain\nThank you for flying Hawk\n\n```\n\nWhich produces the following hash value:\n\n```\nYi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=\n```\n\nThe client constructs the normalized request string (newline terminated values):\n\n```\nhawk.1.header\n1353832234\nj4h3g2\nPOST\n/resource/1?a=1&b=2\nexample.com\n8000\nYi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=\nsome-app-ext-data\n\n```\n\nThen calculates the request MAC and includes the **Hawk** key identifier, timestamp, nonce, payload hash, application specific data,\nand request MAC, with the request using the HTTP `Authorization` request header field:\n\n```\nPOST /resource/1?a=1&b=2 HTTP/1.1\nHost: example.com:8000\nAuthorization: Hawk id=\"dh37fgj492je\", ts=\"1353832234\", nonce=\"j4h3g2\", hash=\"Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=\", ext=\"some-app-ext-data\", mac=\"aSe1DERmZuRl3pI36/9BdZmnErTw3sNzOOAUlfeKjVw=\"\n```\n\nIt is up to the server if and when it validates the payload for any given request, based solely on it's security policy\nand the nature of the data included.\n\nIf the payload is available at the time of authentication, the server uses the hash value provided by the client to construct\nthe normalized string and validates the MAC. If the MAC is valid, the server calculates the payload hash and compares the value\nwith the provided payload hash in the header. 
In many cases, checking the MAC first is faster than calculating the payload hash.\n\nHowever, if the payload is not available at authentication time (e.g. too large to fit in memory, streamed elsewhere, or processed\nat a different stage in the application), the server may choose to defer payload validation for later by retaining the hash value\nprovided by the client after validating the MAC.\n\nIt is important to note that MAC validation does not mean the hash value provided by the client is valid, only that the value\nincluded in the header was not modified. Without calculating the payload hash on the server and comparing it to the value provided\nby the client, the payload may be modified by an attacker.\n\n\n## Response Payload Validation\n\n**Hawk** provides partial response payload validation. The server includes the `Server-Authorization` response header which enables the\nclient to authenticate the response and ensure it is talking to the right server. **Hawk** defines the HTTP `Server-Authorization` header\nas a response header using the exact same syntax as the `Authorization` request header field.\n\nThe header is contructed using the same process as the client's request header. The server uses the same credentials and other\nartifacts provided by the client to constructs the normalized request string. The `ext` and `hash` values are replaced with\nnew values based on the server response. The rest as identical to those used by the client.\n\nThe result MAC digest is included with the optional `hash` and `ext` values:\n\n```\nServer-Authorization: Hawk mac=\"XIJRsMl/4oL+nn+vKoeVZPdCHXB4yJkNnBbTbHFZUYE=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\", ext=\"response-specific\"\n```\n\n\n## Browser Support and Considerations\n\nA browser script is provided for including using a `<script>` tag in [lib/browser.js](/lib/browser.js). It's also a [component](http://component.io/hueniverse/hawk).\n\n**Hawk** relies on the _Server-Authorization_ and _WWW-Authenticate_ headers in its response to communicate with the client.\nTherefore, in case of CORS requests, it is important to consider sending _Access-Control-Expose-Headers_ with the value\n_\"WWW-Authenticate, Server-Authorization\"_ on each response from your server. As explained in the\n[specifications](http://www.w3.org/TR/cors/#access-control-expose-headers-response-header), it will indicate that these headers\ncan safely be accessed by the client (using getResponseHeader() on the XmlHttpRequest object). Otherwise you will be met with a\n[\"simple response header\"](http://www.w3.org/TR/cors/#simple-response-header) which excludes these fields and would prevent the\nHawk client from authenticating the requests.You can read more about the why and how in this\n[article](http://www.html5rocks.com/en/tutorials/cors/#toc-adding-cors-support-to-the-server)\n\n\n# Single URI Authorization\n\nThere are cases in which limited and short-term access to a protected resource is granted to a third party which does not\nhave access to the shared credentials. For example, displaying a protected image on a web page accessed by anyone. **Hawk**\nprovides limited support for such URIs in the form of a _bewit_ - a URI query parameter appended to the request URI which contains\nthe necessary credentials to authenticate the request.\n\nBecause of the significant security risks involved in issuing such access, bewit usage is purposely limited only to GET requests\nand for a finite period of time. 
Both the client and server can issue bewit credentials, however, the server should not use the same\ncredentials as the client to maintain clear traceability as to who issued which credentials.\n\nIn order to simplify implementation, bewit credentials do not support single-use policy and can be replayed multiple times within\nthe granted access timeframe. \n\n\n## Bewit Usage Example\n\nServer code:\n\n```javascript\nvar Http = require('http');\nvar Hawk = require('hawk');\n\n\n// Credentials lookup function\n\nvar credentialsFunc = function (id, callback) {\n\n var credentials = {\n key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',\n algorithm: 'sha256'\n };\n\n return callback(null, credentials);\n};\n\n// Create HTTP server\n\nvar handler = function (req, res) {\n\n Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {\n\n res.writeHead(!err ? 200 : 401, { 'Content-Type': 'text/plain' });\n res.end(!err ? 'Access granted' : 'Shoosh!');\n });\n};\n\nHttp.createServer(handler).listen(8000, 'example.com');\n```\n\nBewit code generation:\n\n```javascript\nvar Request = require('request');\nvar Hawk = require('hawk');\n\n\n// Client credentials\n\nvar credentials = {\n id: 'dh37fgj492je',\n key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',\n algorithm: 'sha256'\n}\n\n// Generate bewit\n\nvar duration = 60 * 5; // 5 Minutes\nvar bewit = Hawk.uri.getBewit('http://example.com:8080/resource/1?b=1&a=2', { credentials: credentials, ttlSec: duration, ext: 'some-app-data' });\nvar uri = 'http://example.com:8000/resource/1?b=1&a=2' + '&bewit=' + bewit;\n```\n\n\n# Security Considerations\n\nThe greatest sources of security risks are usually found not in **Hawk** but in the policies and procedures surrounding its use.\nImplementers are strongly encouraged to assess how this module addresses their security requirements. This section includes\nan incomplete list of security considerations that must be reviewed and understood before deploying **Hawk** on the server.\nMany of the protections provided in **Hawk** depends on whether and how they are used.\n\n### MAC Keys Transmission\n\n**Hawk** does not provide any mechanism for obtaining or transmitting the set of shared credentials required. Any mechanism used\nto obtain **Hawk** credentials must ensure that these transmissions are protected using transport-layer mechanisms such as TLS.\n\n### Confidentiality of Requests\n\nWhile **Hawk** provides a mechanism for verifying the integrity of HTTP requests, it provides no guarantee of request\nconfidentiality. Unless other precautions are taken, eavesdroppers will have full access to the request content. Servers should\ncarefully consider the types of data likely to be sent as part of such requests, and employ transport-layer security mechanisms\nto protect sensitive resources.\n\n### Spoofing by Counterfeit Servers\n\n**Hawk** provides limited verification of the server authenticity. When receiving a response back from the server, the server\nmay choose to include a response `Server-Authorization` header which the client can use to verify the response. However, it is up to\nthe server to determine when such measure is included, to up to the client to enforce that policy.\n\nA hostile party could take advantage of this by intercepting the client's requests and returning misleading or otherwise\nincorrect responses. 
Service providers should consider such attacks when developing services using this protocol, and should\nrequire transport-layer security for any requests where the authenticity of the resource server or of server responses is an issue.\n\n### Plaintext Storage of Credentials\n\nThe **Hawk** key functions the same way passwords do in traditional authentication systems. In order to compute the request MAC,\nthe server must have access to the key in plaintext form. This is in contrast, for example, to modern operating systems, which\nstore only a one-way hash of user credentials.\n\nIf an attacker were to gain access to these keys - or worse, to the server's database of all such keys - he or she would be able\nto perform any action on behalf of any resource owner. Accordingly, it is critical that servers protect these keys from unauthorized\naccess.\n\n### Entropy of Keys\n\nUnless a transport-layer security protocol is used, eavesdroppers will have full access to authenticated requests and request\nMAC values, and will thus be able to mount offline brute-force attacks to recover the key used. Servers should be careful to\nassign keys which are long enough, and random enough, to resist such attacks for at least the length of time that the **Hawk**\ncredentials are valid.\n\nFor example, if the credentials are valid for two weeks, servers should ensure that it is not possible to mount a brute force\nattack that recovers the key in less than two weeks. Of course, servers are urged to err on the side of caution, and use the\nlongest key reasonable.\n\nIt is equally important that the pseudo-random number generator (PRNG) used to generate these keys be of sufficiently high\nquality. Many PRNG implementations generate number sequences that may appear to be random, but which nevertheless exhibit\npatterns or other weaknesses which make cryptanalysis or brute force attacks easier. Implementers should be careful to use\ncryptographically secure PRNGs to avoid these problems.\n\n### Coverage Limitations\n\nThe request MAC only covers the HTTP `Host` header and optionally the `Content-Type` header. It does not cover any other headers\nwhich can often affect how the request body is interpreted by the server. If the server behavior is influenced by the presence\nor value of such headers, an attacker can manipulate the request headers without being detected. Implementers should use the\n`ext` feature to pass application-specific information via the `Authorization` header which is protected by the request MAC.\n\nThe response authentication, when performed, only covers the response payload, content-type, and the request information \nprovided by the client in it's request (method, resource, timestamp, nonce, etc.). It does not cover the HTTP status code or\nany other response header field (e.g. Location) which can affect the client's behaviour.\n\n### Future Time Manipulation\n\nThe protocol relies on a clock sync between the client and server. To accomplish this, the server informs the client of its\ncurrent time when an invalid timestamp is received.\n\nIf an attacker is able to manipulate this information and cause the client to use an incorrect time, it would be able to cause\nthe client to generate authenticated requests using time in the future. Such requests will fail when sent by the client, and will\nnot likely leave a trace on the server (given the common implementation of nonce, if at all enforced). 
The attacker will then\nbe able to replay the request at the correct time without detection.\n\nThe client must only use the time information provided by the server if:\n* it was delivered over a TLS connection and the server identity has been verified, or\n* the `tsm` MAC digest calculated using the same client credentials over the timestamp has been verified.\n\n### Client Clock Poisoning\n\nWhen receiving a request with a bad timestamp, the server provides the client with its current time. The client must never use\nthe time received from the server to adjust its own clock, and must only use it to calculate an offset for communicating with\nthat particular server.\n\n### Bewit Limitations\n\nSpecial care must be taken when issuing bewit credentials to third parties. Bewit credentials are valid until expiration and cannot\nbe revoked or limited without using other means. Whatever resource they grant access to will be completely exposed to anyone with\naccess to the bewit credentials which act as bearer credentials for that particular resource. While bewit usage is limited to GET\nrequests only and therefore cannot be used to perform transactions or change server state, it can still be used to expose private\nand sensitive information.\n\n### Host Header Forgery\n\nHawk validates the incoming request MAC against the incoming HTTP Host header. However, unless the optional `host` and `port`\noptions are used with `server.authenticate()`, a malicous client can mint new host names pointing to the server's IP address and\nuse that to craft an attack by sending a valid request that's meant for another hostname than the one used by the server. Server\nimplementors must manually verify that the host header received matches their expectation (or use the options mentioned above).\n\n# Frequently Asked Questions\n\n### Where is the protocol specification?\n\nIf you are looking for some prose explaining how all this works, **this is it**. **Hawk** is being developed as an open source\nproject instead of a standard. In other words, the [code](/hueniverse/hawk/tree/master/lib) is the specification. Not sure about\nsomething? Open an issue!\n\n### Is it done?\n\nAs of version 0.10.0, **Hawk** is feature-complete. However, until this module reaches version 1.0.0 it is considered experimental\nand is likely to change. This also means your feedback and contribution are very welcome. Feel free to open issues with questions\nand suggestions.\n\n### Where can I find **Hawk** implementations in other languages?\n\n**Hawk**'s only reference implementation is provided in JavaScript as a node.js module. However, it has been ported to other languages.\nThe full list is maintained [here](https://github.com/hueniverse/hawk/issues?labels=port&state=closed). Please add an issue if you are\nworking on another port. A cross-platform test-suite is in the works.\n\n### Why isn't the algorithm part of the challenge or dynamically negotiated?\n\nThe algorithm used is closely related to the key issued as different algorithms require different key sizes (and other\nrequirements). While some keys can be used for multiple algorithm, the protocol is designed to closely bind the key and algorithm\ntogether as part of the issued credentials.\n\n### Why is Host and Content-Type the only headers covered by the request MAC?\n\nIt is really hard to include other headers. Headers can be changed by proxies and other intermediaries and there is no\nwell-established way to normalize them. 
Many platforms change the case of header field names and values. The only\nstraight-forward solution is to include the headers in some blob (say, base64 encoded JSON) and include that with the request,\nan approach taken by JWT and other such formats. However, that design violates the HTTP header boundaries, repeats information,\nand introduces other security issues because firewalls will not be aware of these \"hidden\" headers. In addition, any information\nrepeated must be compared to the duplicated information in the header and therefore only moves the problem elsewhere.\n\n### Why not just use HTTP Digest?\n\nDigest requires pre-negotiation to establish a nonce. This means you can't just make a request - you must first send\na protocol handshake to the server. This pattern has become unacceptable for most web services, especially mobile\nwhere extra round-trip are costly.\n\n### Why bother with all this nonce and timestamp business?\n\n**Hawk** is an attempt to find a reasonable, practical compromise between security and usability. OAuth 1.0 got timestamp\nand nonces halfway right but failed when it came to scalability and consistent developer experience. **Hawk** addresses\nit by requiring the client to sync its clock, but provides it with tools to accomplish it.\n\nIn general, replay protection is a matter of application-specific threat model. It is less of an issue on a TLS-protected\nsystem where the clients are implemented using best practices and are under the control of the server. Instead of dropping\nreplay protection, **Hawk** offers a required time window and an optional nonce verification. Together, it provides developers\nwith the ability to decide how to enforce their security policy without impacting the client's implementation.\n\n### What are `app` and `dlg` in the authorization header and normalized mac string?\n\nThe original motivation for **Hawk** was to replace the OAuth 1.0 use cases. This included both a simple client-server mode which\nthis module is specifically designed for, and a delegated access mode which is being developed separately in\n[Oz](https://github.com/hueniverse/oz). In addition to the **Hawk** use cases, Oz requires another attribute: the application id `app`.\nThis provides binding between the credentials and the application in a way that prevents an attacker from tricking an application\nto use credentials issued to someone else. It also has an optional 'delegated-by' attribute `dlg` which is the application id of the\napplication the credentials were directly issued to. The goal of these two additions is to allow Oz to utilize **Hawk** directly,\nbut with the additional security of delegated credentials.\n\n### What is the purpose of the static strings used in each normalized MAC input?\n\nWhen calculating a hash or MAC, a static prefix (tag) is added. The prefix is used to prevent MAC values from being\nused or reused for a purpose other than what they were created for (i.e. prevents switching MAC values between a request,\nresponse, and a bewit use cases). It also protects against exploits created after a potential change in how the protocol\ncreates the normalized string. For example, if a future version would switch the order of nonce and timestamp, it\ncan create an exploit opportunity for cases where the nonce is similar in format to a timestamp.\n\n### Does **Hawk** have anything to do with OAuth?\n\nShort answer: no.\n\n**Hawk** was originally proposed as the OAuth MAC Token specification. 
However, the OAuth working group in its consistent\nincompetence failed to produce a final, usable solution to address one of the most popular use cases of OAuth 1.0 - using it\nto authenticate simple client-server transactions (i.e. two-legged). As you can guess, the OAuth working group is still hard\nat work to produce more garbage.\n\n**Hawk** provides a simple HTTP authentication scheme for making client-server requests. It does not address the OAuth use case\nof delegating access to a third party. If you are looking for an OAuth alternative, check out [Oz](https://github.com/hueniverse/oz).\n\n# Implementations\n\n- [Logibit Hawk in F#/.Net](https://github.com/logibit/logibit.hawk/)\n- [Tent Hawk in Ruby](https://github.com/tent/hawk-ruby)\n- [Wealdtech in Java](https://github.com/wealdtech/hawk)\n- [Kumar's Mohawk in Python](https://github.com/kumar303/mohawk/)\n\n# Acknowledgements\n\n**Hawk** is a derivative work of the [HTTP MAC Authentication Scheme](http://tools.ietf.org/html/draft-hammer-oauth-v2-mac-token-05) proposal\nco-authored by Ben Adida, Adam Barth, and Eran Hammer, which in turn was based on the OAuth 1.0 community specification.\n\nSpecial thanks to Ben Laurie for his always insightful feedback and advice.\n\nThe **Hawk** logo was created by [Chris Carrasco](http://chriscarrasco.com).\n",
- "readmeFilename": "README.md",
+ "license": "BSD-3-Clause",
+ "gitHead": "fdb9d05e383d5237631eaddc4f51422e54fa8b52",
"bugs": {
"url": "https://github.com/hueniverse/hawk/issues"
},
"homepage": "https://github.com/hueniverse/hawk#readme",
- "_id": "hawk@2.3.1",
- "_shasum": "1e731ce39447fa1d0f6d707f7bceebec0fd1ec1f",
- "_resolved": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz",
- "_from": "hawk@>=2.3.0 <2.4.0"
+ "_id": "hawk@3.1.0",
+ "_shasum": "8a13ae19977ec607602f3f0b9fd676f18c384e44",
+ "_from": "hawk@>=3.1.0 <3.2.0",
+ "_npmVersion": "2.10.0",
+ "_nodeVersion": "0.10.38",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hammer.io"
+ },
+ "dist": {
+ "shasum": "8a13ae19977ec607602f3f0b9fd676f18c384e44",
+ "tarball": "http://registry.npmjs.org/hawk/-/hawk-3.1.0.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.0.tgz",
+ "readme": "ERROR: No README data found!"
}
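
Editorial aside on the dependency hunk above: the boom range moves from the loose "2.x.x" to the caret range "^2.8.x". The sketch below is not part of the diff; it assumes the standard node-semver package and simply illustrates how the two ranges differ.

```javascript
// Minimal sketch, assuming the node-semver package is installed ("npm install semver").
var semver = require('semver');

// "2.x.x" accepts any 2.y.z release, while "^2.8.x" requires at least 2.8.0
// and still stays below 3.0.0.
console.log(semver.satisfies('2.7.0', '2.x.x'));   // true
console.log(semver.satisfies('2.7.0', '^2.8.x'));  // false
console.log(semver.satisfies('2.9.1', '^2.8.x'));  // true
console.log(semver.satisfies('3.0.0', '^2.8.x'));  // false
```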
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/browser.js b/deps/npm/node_modules/request/node_modules/hawk/test/browser.js
index a45241389..49dc4cadd 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/browser.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/browser.js
@@ -82,7 +82,7 @@ describe('Browser', function () {
});
});
- describe('#bewit', function () {
+ describe('bewit()', function () {
it('returns a valid bewit value', function (done) {
@@ -516,7 +516,7 @@ describe('Browser', function () {
var localStorage = new Browser.internals.LocalStorage();
- Browser.utils.setStorage(localStorage)
+ Browser.utils.setStorage(localStorage);
Browser.utils.setNtpOffset(60 * 60 * 1000);
var header = Browser.client.header('http://example.com:8080/resource/4?filter=a', req.method, { credentials: credentials, ext: 'some-app-data' });
@@ -741,7 +741,7 @@ describe('Browser', function () {
describe('client', function () {
- describe('#header', function () {
+ describe('header()', function () {
it('returns a valid authorization header (sha1)', function (done) {
@@ -950,7 +950,7 @@ describe('Browser', function () {
});
});
- describe('#authenticate', function () {
+ describe('authenticate()', function () {
it('skips tsm validation when missing ts', function (done) {
@@ -1119,7 +1119,7 @@ describe('Browser', function () {
});
});
- describe('#message', function () {
+ describe('message()', function () {
it('generates an authorization then successfully parse it', function (done) {
@@ -1270,7 +1270,7 @@ describe('Browser', function () {
});
});
- describe('#authenticateTimestamp', function (done) {
+ describe('authenticateTimestamp()', function (done) {
it('validates a timestamp', function (done) {
@@ -1342,7 +1342,7 @@ describe('Browser', function () {
describe('utils', function () {
- describe('#setStorage', function () {
+ describe('setStorage()', function () {
it('sets storage for the first time', function (done) {
@@ -1358,21 +1358,29 @@ describe('Browser', function () {
});
});
- describe('#setNtpOffset', function (done) {
+ describe('setNtpOffset()', function (done) {
- it('catches localStorage errors', function (done) {
+ it('catches localStorage errors', { parallel: false }, function (done) {
var orig = Browser.utils.storage.setItem;
- var error = console.error;
+ var consoleOrig = console.error;
var count = 0;
- console.error = function () { if (count++ === 2) { console.error = error; } };
+ console.error = function () {
+
+ if (count++ === 2) {
+
+ console.error = consoleOrig;
+ }
+ };
+
Browser.utils.storage.setItem = function () {
Browser.utils.storage.setItem = orig;
- throw new Error()
+ throw new Error();
};
expect(function () {
+
Browser.utils.setNtpOffset(100);
}).not.to.throw();
@@ -1380,7 +1388,7 @@ describe('Browser', function () {
});
});
- describe('#parseAuthorizationHeader', function (done) {
+ describe('parseAuthorizationHeader()', function (done) {
it('returns null on missing header', function (done) {
@@ -1419,7 +1427,7 @@ describe('Browser', function () {
});
});
- describe('#parseUri', function () {
+ describe('parseUri()', function () {
it('returns empty port when unknown scheme', function (done) {
@@ -1436,18 +1444,16 @@ describe('Browser', function () {
});
});
- var str = "https://www.google.ca/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8#q=url";
- var base64str = "aHR0cHM6Ly93d3cuZ29vZ2xlLmNhL3dlYmhwP3NvdXJjZWlkPWNocm9tZS1pbnN0YW50Jmlvbj0xJmVzcHY9MiZpZT1VVEYtOCNxPXVybA";
+ var str = 'https://www.google.ca/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8#q=url';
+ var base64str = 'aHR0cHM6Ly93d3cuZ29vZ2xlLmNhL3dlYmhwP3NvdXJjZWlkPWNocm9tZS1pbnN0YW50Jmlvbj0xJmVzcHY9MiZpZT1VVEYtOCNxPXVybA';
- describe('#base64urlEncode', function () {
+ describe('base64urlEncode()', function () {
it('should base64 URL-safe decode a string', function (done) {
expect(Browser.utils.base64urlEncode(str)).to.equal(base64str);
done();
});
-
});
-
});
});
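
Aside on the setNtpOffset hunk above: the test temporarily swaps out console.error and storage.setItem, restoring the originals once the expected calls have happened. Below is a standalone sketch of that save-stub-restore pattern using plain Node assert instead of the lab/code harness; the call count is illustrative, not taken from the diff.

```javascript
var assert = require('assert');

// Save the original, install a counting stub, and restore it after the
// expected number of calls (two in this sketch).
var consoleOrig = console.error;
var count = 0;
console.error = function () {

    if (count++ === 1) {

        console.error = consoleOrig;            // restore once the expected calls arrived
    }
};

console.error('first');                          // swallowed by the stub
console.error('second');                         // swallowed, then the original is restored
assert.strictEqual(console.error, consoleOrig);  // the real console.error is back
```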
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/client.js b/deps/npm/node_modules/request/node_modules/hawk/test/client.js
index 5983897b2..d6be231ae 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/client.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/client.js
@@ -19,425 +19,422 @@ var it = lab.test;
var expect = Code.expect;
-describe('Hawk', function () {
+describe('Client', function () {
- describe('client', function () {
+ describe('header()', function () {
- describe('#header', function () {
+ it('returns a valid authorization header (sha1)', function (done) {
- it('returns a valid authorization header (sha1)', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
- var header = Hawk.client.header('http://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about' }).field;
- expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="bsvY3IfUllw6V5rvk4tStEvpBhE=", ext="Bazinga!", mac="qbf1ZPG/r/e06F4ht+T77LXi5vw="');
- done();
- });
+ var header = Hawk.client.header('http://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about' }).field;
+ expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="bsvY3IfUllw6V5rvk4tStEvpBhE=", ext="Bazinga!", mac="qbf1ZPG/r/e06F4ht+T77LXi5vw="');
+ done();
+ });
+
+ it('returns a valid authorization header (sha256)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha256'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain' }).field;
+ expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", ext="Bazinga!", mac="q1CwFoSHzPZSkbIvl0oYlD+91rBUEvFk763nMjMndj8="');
+ done();
+ });
+
+ it('returns a valid authorization header (no ext)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha256'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain' }).field;
+ expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", mac="HTgtd0jPI6E4izx8e4OHdO36q00xFCU0FolNq3RiCYs="');
+ done();
+ });
+
+ it('returns a valid authorization header (null ext)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha256'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain', ext: null }).field;
+ expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", mac="HTgtd0jPI6E4izx8e4OHdO36q00xFCU0FolNq3RiCYs="');
+ done();
+ });
+
+ it('returns a valid authorization header (empty payload)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha256'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: '', contentType: 'text/plain' }).field;
+ expect(header).to.equal('Hawk id=\"123456\", ts=\"1353809207\", nonce=\"Ygvqdz\", hash=\"q/t+NNAkQZNlq/aAD6PlexImwQTxwgT2MahfTa9XRLA=\", mac=\"U5k16YEzn3UnBHKeBzsDXn067Gu3R4YaY6xOt9PYRZM=\"');
+ done();
+ });
+
+ it('returns a valid authorization header (pre hashed payload)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha256'
+ };
+
+ var options = { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain' };
+ options.hash = Hawk.crypto.calculatePayloadHash(options.payload, credentials.algorithm, options.contentType);
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', options).field;
+ expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", mac="HTgtd0jPI6E4izx8e4OHdO36q00xFCU0FolNq3RiCYs="');
+ done();
+ });
+
+ it('errors on missing uri', function (done) {
+
+ var header = Hawk.client.header('', 'POST');
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid argument type');
+ done();
+ });
+
+ it('errors on invalid uri', function (done) {
+
+ var header = Hawk.client.header(4, 'POST');
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid argument type');
+ done();
+ });
+
+ it('errors on missing method', function (done) {
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', '');
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid argument type');
+ done();
+ });
+
+ it('errors on invalid method', function (done) {
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 5);
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid argument type');
+ done();
+ });
+
+ it('errors on missing options', function (done) {
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST');
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid argument type');
+ done();
+ });
+
+ it('errors on invalid credentials (id)', function (done) {
+
+ var credentials = {
+ key: '2983d45yun89q',
+ algorithm: 'sha256'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207 });
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid credential object');
+ done();
+ });
+
+ it('errors on missing credentials', function (done) {
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { ext: 'Bazinga!', timestamp: 1353809207 });
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid credential object');
+ done();
+ });
+
+ it('errors on invalid credentials', function (done) {
+
+ var credentials = {
+ id: '123456',
+ algorithm: 'sha256'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207 });
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Invalid credential object');
+ done();
+ });
+
+ it('errors on invalid algorithm', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'hmac-sha-0'
+ };
+
+ var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, payload: 'something, anything!', ext: 'Bazinga!', timestamp: 1353809207 });
+ expect(header.field).to.equal('');
+ expect(header.err).to.equal('Unknown algorithm');
+ done();
+ });
+ });
+
+ describe('authenticate()', function () {
+
+ it('returns false on invalid header', function (done) {
+
+ var res = {
+ headers: {
+ 'server-authorization': 'Hawk mac="abc", bad="xyz"'
+ }
+ };
+
+ expect(Hawk.client.authenticate(res, {})).to.equal(false);
+ done();
+ });
+
+ it('returns false on invalid mac', function (done) {
+
+ var res = {
+ headers: {
+ 'content-type': 'text/plain',
+ 'server-authorization': 'Hawk mac="_IJRsMl/4oL+nn+vKoeVZPdCHXB4yJkNnBbTbHFZUYE=", hash="f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=", ext="response-specific"'
+ }
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1362336900',
+ nonce: 'eb5S_L',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ app: undefined,
+ dlg: undefined,
+ mac: 'BlmSe8K+pbKIb6YsZCnt4E1GrYvY1AaYayNR82dGpIk=',
+ id: '123456'
+ };
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ expect(Hawk.client.authenticate(res, credentials, artifacts)).to.equal(false);
+ done();
+ });
+
+ it('returns true on ignoring hash', function (done) {
+
+ var res = {
+ headers: {
+ 'content-type': 'text/plain',
+ 'server-authorization': 'Hawk mac="XIJRsMl/4oL+nn+vKoeVZPdCHXB4yJkNnBbTbHFZUYE=", hash="f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=", ext="response-specific"'
+ }
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1362336900',
+ nonce: 'eb5S_L',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ app: undefined,
+ dlg: undefined,
+ mac: 'BlmSe8K+pbKIb6YsZCnt4E1GrYvY1AaYayNR82dGpIk=',
+ id: '123456'
+ };
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ expect(Hawk.client.authenticate(res, credentials, artifacts)).to.equal(true);
+ done();
+ });
+
+ it('fails on invalid WWW-Authenticate header format', function (done) {
+
+ var header = 'Hawk ts="1362346425875", tsm="PhwayS28vtnn3qbv0mqRBYSXebN/zggEtucfeZ620Zo=", x="Stale timestamp"';
+ expect(Hawk.client.authenticate({ headers: { 'www-authenticate': header } }, {})).to.equal(false);
+ done();
+ });
- it('returns a valid authorization header (sha256)', function (done) {
+ it('fails on invalid WWW-Authenticate header format', function (done) {
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha256'
- };
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain' }).field;
- expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", ext="Bazinga!", mac="q1CwFoSHzPZSkbIvl0oYlD+91rBUEvFk763nMjMndj8="');
- done();
- });
-
- it('returns a valid authorization header (no ext)', function (done) {
+ var header = 'Hawk ts="1362346425875", tsm="hwayS28vtnn3qbv0mqRBYSXebN/zggEtucfeZ620Zo=", error="Stale timestamp"';
+ expect(Hawk.client.authenticate({ headers: { 'www-authenticate': header } }, credentials)).to.equal(false);
+ done();
+ });
+
+ it('skips tsm validation when missing ts', function (done) {
+
+ var header = 'Hawk error="Stale timestamp"';
+ expect(Hawk.client.authenticate({ headers: { 'www-authenticate': header } }, {})).to.equal(true);
+ done();
+ });
+ });
+
+ describe('message()', function () {
+
+ it('generates authorization', function (done) {
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha256'
- };
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain' }).field;
- expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", mac="HTgtd0jPI6E4izx8e4OHdO36q00xFCU0FolNq3RiCYs="');
- done();
- });
-
- it('returns a valid authorization header (null ext)', function (done) {
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha256'
- };
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain', ext: null }).field;
- expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", mac="HTgtd0jPI6E4izx8e4OHdO36q00xFCU0FolNq3RiCYs="');
- done();
- });
-
- it('returns a valid authorization header (empty payload)', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha256'
- };
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: '', contentType: 'text/plain' }).field;
- expect(header).to.equal('Hawk id=\"123456\", ts=\"1353809207\", nonce=\"Ygvqdz\", hash=\"q/t+NNAkQZNlq/aAD6PlexImwQTxwgT2MahfTa9XRLA=\", mac=\"U5k16YEzn3UnBHKeBzsDXn067Gu3R4YaY6xOt9PYRZM=\"');
- done();
- });
-
- it('returns a valid authorization header (pre hashed payload)', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha256'
- };
-
- var options = { credentials: credentials, timestamp: 1353809207, nonce: 'Ygvqdz', payload: 'something to write about', contentType: 'text/plain' };
- options.hash = Hawk.crypto.calculatePayloadHash(options.payload, credentials.algorithm, options.contentType);
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', options).field;
- expect(header).to.equal('Hawk id="123456", ts="1353809207", nonce="Ygvqdz", hash="2QfCt3GuY9HQnHWyWD3wX68ZOKbynqlfYmuO2ZBRqtY=", mac="HTgtd0jPI6E4izx8e4OHdO36q00xFCU0FolNq3RiCYs="');
- done();
- });
-
- it('errors on missing uri', function (done) {
-
- var header = Hawk.client.header('', 'POST');
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid argument type');
- done();
- });
-
- it('errors on invalid uri', function (done) {
-
- var header = Hawk.client.header(4, 'POST');
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid argument type');
- done();
- });
-
- it('errors on missing method', function (done) {
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', '');
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid argument type');
- done();
- });
-
- it('errors on invalid method', function (done) {
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 5);
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid argument type');
- done();
- });
-
- it('errors on missing options', function (done) {
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST');
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid argument type');
- done();
- });
-
- it('errors on invalid credentials (id)', function (done) {
-
- var credentials = {
- key: '2983d45yun89q',
- algorithm: 'sha256'
- };
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207 });
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid credential object');
- done();
- });
-
- it('errors on missing credentials', function (done) {
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { ext: 'Bazinga!', timestamp: 1353809207 });
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid credential object');
- done();
- });
-
- it('errors on invalid credentials', function (done) {
-
- var credentials = {
- id: '123456',
- algorithm: 'sha256'
- };
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, ext: 'Bazinga!', timestamp: 1353809207 });
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Invalid credential object');
- done();
- });
-
- it('errors on invalid algorithm', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'hmac-sha-0'
- };
-
- var header = Hawk.client.header('https://example.net/somewhere/over/the/rainbow', 'POST', { credentials: credentials, payload: 'something, anything!', ext: 'Bazinga!', timestamp: 1353809207 });
- expect(header.field).to.equal('');
- expect(header.err).to.equal('Unknown algorithm');
- done();
- });
+ var auth = Hawk.client.message('example.com', 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.exist();
+ expect(auth.ts).to.equal(1353809207);
+ expect(auth.nonce).to.equal('abc123');
+ done();
});
- describe('#authenticate', function () {
-
- it('returns false on invalid header', function (done) {
-
- var res = {
- headers: {
- 'server-authorization': 'Hawk mac="abc", bad="xyz"'
- }
- };
-
- expect(Hawk.client.authenticate(res, {})).to.equal(false);
- done();
- });
-
- it('returns false on invalid mac', function (done) {
-
- var res = {
- headers: {
- 'content-type': 'text/plain',
- 'server-authorization': 'Hawk mac="_IJRsMl/4oL+nn+vKoeVZPdCHXB4yJkNnBbTbHFZUYE=", hash="f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=", ext="response-specific"'
- }
- };
-
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1362336900',
- nonce: 'eb5S_L',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- app: undefined,
- dlg: undefined,
- mac: 'BlmSe8K+pbKIb6YsZCnt4E1GrYvY1AaYayNR82dGpIk=',
- id: '123456'
- };
-
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
-
- expect(Hawk.client.authenticate(res, credentials, artifacts)).to.equal(false);
- done();
- });
-
- it('returns true on ignoring hash', function (done) {
-
- var res = {
- headers: {
- 'content-type': 'text/plain',
- 'server-authorization': 'Hawk mac="XIJRsMl/4oL+nn+vKoeVZPdCHXB4yJkNnBbTbHFZUYE=", hash="f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=", ext="response-specific"'
- }
- };
-
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1362336900',
- nonce: 'eb5S_L',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- app: undefined,
- dlg: undefined,
- mac: 'BlmSe8K+pbKIb6YsZCnt4E1GrYvY1AaYayNR82dGpIk=',
- id: '123456'
- };
-
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
-
- expect(Hawk.client.authenticate(res, credentials, artifacts)).to.equal(true);
- done();
- });
-
- it('fails on invalid WWW-Authenticate header format', function (done) {
-
- var header = 'Hawk ts="1362346425875", tsm="PhwayS28vtnn3qbv0mqRBYSXebN/zggEtucfeZ620Zo=", x="Stale timestamp"';
- expect(Hawk.client.authenticate({ headers: { 'www-authenticate': header } }, {})).to.equal(false);
- done();
- });
-
- it('fails on invalid WWW-Authenticate header format', function (done) {
-
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
-
- var header = 'Hawk ts="1362346425875", tsm="hwayS28vtnn3qbv0mqRBYSXebN/zggEtucfeZ620Zo=", error="Stale timestamp"';
- expect(Hawk.client.authenticate({ headers: { 'www-authenticate': header } }, credentials)).to.equal(false);
- done();
- });
-
- it('skips tsm validation when missing ts', function (done) {
-
- var header = 'Hawk error="Stale timestamp"';
- expect(Hawk.client.authenticate({ headers: { 'www-authenticate': header } }, {})).to.equal(true);
- done();
- });
+ it('errors on invalid host', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message(5, 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on invalid port', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', '80', 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
});
- describe('#message', function () {
-
- it('generates authorization', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
+ it('errors on missing host', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', 0, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on null message', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', 80, null, { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on missing message', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', 80, undefined, { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on invalid message', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', 80, 5, { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on missing options', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', 80, 'I am the boodyman');
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on invalid credentials (id)', function (done) {
+
+ var credentials = {
+ key: '2983d45yun89q',
+ algorithm: 'sha1'
+ };
+
+ var auth = Hawk.client.message('example.com', 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('errors on invalid credentials (key)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ algorithm: 'sha1'
+ };
- var auth = Hawk.client.message('example.com', 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.exist();
- expect(auth.ts).to.equal(1353809207);
- expect(auth.nonce).to.equal('abc123');
- done();
- });
-
- it('errors on invalid host', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message(5, 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on invalid port', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', '80', 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on missing host', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 0, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on null message', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 80, null, { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on missing message', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 80, undefined, { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on invalid message', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 80, 5, { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on missing options', function (done) {
-
- var credentials = {
- id: '123456',
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 80, 'I am the boodyman');
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on invalid credentials (id)', function (done) {
-
- var credentials = {
- key: '2983d45yun89q',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
-
- it('errors on invalid credentials (key)', function (done) {
-
- var credentials = {
- id: '123456',
- algorithm: 'sha1'
- };
-
- var auth = Hawk.client.message('example.com', 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
- expect(auth).to.not.exist();
- done();
- });
+ var auth = Hawk.client.message('example.com', 80, 'I am the boodyman', { credentials: credentials, timestamp: 1353809207, nonce: 'abc123' });
+ expect(auth).to.not.exist();
+ done();
});
});
});
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/crypto.js b/deps/npm/node_modules/request/node_modules/hawk/test/crypto.js
index f2d3d4580..1131628bf 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/crypto.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/crypto.js
@@ -18,69 +18,53 @@ var it = lab.test;
var expect = Code.expect;
-describe('Hawk', function () {
-
- describe('Crypto', function () {
-
- describe('#generateNormalizedString', function () {
-
- it('should return a valid normalized string', function (done) {
-
- expect(Hawk.crypto.generateNormalizedString('header', {
- credentials: {
- key: 'dasdfasdf',
- algorithm: 'sha256'
- },
- ts: 1357747017,
- nonce: 'k3k4j5',
- method: 'GET',
- resource: '/resource/something',
- host: 'example.com',
- port: 8080
- })).to.equal('hawk.1.header\n1357747017\nk3k4j5\nGET\n/resource/something\nexample.com\n8080\n\n\n');
-
- done();
- });
-
- it('should return a valid normalized string (ext)', function (done) {
-
- expect(Hawk.crypto.generateNormalizedString('header', {
- credentials: {
- key: 'dasdfasdf',
- algorithm: 'sha256'
- },
- ts: 1357747017,
- nonce: 'k3k4j5',
- method: 'GET',
- resource: '/resource/something',
- host: 'example.com',
- port: 8080,
- ext: 'this is some app data'
- })).to.equal('hawk.1.header\n1357747017\nk3k4j5\nGET\n/resource/something\nexample.com\n8080\n\nthis is some app data\n');
-
- done();
- });
-
- it('should return a valid normalized string (payload + ext)', function (done) {
-
- expect(Hawk.crypto.generateNormalizedString('header', {
- credentials: {
- key: 'dasdfasdf',
- algorithm: 'sha256'
- },
- ts: 1357747017,
- nonce: 'k3k4j5',
- method: 'GET',
- resource: '/resource/something',
- host: 'example.com',
- port: 8080,
- hash: 'U4MKKSmiVxk37JCCrAVIjV/OhB3y+NdwoCr6RShbVkE=',
- ext: 'this is some app data'
- })).to.equal('hawk.1.header\n1357747017\nk3k4j5\nGET\n/resource/something\nexample.com\n8080\nU4MKKSmiVxk37JCCrAVIjV/OhB3y+NdwoCr6RShbVkE=\nthis is some app data\n');
-
- done();
- });
+describe('Crypto', function () {
+
+ describe('generateNormalizedString()', function () {
+
+ it('should return a valid normalized string', function (done) {
+
+ expect(Hawk.crypto.generateNormalizedString('header', {
+ ts: 1357747017,
+ nonce: 'k3k4j5',
+ method: 'GET',
+ resource: '/resource/something',
+ host: 'example.com',
+ port: 8080
+ })).to.equal('hawk.1.header\n1357747017\nk3k4j5\nGET\n/resource/something\nexample.com\n8080\n\n\n');
+
+ done();
+ });
+
+ it('should return a valid normalized string (ext)', function (done) {
+
+ expect(Hawk.crypto.generateNormalizedString('header', {
+ ts: 1357747017,
+ nonce: 'k3k4j5',
+ method: 'GET',
+ resource: '/resource/something',
+ host: 'example.com',
+ port: 8080,
+ ext: 'this is some app data'
+ })).to.equal('hawk.1.header\n1357747017\nk3k4j5\nGET\n/resource/something\nexample.com\n8080\n\nthis is some app data\n');
+
+ done();
+ });
+
+ it('should return a valid normalized string (payload + ext)', function (done) {
+
+ expect(Hawk.crypto.generateNormalizedString('header', {
+ ts: 1357747017,
+ nonce: 'k3k4j5',
+ method: 'GET',
+ resource: '/resource/something',
+ host: 'example.com',
+ port: 8080,
+ hash: 'U4MKKSmiVxk37JCCrAVIjV/OhB3y+NdwoCr6RShbVkE=',
+ ext: 'this is some app data'
+ })).to.equal('hawk.1.header\n1357747017\nk3k4j5\nGET\n/resource/something\nexample.com\n8080\nU4MKKSmiVxk37JCCrAVIjV/OhB3y+NdwoCr6RShbVkE=\nthis is some app data\n');
+
+ done();
});
});
});
-
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/message.js b/deps/npm/node_modules/request/node_modules/hawk/test/message.js
deleted file mode 100755
index b0494cc45..000000000
--- a/deps/npm/node_modules/request/node_modules/hawk/test/message.js
+++ /dev/null
@@ -1,261 +0,0 @@
-// Load modules
-
-var Url = require('url');
-var Code = require('code');
-var Hawk = require('../lib');
-var Hoek = require('hoek');
-var Lab = require('lab');
-
-
-// Declare internals
-
-var internals = {};
-
-
-// Test shortcuts
-
-var lab = exports.lab = Lab.script();
-var describe = lab.experiment;
-var it = lab.test;
-var expect = Code.expect;
-
-
-describe('Hawk', function () {
-
- var credentialsFunc = function (id, callback) {
-
- var credentials = {
- id: id,
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: (id === '1' ? 'sha1' : 'sha256'),
- user: 'steve'
- };
-
- return callback(null, credentials);
- };
-
- it('should generate an authorization then successfully parse it', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
-
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- done();
- });
- });
- });
-
- it('should fail authorization on mismatching host', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- Hawk.server.authenticateMessage('example1.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Bad mac');
- done();
- });
- });
- });
-
- it('should fail authorization on stale timestamp', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { localtimeOffsetMsec: 100000 }, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Stale timestamp');
- done();
- });
- });
- });
-
- it('overrides timestampSkewSec', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials, localtimeOffsetMsec: 100000 });
- expect(auth).to.exist();
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { timestampSkewSec: 500 }, function (err, credentials) {
-
- expect(err).to.not.exist();
- done();
- });
- });
- });
-
- it('should fail authorization on invalid authorization', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
- delete auth.id;
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid authorization');
- done();
- });
- });
- });
-
- it('should fail authorization on bad hash', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message1', auth, credentialsFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Bad message hash');
- done();
- });
- });
- });
-
- it('should fail authorization on nonce error', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { nonceFunc: function (nonce, ts, callback) { callback (new Error('kaboom')); } }, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid nonce');
- done();
- });
- });
- });
-
- it('should fail authorization on credentials error', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- var errFunc = function (id, callback) {
-
- callback(new Error('kablooey'));
- };
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('kablooey');
- done();
- });
- });
- });
-
- it('should fail authorization on missing credentials', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- var errFunc = function (id, callback) {
-
- callback();
- };
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Unknown credentials');
- done();
- });
- });
- });
-
- it('should fail authorization on invalid credentials', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- var errFunc = function (id, callback) {
-
- callback(null, {});
- };
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid credentials');
- done();
- });
- });
- });
-
- it('should fail authorization on invalid credentials algorithm', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- expect(auth).to.exist();
-
- var errFunc = function (id, callback) {
-
- callback(null, { key: '123', algorithm: '456' });
- };
-
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
-
- expect(err).to.exist();
- expect(err.message).to.equal('Unknown algorithm');
- done();
- });
- });
- });
-
- it('should fail on missing host', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var auth = Hawk.client.message(null, 8080, 'some message', { credentials: credentials });
- expect(auth).to.not.exist();
- done();
- });
- });
-
- it('should fail on missing credentials', function (done) {
-
- var auth = Hawk.client.message('example.com', 8080, 'some message', {});
- expect(auth).to.not.exist();
- done();
- });
-
- it('should fail on invalid algorithm', function (done) {
-
- credentialsFunc('123456', function (err, credentials) {
-
- var creds = Hoek.clone(credentials);
- creds.algorithm = 'blah';
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: creds });
- expect(auth).to.not.exist();
- done();
- });
- });
-});
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/readme.js b/deps/npm/node_modules/request/node_modules/hawk/test/readme.js
index 60af19983..a46626466 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/readme.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/readme.js
@@ -19,79 +19,76 @@ var it = lab.test;
var expect = Code.expect;
-describe('Hawk', function () {
+describe('README', function () {
- describe('README', function () {
+ describe('core', function () {
- describe('core', function () {
+ var credentials = {
+ id: 'dh37fgj492je',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256'
+ };
- var credentials = {
- id: 'dh37fgj492je',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256'
- };
+ var options = {
+ credentials: credentials,
+ timestamp: 1353832234,
+ nonce: 'j4h3g2',
+ ext: 'some-app-ext-data'
+ };
- var options = {
- credentials: credentials,
- timestamp: 1353832234,
- nonce: 'j4h3g2',
- ext: 'some-app-ext-data'
- };
+ it('should generate a header protocol example', function (done) {
- it('should generate a header protocol example', function (done) {
+ var header = Hawk.client.header('http://example.com:8000/resource/1?b=1&a=2', 'GET', options).field;
- var header = Hawk.client.header('http://example.com:8000/resource/1?b=1&a=2', 'GET', options).field;
+ expect(header).to.equal('Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", ext="some-app-ext-data", mac="6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE="');
+ done();
+ });
- expect(header).to.equal('Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", ext="some-app-ext-data", mac="6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE="');
- done();
- });
+ it('should generate a normalized string protocol example', function (done) {
- it('should generate a normalized string protocol example', function (done) {
-
- var normalized = Hawk.crypto.generateNormalizedString('header', {
- credentials: credentials,
- ts: options.timestamp,
- nonce: options.nonce,
- method: 'GET',
- resource: '/resource?a=1&b=2',
- host: 'example.com',
- port: 8000,
- ext: options.ext
- });
-
- expect(normalized).to.equal('hawk.1.header\n1353832234\nj4h3g2\nGET\n/resource?a=1&b=2\nexample.com\n8000\n\nsome-app-ext-data\n');
- done();
+ var normalized = Hawk.crypto.generateNormalizedString('header', {
+ credentials: credentials,
+ ts: options.timestamp,
+ nonce: options.nonce,
+ method: 'GET',
+ resource: '/resource?a=1&b=2',
+ host: 'example.com',
+ port: 8000,
+ ext: options.ext
});
- var payloadOptions = Hoek.clone(options);
- payloadOptions.payload = 'Thank you for flying Hawk';
- payloadOptions.contentType = 'text/plain';
+ expect(normalized).to.equal('hawk.1.header\n1353832234\nj4h3g2\nGET\n/resource?a=1&b=2\nexample.com\n8000\n\nsome-app-ext-data\n');
+ done();
+ });
- it('should generate a header protocol example (with payload)', function (done) {
+ var payloadOptions = Hoek.clone(options);
+ payloadOptions.payload = 'Thank you for flying Hawk';
+ payloadOptions.contentType = 'text/plain';
- var header = Hawk.client.header('http://example.com:8000/resource/1?b=1&a=2', 'POST', payloadOptions).field;
+ it('should generate a header protocol example (with payload)', function (done) {
- expect(header).to.equal('Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", hash="Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=", ext="some-app-ext-data", mac="aSe1DERmZuRl3pI36/9BdZmnErTw3sNzOOAUlfeKjVw="');
- done();
- });
+ var header = Hawk.client.header('http://example.com:8000/resource/1?b=1&a=2', 'POST', payloadOptions).field;
- it('should generate a normalized string protocol example (with payload)', function (done) {
-
- var normalized = Hawk.crypto.generateNormalizedString('header', {
- credentials: credentials,
- ts: options.timestamp,
- nonce: options.nonce,
- method: 'POST',
- resource: '/resource?a=1&b=2',
- host: 'example.com',
- port: 8000,
- hash: Hawk.crypto.calculatePayloadHash(payloadOptions.payload, credentials.algorithm, payloadOptions.contentType),
- ext: options.ext
- });
-
- expect(normalized).to.equal('hawk.1.header\n1353832234\nj4h3g2\nPOST\n/resource?a=1&b=2\nexample.com\n8000\nYi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=\nsome-app-ext-data\n');
- done();
+ expect(header).to.equal('Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", hash="Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=", ext="some-app-ext-data", mac="aSe1DERmZuRl3pI36/9BdZmnErTw3sNzOOAUlfeKjVw="');
+ done();
+ });
+
+ it('should generate a normalized string protocol example (with payload)', function (done) {
+
+ var normalized = Hawk.crypto.generateNormalizedString('header', {
+ credentials: credentials,
+ ts: options.timestamp,
+ nonce: options.nonce,
+ method: 'POST',
+ resource: '/resource?a=1&b=2',
+ host: 'example.com',
+ port: 8000,
+ hash: Hawk.crypto.calculatePayloadHash(payloadOptions.payload, credentials.algorithm, payloadOptions.contentType),
+ ext: options.ext
});
+
+ expect(normalized).to.equal('hawk.1.header\n1353832234\nj4h3g2\nPOST\n/resource?a=1&b=2\nexample.com\n8000\nYi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=\nsome-app-ext-data\n');
+ done();
});
});
});
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/server.js b/deps/npm/node_modules/request/node_modules/hawk/test/server.js
index 00d7d085f..c2899e4c7 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/server.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/server.js
@@ -3,6 +3,7 @@
var Url = require('url');
var Code = require('code');
var Hawk = require('../lib');
+var Hoek = require('hoek');
var Lab = require('lab');
@@ -19,967 +20,1294 @@ var it = lab.test;
var expect = Code.expect;
-describe('Hawk', function () {
+describe('Server', function () {
- describe('server', function () {
+ var credentialsFunc = function (id, callback) {
- var credentialsFunc = function (id, callback) {
+ var credentials = {
+ id: id,
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: (id === '1' ? 'sha1' : 'sha256'),
+ user: 'steve'
+ };
- var credentials = {
- id: id,
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: (id === '1' ? 'sha1' : 'sha256'),
- user: 'steve'
+ return callback(null, credentials);
+ };
+
+ describe('authenticate()', function () {
+
+ it('parses a valid authentication header (sha1)', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="1", ts="1353788437", nonce="k3j4h2", mac="zy79QQ5/EYFmQqutVnYb73gAc/U=", ext="hello"'
};
- return callback(null, credentials);
- };
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- describe('#authenticate', function () {
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ done();
+ });
+ });
+
+ it('parses a valid authentication header (sha256)', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/1?b=1&a=2',
+ host: 'example.com',
+ port: 8000,
+ authorization: 'Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", mac="m8r1rHbXN6NgO+KIIhjO7sFRyd78RNGVUwehe8Cp2dU=", ext="some-app-data"'
+ };
- it('parses a valid authentication header (sha1)', function (done) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353832234000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ done();
+ });
+ });
+
+ it('parses a valid authentication header (host override)', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: 'example1.com:8080',
authorization: 'Hawk id="1", ts="1353788437", nonce="k3j4h2", mac="zy79QQ5/EYFmQqutVnYb73gAc/U=", ext="hello"'
- };
+ }
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { host: 'example.com', localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- done();
- });
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ done();
});
+ });
- it('parses a valid authentication header (sha256)', function (done) {
+ it('parses a valid authentication header (host port override)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/1?b=1&a=2',
- host: 'example.com',
- port: 8000,
- authorization: 'Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", mac="m8r1rHbXN6NgO+KIIhjO7sFRyd78RNGVUwehe8Cp2dU=", ext="some-app-data"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: 'example1.com:80',
+ authorization: 'Hawk id="1", ts="1353788437", nonce="k3j4h2", mac="zy79QQ5/EYFmQqutVnYb73gAc/U=", ext="hello"'
+ }
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353832234000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { host: 'example.com', port: 8080, localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- done();
- });
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ done();
+ });
+ });
+
+ it('parses a valid authentication header (POST with payload)', function (done) {
+
+ var req = {
+ method: 'POST',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123456", ts="1357926341", nonce="1AwuJD", hash="qAiXIVv+yjDATneWxZP2YCTa9aHRgQdnH9b3Wc+o3dg=", ext="some-app-data", mac="UeYcj5UoTVaAWXNvJfLVia7kU3VabxCqrccXP8sUGC4="'
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1357926341000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ done();
});
+ });
- it('parses a valid authentication header (host override)', function (done) {
+ it('errors on missing hash', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
+ var req = {
+ method: 'GET',
+ url: '/resource/1?b=1&a=2',
+ host: 'example.com',
+ port: 8000,
+ authorization: 'Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", mac="m8r1rHbXN6NgO+KIIhjO7sFRyd78RNGVUwehe8Cp2dU=", ext="some-app-data"'
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, { payload: 'body', localtimeOffsetMsec: 1353832234000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing required payload hash');
+ done();
+ });
+ });
+
+ it('errors on a stale timestamp', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123456", ts="1362337299", nonce="UzmxSs", ext="some-app-data", mac="wnNUxchvvryMH2RxckTdZ/gY3ijzvccx4keVvELC61w="'
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, {}, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Stale timestamp');
+ var header = err.output.headers['WWW-Authenticate'];
+ var ts = header.match(/^Hawk ts\=\"(\d+)\"\, tsm\=\"([^\"]+)\"\, error=\"Stale timestamp\"$/);
+ var now = Hawk.utils.now();
+ expect(parseInt(ts[1], 10) * 1000).to.be.within(now - 1000, now + 1000);
+
+ var res = {
headers: {
- host: 'example1.com:8080',
- authorization: 'Hawk id="1", ts="1353788437", nonce="k3j4h2", mac="zy79QQ5/EYFmQqutVnYb73gAc/U=", ext="hello"'
+ 'www-authenticate': header
}
};
- Hawk.server.authenticate(req, credentialsFunc, { host: 'example.com', localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ expect(Hawk.client.authenticate(res, credentials, artifacts)).to.equal(true);
+ done();
+ });
+ });
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
+ it('errors on a replay', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="bXx7a7p1h9QYQNZ8x7QhvDQym8ACgab4m3lVSFn4DBw=", ext="hello"'
+ };
+
+ var memoryCache = {};
+ var options = {
+ localtimeOffsetMsec: 1353788437000 - Hawk.utils.now(),
+ nonceFunc: function (key, nonce, ts, callback) {
+
+ if (memoryCache[key + nonce]) {
+ return callback(new Error());
+ }
+
+ memoryCache[key + nonce] = true;
+ return callback();
+ }
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, options, function (err, credentials, artifacts) {
+
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+
+ Hawk.server.authenticate(req, credentialsFunc, options, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid nonce');
done();
});
});
+ });
- it('parses a valid authentication header (host port override)', function (done) {
+ it('does not error on nonce collision if keys differ', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- headers: {
- host: 'example1.com:80',
- authorization: 'Hawk id="1", ts="1353788437", nonce="k3j4h2", mac="zy79QQ5/EYFmQqutVnYb73gAc/U=", ext="hello"'
+ var reqSteve = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="bXx7a7p1h9QYQNZ8x7QhvDQym8ACgab4m3lVSFn4DBw=", ext="hello"'
+ };
+
+ var reqBob = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="456", ts="1353788437", nonce="k3j4h2", mac="LXfmTnRzrLd9TD7yfH+4se46Bx6AHyhpM94hLCiNia4=", ext="hello"'
+ };
+
+ var credentialsFunc = function (id, callback) {
+
+ var credentials = {
+ '123': {
+ id: id,
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: (id === '1' ? 'sha1' : 'sha256'),
+ user: 'steve'
+ },
+ '456': {
+ id: id,
+ key: 'xrunpaw3489ruxnpa98w4rxnwerxhqb98rpaxn39848',
+ algorithm: (id === '1' ? 'sha1' : 'sha256'),
+ user: 'bob'
}
};
- Hawk.server.authenticate(req, credentialsFunc, { host: 'example.com', port: 8080, localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ return callback(null, credentials[id]);
+ };
+
+ var memoryCache = {};
+ var options = {
+ localtimeOffsetMsec: 1353788437000 - Hawk.utils.now(),
+ nonceFunc: function (key, nonce, ts, callback) {
+
+ if (memoryCache[key + nonce]) {
+ return callback(new Error());
+ }
+
+ memoryCache[key + nonce] = true;
+ return callback();
+ }
+ };
+
+ Hawk.server.authenticate(reqSteve, credentialsFunc, options, function (err, credentials, artifacts) {
+
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+
+ Hawk.server.authenticate(reqBob, credentialsFunc, options, function (err, credentials, artifacts) {
expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
+ expect(credentials.user).to.equal('bob');
done();
});
});
+ });
- it('parses a valid authentication header (POST with payload)', function (done) {
+ it('errors on an invalid authentication header: wrong scheme', function (done) {
- var req = {
- method: 'POST',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123456", ts="1357926341", nonce="1AwuJD", hash="qAiXIVv+yjDATneWxZP2YCTa9aHRgQdnH9b3Wc+o3dg=", ext="some-app-data", mac="UeYcj5UoTVaAWXNvJfLVia7kU3VabxCqrccXP8sUGC4="'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Basic asdasdasdasd'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1357926341000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.not.exist();
+ done();
});
+ });
- it('errors on missing hash', function (done) {
+ it('errors on an invalid authentication header: no scheme', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/1?b=1&a=2',
- host: 'example.com',
- port: 8000,
- authorization: 'Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", mac="m8r1rHbXN6NgO+KIIhjO7sFRyd78RNGVUwehe8Cp2dU=", ext="some-app-data"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: '!@#'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { payload: 'body', localtimeOffsetMsec: 1353832234000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing required payload hash');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid header syntax');
+ done();
});
+ });
- it('errors on a stale timestamp', function (done) {
+ it('errors on an missing authorization header', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123456", ts="1362337299", nonce="UzmxSs", ext="some-app-data", mac="wnNUxchvvryMH2RxckTdZ/gY3ijzvccx4keVvELC61w="'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.server.authenticate(req, credentialsFunc, {}, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, {}, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Stale timestamp');
- var header = err.output.headers['WWW-Authenticate'];
- var ts = header.match(/^Hawk ts\=\"(\d+)\"\, tsm\=\"([^\"]+)\"\, error=\"Stale timestamp\"$/);
- var now = Hawk.utils.now();
- expect(parseInt(ts[1], 10) * 1000).to.be.within(now - 1000, now + 1000);
-
- var res = {
- headers: {
- 'www-authenticate': header
- }
- };
-
- expect(Hawk.client.authenticate(res, credentials, artifacts)).to.equal(true);
- done();
- });
+ expect(err).to.exist();
+ expect(err.isMissing).to.equal(true);
+ done();
});
+ });
- it('errors on a replay', function (done) {
+ it('errors on an missing host header', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="bXx7a7p1h9QYQNZ8x7QhvDQym8ACgab4m3lVSFn4DBw=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ headers: {
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ }
+ };
- var memoryCache = {};
- var options = {
- localtimeOffsetMsec: 1353788437000 - Hawk.utils.now(),
- nonceFunc: function (nonce, ts, callback) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- if (memoryCache[nonce]) {
- return callback(new Error());
- }
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid Host header');
+ done();
+ });
+ });
- memoryCache[nonce] = true;
- return callback();
- }
- };
+ it('errors on an missing authorization attribute (id)', function (done) {
- Hawk.server.authenticate(req, credentialsFunc, options, function (err, credentials, artifacts) {
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing attributes');
+ done();
+ });
+ });
- Hawk.server.authenticate(req, credentialsFunc, options, function (err, credentials, artifacts) {
+ it('errors on an missing authorization attribute (ts)', function (done) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid nonce');
- done();
- });
- });
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing attributes');
+ done();
});
+ });
- it('errors on an invalid authentication header: wrong scheme', function (done) {
+ it('errors on an missing authorization attribute (nonce)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Basic asdasdasdasd'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.not.exist();
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing attributes');
+ done();
});
+ });
- it('errors on an invalid authentication header: no scheme', function (done) {
+ it('errors on an missing authorization attribute (mac)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: '!@#'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid header syntax');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing attributes');
+ done();
});
+ });
- it('errors on an missing authorization header', function (done) {
+ it('errors on an unknown authorization attribute', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", x="3", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, {}, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.isMissing).to.equal(true);
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Unknown attribute: x');
+ done();
});
+ });
- it('errors on an missing host header', function (done) {
+ it('errors on an bad authorization header format', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- headers: {
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- }
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123\\", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid Host header');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Bad header format');
+ done();
});
+ });
- it('errors on an missing authorization attribute (id)', function (done) {
+ it('errors on an bad authorization attribute value', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="\t", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing attributes');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Bad attribute value: id');
+ done();
});
+ });
- it('errors on an missing authorization attribute (ts)', function (done) {
+ it('errors on an empty authorization attribute value', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing attributes');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Bad attribute value: id');
+ done();
});
+ });
- it('errors on an missing authorization attribute (nonce)', function (done) {
+ it('errors on duplicated authorization attribute key', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", id="456", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing attributes');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Duplicate attribute: id');
+ done();
});
+ });
- it('errors on an missing authorization attribute (mac)', function (done) {
+ it('errors on an invalid authorization header format', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing attributes');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid header syntax');
+ done();
});
+ });
- it('errors on an unknown authorization attribute', function (done) {
+ it('errors on an bad host header (missing host)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", x="3", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: ':8080',
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ }
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Unknown attribute: x');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid Host header');
+ done();
});
+ });
- it('errors on an bad authorization header format', function (done) {
+ it('errors on an bad host header (pad port)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123\\", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: 'example.com:something',
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ }
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Bad header format');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid Host header');
+ done();
});
+ });
- it('errors on an bad authorization attribute value', function (done) {
+ it('errors on credentialsFunc error', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="\t", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ var credentialsFunc = function (id, callback) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Bad attribute value: id');
- done();
- });
+ return callback(new Error('Unknown user'));
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown user');
+ done();
});
+ });
- it('errors on an empty authorization attribute value', function (done) {
+ it('errors on credentialsFunc error (with credentials)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ var credentialsFunc = function (id, callback) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Bad attribute value: id');
- done();
- });
+ return callback(new Error('Unknown user'), { some: 'value' });
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown user');
+ expect(credentials.some).to.equal('value');
+ done();
});
+ });
- it('errors on duplicated authorization attribute key', function (done) {
+ it('errors on missing credentials', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", id="456", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ var credentialsFunc = function (id, callback) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Duplicate attribute: id');
- done();
- });
+ return callback(null, null);
+ };
+
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Unknown credentials');
+ done();
});
+ });
+
+ it('errors on invalid credentials (id)', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- it('errors on an invalid authorization header format', function (done) {
+ var credentialsFunc = function (id, callback) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk'
+ var credentials = {
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ user: 'steve'
};
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ return callback(null, credentials);
+ };
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid header syntax');
- done();
- });
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid credentials');
+ expect(err.output.payload.message).to.equal('An internal server error occurred');
+ done();
});
+ });
- it('errors on an bad host header (missing host)', function (done) {
+ it('errors on invalid credentials (key)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- headers: {
- host: ':8080',
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- }
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
+
+ var credentialsFunc = function (id, callback) {
+
+ var credentials = {
+ id: '23434d3q4d5345d',
+ user: 'steve'
};
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ return callback(null, credentials);
+ };
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid Host header');
- done();
- });
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid credentials');
+ expect(err.output.payload.message).to.equal('An internal server error occurred');
+ done();
});
+ });
- it('errors on an bad host header (pad port)', function (done) {
+ it('errors on unknown credentials algorithm', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- headers: {
- host: 'example.com:something',
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- }
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
+
+ var credentialsFunc = function (id, callback) {
+
+ var credentials = {
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'hmac-sha-0',
+ user: 'steve'
};
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ return callback(null, credentials);
+ };
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid Host header');
- done();
- });
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown algorithm');
+ expect(err.output.payload.message).to.equal('An internal server error occurred');
+ done();
});
+ });
- it('errors on credentialsFunc error', function (done) {
+ it('errors on unknown bad mac', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcU4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
+ };
- var credentialsFunc = function (id, callback) {
+ var credentialsFunc = function (id, callback) {
- return callback(new Error('Unknown user'));
+ var credentials = {
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
};
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ return callback(null, credentials);
+ };
- expect(err).to.exist();
- expect(err.message).to.equal('Unknown user');
- done();
- });
+ Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Bad mac');
+ done();
});
+ });
+ });
- it('errors on credentialsFunc error (with credentials)', function (done) {
+ describe('header()', function () {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ it('generates header', function (done) {
- var credentialsFunc = function (id, callback) {
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
- return callback(new Error('Unknown user'), { some: 'value' });
- };
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('Hawk mac=\"n14wVJK4cOxAytPUMc5bPezQzuJGl5n7MYXhFQgEKsE=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\", ext=\"response-specific\"');
+ done();
+ });
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ it('generates header (empty payload)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var header = Hawk.server.header(credentials, artifacts, { payload: '', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('Hawk mac=\"i8/kUBDx0QF+PpCtW860kkV/fa9dbwEoe/FpGUXowf0=\", hash=\"q/t+NNAkQZNlq/aAD6PlexImwQTxwgT2MahfTa9XRLA=\", ext=\"response-specific\"');
+ done();
+ });
+
+ it('generates header (pre calculated hash)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var options = { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' };
+ options.hash = Hawk.crypto.calculatePayloadHash(options.payload, credentials.algorithm, options.contentType);
+ var header = Hawk.server.header(credentials, artifacts, options);
+ expect(header).to.equal('Hawk mac=\"n14wVJK4cOxAytPUMc5bPezQzuJGl5n7MYXhFQgEKsE=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\", ext=\"response-specific\"');
+ done();
+ });
+
+ it('generates header (null ext)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: null });
+ expect(header).to.equal('Hawk mac=\"6PrybJTJs20jsgBw5eilXpcytD8kUbaIKNYXL+6g0ns=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\"');
+ done();
+ });
+
+ it('errors on missing artifacts', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ var header = Hawk.server.header(credentials, null, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('');
+ done();
+ });
+
+ it('errors on invalid artifacts', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ var header = Hawk.server.header(credentials, 5, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('');
+ done();
+ });
+
+ it('errors on missing credentials', function (done) {
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var header = Hawk.server.header(null, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('');
+ done();
+ });
+
+ it('errors on invalid credentials (key)', function (done) {
+
+ var credentials = {
+ id: '123456',
+ algorithm: 'sha256',
+ user: 'steve'
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('');
+ done();
+ });
+
+ it('errors on invalid algorithm', function (done) {
+
+ var credentials = {
+ id: '123456',
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'x',
+ user: 'steve'
+ };
+
+ var artifacts = {
+ method: 'POST',
+ host: 'example.com',
+ port: '8080',
+ resource: '/resource/4?filter=a',
+ ts: '1398546787',
+ nonce: 'xUwusx',
+ hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
+ ext: 'some-app-data',
+ mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
+ id: '123456'
+ };
+
+ var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
+ expect(header).to.equal('');
+ done();
+ });
+ });
+
+ describe('authenticateMessage()', function () {
+
+ it('errors on invalid authorization (ts)', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ delete auth.ts;
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
expect(err).to.exist();
- expect(err.message).to.equal('Unknown user');
- expect(credentials.some).to.equal('value');
+ expect(err.message).to.equal('Invalid authorization');
done();
});
});
+ });
- it('errors on missing credentials', function (done) {
+ it('errors on invalid authorization (nonce)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var credentialsFunc = function (id, callback) {
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ delete auth.nonce;
- return callback(null, null);
- };
-
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Unknown credentials');
+ expect(err.message).to.equal('Invalid authorization');
done();
});
});
+ });
- it('errors on invalid credentials (id)', function (done) {
-
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
-
- var credentialsFunc = function (id, callback) {
+ it('errors on invalid authorization (hash)', function (done) {
- var credentials = {
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- return callback(null, credentials);
- };
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ delete auth.hash;
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
expect(err).to.exist();
- expect(err.message).to.equal('Invalid credentials');
- expect(err.output.payload.message).to.equal('An internal server error occurred');
+ expect(err.message).to.equal('Invalid authorization');
done();
});
});
+ });
- it('errors on invalid credentials (key)', function (done) {
+ it('errors with credentials', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var credentialsFunc = function (id, callback) {
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- var credentials = {
- id: '23434d3q4d5345d',
- user: 'steve'
- };
-
- return callback(null, credentials);
- };
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, function (id, callback) {
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ callback(new Error('something'), { some: 'value' });
+ }, {}, function (err, credentials) {
expect(err).to.exist();
- expect(err.message).to.equal('Invalid credentials');
- expect(err.output.payload.message).to.equal('An internal server error occurred');
+ expect(err.message).to.equal('something');
+ expect(credentials.some).to.equal('value');
done();
});
});
+ });
- it('errors on unknown credentials algorithm', function (done) {
-
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcUyW6EEgUH4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
-
- var credentialsFunc = function (id, callback) {
+ it('errors on nonce collision', function (done) {
- var credentials = {
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'hmac-sha-0',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- return callback(null, credentials);
- };
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {
+ nonceFunc: function (key, nonce, ts, nonceCallback) {
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ nonceCallback(true);
+ }
+ }, function (err, credentials) {
expect(err).to.exist();
- expect(err.message).to.equal('Unknown algorithm');
- expect(err.output.payload.message).to.equal('An internal server error occurred');
+ expect(err.message).to.equal('Invalid nonce');
done();
});
});
+ });
- it('errors on unknown bad mac', function (done) {
+ var credentialsFunc = function (id, callback) {
- var req = {
- method: 'GET',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080,
- authorization: 'Hawk id="123", ts="1353788437", nonce="k3j4h2", mac="/qwS4UjfVWMcU4jlr7T/wuKe3dKijvTvSos=", ext="hello"'
- };
+ var credentials = {
+ id: id,
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: (id === '1' ? 'sha1' : 'sha256'),
+ user: 'steve'
+ };
- var credentialsFunc = function (id, callback) {
+ return callback(null, credentials);
+ };
- var credentials = {
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ it('should generate an authorization then successfully parse it', function (done) {
- return callback(null, credentials);
- };
+ credentialsFunc('123456', function (err, credentials) {
- Hawk.server.authenticate(req, credentialsFunc, { localtimeOffsetMsec: 1353788437000 - Hawk.utils.now() }, function (err, credentials, artifacts) {
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Bad mac');
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
done();
});
});
});
- describe('#header', function () {
+ it('should fail authorization on mismatching host', function (done) {
- it('generates header', function (done) {
+ credentialsFunc('123456', function (err, credentials) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ Hawk.server.authenticateMessage('example1.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
- var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('Hawk mac=\"n14wVJK4cOxAytPUMc5bPezQzuJGl5n7MYXhFQgEKsE=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\", ext=\"response-specific\"');
- done();
+ expect(err).to.exist();
+ expect(err.message).to.equal('Bad mac');
+ done();
+ });
});
+ });
- it('generates header (empty payload)', function (done) {
+ it('should fail authorization on stale timestamp', function (done) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- var header = Hawk.server.header(credentials, artifacts, { payload: '', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('Hawk mac=\"i8/kUBDx0QF+PpCtW860kkV/fa9dbwEoe/FpGUXowf0=\", hash=\"q/t+NNAkQZNlq/aAD6PlexImwQTxwgT2MahfTa9XRLA=\", ext=\"response-specific\"');
- done();
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { localtimeOffsetMsec: 100000 }, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Stale timestamp');
+ done();
+ });
});
+ });
- it('generates header (pre calculated hash)', function (done) {
+ it('overrides timestampSkewSec', function (done) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials, localtimeOffsetMsec: 100000 });
+ expect(auth).to.exist();
- var options = { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' };
- options.hash = Hawk.crypto.calculatePayloadHash(options.payload, credentials.algorithm, options.contentType);
- var header = Hawk.server.header(credentials, artifacts, options);
- expect(header).to.equal('Hawk mac=\"n14wVJK4cOxAytPUMc5bPezQzuJGl5n7MYXhFQgEKsE=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\", ext=\"response-specific\"');
- done();
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { timestampSkewSec: 500 }, function (err, credentials) {
+
+ expect(err).to.not.exist();
+ done();
+ });
});
+ });
- it('generates header (null ext)', function (done) {
+ it('should fail authorization on invalid authorization', function (done) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+ delete auth.id;
- var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: null });
- expect(header).to.equal('Hawk mac=\"6PrybJTJs20jsgBw5eilXpcytD8kUbaIKNYXL+6g0ns=\", hash=\"f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=\"');
- done();
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid authorization');
+ done();
+ });
});
+ });
- it('errors on missing artifacts', function (done) {
+ it('should fail authorization on bad hash', function (done) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var header = Hawk.server.header(credentials, null, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('');
- done();
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message1', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Bad message hash');
+ done();
+ });
});
+ });
- it('errors on invalid artifacts', function (done) {
+ it('should fail authorization on nonce error', function (done) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var header = Hawk.server.header(credentials, 5, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('');
- done();
- });
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- it('errors on missing credentials', function (done) {
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {
+ nonceFunc: function (key, nonce, ts, callback) {
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ callback(new Error('kaboom'));
+ }
+ }, function (err, credentials) {
- var header = Hawk.server.header(null, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('');
- done();
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid nonce');
+ done();
+ });
});
+ });
- it('errors on invalid credentials (key)', function (done) {
+ it('should fail authorization on credentials error', function (done) {
- var credentials = {
- id: '123456',
- algorithm: 'sha256',
- user: 'steve'
- };
-
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ credentialsFunc('123456', function (err, credentials) {
- var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('');
- done();
- });
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- it('errors on invalid algorithm', function (done) {
+ var errFunc = function (id, callback) {
- var credentials = {
- id: '123456',
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'x',
- user: 'steve'
+ callback(new Error('kablooey'));
};
- var artifacts = {
- method: 'POST',
- host: 'example.com',
- port: '8080',
- resource: '/resource/4?filter=a',
- ts: '1398546787',
- nonce: 'xUwusx',
- hash: 'nJjkVtBE5Y/Bk38Aiokwn0jiJxt/0S2WRSUwWLCf5xk=',
- ext: 'some-app-data',
- mac: 'dvIvMThwi28J61Jc3P0ryAhuKpanU63GXdx6hkmQkJA=',
- id: '123456'
- };
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
- var header = Hawk.server.header(credentials, artifacts, { payload: 'some reply', contentType: 'text/plain', ext: 'response-specific' });
- expect(header).to.equal('');
- done();
+ expect(err).to.exist();
+ expect(err.message).to.equal('kablooey');
+ done();
+ });
});
});
- describe('#authenticateMessage', function () {
+ it('should fail authorization on missing credentials', function (done) {
- it('errors on invalid authorization (ts)', function (done) {
+ credentialsFunc('123456', function (err, credentials) {
- credentialsFunc('123456', function (err, credentials) {
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- delete auth.ts;
+ var errFunc = function (id, callback) {
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+ callback();
+ };
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid authorization');
- done();
- });
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown credentials');
+ done();
});
});
+ });
+
+ it('should fail authorization on invalid credentials', function (done) {
- it('errors on invalid authorization (nonce)', function (done) {
+ credentialsFunc('123456', function (err, credentials) {
- credentialsFunc('123456', function (err, credentials) {
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- delete auth.nonce;
+ var errFunc = function (id, callback) {
+
+ callback(null, {});
+ };
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid authorization');
- done();
- });
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid credentials');
+ done();
});
});
+ });
- it('errors on invalid authorization (hash)', function (done) {
+ it('should fail authorization on invalid credentials algorithm', function (done) {
- credentialsFunc('123456', function (err, credentials) {
+ credentialsFunc('123456', function (err, credentials) {
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
- delete auth.hash;
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+ var errFunc = function (id, callback) {
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid authorization');
- done();
- });
+ callback(null, { key: '123', algorithm: '456' });
+ };
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown algorithm');
+ done();
});
});
+ });
- it('errors with credentials', function (done) {
+ it('should fail on missing host', function (done) {
- credentialsFunc('123456', function (err, credentials) {
+ credentialsFunc('123456', function (err, credentials) {
- var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ var auth = Hawk.client.message(null, 8080, 'some message', { credentials: credentials });
+ expect(auth).to.not.exist();
+ done();
+ });
+ });
- Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, function (id, callback) { callback(new Error('something'), { some: 'value' }); }, {}, function (err, credentials) {
+ it('should fail on missing credentials', function (done) {
- expect(err).to.exist();
- expect(err.message).to.equal('something');
- expect(credentials.some).to.equal('value');
- done();
- });
- });
- });
+ var auth = Hawk.client.message('example.com', 8080, 'some message', {});
+ expect(auth).to.not.exist();
+ done();
});
- describe('#authenticatePayloadHash', function () {
+ it('should fail on invalid algorithm', function (done) {
- it('checks payload hash', function (done) {
+ credentialsFunc('123456', function (err, credentials) {
- expect(Hawk.server.authenticatePayloadHash('abcdefg', { hash: 'abcdefg' })).to.equal(true);
- expect(Hawk.server.authenticatePayloadHash('1234567', { hash: 'abcdefg' })).to.equal(false);
+ var creds = Hoek.clone(credentials);
+ creds.algorithm = 'blah';
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: creds });
+ expect(auth).to.not.exist();
done();
});
});
});
+
+ describe('authenticatePayloadHash()', function () {
+
+ it('checks payload hash', function (done) {
+
+ expect(Hawk.server.authenticatePayloadHash('abcdefg', { hash: 'abcdefg' })).to.equal(true);
+ expect(Hawk.server.authenticatePayloadHash('1234567', { hash: 'abcdefg' })).to.equal(false);
+ done();
+ });
+ });
});
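
A minimal sketch (not part of the patch) of the client/server message flow the server.js tests above exercise, assuming the vendored module resolves via require('hawk'); the credentials shape is copied from the tests' credentialsFunc:

var Hawk = require('hawk');

// Lookup stub mirroring the tests: resolves an id to its key/algorithm pair.
var credentialsFunc = function (id, callback) {

    return callback(null, {
        id: id,
        key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
        algorithm: 'sha256',
        user: 'steve'
    });
};

credentialsFunc('123456', function (err, credentials) {

    // Client side: sign an arbitrary message for example.com:8080.
    var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });

    // Server side: verify that host, port and message match the mac and hash.
    Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, creds) {

        console.log(err ? 'rejected: ' + err.message : 'authenticated user ' + creds.user);
    });
});
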
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/uri.js b/deps/npm/node_modules/request/node_modules/hawk/test/uri.js
index e64b0095b..1b623c091 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/uri.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/uri.js
@@ -21,77 +21,33 @@ var it = lab.test;
var expect = Code.expect;
-describe('Hawk', function () {
+describe('Uri', function () {
- describe('Uri', function () {
+ var credentialsFunc = function (id, callback) {
- var credentialsFunc = function (id, callback) {
-
- var credentials = {
- id: id,
- key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
- algorithm: 'sha256',
- user: 'steve'
- };
-
- return callback(null, credentials);
+ var credentials = {
+ id: id,
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: 'sha256',
+ user: 'steve'
};
- it('should generate a bewit then successfully authenticate it', function (done) {
-
- var req = {
- method: 'GET',
- url: '/resource/4?a=1&b=2',
- host: 'example.com',
- port: 80
- };
-
- credentialsFunc('123456', function (err, credentials) {
-
- var bewit = Hawk.uri.getBewit('http://example.com/resource/4?a=1&b=2', { credentials: credentials, ttlSec: 60 * 60 * 24 * 365 * 100, ext: 'some-app-data' });
- req.url += '&bewit=' + bewit;
-
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
-
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- expect(attributes.ext).to.equal('some-app-data');
- done();
- });
- });
- });
-
- it('should generate a bewit then successfully authenticate it (no ext)', function (done) {
-
- var req = {
- method: 'GET',
- url: '/resource/4?a=1&b=2',
- host: 'example.com',
- port: 80
- };
+ return callback(null, credentials);
+ };
- credentialsFunc('123456', function (err, credentials) {
+ it('should generate a bewit then successfully authenticate it', function (done) {
- var bewit = Hawk.uri.getBewit('http://example.com/resource/4?a=1&b=2', { credentials: credentials, ttlSec: 60 * 60 * 24 * 365 * 100 });
- req.url += '&bewit=' + bewit;
-
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
-
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- done();
- });
- });
- });
+ var req = {
+ method: 'GET',
+ url: '/resource/4?a=1&b=2',
+ host: 'example.com',
+ port: 80
+ };
- it('should successfully authenticate a request (last param)', function (done) {
+ credentialsFunc('123456', function (err, credentials) {
- var req = {
- method: 'GET',
- url: '/resource/4?a=1&b=2&bewit=MTIzNDU2XDQ1MTE0ODQ2MjFcMzFjMmNkbUJFd1NJRVZDOVkva1NFb2c3d3YrdEVNWjZ3RXNmOGNHU2FXQT1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var bewit = Hawk.uri.getBewit('http://example.com/resource/4?a=1&b=2', { credentials: credentials, ttlSec: 60 * 60 * 24 * 365 * 100, ext: 'some-app-data' });
+ req.url += '&bewit=' + bewit;
Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
@@ -101,342 +57,401 @@ describe('Hawk', function () {
done();
});
});
+ });
- it('should successfully authenticate a request (first param)', function (done) {
+ it('should generate a bewit then successfully authenticate it (no ext)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MTE0ODQ2MjFcMzFjMmNkbUJFd1NJRVZDOVkva1NFb2c3d3YrdEVNWjZ3RXNmOGNHU2FXQT1cc29tZS1hcHAtZGF0YQ&a=1&b=2',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?a=1&b=2',
+ host: 'example.com',
+ port: 80
+ };
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var bewit = Hawk.uri.getBewit('http://example.com/resource/4?a=1&b=2', { credentials: credentials, ttlSec: 60 * 60 * 24 * 365 * 100 });
+ req.url += '&bewit=' + bewit;
Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
expect(err).to.not.exist();
expect(credentials.user).to.equal('steve');
- expect(attributes.ext).to.equal('some-app-data');
done();
});
});
+ });
- it('should successfully authenticate a request (only param)', function (done) {
+ it('should successfully authenticate a request (last param)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MTE0ODQ2NDFcZm1CdkNWT3MvcElOTUUxSTIwbWhrejQ3UnBwTmo4Y1VrSHpQd3Q5OXJ1cz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?a=1&b=2&bewit=MTIzNDU2XDQ1MTE0ODQ2MjFcMzFjMmNkbUJFd1NJRVZDOVkva1NFb2c3d3YrdEVNWjZ3RXNmOGNHU2FXQT1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.not.exist();
- expect(credentials.user).to.equal('steve');
- expect(attributes.ext).to.equal('some-app-data');
- done();
- });
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ expect(attributes.ext).to.equal('some-app-data');
+ done();
});
+ });
- it('should fail on multiple authentication', function (done) {
+ it('should successfully authenticate a request (first param)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MTE0ODQ2NDFcZm1CdkNWT3MvcElOTUUxSTIwbWhrejQ3UnBwTmo4Y1VrSHpQd3Q5OXJ1cz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080,
- authorization: 'Basic asdasdasdasd'
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MTE0ODQ2MjFcMzFjMmNkbUJFd1NJRVZDOVkva1NFb2c3d3YrdEVNWjZ3RXNmOGNHU2FXQT1cc29tZS1hcHAtZGF0YQ&a=1&b=2',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Multiple authentications');
- done();
- });
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ expect(attributes.ext).to.equal('some-app-data');
+ done();
});
+ });
- it('should fail on method other than GET', function (done) {
+ it('should successfully authenticate a request (only param)', function (done) {
- credentialsFunc('123456', function (err, credentials) {
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MTE0ODQ2NDFcZm1CdkNWT3MvcElOTUUxSTIwbWhrejQ3UnBwTmo4Y1VrSHpQd3Q5OXJ1cz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- var req = {
- method: 'POST',
- url: '/resource/4?filter=a',
- host: 'example.com',
- port: 8080
- };
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- var exp = Math.floor(Hawk.utils.now() / 1000) + 60;
- var ext = 'some-app-data';
- var mac = Hawk.crypto.calculateMac('bewit', credentials, {
- timestamp: exp,
- nonce: '',
- method: req.method,
- resource: req.url,
- host: req.host,
- port: req.port,
- ext: ext
- });
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ expect(attributes.ext).to.equal('some-app-data');
+ done();
+ });
+ });
- var bewit = credentials.id + '\\' + exp + '\\' + mac + '\\' + ext;
+ it('should fail on multiple authentication', function (done) {
- req.url += '&bewit=' + Hoek.base64urlEncode(bewit);
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MTE0ODQ2NDFcZm1CdkNWT3MvcElOTUUxSTIwbWhrejQ3UnBwTmo4Y1VrSHpQd3Q5OXJ1cz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080,
+ authorization: 'Basic asdasdasdasd'
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid method');
- done();
- });
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Multiple authentications');
+ done();
});
+ });
+
+ it('should fail on method other than GET', function (done) {
- it('should fail on invalid host header', function (done) {
+ credentialsFunc('123456', function (err, credentials) {
var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- headers: {
- host: 'example.com:something'
- }
+ method: 'POST',
+ url: '/resource/4?filter=a',
+ host: 'example.com',
+ port: 8080
};
+ var exp = Math.floor(Hawk.utils.now() / 1000) + 60;
+ var ext = 'some-app-data';
+ var mac = Hawk.crypto.calculateMac('bewit', credentials, {
+ timestamp: exp,
+ nonce: '',
+ method: req.method,
+ resource: req.url,
+ host: req.host,
+ port: req.port,
+ ext: ext
+ });
+
+ var bewit = credentials.id + '\\' + exp + '\\' + mac + '\\' + ext;
+
+ req.url += '&bewit=' + Hoek.base64urlEncode(bewit);
+
Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid Host header');
+ expect(err.output.payload.message).to.equal('Invalid method');
done();
});
});
+ });
- it('should fail on empty bewit', function (done) {
+ it('should fail on invalid host header', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ headers: {
+ host: 'example.com:something'
+ }
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Empty bewit');
- expect(err.isMissing).to.not.exist();
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid Host header');
+ done();
});
+ });
- it('should fail on invalid bewit', function (done) {
+ it('should fail on empty bewit', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=*',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid bewit encoding');
- expect(err.isMissing).to.not.exist();
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Empty bewit');
+ expect(err.isMissing).to.not.exist();
+ done();
});
+ });
- it('should fail on missing bewit', function (done) {
+ it('should fail on invalid bewit', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=*',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.not.exist();
- expect(err.isMissing).to.equal(true);
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid bewit encoding');
+ expect(err.isMissing).to.not.exist();
+ done();
});
+ });
- it('should fail on invalid bewit structure', function (done) {
+ it('should fail on missing bewit', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=abc',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Invalid bewit structure');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.not.exist();
+ expect(err.isMissing).to.equal(true);
+ done();
});
+ });
- it('should fail on empty bewit attribute', function (done) {
+ it('should fail on invalid bewit structure', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=YVxcY1xk',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=abc',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing bewit attributes');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Invalid bewit structure');
+ done();
});
+ });
- it('should fail on missing bewit id attribute', function (done) {
+ it('should fail on empty bewit attribute', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=XDQ1NTIxNDc2MjJcK0JFbFhQMXhuWjcvd1Nrbm1ldGhlZm5vUTNHVjZNSlFVRHk4NWpTZVJ4VT1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=YVxcY1xk',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Missing bewit attributes');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing bewit attributes');
+ done();
});
-
- it('should fail on expired access', function (done) {
+ });
- var req = {
- method: 'GET',
- url: '/resource/4?a=1&b=2&bewit=MTIzNDU2XDEzNTY0MTg1ODNcWk1wZlMwWU5KNHV0WHpOMmRucTRydEk3NXNXTjFjeWVITTcrL0tNZFdVQT1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ it('should fail on missing bewit id attribute', function (done) {
- Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=XDQ1NTIxNDc2MjJcK0JFbFhQMXhuWjcvd1Nrbm1ldGhlZm5vUTNHVjZNSlFVRHk4NWpTZVJ4VT1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Access expired');
- done();
- });
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Missing bewit attributes');
+ done();
});
+ });
- it('should fail on credentials function error', function (done) {
+ it('should fail on expired access', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?a=1&b=2&bewit=MTIzNDU2XDEzNTY0MTg1ODNcWk1wZlMwWU5KNHV0WHpOMmRucTRydEk3NXNXTjFjeWVITTcrL0tNZFdVQT1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, function (id, callback) { callback(Hawk.error.badRequest('Boom')); }, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, credentials, attributes) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Boom');
- done();
- });
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Access expired');
+ done();
});
+ });
- it('should fail on credentials function error with credentials', function (done) {
+ it('should fail on credentials function error', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, function (id, callback) { callback(Hawk.error.badRequest('Boom'), { some: 'value' }); }, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, function (id, callback) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Boom');
- expect(credentials.some).to.equal('value');
- done();
- });
+ callback(Hawk.error.badRequest('Boom'));
+ }, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Boom');
+ done();
});
+ });
- it('should fail on null credentials function response', function (done) {
+ it('should fail on credentials function error with credentials', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, function (id, callback) { callback(null, null); }, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, function (id, callback) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Unknown credentials');
- done();
- });
+ callback(Hawk.error.badRequest('Boom'), { some: 'value' });
+ }, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Boom');
+ expect(credentials.some).to.equal('value');
+ done();
});
+ });
- it('should fail on invalid credentials function response', function (done) {
+ it('should fail on null credentials function response', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, function (id, callback) { callback(null, {}); }, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, function (id, callback) {
- expect(err).to.exist();
- expect(err.message).to.equal('Invalid credentials');
- done();
- });
+ callback(null, null);
+ }, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Unknown credentials');
+ done();
});
+ });
- it('should fail on invalid credentials function response (unknown algorithm)', function (done) {
+ it('should fail on invalid credentials function response', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, function (id, callback) { callback(null, { key: 'xxx', algorithm: 'xxx' }); }, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, function (id, callback) {
- expect(err).to.exist();
- expect(err.message).to.equal('Unknown algorithm');
- done();
- });
+ callback(null, {});
+ }, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid credentials');
+ done();
});
+ });
- it('should fail on expired access', function (done) {
+ it('should fail on invalid credentials function response (unknown algorithm)', function (done) {
- var req = {
- method: 'GET',
- url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
- host: 'example.com',
- port: 8080
- };
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
- Hawk.uri.authenticate(req, function (id, callback) { callback(null, { key: 'xxx', algorithm: 'sha256' }); }, {}, function (err, credentials, attributes) {
+ Hawk.uri.authenticate(req, function (id, callback) {
- expect(err).to.exist();
- expect(err.output.payload.message).to.equal('Bad mac');
- done();
- });
+ callback(null, { key: 'xxx', algorithm: 'xxx' });
+ }, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown algorithm');
+ done();
+ });
+ });
+
+ it('should fail on expired access', function (done) {
+
+ var req = {
+ method: 'GET',
+ url: '/resource/4?bewit=MTIzNDU2XDQ1MDk5OTE3MTlcTUE2eWkwRWRwR0pEcWRwb0JkYVdvVDJrL0hDSzA1T0Y3MkhuZlVmVy96Zz1cc29tZS1hcHAtZGF0YQ',
+ host: 'example.com',
+ port: 8080
+ };
+
+ Hawk.uri.authenticate(req, function (id, callback) {
+
+ callback(null, { key: 'xxx', algorithm: 'sha256' });
+ }, {}, function (err, credentials, attributes) {
+
+ expect(err).to.exist();
+ expect(err.output.payload.message).to.equal('Bad mac');
+ done();
});
});
- describe('#getBewit', function () {
+ describe('getBewit()', function () {
it('returns a valid bewit value', function (done) {
@@ -586,5 +601,249 @@ describe('Hawk', function () {
done();
});
});
+ describe('authenticateMessage()', function () {
+
+ var credentialsFunc = function (id, callback) {
+
+ var credentials = {
+ id: id,
+ key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
+ algorithm: (id === '1' ? 'sha1' : 'sha256'),
+ user: 'steve'
+ };
+
+ return callback(null, credentials);
+ };
+
+ it('should generate an authorization then successfully parse it', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.not.exist();
+ expect(credentials.user).to.equal('steve');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on mismatching host', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example1.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Bad mac');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on stale timestamp', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { localtimeOffsetMsec: 100000 }, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Stale timestamp');
+ done();
+ });
+ });
+ });
+
+ it('overrides timestampSkewSec', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials, localtimeOffsetMsec: 100000 });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, { timestampSkewSec: 500 }, function (err, credentials) {
+
+ expect(err).to.not.exist();
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on invalid authorization', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+ delete auth.id;
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid authorization');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on bad hash', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message1', auth, credentialsFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Bad message hash');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on nonce error', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, credentialsFunc, {
+ nonceFunc: function (key, nonce, ts, callback) {
+
+ callback(new Error('kaboom'));
+ }
+ }, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid nonce');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on credentials error', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ var errFunc = function (id, callback) {
+
+ callback(new Error('kablooey'));
+ };
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('kablooey');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on missing credentials', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ var errFunc = function (id, callback) {
+
+ callback();
+ };
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown credentials');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on invalid credentials', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ var errFunc = function (id, callback) {
+
+ callback(null, {});
+ };
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Invalid credentials');
+ done();
+ });
+ });
+ });
+
+ it('should fail authorization on invalid credentials algorithm', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: credentials });
+ expect(auth).to.exist();
+
+ var errFunc = function (id, callback) {
+
+ callback(null, { key: '123', algorithm: '456' });
+ };
+
+ Hawk.server.authenticateMessage('example.com', 8080, 'some message', auth, errFunc, {}, function (err, credentials) {
+
+ expect(err).to.exist();
+ expect(err.message).to.equal('Unknown algorithm');
+ done();
+ });
+ });
+ });
+
+ it('should fail on missing host', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var auth = Hawk.client.message(null, 8080, 'some message', { credentials: credentials });
+ expect(auth).to.not.exist();
+ done();
+ });
+ });
+
+ it('should fail on missing credentials', function (done) {
+
+ var auth = Hawk.client.message('example.com', 8080, 'some message', {});
+ expect(auth).to.not.exist();
+ done();
+ });
+
+ it('should fail on invalid algorithm', function (done) {
+
+ credentialsFunc('123456', function (err, credentials) {
+
+ var creds = Hoek.clone(credentials);
+ creds.algorithm = 'blah';
+ var auth = Hawk.client.message('example.com', 8080, 'some message', { credentials: creds });
+ expect(auth).to.not.exist();
+ done();
+ });
+ });
+ });
});
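
A minimal sketch (not part of the patch) of the bewit flow covered by the uri.js tests above, again assuming require('hawk') resolves to this vendored module; the URL and ext value are taken from the tests, while the ttlSec of 60 is an arbitrary illustration:

var Hawk = require('hawk');

var credentialsFunc = function (id, callback) {

    return callback(null, {
        id: id,
        key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn',
        algorithm: 'sha256',
        user: 'steve'
    });
};

credentialsFunc('123456', function (err, credentials) {

    // Issue a time-limited URI credential (bewit) for a GET resource.
    var bewit = Hawk.uri.getBewit('http://example.com/resource/4?a=1&b=2', { credentials: credentials, ttlSec: 60, ext: 'some-app-data' });

    // Simulated incoming request carrying the bewit as the last query parameter.
    var req = {
        method: 'GET',
        url: '/resource/4?a=1&b=2&bewit=' + bewit,
        host: 'example.com',
        port: 80
    };

    Hawk.uri.authenticate(req, credentialsFunc, {}, function (err, creds, attributes) {

        console.log(err ? 'rejected: ' + err.message : 'ok for ' + creds.user + ', ext=' + attributes.ext);
    });
});
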
diff --git a/deps/npm/node_modules/request/node_modules/hawk/test/utils.js b/deps/npm/node_modules/request/node_modules/hawk/test/utils.js
index 9ccda8be3..1bfef65f8 100755
--- a/deps/npm/node_modules/request/node_modules/hawk/test/utils.js
+++ b/deps/npm/node_modules/request/node_modules/hawk/test/utils.js
@@ -19,102 +19,103 @@ var it = lab.test;
var expect = Code.expect;
-describe('Hawk', function () {
-
- describe('Utils', function () {
-
- describe('#parseHost', function () {
-
- it('returns port 80 for non tls node request', function (done) {
-
- var req = {
- method: 'POST',
- url: '/resource/4?filter=a',
- headers: {
- host: 'example.com',
- 'content-type': 'text/plain;x=y'
- }
- };
-
- expect(Hawk.utils.parseHost(req, 'Host').port).to.equal(80);
- done();
- });
-
- it('returns port 443 for non tls node request', function (done) {
-
- var req = {
- method: 'POST',
- url: '/resource/4?filter=a',
- headers: {
- host: 'example.com',
- 'content-type': 'text/plain;x=y'
- },
- connection: {
- encrypted: true
- }
- };
-
- expect(Hawk.utils.parseHost(req, 'Host').port).to.equal(443);
- done();
- });
-
- it('returns port 443 for non tls node request (IPv6)', function (done) {
-
- var req = {
- method: 'POST',
- url: '/resource/4?filter=a',
- headers: {
- host: '[123:123:123]',
- 'content-type': 'text/plain;x=y'
- },
- connection: {
- encrypted: true
- }
- };
-
- expect(Hawk.utils.parseHost(req, 'Host').port).to.equal(443);
- done();
- });
-
- it('parses IPv6 headers', function (done) {
-
- var req = {
- method: 'POST',
- url: '/resource/4?filter=a',
- headers: {
- host: '[123:123:123]:8000',
- 'content-type': 'text/plain;x=y'
- },
- connection: {
- encrypted: true
- }
- };
-
- var host = Hawk.utils.parseHost(req, 'Host');
- expect(host.port).to.equal('8000');
- expect(host.name).to.equal('[123:123:123]');
- done();
- });
+describe('Utils', function () {
+
+ describe('parseHost()', function () {
+
+ it('returns port 80 for non tls node request', function (done) {
+
+ var req = {
+ method: 'POST',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: 'example.com',
+ 'content-type': 'text/plain;x=y'
+ }
+ };
+
+ expect(Hawk.utils.parseHost(req, 'Host').port).to.equal(80);
+ done();
});
- describe('#version', function () {
+ it('returns port 443 for non tls node request', function (done) {
+
+ var req = {
+ method: 'POST',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: 'example.com',
+ 'content-type': 'text/plain;x=y'
+ },
+ connection: {
+ encrypted: true
+ }
+ };
+
+ expect(Hawk.utils.parseHost(req, 'Host').port).to.equal(443);
+ done();
+ });
- it('returns the correct package version number', function (done) {
+ it('returns port 443 for non tls node request (IPv6)', function (done) {
+
+ var req = {
+ method: 'POST',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: '[123:123:123]',
+ 'content-type': 'text/plain;x=y'
+ },
+ connection: {
+ encrypted: true
+ }
+ };
+
+ expect(Hawk.utils.parseHost(req, 'Host').port).to.equal(443);
+ done();
+ });
- expect(Hawk.utils.version()).to.equal(Package.version);
- done();
- });
+ it('parses IPv6 headers', function (done) {
+
+ var req = {
+ method: 'POST',
+ url: '/resource/4?filter=a',
+ headers: {
+ host: '[123:123:123]:8000',
+ 'content-type': 'text/plain;x=y'
+ },
+ connection: {
+ encrypted: true
+ }
+ };
+
+ var host = Hawk.utils.parseHost(req, 'Host');
+ expect(host.port).to.equal('8000');
+ expect(host.name).to.equal('[123:123:123]');
+ done();
});
+ });
- describe('#unauthorized', function () {
+ describe('version()', function () {
- it('returns a hawk 401', function (done) {
+ it('returns the correct package version number', function (done) {
- expect(Hawk.utils.unauthorized('kaboom').output.headers['WWW-Authenticate']).to.equal('Hawk error="kaboom"');
- done();
- });
+ expect(Hawk.utils.version()).to.equal(Package.version);
+ done();
});
});
-});
+ describe('unauthorized()', function () {
+ it('returns a hawk 401', function (done) {
+
+ expect(Hawk.utils.unauthorized('kaboom').output.headers['WWW-Authenticate']).to.equal('Hawk error="kaboom"');
+ done();
+ });
+
+ it('supports attributes', function (done) {
+
+ expect(Hawk.utils.unauthorized('kaboom', { a: 'b' }).output.headers['WWW-Authenticate']).to.equal('Hawk a="b", error="kaboom"');
+ done();
+ });
+ });
+});
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json
index 034c2b48f..be2bc636a 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json
@@ -32,14 +32,32 @@
"pretest": "which gjslint; if [[ \"$?\" = 0 ]] ; then gjslint --nojsdoc -r lib -r tst; else echo \"Missing gjslint. Skipping lint\"; fi",
"test": "tap ./tst"
},
- "readme": "node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS.\nCurrently BER encoding is supported; at some point I'll likely have to do DER.\n\n## Usage\n\nMostly, if you're *actually* needing to read and write ASN.1, you probably don't\nneed this readme to explain what and why. If you have no idea what ASN.1 is,\nsee this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc\n\nThe source is pretty much self-explanatory, and has read/write methods for the\ncommon types out there.\n\n### Decoding\n\nThe following reads an ASN.1 sequence with a boolean.\n\n var Ber = require('asn1').Ber;\n\n var reader = new Ber.Reader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff]));\n\n reader.readSequence();\n console.log('Sequence len: ' + reader.length);\n if (reader.peek() === Ber.Boolean)\n console.log(reader.readBoolean());\n\n### Encoding\n\nThe following generates the same payload as above.\n\n var Ber = require('asn1').Ber;\n\n var writer = new Ber.Writer();\n\n writer.startSequence();\n writer.writeBoolean(true);\n writer.endSequence();\n\n console.log(writer.buffer);\n\n## Installation\n\n npm install asn1\n\n## License\n\nMIT.\n\n## Bugs\n\nSee <https://github.com/mcavage/node-asn1/issues>.\n",
- "readmeFilename": "README.md",
- "bugs": {
- "url": "https://github.com/mcavage/node-asn1/issues"
+ "_npmUser": {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
},
- "homepage": "https://github.com/mcavage/node-asn1#readme",
"_id": "asn1@0.1.11",
+ "_engineSupported": true,
+ "_npmVersion": "1.1.0-beta-4",
+ "_nodeVersion": "v0.6.6",
+ "_defaultsLoaded": true,
+ "dist": {
+ "shasum": "559be18376d08a4ec4dbe80877d27818639b2df7",
+ "tarball": "http://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ }
+ ],
+ "directories": {},
"_shasum": "559be18376d08a4ec4dbe80877d27818639b2df7",
"_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz",
- "_from": "asn1@0.1.11"
+ "_from": "asn1@0.1.11",
+ "bugs": {
+ "url": "https://github.com/mcavage/node-asn1/issues"
+ },
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/mcavage/node-asn1#readme"
}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json
index b3317675d..6fcca673f 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json
@@ -16,15 +16,30 @@
"engines": {
"node": ">=0.8"
},
- "readme": "# node-assert-plus\n\nThis library is a super small wrapper over node's assert module that has two\nthings: (1) the ability to disable assertions with the environment variable\nNODE_NDEBUG, and (2) some API wrappers for argument testing. Like\n`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks\nlike this:\n\n var assert = require('assert-plus');\n\n function fooAccount(options, callback) {\n\t assert.object(options, 'options');\n\t\tassert.number(options.id, 'options.id);\n\t\tassert.bool(options.isManager, 'options.isManager');\n\t\tassert.string(options.name, 'options.name');\n\t\tassert.arrayOfString(options.email, 'options.email');\n\t\tassert.func(callback, 'callback');\n\n // Do stuff\n\t\tcallback(null, {});\n }\n\n# API\n\nAll methods that *aren't* part of node's core assert API are simply assumed to\ntake an argument, and then a string 'name' that's not a message; `AssertionError`\nwill be thrown if the assertion fails with a message like:\n\n AssertionError: foo (string) is required\n\tat test (/home/mark/work/foo/foo.js:3:9)\n\tat Object.<anonymous> (/home/mark/work/foo/foo.js:15:1)\n\tat Module._compile (module.js:446:26)\n\tat Object..js (module.js:464:10)\n\tat Module.load (module.js:353:31)\n\tat Function._load (module.js:311:12)\n\tat Array.0 (module.js:484:10)\n\tat EventEmitter._tickCallback (node.js:190:38)\n\nfrom:\n\n function test(foo) {\n\t assert.string(foo, 'foo');\n }\n\nThere you go. You can check that arrays are of a homogenous type with `Arrayof$Type`:\n\n function test(foo) {\n\t assert.arrayOfString(foo, 'foo');\n }\n\nYou can assert IFF an argument is not `undefined` (i.e., an optional arg):\n\n assert.optionalString(foo, 'foo');\n\nLastly, you can opt-out of assertion checking altogether by setting the\nenvironment variable `NODE_NDEBUG=1`. 
This is pseudo-useful if you have\nlots of assertions, and don't want to pay `typeof ()` taxes to v8 in\nproduction.\n\nThe complete list of APIs is:\n\n* assert.bool\n* assert.buffer\n* assert.func\n* assert.number\n* assert.object\n* assert.string\n* assert.arrayOfBool\n* assert.arrayOfFunc\n* assert.arrayOfNumber\n* assert.arrayOfObject\n* assert.arrayOfString\n* assert.optionalBool\n* assert.optionalBuffer\n* assert.optionalFunc\n* assert.optionalNumber\n* assert.optionalObject\n* assert.optionalString\n* assert.optionalArrayOfBool\n* assert.optionalArrayOfFunc\n* assert.optionalArrayOfNumber\n* assert.optionalArrayOfObject\n* assert.optionalArrayOfString\n* assert.AssertionError\n* assert.fail\n* assert.ok\n* assert.equal\n* assert.notEqual\n* assert.deepEqual\n* assert.notDeepEqual\n* assert.strictEqual\n* assert.notStrictEqual\n* assert.throws\n* assert.doesNotThrow\n* assert.ifError\n\n# Installation\n\n npm install assert-plus\n\n## License\n\nThe MIT License (MIT)\nCopyright (c) 2012 Mark Cavage\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n## Bugs\n\nSee <https://github.com/mcavage/node-assert-plus/issues>.\n",
- "readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/mcavage/node-assert-plus/issues"
},
- "homepage": "https://github.com/mcavage/node-assert-plus#readme",
"dependencies": {},
"_id": "assert-plus@0.1.5",
+ "dist": {
+ "shasum": "ee74009413002d84cec7219c6ac811812e723160",
+ "tarball": "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz"
+ },
+ "_from": "assert-plus@>=0.1.5 <0.2.0",
+ "_npmVersion": "1.3.11",
+ "_npmUser": {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ }
+ ],
+ "directories": {},
"_shasum": "ee74009413002d84cec7219c6ac811812e723160",
"_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz",
- "_from": "assert-plus@>=0.1.5 <0.2.0"
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/mcavage/node-assert-plus#readme"
}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json
index 2090a5ea3..c33f8a574 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json
@@ -15,13 +15,28 @@
"type": "git",
"url": "git+https://github.com/rmustacc/node-ctype.git"
},
- "readme": "Node-CType is a way to read and write binary data in structured and easy to use\nformat. Its name comes from the C header file.\n\nTo get started, simply clone the repository or use npm to install it. Once it is\nthere, simply require it.\n\ngit clone git://github.com/rmustacc/node-ctype\nnpm install ctype\nvar mod_ctype = require('ctype')\n\n\nThere are two APIs that you can use, depending on what abstraction you'd like.\nThe low level API let's you read and write individual integers and floats from\nbuffers. The higher level API let's you read and write structures of these. To\nillustrate this, let's looks look at how we would read and write a binary\nencoded x,y point.\n\nIn C we would define this structure as follows:\n\ntypedef struct point {\n\tuint16_t\tp_x;\n\tuint16_t\tp_y;\n} point_t;\n\nTo read a binary encoded point from a Buffer, we first need to create a CType\nparser (where we specify the endian and other options) and add the typedef.\n\nvar parser = new mod_ctype.Parser({ endian: 'big' });\nparser.typedef('point_t', [\n\t{ x: { type: 'uint16_t' } },\n\t{ y: { type: 'uint16_t' } }\n]);\n\nFrom here, given a buffer buf and an offset into it, we can read a point.\n\nvar out = parser.readData([ { point: { type: 'point_t' } } ], buffer, 0);\nconsole.log(out);\n{ point: { x: 23, y: 42 } }\n\nAnother way to get the same information would be to use the low level methods.\nNote that these require you to manually deal with the offset. Here's how we'd\nget the same values of x and y from the buffer.\n\nvar x = mod_ctype.ruint16(buf, 'big', 0);\nvar y = mod_ctype.ruint16(buf, 'big', 2);\nconsole.log(x + ', ' + y);\n23, 42\n\nThe true power of this API comes from the ability to define and nest typedefs,\njust as you would in C. By default, the following types are defined by default.\nNote that they return a Number, unless indicated otherwise.\n\n * int8_t\n * int16_t\n * int32_t\n * int64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * uint8_t\n * uint16_t\n * uint32_t\n * uint64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * float\n * double\n * char (either returns a buffer with that character or a uint8_t)\n * char[] (returns an object with the buffer and the number of characters read which is either the total amount requested or until the first 0)\n\n\nctf2json integration:\n\nNode-CType supports consuming the output of ctf2json. Once you read in a JSON file,\nall you have to do to add all the definitions it contains is:\n\nvar data, parser;\ndata = JSON.parse(parsedJSONData);\nparser = mod_ctype.parseCTF(data, { endian: 'big' });\n\nFor more documentation, see the file README.old. Full documentation is in the\nprocess of being rewritten as a series of manual pages which will be available\nin the repository and online for viewing.\n\nTo read the ctio manual page simple run, from the root of the workspace:\n\nman -Mman -s 3ctype ctio\n",
- "readmeFilename": "README",
- "bugs": {
- "url": "https://github.com/rmustacc/node-ctype/issues"
- },
"_id": "ctype@0.5.3",
+ "dist": {
+ "shasum": "82c18c2461f74114ef16c135224ad0b9144ca12f",
+ "tarball": "http://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz"
+ },
+ "_npmVersion": "1.1.59",
+ "_npmUser": {
+ "name": "rm",
+ "email": "rm@fingolfin.org"
+ },
+ "maintainers": [
+ {
+ "name": "rm",
+ "email": "rm@fingolfin.org"
+ }
+ ],
+ "directories": {},
"_shasum": "82c18c2461f74114ef16c135224ad0b9144ca12f",
"_resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.3.tgz",
- "_from": "ctype@0.5.3"
+ "_from": "ctype@0.5.3",
+ "bugs": {
+ "url": "https://github.com/rmustacc/node-ctype/issues"
+ },
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/isstream/.jshintrc b/deps/npm/node_modules/request/node_modules/isstream/.jshintrc
deleted file mode 100644
index c8ef3ca40..000000000
--- a/deps/npm/node_modules/request/node_modules/isstream/.jshintrc
+++ /dev/null
@@ -1,59 +0,0 @@
-{
- "predef": [ ]
- , "bitwise": false
- , "camelcase": false
- , "curly": false
- , "eqeqeq": false
- , "forin": false
- , "immed": false
- , "latedef": false
- , "noarg": true
- , "noempty": true
- , "nonew": true
- , "plusplus": false
- , "quotmark": true
- , "regexp": false
- , "undef": true
- , "unused": true
- , "strict": false
- , "trailing": true
- , "maxlen": 120
- , "asi": true
- , "boss": true
- , "debug": true
- , "eqnull": true
- , "esnext": true
- , "evil": true
- , "expr": true
- , "funcscope": false
- , "globalstrict": false
- , "iterator": false
- , "lastsemic": true
- , "laxbreak": true
- , "laxcomma": true
- , "loopfunc": true
- , "multistr": false
- , "onecase": false
- , "proto": false
- , "regexdash": false
- , "scripturl": true
- , "smarttabs": false
- , "shadow": false
- , "sub": true
- , "supernew": false
- , "validthis": true
- , "browser": true
- , "couch": false
- , "devel": false
- , "dojo": false
- , "mootools": false
- , "node": true
- , "nonstandard": true
- , "prototypejs": false
- , "rhino": false
- , "worker": true
- , "wsh": false
- , "nomen": false
- , "onevar": false
- , "passfail": false
-} \ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/isstream/package.json b/deps/npm/node_modules/request/node_modules/isstream/package.json
index d0338b882..74e97e608 100644
--- a/deps/npm/node_modules/request/node_modules/isstream/package.json
+++ b/deps/npm/node_modules/request/node_modules/isstream/package.json
@@ -33,10 +33,27 @@
"url": "https://github.com/rvagg/isstream/issues"
},
"homepage": "https://github.com/rvagg/isstream",
- "readme": "# isStream\n\n[![Build Status](https://secure.travis-ci.org/rvagg/isstream.png)](http://travis-ci.org/rvagg/isstream)\n\n**Test if an object is a `Stream`**\n\n[![NPM](https://nodei.co/npm/isstream.svg)](https://nodei.co/npm/isstream/)\n\nThe missing `Stream.isStream(obj)`: determine if an object is standard Node.js `Stream`. Works for Node-core `Stream` objects (for 0.8, 0.10, 0.11, and in theory, older and newer versions) and all versions of **[readable-stream](https://github.com/isaacs/readable-stream)**.\n\n## Usage:\n\n```js\nvar isStream = require('isstream')\nvar Stream = require('stream')\n\nisStream(new Stream()) // true\n\nisStream({}) // false\n\nisStream(new Stream.Readable()) // true\nisStream(new Stream.Writable()) // true\nisStream(new Stream.Duplex()) // true\nisStream(new Stream.Transform()) // true\nisStream(new Stream.PassThrough()) // true\n```\n\n## But wait! There's more!\n\nYou can also test for `isReadable(obj)`, `isWritable(obj)` and `isDuplex(obj)` to test for implementations of Streams2 (and Streams3) base classes.\n\n```js\nvar isReadable = require('isstream').isReadable\nvar isWritable = require('isstream').isWritable\nvar isDuplex = require('isstream').isDuplex\nvar Stream = require('stream')\n\nisReadable(new Stream()) // false\nisWritable(new Stream()) // false\nisDuplex(new Stream()) // false\n\nisReadable(new Stream.Readable()) // true\nisReadable(new Stream.Writable()) // false\nisReadable(new Stream.Duplex()) // true\nisReadable(new Stream.Transform()) // true\nisReadable(new Stream.PassThrough()) // true\n\nisWritable(new Stream.Readable()) // false\nisWritable(new Stream.Writable()) // true\nisWritable(new Stream.Duplex()) // true\nisWritable(new Stream.Transform()) // true\nisWritable(new Stream.PassThrough()) // true\n\nisDuplex(new Stream.Readable()) // false\nisDuplex(new Stream.Writable()) // false\nisDuplex(new Stream.Duplex()) // true\nisDuplex(new Stream.Transform()) // true\nisDuplex(new Stream.PassThrough()) // true\n```\n\n*Reminder: when implementing your own streams, please [use **readable-stream** rather than core streams](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).*\n\n\n## License\n\n**isStream** is Copyright (c) 2015 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licenced under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.\n",
- "readmeFilename": "README.md",
+ "gitHead": "cd39cba6da939b4fc9110825203adc506422c3dc",
"_id": "isstream@0.1.2",
"_shasum": "47e63f7af55afa6f92e1500e690eb8b8529c099a",
+ "_from": "isstream@>=0.1.1 <0.2.0",
+ "_npmVersion": "2.6.1",
+ "_nodeVersion": "1.4.3",
+ "_npmUser": {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ "maintainers": [
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ }
+ ],
+ "dist": {
+ "shasum": "47e63f7af55afa6f92e1500e690eb8b8529c099a",
+ "tarball": "http://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
- "_from": "isstream@>=0.1.1 <0.2.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md b/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md
index 3f4ebe9a4..c5b8f5f13 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md
@@ -1,3 +1,47 @@
+2.1.3 / 2015-07-13
+==================
+
+ * deps: mime-db@~1.15.0
+ - Add new mime types
+
+2.1.2 / 2015-06-25
+==================
+
+ * deps: mime-db@~1.14.0
+ - Add new mime types
+
+2.1.1 / 2015-06-08
+==================
+
+ * perf: fix deopt during mapping
+
+2.1.0 / 2015-06-07
+==================
+
+ * Fix incorrectly treating extension-less file name as extension
+ - i.e. `'path/to/json'` will no longer return `application/json`
+ * Fix `.charset(type)` to accept parameters
+ * Fix `.charset(type)` to match case-insensitive
+ * Improve generation of extension to MIME mapping
+ * Refactor internals for readability and no argument reassignment
+ * Prefer `application/*` MIME types from the same source
+ * Prefer any type over `application/octet-stream`
+ * deps: mime-db@~1.13.0
+ - Add nginx as a source
+ - Add new mime types
+
+2.0.14 / 2015-06-06
+===================
+
+ * deps: mime-db@~1.12.0
+ - Add new mime types
+
+2.0.13 / 2015-05-31
+===================
+
+ * deps: mime-db@~1.11.0
+ - Add new mime types
+
2.0.12 / 2015-05-19
===================
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/LICENSE b/deps/npm/node_modules/request/node_modules/mime-types/LICENSE
index a7ae8ee9b..06166077b 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/LICENSE
+++ b/deps/npm/node_modules/request/node_modules/mime-types/LICENSE
@@ -1,22 +1,23 @@
+(The MIT License)
-The MIT License (MIT)
+Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
+Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
-Copyright (c) 2014 Jonathan Ong me@jongleberry.com
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/README.md b/deps/npm/node_modules/request/node_modules/mime-types/README.md
index 372749336..e26295d04 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/README.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/README.md
@@ -42,10 +42,11 @@ All functions return `false` if input is invalid or not found.
Lookup the content-type associated with a file.
```js
-mime.lookup('json') // 'application/json'
-mime.lookup('.md') // 'text/x-markdown'
-mime.lookup('file.html') // 'text/html'
-mime.lookup('folder/file.js') // 'application/javascript'
+mime.lookup('json') // 'application/json'
+mime.lookup('.md') // 'text/x-markdown'
+mime.lookup('file.html') // 'text/html'
+mime.lookup('folder/file.js') // 'application/javascript'
+mime.lookup('folder/.htaccess') // false
mime.lookup('cats') // false
```
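The hunk above covers `lookup()` only. As a minimal, hedged sketch of the sibling helpers the same package exposes (the function names match the exports added in `index.js` further down in this patch; the return values are illustrative, inferred from this diff rather than copied from it):

```js
// Assumes mime-types ~2.1 is installed; outputs are illustrative.
var mime = require('mime-types')

mime.contentType('markdown')               // 'text/x-markdown; charset=utf-8'
mime.extension('text/html; charset=utf-8') // 'html'
mime.charset('text/x-markdown')            // 'UTF-8' (text/* defaults to UTF-8)
```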
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/index.js b/deps/npm/node_modules/request/node_modules/mime-types/index.js
index b46a202f5..9edf72b75 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/index.js
+++ b/deps/npm/node_modules/request/node_modules/mime-types/index.js
@@ -1,63 +1,188 @@
+/*!
+ * mime-types
+ * Copyright(c) 2014 Jonathan Ong
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ * @private
+ */
var db = require('mime-db')
+var extname = require('path').extname
-// types[extension] = type
-exports.types = Object.create(null)
-// extensions[type] = [extensions]
+/**
+ * Module variables.
+ * @private
+ */
+
+var extractTypeRegExp = /^\s*([^;\s]*)(?:;|\s|$)/
+var textTypeRegExp = /^text\//i
+
+/**
+ * Module exports.
+ * @public
+ */
+
+exports.charset = charset
+exports.charsets = { lookup: charset }
+exports.contentType = contentType
+exports.extension = extension
exports.extensions = Object.create(null)
+exports.lookup = lookup
+exports.types = Object.create(null)
-Object.keys(db).forEach(function (name) {
- var mime = db[name]
- var exts = mime.extensions
- if (!exts || !exts.length) return
- exports.extensions[name] = exts
- exts.forEach(function (ext) {
- exports.types[ext] = name
- })
-})
-
-exports.lookup = function (string) {
- if (!string || typeof string !== "string") return false
- // remove any leading paths, though we should just use path.basename
- string = string.replace(/.*[\.\/\\]/, '').toLowerCase()
- if (!string) return false
- return exports.types[string] || false
-}
+// Populate the extensions/types maps
+populateMaps(exports.extensions, exports.types)
-exports.extension = function (type) {
- if (!type || typeof type !== "string") return false
- // to do: use media-typer
- type = type.match(/^\s*([^;\s]*)(?:;|\s|$)/)
- if (!type) return false
- var exts = exports.extensions[type[1].toLowerCase()]
- if (!exts || !exts.length) return false
- return exts[0]
-}
+/**
+ * Get the default charset for a MIME type.
+ *
+ * @param {string} type
+ * @return {boolean|string}
+ */
+
+function charset(type) {
+ if (!type || typeof type !== 'string') {
+ return false
+ }
-// type has to be an exact mime type
-exports.charset = function (type) {
- var mime = db[type]
- if (mime && mime.charset) return mime.charset
+ // TODO: use media-typer
+ var match = extractTypeRegExp.exec(type)
+ var mime = match && db[match[1].toLowerCase()]
+
+ if (mime && mime.charset) {
+ return mime.charset
+ }
// default text/* to utf-8
- if (/^text\//.test(type)) return 'UTF-8'
+ if (match && textTypeRegExp.test(match[1])) {
+ return 'UTF-8'
+ }
return false
}
-// backwards compatibility
-exports.charsets = {
- lookup: exports.charset
+/**
+ * Create a full Content-Type header given a MIME type or extension.
+ *
+ * @param {string} str
+ * @return {boolean|string}
+ */
+
+function contentType(str) {
+ // TODO: should this even be in this module?
+ if (!str || typeof str !== 'string') {
+ return false
+ }
+
+ var mime = str.indexOf('/') === -1
+ ? exports.lookup(str)
+ : str
+
+ if (!mime) {
+ return false
+ }
+
+ // TODO: use content-type or other module
+ if (mime.indexOf('charset') === -1) {
+ var charset = exports.charset(mime)
+ if (charset) mime += '; charset=' + charset.toLowerCase()
+ }
+
+ return mime
+}
+
+/**
+ * Get the default extension for a MIME type.
+ *
+ * @param {string} type
+ * @return {boolean|string}
+ */
+
+function extension(type) {
+ if (!type || typeof type !== 'string') {
+ return false
+ }
+
+ // TODO: use media-typer
+ var match = extractTypeRegExp.exec(type)
+
+ // get extensions
+ var exts = match && exports.extensions[match[1].toLowerCase()]
+
+ if (!exts || !exts.length) {
+ return false
+ }
+
+ return exts[0]
}
-// to do: maybe use set-type module or something
-exports.contentType = function (type) {
- if (!type || typeof type !== "string") return false
- if (!~type.indexOf('/')) type = exports.lookup(type)
- if (!type) return false
- if (!~type.indexOf('charset')) {
- var charset = exports.charset(type)
- if (charset) type += '; charset=' + charset.toLowerCase()
+/**
+ * Lookup the MIME type for a file path/extension.
+ *
+ * @param {string} path
+ * @return {boolean|string}
+ */
+
+function lookup(path) {
+ if (!path || typeof path !== 'string') {
+ return false
+ }
+
+ // get the extension ("ext" or ".ext" or full path)
+ var extension = extname('x.' + path)
+ .toLowerCase()
+ .substr(1)
+
+ if (!extension) {
+ return false
}
- return type
+
+ return exports.types[extension] || false
+}
+
+/**
+ * Populate the extensions and types maps.
+ * @private
+ */
+
+function populateMaps(extensions, types) {
+ // source preference (least -> most)
+ var preference = ['nginx', 'apache', undefined, 'iana']
+
+ Object.keys(db).forEach(function forEachMimeType(type) {
+ var mime = db[type]
+ var exts = mime.extensions
+
+ if (!exts || !exts.length) {
+ return
+ }
+
+ // mime -> extensions
+ extensions[type] = exts
+
+ // extension -> mime
+ for (var i = 0; i < exts.length; i++) {
+ var extension = exts[i]
+
+ if (types[extension]) {
+ var from = preference.indexOf(db[types[extension]].source)
+ var to = preference.indexOf(mime.source)
+
+ if (types[extension] !== 'application/octet-stream'
+ && from > to || (from === to && types[extension].substr(0, 12) === 'application/')) {
+ // skip the remapping
+ return
+ }
+ }
+
+ // set the extension -> mime
+ types[extension] = type
+ }
+ })
}
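The rewrite above replaces the ad-hoc string munging with named `charset()`, `contentType()`, `extension()` and `lookup()` functions plus a `populateMaps()` pass over `mime-db`. A rough behaviour sketch, with values inferred from the new code and the `db.json` changes below (not taken verbatim from the patch):

```js
// Behaviour sketch for the rewritten index.js; assumes mime-types ~2.1.
var mime = require('mime-types')

// lookup() now uses path.extname(), so extension-less names no longer match
mime.lookup('folder/file.js')    // 'application/javascript'
mime.lookup('folder/.htaccess')  // false (a dotfile has no mappable extension)

// charset() and contentType() tolerate parameters via extractTypeRegExp
mime.charset('text/html; level=1')   // 'UTF-8'
mime.contentType('text/html')        // 'text/html; charset=utf-8'
```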
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md
index f24590ebd..fa40614d9 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md
@@ -1,3 +1,49 @@
+1.15.0 / 2015-07-13
+===================
+
+ * Add `application/x-httpd-php`
+
+1.14.0 / 2015-06-25
+===================
+
+ * Add `application/scim+json`
+ * Add `application/vnd.3gpp.ussd+xml`
+ * Add `application/vnd.biopax.rdf+xml`
+ * Add `text/x-processing`
+
+1.13.0 / 2015-06-07
+===================
+
+ * Add nginx as a source
+ * Add `application/x-cocoa`
+ * Add `application/x-java-archive-diff`
+ * Add `application/x-makeself`
+ * Add `application/x-perl`
+ * Add `application/x-pilot`
+ * Add `application/x-redhat-package-manager`
+ * Add `application/x-sea`
+ * Add `audio/x-m4a`
+ * Add `audio/x-realaudio`
+ * Add `image/x-jng`
+ * Add `text/mathml`
+
+1.12.0 / 2015-06-05
+===================
+
+ * Add `application/bdoc`
+ * Add `application/vnd.hyperdrive+json`
+ * Add `application/x-bdoc`
+ * Add extension `.rtf` to `text/rtf`
+
+1.11.0 / 2015-05-31
+===================
+
+ * Add `audio/wav`
+ * Add `audio/wave`
+ * Add extension `.litcoffee` to `text/coffeescript`
+ * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data`
+ * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install`
+
1.10.0 / 2015-05-19
===================
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/README.md b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/README.md
index 2c54bb40c..164cca030 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/README.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/README.md
@@ -13,6 +13,7 @@ It aggregates data from the following sources:
- http://www.iana.org/assignments/media-types/media-types.xhtml
- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types
+- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types
## Installation
@@ -20,8 +21,12 @@ It aggregates data from the following sources:
npm install mime-db
```
-If you're crazy enough to use this in the browser,
-you can just grab the JSON file:
+### Database Download
+
+If you're crazy enough to use this in the browser, you can just grab the
+JSON file using [RawGit](https://rawgit.com/). It is recommended to replace
+`master` with [a release tag](https://github.com/jshttp/mime-db/tags) as the
+JSON format may change in the future.
```
https://cdn.rawgit.com/jshttp/mime-db/master/db.json
@@ -45,6 +50,7 @@ Each mime type has the following properties:
If not set, it's probably a custom media type.
- `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types)
- `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml)
+ - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types)
- `.extensions[]` - known extensions associated with this mime type.
- `.compressible` - whether a file of this type can be gzipped.
- `.charset` - the default charset associated with this type, if any (a short usage sketch follows this list).
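As a short illustrative sketch of reading these properties (assuming `mime-db` is installed and, as in this version, its main export is the parsed `db.json` object; the values shown reflect the `db.json` changes later in this patch):

```js
// Reading one entry from the database; values are illustrative.
var db = require('mime-db')

var entry = db['text/html']
entry.source        // 'iana'
entry.compressible  // true
entry.extensions    // ['html', 'htm', 'shtml'] after this upgrade
```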
@@ -56,7 +62,7 @@ If unknown, every property could be `undefined`.
To edit the database, only make PRs against `src/custom.json` or
`src/custom-suffix.json`.
-To update the build, run `npm run update`.
+To update the build, run `npm run build`.
## Adding Custom Media Types
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json
index d5aa78782..2f2dc448e 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json
@@ -105,6 +105,10 @@
"application/batch-smtp": {
"source": "iana"
},
+ "application/bdoc": {
+ "compressible": false,
+ "extensions": ["bdoc"]
+ },
"application/beep+xml": {
"source": "iana"
},
@@ -410,7 +414,7 @@
"application/java-archive": {
"source": "apache",
"compressible": false,
- "extensions": ["jar"]
+ "extensions": ["jar","war","ear"]
},
"application/java-serialized-object": {
"source": "apache",
@@ -690,7 +694,7 @@
"application/octet-stream": {
"source": "iana",
"compressible": false,
- "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","buffer"]
+ "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"]
},
"application/oda": {
"source": "iana",
@@ -933,6 +937,10 @@
"application/scaip+xml": {
"source": "iana"
},
+ "application/scim+json": {
+ "source": "iana",
+ "compressible": true
+ },
"application/scvp-cv-request": {
"source": "iana",
"extensions": ["scq"]
@@ -1158,6 +1166,9 @@
"application/vnd.3gpp.sms": {
"source": "iana"
},
+ "application/vnd.3gpp.ussd+xml": {
+ "source": "iana"
+ },
"application/vnd.3gpp2.bcmcsinfo+xml": {
"source": "iana"
},
@@ -1319,6 +1330,9 @@
"source": "iana",
"compressible": true
},
+ "application/vnd.biopax.rdf+xml": {
+ "source": "iana"
+ },
"application/vnd.blueice.multipass": {
"source": "iana",
"extensions": ["mpm"]
@@ -2037,7 +2051,12 @@
"source": "iana"
},
"application/vnd.hydrostatix.sof-data": {
- "source": "iana"
+ "source": "iana",
+ "extensions": ["sfd-hdstx"]
+ },
+ "application/vnd.hyperdrive+json": {
+ "source": "iana",
+ "compressible": true
},
"application/vnd.hzn-3d-crossword": {
"source": "iana"
@@ -2728,7 +2747,8 @@
"extensions": ["ngdat"]
},
"application/vnd.nokia.n-gage.symbian.install": {
- "source": "iana"
+ "source": "iana",
+ "extensions": ["n-gage"]
},
"application/vnd.nokia.ncd": {
"source": "iana"
@@ -4083,6 +4103,10 @@
"source": "apache",
"extensions": ["bcpio"]
},
+ "application/x-bdoc": {
+ "compressible": false,
+ "extensions": ["bdoc"]
+ },
"application/x-bittorrent": {
"source": "apache",
"extensions": ["torrent"]
@@ -4124,6 +4148,10 @@
"application/x-chrome-extension": {
"extensions": ["crx"]
},
+ "application/x-cocoa": {
+ "source": "nginx",
+ "extensions": ["cco"]
+ },
"application/x-compress": {
"source": "apache"
},
@@ -4270,6 +4298,10 @@
"source": "apache",
"extensions": ["hdf"]
},
+ "application/x-httpd-php": {
+ "compressible": true,
+ "extensions": ["php"]
+ },
"application/x-install-instructions": {
"source": "apache",
"extensions": ["install"]
@@ -4278,6 +4310,10 @@
"source": "apache",
"extensions": ["iso"]
},
+ "application/x-java-archive-diff": {
+ "source": "nginx",
+ "extensions": ["jardiff"]
+ },
"application/x-java-jnlp-file": {
"source": "apache",
"compressible": false,
@@ -4298,6 +4334,10 @@
"source": "apache",
"extensions": ["lzh","lha"]
},
+ "application/x-makeself": {
+ "source": "nginx",
+ "extensions": ["run"]
+ },
"application/x-mie": {
"source": "apache",
"extensions": ["mie"]
@@ -4389,6 +4429,14 @@
"source": "apache",
"extensions": ["nzb"]
},
+ "application/x-perl": {
+ "source": "nginx",
+ "extensions": ["pl","pm"]
+ },
+ "application/x-pilot": {
+ "source": "nginx",
+ "extensions": ["prc","pdb"]
+ },
"application/x-pkcs12": {
"source": "apache",
"compressible": false,
@@ -4407,10 +4455,18 @@
"compressible": false,
"extensions": ["rar"]
},
+ "application/x-redhat-package-manager": {
+ "source": "nginx",
+ "extensions": ["rpm"]
+ },
"application/x-research-info-systems": {
"source": "apache",
"extensions": ["ris"]
},
+ "application/x-sea": {
+ "source": "nginx",
+ "extensions": ["sea"]
+ },
"application/x-sh": {
"source": "apache",
"compressible": true,
@@ -4469,7 +4525,7 @@
},
"application/x-tcl": {
"source": "apache",
- "extensions": ["tcl"]
+ "extensions": ["tcl","tk"]
},
"application/x-tex": {
"source": "apache",
@@ -4505,7 +4561,7 @@
},
"application/x-x509-ca-cert": {
"source": "apache",
- "extensions": ["der","crt"]
+ "extensions": ["der","crt","pem"]
},
"application/x-xfig": {
"source": "apache",
@@ -5100,6 +5156,14 @@
"audio/vorbis-config": {
"source": "iana"
},
+ "audio/wav": {
+ "compressible": false,
+ "extensions": ["wav"]
+ },
+ "audio/wave": {
+ "compressible": false,
+ "extensions": ["wav"]
+ },
"audio/webm": {
"source": "apache",
"compressible": false,
@@ -5123,6 +5187,10 @@
"source": "apache",
"extensions": ["flac"]
},
+ "audio/x-m4a": {
+ "source": "nginx",
+ "extensions": ["m4a"]
+ },
"audio/x-matroska": {
"source": "apache",
"extensions": ["mka"]
@@ -5147,6 +5215,10 @@
"source": "apache",
"extensions": ["rmp"]
},
+ "audio/x-realaudio": {
+ "source": "nginx",
+ "extensions": ["ra"]
+ },
"audio/x-tta": {
"source": "apache"
},
@@ -5402,11 +5474,16 @@
"compressible": true,
"extensions": ["ico"]
},
+ "image/x-jng": {
+ "source": "nginx",
+ "extensions": ["jng"]
+ },
"image/x-mrsid-image": {
"source": "apache",
"extensions": ["sid"]
},
"image/x-ms-bmp": {
+ "source": "nginx",
"compressible": true,
"extensions": ["bmp"]
},
@@ -5673,7 +5750,7 @@
"compressible": true
},
"text/coffeescript": {
- "extensions": ["coffee"]
+ "extensions": ["coffee","litcoffee"]
},
"text/css": {
"source": "iana",
@@ -5715,7 +5792,7 @@
"text/html": {
"source": "iana",
"compressible": true,
- "extensions": ["html","htm"]
+ "extensions": ["html","htm","shtml"]
},
"text/jade": {
"extensions": ["jade"]
@@ -5737,6 +5814,10 @@
"text/markdown": {
"source": "iana"
},
+ "text/mathml": {
+ "source": "nginx",
+ "extensions": ["mml"]
+ },
"text/mizar": {
"source": "iana"
},
@@ -5781,7 +5862,9 @@
"extensions": ["rtx"]
},
"text/rtf": {
- "source": "iana"
+ "source": "iana",
+ "compressible": true,
+ "extensions": ["rtf"]
},
"text/rtp-enc-aescm128": {
"source": "iana"
@@ -5942,6 +6025,7 @@
"extensions": ["c","cc","cxx","cpp","h","hh","dic"]
},
"text/x-component": {
+ "source": "nginx",
"extensions": ["htc"]
},
"text/x-fortran": {
@@ -5980,6 +6064,10 @@
"source": "apache",
"extensions": ["p","pas"]
},
+ "text/x-processing": {
+ "compressible": true,
+ "extensions": ["pde"]
+ },
"text/x-sass": {
"extensions": ["sass"]
},
@@ -6008,7 +6096,8 @@
},
"text/xml": {
"source": "iana",
- "compressible": true
+ "compressible": true,
+ "extensions": ["xml"]
},
"text/xml-external-parsed-entity": {
"source": "iana"
@@ -6021,7 +6110,7 @@
},
"video/3gpp": {
"source": "apache",
- "extensions": ["3gp"]
+ "extensions": ["3gp","3gpp"]
},
"video/3gpp-tt": {
"source": "apache"
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json
index 6ac498a51..a07ee866a 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json
@@ -1,7 +1,7 @@
{
"name": "mime-db",
"description": "Media Type Database",
- "version": "1.10.0",
+ "version": "1.15.0",
"contributors": [
{
"name": "Douglas Christopher Wilson",
@@ -33,14 +33,14 @@
"url": "git+https://github.com/jshttp/mime-db.git"
},
"devDependencies": {
- "bluebird": "~2.9.20",
- "co": "4.5.4",
+ "bluebird": "2.9.33",
+ "co": "4.6.0",
"cogent": "1.0.1",
- "csv-parse": "0.1.1",
+ "csv-parse": "0.1.3",
"gnode": "0.1.1",
- "istanbul": "0.3.9",
+ "istanbul": "0.3.17",
"mocha": "1.21.5",
- "raw-body": "2.0.1",
+ "raw-body": "2.1.2",
"stream-to-array": "2"
},
"files": [
@@ -55,20 +55,20 @@
},
"scripts": {
"build": "node scripts/build",
- "fetch": "gnode scripts/extensions && gnode scripts/types",
+ "fetch": "gnode scripts/fetch-apache && gnode scripts/fetch-iana && gnode scripts/fetch-nginx",
"test": "mocha --reporter spec --bail --check-leaks test/",
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/",
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/",
"update": "npm run fetch && npm run build"
},
- "gitHead": "260552f9177fe78986b92699999f81999c7fe43c",
+ "gitHead": "96922b79fcaacf8c2a95ce3368739ec71c9471a2",
"bugs": {
"url": "https://github.com/jshttp/mime-db/issues"
},
"homepage": "https://github.com/jshttp/mime-db",
- "_id": "mime-db@1.10.0",
- "_shasum": "e6308063c758ebd12837874c3d1ea9170766b03b",
- "_from": "mime-db@>=1.10.0 <1.11.0",
+ "_id": "mime-db@1.15.0",
+ "_shasum": "d219e6214bbcae23a6fa69c0868c4fadc1405e8a",
+ "_from": "mime-db@>=1.15.0 <1.16.0",
"_npmVersion": "1.4.28",
"_npmUser": {
"name": "dougwilson",
@@ -85,10 +85,10 @@
}
],
"dist": {
- "shasum": "e6308063c758ebd12837874c3d1ea9170766b03b",
- "tarball": "http://registry.npmjs.org/mime-db/-/mime-db-1.10.0.tgz"
+ "shasum": "d219e6214bbcae23a6fa69c0868c4fadc1405e8a",
+ "tarball": "http://registry.npmjs.org/mime-db/-/mime-db-1.15.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.10.0.tgz",
+ "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.15.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/package.json b/deps/npm/node_modules/request/node_modules/mime-types/package.json
index 1db9255bd..4fbb21420 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/package.json
+++ b/deps/npm/node_modules/request/node_modules/mime-types/package.json
@@ -1,7 +1,7 @@
{
"name": "mime-types",
"description": "The ultimate javascript content-type utility.",
- "version": "2.0.12",
+ "version": "2.1.3",
"contributors": [
{
"name": "Douglas Christopher Wilson",
@@ -28,10 +28,10 @@
"url": "git+https://github.com/jshttp/mime-types.git"
},
"dependencies": {
- "mime-db": "~1.10.0"
+ "mime-db": "~1.15.0"
},
"devDependencies": {
- "istanbul": "0.3.9",
+ "istanbul": "0.3.17",
"mocha": "~1.21.5"
},
"files": [
@@ -47,14 +47,14 @@
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot test/test.js",
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot test/test.js"
},
- "gitHead": "a1e3c4b6bbb6afb615c8a058481d58cb57a4cb95",
+ "gitHead": "565c49ad5683d4a123a170da3444ed32ce426c3a",
"bugs": {
"url": "https://github.com/jshttp/mime-types/issues"
},
"homepage": "https://github.com/jshttp/mime-types",
- "_id": "mime-types@2.0.12",
- "_shasum": "87ae9f124e94f8e440c93d1a72d0dccecdb71135",
- "_from": "mime-types@>=2.0.1 <2.1.0",
+ "_id": "mime-types@2.1.3",
+ "_shasum": "f259849c7eb1f85b8f5f826187278a7f74f0c966",
+ "_from": "mime-types@>=2.1.2 <2.2.0",
"_npmVersion": "1.4.28",
"_npmUser": {
"name": "dougwilson",
@@ -75,10 +75,10 @@
}
],
"dist": {
- "shasum": "87ae9f124e94f8e440c93d1a72d0dccecdb71135",
- "tarball": "http://registry.npmjs.org/mime-types/-/mime-types-2.0.12.tgz"
+ "shasum": "f259849c7eb1f85b8f5f826187278a7f74f0c966",
+ "tarball": "http://registry.npmjs.org/mime-types/-/mime-types-2.1.3.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.12.tgz",
+ "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.3.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/qs/.jshintignore b/deps/npm/node_modules/request/node_modules/qs/.jshintignore
deleted file mode 100644
index 3c3629e64..000000000
--- a/deps/npm/node_modules/request/node_modules/qs/.jshintignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/deps/npm/node_modules/request/node_modules/qs/.jshintrc b/deps/npm/node_modules/request/node_modules/qs/.jshintrc
deleted file mode 100644
index 997b3f7d4..000000000
--- a/deps/npm/node_modules/request/node_modules/qs/.jshintrc
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "node": true,
-
- "curly": true,
- "latedef": true,
- "quotmark": true,
- "undef": true,
- "unused": true,
- "trailing": true
-}
diff --git a/deps/npm/node_modules/request/node_modules/qs/Makefile b/deps/npm/node_modules/request/node_modules/qs/Makefile
deleted file mode 100644
index 31cc899d4..000000000
--- a/deps/npm/node_modules/request/node_modules/qs/Makefile
+++ /dev/null
@@ -1,8 +0,0 @@
-test:
- @node node_modules/lab/bin/lab -a code -L
-test-cov:
- @node node_modules/lab/bin/lab -a code -t 100 -L
-test-cov-html:
- @node node_modules/lab/bin/lab -a code -L -r html -o coverage.html
-
-.PHONY: test test-cov test-cov-html
diff --git a/deps/npm/node_modules/request/node_modules/qs/Readme.md b/deps/npm/node_modules/request/node_modules/qs/Readme.md
index 0c72aba5d..48a0de97f 100644
--- a/deps/npm/node_modules/request/node_modules/qs/Readme.md
+++ b/deps/npm/node_modules/request/node_modules/qs/Readme.md
@@ -34,10 +34,17 @@ For example, the string `'foo[bar]=baz'` converts to:
}
```
-The parsed value is returned as a plain object, created via `Object.create(null)` and as such you should be aware that prototype methods do not exist on it and a user may set those names to whatever value they like:
+When using the `plainObjects` option the parsed value is returned as a plain object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like:
```javascript
-Qs.parse('a.hasOwnProperty=b');
+Qs.parse('a.hasOwnProperty=b', { plainObjects: true });
+// { a: { hasOwnProperty: 'b' } }
+```
+
+By default, parameters that would overwrite properties on the object prototype are ignored. If you wish to keep the data from those fields, either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true`, which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option.
+
+```javascript
+Qs.parse('a.hasOwnProperty=b', { allowPrototypes: true });
// { a: { hasOwnProperty: 'b' } }
```
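A short sketch of how the three modes differ for a single prototype-colliding key; the outputs are inferred from the `parse.js` changes later in this diff and are illustrative, not part of the upstream Readme:

```javascript
var Qs = require('qs');

// default: the colliding parameter is silently dropped
Qs.parse('hasOwnProperty=b');
// {}

// allowPrototypes: kept as an own property that shadows the prototype method
Qs.parse('hasOwnProperty=b', { allowPrototypes: true });
// { hasOwnProperty: 'b' }

// plainObjects: parsed onto Object.create(null), so there is nothing to collide with
Qs.parse('hasOwnProperty=b', { plainObjects: true });
// { hasOwnProperty: 'b' }
```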
@@ -111,6 +118,13 @@ Qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ });
// { a: 'b', c: 'd', e: 'f' }
```
+Option `allowDots` can be used to disable dot notation:
+
+```javascript
+Qs.parse('a.b=c', { allowDots: false });
+// { 'a.b': 'c' }
+```
+
### Parsing Arrays
**qs** can also parse arrays using a similar `[]` notation:
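(The example that sentence introduces falls outside this hunk's context.) A rough illustration of the `[]` notation, assumed rather than quoted from the patch:

```javascript
Qs.parse('a[]=b&a[]=c');
// { a: ['b', 'c'] }

Qs.parse('a[1]=c&a[0]=b');
// { a: ['b', 'c'] }  (explicit indices are honoured, up to arrayLimit)
```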
diff --git a/deps/npm/node_modules/request/node_modules/qs/index.js b/deps/npm/node_modules/request/node_modules/qs/index.js
deleted file mode 100644
index 2291cd858..000000000
--- a/deps/npm/node_modules/request/node_modules/qs/index.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./lib/');
diff --git a/deps/npm/node_modules/request/node_modules/qs/lib/parse.js b/deps/npm/node_modules/request/node_modules/qs/lib/parse.js
index 1a1e205e3..e7c56c5ce 100644
--- a/deps/npm/node_modules/request/node_modules/qs/lib/parse.js
+++ b/deps/npm/node_modules/request/node_modules/qs/lib/parse.js
@@ -10,7 +10,9 @@ var internals = {
depth: 5,
arrayLimit: 20,
parameterLimit: 1000,
- strictNullHandling: false
+ strictNullHandling: false,
+ plainObjects: false,
+ allowPrototypes: false
};
@@ -61,7 +63,7 @@ internals.parseObject = function (chain, val, options) {
obj = obj.concat(internals.parseObject(chain, val, options));
}
else {
- obj = Object.create(null);
+ obj = options.plainObjects ? Object.create(null) : {};
var cleanRoot = root[0] === '[' && root[root.length - 1] === ']' ? root.slice(1, root.length - 1) : root;
var index = parseInt(cleanRoot, 10);
var indexString = '' + index;
@@ -109,6 +111,16 @@ internals.parseKeys = function (key, val, options) {
var keys = [];
if (segment[1]) {
+ // If we aren't using plain objects, optionally prefix keys
+ // that would overwrite object prototype properties
+ if (!options.plainObjects &&
+ Object.prototype.hasOwnProperty(segment[1])) {
+
+ if (!options.allowPrototypes) {
+ return;
+ }
+ }
+
keys.push(segment[1]);
}
@@ -118,6 +130,13 @@ internals.parseKeys = function (key, val, options) {
while ((segment = child.exec(key)) !== null && i < options.depth) {
++i;
+ if (!options.plainObjects &&
+ Object.prototype.hasOwnProperty(segment[1].replace(/\[|\]/g, ''))) {
+
+ if (!options.allowPrototypes) {
+ continue;
+ }
+ }
keys.push(segment[1]);
}
@@ -133,25 +152,26 @@ internals.parseKeys = function (key, val, options) {
module.exports = function (str, options) {
- if (str === '' ||
- str === null ||
- typeof str === 'undefined') {
-
- return Object.create(null);
- }
-
options = options || {};
options.delimiter = typeof options.delimiter === 'string' || Utils.isRegExp(options.delimiter) ? options.delimiter : internals.delimiter;
options.depth = typeof options.depth === 'number' ? options.depth : internals.depth;
options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : internals.arrayLimit;
options.parseArrays = options.parseArrays !== false;
options.allowDots = options.allowDots !== false;
+ options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : internals.plainObjects;
+ options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : internals.allowPrototypes;
options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : internals.parameterLimit;
options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : internals.strictNullHandling;
+ if (str === '' ||
+ str === null ||
+ typeof str === 'undefined') {
+
+ return options.plainObjects ? Object.create(null) : {};
+ }
var tempObj = typeof str === 'string' ? internals.parseValues(str, options) : str;
- var obj = Object.create(null);
+ var obj = options.plainObjects ? Object.create(null) : {};
// Iterate over the keys and setup the new object
@@ -159,7 +179,7 @@ module.exports = function (str, options) {
for (var i = 0, il = keys.length; i < il; ++i) {
var key = keys[i];
var newObj = internals.parseKeys(key, tempObj[key], options);
- obj = Utils.merge(obj, newObj);
+ obj = Utils.merge(obj, newObj, options);
}
return Utils.compact(obj);
diff --git a/deps/npm/node_modules/request/node_modules/qs/lib/utils.js b/deps/npm/node_modules/request/node_modules/qs/lib/utils.js
index ec93afcd2..88f314732 100644
--- a/deps/npm/node_modules/request/node_modules/qs/lib/utils.js
+++ b/deps/npm/node_modules/request/node_modules/qs/lib/utils.js
@@ -5,14 +5,14 @@
var internals = {};
internals.hexTable = new Array(256);
-for (var i = 0; i < 256; ++i) {
- internals.hexTable[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase();
+for (var h = 0; h < 256; ++h) {
+ internals.hexTable[h] = '%' + ((h < 16 ? '0' : '') + h.toString(16)).toUpperCase();
}
-exports.arrayToObject = function (source) {
+exports.arrayToObject = function (source, options) {
- var obj = Object.create(null);
+ var obj = options.plainObjects ? Object.create(null) : {};
for (var i = 0, il = source.length; i < il; ++i) {
if (typeof source[i] !== 'undefined') {
@@ -24,7 +24,7 @@ exports.arrayToObject = function (source) {
};
-exports.merge = function (target, source) {
+exports.merge = function (target, source, options) {
if (!source) {
return target;
@@ -52,7 +52,7 @@ exports.merge = function (target, source) {
if (Array.isArray(target) &&
!Array.isArray(source)) {
- target = exports.arrayToObject(target);
+ target = exports.arrayToObject(target, options);
}
var keys = Object.keys(source);
@@ -60,11 +60,11 @@ exports.merge = function (target, source) {
var key = keys[k];
var value = source[key];
- if (!target[key]) {
+ if (!Object.prototype.hasOwnProperty.call(target, key)) {
target[key] = value;
}
else {
- target[key] = exports.merge(target[key], value);
+ target[key] = exports.merge(target[key], value, options);
}
}
diff --git a/deps/npm/node_modules/request/node_modules/qs/package.json b/deps/npm/node_modules/request/node_modules/qs/package.json
index bd7c53cea..4c45772df 100644
--- a/deps/npm/node_modules/request/node_modules/qs/package.json
+++ b/deps/npm/node_modules/request/node_modules/qs/package.json
@@ -1,9 +1,9 @@
{
"name": "qs",
- "version": "3.1.0",
+ "version": "4.0.0",
"description": "A querystring parser that supports nesting and arrays, with a depth limit",
"homepage": "https://github.com/hapijs/qs",
- "main": "index.js",
+ "main": "lib/index.js",
"dependencies": {},
"devDependencies": {
"browserify": "^10.2.1",
@@ -11,8 +11,9 @@
"lab": "5.x.x"
},
"scripts": {
- "test": "make test-cov",
- "dist": "browserify --standalone Qs index.js > dist/qs.js"
+ "test": "lab -a code -t 100 -L",
+ "test-cov-html": "lab -a code -r html -o coverage.html",
+ "dist": "browserify --standalone Qs lib/index.js > dist/qs.js"
},
"repository": {
"type": "git",
@@ -23,22 +24,22 @@
"qs"
],
"license": "BSD-3-Clause",
- "gitHead": "e53b1b242a55f886531954ebdd78b3b20efadaf0",
+ "gitHead": "e573dd08eae6cce30d2202704691a102dfa3782a",
"bugs": {
"url": "https://github.com/hapijs/qs/issues"
},
- "_id": "qs@3.1.0",
- "_shasum": "d0e9ae745233a12dc43fb4f3055bba446261153c",
- "_from": "qs@>=3.1.0 <3.2.0",
- "_npmVersion": "2.9.0",
- "_nodeVersion": "0.12.2",
+ "_id": "qs@4.0.0",
+ "_shasum": "c31d9b74ec27df75e543a86c78728ed8d4623607",
+ "_from": "qs@>=4.0.0 <4.1.0",
+ "_npmVersion": "2.12.0",
+ "_nodeVersion": "0.12.4",
"_npmUser": {
"name": "nlf",
"email": "quitlahok@gmail.com"
},
"dist": {
- "shasum": "d0e9ae745233a12dc43fb4f3055bba446261153c",
- "tarball": "http://registry.npmjs.org/qs/-/qs-3.1.0.tgz"
+ "shasum": "c31d9b74ec27df75e543a86c78728ed8d4623607",
+ "tarball": "http://registry.npmjs.org/qs/-/qs-4.0.0.tgz"
},
"maintainers": [
{
@@ -51,6 +52,6 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/qs/-/qs-3.1.0.tgz",
+ "_resolved": "https://registry.npmjs.org/qs/-/qs-4.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/qs/test/parse.js b/deps/npm/node_modules/request/node_modules/qs/test/parse.js
index 9afbf35f0..a19d76457 100644
--- a/deps/npm/node_modules/request/node_modules/qs/test/parse.js
+++ b/deps/npm/node_modules/request/node_modules/qs/test/parse.js
@@ -23,194 +23,194 @@ describe('parse()', function () {
it('parses a simple string', function (done) {
- expect(Qs.parse('0=foo')).to.deep.equal({ '0': 'foo' }, { prototype: false });
- expect(Qs.parse('foo=c++')).to.deep.equal({ foo: 'c ' }, { prototype: false });
- expect(Qs.parse('a[>=]=23')).to.deep.equal({ a: { '>=': '23' } }, { prototype: false });
- expect(Qs.parse('a[<=>]==23')).to.deep.equal({ a: { '<=>': '=23' } }, { prototype: false });
- expect(Qs.parse('a[==]=23')).to.deep.equal({ a: { '==': '23' } }, { prototype: false });
- expect(Qs.parse('foo', {strictNullHandling: true})).to.deep.equal({ foo: null }, { prototype: false });
- expect(Qs.parse('foo' )).to.deep.equal({ foo: '' }, { prototype: false });
- expect(Qs.parse('foo=')).to.deep.equal({ foo: '' }, { prototype: false });
- expect(Qs.parse('foo=bar')).to.deep.equal({ foo: 'bar' }, { prototype: false });
- expect(Qs.parse(' foo = bar = baz ')).to.deep.equal({ ' foo ': ' bar = baz ' }, { prototype: false });
- expect(Qs.parse('foo=bar=baz')).to.deep.equal({ foo: 'bar=baz' }, { prototype: false });
- expect(Qs.parse('foo=bar&bar=baz')).to.deep.equal({ foo: 'bar', bar: 'baz' }, { prototype: false });
- expect(Qs.parse('foo2=bar2&baz2=')).to.deep.equal({ foo2: 'bar2', baz2: '' }, { prototype: false });
- expect(Qs.parse('foo=bar&baz', {strictNullHandling: true})).to.deep.equal({ foo: 'bar', baz: null }, { prototype: false });
- expect(Qs.parse('foo=bar&baz')).to.deep.equal({ foo: 'bar', baz: '' }, { prototype: false });
+ expect(Qs.parse('0=foo')).to.deep.equal({ '0': 'foo' });
+ expect(Qs.parse('foo=c++')).to.deep.equal({ foo: 'c ' });
+ expect(Qs.parse('a[>=]=23')).to.deep.equal({ a: { '>=': '23' } });
+ expect(Qs.parse('a[<=>]==23')).to.deep.equal({ a: { '<=>': '=23' } });
+ expect(Qs.parse('a[==]=23')).to.deep.equal({ a: { '==': '23' } });
+ expect(Qs.parse('foo', { strictNullHandling: true })).to.deep.equal({ foo: null });
+ expect(Qs.parse('foo' )).to.deep.equal({ foo: '' });
+ expect(Qs.parse('foo=')).to.deep.equal({ foo: '' });
+ expect(Qs.parse('foo=bar')).to.deep.equal({ foo: 'bar' });
+ expect(Qs.parse(' foo = bar = baz ')).to.deep.equal({ ' foo ': ' bar = baz ' });
+ expect(Qs.parse('foo=bar=baz')).to.deep.equal({ foo: 'bar=baz' });
+ expect(Qs.parse('foo=bar&bar=baz')).to.deep.equal({ foo: 'bar', bar: 'baz' });
+ expect(Qs.parse('foo2=bar2&baz2=')).to.deep.equal({ foo2: 'bar2', baz2: '' });
+ expect(Qs.parse('foo=bar&baz', { strictNullHandling: true })).to.deep.equal({ foo: 'bar', baz: null });
+ expect(Qs.parse('foo=bar&baz')).to.deep.equal({ foo: 'bar', baz: '' });
expect(Qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World')).to.deep.equal({
cht: 'p3',
chd: 't:60,40',
chs: '250x100',
chl: 'Hello|World'
- }, { prototype: false });
+ });
done();
});
it('allows disabling dot notation', function (done) {
- expect(Qs.parse('a.b=c')).to.deep.equal({ a: { b: 'c' } }, { prototype: false });
- expect(Qs.parse('a.b=c', { allowDots: false })).to.deep.equal({ 'a.b': 'c' }, { prototype: false });
+ expect(Qs.parse('a.b=c')).to.deep.equal({ a: { b: 'c' } });
+ expect(Qs.parse('a.b=c', { allowDots: false })).to.deep.equal({ 'a.b': 'c' });
done();
});
it('parses a single nested string', function (done) {
- expect(Qs.parse('a[b]=c')).to.deep.equal({ a: { b: 'c' } }, { prototype: false });
+ expect(Qs.parse('a[b]=c')).to.deep.equal({ a: { b: 'c' } });
done();
});
it('parses a double nested string', function (done) {
- expect(Qs.parse('a[b][c]=d')).to.deep.equal({ a: { b: { c: 'd' } } }, { prototype: false });
+ expect(Qs.parse('a[b][c]=d')).to.deep.equal({ a: { b: { c: 'd' } } });
done();
});
it('defaults to a depth of 5', function (done) {
- expect(Qs.parse('a[b][c][d][e][f][g][h]=i')).to.deep.equal({ a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, { prototype: false });
+ expect(Qs.parse('a[b][c][d][e][f][g][h]=i')).to.deep.equal({ a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } });
done();
});
it('only parses one level when depth = 1', function (done) {
- expect(Qs.parse('a[b][c]=d', { depth: 1 })).to.deep.equal({ a: { b: { '[c]': 'd' } } }, { prototype: false });
- expect(Qs.parse('a[b][c][d]=e', { depth: 1 })).to.deep.equal({ a: { b: { '[c][d]': 'e' } } }, { prototype: false });
+ expect(Qs.parse('a[b][c]=d', { depth: 1 })).to.deep.equal({ a: { b: { '[c]': 'd' } } });
+ expect(Qs.parse('a[b][c][d]=e', { depth: 1 })).to.deep.equal({ a: { b: { '[c][d]': 'e' } } });
done();
});
it('parses a simple array', function (done) {
- expect(Qs.parse('a=b&a=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
+ expect(Qs.parse('a=b&a=c')).to.deep.equal({ a: ['b', 'c'] });
done();
});
it('parses an explicit array', function (done) {
- expect(Qs.parse('a[]=b')).to.deep.equal({ a: ['b'] }, { prototype: false });
- expect(Qs.parse('a[]=b&a[]=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a[]=b&a[]=c&a[]=d')).to.deep.equal({ a: ['b', 'c', 'd'] }, { prototype: false });
+ expect(Qs.parse('a[]=b')).to.deep.equal({ a: ['b'] });
+ expect(Qs.parse('a[]=b&a[]=c')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a[]=b&a[]=c&a[]=d')).to.deep.equal({ a: ['b', 'c', 'd'] });
done();
});
it('parses a mix of simple and explicit arrays', function (done) {
- expect(Qs.parse('a=b&a[]=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a[]=b&a=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a[0]=b&a=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a=b&a[0]=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a[1]=b&a=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a=b&a[1]=c')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
+ expect(Qs.parse('a=b&a[]=c')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a[]=b&a=c')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a[0]=b&a=c')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a=b&a[0]=c')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a[1]=b&a=c')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a=b&a[1]=c')).to.deep.equal({ a: ['b', 'c'] });
done();
});
it('parses a nested array', function (done) {
- expect(Qs.parse('a[b][]=c&a[b][]=d')).to.deep.equal({ a: { b: ['c', 'd'] } }, { prototype: false });
- expect(Qs.parse('a[>=]=25')).to.deep.equal({ a: { '>=': '25' } }, { prototype: false });
+ expect(Qs.parse('a[b][]=c&a[b][]=d')).to.deep.equal({ a: { b: ['c', 'd'] } });
+ expect(Qs.parse('a[>=]=25')).to.deep.equal({ a: { '>=': '25' } });
done();
});
it('allows to specify array indices', function (done) {
- expect(Qs.parse('a[1]=c&a[0]=b&a[2]=d')).to.deep.equal({ a: ['b', 'c', 'd'] }, { prototype: false });
- expect(Qs.parse('a[1]=c&a[0]=b')).to.deep.equal({ a: ['b', 'c'] }, { prototype: false });
- expect(Qs.parse('a[1]=c')).to.deep.equal({ a: ['c'] }, { prototype: false });
+ expect(Qs.parse('a[1]=c&a[0]=b&a[2]=d')).to.deep.equal({ a: ['b', 'c', 'd'] });
+ expect(Qs.parse('a[1]=c&a[0]=b')).to.deep.equal({ a: ['b', 'c'] });
+ expect(Qs.parse('a[1]=c')).to.deep.equal({ a: ['c'] });
done();
});
it('limits specific array indices to 20', function (done) {
- expect(Qs.parse('a[20]=a')).to.deep.equal({ a: ['a'] }, { prototype: false });
- expect(Qs.parse('a[21]=a')).to.deep.equal({ a: { '21': 'a' } }, { prototype: false });
+ expect(Qs.parse('a[20]=a')).to.deep.equal({ a: ['a'] });
+ expect(Qs.parse('a[21]=a')).to.deep.equal({ a: { '21': 'a' } });
done();
});
it('supports keys that begin with a number', function (done) {
- expect(Qs.parse('a[12b]=c')).to.deep.equal({ a: { '12b': 'c' } }, { prototype: false });
+ expect(Qs.parse('a[12b]=c')).to.deep.equal({ a: { '12b': 'c' } });
done();
});
it('supports encoded = signs', function (done) {
- expect(Qs.parse('he%3Dllo=th%3Dere')).to.deep.equal({ 'he=llo': 'th=ere' }, { prototype: false });
+ expect(Qs.parse('he%3Dllo=th%3Dere')).to.deep.equal({ 'he=llo': 'th=ere' });
done();
});
it('is ok with url encoded strings', function (done) {
- expect(Qs.parse('a[b%20c]=d')).to.deep.equal({ a: { 'b c': 'd' } }, { prototype: false });
- expect(Qs.parse('a[b]=c%20d')).to.deep.equal({ a: { b: 'c d' } }, { prototype: false });
+ expect(Qs.parse('a[b%20c]=d')).to.deep.equal({ a: { 'b c': 'd' } });
+ expect(Qs.parse('a[b]=c%20d')).to.deep.equal({ a: { b: 'c d' } });
done();
});
it('allows brackets in the value', function (done) {
- expect(Qs.parse('pets=["tobi"]')).to.deep.equal({ pets: '["tobi"]' }, { prototype: false });
- expect(Qs.parse('operators=[">=", "<="]')).to.deep.equal({ operators: '[">=", "<="]' }, { prototype: false });
+ expect(Qs.parse('pets=["tobi"]')).to.deep.equal({ pets: '["tobi"]' });
+ expect(Qs.parse('operators=[">=", "<="]')).to.deep.equal({ operators: '[">=", "<="]' });
done();
});
it('allows empty values', function (done) {
- expect(Qs.parse('')).to.deep.equal({}, { prototype: false });
- expect(Qs.parse(null)).to.deep.equal({}, { prototype: false });
- expect(Qs.parse(undefined)).to.deep.equal({}, { prototype: false });
+ expect(Qs.parse('')).to.deep.equal({});
+ expect(Qs.parse(null)).to.deep.equal({});
+ expect(Qs.parse(undefined)).to.deep.equal({});
done();
});
it('transforms arrays to objects', function (done) {
- expect(Qs.parse('foo[0]=bar&foo[bad]=baz')).to.deep.equal({ foo: { '0': 'bar', bad: 'baz' } }, { prototype: false });
- expect(Qs.parse('foo[bad]=baz&foo[0]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } }, { prototype: false });
- expect(Qs.parse('foo[bad]=baz&foo[]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } }, { prototype: false });
- expect(Qs.parse('foo[]=bar&foo[bad]=baz')).to.deep.equal({ foo: { '0': 'bar', bad: 'baz' } }, { prototype: false });
- expect(Qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar', '1': 'foo' } }, { prototype: false });
- expect(Qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb')).to.deep.equal({foo: [ {a: 'a', b: 'b'}, {a: 'aa', b: 'bb'} ]}, { prototype: false });
- expect(Qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c')).to.deep.equal({ a: { '0': 'b', t: 'u', hasOwnProperty: 'c' } }, { prototype: false });
- expect(Qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y')).to.deep.equal({ a: { '0': 'b', hasOwnProperty: 'c', x: 'y' } }, { prototype: false });
+ expect(Qs.parse('foo[0]=bar&foo[bad]=baz')).to.deep.equal({ foo: { '0': 'bar', bad: 'baz' } });
+ expect(Qs.parse('foo[bad]=baz&foo[0]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } });
+ expect(Qs.parse('foo[bad]=baz&foo[]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } });
+ expect(Qs.parse('foo[]=bar&foo[bad]=baz')).to.deep.equal({ foo: { '0': 'bar', bad: 'baz' } });
+ expect(Qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar', '1': 'foo' } });
+ expect(Qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb')).to.deep.equal({ foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] });
+ expect(Qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c')).to.deep.equal({ a: { '0': 'b', t: 'u', c: true } });
+ expect(Qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y')).to.deep.equal({ a: { '0': 'b', '1': 'c', x: 'y' } });
done();
});
it('transforms arrays to objects (dot notation)', function (done) {
- expect(Qs.parse('foo[0].baz=bar&fool.bad=baz')).to.deep.equal({ foo: [ { baz: 'bar'} ], fool: { bad: 'baz' } }, { prototype: false });
- expect(Qs.parse('foo[0].baz=bar&fool.bad.boo=baz')).to.deep.equal({ foo: [ { baz: 'bar'} ], fool: { bad: { boo: 'baz' } } }, { prototype: false });
- expect(Qs.parse('foo[0][0].baz=bar&fool.bad=baz')).to.deep.equal({ foo: [[ { baz: 'bar'} ]], fool: { bad: 'baz' } }, { prototype: false });
- expect(Qs.parse('foo[0].baz[0]=15&foo[0].bar=2')).to.deep.equal({ foo: [{ baz: ['15'], bar: '2' }] }, { prototype: false });
- expect(Qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2')).to.deep.equal({ foo: [{ baz: ['15', '16'], bar: '2' }] }, { prototype: false });
- expect(Qs.parse('foo.bad=baz&foo[0]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } }, { prototype: false });
- expect(Qs.parse('foo.bad=baz&foo[]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } }, { prototype: false });
- expect(Qs.parse('foo[]=bar&foo.bad=baz')).to.deep.equal({ foo: { '0': 'bar', bad: 'baz' } }, { prototype: false });
- expect(Qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar', '1': 'foo' } }, { prototype: false });
- expect(Qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb')).to.deep.equal({foo: [ {a: 'a', b: 'b'}, {a: 'aa', b: 'bb'} ]}, { prototype: false });
+ expect(Qs.parse('foo[0].baz=bar&fool.bad=baz')).to.deep.equal({ foo: [{ baz: 'bar' }], fool: { bad: 'baz' } });
+ expect(Qs.parse('foo[0].baz=bar&fool.bad.boo=baz')).to.deep.equal({ foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } });
+ expect(Qs.parse('foo[0][0].baz=bar&fool.bad=baz')).to.deep.equal({ foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } });
+ expect(Qs.parse('foo[0].baz[0]=15&foo[0].bar=2')).to.deep.equal({ foo: [{ baz: ['15'], bar: '2' }] });
+ expect(Qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2')).to.deep.equal({ foo: [{ baz: ['15', '16'], bar: '2' }] });
+ expect(Qs.parse('foo.bad=baz&foo[0]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } });
+ expect(Qs.parse('foo.bad=baz&foo[]=bar')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar' } });
+ expect(Qs.parse('foo[]=bar&foo.bad=baz')).to.deep.equal({ foo: { '0': 'bar', bad: 'baz' } });
+ expect(Qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo')).to.deep.equal({ foo: { bad: 'baz', '0': 'bar', '1': 'foo' } });
+ expect(Qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb')).to.deep.equal({ foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] });
done();
});
it('can add keys to objects', function (done) {
- expect(Qs.parse('a[b]=c&a=d')).to.deep.equal({ a: { b: 'c', d: true } }, { prototype: false });
+ expect(Qs.parse('a[b]=c&a=d')).to.deep.equal({ a: { b: 'c', d: true } });
done();
});
it('correctly prunes undefined values when converting an array to an object', function (done) {
- expect(Qs.parse('a[2]=b&a[99999999]=c')).to.deep.equal({ a: { '2': 'b', '99999999': 'c' } }, { prototype: false });
+ expect(Qs.parse('a[2]=b&a[99999999]=c')).to.deep.equal({ a: { '2': 'b', '99999999': 'c' } });
done();
});
it('supports malformed uri characters', function (done) {
- expect(Qs.parse('{%:%}', {strictNullHandling: true})).to.deep.equal({ '{%:%}': null }, { prototype: false });
- expect(Qs.parse('{%:%}=')).to.deep.equal({ '{%:%}': '' }, { prototype: false });
- expect(Qs.parse('foo=%:%}')).to.deep.equal({ foo: '%:%}' }, { prototype: false });
+ expect(Qs.parse('{%:%}', { strictNullHandling: true })).to.deep.equal({ '{%:%}': null });
+ expect(Qs.parse('{%:%}=')).to.deep.equal({ '{%:%}': '' });
+ expect(Qs.parse('foo=%:%}')).to.deep.equal({ foo: '%:%}' });
done();
});
it('doesn\'t produce empty keys', function (done) {
- expect(Qs.parse('_r=1&')).to.deep.equal({ '_r': '1' }, { prototype: false });
+ expect(Qs.parse('_r=1&')).to.deep.equal({ '_r': '1' });
done();
});
@@ -224,45 +224,45 @@ describe('parse()', function () {
it('parses arrays of objects', function (done) {
- expect(Qs.parse('a[][b]=c')).to.deep.equal({ a: [{ b: 'c' }] }, { prototype: false });
- expect(Qs.parse('a[0][b]=c')).to.deep.equal({ a: [{ b: 'c' }] }, { prototype: false });
+ expect(Qs.parse('a[][b]=c')).to.deep.equal({ a: [{ b: 'c' }] });
+ expect(Qs.parse('a[0][b]=c')).to.deep.equal({ a: [{ b: 'c' }] });
done();
});
it('allows for empty strings in arrays', function (done) {
- expect(Qs.parse('a[]=b&a[]=&a[]=c')).to.deep.equal({ a: ['b', '', 'c'] }, { prototype: false });
- expect(Qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', {strictNullHandling: true})).to.deep.equal({ a: ['b', null, 'c', ''] }, { prototype: false });
- expect(Qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', {strictNullHandling: true})).to.deep.equal({ a: ['b', '', 'c', null] }, { prototype: false });
- expect(Qs.parse('a[]=&a[]=b&a[]=c')).to.deep.equal({ a: ['', 'b', 'c'] }, { prototype: false });
+ expect(Qs.parse('a[]=b&a[]=&a[]=c')).to.deep.equal({ a: ['b', '', 'c'] });
+ expect(Qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true })).to.deep.equal({ a: ['b', null, 'c', ''] });
+ expect(Qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true })).to.deep.equal({ a: ['b', '', 'c', null] });
+ expect(Qs.parse('a[]=&a[]=b&a[]=c')).to.deep.equal({ a: ['', 'b', 'c'] });
done();
});
it('compacts sparse arrays', function (done) {
- expect(Qs.parse('a[10]=1&a[2]=2')).to.deep.equal({ a: ['2', '1'] }, { prototype: false });
+ expect(Qs.parse('a[10]=1&a[2]=2')).to.deep.equal({ a: ['2', '1'] });
done();
});
it('parses semi-parsed strings', function (done) {
- expect(Qs.parse({ 'a[b]': 'c' })).to.deep.equal({ a: { b: 'c' } }, { prototype: false });
- expect(Qs.parse({ 'a[b]': 'c', 'a[d]': 'e' })).to.deep.equal({ a: { b: 'c', d: 'e' } }, { prototype: false });
+ expect(Qs.parse({ 'a[b]': 'c' })).to.deep.equal({ a: { b: 'c' } });
+ expect(Qs.parse({ 'a[b]': 'c', 'a[d]': 'e' })).to.deep.equal({ a: { b: 'c', d: 'e' } });
done();
});
it('parses buffers correctly', function (done) {
var b = new Buffer('test');
- expect(Qs.parse({ a: b })).to.deep.equal({ a: b }, { prototype: false });
+ expect(Qs.parse({ a: b })).to.deep.equal({ a: b });
done();
});
it('continues parsing when no parent is found', function (done) {
- expect(Qs.parse('[]=&a=b')).to.deep.equal({ '0': '', a: 'b' }, { prototype: false });
- expect(Qs.parse('[]&a=b', {strictNullHandling: true})).to.deep.equal({ '0': null, a: 'b' }, { prototype: false });
- expect(Qs.parse('[foo]=bar')).to.deep.equal({ foo: 'bar' }, { prototype: false });
+ expect(Qs.parse('[]=&a=b')).to.deep.equal({ '0': '', a: 'b' });
+ expect(Qs.parse('[]&a=b', { strictNullHandling: true })).to.deep.equal({ '0': null, a: 'b' });
+ expect(Qs.parse('[foo]=bar')).to.deep.equal({ foo: 'bar' });
done();
});
@@ -286,9 +286,9 @@ describe('parse()', function () {
Object.prototype.crash = '';
Array.prototype.crash = '';
expect(Qs.parse.bind(null, 'a=b')).to.not.throw();
- expect(Qs.parse('a=b')).to.deep.equal({ a: 'b' }, { prototype: false });
+ expect(Qs.parse('a=b')).to.deep.equal({ a: 'b' });
expect(Qs.parse.bind(null, 'a[][b]=c')).to.not.throw();
- expect(Qs.parse('a[][b]=c')).to.deep.equal({ a: [{ b: 'c' }] }, { prototype: false });
+ expect(Qs.parse('a[][b]=c')).to.deep.equal({ a: [{ b: 'c' }] });
delete Object.prototype.crash;
delete Array.prototype.crash;
done();
@@ -296,105 +296,105 @@ describe('parse()', function () {
it('parses a string with an alternative string delimiter', function (done) {
- expect(Qs.parse('a=b;c=d', { delimiter: ';' })).to.deep.equal({ a: 'b', c: 'd' }, { prototype: false });
+ expect(Qs.parse('a=b;c=d', { delimiter: ';' })).to.deep.equal({ a: 'b', c: 'd' });
done();
});
it('parses a string with an alternative RegExp delimiter', function (done) {
- expect(Qs.parse('a=b; c=d', { delimiter: /[;,] */ })).to.deep.equal({ a: 'b', c: 'd' }, { prototype: false });
+ expect(Qs.parse('a=b; c=d', { delimiter: /[;,] */ })).to.deep.equal({ a: 'b', c: 'd' });
done();
});
it('does not use non-splittable objects as delimiters', function (done) {
- expect(Qs.parse('a=b&c=d', { delimiter: true })).to.deep.equal({ a: 'b', c: 'd' }, { prototype: false });
+ expect(Qs.parse('a=b&c=d', { delimiter: true })).to.deep.equal({ a: 'b', c: 'd' });
done();
});
it('allows overriding parameter limit', function (done) {
- expect(Qs.parse('a=b&c=d', { parameterLimit: 1 })).to.deep.equal({ a: 'b' }, { prototype: false });
+ expect(Qs.parse('a=b&c=d', { parameterLimit: 1 })).to.deep.equal({ a: 'b' });
done();
});
it('allows setting the parameter limit to Infinity', function (done) {
- expect(Qs.parse('a=b&c=d', { parameterLimit: Infinity })).to.deep.equal({ a: 'b', c: 'd' }, { prototype: false });
+ expect(Qs.parse('a=b&c=d', { parameterLimit: Infinity })).to.deep.equal({ a: 'b', c: 'd' });
done();
});
it('allows overriding array limit', function (done) {
- expect(Qs.parse('a[0]=b', { arrayLimit: -1 })).to.deep.equal({ a: { '0': 'b' } }, { prototype: false });
- expect(Qs.parse('a[-1]=b', { arrayLimit: -1 })).to.deep.equal({ a: { '-1': 'b' } }, { prototype: false });
- expect(Qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 })).to.deep.equal({ a: { '0': 'b', '1': 'c' } }, { prototype: false });
+ expect(Qs.parse('a[0]=b', { arrayLimit: -1 })).to.deep.equal({ a: { '0': 'b' } });
+ expect(Qs.parse('a[-1]=b', { arrayLimit: -1 })).to.deep.equal({ a: { '-1': 'b' } });
+ expect(Qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 })).to.deep.equal({ a: { '0': 'b', '1': 'c' } });
done();
});
it('allows disabling array parsing', function (done) {
- expect(Qs.parse('a[0]=b&a[1]=c', { parseArrays: false })).to.deep.equal({ a: { '0': 'b', '1': 'c' } }, { prototype: false });
+ expect(Qs.parse('a[0]=b&a[1]=c', { parseArrays: false })).to.deep.equal({ a: { '0': 'b', '1': 'c' } });
done();
});
it('parses an object', function (done) {
var input = {
- 'user[name]': {'pop[bob]': 3},
+ 'user[name]': { 'pop[bob]': 3 },
'user[email]': null
};
var expected = {
'user': {
- 'name': {'pop[bob]': 3},
+ 'name': { 'pop[bob]': 3 },
'email': null
}
};
var result = Qs.parse(input);
- expect(result).to.deep.equal(expected, { prototype: false });
+ expect(result).to.deep.equal(expected);
done();
});
it('parses an object in dot notation', function (done) {
var input = {
- 'user.name': {'pop[bob]': 3},
+ 'user.name': { 'pop[bob]': 3 },
'user.email.': null
};
var expected = {
'user': {
- 'name': {'pop[bob]': 3},
+ 'name': { 'pop[bob]': 3 },
'email': null
}
};
var result = Qs.parse(input);
- expect(result).to.deep.equal(expected, { prototype: false });
+ expect(result).to.deep.equal(expected);
done();
});
it('parses an object and not child values', function (done) {
var input = {
- 'user[name]': {'pop[bob]': { 'test': 3 }},
+ 'user[name]': { 'pop[bob]': { 'test': 3 } },
'user[email]': null
};
var expected = {
'user': {
- 'name': {'pop[bob]': { 'test': 3 }},
+ 'name': { 'pop[bob]': { 'test': 3 } },
'email': null
}
};
var result = Qs.parse(input);
- expect(result).to.deep.equal(expected, { prototype: false });
+ expect(result).to.deep.equal(expected);
done();
});
@@ -404,7 +404,7 @@ describe('parse()', function () {
delete global.Buffer;
var result = Qs.parse('a=b&c=d');
global.Buffer = tempBuffer;
- expect(result).to.deep.equal({ a: 'b', c: 'd' }, { prototype: false });
+ expect(result).to.deep.equal({ a: 'b', c: 'd' });
done();
});
@@ -423,7 +423,7 @@ describe('parse()', function () {
expect(parsed).to.contain('foo');
expect(parsed.foo).to.contain('bar', 'baz');
expect(parsed.foo.bar).to.equal('baz');
- expect(parsed.foo.baz).to.deep.equal(a, { prototype: false });
+ expect(parsed.foo.baz).to.deep.equal(a);
done();
});
@@ -432,24 +432,47 @@ describe('parse()', function () {
var a = Object.create(null);
a.b = 'c';
- expect(Qs.parse(a)).to.deep.equal({ b: 'c' }, { prototype: false });
+ expect(Qs.parse(a)).to.deep.equal({ b: 'c' });
var result = Qs.parse({ a: a });
expect(result).to.contain('a');
- expect(result.a).to.deep.equal(a, { prototype: false });
+ expect(result.a).to.deep.equal(a);
done();
});
it('parses dates correctly', function (done) {
var now = new Date();
- expect(Qs.parse({ a: now })).to.deep.equal({ a: now }, { prototype: false });
+ expect(Qs.parse({ a: now })).to.deep.equal({ a: now });
done();
});
it('parses regular expressions correctly', function (done) {
var re = /^test$/;
- expect(Qs.parse({ a: re })).to.deep.equal({ a: re }, { prototype: false });
+ expect(Qs.parse({ a: re })).to.deep.equal({ a: re });
+ done();
+ });
+
+ it('can allow overwriting prototype properties', function (done) {
+
+ expect(Qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true })).to.deep.equal({ a: { hasOwnProperty: 'b' } }, { prototype: false });
+ expect(Qs.parse('hasOwnProperty=b', { allowPrototypes: true })).to.deep.equal({ hasOwnProperty: 'b' }, { prototype: false });
+ done();
+ });
+
+ it('can return plain objects', function (done) {
+
+ var expected = Object.create(null);
+ expected.a = Object.create(null);
+ expected.a.b = 'c';
+ expected.a.hasOwnProperty = 'd';
+ expect(Qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true })).to.deep.equal(expected);
+ expect(Qs.parse(null, { plainObjects: true })).to.deep.equal(Object.create(null));
+ var expectedArray = Object.create(null);
+ expectedArray.a = Object.create(null);
+ expectedArray.a['0'] = 'b';
+ expectedArray.a.c = 'd';
+ expect(Qs.parse('a[]=b&a[c]=d', { plainObjects: true })).to.deep.equal(expectedArray);
done();
});
});
diff --git a/deps/npm/node_modules/request/node_modules/qs/test/stringify.js b/deps/npm/node_modules/request/node_modules/qs/test/stringify.js
index 7a80c6c35..48b7803f7 100644
--- a/deps/npm/node_modules/request/node_modules/qs/test/stringify.js
+++ b/deps/npm/node_modules/request/node_modules/qs/test/stringify.js
@@ -111,14 +111,14 @@ describe('stringify()', function () {
it('stringifies an empty value', function (done) {
expect(Qs.stringify({ a: '' })).to.equal('a=');
- expect(Qs.stringify({ a: null }, {strictNullHandling: true})).to.equal('a');
+ expect(Qs.stringify({ a: null }, { strictNullHandling: true })).to.equal('a');
expect(Qs.stringify({ a: '', b: '' })).to.equal('a=&b=');
- expect(Qs.stringify({ a: null, b: '' }, {strictNullHandling: true})).to.equal('a&b=');
+ expect(Qs.stringify({ a: null, b: '' }, { strictNullHandling: true })).to.equal('a&b=');
expect(Qs.stringify({ a: { b: '' } })).to.equal('a%5Bb%5D=');
- expect(Qs.stringify({ a: { b: null } }, {strictNullHandling: true})).to.equal('a%5Bb%5D');
- expect(Qs.stringify({ a: { b: null } }, {strictNullHandling: false})).to.equal('a%5Bb%5D=');
+ expect(Qs.stringify({ a: { b: null } }, { strictNullHandling: true })).to.equal('a%5Bb%5D');
+ expect(Qs.stringify({ a: { b: null } }, { strictNullHandling: false })).to.equal('a%5Bb%5D=');
done();
});
@@ -155,8 +155,8 @@ describe('stringify()', function () {
expect(Qs.stringify({ a: undefined })).to.equal('');
- expect(Qs.stringify({ a: { b: undefined, c: null } }, {strictNullHandling: true})).to.equal('a%5Bc%5D');
- expect(Qs.stringify({ a: { b: undefined, c: null } }, {strictNullHandling: false})).to.equal('a%5Bc%5D=');
+ expect(Qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true })).to.equal('a%5Bc%5D');
+ expect(Qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false })).to.equal('a%5Bc%5D=');
expect(Qs.stringify({ a: { b: undefined, c: '' } })).to.equal('a%5Bc%5D=');
done();
});
@@ -184,7 +184,7 @@ describe('stringify()', function () {
it('skips properties that are part of the object prototype', function (done) {
Object.prototype.crash = 'test';
- expect(Qs.stringify({ a: 'b'})).to.equal('a=b');
+ expect(Qs.stringify({ a: 'b' })).to.equal('a=b');
expect(Qs.stringify({ a: { b: 'c' } })).to.equal('a%5Bb%5D=c');
delete Object.prototype.crash;
done();
@@ -224,8 +224,8 @@ describe('stringify()', function () {
it('selects properties when filter=array', function (done) {
expect(Qs.stringify({ a: 'b' }, { filter: ['a'] })).to.equal('a=b');
- expect(Qs.stringify({ a: 1}, { filter: [] })).to.equal('');
- expect(Qs.stringify({ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, { filter: ['a', 'b', 0, 2]})).to.equal('a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3');
+ expect(Qs.stringify({ a: 1 }, { filter: [] })).to.equal('');
+ expect(Qs.stringify({ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, { filter: ['a', 'b', 0, 2] })).to.equal('a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3');
done();
});
diff --git a/deps/npm/node_modules/request/node_modules/stringstream/package.json b/deps/npm/node_modules/request/node_modules/stringstream/package.json
index 37b7bec83..3b1373bb5 100644
--- a/deps/npm/node_modules/request/node_modules/stringstream/package.json
+++ b/deps/npm/node_modules/request/node_modules/stringstream/package.json
@@ -21,12 +21,28 @@
"license": "MIT",
"readme": "# Decode streams into strings The Right Way(tm)\n\n```javascript\nvar fs = require('fs')\nvar zlib = require('zlib')\nvar strs = require('stringstream')\n\nvar utf8Stream = fs.createReadStream('massiveLogFile.gz')\n .pipe(zlib.createGunzip())\n .pipe(strs('utf8'))\n```\n\nNo need to deal with `setEncoding()` weirdness, just compose streams\nlike they were supposed to be!\n\nHandles input and output encoding:\n\n```javascript\n// Stream from utf8 to hex to base64... Why not, ay.\nvar hex64Stream = fs.createReadStream('myFile')\n .pipe(strs('utf8', 'hex'))\n .pipe(strs('hex', 'base64'))\n```\n\nAlso deals with `base64` output correctly by aligning each emitted data\nchunk so that there are no dangling `=` characters:\n\n```javascript\nvar stream = fs.createReadStream('myFile').pipe(strs('base64'))\n\nvar base64Str = ''\n\nstream.on('data', function(data) { base64Str += data })\nstream.on('end', function() {\n console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding()\n console.log('Original file is: ' + new Buffer(base64Str, 'base64'))\n})\n```\n",
"readmeFilename": "README.md",
- "bugs": {
- "url": "https://github.com/mhart/StringStream/issues"
- },
- "homepage": "https://github.com/mhart/StringStream#readme",
"_id": "stringstream@0.0.4",
+ "dist": {
+ "shasum": "0f0e3423f942960b5692ac324a57dd093bc41a92",
+ "tarball": "http://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz"
+ },
+ "_npmVersion": "1.2.0",
+ "_npmUser": {
+ "name": "hichaelmart",
+ "email": "michael.hart.au@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "hichaelmart",
+ "email": "michael.hart.au@gmail.com"
+ }
+ ],
+ "directories": {},
"_shasum": "0f0e3423f942960b5692ac324a57dd093bc41a92",
"_resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz",
- "_from": "stringstream@>=0.0.4 <0.1.0"
+ "_from": "stringstream@>=0.0.4 <0.1.0",
+ "bugs": {
+ "url": "https://github.com/mhart/StringStream/issues"
+ },
+ "homepage": "https://github.com/mhart/StringStream#readme"
}
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/README.md b/deps/npm/node_modules/request/node_modules/tough-cookie/README.md
index f56d153f1..419dd48d5 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/README.md
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/README.md
@@ -8,7 +8,7 @@
# Synopsis
``` javascript
-var tough = require('tough-cookie'); // note: not 'cookie', 'cookies' or 'node-cookie'
+var tough = require('tough-cookie');
var Cookie = tough.Cookie;
var cookie = Cookie.parse(header);
cookie.value = 'somethingdifferent';
@@ -32,61 +32,51 @@ Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already take
# API
-tough
-=====
+## tough
Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound".
**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary.
-parseDate(string)
------------------
+### `parseDate(string)`
Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`.
-formatDate(date)
-----------------
+### `formatDate(date)`
Format a Date into a RFC1123 string (the RFC6265-recommended format).
-canonicalDomain(str)
---------------------
+### `canonicalDomain(str)`
Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects).
-domainMatch(str,domStr[,canonicalize=true])
--------------------------------------------
+### `domainMatch(str,domStr[,canonicalize=true])`
Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match".
The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not.
-defaultPath(path)
------------------
+### `defaultPath(path)`
Given a current request/response path, gives the Path appropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC.
The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `url.parse()` output.
-pathMatch(reqPath,cookiePath)
------------------------------
+### `pathMatch(reqPath,cookiePath)`
Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean.
This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`.
-parse(header)
-----------------------------
+### `parse(header)`
alias for `Cookie.parse(header)`
-fromJSON(string)
-----------------
+### `fromJSON(string)`
alias for `Cookie.fromJSON(string)`
-getPublicSuffix(hostname)
--------------------------
+### `getPublicSuffix(hostname)`
Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it.
@@ -94,32 +84,35 @@ For example: `www.example.com` and `www.subdomain.example.com` both have public
For further information, see http://publicsuffix.org/. This module derives its list from that site.
-cookieCompare(a,b)
-------------------
+### `cookieCompare(a,b)`
-For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). Longest `.path`s go first, then sorted oldest to youngest.
+For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence:
+
+* Longest `.path`
+* oldest `.creation` (which has a 1ms precision, same as `Date`)
+* lowest `.creationIndex` (to get beyond the 1ms precision)
``` javascript
var cookies = [ /* unsorted array of Cookie objects */ ];
cookies = cookies.sort(cookieCompare);
```
-permuteDomain(domain)
----------------------
+**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same millisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`.
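
As a concrete illustration of the tie-breaking described above, a short sketch (not part of the patch) of two cookies constructed within the same millisecond still sorting deterministically:

```js
var tough = require('tough-cookie');

// Same path length and (typically) the same 1ms creation time, so
// cookieCompare falls through to the per-process creationIndex.
var a = new tough.Cookie({ key: 'a', value: '1', path: '/' });
var b = new tough.Cookie({ key: 'b', value: '2', path: '/' });

[b, a].sort(tough.cookieCompare); // => [a, b]
```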
-Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores.
+### `permuteDomain(domain)`
+Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores.
-permutePath(path)
------------------
+### `permutePath(path)`
Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores.
-Cookie
-======
-Cookie.parse(header)
------------------------------------
+## Cookie
+
+Exported via `tough.Cookie`.
+
+### `Cookie.parse(header)`
Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed.
@@ -132,13 +125,9 @@ else
cookies = [Cookie.parse(res.headers['set-cookie'])];
```
-Cookie.fromJSON(string)
------------------------
-
-Convert a JSON string to a `Cookie` object. Does a `JSON.parse()` and converts the `.created`, `.lastAccessed` and `.expires` properties into `Date` objects.
+### Properties
-Properties
-==========
+Cookie object properties:
* _key_ - string - the name or key of the cookie (default "")
* _value_ - string - the value of the cookie (default "")
@@ -149,70 +138,81 @@ Properties
* _secure_ - boolean - the `Secure` cookie flag
* _httpOnly_ - boolean - the `HttpOnly` cookie flag
* _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside)
+ * _creation_ - `Date` - when this cookie was constructed
+ * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation)
After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes:
* _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied)
* _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.
- * _created_ - `Date` - when this cookie was added to the jar
+ * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar
* _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute.
-Construction([{options}])
-------------
+### `Cookie([{properties}])`
-Receives an options object that can contain any Cookie properties, uses the default for unspecified properties.
+Receives an options object that can contain any of the above Cookie properties, uses the default for unspecified properties.
-.toString()
------------
+### `.toString()`
encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.
-.cookieString()
----------------
+### `.cookieString()`
encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '=').
-.setExpires(String)
--------------------
+### `.setExpires(String)`
sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string).
-.setMaxAge(number)
--------------------
+### `.setMaxAge(number)`
sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly.
-.expiryTime([now=Date.now()])
------------------------------
+### `.expiryTime([now=Date.now()])`
-.expiryDate([now=Date.now()])
------------------------------
+### `.expiryDate([now=Date.now()])`
expiryTime() computes the absolute unix-epoch milliseconds at which this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.
-Max-Age takes precedence over Expires (as per the RFC). The `.created` attribute -- or, by default, the `now` paramter -- is used to offset the `.maxAge` attribute.
+Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute.
If Expires (`.expires`) is set, that's returned.
Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents).
-.TTL([now=Date.now()])
----------
+### `.TTL([now=Date.now()])`
compute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply.
The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned.
-.canonicalizedDoman()
----------------------
+### `.canonicalizedDomain()`
-.cdomain()
-----------
+### `.cdomain()`
return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters.
-.validate()
------------
+### `.toJSON()`
+
+For convenience in using `JSON.stringify(cookie)`. Returns a plain-old `Object` that can be JSON-serialized.
+
+Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`).
+
+**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
+
+### `Cookie.fromJSON(strOrObj)`
+
+Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first.
+
+Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer.
+
+Returns `null` upon JSON parsing error.
+
+### `.clone()`
+
+Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`.
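+
A short sketch of the serialization round-trip described in the last few sections, using only the documented `toJSON`/`fromJSON`/`clone` surface (the cookie string is illustrative):

```js
var Cookie = require('tough-cookie').Cookie;

var cookie = Cookie.parse('sid=abc123; Domain=example.com; Path=/; Secure');

// JSON.stringify() picks up .toJSON(); Date-valued fields become ISO
// strings and are turned back into Dates by fromJSON().
var restored = Cookie.fromJSON(JSON.stringify(cookie));

// .clone() is documented as exactly Cookie.fromJSON(cookie.toJSON()).
var copy = cookie.clone();
```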
+
+### `.validate()`
Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive.
@@ -226,48 +226,41 @@ if (cookie.validate() === true) {
}
```
-CookieJar
-=========
-Construction([store = new MemoryCookieStore()][, rejectPublicSuffixes])
-------------
+## CookieJar
+
+Exported via `tough.CookieJar`.
+
+### `CookieJar([store],[rejectPublicSuffixes])`
Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used.
+### Properties
-Attributes
-----------
+CookieJar object properties:
* _rejectPublicSuffixes_ - boolean - reject cookies with domains like "com" and "co.uk" (default: `true`)
Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods.
-.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))
--------------------------------------------------------------------
+### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))`
-Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.created`, `.lastAccessed` and `.hostOnly` properties.
+Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties.
The `options` object can be omitted and can have the following properties:
* _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.
* _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
* _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies
- * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. CookieStore errors aren't ignored by this option.
+ * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option.
As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual).
-.setCookieSync(cookieOrString, currentUrl, [{options}])
--------------------------------------------------------
+### `.setCookieSync(cookieOrString, currentUrl, [{options}])`
Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
-.storeCookie(cookie, [{options},] cb(err,cookie))
--------------------------------------------------
-
-__REMOVED__ removed in lieu of the CookieStore API below
-
-.getCookies(currentUrl, [{options},] cb(err,cookies))
------------------------------------------------------
+### `.getCookies(currentUrl, [{options},] cb(err,cookies))`
Retrieve the list of cookies that can be sent in a Cookie header for the current url.
@@ -279,59 +272,99 @@ The `options` object can be omitted and can have the following properties:
* _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
* _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies
* _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially).
- * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the CookieStore `fetchCookies` function (the default MemoryCookieStore supports it).
+ * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it).
The `.lastAccessed` property of the returned cookies will have been updated.
-.getCookiesSync(currentUrl, [{options}])
-----------------------------------------
+### `.getCookiesSync(currentUrl, [{options}])`
Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
-.getCookieString(...)
----------------------
+### `.getCookieString(...)`
Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`.
-.getCookieStringSync(...)
--------------------------
+### `.getCookieStringSync(...)`
Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
-.getSetCookieStrings(...)
--------------------------
+### `.getSetCookieStrings(...)`
Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`.
-.getSetCookieStringsSync(...)
------------------------------
+### `.getSetCookieStringsSync(...)`
Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
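
A sketch of the asynchronous jar flow these methods describe, with an illustrative URL and cookie string:

```js
var tough = require('tough-cookie');

var jar = new tough.CookieJar(); // default MemoryCookieStore

jar.setCookie('sid=abc123; Path=/', 'http://example.com/login', function (err, cookie) {
  if (err) throw err;
  // getCookieString passes a Cookie-header-ready string to the callback.
  jar.getCookieString('http://example.com/account', function (err, header) {
    if (err) throw err;
    console.log(header); // "sid=abc123"
  });
});
```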
-Store
-=====
+### `.serialize(cb(err,serializedObject))`
+
+Serialize the Jar if the underlying store supports `.getAllCookies`.
+
+**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
+
+See [Serialization Format].
+
+### `.serializeSync()`
+
+Sync version of .serialize
+
+### `.toJSON()`
+
+Alias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`.
+
+### `CookieJar.deserialize(serialized, [store], cb(err,object))`
-Base class for CookieJar stores.
+A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization.
-# CookieStore API
+The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created.
+
+As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback.
+
+### `CookieJar.deserializeSync(serialized, [store])`
+
+Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work.
+
+### `CookieJar.fromJSON(string)`
+
+Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`.
+
+### `.clone([store,]cb(err,newJar))`
+
+Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa.
+
+The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`.
+
+### `.cloneSync([store])`
+
+Synchronous version of `.clone`, returning a new `CookieJar` instance.
+
+The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used.
+
+The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls.
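+
Putting the serialization and cloning methods together, a synchronous sketch against the default `MemoryCookieStore` (cookie string and URL are illustrative):

```js
var tough = require('tough-cookie');

var jar = new tough.CookieJar();
jar.setCookieSync('sid=abc123; Path=/', 'http://example.com/');

var serialized = jar.serializeSync();   // plain Object (see Serialization Format)
var asString   = JSON.stringify(jar);   // goes through .toJSON(), same shape

var restored = tough.CookieJar.deserializeSync(serialized);
var copy     = jar.cloneSync();         // deep clone backed by a new MemoryCookieStore
```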
+
+## Store
+
+Base class for CookieJar stores. Available as `tough.Store`.
+
+## Store API
The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores.
-Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`. Stores are asynchronous by default, but if `store.synchronous` is set, then the `*Sync` methods on the CookieJar can be used.
+Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.
+
+Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (however, the continuation-passing style API is still used internally).
All `domain` parameters will have been normalized before calling.
The Cookie store must have all of the following methods.
-store.findCookie(domain, path, key, cb(err,cookie))
----------------------------------------------------
+### `store.findCookie(domain, path, key, cb(err,cookie))`
Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.
Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).
-store.findCookies(domain, path, cb(err,cookies))
-------------------------------------------------
+### `store.findCookies(domain, path, cb(err,cookies))`
Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.
@@ -341,8 +374,7 @@ The resulting list will be checked for applicability to the current request acco
As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only).
-store.putCookie(cookie, cb(err))
---------------------------------
+### `store.putCookie(cookie, cb(err))`
Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur.
@@ -350,12 +382,11 @@ The `cookie` object MUST NOT be modified; the caller will have already updated t
Pass an error if the cookie cannot be stored.
-store.updateCookie(oldCookie, newCookie, cb(err))
--------------------------------------------------
+### `store.updateCookie(oldCookie, newCookie, cb(err))`
Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.
-The `.lastAccessed` property will always be different between the two objects and `.created` will always be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are sorted (or selected for deletion).
+The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement).
Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object.
@@ -363,26 +394,59 @@ The `newCookie` and `oldCookie` objects MUST NOT be modified.
Pass an error if the newCookie cannot be stored.
-store.removeCookie(domain, path, key, cb(err))
-----------------------------------------------
+### `store.removeCookie(domain, path, key, cb(err))`
Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).
The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie.
-store.removeCookies(domain, path, cb(err))
-------------------------------------------
+### `store.removeCookies(domain, path, cb(err))`
Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed.
Pass an error ONLY if removing any existing cookies failed.
-MemoryCookieStore
-=================
+### `store.getAllCookies(cb(err, cookies))`
+
+Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure.
+
+Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail.
+
+Pass an error if retrieval fails.
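+
A skeleton of a custom store wired up against the API above; the in-memory index layout and the `TrivialStore` name are illustrative only, and the remaining required methods would follow the same pattern:

```js
var util = require('util');
var tough = require('tough-cookie');

function TrivialStore() {
  tough.Store.call(this);
  this.synchronous = false;          // continuation-passing style only
  this.idx = {};                     // domain -> path -> key -> Cookie
}
util.inherits(TrivialStore, tough.Store);

TrivialStore.prototype.findCookie = function (domain, path, key, cb) {
  var byPath = (this.idx[domain] || {})[path] || {};
  cb(null, byPath[key] || null);     // null, not an error, when absent
};

TrivialStore.prototype.putCookie = function (cookie, cb) {
  var d = this.idx[cookie.domain] = this.idx[cookie.domain] || {};
  var p = d[cookie.path] = d[cookie.path] || {};
  p[cookie.key] = cookie;            // replaces any existing domain/path/key match
  cb(null);
};

// findCookies, updateCookie, removeCookie(s) and getAllCookies complete the
// contract; getAllCookies should return cookies in creation order.
```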
+
+## MemoryCookieStore
+
+Inherits from `Store`.
-Inherits from Store.
+A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API.
-Just-in-memory CookieJar synchronous store implementation, used by default.
+# Serialization Format
+
+**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`.
+
+```js
+ {
+ // The version of tough-cookie that serialized this jar.
+ version: 'tough-cookie@1.x.y',
+
+ // add the store type, to make humans happy:
+ storeType: 'MemoryCookieStore',
+
+ // CookieJar configuration:
+ rejectPublicSuffixes: true,
+ // ... future items go here
+
+ // Gets filled from jar.store.getAllCookies():
+ cookies: [
+ {
+ key: 'string',
+ value: 'string',
+ // ...
+ /* other Cookie.serializableProperties go here */
+ }
+ ]
+ }
+```
# Copyright and License
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js
index bec7f90b1..4f677c387 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js
@@ -35,6 +35,7 @@ var pubsuffix = require('./pubsuffix');
var Store = require('./store').Store;
var MemoryCookieStore = require('./memstore').MemoryCookieStore;
var pathMatch = require('./pathMatch').pathMatch;
+var VERSION = require('../package.json').version;
var punycode;
try {
@@ -84,8 +85,6 @@ var YEAR = /^(\d{2}|\d{4})$/; // 2 to 4 digits
var MAX_TIME = 2147483647000; // 31-bit max
var MIN_TIME = 0; // 31-bit min
-var cookiesCreated = 0; // Number of cookies created in runtime
-
// RFC6265 S5.1.1 date parser:
function parseDate(str) {
@@ -458,47 +457,60 @@ function parse(str) {
}
}
- // ensure a default date for sorting:
- c.creation = new Date();
- //NOTE: add runtime index for the cookieCompare() to resolve the situation when Date's precision is not enough .
- //Store initial UTC time as well, so we will be able to determine if we need to fallback to the Date object.
- c._creationRuntimeIdx = ++cookiesCreated;
- c._initialCreationTime = c.creation.getTime();
return c;
}
+// avoid the V8 deoptimization monster!
+function jsonParse(str) {
+ var obj;
+ try {
+ obj = JSON.parse(str);
+ } catch (e) {
+ return e;
+ }
+ return obj;
+}
+
function fromJSON(str) {
if (!str) {
return null;
}
var obj;
- try {
- obj = JSON.parse(str);
- } catch (e) {
- return null;
+ if (typeof str === 'string') {
+ obj = jsonParse(str);
+ if (obj instanceof Error) {
+ return null;
+ }
+ } else {
+ // assume it's an Object
+ obj = str;
}
var c = new Cookie();
- for (var i=0; i<numCookieProperties; i++) {
- var prop = cookieProperties[i];
- if (obj[prop] == null) {
- continue;
+ for (var i=0; i<Cookie.serializableProperties.length; i++) {
+ var prop = Cookie.serializableProperties[i];
+ if (obj[prop] === undefined ||
+ obj[prop] === Cookie.prototype[prop])
+ {
+ continue; // leave as prototype default
}
+
if (prop === 'expires' ||
prop === 'creation' ||
prop === 'lastAccessed')
{
- c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]);
+ if (obj[prop] === null) {
+ c[prop] = null;
+ } else {
+ c[prop] = obj[prop] == "Infinity" ?
+ "Infinity" : new Date(obj[prop]);
+ }
} else {
c[prop] = obj[prop];
}
}
-
- // ensure a default date for sorting:
- c.creation = c.creation || new Date();
-
return c;
}
@@ -512,23 +524,28 @@ function fromJSON(str) {
*/
function cookieCompare(a,b) {
+ var cmp = 0;
+
// descending for length: b CMP a
- var deltaLen = (b.path ? b.path.length : 0) - (a.path ? a.path.length : 0);
- if (deltaLen !== 0) {
- return deltaLen;
+ var aPathLen = a.path ? a.path.length : 0;
+ var bPathLen = b.path ? b.path.length : 0;
+ cmp = bPathLen - aPathLen;
+ if (cmp !== 0) {
+ return cmp;
}
+ // ascending for time: a CMP b
var aTime = a.creation ? a.creation.getTime() : MAX_TIME;
var bTime = b.creation ? b.creation.getTime() : MAX_TIME;
-
- // NOTE: if creation dates are equal and they were not modified from the outside,
- // then use _creationRuntimeIdx for the comparison.
- if(aTime === bTime && aTime === a._initialCreationTime && bTime === b._initialCreationTime) {
- return a._creationRuntimeIdx - b._creationRuntimeIdx;
+ cmp = aTime - bTime;
+ if (cmp !== 0) {
+ return cmp;
}
- // ascending for time: a CMP b
- return aTime - bTime;
+ // break ties for the same millisecond (precision of JavaScript's clock)
+ cmp = a.creationIndex - b.creationIndex;
+
+ return cmp;
}
// Gives the permutation of all possible pathMatch()es of a given path. The
@@ -569,17 +586,31 @@ function getCookieContext(url) {
return urlParse(url);
}
-function Cookie (opts) {
- if (typeof opts !== "object") {
- return;
- }
- Object.keys(opts).forEach(function (key) {
- if (Cookie.prototype.hasOwnProperty(key)) {
- this[key] = opts[key] || Cookie.prototype[key];
+function Cookie(opts) {
+ opts = opts || {};
+
+ Object.keys(opts).forEach(function(prop) {
+ if (Cookie.prototype.hasOwnProperty(prop) &&
+ Cookie.prototype[prop] !== opts[prop] &&
+ prop.substr(0,1) !== '_')
+ {
+ this[prop] = opts[prop];
}
- }.bind(this));
+ }, this);
+
+ this.creation = this.creation || new Date();
+
+ // used to break creation ties in cookieCompare():
+ Object.defineProperty(this, 'creationIndex', {
+ configurable: false,
+ enumerable: false, // important for assert.deepEqual checks
+ writable: true,
+ value: ++Cookie.cookiesCreated
+ });
}
+Cookie.cookiesCreated = 0; // incremented each time a cookie is created
+
Cookie.parse = parse;
Cookie.fromJSON = fromJSON;
@@ -599,17 +630,22 @@ Cookie.prototype.extensions = null;
Cookie.prototype.hostOnly = null; // boolean when set
Cookie.prototype.pathIsDefault = null; // boolean when set
Cookie.prototype.creation = null; // Date when set; defaulted by Cookie.parse
-Cookie.prototype._initialCreationTime = null; // Used to determine if cookie.creation was modified
-Cookie.prototype._creationRuntimeIdx = null; // Runtime index of the created cookie, used in cookieCompare()
Cookie.prototype.lastAccessed = null; // Date when set
+Object.defineProperty(Cookie.prototype, 'creationIndex', {
+ configurable: true,
+ enumerable: false,
+ writable: true,
+ value: 0
+});
-var cookieProperties = Object.freeze(Object.keys(Cookie.prototype).map(function(p) {
- if (p instanceof Function) {
- return;
- }
- return p;
-}));
-var numCookieProperties = cookieProperties.length;
+Cookie.serializableProperties = Object.keys(Cookie.prototype)
+ .filter(function(prop) {
+ return !(
+ Cookie.prototype[prop] instanceof Function ||
+ prop === 'creationIndex' ||
+ prop.substr(0,1) === '_'
+ );
+ });
Cookie.prototype.inspect = function inspect() {
var now = Date.now();
@@ -620,6 +656,46 @@ Cookie.prototype.inspect = function inspect() {
'"';
};
+Cookie.prototype.toJSON = function() {
+ var obj = {};
+
+ var props = Cookie.serializableProperties;
+ for (var i=0; i<props.length; i++) {
+ var prop = props[i];
+ if (this[prop] === Cookie.prototype[prop]) {
+ continue; // leave as prototype default
+ }
+
+ if (prop === 'expires' ||
+ prop === 'creation' ||
+ prop === 'lastAccessed')
+ {
+ if (this[prop] === null) {
+ obj[prop] = null;
+ } else {
+ obj[prop] = this[prop] == "Infinity" ? // intentionally not ===
+ "Infinity" : this[prop].toISOString();
+ }
+ } else if (prop === 'maxAge') {
+ if (this[prop] !== null) {
+ // again, intentionally not ===
+ obj[prop] = (this[prop] == Infinity || this[prop] == -Infinity) ?
+ this[prop].toString() : this[prop];
+ }
+ } else {
+ if (this[prop] !== Cookie.prototype[prop]) {
+ obj[prop] = this[prop];
+ }
+ }
+ }
+
+ return obj;
+};
+
+Cookie.prototype.clone = function() {
+ return fromJSON(this.toJSON());
+};
+
Cookie.prototype.validate = function validate() {
if (!COOKIE_OCTETS.test(this.value)) {
return false;
@@ -744,7 +820,7 @@ Cookie.prototype.TTL = function TTL(now) {
// elsewhere)
Cookie.prototype.expiryTime = function expiryTime(now) {
if (this.maxAge != null) {
- var relativeTo = this.creation || now || new Date();
+ var relativeTo = now || this.creation || new Date();
var age = (this.maxAge <= 0) ? -Infinity : this.maxAge*1000;
return relativeTo.getTime() + age;
}
@@ -782,7 +858,6 @@ Cookie.prototype.canonicalizedDomain = function canonicalizedDomain() {
return canonicalDomain(this.domain);
};
-
function CookieJar(store, rejectPublicSuffixes) {
if (rejectPublicSuffixes != null) {
this.rejectPublicSuffixes = rejectPublicSuffixes;
@@ -895,6 +970,7 @@ CookieJar.prototype.setCookie = function(cookie, url, options, cb) {
return cb(options.ignoreError ? null : err);
}
cookie.creation = oldCookie.creation; // step 11.3
+ cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker
cookie.lastAccessed = now;
// Step 11.4 (delete cookie) is implied by just setting the new one:
store.updateCookie(oldCookie, cookie, next); // step 12
@@ -1041,6 +1117,151 @@ CookieJar.prototype.getSetCookieStrings = function(/*..., cb*/) {
this.getCookies.apply(this,args);
};
+CAN_BE_SYNC.push('serialize');
+CookieJar.prototype.serialize = function(cb) {
+ var type = this.store.constructor.name;
+ if (type === 'Object') {
+ type = null;
+ }
+
+ // update README.md "Serialization Format" if you change this, please!
+ var serialized = {
+ // The version of tough-cookie that serialized this jar. Generally a good
+ // practice since future versions can make data import decisions based on
+ // known past behavior. When/if this matters, use `semver`.
+ version: 'tough-cookie@'+VERSION,
+
+ // add the store type, to make humans happy:
+ storeType: type,
+
+ // CookieJar configuration:
+ rejectPublicSuffixes: !!this.rejectPublicSuffixes,
+
+ // this gets filled from getAllCookies:
+ cookies: []
+ };
+
+ if (!(this.store.getAllCookies &&
+ typeof this.store.getAllCookies === 'function'))
+ {
+ return cb(new Error('store does not support getAllCookies and cannot be serialized'));
+ }
+
+ this.store.getAllCookies(function(err,cookies) {
+ if (err) {
+ return cb(err);
+ }
+
+ serialized.cookies = cookies.map(function(cookie) {
+ // convert to serialized 'raw' cookies
+ cookie = (cookie instanceof Cookie) ? cookie.toJSON() : cookie;
+
+ // Remove the index so new ones get assigned during deserialization
+ delete cookie.creationIndex;
+
+ return cookie;
+ });
+
+ return cb(null, serialized);
+ });
+};
+
+// well-known name that JSON.stringify calls
+CookieJar.prototype.toJSON = function() {
+ return this.serializeSync();
+};
+
+// use the class method CookieJar.deserialize instead of calling this directly
+CAN_BE_SYNC.push('_importCookies');
+CookieJar.prototype._importCookies = function(serialized, cb) {
+ var jar = this;
+ var cookies = serialized.cookies;
+ if (!cookies || !Array.isArray(cookies)) {
+ return cb(new Error('serialized jar has no cookies array'));
+ }
+
+ function putNext(err) {
+ if (err) {
+ return cb(err);
+ }
+
+ if (!cookies.length) {
+ return cb(err, jar);
+ }
+
+ var cookie;
+ try {
+ cookie = fromJSON(cookies.shift());
+ } catch (e) {
+ return cb(e);
+ }
+
+ if (cookie === null) {
+ return putNext(null); // skip this cookie
+ }
+
+ jar.store.putCookie(cookie, putNext);
+ }
+
+ putNext();
+};
+
+CookieJar.deserialize = function(strOrObj, store, cb) {
+ if (arguments.length !== 3) {
+ // store is optional
+ cb = store;
+ store = null;
+ }
+
+ var serialized;
+ if (typeof strOrObj === 'string') {
+ serialized = jsonParse(strOrObj);
+ if (serialized instanceof Error) {
+ return cb(serialized);
+ }
+ } else {
+ serialized = strOrObj;
+ }
+
+ var jar = new CookieJar(store, serialized.rejectPublicSuffixes);
+ jar._importCookies(serialized, function(err) {
+ if (err) {
+ return cb(err);
+ }
+ cb(null, jar);
+ });
+};
+
+CookieJar.fromJSON = CookieJar.deserializeSync;
+CookieJar.deserializeSync = function(strOrObj, store) {
+ var serialized = typeof strOrObj === 'string' ?
+ JSON.parse(strOrObj) : strOrObj;
+ var jar = new CookieJar(store, serialized.rejectPublicSuffixes);
+
+ // catch this mistake early:
+ if (!jar.store.synchronous) {
+ throw new Error('CookieJar store is not synchronous; use async API instead.');
+ }
+
+ jar._importCookiesSync(serialized);
+ return jar;
+};
+
+CAN_BE_SYNC.push('clone');
+CookieJar.prototype.clone = function(newStore, cb) {
+ if (arguments.length === 1) {
+ cb = newStore;
+ newStore = null;
+ }
+
+ this.serialize(function(err,serialized) {
+ if (err) {
+ return cb(err);
+ }
+ CookieJar.deserialize(newStore, serialized, cb);
+ });
+};
+
// Use a closure to provide a true imperative API for synchronous stores.
function syncWrap(method) {
return function() {
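
A short sketch of the per-cookie helpers added in this hunk (`toJSON`, `clone`, and the object-accepting `fromJSON`); the cookie string is illustrative.

```js
var Cookie = require('tough-cookie').Cookie;

var c = Cookie.parse('k=v; Domain=example.com; Path=/; Max-Age=3600');
var obj = c.toJSON();                // only non-default serializableProperties
console.log(obj.maxAge);             // 3600 -- finite maxAge stays a number
console.log('creationIndex' in obj); // false -- deliberately never serialized

var copy = c.clone();                // implemented as fromJSON(this.toJSON())
console.log(copy instanceof Cookie);                 // true
console.log(copy.creationIndex !== c.creationIndex); // true -- fresh tie-breaker
```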
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/memstore.js b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/memstore.js
index a9ddc6f4c..89ceb6900 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/memstore.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/memstore.js
@@ -41,6 +41,8 @@ function MemoryCookieStore() {
util.inherits(MemoryCookieStore, Store);
exports.MemoryCookieStore = MemoryCookieStore;
MemoryCookieStore.prototype.idx = null;
+
+// Since it's just a struct in RAM, this Store is synchronous
MemoryCookieStore.prototype.synchronous = true;
// force a default depth:
@@ -116,21 +118,21 @@ MemoryCookieStore.prototype.putCookie = function(cookie, cb) {
cb(null);
};
-MemoryCookieStore.prototype.updateCookie = function updateCookie(oldCookie, newCookie, cb) {
+MemoryCookieStore.prototype.updateCookie = function(oldCookie, newCookie, cb) {
// updateCookie() may avoid updating cookies that are identical. For example,
// lastAccessed may not be important to some stores and an equality
// comparison could exclude that field.
this.putCookie(newCookie,cb);
};
-MemoryCookieStore.prototype.removeCookie = function removeCookie(domain, path, key, cb) {
+MemoryCookieStore.prototype.removeCookie = function(domain, path, key, cb) {
if (this.idx[domain] && this.idx[domain][path] && this.idx[domain][path][key]) {
delete this.idx[domain][path][key];
}
cb(null);
};
-MemoryCookieStore.prototype.removeCookies = function removeCookies(domain, path, cb) {
+MemoryCookieStore.prototype.removeCookies = function(domain, path, cb) {
if (this.idx[domain]) {
if (path) {
delete this.idx[domain][path];
@@ -140,3 +142,29 @@ MemoryCookieStore.prototype.removeCookies = function removeCookies(domain, path,
}
return cb(null);
};
+
+MemoryCookieStore.prototype.getAllCookies = function(cb) {
+ var cookies = [];
+ var idx = this.idx;
+
+ var domains = Object.keys(idx);
+ domains.forEach(function(domain) {
+ var paths = Object.keys(idx[domain]);
+ paths.forEach(function(path) {
+ var keys = Object.keys(idx[domain][path]);
+ keys.forEach(function(key) {
+ if (key !== null) {
+ cookies.push(idx[domain][path][key]);
+ }
+ });
+ });
+ });
+
+ // Sort by creationIndex so deserializing retains the creation order.
+ // When implementing your own store, this SHOULD retain the order too
+ cookies.sort(function(a,b) {
+ return (a.creationIndex||0) - (b.creationIndex||0);
+ });
+
+ cb(null, cookies);
+};
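
A hedged sketch of the new `getAllCookies()` on the default `MemoryCookieStore`: cookies come back in creation order even though the internal index is keyed by domain, then path, then key. Domains and values below are illustrative.

```js
var tough = require('tough-cookie');
var jar = new tough.CookieJar();

jar.setCookieSync('a=1; Domain=example.net; Path=/', 'http://example.net/');
jar.setCookieSync('b=2; Domain=example.com; Path=/', 'http://example.com/');

jar.store.getAllCookies(function(err, cookies) {
  if (err) throw err;
  // creation order is preserved via the creationIndex sort:
  console.log(cookies.map(function(c) { return c.key; })); // [ 'a', 'b' ]
});
```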
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/store.js b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/store.js
index ad69c14d3..bce52925d 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/store.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/store.js
@@ -62,6 +62,10 @@ Store.prototype.removeCookie = function(domain, path, key, cb) {
throw new Error('removeCookie is not implemented');
};
-Store.prototype.removeCookies = function removeCookies(domain, path, cb) {
+Store.prototype.removeCookies = function(domain, path, cb) {
throw new Error('removeCookies is not implemented');
};
+
+Store.prototype.getAllCookies = function(cb) {
+ throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)');
+};
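
A sketch of the contract this base-class stub enforces: a custom `Store` that never overrides `getAllCookies()` cannot back `jar.serialize()`. The `NullStore` name here is hypothetical.

```js
var util = require('util');
var tough = require('tough-cookie');

function NullStore() {
  tough.Store.call(this);
}
util.inherits(NullStore, tough.Store);
NullStore.prototype.synchronous = true; // allow the sync API for this demo

var jar = new tough.CookieJar(new NullStore());
try {
  jar.toJSON(); // delegates to serializeSync(), which needs getAllCookies()
} catch (e) {
  // "getAllCookies is not implemented (therefore jar cannot be serialized)"
  console.log(e.message);
}
```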
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/package.json b/deps/npm/node_modules/request/node_modules/tough-cookie/package.json
index 96d8d1d16..55605b3ee 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/package.json
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/package.json
@@ -16,7 +16,7 @@
"RFC6265",
"RFC2965"
],
- "version": "1.2.0",
+ "version": "2.0.0",
"homepage": "https://github.com/SalesforceEng/tough-cookie",
"repository": {
"type": "git",
@@ -36,9 +36,9 @@
"vows": "0.7.0",
"async": ">=0.1.12"
},
- "gitHead": "74e59de50b719bb9a1b01c8c9db57fa31401ed1a",
- "_id": "tough-cookie@1.2.0",
- "_shasum": "9b7e9d98e769e80b5aa899d944fe44e02ebf82ad",
+ "gitHead": "a3af6104da7787c23bb98910109b0e0e8a10153c",
+ "_id": "tough-cookie@2.0.0",
+ "_shasum": "41ce08720b35cf90beb044dd2609fb19e928718f",
"_from": "tough-cookie@>=0.12.0",
"_npmVersion": "2.7.4",
"_nodeVersion": "0.12.2",
@@ -47,8 +47,8 @@
"email": "jstash@gmail.com"
},
"dist": {
- "shasum": "9b7e9d98e769e80b5aa899d944fe44e02ebf82ad",
- "tarball": "http://registry.npmjs.org/tough-cookie/-/tough-cookie-1.2.0.tgz"
+ "shasum": "41ce08720b35cf90beb044dd2609fb19e928718f",
+ "tarball": "http://registry.npmjs.org/tough-cookie/-/tough-cookie-2.0.0.tgz"
},
"maintainers": [
{
@@ -61,6 +61,6 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-1.2.0.tgz",
+ "_resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/public-suffix.txt b/deps/npm/node_modules/request/node_modules/tough-cookie/public-suffix.txt
index f8941f7ac..d5c9924ed 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/public-suffix.txt
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/public-suffix.txt
@@ -826,6 +826,7 @@ edu.et
biz.et
name.et
info.et
+net.et
// eu : http://en.wikipedia.org/wiki/.eu
eu
@@ -932,6 +933,11 @@ org.gi
// gl : http://en.wikipedia.org/wiki/.gl
// http://nic.gl
gl
+co.gl
+com.gl
+edu.gl
+net.gl
+org.gl
// gm : http://www.nic.gm/htmlpages%5Cgm-policy.htm
gm
@@ -3571,6 +3577,7 @@ ltd.lk
assn.lk
grp.lk
hotel.lk
+ac.lk
// lr : http://psg.com/dns/lr/lr.txt
// Submitted by registry <randy@psg.com> 2008-06-17
@@ -4412,9 +4419,9 @@ mobi.ng
// ni : http://www.nic.ni/dominios.htm
*.ni
-// nl : http://www.domain-registry.nl/ace.php/c,728,122,,,,Home.html
-// Confirmed by registry <Antoin.Verschuren@sidn.nl> (with technical
-// reservations) 2008-06-08
+// nl : http://en.wikipedia.org/wiki/.nl
+// https://www.sidn.nl/
+// ccTLD for the Netherlands
nl
// BV.nl will be a registry for dutch BV's (besloten vennootschap)
@@ -5304,14 +5311,11 @@ gos.pk
info.pk
// pl http://www.dns.pl/english/index.html
-// confirmed on 26.09.2014 from Bogna Tchórzewska <partner@dns.pl>
+// updated by .PL registry on 2015-04-28
pl
com.pl
net.pl
org.pl
-info.pl
-waw.pl
-gov.pl
// pl functional domains (http://www.dns.pl/english/index.html)
aid.pl
agro.pl
@@ -5321,6 +5325,7 @@ biz.pl
edu.pl
gmina.pl
gsm.pl
+info.pl
mail.pl
miasta.pl
media.pl
@@ -5342,16 +5347,55 @@ tm.pl
tourism.pl
travel.pl
turystyka.pl
-// Government domains (administred by ippt.gov.pl)
-uw.gov.pl
-um.gov.pl
+// Government domains
+gov.pl
+ap.gov.pl
+ic.gov.pl
+is.gov.pl
+us.gov.pl
+kmpsp.gov.pl
+kppsp.gov.pl
+kwpsp.gov.pl
+psp.gov.pl
+wskr.gov.pl
+kwp.gov.pl
+mw.gov.pl
ug.gov.pl
+um.gov.pl
+umig.gov.pl
+ugim.gov.pl
upow.gov.pl
+uw.gov.pl
starostwo.gov.pl
+pa.gov.pl
+po.gov.pl
+psse.gov.pl
+pup.gov.pl
+rzgw.gov.pl
+sa.gov.pl
so.gov.pl
sr.gov.pl
-po.gov.pl
-pa.gov.pl
+wsa.gov.pl
+sko.gov.pl
+uzs.gov.pl
+wiih.gov.pl
+winb.gov.pl
+pinb.gov.pl
+wios.gov.pl
+witd.gov.pl
+wzmiuw.gov.pl
+piw.gov.pl
+wiw.gov.pl
+griw.gov.pl
+wif.gov.pl
+oum.gov.pl
+sdn.gov.pl
+zp.gov.pl
+uppo.gov.pl
+mup.gov.pl
+wuoz.gov.pl
+konsulat.gov.pl
+oirm.gov.pl
// pl regional domains (http://www.dns.pl/english/index.html)
augustow.pl
babia-gora.pl
@@ -5459,6 +5503,7 @@ ustka.pl
walbrzych.pl
warmia.pl
warszawa.pl
+waw.pl
wegrow.pl
wielun.pl
wlocl.pl
@@ -6621,114 +6666,147 @@ edu.ws
yt
// IDN ccTLDs
-// Please sort by ISO 3166 ccTLD, then punicode string
-// when submitting patches and follow this format:
-// <Punicode> ("<english word>" <language>) : <ISO 3166 ccTLD>
-// [optional sponsoring org]
-// <URL>
+// When submitting patches, please maintain a sort by ISO 3166 ccTLD, then
+// U-label, and follow this format:
+// // A-Label ("<Latin renderings>", <language name>[, variant info]) : <ISO 3166 ccTLD>
+// // [sponsoring org]
+// U-Label
-// xn--mgbaam7a8h ("Emerat" Arabic) : AE
+// xn--mgbaam7a8h ("Emerat", Arabic) : AE
// http://nic.ae/english/arabicdomain/rules.jsp
امارات
-// xn--54b7fta0cc ("Bangla" Bangla) : BD
+// xn--y9a3aq ("hye", Armenian) : AM
+// ISOC AM (operated by .am Registry)
+հայ
+
+// xn--54b7fta0cc ("Bangla", Bangla) : BD
বাংলা
-// xn--fiqs8s ("China" Chinese-Han-Simplified <.Zhongguo>) : CN
+// xn--90ais ("bel", Belarusian/Russian Cyrillic) : BY
+// Operated by .by registry
+бел
+
+// xn--fiqs8s ("Zhongguo/China", Chinese, Simplified) : CN
// CNNIC
// http://cnnic.cn/html/Dir/2005/10/11/3218.htm
中国
-// xn--fiqz9s ("China" Chinese-Han-Traditional <.Zhongguo>) : CN
+// xn--fiqz9s ("Zhongguo/China", Chinese, Traditional) : CN
// CNNIC
// http://cnnic.cn/html/Dir/2005/10/11/3218.htm
中國
-// xn--lgbbat1ad8j ("Algeria / Al Jazair" Arabic) : DZ
+// xn--lgbbat1ad8j ("Algeria/Al Jazair", Arabic) : DZ
الجزائر
-// xn--wgbh1c ("Egypt" Arabic .masr) : EG
+// xn--wgbh1c ("Egypt/Masr", Arabic) : EG
// http://www.dotmasr.eg/
مصر
-// xn--node ("ge" Georgian (Mkhedruli)) : GE
+// xn--node ("ge", Georgian Mkhedruli) : GE
გე
-// xn--j6w193g ("Hong Kong" Chinese-Han) : HK
+// xn--qxam ("el", Greek) : GR
+// Hellenic Ministry of Infrastructure, Transport, and Networks
+ελ
+
+// xn--j6w193g ("Hong Kong", Chinese) : HK
// https://www2.hkirc.hk/register/rules.jsp
香港
-// xn--h2brj9c ("Bharat" Devanagari) : IN
+// xn--h2brj9c ("Bharat", Devanagari) : IN
// India
भारत
-// xn--mgbbh1a71e ("Bharat" Arabic) : IN
+// xn--mgbbh1a71e ("Bharat", Arabic) : IN
// India
بھارت
-// xn--fpcrj9c3d ("Bharat" Telugu) : IN
+// xn--fpcrj9c3d ("Bharat", Telugu) : IN
// India
భారత్
-// xn--gecrj9c ("Bharat" Gujarati) : IN
+// xn--gecrj9c ("Bharat", Gujarati) : IN
// India
ભારત
-// xn--s9brj9c ("Bharat" Gurmukhi) : IN
+// xn--s9brj9c ("Bharat", Gurmukhi) : IN
// India
ਭਾਰਤ
-// xn--45brj9c ("Bharat" Bengali) : IN
+// xn--45brj9c ("Bharat", Bengali) : IN
// India
ভারত
-// xn--xkc2dl3a5ee0h ("India" Tamil) : IN
+// xn--xkc2dl3a5ee0h ("India", Tamil) : IN
// India
இந்தியா
-// xn--mgba3a4f16a ("Iran" Persian) : IR
+// xn--mgba3a4f16a ("Iran", Persian) : IR
ایران
-// xn--mgba3a4fra ("Iran" Arabic) : IR
+// xn--mgba3a4fra ("Iran", Arabic) : IR
ايران
-// xn--mgbayh7gpa ("al-Ordon" Arabic) : JO
+// xn--mgbtx2b ("Iraq", Arabic) : IQ
+// Communications and Media Commission
+عراق
+
+// xn--mgbayh7gpa ("al-Ordon", Arabic) : JO
// National Information Technology Center (NITC)
// Royal Scientific Society, Al-Jubeiha
الاردن
-// xn--3e0b707e ("Republic of Korea" Hangul) : KR
+// xn--3e0b707e ("Republic of Korea", Hangul) : KR
한국
-// xn--80ao21a ("Kaz" Kazakh) : KZ
+// xn--80ao21a ("Kaz", Kazakh) : KZ
қаз
-// xn--fzc2c9e2c ("Lanka" Sinhalese-Sinhala) : LK
+// xn--fzc2c9e2c ("Lanka", Sinhalese-Sinhala) : LK
// http://nic.lk
ලංකා
-// xn--xkc2al3hye2a ("Ilangai" Tamil) : LK
+// xn--xkc2al3hye2a ("Ilangai", Tamil) : LK
// http://nic.lk
இலங்கை
-// xn--mgbc0a9azcg ("Morocco / al-Maghrib" Arabic) : MA
+// xn--mgbc0a9azcg ("Morocco/al-Maghrib", Arabic) : MA
المغرب
-// xn--l1acc ("mon" Mongolian) : MN
+// xn--d1alf ("mkd", Macedonian) : MK
+// MARnet
+мкд
+
+// xn--l1acc ("mon", Mongolian) : MN
мон
-// xn--mgbx4cd0ab ("Malaysia" Malay) : MY
+// xn--mix891f ("Macao", Chinese, Traditional) : MO
+// MONIC / HNET Asia (Registry Operator for .mo)
+澳門
+
+// xn--mix082f ("Macao", Chinese, Simplified) : MO
+澳门
+
+// xn--mgbx4cd0ab ("Malaysia", Malay) : MY
مليسيا
-// xn--mgb9awbf ("Oman" Arabic) : OM
+// xn--mgb9awbf ("Oman", Arabic) : OM
عمان
-// xn--ygbi2ammx ("Falasteen" Arabic) : PS
+// xn--mgbai9azgqp6j ("Pakistan", Urdu/Arabic) : PK
+پاکستان
+
+// xn--mgbai9a5eva00b ("Pakistan", Urdu/Arabic, variant) : PK
+پاكستان
+
+// xn--ygbi2ammx ("Falasteen", Arabic) : PS
// The Palestinian National Internet Naming Authority (PNINA)
// http://www.pnina.ps
فلسطين
-// xn--90a3ac ("srb" Cyrillic) : RS
+// xn--90a3ac ("srb", Cyrillic) : RS
// http://www.rnids.rs/en/the-.срб-domain
срб
пр.срб
@@ -6738,62 +6816,66 @@ yt
упр.срб
ак.срб
-// xn--p1ai ("rf" Russian-Cyrillic) : RU
+// xn--p1ai ("rf", Russian-Cyrillic) : RU
// http://www.cctld.ru/en/docs/rulesrf.php
рф
-// xn--wgbl6a ("Qatar" Arabic) : QA
+// xn--wgbl6a ("Qatar", Arabic) : QA
// http://www.ict.gov.qa/
قطر
-// xn--mgberp4a5d4ar ("AlSaudiah" Arabic) : SA
+// xn--mgberp4a5d4ar ("AlSaudiah", Arabic) : SA
// http://www.nic.net.sa/
السعودية
-// xn--mgberp4a5d4a87g ("AlSaudiah" Arabic) variant : SA
+// xn--mgberp4a5d4a87g ("AlSaudiah", Arabic, variant) : SA
السعودیة
-// xn--mgbqly7c0a67fbc ("AlSaudiah" Arabic) variant : SA
+// xn--mgbqly7c0a67fbc ("AlSaudiah", Arabic, variant) : SA
السعودیۃ
-// xn--mgbqly7cvafr ("AlSaudiah" Arabic) variant : SA
+// xn--mgbqly7cvafr ("AlSaudiah", Arabic, variant) : SA
السعوديه
-// xn--ogbpf8fl ("Syria" Arabic) : SY
-سورية
-
-// xn--mgbtf8fl ("Syria" Arabic) variant : SY
-سوريا
+// xn--mgbpl2fh ("sudan", Arabic) : SD
+// Operated by .sd registry
+سودان
-// xn--yfro4i67o Singapore ("Singapore" Chinese-Han) : SG
+// xn--yfro4i67o Singapore ("Singapore", Chinese) : SG
新加坡
-// xn--clchc0ea0b2g2a9gcd ("Singapore" Tamil) : SG
+// xn--clchc0ea0b2g2a9gcd ("Singapore", Tamil) : SG
சிங்கப்பூர்
-// xn--o3cw4h ("Thai" Thai) : TH
+// xn--ogbpf8fl ("Syria", Arabic) : SY
+سورية
+
+// xn--mgbtf8fl ("Syria", Arabic, variant) : SY
+سوريا
+
+// xn--o3cw4h ("Thai", Thai) : TH
// http://www.thnic.co.th
ไทย
-// xn--pgbs0dh ("Tunis") : TN
+// xn--pgbs0dh ("Tunisia", Arabic) : TN
// http://nic.tn
تونس
-// xn--kpry57d ("Taiwan" Chinese-Han-Traditional) : TW
+// xn--kpry57d ("Taiwan", Chinese, Traditional) : TW
// http://www.twnic.net/english/dn/dn_07a.htm
台灣
-// xn--kprw13d ("Taiwan" Chinese-Han-Simplified) : TW
+// xn--kprw13d ("Taiwan", Chinese, Simplified) : TW
// http://www.twnic.net/english/dn/dn_07a.htm
台湾
-// xn--nnx388a ("Taiwan") variant : TW
+// xn--nnx388a ("Taiwan", Chinese, variant) : TW
臺灣
-// xn--j1amh ("ukr" Cyrillic) : UA
+// xn--j1amh ("ukr", Cyrillic) : UA
укр
-// xn--mgb2ddes ("AlYemen" Arabic) : YE
+// xn--mgb2ddes ("AlYemen", Arabic) : YE
اليمن
// xxx : http://icmregistry.com
@@ -6812,7 +6894,7 @@ xxx
*.zw
-// List of new gTLDs imported from https://newgtlds.icann.org/newgtlds.csv on 2015-04-07T06:02:08Z
+// List of new gTLDs imported from https://newgtlds.icann.org/newgtlds.csv on 2015-05-06T09:31:08Z
// aaa : 2015-02-26 American Automobile Association, Inc.
aaa
@@ -6865,6 +6947,9 @@ africa
// africamagic : 2015-03-05 Electronic Media Network (Pty) Ltd
africamagic
+// agakhan : 2015-04-23 Fondation Aga Khan (Aga Khan Foundation)
+agakhan
+
// agency : 2013-11-14 Steel Falls, LLC
agency
@@ -6877,6 +6962,9 @@ airforce
// airtel : 2014-10-24 Bharti Airtel Limited
airtel
+// akdn : 2015-04-23 Fondation Aga Khan (Aga Khan Foundation)
+akdn
+
// alibaba : 2015-01-15 Alibaba Group Holding Limited
alibaba
@@ -6949,6 +7037,9 @@ axa
// azure : 2014-12-18 Microsoft Corporation
azure
+// baby : 2015-04-09 Johnson & Johnson Services, Inc.
+baby
+
// baidu : 2015-01-08 Baidu, Inc.
baidu
@@ -7180,13 +7271,16 @@ cba
// cbn : 2014-08-22 The Christian Broadcasting Network, Inc.
cbn
+// ceb : 2015-04-09 The Corporate Executive Board Company
+ceb
+
// center : 2013-11-07 Tin Mill, LLC
center
// ceo : 2013-11-07 CEOTLD Pty Ltd
ceo
-// cern : 2014-06-05 European Organization for Nuclear Research (\
+// cern : 2014-06-05 European Organization for Nuclear Research ("CERN")
cern
// cfa : 2014-08-28 CFA Institute
@@ -7195,9 +7289,15 @@ cfa
// cfd : 2014-12-11 IG Group Holdings PLC
cfd
+// chanel : 2015-04-09 Chanel International B.V.
+chanel
+
// channel : 2014-05-08 Charleston Road Registry Inc.
channel
+// chase : 2015-04-30 JPMorgan Chase & Co.
+chase
+
// chat : 2014-12-04 Sand Fields, LLC
chat
@@ -7249,6 +7349,9 @@ clinic
// clothing : 2013-08-27 Steel Lake, LLC
clothing
+// cloud : 2015-04-16 ARUBA S.p.A.
+cloud
+
// club : 2013-11-08 .CLUB DOMAINS, LLC
club
@@ -7675,7 +7778,7 @@ garden
// gbiz : 2014-07-17 Charleston Road Registry Inc.
gbiz
-// gdn : 2014-07-31 Joint Stock Company \
+// gdn : 2014-07-31 Joint Stock Company "Navigation-information systems"
gdn
// gea : 2014-12-04 GEA Group Aktiengesellschaft
@@ -7945,6 +8048,9 @@ java
// jcb : 2014-11-20 JCB Co., Ltd.
jcb
+// jcp : 2015-04-23 JCP Media, Inc.
+jcp
+
// jetzt : 2014-01-09 New TLD Company AB
jetzt
@@ -7972,6 +8078,9 @@ jot
// joy : 2014-12-18 Amazon EU S.à r.l.
joy
+// jpmorgan : 2015-04-30 JPMorgan Chase & Co.
+jpmorgan
+
// jprs : 2014-09-18 Japan Registry Services Co., Ltd.
jprs
@@ -7984,6 +8093,15 @@ kaufen
// kddi : 2014-09-12 KDDI CORPORATION
kddi
+// kerryhotels : 2015-04-30 Kerry Trading Co. Limited
+kerryhotels
+
+// kerrylogistics : 2015-04-09 Kerry Trading Co. Limited
+kerrylogistics
+
+// kerryproperties : 2015-04-09 Kerry Trading Co. Limited
+kerryproperties
+
// kfh : 2014-12-04 Kuwait Finance House
kfh
@@ -8005,6 +8123,9 @@ koeln
// komatsu : 2015-01-08 Komatsu Ltd.
komatsu
+// kpmg : 2015-04-23 KPMG International Cooperative (KPMG International Genossenschaft)
+kpmg
+
// kpn : 2015-01-08 Koninklijke KPN N.V.
kpn
@@ -8014,6 +8135,9 @@ krd
// kred : 2013-12-19 KredTLD Pty Ltd
kred
+// kuokgroup : 2015-04-09 Kerry Trading Co. Limited
+kuokgroup
+
// kyknet : 2015-03-05 Electronic Media Network (Pty) Ltd
kyknet
@@ -8047,7 +8171,7 @@ law
// lawyer : 2014-03-20
lawyer
-// lds : 2014-03-20 IRI Domain Management, LLC (\
+// lds : 2014-03-20 IRI Domain Management, LLC ("Applicant")
lds
// lease : 2014-03-06 Victor Trail, LLC
@@ -8059,6 +8183,9 @@ leclerc
// legal : 2014-10-16 Blue Falls, LLC
legal
+// lexus : 2015-04-23 TOYOTA MOTOR CORPORATION
+lexus
+
// lgbt : 2014-05-08 Afilias Limited
lgbt
@@ -8209,6 +8336,9 @@ microsoft
// mini : 2014-01-09 Bayerische Motoren Werke Aktiengesellschaft
mini
+// mls : 2015-04-23 The Canadian Real Estate Association
+mls
+
// mma : 2014-11-07 MMA IARD
mma
@@ -8227,6 +8357,9 @@ moe
// moi : 2014-12-18 Amazon EU S.à r.l.
moi
+// mom : 2015-04-16 Uniregistry, Corp.
+mom
+
// monash : 2013-09-30 Monash University
monash
@@ -8236,7 +8369,7 @@ money
// montblanc : 2014-06-23 Richemont DNS Inc.
montblanc
-// mormon : 2013-12-05 IRI Domain Management, LLC (\
+// mormon : 2013-12-05 IRI Domain Management, LLC ("Applicant")
mormon
// mortgage : 2014-03-20
@@ -8350,6 +8483,9 @@ nyc
// obi : 2014-09-25 OBI Group Holding SE & Co. KGaA
obi
+// observer : 2015-04-30 Guardian News and Media Limited
+observer
+
// office : 2015-03-12 Microsoft Corporation
office
@@ -8512,6 +8648,9 @@ properties
// property : 2014-05-22 Uniregistry, Corp.
property
+// protection : 2015-04-23 Symantec Corporation
+protection
+
// pub : 2013-12-12 United TLD Holdco Ltd.
pub
@@ -8689,7 +8828,7 @@ sbs
// sca : 2014-03-13 SVENSKA CELLULOSA AKTIEBOLAGET SCA (publ)
sca
-// scb : 2014-02-20 The Siam Commercial Bank Public Company Limited (\
+// scb : 2014-02-20 The Siam Commercial Bank Public Company Limited ("SCB")
scb
// schmidt : 2014-04-03 SALM S.A.S.
@@ -8740,6 +8879,9 @@ sexy
// sharp : 2014-05-01 Sharp Corporation
sharp
+// shaw : 2015-04-23 Shaw Cablesystems G.P.
+shaw
+
// shia : 2014-09-04 Asia Green IT System Bilgisayar San. ve Tic. Ltd. Sti.
shia
@@ -8767,6 +8909,9 @@ singles
// site : 2015-01-15 DotSite Inc.
site
+// ski : 2015-04-09 STARTING DOT LIMITED
+ski
+
// skin : 2015-01-15 L'Oréal
skin
@@ -8848,6 +8993,9 @@ stockholm
// storage : 2014-12-22 Self Storage Company LLC
storage
+// store : 2015-04-09 DotStore Inc.
+store
+
// studio : 2015-02-11 Spring Goodbye, LLC
studio
@@ -8902,13 +9050,16 @@ tab
// taipei : 2014-07-10 Taipei City Government
taipei
+// talk : 2015-04-09 Amazon EU S.à r.l.
+talk
+
// taobao : 2015-01-15 Alibaba Group Holding Limited
taobao
// tatamotors : 2015-03-12 Tata Motors Ltd
tatamotors
-// tatar : 2014-04-24 Limited Liability Company \
+// tatar : 2014-04-24 Limited Liability Company "Coordination Center of Regional Domain of Tatarstan Republic"
tatar
// tattoo : 2013-08-30 Uniregistry, Corp.
@@ -8950,6 +9101,9 @@ thd
// theater : 2015-03-19 Blue Tigers, LLC
theater
+// theguardian : 2015-04-30 Guardian News and Media Limited
+theguardian
+
// tickets : 2015-02-05 Accent Media Limited
tickets
@@ -8995,6 +9149,9 @@ tours
// town : 2014-03-06 Koko Moon, LLC
town
+// toyota : 2015-04-23 TOYOTA MOTOR CORPORATION
+toyota
+
// toys : 2014-03-06 Pioneer Orchard, LLC
toys
@@ -9307,6 +9464,9 @@ xin
// xn--estv75g : 2015-02-19 Industrial and Commercial Bank of China Limited
工行
+// xn--fct429k : 2015-04-09 Amazon EU S.à r.l.
+家電
+
// xn--fhbei : 2015-01-15 VeriSign Sarl
كوم
@@ -9436,6 +9596,9 @@ vermögensberatung
// xn--vuq861b : 2014-10-16 Beijing Tele-info Network Technology Co., Ltd.
信息
+// xn--w4r85el8fhu5dnra : 2015-04-30 Kerry Trading Co. Limited
+嘉里大酒店
+
// xn--xhq521b : 2013-11-14 Guangzhou YU Wei Information Technology Co., Ltd.
广东
@@ -9466,6 +9629,9 @@ yoga
// yokohama : 2013-12-12 GMO Registry, Inc.
yokohama
+// you : 2015-04-09 Amazon EU S.à r.l.
+you
+
// youtube : 2014-05-01 Charleston Road Registry Inc.
youtube
@@ -9908,6 +10074,66 @@ webhop.org
worse-than.tv
writesthisblog.com
+// EU.org https://eu.org/
+// Submitted by Pierre Beyssac <hostmaster@eu.org> 2015-04-17
+
+eu.org
+al.eu.org
+asso.eu.org
+at.eu.org
+au.eu.org
+be.eu.org
+bg.eu.org
+ca.eu.org
+cd.eu.org
+ch.eu.org
+cn.eu.org
+cy.eu.org
+cz.eu.org
+de.eu.org
+dk.eu.org
+edu.eu.org
+ee.eu.org
+es.eu.org
+fi.eu.org
+fr.eu.org
+gr.eu.org
+hr.eu.org
+hu.eu.org
+ie.eu.org
+il.eu.org
+in.eu.org
+int.eu.org
+is.eu.org
+it.eu.org
+jp.eu.org
+kr.eu.org
+lt.eu.org
+lu.eu.org
+lv.eu.org
+mc.eu.org
+me.eu.org
+mk.eu.org
+mt.eu.org
+my.eu.org
+net.eu.org
+ng.eu.org
+nl.eu.org
+no.eu.org
+nz.eu.org
+paris.eu.org
+pl.eu.org
+pt.eu.org
+q-a.eu.org
+ro.eu.org
+ru.eu.org
+se.eu.org
+si.eu.org
+sk.eu.org
+tr.eu.org
+uk.eu.org
+us.eu.org
+
// Fastly Inc. http://www.fastly.com/
// Submitted by Vladimir Vuksan <vladimir@fastly.com> 2013-05-31
a.ssl.fastly.net
@@ -10011,6 +10237,10 @@ azurewebsites.net
azure-mobile.net
cloudapp.net
+// Neustar Inc.
+// Submitted by Trung Tran <Trung.Tran@neustar.biz> 2015-04-23
+4u.com
+
// NFSN, Inc. : https://www.NearlyFreeSpeech.NET/
// Submitted by Jeff Wheelhouse <support@nearlyfreespeech.net> 2014-02-02
nfshost.com
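
An illustrative sketch of why the bundled Public Suffix List above matters to tough-cookie: with the default `rejectPublicSuffixes` behavior, a cookie scoped to a listed suffix such as `gov.pl` is refused. The host and cookie below are hypothetical.

```js
var tough = require('tough-cookie');
var jar = new tough.CookieJar(); // rejectPublicSuffixes defaults to true

jar.setCookie('x=1; Domain=gov.pl; Path=/', 'http://uw.gov.pl/', function(err) {
  // err is set: the cookie's Domain is itself on the public suffix list
  console.log(err instanceof Error); // true
});
```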
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_sorting_test.js b/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_sorting_test.js
index 8cc984272..826562a2b 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_sorting_test.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_sorting_test.js
@@ -33,6 +33,7 @@ var vows = require('vows');
var assert = require('assert');
var tough = require('../lib/cookie');
var Cookie = tough.Cookie;
+var CookieJar = tough.CookieJar;
function toKeyArray(cookies) {
return cookies.map(function (c) {
@@ -43,6 +44,71 @@ function toKeyArray(cookies) {
vows
.describe('Cookie sorting')
.addBatch({
+ "Assumptions:": {
+ ".creationIndex is set during construction": function() {
+ var now = new Date();
+ var c1 = new Cookie();
+ var c2 = new Cookie();
+ assert.isNumber(c1.creationIndex);
+ assert.isNumber(c2.creationIndex);
+ assert(c1.creationIndex < c2.creationIndex,
+ 'creationIndex should increase with each construction');
+ },
+
+ ".creationIndex is set during construction (forced ctime)": function() {
+ var now = new Date();
+ var c1 = new Cookie({creation: now});
+ var c2 = new Cookie({creation: now});
+ assert.strictEqual(c1.creation, c2.creation);
+ assert.isNumber(c1.creationIndex);
+ assert.isNumber(c2.creationIndex);
+ assert(c1.creationIndex < c2.creationIndex,
+ 'creationIndex should increase with each construction');
+ },
+
+ ".creationIndex is left alone during new setCookie": function() {
+ var jar = new CookieJar();
+ var c = new Cookie({key:'k', value:'v', domain:'example.com'});
+ var now = new Date();
+ var beforeDate = c.creation;
+ assert.instanceOf(beforeDate, Date);
+ assert.notStrictEqual(now, beforeDate);
+ var beforeIndex = c.creationIndex;
+ assert.isNumber(c.creationIndex);
+
+ jar.setCookieSync(c, 'http://example.com/', {now: now});
+
+ assert.strictEqual(c.creation, now);
+ assert.strictEqual(c.creationIndex, beforeIndex);
+ },
+
+ ".creationIndex is preserved during update setCookie": function() {
+ var jar = new CookieJar();
+
+ var thisMs = Date.now();
+ var t1 = new Date(thisMs);
+ var t2 = new Date(thisMs);
+ assert.notStrictEqual(t1, t2); // Date objects are distinct
+
+ var c = new Cookie({key:'k', value:'v1', domain:'example.com'});
+ jar.setCookieSync(c, 'http://example.com/', {now: t1});
+ var originalIndex = c.creationIndex;
+
+ assert.strictEqual(c.creation, t1);
+ assert.strictEqual(c.lastAccessed, t1);
+
+ c = new Cookie({key:'k', value:'v2', domain:'example.com'});
+ assert.notStrictEqual(c.creation, t1); // new timestamp assigned
+
+ jar.setCookieSync(c, 'http://example.com/', {now: t2});
+
+ assert.strictEqual(c.creation, t1); // retained
+ assert.strictEqual(c.lastAccessed, t2); // updated
+ assert.strictEqual(c.creationIndex, originalIndex); // retained
+ },
+ }
+ })
+ .addBatch({
"Cookie Sorting": {
topic: function () {
var cookies = [];
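
A sketch of the tie-breaking behavior the assumptions above assert: two cookies created within the same millisecond still sort deterministically because `cookieCompare()` falls back to `.creationIndex`. Keys and values are illustrative.

```js
var tough = require('tough-cookie');
var Cookie = tough.Cookie;

var now = new Date();
var first  = new Cookie({ key: 'a', value: '1', path: '/', creation: now });
var second = new Cookie({ key: 'b', value: '2', path: '/', creation: now });

console.log(tough.cookieCompare(first, second) < 0);          // true: lower index wins
console.log([second, first].sort(tough.cookieCompare)[0].key); // 'a'
```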
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_to_json_test.js b/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_to_json_test.js
index cc3f1fc85..94a23d46e 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_to_json_test.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/test/cookie_to_json_test.js
@@ -123,8 +123,8 @@ vows
},
"null": {
topic: function(f) { return f(null) },
- "looks good": function(str) {
- assert.match(str, /"maxAge":null/);
+ "absent": function(str) {
+ assert.match(str, /(?!"maxAge":null)/); // NB: negative RegExp
}
}
},
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/test/ietf_test.js b/deps/npm/node_modules/request/node_modules/tough-cookie/test/ietf_test.js
index fac2e3eff..8bd41dd65 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/test/ietf_test.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/test/ietf_test.js
@@ -61,8 +61,7 @@ function setGetCookieVows() {
jar.setCookieSync(cookieStr, sentFrom, {ignoreError: true});
});
- var actual = jar.getCookiesSync(sentTo);
- actual = actual.sort(tough.cookieCompare);
+ var actual = jar.getCookiesSync(sentTo,{sort:true});
assert.strictEqual(actual.length, expected.length);
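
The `{sort: true}` option used in the updated test asks `getCookies()`/`getCookiesSync()` to return cookies already ordered by `cookieCompare()` (longest path first, then creation time, then `creationIndex`). A small illustrative usage:

```js
var tough = require('tough-cookie');
var jar = new tough.CookieJar();

jar.setCookieSync('a=1; Path=/', 'http://example.com/');
jar.setCookieSync('b=2; Path=/long/path', 'http://example.com/long/path');

var sorted = jar.getCookiesSync('http://example.com/long/path', { sort: true });
console.log(sorted.map(function(c) { return c.key; })); // [ 'b', 'a' ] -- longest path first
```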
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/test/jar_serialization_test.js b/deps/npm/node_modules/request/node_modules/tough-cookie/test/jar_serialization_test.js
new file mode 100644
index 000000000..277c90c8f
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/test/jar_serialization_test.js
@@ -0,0 +1,348 @@
+/*!
+ * Copyright (c) 2015, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+'use strict';
+var vows = require('vows');
+var assert = require('assert');
+var tough = require('../lib/cookie');
+var Cookie = tough.Cookie;
+var CookieJar = tough.CookieJar;
+var Store = tough.Store;
+var MemoryCookieStore = tough.MemoryCookieStore;
+var VERSION = require('../package.json').version;
+
+var domains = ['example.com','www.example.com','example.net'];
+var paths = ['/','/foo','/foo/bar'];
+
+var isInteger = Number.isInteger || function(value) {
+ // Node 0.10 (still supported) doesn't have Number.isInteger
+ // from https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isInteger
+ return typeof value === "number" &&
+ isFinite(value) &&
+ Math.floor(value) === value;
+};
+
+function setUp(context) {
+ context.now = new Date();
+ context.nowISO = context.now.toISOString();
+ context.expires = new Date(context.now.getTime() + 86400000);
+
+ var c, domain;
+ context.jar = new CookieJar();
+
+ context.totalCookies = 0;
+
+ // Do paths first since the MemoryCookieStore index is domain at the top
+ // level. This should cause the preservation of creation order in
+ // getAllCookies to be exercised.
+ for (var i = 0; i<paths.length; i++) {
+ var path = paths[i];
+ for (var j = 0; j<domains.length; j++) {
+ domain = domains[j];
+ c = new Cookie({
+ expires: context.expires,
+ domain: domain,
+ path: path,
+ key: 'key',
+ value: 'value'+j+i
+ });
+ context.jar.setCookieSync(c, 'http://'+domain+'/', {now: context.now});
+ context.totalCookies++;
+ }
+ }
+
+ // corner cases
+ domain = 'example.com';
+ var cornerCases = [
+ { expires: 'Infinity', key: 'infExp', value: 'infExp' },
+ { maxAge: 3600, key: 'max', value: 'max' },
+ { expires: context.expires, key: 'flags', value: 'flags',
+ secure: true, httpOnly: true },
+ { expires: context.expires, key: 'honly', value: 'honly',
+ hostOnly: true, domain: 'www.example.org' },
+ ];
+
+ for (var i = 0; i<cornerCases.length; i++) {
+ cornerCases[i].domain = cornerCases[i].domain || 'example.org';
+ cornerCases[i].path = '/';
+ c = new Cookie(cornerCases[i]);
+ context.jar.setCookieSync(c, 'https://www.example.org/', {now: context.now});
+ context.totalCookies++;
+ }
+}
+
+function checkMetadata(serialized) {
+ assert.notEqual(serialized, null);
+ assert.isObject(serialized);
+ assert.equal(serialized.version, 'tough-cookie@'+VERSION);
+ assert.equal(serialized.storeType, 'MemoryCookieStore');
+ assert.typeOf(serialized.rejectPublicSuffixes, 'boolean');
+ assert.isArray(serialized.cookies);
+}
+
+var serializedCookiePropTypes = {
+ 'key': 'string',
+ 'value': 'string',
+ 'expires': 'isoDate', // if "Infinity" it's supposed to be missing
+ 'maxAge': 'intOrInf',
+ 'domain': 'string',
+ 'path': 'string',
+ 'secure': 'boolean',
+ 'httpOnly': 'boolean',
+ 'extensions': 'array', // of strings, technically
+ 'hostOnly': 'boolean',
+ 'pathIsDefault': 'boolean',
+ 'creation': 'isoDate',
+ 'lastAccessed': 'isoDate'
+};
+
+function validateSerializedCookie(cookie) {
+ assert.isObject(cookie);
+ assert.isFalse(cookie instanceof Cookie);
+
+ Object.keys(cookie).forEach(function(prop) {
+ var type = serializedCookiePropTypes[prop];
+ switch(type) {
+ case 'string':
+ case 'boolean':
+ case 'array':
+ case 'number':
+ assert.typeOf(cookie[prop], type);
+ break;
+
+ case 'intOrInf':
+ if (cookie[prop] === 'Infinity' || cookie[prop] === '-Infinity') {
+ assert(true);
+ } else {
+ assert(isInteger(cookie[prop]),
+ "serialized property isn't integer: "+prop);
+ }
+ break;
+
+ case 'isoDate':
+ // rather than a regexp, assert it's parsable and equal
+ var parsed = Date.parse(cookie[prop]);
+ assert(parsed, 'could not parse serialized date property');
+ // assert.equals(cookie[prop], parsed.toISOString());
+ break;
+
+ default:
+ assert.fail("unexpected serialized property: "+prop);
+ }
+ });
+
+}
+
+vows
+ .describe('CookieJar serialization')
+ .addBatch({
+ "Assumptions:": {
+ "serializableProperties all accounted for": function() {
+ var actualKeys = Cookie.serializableProperties.concat([]); // copy
+ actualKeys.sort();
+ var expectedKeys = Object.keys(serializedCookiePropTypes);
+ expectedKeys.sort();
+ assert.deepEqual(actualKeys, expectedKeys);
+ }
+ }
+ })
+ .addBatch({
+ "For Stores without getAllCookies": {
+ topic: function() {
+ var store = new Store();
+ store.synchronous = true;
+ var jar = new CookieJar(store);
+ return jar;
+ },
+ "Cannot call toJSON": function(jar) {
+ assert.throws(function() {
+ jar.toJSON();
+ }, 'getAllCookies is not implemented (therefore jar cannot be serialized)');
+ }
+ }
+ })
+ .addBatch({
+ "For async stores": {
+ topic: function() {
+ var store = new MemoryCookieStore();
+ store.synchronous = false; // pretend it's async
+ var jar = new CookieJar(store);
+ return jar;
+ },
+ "Cannot call toJSON": function(jar) {
+ assert.throws(function() {
+ jar.toJSON();
+ }, 'CookieJar store is not synchronous; use async API instead.');
+ }
+ }
+ })
+ .addBatch({
+ "With a small store": {
+ topic: function() {
+ var now = this.now = new Date();
+ this.jar = new CookieJar();
+ // domain cookie with custom extension
+ var cookie = Cookie.parse('sid=one; domain=example.com; path=/; fubar');
+ this.jar.setCookieSync(cookie, 'http://example.com/', {now: this.now});
+
+ cookie = Cookie.parse('sid=two; domain=example.net; path=/; fubar');
+ this.jar.setCookieSync(cookie, 'http://example.net/', {now: this.now});
+
+ return this.jar;
+ },
+
+ "serialize synchronously": {
+ topic: function(jar) {
+ return jar.serializeSync();
+ },
+ "it gives a serialization with the two cookies": function(data) {
+ checkMetadata(data);
+ assert.equal(data.cookies.length, 2);
+ data.cookies.forEach(function(cookie) {
+ validateSerializedCookie(cookie);
+ });
+ },
+ "then deserialize": {
+ topic: function(data) {
+ return CookieJar.deserializeSync(data);
+ },
+ "memstores are identical": function(newJar) {
+ assert.deepEqual(this.jar.store, newJar.store);
+ }
+ }
+ },
+
+ "serialize asynchronously": {
+ topic: function(jar) {
+ jar.serialize(this.callback);
+ },
+ "it gives a serialization with the two cookies": function(data) {
+ checkMetadata(data);
+ assert.equal(data.cookies.length, 2);
+ data.cookies.forEach(function(cookie) {
+ validateSerializedCookie(cookie);
+ });
+ },
+ "then deserialize": {
+ topic: function(data) {
+ CookieJar.deserialize(data, this.callback);
+ },
+ "memstores are identical": function(newJar) {
+ assert.deepEqual(this.jar.store, newJar.store);
+ }
+ }
+ }
+ }
+ })
+ .addBatch({
+ "With a moderately-sized store": {
+ topic: function() {
+ setUp(this);
+ this.jar.serialize(this.callback);
+ },
+ "has expected metadata": function(err,jsonObj) {
+ assert.isNull(err);
+ assert.equal(jsonObj.version, 'tough-cookie@'+VERSION);
+ assert.isTrue(jsonObj.rejectPublicSuffixes);
+ assert.equal(jsonObj.storeType, 'MemoryCookieStore');
+ },
+ "has a bunch of objects as 'raw' cookies": function(jsonObj) {
+ assert.isArray(jsonObj.cookies);
+ assert.equal(jsonObj.cookies.length, this.totalCookies);
+
+ jsonObj.cookies.forEach(function(cookie) {
+ validateSerializedCookie(cookie);
+
+ if (cookie.key === 'key') {
+ assert.match(cookie.value, /^value\d\d/);
+ }
+
+ if (cookie.key === 'infExp' || cookie.key === 'max') {
+ assert.isUndefined(cookie.expires);
+ } else {
+ assert.strictEqual(cookie.expires, this.expires.toISOString())
+ }
+
+ if (cookie.key === 'max') {
+ assert.strictEqual(cookie.maxAge, 3600);
+ } else {
+ assert.isUndefined(cookie.maxAge);
+ }
+
+ assert.equal(cookie.hostOnly, cookie.key === 'honly');
+
+ if (cookie.key === 'flags') {
+ assert.isTrue(cookie.secure);
+ assert.isTrue(cookie.httpOnly);
+ } else {
+ assert.isUndefined(cookie.secure);
+ assert.isUndefined(cookie.httpOnly);
+ }
+
+ assert.strictEqual(cookie.creation, this.nowISO);
+ assert.strictEqual(cookie.lastAccessed, this.nowISO);
+
+ }, this);
+ },
+
+ "then taking it for a round-trip": {
+ topic: function(jsonObj) {
+ CookieJar.deserialize(jsonObj, this.callback);
+ },
+ "memstore index is identical": function(err,newJar) {
+ assert.deepEqual(newJar.store.idx, this.jar.store.idx);
+ },
+ "then spot-check retrieval": {
+ topic: function(newJar) {
+ newJar.getCookies('http://example.org/', this.callback);
+ },
+ "gets expected cookies": function(results) {
+ assert.isArray(results);
+ assert.equal(results.length, 2);
+
+ results.forEach(function(cookie) {
+ assert.instanceOf(cookie, Cookie);
+
+ if (cookie.key === 'infExp') {
+ assert.strictEqual(cookie.expires, "Infinity");
+ assert.strictEqual(cookie.TTL(this.now), Infinity);
+ } else if (cookie.key === 'max') {
+ assert.strictEqual(cookie.TTL(this.now), 3600*1000);
+ } else {
+ assert.fail('Unexpected cookie key: '+cookie.key);
+ }
+ }.bind(this));
+ }
+ }
+ }
+ }
+ })
+ .export(module);
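
An asynchronous counterpart of the serialization round-trip exercised by this new test file; the cookie and URLs are illustrative.

```js
var tough = require('tough-cookie');
var CookieJar = tough.CookieJar;

var jar = new CookieJar();
jar.setCookieSync('sid=async; Domain=example.com; Path=/', 'http://example.com/');

jar.serialize(function(err, serialized) {
  if (err) throw err;
  // store argument omitted: deserialize() falls back to a fresh MemoryCookieStore
  CookieJar.deserialize(serialized, function(err2, restored) {
    if (err2) throw err2;
    restored.getCookies('http://example.com/', function(err3, cookies) {
      console.log(cookies.length); // 1
    });
  });
});
```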
diff --git a/deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc b/deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc
deleted file mode 100644
index 4c1c8d497..000000000
--- a/deps/npm/node_modules/request/node_modules/tunnel-agent/.jshintrc
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "node": true,
- "asi": true,
- "laxcomma": true
-}
diff --git a/deps/npm/node_modules/request/node_modules/tunnel-agent/index.js b/deps/npm/node_modules/request/node_modules/tunnel-agent/index.js
index 13c04272d..da516ec43 100644
--- a/deps/npm/node_modules/request/node_modules/tunnel-agent/index.js
+++ b/deps/npm/node_modules/request/node_modules/tunnel-agent/index.js
@@ -81,23 +81,29 @@ TunnelingAgent.prototype.addRequest = function addRequest(req, options) {
if (self.sockets.length >= this.maxSockets) {
// We are over limit so we'll add it to the queue.
- self.requests.push({host: host, port: port, request: req})
+ self.requests.push({host: options.host, port: options.port, request: req})
return
}
// If we are under maxSockets create a new one.
- self.createSocket({host: options.host, port: options.port, request: req}, function(socket) {
+ self.createConnection({host: options.host, port: options.port, request: req})
+}
+
+TunnelingAgent.prototype.createConnection = function createConnection(pending) {
+ var self = this
+
+ self.createSocket(pending, function(socket) {
socket.on('free', onFree)
socket.on('close', onCloseOrRemove)
socket.on('agentRemove', onCloseOrRemove)
- req.onSocket(socket)
+ pending.request.onSocket(socket)
function onFree() {
- self.emit('free', socket, options.host, options.port)
+ self.emit('free', socket, pending.host, pending.port)
}
function onCloseOrRemove(err) {
- self.removeSocket()
+ self.removeSocket(socket)
socket.removeListener('free', onFree)
socket.removeListener('close', onCloseOrRemove)
socket.removeListener('agentRemove', onCloseOrRemove)
@@ -182,9 +188,7 @@ TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
if (pending) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
- this.createSocket(pending, function(socket) {
- pending.request.onSocket(socket)
- })
+ this.createConnection(pending)
}
}
@@ -197,6 +201,7 @@ function createSecureSocket(options, cb) {
, socket: socket
}
))
+ self.sockets[self.sockets.indexOf(socket)] = secureSocket
cb(secureSocket)
})
}
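
A hedged usage sketch for the refactor above: once `maxSockets` is reached, additional requests are queued and later serviced through `createConnection()` when a socket frees up or closes. The proxy address and target host below are hypothetical.

```js
var https = require('https');
var tunnel = require('tunnel-agent');

var agent = tunnel.httpsOverHttp({
  maxSockets: 1,                                   // force the queueing path
  proxy: { host: 'proxy.example.com', port: 3128 } // hypothetical proxy
});

['/a', '/b'].forEach(function(path) {              // second request starts queued
  https.request({ host: 'example.org', path: path, agent: agent }, function(res) {
    console.log(path, res.statusCode);
  }).end();
});
```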
diff --git a/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json b/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json
index e2bc2f012..77c19da8d 100644
--- a/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json
+++ b/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json
@@ -6,7 +6,7 @@
},
"name": "tunnel-agent",
"description": "HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module.",
- "version": "0.4.0",
+ "version": "0.4.1",
"repository": {
"url": "git+https://github.com/mikeal/tunnel-agent.git"
},
@@ -17,14 +17,44 @@
"engines": {
"node": "*"
},
- "readme": "tunnel-agent\n============\n\nHTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module.\n",
- "readmeFilename": "README.md",
+ "gitHead": "912a7a6d00e10ec76baf9c9369de280fa5badef3",
"bugs": {
"url": "https://github.com/mikeal/tunnel-agent/issues"
},
"homepage": "https://github.com/mikeal/tunnel-agent#readme",
- "_id": "tunnel-agent@0.4.0",
- "_shasum": "b1184e312ffbcf70b3b4c78e8c219de7ebb1c550",
- "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.0.tgz",
- "_from": "tunnel-agent@>=0.4.0 <0.5.0"
+ "_id": "tunnel-agent@0.4.1",
+ "scripts": {},
+ "_shasum": "bbeecff4d679ce753db9462761a88dfcec3c5ab3",
+ "_from": "tunnel-agent@>=0.4.0 <0.5.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.5",
+ "_npmUser": {
+ "name": "simov",
+ "email": "simeonvelichkov@gmail.com"
+ },
+ "dist": {
+ "shasum": "bbeecff4d679ce753db9462761a88dfcec3c5ab3",
+ "tarball": "http://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ {
+ "name": "nylen",
+ "email": "jnylen@gmail.com"
+ },
+ {
+ "name": "fredkschott",
+ "email": "fkschott@gmail.com"
+ },
+ {
+ "name": "simov",
+ "email": "simeonvelichkov@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.1.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/package.json b/deps/npm/node_modules/request/package.json
index a2f119a5c..0f3275a9b 100644
--- a/deps/npm/node_modules/request/package.json
+++ b/deps/npm/node_modules/request/package.json
@@ -7,7 +7,7 @@
"util",
"utility"
],
- "version": "2.57.0",
+ "version": "2.60.0",
"author": {
"name": "Mikeal Rogers",
"email": "mikeal.rogers@gmail.com"
@@ -25,19 +25,20 @@
},
"main": "index.js",
"dependencies": {
- "bl": "~0.9.0",
- "caseless": "~0.10.0",
+ "bl": "~1.0.0",
+ "caseless": "~0.11.0",
+ "extend": "~3.0.0",
"forever-agent": "~0.6.0",
- "form-data": "~0.2.0",
+ "form-data": "~1.0.0-rc1",
"json-stringify-safe": "~5.0.0",
- "mime-types": "~2.0.1",
+ "mime-types": "~2.1.2",
"node-uuid": "~1.4.0",
- "qs": "~3.1.0",
+ "qs": "~4.0.0",
"tunnel-agent": "~0.4.0",
"tough-cookie": ">=0.12.0",
"http-signature": "~0.11.0",
"oauth-sign": "~0.8.0",
- "hawk": "~2.3.0",
+ "hawk": "~3.1.0",
"aws-sign2": "~0.5.0",
"stringstream": "~0.0.4",
"combined-stream": "~1.0.1",
@@ -45,14 +46,17 @@
"har-validator": "^1.6.1"
},
"scripts": {
- "test": "npm run lint && node node_modules/.bin/taper tests/test-*.js && npm run test-browser",
+ "test": "npm run lint && npm run test-ci && npm run test-browser",
+ "test-ci": "taper tests/test-*.js",
+ "test-cov": "istanbul cover tape tests/test-*.js",
"test-browser": "node tests/browser/start.js",
- "lint": "node node_modules/.bin/eslint lib/ *.js tests/ && echo Lint passed."
+ "lint": "eslint lib/ *.js tests/ && echo Lint passed."
},
"devDependencies": {
"browserify": "~5.9.1",
"browserify-istanbul": "~0.1.3",
"buffer-equal": "0.0.1",
+ "codecov.io": "~0.1.2",
"coveralls": "~2.11.2",
"eslint": "0.18.0",
"function-bind": "~1.0.0",
@@ -69,17 +73,21 @@
"taper": "~0.4.0",
"bluebird": "~2.9.21"
},
- "gitHead": "1fafe0dc387e5efa9ae95b40aa80c43e83e1b98f",
+ "gitHead": "af19cef3bc60e9151ffce5015d8ce3c0728d3aca",
"homepage": "https://github.com/request/request#readme",
- "_id": "request@2.57.0",
- "_shasum": "d445105a42d009b9d724289633b449a6d723d989",
- "_from": "request@2.57.0",
- "_npmVersion": "2.10.1",
- "_nodeVersion": "0.12.4",
+ "_id": "request@2.60.0",
+ "_shasum": "498820957fcdded1d37749069610c85f61a29f2d",
+ "_from": "request@2.60.0",
+ "_npmVersion": "2.11.2",
+ "_nodeVersion": "0.12.6",
"_npmUser": {
"name": "simov",
"email": "simeonvelichkov@gmail.com"
},
+ "dist": {
+ "shasum": "498820957fcdded1d37749069610c85f61a29f2d",
+ "tarball": "http://registry.npmjs.org/request/-/request-2.60.0.tgz"
+ },
"maintainers": [
{
"name": "mikeal",
@@ -98,10 +106,6 @@
"email": "simeonvelichkov@gmail.com"
}
],
- "dist": {
- "shasum": "d445105a42d009b9d724289633b449a6d723d989",
- "tarball": "http://registry.npmjs.org/request/-/request-2.57.0.tgz"
- },
"directories": {},
- "_resolved": "https://registry.npmjs.org/request/-/request-2.57.0.tgz"
+ "_resolved": "https://registry.npmjs.org/request/-/request-2.60.0.tgz"
}
diff --git a/deps/npm/node_modules/request/request.js b/deps/npm/node_modules/request/request.js
index 21b4f5294..f3f5dd915 100644
--- a/deps/npm/node_modules/request/request.js
+++ b/deps/npm/node_modules/request/request.js
@@ -6,19 +6,17 @@ var http = require('http')
, util = require('util')
, stream = require('stream')
, zlib = require('zlib')
- , helpers = require('./lib/helpers')
, bl = require('bl')
, hawk = require('hawk')
, aws = require('aws-sign2')
, httpSignature = require('http-signature')
, mime = require('mime-types')
- , tunnel = require('tunnel-agent')
, stringstream = require('stringstream')
, caseless = require('caseless')
, ForeverAgent = require('forever-agent')
, FormData = require('form-data')
+ , helpers = require('./lib/helpers')
, cookies = require('./lib/cookies')
- , copy = require('./lib/copy')
, getProxyFromURI = require('./lib/getProxyFromURI')
, Querystring = require('./lib/querystring').Querystring
, Har = require('./lib/har').Har
@@ -26,46 +24,19 @@ var http = require('http')
, OAuth = require('./lib/oauth').OAuth
, Multipart = require('./lib/multipart').Multipart
, Redirect = require('./lib/redirect').Redirect
+ , Tunnel = require('./lib/tunnel').Tunnel
var safeStringify = helpers.safeStringify
, isReadStream = helpers.isReadStream
, toBase64 = helpers.toBase64
, defer = helpers.defer
+ , copy = helpers.copy
+ , version = helpers.version
, globalCookieJar = cookies.jar()
var globalPool = {}
-var defaultProxyHeaderWhiteList = [
- 'accept',
- 'accept-charset',
- 'accept-encoding',
- 'accept-language',
- 'accept-ranges',
- 'cache-control',
- 'content-encoding',
- 'content-language',
- 'content-length',
- 'content-location',
- 'content-md5',
- 'content-range',
- 'content-type',
- 'connection',
- 'date',
- 'expect',
- 'max-forwards',
- 'pragma',
- 'referer',
- 'te',
- 'transfer-encoding',
- 'user-agent',
- 'via'
-]
-
-var defaultProxyHeaderExclusiveList = [
- 'proxy-authorization'
-]
-
function filterForNonReserved(reserved, options) {
// Filter out properties that are not reserved.
// Reserved values are passed in at call site.
@@ -96,103 +67,6 @@ function filterOutReservedFunctions(reserved, options) {
}
-function constructProxyHost(uriObject) {
- var port = uriObject.port
- , protocol = uriObject.protocol
- , proxyHost = uriObject.hostname + ':'
-
- if (port) {
- proxyHost += port
- } else if (protocol === 'https:') {
- proxyHost += '443'
- } else {
- proxyHost += '80'
- }
-
- return proxyHost
-}
-
-function constructProxyHeaderWhiteList(headers, proxyHeaderWhiteList) {
- var whiteList = proxyHeaderWhiteList
- .reduce(function (set, header) {
- set[header.toLowerCase()] = true
- return set
- }, {})
-
- return Object.keys(headers)
- .filter(function (header) {
- return whiteList[header.toLowerCase()]
- })
- .reduce(function (set, header) {
- set[header] = headers[header]
- return set
- }, {})
-}
-
-function getTunnelOption(self, options) {
- // Tunnel HTTPS by default, or if a previous request in the redirect chain
- // was tunneled. Allow the user to override this setting.
-
- // If self.tunnel is already set (because this is a redirect), use the
- // existing value.
- if (typeof self.tunnel !== 'undefined') {
- return self.tunnel
- }
-
- // If options.tunnel is set (the user specified a value), use it.
- if (typeof options.tunnel !== 'undefined') {
- return options.tunnel
- }
-
- // If the destination is HTTPS, tunnel.
- if (self.uri.protocol === 'https:') {
- return true
- }
-
- // Otherwise, leave tunnel unset, because if a later request in the redirect
- // chain is HTTPS then that request (and any subsequent ones) should be
- // tunneled.
- return undefined
-}
-
-function constructTunnelOptions(request) {
- var proxy = request.proxy
-
- var tunnelOptions = {
- proxy : {
- host : proxy.hostname,
- port : +proxy.port,
- proxyAuth : proxy.auth,
- headers : request.proxyHeaders
- },
- headers : request.headers,
- ca : request.ca,
- cert : request.cert,
- key : request.key,
- passphrase : request.passphrase,
- pfx : request.pfx,
- ciphers : request.ciphers,
- rejectUnauthorized : request.rejectUnauthorized,
- secureOptions : request.secureOptions,
- secureProtocol : request.secureProtocol
- }
-
- return tunnelOptions
-}
-
-function constructTunnelFnName(uri, proxy) {
- var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http')
- var proxyProtocol = (proxy.protocol === 'https:' ? 'Https' : 'Http')
- return [uriProtocol, proxyProtocol].join('Over')
-}
-
-function getTunnelFn(request) {
- var uri = request.uri
- var proxy = request.proxy
- var tunnelFnName = constructTunnelFnName(uri, proxy)
- return tunnel[tunnelFnName]
-}
-
// Function for properly handling a connection error
function connectionErrorHandler(error) {
var socket = this
@@ -262,6 +136,7 @@ function Request (options) {
self._oauth = new OAuth(self)
self._multipart = new Multipart(self)
self._redirect = new Redirect(self)
+ self._tunnel = new Tunnel(self)
self.init(options)
}
@@ -276,37 +151,6 @@ function debug() {
}
Request.prototype.debug = debug
-Request.prototype.setupTunnel = function () {
- var self = this
-
- if (typeof self.proxy === 'string') {
- self.proxy = url.parse(self.proxy)
- }
-
- if (!self.proxy || !self.tunnel) {
- return false
- }
-
- // Setup Proxy Header Exclusive List and White List
- self.proxyHeaderExclusiveList = self.proxyHeaderExclusiveList || []
- self.proxyHeaderWhiteList = self.proxyHeaderWhiteList || defaultProxyHeaderWhiteList
- var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList)
- var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList)
-
- // Setup Proxy Headers and Proxy Headers Host
- // Only send the Proxy White Listed Header names
- self.proxyHeaders = constructProxyHeaderWhiteList(self.headers, proxyHeaderWhiteList)
- self.proxyHeaders.host = constructProxyHost(self.uri)
- proxyHeaderExclusiveList.forEach(self.removeHeader, self)
-
- // Set Agent from Tunnel Data
- var tunnelFn = getTunnelFn(self)
- var tunnelOptions = constructTunnelOptions(self)
- self.agent = tunnelFn(tunnelOptions)
-
- return true
-}
-
Request.prototype.init = function (options) {
// init() contains all the code to setup the request object.
// the actual outgoing request is not started until start() is called
@@ -450,9 +294,9 @@ Request.prototype.init = function (options) {
self.proxy = getProxyFromURI(self.uri)
}
- self.tunnel = getTunnelOption(self, options)
+ self.tunnel = self._tunnel.isEnabled(options)
if (self.proxy) {
- self.setupTunnel()
+ self._tunnel.setup(options)
}
self._redirect.onRequest(options)
@@ -587,28 +431,24 @@ Request.prototype.init = function (options) {
self.elapsedTime = self.elapsedTime || 0
}
- if (self.body) {
- var length = 0
- if (!Buffer.isBuffer(self.body)) {
- if (Array.isArray(self.body)) {
- for (var i = 0; i < self.body.length; i++) {
- length += self.body[i].length
- }
- } else {
- self.body = new Buffer(self.body)
- length = self.body.length
- }
- } else {
- length = self.body.length
+ function setContentLength () {
+ if (!Buffer.isBuffer(self.body) && !Array.isArray(self.body)) {
+ self.body = new Buffer(self.body)
}
- if (length) {
- if (!self.hasHeader('content-length')) {
+ if (!self.hasHeader('content-length')) {
+ var length = (Array.isArray(self.body))
+ ? self.body.reduce(function (a, b) {return a + b.length}, 0)
+ : self.body.length
+ if (length) {
self.setHeader('content-length', length)
+ } else {
+ self.emit('error', new Error('Argument error, options.body.'))
}
- } else {
- self.emit('error', new Error('Argument error, options.body.'))
}
}
+ if (self.body) {
+ setContentLength()
+ }
if (options.oauth) {
self.oauth(options.oauth)
@@ -638,7 +478,16 @@ Request.prototype.init = function (options) {
if (options.agentClass) {
self.agentClass = options.agentClass
} else if (options.forever) {
- self.agentClass = protocol === 'http:' ? ForeverAgent : ForeverAgent.SSL
+ var v = version()
+ // use ForeverAgent in node 0.10- only
+ if (v.major === 0 && v.minor <= 10) {
+ self.agentClass = protocol === 'http:' ? ForeverAgent : ForeverAgent.SSL
+ } else {
+ self.agent = new self.httpModule.Agent({
+ keepAlive: true,
+ maxSockets: (options.pool && options.pool.maxSockets) || Infinity
+ })
+ }
} else {
self.agentClass = self.httpModule.Agent
}
@@ -698,6 +547,7 @@ Request.prototype.init = function (options) {
self._multipart.body.pipe(self)
}
if (self.body) {
+ setContentLength()
if (Array.isArray(self.body)) {
self.body.forEach(function (part) {
self.write(part)
@@ -723,7 +573,7 @@ Request.prototype.init = function (options) {
if (self._form && !self.hasHeader('content-length')) {
// Before ending the request, we had to compute the length of the whole form, asyncly
- self.setHeader(self._form.getHeaders())
+ self.setHeader(self._form.getHeaders(), true)
self._form.getLength(function (err, length) {
if (!err) {
self.setHeader('content-length', length)
@@ -750,7 +600,7 @@ Request.prototype._updateProtocol = function () {
// previously was doing http, now doing https
// if it's https, then we might need to tunnel now.
if (self.proxy) {
- if (self.setupTunnel()) {
+ if (self._tunnel.setup()) {
return
}
}
@@ -1302,7 +1152,9 @@ Request.prototype.qs = function (q, clobber) {
Request.prototype.form = function (form) {
var self = this
if (form) {
- self.setHeader('content-type', 'application/x-www-form-urlencoded')
+ if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) {
+ self.setHeader('content-type', 'application/x-www-form-urlencoded')
+ }
self.body = (typeof form === 'string')
? self._qs.rfc3986(form.toString('utf8'))
: self._qs.stringify(form).toString('utf8')
@@ -1545,10 +1397,10 @@ Request.prototype.destroy = function () {
}
Request.defaultProxyHeaderWhiteList =
- defaultProxyHeaderWhiteList.slice()
+ Tunnel.defaultProxyHeaderWhiteList.slice()
Request.defaultProxyHeaderExclusiveList =
- defaultProxyHeaderExclusiveList.slice()
+ Tunnel.defaultProxyHeaderExclusiveList.slice()
// Exports
diff --git a/deps/npm/node_modules/rimraf/README.md b/deps/npm/node_modules/rimraf/README.md
index 58e7ac303..18659f67f 100644
--- a/deps/npm/node_modules/rimraf/README.md
+++ b/deps/npm/node_modules/rimraf/README.md
@@ -1,3 +1,5 @@
+[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies)
+
The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node.
Install with `npm install rimraf`, or just drop rimraf.js somewhere.
@@ -28,7 +30,7 @@ the async API. It's better.
## CLI
If installed with `npm install rimraf -g` it can be used as a global
-command `rimraf <path>` which is useful for cross platform support.
+command `rimraf <path> [<path> ...]` which is useful for cross platform support.
## mkdirp
diff --git a/deps/npm/node_modules/rimraf/bin.js b/deps/npm/node_modules/rimraf/bin.js
index 29bfa8a63..1bd5a0d16 100755
--- a/deps/npm/node_modules/rimraf/bin.js
+++ b/deps/npm/node_modules/rimraf/bin.js
@@ -18,7 +18,7 @@ var args = process.argv.slice(2).filter(function(arg) {
if (help || args.length === 0) {
// If they didn't ask for help, then this is not a "success"
var log = help ? console.log : console.error
- log('Usage: rimraf <path>')
+ log('Usage: rimraf <path> [<path> ...]')
log('')
log(' Deletes all files and folders at "path" recursively.')
log('')
@@ -26,8 +26,15 @@ if (help || args.length === 0) {
log('')
log(' -h, --help Display this usage info')
process.exit(help ? 0 : 1)
-} else {
- args.forEach(function(arg) {
- rimraf.sync(arg)
+} else
+ go(0)
+
+function go (n) {
+ if (n >= args.length)
+ return
+ rimraf(args[n], function (er) {
+ if (er)
+ throw er
+ go(n+1)
})
}
diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/README.md b/deps/npm/node_modules/rimraf/node_modules/glob/README.md
deleted file mode 100644
index 258257ecb..000000000
--- a/deps/npm/node_modules/rimraf/node_modules/glob/README.md
+++ /dev/null
@@ -1,369 +0,0 @@
-[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Dependency Status](https://david-dm.org/isaacs/node-glob.svg)](https://david-dm.org/isaacs/node-glob) [![devDependency Status](https://david-dm.org/isaacs/node-glob/dev-status.svg)](https://david-dm.org/isaacs/node-glob#info=devDependencies) [![optionalDependency Status](https://david-dm.org/isaacs/node-glob/optional-status.svg)](https://david-dm.org/isaacs/node-glob#info=optionalDependencies)
-
-# Glob
-
-Match files using the patterns the shell uses, like stars and stuff.
-
-This is a glob implementation in JavaScript. It uses the `minimatch`
-library to do its matching.
-
-![](oh-my-glob.gif)
-
-## Usage
-
-```javascript
-var glob = require("glob")
-
-// options is optional
-glob("**/*.js", options, function (er, files) {
- // files is an array of filenames.
- // If the `nonull` option is set, and nothing
- // was found, then files is ["**/*.js"]
- // er is an error object or null.
-})
-```
-
-## Glob Primer
-
-"Globs" are the patterns you type when you do stuff like `ls *.js` on
-the command line, or put `build/*` in a `.gitignore` file.
-
-Before parsing the path part patterns, braced sections are expanded
-into a set. Braced sections start with `{` and end with `}`, with any
-number of comma-delimited sections within. Braced sections may contain
-slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
-
-The following characters have special magic meaning when used in a
-path portion:
-
-* `*` Matches 0 or more characters in a single path portion
-* `?` Matches 1 character
-* `[...]` Matches a range of characters, similar to a RegExp range.
- If the first character of the range is `!` or `^` then it matches
- any character not in the range.
-* `!(pattern|pattern|pattern)` Matches anything that does not match
- any of the patterns provided.
-* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
- patterns provided.
-* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
- patterns provided.
-* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
-* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
- provided
-* `**` If a "globstar" is alone in a path portion, then it matches
- zero or more directories and subdirectories searching for matches.
- It does not crawl symlinked directories.
-
-### Dots
-
-If a file or directory path portion has a `.` as the first character,
-then it will not match any glob pattern unless that pattern's
-corresponding path part also has a `.` as its first character.
-
-For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
-However the pattern `a/*/c` would not, because `*` does not start with
-a dot character.
-
-You can make glob treat dots as normal characters by setting
-`dot:true` in the options.
-
-### Basename Matching
-
-If you set `matchBase:true` in the options, and the pattern has no
-slashes in it, then it will seek for any file anywhere in the tree
-with a matching basename. For example, `*.js` would match
-`test/simple/basic.js`.
-
-### Negation
-
-The intent for negation would be for a pattern starting with `!` to
-match everything that *doesn't* match the supplied pattern. However,
-the implementation is weird, and for the time being, this should be
-avoided. The behavior will change or be deprecated in version 5.
-
-### Empty Sets
-
-If no matching files are found, then an empty array is returned. This
-differs from the shell, where the pattern itself is returned. For
-example:
-
- $ echo a*s*d*f
- a*s*d*f
-
-To get the bash-style behavior, set the `nonull:true` in the options.
-
-### See Also:
-
-* `man sh`
-* `man bash` (Search for "Pattern Matching")
-* `man 3 fnmatch`
-* `man 5 gitignore`
-* [minimatch documentation](https://github.com/isaacs/minimatch)
-
-## glob.hasMagic(pattern, [options])
-
-Returns `true` if there are any special characters in the pattern, and
-`false` otherwise.
-
-Note that the options affect the results. If `noext:true` is set in
-the options object, then `+(a|b)` will not be considered a magic
-pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
-then that is considered magical, unless `nobrace:true` is set in the
-options.
-
-## glob(pattern, [options], cb)
-
-* `pattern` {String} Pattern to be matched
-* `options` {Object}
-* `cb` {Function}
- * `err` {Error | null}
- * `matches` {Array<String>} filenames found matching the pattern
-
-Perform an asynchronous glob search.
-
-## glob.sync(pattern, [options])
-
-* `pattern` {String} Pattern to be matched
-* `options` {Object}
-* return: {Array<String>} filenames found matching the pattern
-
-Perform a synchronous glob search.
-
-## Class: glob.Glob
-
-Create a Glob object by instantiating the `glob.Glob` class.
-
-```javascript
-var Glob = require("glob").Glob
-var mg = new Glob(pattern, options, cb)
-```
-
-It's an EventEmitter, and starts walking the filesystem to find matches
-immediately.
-
-### new glob.Glob(pattern, [options], [cb])
-
-* `pattern` {String} pattern to search for
-* `options` {Object}
-* `cb` {Function} Called when an error occurs, or matches are found
- * `err` {Error | null}
- * `matches` {Array<String>} filenames found matching the pattern
-
-Note that if the `sync` flag is set in the options, then matches will
-be immediately available on the `g.found` member.
-
-### Properties
-
-* `minimatch` The minimatch object that the glob uses.
-* `options` The options object passed in.
-* `aborted` Boolean which is set to true when calling `abort()`. There
- is no way at this time to continue a glob search after aborting, but
- you can re-use the statCache to avoid having to duplicate syscalls.
-* `statCache` Collection of all the stat results the glob search
- performed.
-* `cache` Convenience object. Each field has the following possible
- values:
- * `false` - Path does not exist
- * `true` - Path exists
- *   `'DIR'` - Path exists, and is a directory
- *   `'FILE'` - Path exists, and is not a directory
- * `[file, entries, ...]` - Path exists, is a directory, and the
- array value is the results of `fs.readdir`
-* `statCache` Cache of `fs.stat` results, to prevent statting the same
- path multiple times.
-* `symlinks` A record of which paths are symbolic links, which is
- relevant in resolving `**` patterns.
-* `realpathCache` An optional object which is passed to `fs.realpath`
- to minimize unnecessary syscalls. It is stored on the instantiated
- Glob object, and may be re-used.
-
-### Events
-
-* `end` When the matching is finished, this is emitted with all the
- matches found. If the `nonull` option is set, and no match was found,
- then the `matches` list contains the original pattern. The matches
- are sorted, unless the `nosort` flag is set.
- * `match` Every time a match is found, this is emitted with the matched filename.
-* `error` Emitted when an unexpected error is encountered, or whenever
- any fs error occurs if `options.strict` is set.
-* `abort` When `abort()` is called, this event is raised.
-
-### Methods
-
-* `pause` Temporarily stop the search
-* `resume` Resume the search
-* `abort` Stop the search forever
-
-### Options
-
-All the options that can be passed to Minimatch can also be passed to
-Glob to change pattern matching behavior. Also, some have been added,
-or have glob-specific ramifications.
-
-All options are false by default, unless otherwise noted.
-
-All options are added to the Glob object, as well.
-
-If you are running many `glob` operations, you can pass a Glob object
-as the `options` argument to a subsequent operation to shortcut some
-`stat` and `readdir` calls. At the very least, you may pass in shared
-`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
-parallel glob operations will be sped up by sharing information about
-the filesystem.
-
-* `cwd` The current working directory in which to search. Defaults
- to `process.cwd()`.
-* `root` The place where patterns starting with `/` will be mounted
- onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
- systems, and `C:\` or some such on Windows.)
-* `dot` Include `.dot` files in normal matches and `globstar` matches.
- Note that an explicit dot in a portion of the pattern will always
- match dot files.
-* `nomount` By default, a pattern starting with a forward-slash will be
- "mounted" onto the root setting, so that a valid filesystem path is
- returned. Set this flag to disable that behavior.
-* `mark` Add a `/` character to directory matches. Note that this
- requires additional stat calls.
-* `nosort` Don't sort the results.
-* `stat` Set to true to stat *all* results. This reduces performance
- somewhat, and is completely unnecessary, unless `readdir` is presumed
- to be an untrustworthy indicator of file existence.
-* `silent` When an unusual error is encountered when attempting to
- read a directory, a warning will be printed to stderr. Set the
- `silent` option to true to suppress these warnings.
-* `strict` When an unusual error is encountered when attempting to
- read a directory, the process will just continue on in search of
- other matches. Set the `strict` option to raise an error in these
- cases.
-* `cache` See `cache` property above. Pass in a previously generated
- cache object to save some fs calls.
-* `statCache` A cache of results of filesystem information, to prevent
- unnecessary stat calls. While it should not normally be necessary
- to set this, you may pass the statCache from one glob() call to the
- options object of another, if you know that the filesystem will not
- change between calls. (See "Race Conditions" below.)
-* `symlinks` A cache of known symbolic links. You may pass in a
- previously generated `symlinks` object to save `lstat` calls when
- resolving `**` matches.
-* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
-* `nounique` In some cases, brace-expanded patterns can result in the
- same file showing up multiple times in the result set. By default,
- this implementation prevents duplicates in the result set. Set this
- flag to disable that behavior.
-* `nonull` Set to never return an empty set, instead returning a set
- containing the pattern itself. This is the default in glob(3).
-* `debug` Set to enable debug logging in minimatch and glob.
-* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
-* `noglobstar` Do not match `**` against multiple filenames. (Ie,
- treat it as a normal `*` instead.)
-* `noext` Do not match `+(a|b)` "extglob" patterns.
-* `nocase` Perform a case-insensitive match. Note: on
- case-insensitive filesystems, non-magic patterns will match by
- default, since `stat` and `readdir` will not raise errors.
-* `matchBase` Perform a basename-only match if the pattern does not
- contain any slash characters. That is, `*.js` would be treated as
- equivalent to `**/*.js`, matching all js files in all directories.
-* `nonegate` Suppress `negate` behavior. (See below.)
-* `nocomment` Suppress `comment` behavior. (See below.)
-* `nonull` Return the pattern when no matches are found.
-* `nodir` Do not match directories, only files. (Note: to match
- *only* directories, simply put a `/` at the end of the pattern.)
-* `ignore` Add a pattern or an array of patterns to exclude matches.
-* `follow` Follow symlinked directories when expanding `**` patterns.
- Note that this can result in a lot of duplicate references in the
- presence of cyclic links.
-* `realpath` Set to true to call `fs.realpath` on all of the results.
- In the case of a symlink that cannot be resolved, the full absolute
- path to the matched entry is returned (though it will usually be a
- broken symlink)
-
-## Comparisons to other fnmatch/glob implementations
-
-While strict compliance with the existing standards is a worthwhile
-goal, some discrepancies exist between node-glob and other
-implementations, and are intentional.
-
-If the pattern starts with a `!` character, then it is negated. Set the
-`nonegate` flag to suppress this behavior, and treat leading `!`
-characters normally. This is perhaps relevant if you wish to start the
-pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
-characters at the start of a pattern will negate the pattern multiple
-times.
-
-If a pattern starts with `#`, then it is treated as a comment, and
-will not match anything. Use `\#` to match a literal `#` at the
-start of a line, or set the `nocomment` flag to suppress this behavior.
-
-The double-star character `**` is supported by default, unless the
-`noglobstar` flag is set. This is supported in the manner of bsdglob
-and bash 4.3, where `**` only has special significance if it is the only
-thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
-`a/**b` will not.
-
-Note that symlinked directories are not crawled as part of a `**`,
-though their contents may match against subsequent portions of the
-pattern. This prevents infinite loops and duplicates and the like.
-
-If an escaped pattern has no matches, and the `nonull` flag is set,
-then glob returns the pattern as-provided, rather than
-interpreting the character escapes. For example,
-`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
-`"*a?"`. This is akin to setting the `nullglob` option in bash, except
-that it does not resolve escaped pattern characters.
-
-If brace expansion is not disabled, then it is performed before any
-other interpretation of the glob pattern. Thus, a pattern like
-`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
-**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
-checked for validity. Since those two are valid, matching proceeds.
-
-## Windows
-
-**Please only use forward-slashes in glob expressions.**
-
-Though windows uses either `/` or `\` as its path separator, only `/`
-characters are used by this glob implementation. You must use
-forward-slashes **only** in glob expressions. Back-slashes will always
-be interpreted as escape characters, not path separators.
-
-Results from absolute patterns such as `/foo/*` are mounted onto the
-root setting using `path.join`. On windows, this will by default result
-in `/foo/*` matching `C:\foo\bar.txt`.
-
-## Race Conditions
-
-Glob searching, by its very nature, is susceptible to race conditions,
-since it relies on directory walking and such.
-
-As a result, it is possible that a file that exists when glob looks for
-it may have been deleted or modified by the time it returns the result.
-
-As part of its internal implementation, this program caches all stat
-and readdir calls that it makes, in order to cut down on system
-overhead. However, this also makes it even more susceptible to races,
-especially if the cache or statCache objects are reused between glob
-calls.
-
-Users are thus advised not to use a glob result as a guarantee of
-filesystem state in the face of rapid changes. For the vast majority
-of operations, this is never a problem.
-
-## Contributing
-
-Any change to behavior (including bugfixes) must come with a test.
-
-Patches that fail tests or reduce performance will be rejected.
-
-```
-# to run tests
-npm test
-
-# to re-generate test fixtures
-npm run test-regen
-
-# to benchmark against bash/zsh
-npm run bench
-
-# to profile javascript
-npm run prof
-```
diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/common.js b/deps/npm/node_modules/rimraf/node_modules/glob/common.js
deleted file mode 100644
index cd7c82448..000000000
--- a/deps/npm/node_modules/rimraf/node_modules/glob/common.js
+++ /dev/null
@@ -1,237 +0,0 @@
-exports.alphasort = alphasort
-exports.alphasorti = alphasorti
-exports.isAbsolute = process.platform === "win32" ? absWin : absUnix
-exports.setopts = setopts
-exports.ownProp = ownProp
-exports.makeAbs = makeAbs
-exports.finish = finish
-exports.mark = mark
-exports.isIgnored = isIgnored
-exports.childrenIgnored = childrenIgnored
-
-function ownProp (obj, field) {
- return Object.prototype.hasOwnProperty.call(obj, field)
-}
-
-var path = require("path")
-var minimatch = require("minimatch")
-var Minimatch = minimatch.Minimatch
-
-function absWin (p) {
- if (absUnix(p)) return true
- // pull off the device/UNC bit from a windows path.
- // from node's lib/path.js
- var splitDeviceRe =
- /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/
- var result = splitDeviceRe.exec(p)
- var device = result[1] || ''
- var isUnc = device && device.charAt(1) !== ':'
- var isAbsolute = !!result[2] || isUnc // UNC paths are always absolute
-
- return isAbsolute
-}
-
-function absUnix (p) {
- return p.charAt(0) === "/" || p === ""
-}
-
-function alphasorti (a, b) {
- return a.toLowerCase().localeCompare(b.toLowerCase())
-}
-
-function alphasort (a, b) {
- return a.localeCompare(b)
-}
-
-function setupIgnores (self, options) {
- self.ignore = options.ignore || []
-
- if (!Array.isArray(self.ignore))
- self.ignore = [self.ignore]
-
- if (self.ignore.length) {
- self.ignore = self.ignore.map(ignoreMap)
- }
-}
-
-function ignoreMap (pattern) {
- var gmatcher = null
- if (pattern.slice(-3) === '/**') {
- var gpattern = pattern.replace(/(\/\*\*)+$/, '')
- gmatcher = new Minimatch(gpattern, { nonegate: true })
- }
-
- return {
- matcher: new Minimatch(pattern, { nonegate: true }),
- gmatcher: gmatcher
- }
-}
-
-function setopts (self, pattern, options) {
- if (!options)
- options = {}
-
- // base-matching: just use globstar for that.
- if (options.matchBase && -1 === pattern.indexOf("/")) {
- if (options.noglobstar) {
- throw new Error("base matching requires globstar")
- }
- pattern = "**/" + pattern
- }
-
- self.pattern = pattern
- self.strict = options.strict !== false
- self.realpath = !!options.realpath
- self.realpathCache = options.realpathCache || Object.create(null)
- self.follow = !!options.follow
- self.dot = !!options.dot
- self.mark = !!options.mark
- self.nodir = !!options.nodir
- if (self.nodir)
- self.mark = true
- self.sync = !!options.sync
- self.nounique = !!options.nounique
- self.nonull = !!options.nonull
- self.nosort = !!options.nosort
- self.nocase = !!options.nocase
- self.stat = !!options.stat
- self.noprocess = !!options.noprocess
-
- self.maxLength = options.maxLength || Infinity
- self.cache = options.cache || Object.create(null)
- self.statCache = options.statCache || Object.create(null)
- self.symlinks = options.symlinks || Object.create(null)
-
- setupIgnores(self, options)
-
- self.changedCwd = false
- var cwd = process.cwd()
- if (!ownProp(options, "cwd"))
- self.cwd = cwd
- else {
- self.cwd = options.cwd
- self.changedCwd = path.resolve(options.cwd) !== cwd
- }
-
- self.root = options.root || path.resolve(self.cwd, "/")
- self.root = path.resolve(self.root)
- if (process.platform === "win32")
- self.root = self.root.replace(/\\/g, "/")
-
- self.nomount = !!options.nomount
-
- self.minimatch = new Minimatch(pattern, options)
- self.options = self.minimatch.options
-}
-
-function finish (self) {
- var nou = self.nounique
- var all = nou ? [] : Object.create(null)
-
- for (var i = 0, l = self.matches.length; i < l; i ++) {
- var matches = self.matches[i]
- if (!matches || Object.keys(matches).length === 0) {
- if (self.nonull) {
- // do like the shell, and spit out the literal glob
- var literal = self.minimatch.globSet[i]
- if (nou)
- all.push(literal)
- else
- all[literal] = true
- }
- } else {
- // had matches
- var m = Object.keys(matches)
- if (nou)
- all.push.apply(all, m)
- else
- m.forEach(function (m) {
- all[m] = true
- })
- }
- }
-
- if (!nou)
- all = Object.keys(all)
-
- if (!self.nosort)
- all = all.sort(self.nocase ? alphasorti : alphasort)
-
- // at *some* point we statted all of these
- if (self.mark) {
- for (var i = 0; i < all.length; i++) {
- all[i] = self._mark(all[i])
- }
- if (self.nodir) {
- all = all.filter(function (e) {
- return !(/\/$/.test(e))
- })
- }
- }
-
- if (self.ignore.length)
- all = all.filter(function(m) {
- return !isIgnored(self, m)
- })
-
- self.found = all
-}
-
-function mark (self, p) {
- var abs = makeAbs(self, p)
- var c = self.cache[abs]
- var m = p
- if (c) {
- var isDir = c === 'DIR' || Array.isArray(c)
- var slash = p.slice(-1) === '/'
-
- if (isDir && !slash)
- m += '/'
- else if (!isDir && slash)
- m = m.slice(0, -1)
-
- if (m !== p) {
- var mabs = makeAbs(self, m)
- self.statCache[mabs] = self.statCache[abs]
- self.cache[mabs] = self.cache[abs]
- }
- }
-
- return m
-}
-
-// lotta situps...
-function makeAbs (self, f) {
- var abs = f
- if (f.charAt(0) === '/') {
- abs = path.join(self.root, f)
- } else if (exports.isAbsolute(f)) {
- abs = f
- } else if (self.changedCwd) {
- abs = path.resolve(self.cwd, f)
- } else if (self.realpath) {
- abs = path.resolve(f)
- }
- return abs
-}
-
-
-// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
-// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with its contents
-function isIgnored (self, path) {
- if (!self.ignore.length)
- return false
-
- return self.ignore.some(function(item) {
- return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
- })
-}
-
-function childrenIgnored (self, path) {
- if (!self.ignore.length)
- return false
-
- return self.ignore.some(function(item) {
- return !!(item.gmatcher && item.gmatcher.match(path))
- })
-}
diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/glob.js b/deps/npm/node_modules/rimraf/node_modules/glob/glob.js
deleted file mode 100644
index eac0693cc..000000000
--- a/deps/npm/node_modules/rimraf/node_modules/glob/glob.js
+++ /dev/null
@@ -1,740 +0,0 @@
-// Approach:
-//
-// 1. Get the minimatch set
-// 2. For each pattern in the set, PROCESS(pattern, false)
-// 3. Store matches per-set, then uniq them
-//
-// PROCESS(pattern, inGlobStar)
-// Get the first [n] items from pattern that are all strings
-// Join these together. This is PREFIX.
-// If there is no more remaining, then stat(PREFIX) and
-// add to matches if it succeeds. END.
-//
-// If inGlobStar and PREFIX is symlink and points to dir
-// set ENTRIES = []
-// else readdir(PREFIX) as ENTRIES
-// If fail, END
-//
-// with ENTRIES
-// If pattern[n] is GLOBSTAR
-// // handle the case where the globstar match is empty
-// // by pruning it out, and testing the resulting pattern
-// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
-// // handle other cases.
-// for ENTRY in ENTRIES (not dotfiles)
-// // attach globstar + tail onto the entry
-// // Mark that this entry is a globstar match
-// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
-//
-// else // not globstar
-// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
-// Test ENTRY against pattern[n]
-// If fails, continue
-// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
-//
-// Caveat:
-// Cache all stats and readdirs results to minimize syscall. Since all
-// we ever care about is existence and directory-ness, we can just keep
-// `true` for files, and [children,...] for directories, or `false` for
-// things that don't exist.
-
-module.exports = glob
-
-var fs = require('fs')
-var minimatch = require('minimatch')
-var Minimatch = minimatch.Minimatch
-var inherits = require('inherits')
-var EE = require('events').EventEmitter
-var path = require('path')
-var assert = require('assert')
-var globSync = require('./sync.js')
-var common = require('./common.js')
-var alphasort = common.alphasort
-var alphasorti = common.alphasorti
-var isAbsolute = common.isAbsolute
-var setopts = common.setopts
-var ownProp = common.ownProp
-var inflight = require('inflight')
-var util = require('util')
-var childrenIgnored = common.childrenIgnored
-
-var once = require('once')
-
-function glob (pattern, options, cb) {
- if (typeof options === 'function') cb = options, options = {}
- if (!options) options = {}
-
- if (options.sync) {
- if (cb)
- throw new TypeError('callback provided to sync glob')
- return globSync(pattern, options)
- }
-
- return new Glob(pattern, options, cb)
-}
-
-glob.sync = globSync
-var GlobSync = glob.GlobSync = globSync.GlobSync
-
-// old api surface
-glob.glob = glob
-
-glob.hasMagic = function (pattern, options_) {
- var options = util._extend({}, options_)
- options.noprocess = true
-
- var g = new Glob(pattern, options)
- var set = g.minimatch.set
- if (set.length > 1)
- return true
-
- for (var j = 0; j < set[0].length; j++) {
- if (typeof set[0][j] !== 'string')
- return true
- }
-
- return false
-}
-
-glob.Glob = Glob
-inherits(Glob, EE)
-function Glob (pattern, options, cb) {
- if (typeof options === 'function') {
- cb = options
- options = null
- }
-
- if (options && options.sync) {
- if (cb)
- throw new TypeError('callback provided to sync glob')
- return new GlobSync(pattern, options)
- }
-
- if (!(this instanceof Glob))
- return new Glob(pattern, options, cb)
-
- setopts(this, pattern, options)
- this._didRealPath = false
-
- // process each pattern in the minimatch set
- var n = this.minimatch.set.length
-
- // The matches are stored as {<filename>: true,...} so that
- // duplicates are automagically pruned.
- // Later, we do an Object.keys() on these.
- // Keep them as a list so we can fill in when nonull is set.
- this.matches = new Array(n)
-
- if (typeof cb === 'function') {
- cb = once(cb)
- this.on('error', cb)
- this.on('end', function (matches) {
- cb(null, matches)
- })
- }
-
- var self = this
- var n = this.minimatch.set.length
- this._processing = 0
- this.matches = new Array(n)
-
- this._emitQueue = []
- this._processQueue = []
- this.paused = false
-
- if (this.noprocess)
- return this
-
- if (n === 0)
- return done()
-
- for (var i = 0; i < n; i ++) {
- this._process(this.minimatch.set[i], i, false, done)
- }
-
- function done () {
- --self._processing
- if (self._processing <= 0)
- self._finish()
- }
-}
-
-Glob.prototype._finish = function () {
- assert(this instanceof Glob)
- if (this.aborted)
- return
-
- if (this.realpath && !this._didRealpath)
- return this._realpath()
-
- common.finish(this)
- this.emit('end', this.found)
-}
-
-Glob.prototype._realpath = function () {
- if (this._didRealpath)
- return
-
- this._didRealpath = true
-
- var n = this.matches.length
- if (n === 0)
- return this._finish()
-
- var self = this
- for (var i = 0; i < this.matches.length; i++)
- this._realpathSet(i, next)
-
- function next () {
- if (--n === 0)
- self._finish()
- }
-}
-
-Glob.prototype._realpathSet = function (index, cb) {
- var matchset = this.matches[index]
- if (!matchset)
- return cb()
-
- var found = Object.keys(matchset)
- var self = this
- var n = found.length
-
- if (n === 0)
- return cb()
-
- var set = this.matches[index] = Object.create(null)
- found.forEach(function (p, i) {
- // If there's a problem with the stat, then it means that
- // one or more of the links in the realpath couldn't be
- // resolved. just return the abs value in that case.
- p = self._makeAbs(p)
- fs.realpath(p, self.realpathCache, function (er, real) {
- if (!er)
- set[real] = true
- else if (er.syscall === 'stat')
- set[p] = true
- else
- self.emit('error', er) // srsly wtf right here
-
- if (--n === 0) {
- self.matches[index] = set
- cb()
- }
- })
- })
-}
-
-Glob.prototype._mark = function (p) {
- return common.mark(this, p)
-}
-
-Glob.prototype._makeAbs = function (f) {
- return common.makeAbs(this, f)
-}
-
-Glob.prototype.abort = function () {
- this.aborted = true
- this.emit('abort')
-}
-
-Glob.prototype.pause = function () {
- if (!this.paused) {
- this.paused = true
- this.emit('pause')
- }
-}
-
-Glob.prototype.resume = function () {
- if (this.paused) {
- this.emit('resume')
- this.paused = false
- if (this._emitQueue.length) {
- var eq = this._emitQueue.slice(0)
- this._emitQueue.length = 0
- for (var i = 0; i < eq.length; i ++) {
- var e = eq[i]
- this._emitMatch(e[0], e[1])
- }
- }
- if (this._processQueue.length) {
- var pq = this._processQueue.slice(0)
- this._processQueue.length = 0
- for (var i = 0; i < pq.length; i ++) {
- var p = pq[i]
- this._processing--
- this._process(p[0], p[1], p[2], p[3])
- }
- }
- }
-}
-
-Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
- assert(this instanceof Glob)
- assert(typeof cb === 'function')
-
- if (this.aborted)
- return
-
- this._processing++
- if (this.paused) {
- this._processQueue.push([pattern, index, inGlobStar, cb])
- return
- }
-
- //console.error('PROCESS %d', this._processing, pattern)
-
- // Get the first [n] parts of pattern that are all strings.
- var n = 0
- while (typeof pattern[n] === 'string') {
- n ++
- }
- // now n is the index of the first one that is *not* a string.
-
- // see if there's anything else
- var prefix
- switch (n) {
- // if not, then this is rather simple
- case pattern.length:
- this._processSimple(pattern.join('/'), index, cb)
- return
-
- case 0:
- // pattern *starts* with some non-trivial item.
- // going to readdir(cwd), but not include the prefix in matches.
- prefix = null
- break
-
- default:
- // pattern has some string bits in the front.
- // whatever it starts with, whether that's 'absolute' like /foo/bar,
- // or 'relative' like '../baz'
- prefix = pattern.slice(0, n).join('/')
- break
- }
-
- var remain = pattern.slice(n)
-
- // get the list of entries.
- var read
- if (prefix === null)
- read = '.'
- else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
- if (!prefix || !isAbsolute(prefix))
- prefix = '/' + prefix
- read = prefix
- } else
- read = prefix
-
- var abs = this._makeAbs(read)
-
- //if ignored, skip _processing
- if (childrenIgnored(this, read))
- return cb()
-
- var isGlobStar = remain[0] === minimatch.GLOBSTAR
- if (isGlobStar)
- this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
- else
- this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
-}
-
-Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
- var self = this
- this._readdir(abs, inGlobStar, function (er, entries) {
- return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
- })
-}
-
-Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
-
- // if the abs isn't a dir, then nothing can match!
- if (!entries)
- return cb()
-
- // It will only match dot entries if it starts with a dot, or if
- // dot is set. Stuff like @(.foo|.bar) isn't allowed.
- var pn = remain[0]
- var negate = !!this.minimatch.negate
- var rawGlob = pn._glob
- var dotOk = this.dot || rawGlob.charAt(0) === '.'
-
- var matchedEntries = []
- for (var i = 0; i < entries.length; i++) {
- var e = entries[i]
- if (e.charAt(0) !== '.' || dotOk) {
- var m
- if (negate && !prefix) {
- m = !e.match(pn)
- } else {
- m = e.match(pn)
- }
- if (m)
- matchedEntries.push(e)
- }
- }
-
- //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
-
- var len = matchedEntries.length
- // If there are no matched entries, then nothing matches.
- if (len === 0)
- return cb()
-
- // if this is the last remaining pattern bit, then no need for
- // an additional stat *unless* the user has specified mark or
- // stat explicitly. We know they exist, since readdir returned
- // them.
-
- if (remain.length === 1 && !this.mark && !this.stat) {
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- if (prefix) {
- if (prefix !== '/')
- e = prefix + '/' + e
- else
- e = prefix + e
- }
-
- if (e.charAt(0) === '/' && !this.nomount) {
- e = path.join(this.root, e)
- }
- this._emitMatch(index, e)
- }
- // This was the last one, and no stats were needed
- return cb()
- }
-
- // now test all matched entries as stand-ins for that part
- // of the pattern.
- remain.shift()
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- var newPattern
- if (prefix) {
- if (prefix !== '/')
- e = prefix + '/' + e
- else
- e = prefix + e
- }
- this._process([e].concat(remain), index, inGlobStar, cb)
- }
- cb()
-}
-
-Glob.prototype._emitMatch = function (index, e) {
- if (this.aborted)
- return
-
- if (this.matches[index][e])
- return
-
- if (this.paused) {
- this._emitQueue.push([index, e])
- return
- }
-
- var abs = this._makeAbs(e)
-
- if (this.nodir) {
- var c = this.cache[abs]
- if (c === 'DIR' || Array.isArray(c))
- return
- }
-
- if (this.mark)
- e = this._mark(e)
-
- this.matches[index][e] = true
-
- var st = this.statCache[abs]
- if (st)
- this.emit('stat', e, st)
-
- this.emit('match', e)
-}
-
-Glob.prototype._readdirInGlobStar = function (abs, cb) {
- if (this.aborted)
- return
-
- // follow all symlinked directories forever
- // just proceed as if this is a non-globstar situation
- if (this.follow)
- return this._readdir(abs, false, cb)
-
- var lstatkey = 'lstat\0' + abs
- var self = this
- var lstatcb = inflight(lstatkey, lstatcb_)
-
- if (lstatcb)
- fs.lstat(abs, lstatcb)
-
- function lstatcb_ (er, lstat) {
- if (er)
- return cb()
-
- var isSym = lstat.isSymbolicLink()
- self.symlinks[abs] = isSym
-
- // If it's not a symlink or a dir, then it's definitely a regular file.
- // don't bother doing a readdir in that case.
- if (!isSym && !lstat.isDirectory()) {
- self.cache[abs] = 'FILE'
- cb()
- } else
- self._readdir(abs, false, cb)
- }
-}
-
-Glob.prototype._readdir = function (abs, inGlobStar, cb) {
- if (this.aborted)
- return
-
- cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
- if (!cb)
- return
-
- //console.error('RD %j %j', +inGlobStar, abs)
- if (inGlobStar && !ownProp(this.symlinks, abs))
- return this._readdirInGlobStar(abs, cb)
-
- if (ownProp(this.cache, abs)) {
- var c = this.cache[abs]
- if (!c || c === 'FILE')
- return cb()
-
- if (Array.isArray(c))
- return cb(null, c)
- }
-
- var self = this
- fs.readdir(abs, readdirCb(this, abs, cb))
-}
-
-function readdirCb (self, abs, cb) {
- return function (er, entries) {
- if (er)
- self._readdirError(abs, er, cb)
- else
- self._readdirEntries(abs, entries, cb)
- }
-}
-
-Glob.prototype._readdirEntries = function (abs, entries, cb) {
- if (this.aborted)
- return
-
- // if we haven't asked to stat everything, then just
- // assume that everything in there exists, so we can avoid
- // having to stat it a second time.
- if (!this.mark && !this.stat) {
- for (var i = 0; i < entries.length; i ++) {
- var e = entries[i]
- if (abs === '/')
- e = abs + e
- else
- e = abs + '/' + e
- this.cache[e] = true
- }
- }
-
- this.cache[abs] = entries
- return cb(null, entries)
-}
-
-Glob.prototype._readdirError = function (f, er, cb) {
- if (this.aborted)
- return
-
- // handle errors, and cache the information
- switch (er.code) {
- case 'ENOTDIR': // totally normal. means it *does* exist.
- this.cache[this._makeAbs(f)] = 'FILE'
- break
-
- case 'ENOENT': // not terribly unusual
- case 'ELOOP':
- case 'ENAMETOOLONG':
- case 'UNKNOWN':
- this.cache[this._makeAbs(f)] = false
- break
-
- default: // some unusual error. Treat as failure.
- this.cache[this._makeAbs(f)] = false
- if (this.strict) return this.emit('error', er)
- if (!this.silent) console.error('glob error', er)
- break
- }
- return cb()
-}
-
-Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
- var self = this
- this._readdir(abs, inGlobStar, function (er, entries) {
- self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
- })
-}
-
-
-Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
- //console.error('pgs2', prefix, remain[0], entries)
-
- // no entries means not a dir, so it can never have matches
- // foo.txt/** doesn't match foo.txt
- if (!entries)
- return cb()
-
- // test without the globstar, and with every child both below
- // and replacing the globstar.
- var remainWithoutGlobStar = remain.slice(1)
- var gspref = prefix ? [ prefix ] : []
- var noGlobStar = gspref.concat(remainWithoutGlobStar)
-
- // the noGlobStar pattern exits the inGlobStar state
- this._process(noGlobStar, index, false, cb)
-
- var isSym = this.symlinks[abs]
- var len = entries.length
-
- // If it's a symlink, and we're in a globstar, then stop
- if (isSym && inGlobStar)
- return cb()
-
- for (var i = 0; i < len; i++) {
- var e = entries[i]
- if (e.charAt(0) === '.' && !this.dot)
- continue
-
- // these two cases enter the inGlobStar state
- var instead = gspref.concat(entries[i], remainWithoutGlobStar)
- this._process(instead, index, true, cb)
-
- var below = gspref.concat(entries[i], remain)
- this._process(below, index, true, cb)
- }
-
- cb()
-}
-
-Glob.prototype._processSimple = function (prefix, index, cb) {
- // XXX review this. Shouldn't it be doing the mounting etc
- // before doing stat? kinda weird?
- var self = this
- this._stat(prefix, function (er, exists) {
- self._processSimple2(prefix, index, er, exists, cb)
- })
-}
-Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
-
- //console.error('ps2', prefix, exists)
-
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- // If it doesn't exist, then just mark the lack of results
- if (!exists)
- return cb()
-
- if (prefix && isAbsolute(prefix) && !this.nomount) {
- var trail = /[\/\\]$/.test(prefix)
- if (prefix.charAt(0) === '/') {
- prefix = path.join(this.root, prefix)
- } else {
- prefix = path.resolve(this.root, prefix)
- if (trail)
- prefix += '/'
- }
- }
-
- if (process.platform === 'win32')
- prefix = prefix.replace(/\\/g, '/')
-
- // Mark this as a match
- this._emitMatch(index, prefix)
- cb()
-}
-
-// Returns either 'DIR', 'FILE', or false
-Glob.prototype._stat = function (f, cb) {
- var abs = this._makeAbs(f)
- var needDir = f.slice(-1) === '/'
-
- if (f.length > this.maxLength)
- return cb()
-
- if (!this.stat && ownProp(this.cache, abs)) {
- var c = this.cache[abs]
-
- if (Array.isArray(c))
- c = 'DIR'
-
- // It exists, but maybe not how we need it
- if (!needDir || c === 'DIR')
- return cb(null, c)
-
- if (needDir && c === 'FILE')
- return cb()
-
- // otherwise we have to stat, because maybe c=true
- // if we know it exists, but not what it is.
- }
-
- var exists
- var stat = this.statCache[abs]
- if (stat !== undefined) {
- if (stat === false)
- return cb(null, stat)
- else {
- var type = stat.isDirectory() ? 'DIR' : 'FILE'
- if (needDir && type === 'FILE')
- return cb()
- else
- return cb(null, type, stat)
- }
- }
-
- var self = this
- var statcb = inflight('stat\0' + abs, lstatcb_)
- if (statcb)
- fs.lstat(abs, statcb)
-
- function lstatcb_ (er, lstat) {
- if (lstat && lstat.isSymbolicLink()) {
- // If it's a symlink, then treat it as the target, unless
- // the target does not exist, then treat it as a file.
- return fs.stat(abs, function (er, stat) {
- if (er)
- self._stat2(f, abs, null, lstat, cb)
- else
- self._stat2(f, abs, er, stat, cb)
- })
- } else {
- self._stat2(f, abs, er, lstat, cb)
- }
- }
-}
-
-Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
- if (er) {
- this.statCache[abs] = false
- return cb()
- }
-
- var needDir = f.slice(-1) === '/'
- this.statCache[abs] = stat
-
- if (abs.slice(-1) === '/' && !stat.isDirectory())
- return cb(null, false, stat)
-
- var c = stat.isDirectory() ? 'DIR' : 'FILE'
- this.cache[abs] = this.cache[abs] || c
-
- if (needDir && c !== 'DIR')
- return cb()
-
- return cb(null, c, stat)
-}
diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/package.json b/deps/npm/node_modules/rimraf/node_modules/glob/package.json
deleted file mode 100644
index 7a2cb4c63..000000000
--- a/deps/npm/node_modules/rimraf/node_modules/glob/package.json
+++ /dev/null
@@ -1,72 +0,0 @@
-{
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "name": "glob",
- "description": "a little globber",
- "version": "4.5.3",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/node-glob.git"
- },
- "main": "glob.js",
- "files": [
- "glob.js",
- "sync.js",
- "common.js"
- ],
- "engines": {
- "node": "*"
- },
- "dependencies": {
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^2.0.1",
- "once": "^1.3.0"
- },
- "devDependencies": {
- "mkdirp": "0",
- "rimraf": "^2.2.8",
- "tap": "^0.5.0",
- "tick": "0.0.6"
- },
- "scripts": {
- "prepublish": "npm run benchclean",
- "profclean": "rm -f v8.log profile.txt",
- "test": "npm run profclean && tap test/*.js",
- "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js",
- "bench": "bash benchmark.sh",
- "prof": "bash prof.sh && cat profile.txt",
- "benchclean": "bash benchclean.sh"
- },
- "license": "ISC",
- "gitHead": "a4e461ab59a837eee80a4d8dbdbf5ae1054a646f",
- "bugs": {
- "url": "https://github.com/isaacs/node-glob/issues"
- },
- "homepage": "https://github.com/isaacs/node-glob",
- "_id": "glob@4.5.3",
- "_shasum": "c6cb73d3226c1efef04de3c56d012f03377ee15f",
- "_from": "glob@>=4.4.2 <5.0.0",
- "_npmVersion": "2.7.1",
- "_nodeVersion": "1.4.2",
- "_npmUser": {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "i@izs.me"
- }
- ],
- "dist": {
- "shasum": "c6cb73d3226c1efef04de3c56d012f03377ee15f",
- "tarball": "http://registry.npmjs.org/glob/-/glob-4.5.3.tgz"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/glob/-/glob-4.5.3.tgz",
- "readme": "ERROR: No README data found!"
-}
diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/sync.js b/deps/npm/node_modules/rimraf/node_modules/glob/sync.js
deleted file mode 100644
index f4f5e36d4..000000000
--- a/deps/npm/node_modules/rimraf/node_modules/glob/sync.js
+++ /dev/null
@@ -1,457 +0,0 @@
-module.exports = globSync
-globSync.GlobSync = GlobSync
-
-var fs = require('fs')
-var minimatch = require('minimatch')
-var Minimatch = minimatch.Minimatch
-var Glob = require('./glob.js').Glob
-var util = require('util')
-var path = require('path')
-var assert = require('assert')
-var common = require('./common.js')
-var alphasort = common.alphasort
-var alphasorti = common.alphasorti
-var isAbsolute = common.isAbsolute
-var setopts = common.setopts
-var ownProp = common.ownProp
-var childrenIgnored = common.childrenIgnored
-
-function globSync (pattern, options) {
- if (typeof options === 'function' || arguments.length === 3)
- throw new TypeError('callback provided to sync glob\n'+
- 'See: https://github.com/isaacs/node-glob/issues/167')
-
- return new GlobSync(pattern, options).found
-}
-
-function GlobSync (pattern, options) {
- if (!pattern)
- throw new Error('must provide pattern')
-
- if (typeof options === 'function' || arguments.length === 3)
- throw new TypeError('callback provided to sync glob\n'+
- 'See: https://github.com/isaacs/node-glob/issues/167')
-
- if (!(this instanceof GlobSync))
- return new GlobSync(pattern, options)
-
- setopts(this, pattern, options)
-
- if (this.noprocess)
- return this
-
- var n = this.minimatch.set.length
- this.matches = new Array(n)
- for (var i = 0; i < n; i ++) {
- this._process(this.minimatch.set[i], i, false)
- }
- this._finish()
-}
-
-GlobSync.prototype._finish = function () {
- assert(this instanceof GlobSync)
- if (this.realpath) {
- var self = this
- this.matches.forEach(function (matchset, index) {
- var set = self.matches[index] = Object.create(null)
- for (var p in matchset) {
- try {
- p = self._makeAbs(p)
- var real = fs.realpathSync(p, this.realpathCache)
- set[real] = true
- } catch (er) {
- if (er.syscall === 'stat')
- set[self._makeAbs(p)] = true
- else
- throw er
- }
- }
- })
- }
- common.finish(this)
-}
-
-
-GlobSync.prototype._process = function (pattern, index, inGlobStar) {
- assert(this instanceof GlobSync)
-
- // Get the first [n] parts of pattern that are all strings.
- var n = 0
- while (typeof pattern[n] === 'string') {
- n ++
- }
- // now n is the index of the first one that is *not* a string.
-
- // See if there's anything else
- var prefix
- switch (n) {
- // if not, then this is rather simple
- case pattern.length:
- this._processSimple(pattern.join('/'), index)
- return
-
- case 0:
- // pattern *starts* with some non-trivial item.
- // going to readdir(cwd), but not include the prefix in matches.
- prefix = null
- break
-
- default:
- // pattern has some string bits in the front.
- // whatever it starts with, whether that's 'absolute' like /foo/bar,
- // or 'relative' like '../baz'
- prefix = pattern.slice(0, n).join('/')
- break
- }
-
- var remain = pattern.slice(n)
-
- // get the list of entries.
- var read
- if (prefix === null)
- read = '.'
- else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
- if (!prefix || !isAbsolute(prefix))
- prefix = '/' + prefix
- read = prefix
- } else
- read = prefix
-
- var abs = this._makeAbs(read)
-
- //if ignored, skip processing
- if (childrenIgnored(this, read))
- return
-
- var isGlobStar = remain[0] === minimatch.GLOBSTAR
- if (isGlobStar)
- this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
- else
- this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
-}
-
-
-GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
- var entries = this._readdir(abs, inGlobStar)
-
- // if the abs isn't a dir, then nothing can match!
- if (!entries)
- return
-
- // It will only match dot entries if it starts with a dot, or if
- // dot is set. Stuff like @(.foo|.bar) isn't allowed.
- var pn = remain[0]
- var negate = !!this.minimatch.negate
- var rawGlob = pn._glob
- var dotOk = this.dot || rawGlob.charAt(0) === '.'
-
- var matchedEntries = []
- for (var i = 0; i < entries.length; i++) {
- var e = entries[i]
- if (e.charAt(0) !== '.' || dotOk) {
- var m
- if (negate && !prefix) {
- m = !e.match(pn)
- } else {
- m = e.match(pn)
- }
- if (m)
- matchedEntries.push(e)
- }
- }
-
- var len = matchedEntries.length
- // If there are no matched entries, then nothing matches.
- if (len === 0)
- return
-
- // if this is the last remaining pattern bit, then no need for
- // an additional stat *unless* the user has specified mark or
- // stat explicitly. We know they exist, since readdir returned
- // them.
-
- if (remain.length === 1 && !this.mark && !this.stat) {
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- if (prefix) {
- if (prefix.slice(-1) !== '/')
- e = prefix + '/' + e
- else
- e = prefix + e
- }
-
- if (e.charAt(0) === '/' && !this.nomount) {
- e = path.join(this.root, e)
- }
- this.matches[index][e] = true
- }
- // This was the last one, and no stats were needed
- return
- }
-
- // now test all matched entries as stand-ins for that part
- // of the pattern.
- remain.shift()
- for (var i = 0; i < len; i ++) {
- var e = matchedEntries[i]
- var newPattern
- if (prefix)
- newPattern = [prefix, e]
- else
- newPattern = [e]
- this._process(newPattern.concat(remain), index, inGlobStar)
- }
-}
-
-
-GlobSync.prototype._emitMatch = function (index, e) {
- var abs = this._makeAbs(e)
- if (this.mark)
- e = this._mark(e)
-
- if (this.matches[index][e])
- return
-
- if (this.nodir) {
- var c = this.cache[this._makeAbs(e)]
- if (c === 'DIR' || Array.isArray(c))
- return
- }
-
- this.matches[index][e] = true
- if (this.stat)
- this._stat(e)
-}
-
-
-GlobSync.prototype._readdirInGlobStar = function (abs) {
- // follow all symlinked directories forever
- // just proceed as if this is a non-globstar situation
- if (this.follow)
- return this._readdir(abs, false)
-
- var entries
- var lstat
- var stat
- try {
- lstat = fs.lstatSync(abs)
- } catch (er) {
- // lstat failed, doesn't exist
- return null
- }
-
- var isSym = lstat.isSymbolicLink()
- this.symlinks[abs] = isSym
-
- // If it's not a symlink or a dir, then it's definitely a regular file.
- // don't bother doing a readdir in that case.
- if (!isSym && !lstat.isDirectory())
- this.cache[abs] = 'FILE'
- else
- entries = this._readdir(abs, false)
-
- return entries
-}
-
-GlobSync.prototype._readdir = function (abs, inGlobStar) {
- var entries
-
- if (inGlobStar && !ownProp(this.symlinks, abs))
- return this._readdirInGlobStar(abs)
-
- if (ownProp(this.cache, abs)) {
- var c = this.cache[abs]
- if (!c || c === 'FILE')
- return null
-
- if (Array.isArray(c))
- return c
- }
-
- try {
- return this._readdirEntries(abs, fs.readdirSync(abs))
- } catch (er) {
- this._readdirError(abs, er)
- return null
- }
-}
-
-GlobSync.prototype._readdirEntries = function (abs, entries) {
- // if we haven't asked to stat everything, then just
- // assume that everything in there exists, so we can avoid
- // having to stat it a second time.
- if (!this.mark && !this.stat) {
- for (var i = 0; i < entries.length; i ++) {
- var e = entries[i]
- if (abs === '/')
- e = abs + e
- else
- e = abs + '/' + e
- this.cache[e] = true
- }
- }
-
- this.cache[abs] = entries
-
- // mark and cache dir-ness
- return entries
-}
-
-GlobSync.prototype._readdirError = function (f, er) {
- // handle errors, and cache the information
- switch (er.code) {
- case 'ENOTDIR': // totally normal. means it *does* exist.
- this.cache[this._makeAbs(f)] = 'FILE'
- break
-
- case 'ENOENT': // not terribly unusual
- case 'ELOOP':
- case 'ENAMETOOLONG':
- case 'UNKNOWN':
- this.cache[this._makeAbs(f)] = false
- break
-
- default: // some unusual error. Treat as failure.
- this.cache[this._makeAbs(f)] = false
- if (this.strict) throw er
- if (!this.silent) console.error('glob error', er)
- break
- }
-}
-
-GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
-
- var entries = this._readdir(abs, inGlobStar)
-
- // no entries means not a dir, so it can never have matches
- // foo.txt/** doesn't match foo.txt
- if (!entries)
- return
-
- // test without the globstar, and with every child both below
- // and replacing the globstar.
- var remainWithoutGlobStar = remain.slice(1)
- var gspref = prefix ? [ prefix ] : []
- var noGlobStar = gspref.concat(remainWithoutGlobStar)
-
- // the noGlobStar pattern exits the inGlobStar state
- this._process(noGlobStar, index, false)
-
- var len = entries.length
- var isSym = this.symlinks[abs]
-
- // If it's a symlink, and we're in a globstar, then stop
- if (isSym && inGlobStar)
- return
-
- for (var i = 0; i < len; i++) {
- var e = entries[i]
- if (e.charAt(0) === '.' && !this.dot)
- continue
-
- // these two cases enter the inGlobStar state
- var instead = gspref.concat(entries[i], remainWithoutGlobStar)
- this._process(instead, index, true)
-
- var below = gspref.concat(entries[i], remain)
- this._process(below, index, true)
- }
-}
-
-GlobSync.prototype._processSimple = function (prefix, index) {
- // XXX review this. Shouldn't it be doing the mounting etc
- // before doing stat? kinda weird?
- var exists = this._stat(prefix)
-
- if (!this.matches[index])
- this.matches[index] = Object.create(null)
-
- // If it doesn't exist, then just mark the lack of results
- if (!exists)
- return
-
- if (prefix && isAbsolute(prefix) && !this.nomount) {
- var trail = /[\/\\]$/.test(prefix)
- if (prefix.charAt(0) === '/') {
- prefix = path.join(this.root, prefix)
- } else {
- prefix = path.resolve(this.root, prefix)
- if (trail)
- prefix += '/'
- }
- }
-
- if (process.platform === 'win32')
- prefix = prefix.replace(/\\/g, '/')
-
- // Mark this as a match
- this.matches[index][prefix] = true
-}
-
-// Returns either 'DIR', 'FILE', or false
-GlobSync.prototype._stat = function (f) {
- var abs = this._makeAbs(f)
- var needDir = f.slice(-1) === '/'
-
- if (f.length > this.maxLength)
- return false
-
- if (!this.stat && ownProp(this.cache, abs)) {
- var c = this.cache[abs]
-
- if (Array.isArray(c))
- c = 'DIR'
-
- // It exists, but maybe not how we need it
- if (!needDir || c === 'DIR')
- return c
-
- if (needDir && c === 'FILE')
- return false
-
- // otherwise we have to stat, because maybe c=true
- // if we know it exists, but not what it is.
- }
-
- var exists
- var stat = this.statCache[abs]
- if (!stat) {
- var lstat
- try {
- lstat = fs.lstatSync(abs)
- } catch (er) {
- return false
- }
-
- if (lstat.isSymbolicLink()) {
- try {
- stat = fs.statSync(abs)
- } catch (er) {
- stat = lstat
- }
- } else {
- stat = lstat
- }
- }
-
- this.statCache[abs] = stat
-
- var c = stat.isDirectory() ? 'DIR' : 'FILE'
- this.cache[abs] = this.cache[abs] || c
-
- if (needDir && c !== 'DIR')
- return false
-
- return c
-}
-
-GlobSync.prototype._mark = function (p) {
- return common.mark(this, p)
-}
-
-GlobSync.prototype._makeAbs = function (f) {
- return common.makeAbs(this, f)
-}
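
The removed lines above are a vendored copy of glob's synchronous matcher (`GlobSync`). As context only, a minimal sketch of the public API that implementation backs, assuming glob ^5 (the range rimraf now depends on, per the package.json change just below); this sketch is illustrative and not part of the commit:

```js
// Hypothetical usage sketch, not part of this commit.
var glob = require('glob')

// Synchronous form: expands the pattern immediately and returns an
// array of matching paths (GlobSync does the work under the hood).
var jsFiles = glob.sync('lib/**/*.js', { nodir: true })
console.log(jsFiles)

// Async form for comparison; it accepts the same options object.
glob('lib/**/*.js', { nodir: true }, function (er, files) {
  if (er) throw er
  console.log(files)
})
```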
diff --git a/deps/npm/node_modules/rimraf/package.json b/deps/npm/node_modules/rimraf/package.json
index b0ec171e0..449d380d9 100644
--- a/deps/npm/node_modules/rimraf/package.json
+++ b/deps/npm/node_modules/rimraf/package.json
@@ -1,6 +1,6 @@
{
"name": "rimraf",
- "version": "2.4.0",
+ "version": "2.4.2",
"main": "rimraf.js",
"description": "A deep deletion module for node (like `rm -rf`)",
"author": {
@@ -20,35 +20,35 @@
"rimraf": "./bin.js"
},
"dependencies": {
- "glob": "^4.4.2"
+ "glob": "^5.0.14"
},
"files": [
- "bin.js",
- "rimraf.js",
"LICENSE",
- "README.md"
+ "README.md",
+ "bin.js",
+ "rimraf.js"
],
"devDependencies": {
"mkdirp": "^0.5.1",
- "tap": "^1.2.0"
+ "tap": "^1.3.1"
},
- "gitHead": "2128f0b9ade05a81394dd21a383fa309dd9eca94",
+ "gitHead": "4359e9d3b3c0f26e6abe3139a00b93337f1689d7",
"bugs": {
"url": "https://github.com/isaacs/rimraf/issues"
},
"homepage": "https://github.com/isaacs/rimraf#readme",
- "_id": "rimraf@2.4.0",
- "_shasum": "40ba0416037d8511ecb50f6b07cf8d18e658a864",
- "_from": "rimraf@latest",
- "_npmVersion": "2.10.1",
- "_nodeVersion": "2.0.1",
+ "_id": "rimraf@2.4.2",
+ "_shasum": "ab4f39b08b72eae07c3d9fe9f4831aebfc9f431d",
+ "_from": "rimraf@2.4.2",
+ "_npmVersion": "3.1.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
"dist": {
- "shasum": "40ba0416037d8511ecb50f6b07cf8d18e658a864",
- "tarball": "http://registry.npmjs.org/rimraf/-/rimraf-2.4.0.tgz"
+ "shasum": "ab4f39b08b72eae07c3d9fe9f4831aebfc9f431d",
+ "tarball": "http://registry.npmjs.org/rimraf/-/rimraf-2.4.2.tgz"
},
"maintainers": [
{
@@ -57,6 +57,6 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.0.tgz",
+ "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.2.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
index 12eecdcfd..d650b8953 100644
--- a/deps/npm/node_modules/semver/package.json
+++ b/deps/npm/node_modules/semver/package.json
@@ -1,13 +1,10 @@
{
"name": "semver",
- "version": "4.3.6",
+ "version": "5.0.1",
"description": "The semantic version parser used by npm.",
"main": "semver.js",
- "browser": "semver.browser.js",
- "min": "semver.min.js",
"scripts": {
- "test": "tap test/*.js",
- "prepublish": "make"
+ "test": "tap test/*.js"
},
"devDependencies": {
"tap": "^1.2.0",
@@ -16,28 +13,28 @@
"license": "ISC",
"repository": {
"type": "git",
- "url": "git://github.com/npm/node-semver.git"
+ "url": "git+https://github.com/npm/node-semver.git"
},
"bin": {
"semver": "./bin/semver"
},
- "gitHead": "63c48296ca5da3ba6a88c743bb8c92effc789811",
+ "gitHead": "3408896f115cdb241684fb81f85abb0d2ecc27e9",
"bugs": {
"url": "https://github.com/npm/node-semver/issues"
},
"homepage": "https://github.com/npm/node-semver#readme",
- "_id": "semver@4.3.6",
- "_shasum": "300bc6e0e86374f7ba61068b5b1ecd57fc6532da",
- "_from": "semver@>=4.3.6 <4.4.0",
- "_npmVersion": "2.10.1",
- "_nodeVersion": "2.0.1",
+ "_id": "semver@5.0.1",
+ "_shasum": "9fb3f4004f900d83c47968fe42f7583e05832cc9",
+ "_from": "semver@>=5.0.1 <5.1.0",
+ "_npmVersion": "3.1.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "isaacs",
"email": "isaacs@npmjs.com"
},
"dist": {
- "shasum": "300bc6e0e86374f7ba61068b5b1ecd57fc6532da",
- "tarball": "http://registry.npmjs.org/semver/-/semver-4.3.6.tgz"
+ "shasum": "9fb3f4004f900d83c47968fe42f7583e05832cc9",
+ "tarball": "http://registry.npmjs.org/semver/-/semver-5.0.1.tgz"
},
"maintainers": [
{
@@ -50,5 +47,6 @@
}
],
"directories": {},
- "_resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz"
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-5.0.1.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/semver/semver.js b/deps/npm/node_modules/semver/semver.js
index cafcc006b..d4a8dca60 100644
--- a/deps/npm/node_modules/semver/semver.js
+++ b/deps/npm/node_modules/semver/semver.js
@@ -1,6 +1,4 @@
-// export the class if we are in a Node-like system.
-if (typeof module === 'object' && module.exports === exports)
- exports = module.exports = SemVer;
+exports = module.exports = SemVer;
// The debug function is excluded entirely from the minified version.
/* nomin */ var debug;
@@ -1199,7 +1197,3 @@ function outside(version, range, hilo, loose) {
}
return true;
}
-
-// Use the define() function if we're in AMD land
-if (typeof define === 'function' && define.amd)
- define(exports);
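
The change above drops semver's browser/AMD export shims, leaving a plain CommonJS export. A hedged sketch of how the module is consumed after this change (illustrative only; these are standard semver calls, not code from the commit):

```js
// Illustrative only; standard semver API calls.
var semver = require('semver')

console.log(semver.valid('1.2.3'))               // '1.2.3'
console.log(semver.satisfies('5.0.1', '^5.0.0')) // true
console.log(semver.gt('5.0.1', '4.3.6'))         // true
```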
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/sha/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/sha/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/README.md b/deps/npm/node_modules/sha/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if `EACCESS`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
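
The EMFILE queueing described in the improvements list above is the main behavioral difference from plain `fs`. A minimal sketch of what that buys you (illustrative, not part of the vendored README):

```js
// Illustrative sketch: open far more files than the fd limit allows.
// Plain fs would eventually fail with EMFILE; graceful-fs queues the
// excess open() calls and retries them as earlier descriptors close.
var fs = require('graceful-fs')

var remaining = 10000
for (var i = 0; i < 10000; i++) {
  fs.open(__filename, 'r', function (er, fd) {
    if (er) throw er
    fs.close(fd, function () {
      if (--remaining === 0) console.log('all opens completed')
    })
  })
}
```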
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
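
At its core the file above is a retry queue: `open` and `readdir` calls that fail with `EMFILE`/`ENFILE` are parked as `Req` objects and re-run when a descriptor is closed. A rough standalone re-statement of that pattern, using only core `fs` (the names here are illustrative, not the module's own):

```js
// Rough sketch of the EMFILE retry-queue pattern (illustrative names).
var fs = require('fs')
var queue = []

function gracefulOpen (path, flags, cb) {
  fs.open(path, flags, function (er, fd) {
    if (er && (er.code === 'EMFILE' || er.code === 'ENFILE')) {
      // Too many open files: park the request until something closes.
      queue.push(function () { gracefulOpen(path, flags, cb) })
    } else {
      cb(er, fd)
    }
  })
}

function gracefulClose (fd, cb) {
  fs.close(fd, function (er) {
    // A descriptor was released: give one parked request another try.
    var retry = queue.shift()
    if (retry) retry()
    cb(er)
  })
}
```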
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/package.json b/deps/npm/node_modules/sha/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..84e65e878
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@2 || 3",
+ "/Users/isaacs/dev/npm/npm/node_modules/sha"
+ ]
+ ],
+ "_from": "graceful-fs@>=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/sha/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@2 || 3",
+ "rawSpec": "2 || 3",
+ "scope": null,
+ "spec": ">=2.0.0 <3.0.0||>=3.0.0 <4.0.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/sha"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@2 || 3",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/sha",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
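
The `chownFix`/`chownErOk` wrappers above mean that, through graceful-fs, a failed ownership change is silent whenever it is benign: a non-root user hitting `EINVAL`/`EPERM`, or any filesystem reporting `ENOSYS`. A small sketch of the observable difference (illustrative only; the file path is a placeholder):

```js
// Illustrative sketch of the chownFix behavior ('some-file.txt' is a
// placeholder for an existing file owned by another user).
var gfs = require('graceful-fs')
var fs = require('fs')

// As a non-root user, chown to root:root normally fails with EPERM.
fs.chown('some-file.txt', 0, 0, function (er) {
  console.log('plain fs:', er && er.code)   // e.g. 'EPERM'
})

// graceful-fs swallows EPERM/EINVAL for non-root (and ENOSYS always),
// so the callback sees no error for these benign failures.
gfs.chown('some-file.txt', 0, 0, function (er) {
  console.log('graceful-fs:', er)           // null
})
```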
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/sha/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/sha/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/node_modules/validate-npm-package-license/LICENSE b/deps/npm/node_modules/validate-npm-package-license/LICENSE
new file mode 100644
index 000000000..17de51c64
--- /dev/null
+++ b/deps/npm/node_modules/validate-npm-package-license/LICENSE
@@ -0,0 +1,174 @@
+SPDX:Apache-2.0
+
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and
+distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the
+copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other
+entities that control, are controlled by, or are under common control
+with that entity. For the purposes of this definition, "control" means
+(i) the power, direct or indirect, to cause the direction or management
+of such entity, whether by contract or otherwise, or (ii) ownership of
+fifty percent (50%) or more of the outstanding shares, or (iii)
+beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising
+permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation source,
+and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but not
+limited to compiled object code, generated documentation, and
+conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object
+form, made available under the License, as indicated by a copyright
+notice that is included in or attached to the work (an example is
+provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the purposes
+of this License, Derivative Works shall not include works that remain
+separable from, or merely link (or bind by name) to the interfaces of,
+the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original
+version of the Work and any modifications or additions to that Work or
+Derivative Works thereof, that is intentionally submitted to Licensor
+for inclusion in the Work by the copyright owner or by an individual or
+Legal Entity authorized to submit on behalf of the copyright owner. For
+the purposes of this definition, "submitted" means any form of
+electronic, verbal, or written communication sent to the Licensor or its
+representatives, including but not limited to communication on
+electronic mailing lists, source code control systems, and issue
+tracking systems that are managed by, or on behalf of, the Licensor for
+the purpose of discussing and improving the Work, but excluding
+communication that is conspicuously marked or otherwise designated in
+writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on
+behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright
+license to reproduce, prepare Derivative Works of, publicly display,
+publicly perform, sublicense, and distribute the Work and such
+Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this
+License, each Contributor hereby grants to You a perpetual, worldwide,
+non-exclusive, no-charge, royalty-free, irrevocable (except as stated in
+this section) patent license to make, have made, use, offer to sell,
+sell, import, and otherwise transfer the Work, where such license
+applies only to those patent claims licensable by such Contributor that
+are necessarily infringed by their Contribution(s) alone or by
+combination of their Contribution(s) with the Work to which such
+Contribution(s) was submitted. If You institute patent litigation
+against any entity (including a cross-claim or counterclaim in a
+lawsuit) alleging that the Work or a Contribution incorporated within
+the Work constitutes direct or contributory patent infringement, then
+any patent licenses granted to You under this License for that Work
+shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work
+or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You meet the
+following conditions:
+
+(a) You must give any other recipients of the Work or Derivative Works a
+copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices stating
+that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works that You
+distribute, all copyright, patent, trademark, and attribution notices
+from the Source form of the Work, excluding those notices that do not
+pertain to any part of the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must include
+a readable copy of the attribution notices contained within such NOTICE
+file, excluding those notices that do not pertain to any part of the
+Derivative Works, in at least one of the following places: within a
+NOTICE text file distributed as part of the Derivative Works; within the
+Source form or documentation, if provided along with the Derivative
+Works; or, within a display generated by the Derivative Works, if and
+wherever such third-party notices normally appear. The contents of the
+NOTICE file are for informational purposes only and do not modify the
+License. You may add Your own attribution notices within Derivative
+Works that You distribute, alongside or as an addendum to the NOTICE
+text from the Work, provided that such additional attribution notices
+cannot be construed as modifying the License.
+
+You may add Your own copyright statement to Your modifications and may
+provide additional or different license terms and conditions for use,
+reproduction, or distribution of Your modifications, or for any such
+Derivative Works as a whole, provided Your use, reproduction, and
+distribution of the Work otherwise complies with the conditions stated
+in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work by
+You to the Licensor shall be under the terms and conditions of this
+License, without any additional terms or conditions. Notwithstanding the
+above, nothing herein shall supersede or modify the terms of any
+separate license agreement you may have executed with Licensor regarding
+such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed
+to in writing, Licensor provides the Work (and each Contributor provides
+its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+OF ANY KIND, either express or implied, including, without limitation,
+any warranties or conditions of TITLE, NON-INFRINGEMENT,
+MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely
+responsible for determining the appropriateness of using or
+redistributing the Work and assume any risks associated with Your
+exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise, unless
+required by applicable law (such as deliberate and grossly negligent
+acts) or agreed to in writing, shall any Contributor be liable to You
+for damages, including any direct, indirect, special, incidental, or
+consequential damages of any character arising as a result of this
+License or out of the use or inability to use the Work (including but
+not limited to damages for loss of goodwill, work stoppage, computer
+failure or malfunction, or any and all other commercial damages or
+losses), even if such Contributor has been advised of the possibility of
+such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the
+Work or Derivative Works thereof, You may choose to offer, and charge a
+fee for, acceptance of support, warranty, indemnity, or other liability
+obligations and/or rights consistent with this License. However, in
+accepting such obligations, You may act only on Your own behalf and on
+Your sole responsibility, not on behalf of any other Contributor, and
+only if You agree to indemnify, defend, and hold each Contributor
+harmless for any liability incurred by, or claims asserted against, such
+Contributor by reason of your accepting any such warranty or additional
+liability.
diff --git a/deps/npm/node_modules/validate-npm-package-license/README.md b/deps/npm/node_modules/validate-npm-package-license/README.md
new file mode 100644
index 000000000..2c51c8e64
--- /dev/null
+++ b/deps/npm/node_modules/validate-npm-package-license/README.md
@@ -0,0 +1,61 @@
+validate-npm-package-license
+============================
+
+Give me a string and I'll tell you if it's a valid npm package license string.
+
+<!-- js var valid = require('./'); -->
+
+```js
+var noWarnings = {
+ validForNewPackages: true,
+ validForOldPackages: true
+};
+
+// SPDX license identifier for common open-source licenses
+valid('MIT'); // => noWarnings
+valid('BSD-2-Clause'); // => noWarnings
+valid('Apache-2.0'); // => noWarnings
+valid('ISC'); // => noWarnings
+
+// Simple SPDX license expression for dual licensing
+valid('(GPL-3.0 OR BSD-2-Clause)'); // => noWarnings
+
+// Refer to a non-standard license found in the package
+valid('SEE LICENSE IN LICENSE.txt'); // => noWarnings
+valid('SEE LICENSE IN license.md'); // => noWarnings
+
+// No license
+valid('UNLICENSED'); // => noWarnings
+valid('UNLICENCED'); // => noWarnings
+
+var warningsWithSuggestion = {
+ validForOldPackages: false,
+ validForNewPackages: false,
+ warnings: [
+ 'license should be ' +
+ 'a valid SPDX license expression without "LicenseRef", ' +
+ '"UNLICENSED", or ' +
+ '"SEE LICENSE IN <filename>"',
+ 'license is similar to the valid expression "Apache-2.0"'
+ ]
+};
+
+// Almost a valid SPDX license identifier
+valid('Apache 2.0'); // => warningsWithSuggestion
+
+var warningAboutLicenseRef = {
+ validForOldPackages: false,
+ validForNewPackages: false,
+ warnings: [
+ 'license should be ' +
+ 'a valid SPDX license expression without "LicenseRef", ' +
+ '"UNLICENSED", or ' +
+ '"SEE LICENSE IN <filename>"',
+ ]
+};
+
+// LicenseRef-* identifiers are valid SPDX expressions,
+// but not valid in package.json
+valid('LicenseRef-Made-Up'); // => warningAboutLicenseRef
+valid('(MIT OR LicenseRef-Made-Up)'); // => warningAboutLicenseRef
+```
diff --git a/deps/npm/node_modules/validate-npm-package-license/index.js b/deps/npm/node_modules/validate-npm-package-license/index.js
new file mode 100644
index 000000000..adf58d675
--- /dev/null
+++ b/deps/npm/node_modules/validate-npm-package-license/index.js
@@ -0,0 +1,74 @@
+var spdx = require('spdx');
+var correct = require('spdx-correct');
+
+var validResult = {
+ validForNewPackages: true,
+ validForOldPackages: true
+};
+
+var genericWarning = (
+ 'license should be ' +
+ 'a valid SPDX license expression without "LicenseRef", ' +
+ '"UNLICENSED", or ' +
+ '"SEE LICENSE IN <filename>"'
+);
+
+var fileReferenceRE = /^SEE LICEN[CS]E IN (.+)$/;
+
+function startsWith(prefix, string) {
+ return string.slice(0, prefix.length) === prefix;
+}
+
+function usesLicenseRef(ast) {
+ if (ast.hasOwnProperty('license')) {
+ var license = ast.license;
+ return (
+ startsWith('LicenseRef', license) ||
+ startsWith('DocumentRef', license)
+ );
+ } else {
+ return (
+ usesLicenseRef(ast.left) ||
+ usesLicenseRef(ast.right)
+ );
+ }
+}
+
+module.exports = function(argument) {
+ var ast;
+
+ try {
+ ast = spdx.parse(argument);
+ } catch (e) {
+ if (
+ argument === 'UNLICENSED' ||
+ argument === 'UNLICENCED' ||
+ fileReferenceRE.test(argument)
+ ) {
+ return validResult;
+ } else {
+ var result = {
+ validForOldPackages: false,
+ validForNewPackages: false,
+ warnings: [genericWarning]
+ };
+ var corrected = correct(argument);
+ if (corrected) {
+ result.warnings.push(
+ 'license is similar to the valid expression "' + corrected + '"'
+ );
+ }
+ return result;
+ }
+ }
+
+ if (usesLicenseRef(ast)) {
+ return {
+ validForNewPackages: false,
+ validForOldPackages: false,
+ warnings: [genericWarning]
+ };
+ } else {
+ return validResult;
+ }
+};
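
A short usage sketch for the module above, tying it back to a package.json `license` field (illustrative; the result shapes are the ones shown in the README earlier in this diff):

```js
// Illustrative usage of validate-npm-package-license.
var validLicense = require('validate-npm-package-license')
var pkg = require('./package.json')

var result = validLicense(pkg.license || '')
if (!result.validForNewPackages) {
  // e.g. 'Apache 2.0' yields the generic warning plus a suggestion
  // that it is similar to the valid expression "Apache-2.0".
  var warnings = result.warnings || []
  warnings.forEach(function (warning) {
    console.warn(warning)
  })
}
```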
diff --git a/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/LICENSE b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/LICENSE
new file mode 100644
index 000000000..17de51c64
--- /dev/null
+++ b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/LICENSE
@@ -0,0 +1,174 @@
+SPDX:Apache-2.0
+
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and
+distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the
+copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other
+entities that control, are controlled by, or are under common control
+with that entity. For the purposes of this definition, "control" means
+(i) the power, direct or indirect, to cause the direction or management
+of such entity, whether by contract or otherwise, or (ii) ownership of
+fifty percent (50%) or more of the outstanding shares, or (iii)
+beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising
+permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation source,
+and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but not
+limited to compiled object code, generated documentation, and
+conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object
+form, made available under the License, as indicated by a copyright
+notice that is included in or attached to the work (an example is
+provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the purposes
+of this License, Derivative Works shall not include works that remain
+separable from, or merely link (or bind by name) to the interfaces of,
+the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original
+version of the Work and any modifications or additions to that Work or
+Derivative Works thereof, that is intentionally submitted to Licensor
+for inclusion in the Work by the copyright owner or by an individual or
+Legal Entity authorized to submit on behalf of the copyright owner. For
+the purposes of this definition, "submitted" means any form of
+electronic, verbal, or written communication sent to the Licensor or its
+representatives, including but not limited to communication on
+electronic mailing lists, source code control systems, and issue
+tracking systems that are managed by, or on behalf of, the Licensor for
+the purpose of discussing and improving the Work, but excluding
+communication that is conspicuously marked or otherwise designated in
+writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on
+behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright
+license to reproduce, prepare Derivative Works of, publicly display,
+publicly perform, sublicense, and distribute the Work and such
+Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this
+License, each Contributor hereby grants to You a perpetual, worldwide,
+non-exclusive, no-charge, royalty-free, irrevocable (except as stated in
+this section) patent license to make, have made, use, offer to sell,
+sell, import, and otherwise transfer the Work, where such license
+applies only to those patent claims licensable by such Contributor that
+are necessarily infringed by their Contribution(s) alone or by
+combination of their Contribution(s) with the Work to which such
+Contribution(s) was submitted. If You institute patent litigation
+against any entity (including a cross-claim or counterclaim in a
+lawsuit) alleging that the Work or a Contribution incorporated within
+the Work constitutes direct or contributory patent infringement, then
+any patent licenses granted to You under this License for that Work
+shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work
+or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You meet the
+following conditions:
+
+(a) You must give any other recipients of the Work or Derivative Works a
+copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices stating
+that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works that You
+distribute, all copyright, patent, trademark, and attribution notices
+from the Source form of the Work, excluding those notices that do not
+pertain to any part of the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must include
+a readable copy of the attribution notices contained within such NOTICE
+file, excluding those notices that do not pertain to any part of the
+Derivative Works, in at least one of the following places: within a
+NOTICE text file distributed as part of the Derivative Works; within the
+Source form or documentation, if provided along with the Derivative
+Works; or, within a display generated by the Derivative Works, if and
+wherever such third-party notices normally appear. The contents of the
+NOTICE file are for informational purposes only and do not modify the
+License. You may add Your own attribution notices within Derivative
+Works that You distribute, alongside or as an addendum to the NOTICE
+text from the Work, provided that such additional attribution notices
+cannot be construed as modifying the License.
+
+You may add Your own copyright statement to Your modifications and may
+provide additional or different license terms and conditions for use,
+reproduction, or distribution of Your modifications, or for any such
+Derivative Works as a whole, provided Your use, reproduction, and
+distribution of the Work otherwise complies with the conditions stated
+in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work by
+You to the Licensor shall be under the terms and conditions of this
+License, without any additional terms or conditions. Notwithstanding the
+above, nothing herein shall supersede or modify the terms of any
+separate license agreement you may have executed with Licensor regarding
+such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed
+to in writing, Licensor provides the Work (and each Contributor provides
+its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+OF ANY KIND, either express or implied, including, without limitation,
+any warranties or conditions of TITLE, NON-INFRINGEMENT,
+MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely
+responsible for determining the appropriateness of using or
+redistributing the Work and assume any risks associated with Your
+exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise, unless
+required by applicable law (such as deliberate and grossly negligent
+acts) or agreed to in writing, shall any Contributor be liable to You
+for damages, including any direct, indirect, special, incidental, or
+consequential damages of any character arising as a result of this
+License or out of the use or inability to use the Work (including but
+not limited to damages for loss of goodwill, work stoppage, computer
+failure or malfunction, or any and all other commercial damages or
+losses), even if such Contributor has been advised of the possibility of
+such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the
+Work or Derivative Works thereof, You may choose to offer, and charge a
+fee for, acceptance of support, warranty, indemnity, or other liability
+obligations and/or rights consistent with this License. However, in
+accepting such obligations, You may act only on Your own behalf and on
+Your sole responsibility, not on behalf of any other Contributor, and
+only if You agree to indemnify, defend, and hold each Contributor
+harmless for any liability incurred by, or claims asserted against, such
+Contributor by reason of your accepting any such warranty or additional
+liability.
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md
index 05cd9947d..05cd9947d 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md
+++ b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/README.md
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json
index 79b09062a..d0946e3e2 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json
+++ b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/package.json
@@ -1,25 +1,25 @@
{
"name": "spdx-correct",
"description": "correct invalid SPDX identifiers",
- "version": "1.0.0-prerelease-3",
+ "version": "1.0.0",
"author": {
- "name": "Kyle Mitchell",
+ "name": "Kyle E. Mitchell",
"email": "kyle@kemitchell.com",
"url": "http://kemitchell.com"
},
"bugs": {
- "url": "https://github.com/kemitchell/spdx-correct/issues"
+ "url": "https://github.com/kemitchell/spdx-correct.js/issues"
},
"dependencies": {
"spdx": "^0.4.0"
},
"devDependencies": {
- "jscs": "^1.13.0",
- "jshint": "^2.7.0",
- "jsmd": "^0.3.0",
- "tap": "^0.7.1"
+ "jscs": "~1.13.1",
+ "jshint": "~2.8.0",
+ "jsmd": "~0.3.0",
+ "tape": "~4.0.0"
},
- "homepage": "https://github.com/kemitchell/spdx-correct",
+ "homepage": "https://github.com/kemitchell/spdx-correct.js",
"keywords": [
"SPDX",
"law",
@@ -31,18 +31,19 @@
"main": "spdx-correct.js",
"repository": {
"type": "git",
- "url": "https://github.com/kemitchell/spdx-correct"
+ "url": "git+https://github.com/kemitchell/spdx-correct.js.git"
},
"scripts": {
"lint": "jshint spdx-correct.js test && jscs spdx-correct.js test",
"precommit": "npm run lint && npm run test",
- "test": "jsmd README.md && tap test"
+ "test": "jsmd README.md && tape test/*.test.js"
},
- "gitHead": "0289b9068391d4a1db571137083e0beb18a2faef",
- "_id": "spdx-correct@1.0.0-prerelease-3",
- "_shasum": "5706cc6ce05b928a65564c76e1d6809ba033ac7e",
- "_from": "spdx-correct@1.0.0-prerelease-3",
- "_npmVersion": "1.4.28",
+ "gitHead": "eab00824e5a1281fd7c522f33f0498c7d1cedd13",
+ "_id": "spdx-correct@1.0.0",
+ "_shasum": "c2a8654a5a56d8b2d44ee82dfdfc24aaec6b702c",
+ "_from": "spdx-correct@>=1.0.0 <1.1.0",
+ "_npmVersion": "2.11.0",
+ "_nodeVersion": "2.2.1",
"_npmUser": {
"name": "kemitchell",
"email": "kyle@kemitchell.com"
@@ -54,10 +55,10 @@
}
],
"dist": {
- "shasum": "5706cc6ce05b928a65564c76e1d6809ba033ac7e",
- "tarball": "http://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.0-prerelease-3.tgz"
+ "shasum": "c2a8654a5a56d8b2d44ee82dfdfc24aaec6b702c",
+ "tarball": "http://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.0-prerelease-3.tgz",
+ "_resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js
index 094712d62..094712d62 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js
+++ b/deps/npm/node_modules/validate-npm-package-license/node_modules/spdx-correct/spdx-correct.js
diff --git a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/package.json b/deps/npm/node_modules/validate-npm-package-license/package.json
index 4483ede9e..beb00f8bf 100644
--- a/deps/npm/node_modules/init-package-json/node_modules/validate-npm-package-license/package.json
+++ b/deps/npm/node_modules/validate-npm-package-license/package.json
@@ -1,23 +1,23 @@
{
"name": "validate-npm-package-license",
- "description": "Give me a string and I'll tell you if it's a valid npm package license",
- "version": "1.0.0-prerelease-2",
+ "description": "Give me a string and I'll tell you if it's a valid npm package license string",
+ "version": "2.0.0",
"author": {
"name": "Kyle E. Mitchell",
"email": "kyle@kemitchell.com",
"url": "http://kemitchell.com"
},
"bugs": {
- "url": "https://github.com/kemitchell/npm-valid-package-license/issues"
+ "url": "https://github.com/kemitchell/validate-npm-package-license.js/issues"
},
"dependencies": {
- "spdx": "^0.4.0",
- "spdx-correct": "1.0.0-prerelease-3"
+ "spdx": "~0.4.0",
+ "spdx-correct": "~1.0.0"
},
"devDependencies": {
- "jsmd": "^0.3.0"
+ "jsmd": "~0.3.0"
},
- "homepage": "https://github.com/kemitchell/npm-valid-package-license",
+ "homepage": "https://github.com/kemitchell/validate-npm-package-license.js",
"keywords": [
"license",
"npm",
@@ -28,17 +28,18 @@
"main": "index.js",
"repository": {
"type": "git",
- "url": "https://github.com/kemitchell/npm-valid-package-license"
+ "url": "git+https://github.com/kemitchell/validate-npm-package-license.js.git"
},
"scripts": {
"precommit": "npm run test",
"test": "jsmd README.md"
},
- "gitHead": "d6043c5bf03a71409daae8d584ba74281e3c55c3",
- "_id": "validate-npm-package-license@1.0.0-prerelease-2",
- "_shasum": "0f45adce1728091b289597035c1ad25a5ba549be",
- "_from": "validate-npm-package-license@1.0.0-prerelease-2",
- "_npmVersion": "1.4.28",
+ "gitHead": "24544d4688f7cc9ed9685a3f9f90bd3f3bed58c0",
+ "_id": "validate-npm-package-license@2.0.0",
+ "_shasum": "ce0a29300edfa07e6ac6ad52eed8d6dfd194e42b",
+ "_from": "validate-npm-package-license@2.0.0",
+ "_npmVersion": "2.11.1",
+ "_nodeVersion": "2.3.0",
"_npmUser": {
"name": "kemitchell",
"email": "kyle@kemitchell.com"
@@ -47,13 +48,17 @@
{
"name": "kemitchell",
"email": "kyle@kemitchell.com"
+ },
+ {
+ "name": "othiym23",
+ "email": "ogd@aoaioxxysz.net"
}
],
"dist": {
- "shasum": "0f45adce1728091b289597035c1ad25a5ba549be",
- "tarball": "http://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-1.0.0-prerelease-2.tgz"
+ "shasum": "ce0a29300edfa07e6ac6ad52eed8d6dfd194e42b",
+ "tarball": "http://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-2.0.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-1.0.0-prerelease-2.tgz",
+ "_resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-2.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/validate-npm-package-name/LICENSE b/deps/npm/node_modules/validate-npm-package-name/LICENSE
new file mode 100644
index 000000000..fdcd63b30
--- /dev/null
+++ b/deps/npm/node_modules/validate-npm-package-name/LICENSE
@@ -0,0 +1,6 @@
+Copyright (c) 2015, npm, Inc
+
+
+Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/validate-npm-package-name/README.md b/deps/npm/node_modules/validate-npm-package-name/README.md
index 91a963b76..d967fdda2 100644
--- a/deps/npm/node_modules/validate-npm-package-name/README.md
+++ b/deps/npm/node_modules/validate-npm-package-name/README.md
@@ -1,11 +1,44 @@
# validate-npm-package-name
-Give me a string and I'll tell you if it's a valid npm package name.
+Give me a string and I'll tell you if it's a valid `npm` package name.
-This package exports a single synchronous function that takes a string as
-input and returns an object:
+This package exports a single synchronous function that takes a `string` as
+input and returns an object with two properties:
-## Valid Names
+- `validForNewPackages` :: `Boolean`
+- `validForOldPackages` :: `Boolean`
+
+## Contents
+
+- [Naming rules](#naming-rules)
+- [Examples](#examples)
+ + [Valid Names](#valid-names)
+ + [Invalid Names](#invalid-names)
+- [Legacy Names](#legacy-names)
+- [Tests](#tests)
+- [License](#license)
+
+## Naming Rules
+
+Below is a list of rules that a valid `npm` package name should conform to.
+
+- package name length should be greater than zero
+- all the characters in the package name must be lowercase i.e., no uppercase or mixed case names are allowed
+- package name *can* consist of hyphens
+- package name must *not* contain any non-url-safe characters (since name ends up being part of a URL)
+- package name should not start with `.` or `_`
+- package name should *not* contain any leading or trailing spaces
+- package name *cannot* be the same as a node.js/io.js core module nor a reserved/blacklisted name. For example, the following names are invalid:
+ + http
+ + stream
+ + node_modules
+ + favicon.ico
+- package name length cannot exceed 214
+
+
+## Examples
+
+### Valid Names
```js
var validate = require("validate-npm-package-name")
@@ -28,10 +61,10 @@ All of the above names are valid, so you'll get this object back:
}
```
-## Invalid Names
+### Invalid Names
```js
- validate(" leading-space:and:weirdchars")
+validate(" leading-space:and:weirdchars")
```
That was never a valid package name, so you get this:
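For quick reference, here is a minimal sketch (illustrative only, not part of the patch) of how the two result flags documented above diverge for a legacy-style name; the sample name is arbitrary, and the expected values follow the bundled README for validate-npm-package-name 2.2.2.

```js
var validate = require('validate-npm-package-name')

// Mixed case was allowed in the old days of npm, but is rejected for new packages:
var result = validate('eLaborAte-paCkAge-NAME')
console.log(result.validForNewPackages) // false
console.log(result.validForOldPackages) // true
console.log(result.warnings)            // e.g. [ 'name can no longer contain capital letters' ]
```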
diff --git a/deps/npm/node_modules/validate-npm-package-name/node_modules/builtins/package.json b/deps/npm/node_modules/validate-npm-package-name/node_modules/builtins/package.json
index c5ec77f13..32b8c3506 100644
--- a/deps/npm/node_modules/validate-npm-package-name/node_modules/builtins/package.json
+++ b/deps/npm/node_modules/validate-npm-package-name/node_modules/builtins/package.json
@@ -4,7 +4,7 @@
"description": "List of node.js builtin modules",
"repository": {
"type": "git",
- "url": "git://github.com/juliangruber/builtins.git"
+ "url": "git+https://github.com/juliangruber/builtins.git"
},
"license": "MIT",
"main": "builtins.json",
@@ -14,33 +14,12 @@
"scripts": {
"test": "node -e \"require('./builtins.json')\""
},
+ "readme": "\n# builtins\n\n List of node.js [builtin modules](http://nodejs.org/api/).\n\n [![build status](https://secure.travis-ci.org/juliangruber/builtins.svg)](http://travis-ci.org/juliangruber/builtins)\n\n## Example\n\n```js\nvar builtins = require('builtins');\n\nassert(builtins.indexOf('http') > -1);\n```\n\n## License\n\n MIT\n",
+ "readmeFilename": "Readme.md",
"bugs": {
"url": "https://github.com/juliangruber/builtins/issues"
},
"homepage": "https://github.com/juliangruber/builtins",
"_id": "builtins@0.0.7",
- "dist": {
- "shasum": "355219cd6cf18dbe7c01cc7fd2dce765cfdc549a",
- "tarball": "http://registry.npmjs.org/builtins/-/builtins-0.0.7.tgz"
- },
- "_from": "builtins@0.0.7",
- "_npmVersion": "1.3.22",
- "_npmUser": {
- "name": "juliangruber",
- "email": "julian@juliangruber.com"
- },
- "maintainers": [
- {
- "name": "juliangruber",
- "email": "julian@juliangruber.com"
- },
- {
- "name": "segment",
- "email": "tj@segment.io"
- }
- ],
- "directories": {},
- "_shasum": "355219cd6cf18dbe7c01cc7fd2dce765cfdc549a",
- "_resolved": "https://registry.npmjs.org/builtins/-/builtins-0.0.7.tgz",
- "readme": "ERROR: No README data found!"
+ "_from": "builtins@0.0.7"
}
diff --git a/deps/npm/node_modules/validate-npm-package-name/package.json b/deps/npm/node_modules/validate-npm-package-name/package.json
index a809d7ac5..b2a6104af 100644
--- a/deps/npm/node_modules/validate-npm-package-name/package.json
+++ b/deps/npm/node_modules/validate-npm-package-name/package.json
@@ -1,6 +1,6 @@
{
"name": "validate-npm-package-name",
- "version": "2.2.0",
+ "version": "2.2.2",
"description": "Give me a string and I'll tell you if it's a valid npm package name",
"main": "index.js",
"directories": {
@@ -33,30 +33,10 @@
"url": "https://github.com/npm/validate-npm-package-name/issues"
},
"homepage": "https://github.com/npm/validate-npm-package-name",
- "gitHead": "acef1219c13a0cf4cf6b8706d65f606d82a7d472",
- "_id": "validate-npm-package-name@2.2.0",
- "_shasum": "4cb6ff120bd7afb0b5681406cfaea8df2d763477",
- "_from": "validate-npm-package-name@2.2.0",
- "_npmVersion": "2.7.6",
- "_nodeVersion": "1.6.2",
- "_npmUser": {
- "name": "bcoe",
- "email": "ben@npmjs.com"
- },
- "dist": {
- "shasum": "4cb6ff120bd7afb0b5681406cfaea8df2d763477",
- "tarball": "http://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-2.2.0.tgz"
- },
- "maintainers": [
- {
- "name": "zeke",
- "email": "zeke@npmjs.com"
- },
- {
- "name": "bcoe",
- "email": "ben@npmjs.com"
- }
- ],
- "_resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-2.2.0.tgz",
- "readme": "ERROR: No README data found!"
+ "readme": "# validate-npm-package-name\n\nGive me a string and I'll tell you if it's a valid `npm` package name.\n\nThis package exports a single synchronous function that takes a `string` as\ninput and returns an object with two properties:\n\n- `validForNewPackages` :: `Boolean`\n- `validForOldPackages` :: `Boolean`\n\n## Contents\n\n- [Naming rules](#naming-rules)\n- [Examples](#examples)\n + [Valid Names](#valid-names)\n + [Invalid Names](#invalid-names)\n- [Legacy Names](#legacy-names)\n- [Tests](#tests)\n- [License](#license)\n\n## Naming Rules\n\nBelow is a list of rules that valid `npm` package name should conform to.\n\n- package name length should be greater than zero\n- all the characters in the package name must be lowercase i.e., no uppercase or mixed case names are allowed\n- package name *can* consist of hyphens\n- package name must *not* contain any non-url-safe characters (since name ends up being part of a URL)\n- package name should not start with `.` or `_`\n- package name should *not* contain any leading or trailing spaces\n- package name *cannot* be the same as a node.js/io.js core module nor a reserved/blacklisted name. For example, the following names are invalid:\n + http\n + stream\n + node_modules\n + favicon.ico\n- package name length cannot exceed 214\n\n\n## Examples\n\n### Valid Names\n\n```js\nvar validate = require(\"validate-npm-package-name\")\n\nvalidate(\"some-package\")\nvalidate(\"example.com\")\nvalidate(\"under_score\")\nvalidate(\"123numeric\")\nvalidate(\"crazy!\")\nvalidate(\"@npm/thingy\")\nvalidate(\"@jane/foo.js\")\n```\n\nAll of the above names are valid, so you'll get this object back:\n\n```js\n{\n validForNewPackages: true,\n validForOldPackages: true\n}\n```\n\n### Invalid Names\n\n```js\nvalidate(\" leading-space:and:weirdchars\")\n```\n\nThat was never a valid package name, so you get this:\n\n```js\n{\n validForNewPackages: false,\n validForOldPackages: false,\n errors: [\n 'name cannot contain leading or trailing spaces',\n 'name can only contain URL-friendly characters'\n ]\n}\n```\n\n## Legacy Names\n\nIn the old days of npm, package names were wild. They could have capital\nletters in them. They could be really long. They could be the name of an\nexisting module in node core.\n\nIf you give this function a package name that **used to be valid**, you'll see\na change in the value of `validForNewPackages` property, and a warnings array\nwill be present:\n\n```js\nvalidate(\"cRaZY-paCkAgE-with-mixed-case-and-more-than-214-characters-----------------------------------------------------------------------------------------------------------------------------------------------------------\")\n```\n\nreturns:\n\n```js\n{\n validForNewPackages: false,\n validForOldPackages: true,\n warnings: [\n \"name can no longer contain capital letters\",\n \"name can no longer contain more than 214 characters\"\n ]\n}\n```\n\n## Tests\n\n```sh\nnpm install\nnpm test\n```\n\n## License\n\nISC\n",
+ "readmeFilename": "README.md",
+ "gitHead": "3af92c881549f1b96f05ab6bfb5768bba94ad72d",
+ "_id": "validate-npm-package-name@2.2.2",
+ "_shasum": "f65695b22f7324442019a3c7fa39a6e7fd299085",
+ "_from": "validate-npm-package-name@2.2.2"
}
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/.npmignore b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/README.md b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..13a2e8605
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/README.md
@@ -0,0 +1,36 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](http://api.nodejs.org/fs.html)
+
+graceful-fs:
+
+* Queues up `open` and `readdir` calls, and retries them once
+ something closes if there is an EMFILE error from too many file
+ descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+ `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if an `EACCESS`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFileSync('some-file-or-whatever')
+```
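The EMFILE queueing described above is the main behavioral difference from plain `fs`. A rough sketch of the situation it handles (illustrative only; it mirrors the `max-open` test added further down, and the count of 2048 is an arbitrary number chosen to exceed a typical ulimit):

```js
var fs = require('graceful-fs')

// Open far more files in parallel than the descriptor limit usually allows.
// With plain `fs` some of these calls could fail with EMFILE; graceful-fs
// queues the excess opens and retries them as earlier descriptors are closed.
var remaining = 2048
for (var i = 0; i < 2048; i++) {
  fs.open(__filename, 'r', function (er, fd) {
    if (er) throw er
    fs.close(fd, function () {
      if (--remaining === 0) console.log('all opens eventually succeeded')
    })
  })
}
```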
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/fs.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/fs.js
new file mode 100644
index 000000000..64ad98023
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/fs.js
@@ -0,0 +1,11 @@
+// eeeeeevvvvviiiiiiillllll
+// more evil than monkey-patching the native builtin?
+// Not sure.
+
+var mod = require("module")
+var pre = '(function (exports, require, module, __filename, __dirname) { '
+var post = '});'
+var src = pre + process.binding('natives').fs + post
+var vm = require('vm')
+var fn = vm.runInThisContext(src)
+fn(exports, require, module, __filename, __dirname)
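The point of routing the builtin's source through `vm` here is to obtain a private copy of `fs` that the later monkey-patching can modify without touching the `fs` object other code `require`s. A minimal check of that property (an assumption based on this layout, not code from the patch):

```js
var realFs = require('fs')
var gracefulFs = require('graceful-fs')

// graceful-fs 3.x patches its own re-evaluated copy of the fs builtin,
// so the module returned by require('fs') is left unmodified.
console.log(gracefulFs === realFs)      // false: separate module instances
console.log(typeof gracefulFs.readFile) // 'function': same API surface as fs
```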
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..fb206b838
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,158 @@
+// Monkey-patching the fs module.
+// It's ugly, but there is simply no other way to do this.
+var fs = module.exports = require('./fs.js')
+
+var assert = require('assert')
+
+// fix up some busted stuff, mostly on windows and old nodes
+require('./polyfills.js')
+
+var util = require('util')
+
+function noop () {}
+
+var debug = noop
+if (util.debuglog)
+ debug = util.debuglog('gfs')
+else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
+ debug = function() {
+ var m = util.format.apply(util, arguments)
+ m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
+ console.error(m)
+ }
+
+if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
+ process.on('exit', function() {
+ debug('fds', fds)
+ debug(queue)
+ assert.equal(queue.length, 0)
+ })
+}
+
+
+var originalOpen = fs.open
+fs.open = open
+
+function open(path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+ new OpenReq(path, flags, mode, cb)
+}
+
+function OpenReq(path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(OpenReq, Req)
+
+OpenReq.prototype.process = function() {
+ originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
+}
+
+var fds = {}
+OpenReq.prototype.done = function(er, fd) {
+ debug('open done', er, fd)
+ if (fd)
+ fds['fd' + fd] = this.path
+ Req.prototype.done.call(this, er, fd)
+}
+
+
+var originalReaddir = fs.readdir
+fs.readdir = readdir
+
+function readdir(path, cb) {
+ if (typeof cb !== "function") cb = noop
+ new ReaddirReq(path, cb)
+}
+
+function ReaddirReq(path, cb) {
+ this.path = path
+ this.cb = cb
+ Req.call(this)
+}
+
+util.inherits(ReaddirReq, Req)
+
+ReaddirReq.prototype.process = function() {
+ originalReaddir.call(fs, this.path, this.done)
+}
+
+ReaddirReq.prototype.done = function(er, files) {
+ if (files && files.sort)
+ files = files.sort()
+ Req.prototype.done.call(this, er, files)
+ onclose()
+}
+
+
+var originalClose = fs.close
+fs.close = close
+
+function close (fd, cb) {
+ debug('close', fd)
+ if (typeof cb !== "function") cb = noop
+ delete fds['fd' + fd]
+ originalClose.call(fs, fd, function(er) {
+ onclose()
+ cb(er)
+ })
+}
+
+
+var originalCloseSync = fs.closeSync
+fs.closeSync = closeSync
+
+function closeSync (fd) {
+ try {
+ return originalCloseSync(fd)
+ } finally {
+ onclose()
+ }
+}
+
+
+// Req class
+function Req () {
+ // start processing
+ this.done = this.done.bind(this)
+ this.failures = 0
+ this.process()
+}
+
+Req.prototype.done = function (er, result) {
+ var tryAgain = false
+ if (er) {
+ var code = er.code
+ var tryAgain = code === "EMFILE" || code === "ENFILE"
+ if (process.platform === "win32")
+ tryAgain = tryAgain || code === "OK"
+ }
+
+ if (tryAgain) {
+ this.failures ++
+ enqueue(this)
+ } else {
+ var cb = this.cb
+ cb(er, result)
+ }
+}
+
+var queue = []
+
+function enqueue(req) {
+ queue.push(req)
+ debug('enqueue %d %s', queue.length, req.constructor.name, req)
+}
+
+function onclose() {
+ var req = queue.shift()
+ if (req) {
+ debug('process', req.constructor.name, req)
+ req.process()
+ }
+}
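Stripped of the `Req` class machinery, the retry strategy above boils down to: park any request that fails with EMFILE/ENFILE, and replay one parked request whenever a descriptor is released. A simplified sketch of that idea (illustrative only, not the patched code):

```js
var realFs = require('fs')
var queue = []

function gracefulOpen (path, flags, cb) {
  realFs.open(path, flags, function (er, fd) {
    if (er && (er.code === 'EMFILE' || er.code === 'ENFILE')) {
      // Out of file descriptors: park the request until something closes.
      queue.push(function () { gracefulOpen(path, flags, cb) })
    } else {
      cb(er, fd)
    }
  })
}

function gracefulClose (fd, cb) {
  realFs.close(fd, function (er) {
    var retry = queue.shift()
    if (retry) retry() // a descriptor was freed; replay one parked request
    cb(er)
  })
}
```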
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/package.json b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..974aa9516
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/package.json
@@ -0,0 +1,96 @@
+{
+ "_args": [
+ [
+ "graceful-fs@^3.0.2",
+ "/Users/isaacs/dev/npm/npm/node_modules/write-file-atomic"
+ ]
+ ],
+ "_from": "graceful-fs@>=3.0.2 <4.0.0",
+ "_id": "graceful-fs@3.0.8",
+ "_inCache": true,
+ "_location": "/write-file-atomic/graceful-fs",
+ "_nodeVersion": "2.0.1",
+ "_npmUser": {
+ "email": "isaacs@npmjs.com",
+ "name": "isaacs"
+ },
+ "_npmVersion": "2.10.1",
+ "_phantomChildren": {},
+ "_requested": {
+ "name": "graceful-fs",
+ "raw": "graceful-fs@^3.0.2",
+ "rawSpec": "^3.0.2",
+ "scope": null,
+ "spec": ">=3.0.2 <4.0.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/write-file-atomic"
+ ],
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz",
+ "_shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "_shrinkwrap": null,
+ "_spec": "graceful-fs@^3.0.2",
+ "_where": "/Users/isaacs/dev/npm/npm/node_modules/write-file-atomic",
+ "author": {
+ "email": "i@izs.me",
+ "name": "Isaac Z. Schlueter",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-graceful-fs/issues"
+ },
+ "dependencies": {},
+ "description": "A drop-in replacement for fs, making various improvements.",
+ "devDependencies": {
+ "mkdirp": "^0.5.0",
+ "rimraf": "^2.2.8",
+ "tap": "^1.2.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "dist": {
+ "shasum": "ce813e725fa82f7e6147d51c9a5ca68270551c22",
+ "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.8.tgz"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ },
+ "gitHead": "45c57aa5e323c35a985a525de6f0c9a6ef59e1f8",
+ "homepage": "https://github.com/isaacs/node-graceful-fs#readme",
+ "keywords": [
+ "EACCESS",
+ "EAGAIN",
+ "EINVAL",
+ "EMFILE",
+ "EPERM",
+ "error",
+ "errors",
+ "fs",
+ "handling",
+ "module",
+ "queue",
+ "reading",
+ "retries",
+ "retry"
+ ],
+ "license": "ISC",
+ "main": "graceful-fs.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "name": "graceful-fs",
+ "optionalDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.8"
+}
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/polyfills.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/polyfills.js
new file mode 100644
index 000000000..8ac5006e2
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/polyfills.js
@@ -0,0 +1,254 @@
+var fs = require('./fs.js')
+var constants = require('constants')
+
+var origCwd = process.cwd
+var cwd = null
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
+
+// (re-)implement some things that are known busted or missing.
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
+
+
+// https://github.com/isaacs/node-graceful-fs/issues/4
+// Chown should not fail on einval or eperm if non-root.
+// It should not fail on enosys ever, as this just indicates
+// that a fs doesn't support the intended operation.
+
+fs.chown = chownFix(fs.chown)
+fs.fchown = chownFix(fs.fchown)
+fs.lchown = chownFix(fs.lchown)
+
+fs.chmod = chownFix(fs.chmod)
+fs.fchmod = chownFix(fs.fchmod)
+fs.lchmod = chownFix(fs.lchmod)
+
+fs.chownSync = chownFixSync(fs.chownSync)
+fs.fchownSync = chownFixSync(fs.fchownSync)
+fs.lchownSync = chownFixSync(fs.lchownSync)
+
+fs.chmodSync = chownFix(fs.chmodSync)
+fs.fchmodSync = chownFix(fs.fchmodSync)
+fs.lchmodSync = chownFix(fs.lchmodSync)
+
+function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er, res) {
+ if (chownErOk(er)) er = null
+ cb(er, res)
+ })
+ }
+}
+
+function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+}
+
+// ENOSYS means that the fs doesn't support the op. Just ignore
+// that, because it doesn't matter.
+//
+// if there's no getuid, or if getuid() is something other
+// than 0, and the error is EINVAL or EPERM, then just ignore
+// it.
+//
+// This specific case is a silent failure in cp, install, tar,
+// and most other unix tools that manage permissions.
+//
+// When running as root, or if other types of errors are
+// encountered, then it's strict.
+function chownErOk (er) {
+ if (!er)
+ return true
+
+ if (er.code === "ENOSYS")
+ return true
+
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
+
+ return false
+}
+
+
+// if lchmod/lchown do not exist, then make them no-ops
+if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
+}
+if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ process.nextTick(cb)
+ }
+ fs.lchownSync = function () {}
+}
+
+
+
+// on Windows, A/V software can lock the directory, causing this
+// to fail with an EACCES or EPERM if the directory contains newly
+// created files. Try again on failure, for up to 1 second.
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ if(cb) cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
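The chown/chmod wrappers above mean that, for unprivileged users, ownership errors degrade to silent no-ops rather than hard failures, matching `cp`, `install`, and `tar`. A hedged example of the observable effect (illustrative only; the uid/gid of 0 is just an owner the current user presumably cannot assign):

```js
var fs = require('graceful-fs')

// Try to hand this file over to root. The underlying syscall fails with EPERM
// for a non-root user, but chownFix/chownErOk swallow that, so the callback
// receives no error.
fs.chown(__filename, 0, 0, function (er) {
  console.log('chown error:', er) // expected: null when running unprivileged
})
```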
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/max-open.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/max-open.js
new file mode 100644
index 000000000..a6b9ba43d
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/max-open.js
@@ -0,0 +1,69 @@
+var test = require('tap').test
+var fs = require('../')
+
+test('open lots of stuff', function (t) {
+ // Get around EBADF from libuv by making sure that stderr is opened
+ // Otherwise Darwin will refuse to give us a FD for stderr!
+ process.stderr.write('')
+
+ // How many parallel open()'s to do
+ var n = 1024
+ var opens = 0
+ var fds = []
+ var going = true
+ var closing = false
+ var doneCalled = 0
+
+ for (var i = 0; i < n; i++) {
+ go()
+ }
+
+ function go() {
+ opens++
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fds.push(fd)
+ if (going) go()
+ })
+ }
+
+ // should hit ulimit pretty fast
+ setTimeout(function () {
+ going = false
+ t.equal(opens - fds.length, n)
+ done()
+ }, 100)
+
+
+ function done () {
+ if (closing) return
+ doneCalled++
+
+ if (fds.length === 0) {
+ console.error('done called %d times', doneCalled)
+ // First because of the timeout
+ // Then to close the fd's opened afterwards
+ // Then this time, to complete.
+ // Might take multiple passes, depending on CPU speed
+ // and ulimit, but at least 3 in every case.
+ t.ok(doneCalled >= 2)
+ return t.end()
+ }
+
+ closing = true
+ setTimeout(function () {
+ // console.error('do closing again')
+ closing = false
+ done()
+ }, 100)
+
+ // console.error('closing time')
+ var closes = fds.slice(0)
+ fds.length = 0
+ closes.forEach(function (fd) {
+ fs.close(fd, function (er) {
+ if (er) throw er
+ })
+ })
+ }
+})
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/open.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/open.js
new file mode 100644
index 000000000..85732f236
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/open.js
@@ -0,0 +1,39 @@
+var test = require('tap').test
+var fs = require('../graceful-fs.js')
+
+test('graceful fs is monkeypatched fs', function (t) {
+ t.equal(fs, require('../fs.js'))
+ t.end()
+})
+
+test('open an existing file works', function (t) {
+ var fd = fs.openSync(__filename, 'r')
+ fs.closeSync(fd)
+ fs.open(__filename, 'r', function (er, fd) {
+ if (er) throw er
+ fs.close(fd, function (er) {
+ if (er) throw er
+ t.pass('works')
+ t.end()
+ })
+ })
+})
+
+test('open a non-existing file throws', function (t) {
+ var er
+ try {
+ var fd = fs.openSync('this file does not exist', 'r')
+ } catch (x) {
+ er = x
+ }
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+
+ fs.open('neither does this file', 'r', function (er, fd) {
+ t.ok(er, 'should throw')
+ t.notOk(fd, 'should not get an fd')
+ t.equal(er.code, 'ENOENT')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/readdir-sort.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/readdir-sort.js
new file mode 100644
index 000000000..cb63a6846
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/readdir-sort.js
@@ -0,0 +1,20 @@
+var test = require("tap").test
+var fs = require("../fs.js")
+
+var readdir = fs.readdir
+fs.readdir = function(path, cb) {
+ process.nextTick(function() {
+ cb(null, ["b", "z", "a"])
+ })
+}
+
+var g = require("../")
+
+test("readdir reorder", function (t) {
+ g.readdir("whatevers", function (er, files) {
+ if (er)
+ throw er
+ t.same(files, [ "a", "b", "z" ])
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/write-then-read.js b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/write-then-read.js
new file mode 100644
index 000000000..21e4c26bf
--- /dev/null
+++ b/deps/npm/node_modules/write-file-atomic/node_modules/graceful-fs/test/write-then-read.js
@@ -0,0 +1,47 @@
+var fs = require('../');
+var rimraf = require('rimraf');
+var mkdirp = require('mkdirp');
+var test = require('tap').test;
+var p = require('path').resolve(__dirname, 'files');
+
+process.chdir(__dirname)
+
+// Make sure to reserve the stderr fd
+process.stderr.write('');
+
+var num = 4097;
+var paths = new Array(num);
+
+test('make files', function (t) {
+ rimraf.sync(p);
+ mkdirp.sync(p);
+
+ for (var i = 0; i < num; ++i) {
+ paths[i] = 'files/file-' + i;
+ fs.writeFileSync(paths[i], 'content');
+ }
+
+ t.end();
+})
+
+test('read files', function (t) {
+ // now read them
+ var done = 0;
+ for (var i = 0; i < num; ++i) {
+ fs.readFile(paths[i], function(err, data) {
+ if (err)
+ throw err;
+
+ ++done;
+ if (done === num) {
+ t.pass('success');
+ t.end()
+ }
+ });
+ }
+});
+
+test('cleanup', function (t) {
+ rimraf.sync(p);
+ t.end();
+});
diff --git a/deps/npm/package.json b/deps/npm/package.json
index f66c064c0..1dabe8747 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "2.11.3",
+ "version": "2.13.4",
"name": "npm",
"description": "a package manager for JavaScript",
"keywords": [
@@ -32,7 +32,6 @@
"dependencies": {
"abbrev": "~1.0.7",
"ansi": "~0.3.0",
- "ansi-regex": "~1.1.1",
"ansicolors": "~0.3.2",
"ansistyles": "~0.1.3",
"archy": "~1.0.0",
@@ -44,65 +43,63 @@
"cmd-shim": "~2.0.1",
"columnify": "~1.5.1",
"config-chain": "~1.1.9",
- "dezalgo": "~1.0.2",
+ "dezalgo": "~1.0.3",
"editor": "~1.0.0",
"fs-vacuum": "~1.2.6",
"fs-write-stream-atomic": "~1.0.3",
- "fstream": "~1.0.6",
- "fstream-npm": "~1.0.2",
+ "fstream": "~1.0.7",
+ "fstream-npm": "~1.0.4",
"github-url-from-git": "~1.4.0",
"github-url-from-username-repo": "~1.0.2",
- "glob": "~5.0.10",
- "graceful-fs": "~3.0.8",
+ "glob": "~5.0.14",
+ "graceful-fs": "~4.1.2",
"hosted-git-info": "~2.1.4",
"inflight": "~1.0.4",
"inherits": "~2.0.1",
"ini": "~1.3.4",
- "init-package-json": "~1.6.0",
+ "init-package-json": "~1.7.1",
"lockfile": "~1.0.1",
- "lru-cache": "~2.6.4",
- "minimatch": "~2.0.8",
+ "lru-cache": "~2.6.5",
+ "minimatch": "~2.0.10",
"mkdirp": "~0.5.1",
- "node-gyp": "~2.0.1",
- "nopt": "~3.0.2",
- "normalize-git-url": "~1.0.1",
- "normalize-package-data": "~2.2.1",
- "npm-cache-filename": "~1.0.1",
- "npm-install-checks": "~1.0.5",
- "npm-package-arg": "~4.0.1",
- "npm-registry-client": "~6.4.0",
+ "node-gyp": "~2.0.2",
+ "nopt": "~3.0.3",
+ "normalize-git-url": "~3.0.1",
+ "normalize-package-data": "~2.3.1",
+ "npm-cache-filename": "~1.0.2",
+ "npm-install-checks": "~1.0.6",
+ "npm-package-arg": "~4.0.2",
+ "npm-registry-client": "~6.5.1",
"npm-user-validate": "~0.1.2",
"npmlog": "~1.2.1",
"once": "~1.3.2",
"opener": "~1.4.1",
- "osenv": "~0.1.2",
+ "osenv": "~0.1.3",
"path-is-inside": "~1.0.0",
"read": "~1.0.6",
- "read-installed": "~4.0.0",
+ "read-installed": "~4.0.2",
"read-package-json": "~2.0.0",
"readable-stream": "~1.1.13",
"realize-package-specifier": "~3.0.1",
- "request": "~2.57.0",
+ "request": "~2.60.0",
"retry": "~0.6.1",
- "rimraf": "~2.4.0",
- "semver": "~4.3.6",
+ "rimraf": "~2.4.2",
+ "semver": "~5.0.1",
"sha": "~1.3.0",
"slide": "~1.1.6",
"sorted-object": "~1.0.0",
"spdx": "~0.4.1",
- "strip-ansi": "~2.0.1",
"tar": "~2.1.1",
"text-table": "~0.2.0",
"uid-number": "0.0.6",
"umask": "~1.1.0",
- "validate-npm-package-name": "2.2.0",
+ "validate-npm-package-name": "~2.2.2",
"which": "~1.1.1",
"wrappy": "~1.0.1",
"write-file-atomic": "~1.1.2"
},
"bundleDependencies": [
"abbrev",
- "ansi-regex",
"ansi",
"ansicolors",
"ansistyles",
@@ -161,11 +158,11 @@
"slide",
"sorted-object",
"spdx",
- "strip-ansi",
"tar",
"text-table",
"uid-number",
"umask",
+ "validate-npm-package-license",
"validate-npm-package-name",
"which",
"wrappy",
@@ -174,13 +171,13 @@
"devDependencies": {
"deep-equal": "~1.0.0",
"marked": "~0.3.3",
- "marked-man": "~0.1.4",
- "nock": "~2.5.0",
+ "marked-man": "~0.1.5",
+ "nock": "~2.9.1",
"npm-registry-couchapp": "~2.6.7",
"npm-registry-mock": "~1.0.0",
"require-inject": "~1.2.0",
"sprintf-js": "~1.0.2",
- "tap": "~1.2.0"
+ "tap": "~1.3.1"
},
"scripts": {
"test-legacy": "node ./test/run.js",
diff --git a/deps/npm/scripts/release.sh b/deps/npm/scripts/release.sh
index 85e871c46..abe6c197f 100644
--- a/deps/npm/scripts/release.sh
+++ b/deps/npm/scripts/release.sh
@@ -8,7 +8,7 @@ set -e
rm -rf release *.tgz || true
mkdir release
-npm pack --loglevel error >/dev/null
+node ./cli.js pack --loglevel error >/dev/null
mv *.tgz release
cd release
tar xzf *.tgz
@@ -18,12 +18,12 @@ mv package node_modules/npm
# make the zip for windows users
cp node_modules/npm/bin/*.cmd .
-zipname=npm-$(npm -v).zip
+zipname=npm-$(node ../cli.js -v).zip
zip -q -9 -r -X "$zipname" *.cmd node_modules
# make the tar for node's deps
cd node_modules
-tarname=npm-$(npm -v).tgz
+tarname=npm-$(node ../../cli.js -v).tgz
tar czf "$tarname" npm
cd ..
diff --git a/deps/npm/test/fixtures/config/userconfig-with-gc b/deps/npm/test/fixtures/config/userconfig-with-gc
index 7268fcb3c..8c8dd06d2 100644
--- a/deps/npm/test/fixtures/config/userconfig-with-gc
+++ b/deps/npm/test/fixtures/config/userconfig-with-gc
@@ -1,4 +1,4 @@
-globalconfig=/Users/ogd/Documents/projects/npm/npm/test/fixtures/config/globalconfig
+globalconfig=/Users/zkat/code/npm/test/fixtures/config/globalconfig
email=i@izs.me
env-thing=asdf
init.author.name=Isaac Z. Schlueter
diff --git a/deps/npm/test/tap/add-remote-git-get-resolved.js b/deps/npm/test/tap/add-remote-git-get-resolved.js
index 4a4f0a5fe..0bd0f29eb 100644
--- a/deps/npm/test/tap/add-remote-git-get-resolved.js
+++ b/deps/npm/test/tap/add-remote-git-get-resolved.js
@@ -4,6 +4,17 @@ var test = require('tap').test
var npm = require('../../lib/npm.js')
var common = require('../common-tap.js')
+var normalizeGitUrl = require('normalize-git-url')
+var getResolved = null
+
+/**
+ * Note: This is here because `normalizeGitUrl` is usually called
+ * before getResolved is, and receives *that* URL.
+ */
+function tryGetResolved(uri, treeish) {
+ return getResolved(normalizeGitUrl(uri).url, treeish)
+}
+
test('setup', function (t) {
var opts = {
registry: common.registry,
@@ -11,14 +22,12 @@ test('setup', function (t) {
}
npm.load(opts, function (er) {
t.ifError(er, 'npm loaded without error')
-
+ getResolved = require('../../lib/cache/add-remote-git.js').getResolved
t.end()
})
})
test('add-remote-git#get-resolved git: passthru', function (t) {
- var getResolved = require('../../lib/cache/add-remote-git.js').getResolved
-
verify('git:github.com/foo/repo')
verify('git:github.com/foo/repo.git')
verify('git://github.com/foo/repo#decadacefadabade')
@@ -26,7 +35,7 @@ test('add-remote-git#get-resolved git: passthru', function (t) {
function verify (uri) {
t.equal(
- getResolved(uri, 'decadacefadabade'),
+ tryGetResolved(uri, 'decadacefadabade'),
'git://github.com/foo/repo.git#decadacefadabade',
uri + ' normalized to canonical form git://github.com/foo/repo.git#decadacefadabade'
)
@@ -35,8 +44,6 @@ test('add-remote-git#get-resolved git: passthru', function (t) {
})
test('add-remote-git#get-resolved SSH', function (t) {
- var getResolved = require('../../lib/cache/add-remote-git.js').getResolved
-
t.comment('tests for https://github.com/npm/npm/issues/7961')
verify('git@github.com:foo/repo')
verify('git@github.com:foo/repo#master')
@@ -45,7 +52,7 @@ test('add-remote-git#get-resolved SSH', function (t) {
function verify (uri) {
t.equal(
- getResolved(uri, 'decadacefadabade'),
+ tryGetResolved(uri, 'decadacefadabade'),
'git+ssh://git@github.com/foo/repo.git#decadacefadabade',
uri + ' normalized to canonical form git+ssh://git@github.com/foo/repo.git#decadacefadabade'
)
@@ -54,16 +61,20 @@ test('add-remote-git#get-resolved SSH', function (t) {
})
test('add-remote-git#get-resolved HTTPS', function (t) {
- var getResolved = require('../../lib/cache/add-remote-git.js').getResolved
-
verify('https://github.com/foo/repo')
verify('https://github.com/foo/repo#master')
verify('git+https://github.com/foo/repo.git#master')
verify('git+https://github.com/foo/repo#decadacefadabade')
+ // DEPRECATED
+ // this is an invalid URL but we normalize it
+ // anyway. Users shouldn't use this in the future. See note
+ // below for how this affected non-hosted URLs.
+ // See https://github.com/npm/npm/issues/8881
+ verify('git+https://github.com:foo/repo.git#master')
function verify (uri) {
t.equal(
- getResolved(uri, 'decadacefadabade'),
+ tryGetResolved(uri, 'decadacefadabade'),
'git+https://github.com/foo/repo.git#decadacefadabade',
uri + ' normalized to canonical form git+https://github.com/foo/repo.git#decadacefadabade'
)
@@ -72,30 +83,48 @@ test('add-remote-git#get-resolved HTTPS', function (t) {
})
test('add-remote-git#get-resolved edge cases', function (t) {
- var getResolved = require('../../lib/cache/add-remote-git.js').getResolved
- t.notOk(
- getResolved('git@bananaboat.com:galbi.git', 'decadacefadabade'),
- 'non-hosted Git SSH non-URI strings are invalid'
+ t.equal(
+ tryGetResolved('git+ssh://user@bananaboat.com:galbi/blah.git', 'decadacefadabade'),
+ 'git+ssh://user@bananaboat.com:galbi/blah.git#decadacefadabade',
+ 'don\'t break non-hosted scp-style locations'
+ )
+
+ t.equal(
+ tryGetResolved('git+ssh://bananaboat:galbi/blah', 'decadacefadabade'),
+ 'git+ssh://bananaboat:galbi/blah#decadacefadabade',
+ 'don\'t break non-hosted scp-style locations'
)
+ // DEPRECATED
+ // When we were normalizing all git URIs, git+https: was being
+ // automatically converted to ssh:. Some users were relying
+ // on this funky behavior, so after removing the aggressive
+ // normalization from non-hosted URIs, we brought this back.
+ // See https://github.com/npm/npm/issues/8881
t.equal(
- getResolved('git+ssh://git.bananaboat.net/foo', 'decadacefadabade'),
+ tryGetResolved('git+https://bananaboat:galbi/blah', 'decadacefadabade'),
+ 'git+https://bananaboat/galbi/blah#decadacefadabade',
+ 'don\'t break non-hosted scp-style locations'
+ )
+
+ t.equal(
+ tryGetResolved('git+ssh://git.bananaboat.net/foo', 'decadacefadabade'),
'git+ssh://git.bananaboat.net/foo#decadacefadabade',
'don\'t break non-hosted SSH URLs'
)
t.equal(
- getResolved('git://gitbub.com/foo/bar.git', 'decadacefadabade'),
- 'git://gitbub.com/foo/bar.git#decadacefadabade',
- 'don\'t break non-hosted git: URLs'
+ tryGetResolved('git+ssh://git.bananaboat.net:/foo', 'decadacefadabade'),
+ 'git+ssh://git.bananaboat.net:/foo#decadacefadabade',
+ 'don\'t break non-hosted SSH URLs'
)
- t.comment('test for https://github.com/npm/npm/issues/3224')
t.equal(
- getResolved('git+ssh://git@git.example.com:my-repo.git#9abe82cb339a70065e75300f62b742622774693c', 'decadacefadabade'),
- 'git+ssh://git@git.example.com:my-repo.git#decadacefadabade',
- 'preserve weird colon in semi-standard ssh:// URLs'
+ tryGetResolved('git://gitbub.com/foo/bar.git', 'decadacefadabade'),
+ 'git://gitbub.com/foo/bar.git#decadacefadabade',
+ 'don\'t break non-hosted git: URLs'
)
+
t.end()
})
diff --git a/deps/npm/test/tap/peer-deps-invalid.js b/deps/npm/test/tap/peer-deps-invalid.js
index b256b8e2e..453f9617f 100644
--- a/deps/npm/test/tap/peer-deps-invalid.js
+++ b/deps/npm/test/tap/peer-deps-invalid.js
@@ -69,25 +69,25 @@ test('installing dependencies that have conflicting peerDependencies', function
'/invalid.js': [200, path.join(pkg, 'file-fail.js')]
}
}
- mr({port: common.port, mocks: customMocks}, function (err, s) {
+ mr({port: common.port, mocks: customMocks}, function (err, s) { // create mock registry.
t.ifError(err, 'mock registry started')
- npm.load(
- {
- cache: cache,
- registry: common.registry
- },
- function () {
- npm.commands.install([], function (err) {
- if (!err) {
- t.fail('No error!')
- } else {
- t.equal(err.code, 'EPEERINVALID')
- }
- s.close()
- t.end()
- })
- }
- )
+ npm.load({
+ cache: pkg + "/cache",
+ registry: common.registry
+ }, function () {
+ npm.commands.install([], function (err) {
+ if (!err) {
+ t.fail("No error!")
+ } else {
+ t.equal(err.code, "EPEERINVALID")
+ t.equal(err.packageName, "underscore")
+ t.equal(err.packageVersion, "1.3.3")
+ t.equal(err.message, "The package underscore@1.3.3 does not satisfy its siblings' peerDependencies requirements!")
+ }
+ s.close() // shutdown mock registry.
+ t.end()
+ })
+ })
})
})
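The new assertions above show that `EPEERINVALID` errors now carry the offending package's name and version. A sketch of how a caller of the programmatic API might use those fields (the fields come from the assertions; everything else is illustrative, not npm API documentation):

```js
var npm = require('../../lib/npm.js') // same programmatic API the test exercises

npm.load(function () {
  npm.commands.install([], function (err) {
    if (err && err.code === 'EPEERINVALID') {
      // These are the fields the updated test asserts on:
      console.error(
        '%s@%s does not satisfy its siblings\' peerDependencies requirements',
        err.packageName, err.packageVersion
      )
    }
  })
})
```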
diff --git a/deps/npm/test/tap/ping.js b/deps/npm/test/tap/ping.js
new file mode 100644
index 000000000..13fc4bec9
--- /dev/null
+++ b/deps/npm/test/tap/ping.js
@@ -0,0 +1,68 @@
+var fs = require('fs')
+var path = require('path')
+
+var mkdirp = require('mkdirp')
+var mr = require('npm-registry-mock')
+var rimraf = require('rimraf')
+var test = require('tap').test
+
+var common = require('../common-tap.js')
+
+var pkg = path.resolve(__dirname, 'ping')
+var opts = { cwd: pkg }
+
+var outfile = path.join(pkg, '_npmrc')
+
+var contents = function () {
+}.toString().split('\n').slice(1, -1).join('\n')
+
+var pingResponse = {
+ host: 'registry.npmjs.org',
+ ok: true,
+ username: null,
+ peer: 'example.com'
+}
+
+function mocks (server) {
+ server.get('/-/ping?write=true').reply(200, JSON.stringify(pingResponse))
+}
+
+test('setup', function (t) {
+ cleanup()
+ setup()
+ t.end()
+})
+
+test('npm ping', function (t) {
+ mr({ port: common.port, plugin: mocks }, function (err, s) {
+ if (err) throw err
+
+ common.npm([
+ 'ping',
+ '--registry', common.registry,
+ '--loglevel', 'silent',
+ '--userconfig', outfile
+ ], opts, function (err, code, stdout) {
+ s.close()
+ t.ifError(err, 'no error output')
+ t.notOk(code, 'exited OK')
+
+ t.same(JSON.parse(stdout), pingResponse)
+ t.end()
+ })
+ })
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
+function setup () {
+ mkdirp.sync(pkg)
+ fs.writeFileSync(outfile, contents)
+}
+
+function cleanup () {
+ rimraf.sync(pkg)
+}
diff --git a/deps/npm/test/tap/splat-with-only-prerelease-to-latest.js b/deps/npm/test/tap/splat-with-only-prerelease-to-latest.js
new file mode 100644
index 000000000..d402bed29
--- /dev/null
+++ b/deps/npm/test/tap/splat-with-only-prerelease-to-latest.js
@@ -0,0 +1,81 @@
+'use strict'
+var test = require('tap').test
+var npm = require('../../lib/npm')
+var log = require('npmlog')
+var stream = require('readable-stream')
+
+var moduleName = 'xyzzy-wibble'
+var testModule = {
+ name: moduleName,
+ 'dist-tags': {
+ latest: '1.3.0-a'
+ },
+ versions: {
+ '1.0.0-a': {
+ name: moduleName,
+ version: '1.0.0-a',
+ dist: {
+ shasum: 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ tarball: 'http://registry.npmjs.org/aproba/-/xyzzy-wibble-1.0.0-a.tgz'
+ }
+ },
+ '1.1.0-a': {
+ name: moduleName,
+ version: '1.1.0-a',
+ dist: {
+ shasum: 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ tarball: 'http://registry.npmjs.org/aproba/-/xyzzy-wibble-1.1.0-a.tgz'
+ }
+ },
+ '1.2.0-a': {
+ name: moduleName,
+ version: '1.2.0-a',
+ dist: {
+ shasum: 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ tarball: 'http://registry.npmjs.org/aproba/-/xyzzy-wibble-1.2.0-a.tgz'
+ }
+ },
+ '1.3.0-a': {
+ name: moduleName,
+ version: '1.3.0-a',
+ dist: {
+ shasum: 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ tarball: 'http://registry.npmjs.org/aproba/-/xyzzy-wibble-1.3.0-a.tgz'
+ }
+ },
+ },
+}
+
+var lastFetched
+test('setup', function (t) {
+ npm.load(function(){
+ npm.config.set('loglevel', 'silly')
+ npm.registry = {
+ get: function (uri, opts, cb) {
+ setImmediate(function () {
+ cb(null, testModule, null, {statusCode: 200})
+ })
+ },
+ fetch: function (u, opts, cb) {
+ lastFetched = u
+ setImmediate(function () {
+ var empty = new stream.Readable()
+ empty.push(null)
+ cb(null, empty)
+ })
+ }
+ }
+ t.end()
+ })
+})
+
+
+test('splat', function (t) {
+ t.plan(3)
+ var addNamed = require('../../lib/cache/add-named.js')
+ addNamed('xyzzy-wibble', '*', testModule, function (err, pkg) {
+    t.error(err, 'Successfully resolved a splat package')
+ t.is(pkg.name, moduleName)
+ t.is(pkg.version, testModule['dist-tags'].latest)
+ })
+})
diff --git a/deps/npm/test/tap/version-git-not-clean.js b/deps/npm/test/tap/version-git-not-clean.js
index 1deae49f9..22ffb7c98 100644
--- a/deps/npm/test/tap/version-git-not-clean.js
+++ b/deps/npm/test/tap/version-git-not-clean.js
@@ -1,60 +1,48 @@
-var common = require("../common-tap.js")
-var test = require("tap").test
-var npm = require("../../")
-var osenv = require("osenv")
-var path = require("path")
-var fs = require("fs")
-var rimraf = require("rimraf")
-var mkdirp = require("mkdirp")
-var which = require("which")
-var spawn = require("child_process").spawn
+var common = require('../common-tap.js')
+var test = require('tap').test
+var npm = require('../../')
+var osenv = require('osenv')
+var path = require('path')
+var fs = require('fs')
+var rimraf = require('rimraf')
+var mkdirp = require('mkdirp')
+var which = require('which')
+var spawn = require('child_process').spawn
-var pkg = path.resolve(__dirname, "version-git-not-clean")
-var cache = path.resolve(pkg, "cache")
+var pkg = path.resolve(__dirname, 'version-git-not-clean')
+var cache = path.resolve(pkg, 'cache')
-test("npm version <semver> with working directory not clean", function (t) {
+test('npm version <semver> with working directory not clean', function (t) {
setup()
npm.load({ cache: cache, registry: common.registry, prefix: pkg }, function () {
- which("git", function (err, git) {
- t.ifError(err, "git found")
+ which('git', function (err, git) {
+ t.ifError(err, 'git found')
- function gitInit(_cb) {
- var child = spawn(git, ["init"])
- var out = ""
- child.stdout.on("data", function (d) {
- out += d.toString()
- })
- child.on("exit", function () {
- return _cb(out)
- })
- }
-
- function addPackageJSON(_cb) {
- var data = JSON.stringify({ name: "blah", version: "0.1.2" })
- fs.writeFile("package.json", data, function() {
- var child = spawn(git, ["add", "package.json"])
- child.on("exit", function () {
- var child2 = spawn(git, ["commit", "package.json", "-m", "init"])
- var out = ""
- child2.stdout.on("data", function (d) {
+ function addPackageJSON (_cb) {
+ var data = JSON.stringify({ name: 'blah', version: '0.1.2' })
+ fs.writeFile('package.json', data, function () {
+ var child = spawn(git, ['add', 'package.json'])
+ child.on('exit', function () {
+ var child2 = spawn(git, ['commit', 'package.json', '-m', 'init'])
+ var out = ''
+ child2.stdout.on('data', function (d) {
out += d.toString()
})
- child2.on("exit", function () {
+ child2.on('exit', function () {
return _cb(out)
})
})
})
}
- gitInit(function() {
- addPackageJSON(function() {
- var data = JSON.stringify({ name: "blah", version: "0.1.3" })
- fs.writeFile("package.json", data, function() {
- npm.commands.version(["patch"], function (err) {
+ common.makeGitRepo({path: pkg}, function () {
+ addPackageJSON(function () {
+ var data = JSON.stringify({ name: 'blah', version: '0.1.3' })
+ fs.writeFile('package.json', data, function () {
+ npm.commands.version(['patch'], function (err) {
if (!err) {
- t.fail("should fail on non-clean working directory")
- }
- else {
+ t.fail('should fail on non-clean working directory')
+ } else {
t.ok(err.message.match(/Git working directory not clean./))
t.ok(err.message.match(/M package.json/))
}
@@ -67,7 +55,33 @@ test("npm version <semver> with working directory not clean", function (t) {
})
})
-test("cleanup", function (t) {
+test('npm version <semver> --force with working directory not clean', function (t) {
+ common.npm(
+ [
+ '--force',
+ '--no-sign-git-tag',
+ '--registry', common.registry,
+ '--prefix', pkg,
+ 'version',
+ 'patch'
+ ],
+ { cwd: pkg, env: {PATH: process.env.PATH} },
+ function (err, code, stdout, stderr) {
+ t.ifError(err, 'npm version ran without issue')
+ t.notOk(code, 'exited with a non-error code')
+ var errorLines = stderr.trim().split('\n')
+ .map(function (line) {
+ return line.trim()
+ })
+ .filter(function (line) {
+ return !line.indexOf('using --force')
+ })
+ t.notOk(errorLines.length, 'no error output')
+ t.end()
+ })
+})
+
+test('cleanup', function (t) {
// windows fix for locked files
process.chdir(osenv.tmpdir())
@@ -75,7 +89,7 @@ test("cleanup", function (t) {
t.end()
})
-function setup() {
+function setup () {
mkdirp.sync(pkg)
mkdirp.sync(cache)
process.chdir(pkg)
diff --git a/deps/npm/test/tap/version-lifecycle.js b/deps/npm/test/tap/version-lifecycle.js
index da0af1086..5d78b71d5 100644
--- a/deps/npm/test/tap/version-lifecycle.js
+++ b/deps/npm/test/tap/version-lifecycle.js
@@ -10,6 +10,8 @@ var common = require('../common-tap.js')
var npm = require('../../')
var pkg = path.resolve(__dirname, 'version-lifecycle')
var cache = path.resolve(pkg, 'cache')
+var npmrc = path.resolve(pkg, './.npmrc')
+var configContents = 'sign-git-tag=false\n'
test('npm version <semver> with failing preversion lifecycle script', function (t) {
setup()
@@ -24,7 +26,30 @@ test('npm version <semver> with failing preversion lifecycle script', function (
}), 'utf8')
fs.writeFileSync(path.resolve(pkg, 'fail.sh'), 'exit 50', 'utf8')
fs.chmodSync(path.resolve(pkg, 'fail.sh'), 448)
- npm.load({cache: cache, registry: common.registry}, function () {
+ npm.load({cache: cache, 'sign-git-tag': false, registry: common.registry}, function () {
+ var version = require('../../lib/version')
+ version(['patch'], function (err) {
+ t.ok(err)
+ t.ok(err.message.match(/Exit status 50/))
+ t.end()
+ })
+ })
+})
+
+test('npm version <semver> with failing version lifecycle script', function (t) {
+ setup()
+ fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({
+ author: 'Alex Wolfe',
+ name: 'version-lifecycle',
+ version: '0.0.0',
+ description: 'Test for npm version if postversion script fails',
+ scripts: {
+ version: './fail.sh'
+ }
+ }), 'utf8')
+ fs.writeFileSync(path.resolve(pkg, 'fail.sh'), 'exit 50', 'utf8')
+ fs.chmodSync(path.resolve(pkg, 'fail.sh'), 448)
+ npm.load({cache: cache, 'sign-git-tag': false, registry: common.registry}, function () {
var version = require('../../lib/version')
version(['patch'], function (err) {
t.ok(err)
@@ -47,7 +72,7 @@ test('npm version <semver> with failing postversion lifecycle script', function
}), 'utf8')
fs.writeFileSync(path.resolve(pkg, 'fail.sh'), 'exit 50', 'utf8')
fs.chmodSync(path.resolve(pkg, 'fail.sh'), 448)
- npm.load({cache: cache, registry: common.registry}, function () {
+ npm.load({cache: cache, 'sign-git-tag': false, registry: common.registry}, function () {
var version = require('../../lib/version')
version(['patch'], function (err) {
t.ok(err)
@@ -57,6 +82,52 @@ test('npm version <semver> with failing postversion lifecycle script', function
})
})
+test('npm version <semver> execution order', function (t) {
+ setup()
+ fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({
+ author: 'Alex Wolfe',
+ name: 'version-lifecycle',
+ version: '0.0.0',
+ description: 'Test for npm version if postversion script fails',
+ scripts: {
+ preversion: './preversion.sh',
+ version: './version.sh',
+ postversion: './postversion.sh'
+ }
+ }), 'utf8')
+ makeScript('preversion')
+ makeScript('version')
+ makeScript('postversion')
+ npm.load({cache: cache, 'sign-git-tag': false, registry: common.registry}, function () {
+ common.makeGitRepo({path: pkg}, function (err, git) {
+ t.ifError(err, 'git bootstrap ran without error')
+
+ var version = require('../../lib/version')
+ version(['patch'], function (err) {
+ t.ifError(err, 'version command complete')
+
+ t.equal('0.0.0', readPackage('preversion').version, 'preversion')
+ t.deepEqual(readStatus('preversion', t), {
+ 'preversion-package.json': 'A'
+ })
+
+ t.equal('0.0.1', readPackage('version').version, 'version')
+ t.deepEqual(readStatus('version', t), {
+ 'package.json': 'M',
+ 'preversion-package.json': 'A',
+ 'version-package.json': 'A'
+ })
+
+ t.equal('0.0.1', readPackage('postversion').version, 'postversion')
+ t.deepEqual(readStatus('postversion', t), {
+ 'postversion-package.json': 'A'
+ })
+ t.end()
+ })
+ })
+ })
+})
+
test('cleanup', function (t) {
process.chdir(osenv.tmpdir())
rimraf.sync(pkg)
@@ -67,5 +138,37 @@ function setup () {
mkdirp.sync(pkg)
mkdirp.sync(path.join(pkg, 'node_modules'))
mkdirp.sync(cache)
+ fs.writeFileSync(npmrc, configContents, 'ascii')
process.chdir(pkg)
}
+
+function makeScript (lifecycle) {
+ var contents = [
+ 'cp package.json ' + lifecycle + '-package.json',
+ 'git add ' + lifecycle + '-package.json',
+ 'git status --porcelain > ' + lifecycle + '-git.txt'
+ ].join('\n')
+ var scriptPath = path.join(pkg, lifecycle + '.sh')
+ fs.writeFileSync(scriptPath, contents, 'utf-8')
+ fs.chmodSync(scriptPath, 448)
+}
+
+function readPackage (lifecycle) {
+ return JSON.parse(fs.readFileSync(path.join(pkg, lifecycle + '-package.json'), 'utf-8'))
+}
+
+function readStatus (lifecycle, t) {
+ var status = {}
+ fs.readFileSync(path.join(pkg, lifecycle + '-git.txt'), 'utf-8')
+ .trim()
+ .split('\n')
+ .forEach(function (line) {
+ line = line.trim()
+ if (line && !line.match(/^\?\? /)) {
+ var parts = line.split(/\s+/)
+ t.equal(parts.length, 2, lifecycle + ' : git status has too many words : ' + line)
+ status[parts[1].trim()] = parts[0].trim()
+ }
+ })
+ return status
+}
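Taken together, the execution-order assertions above pin down when each hook runs: `preversion` still sees the old version, `version` runs after the bump but before the release commit (so anything it `git add`s rides along in that commit), and `postversion` runs after the commit and tag. A hypothetical `scripts` configuration relying on that ordering (the commands are examples, not from the patch):

```js
// Hypothetical package.json "scripts" entries, shown as a plain object:
var scripts = {
  preversion: 'npm test',                       // runs against the old version
  version: 'npm run build && git add -A dist',  // runs after the bump, before the commit
  postversion: 'git push && git push --tags'    // runs after the commit and tag
}
module.exports = scripts
```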